def do_package(d):
    # Change the following version to cause sstate to invalidate the package
    # cache.  This is useful if an item this class depends on changes in a
    # way that the output of this class changes.  rpmdeps is a good example
    # as any change to rpmdeps requires this to be rerun.
    # PACKAGE_BBCLASS_VERSION = "4"

    # Init cachedpath
    global cpath
    cpath = oe.cachedpath.CachedPath()

    ###########################################################################
    # Sanity test the setup
    ###########################################################################

    packages = (d.getVar('PACKAGES') or "").split()
    if not packages:
        bb.debug(1, "No packages to build, skipping do_package")
        return

    workdir = d.getVar('WORKDIR')
    outdir = d.getVar('DEPLOY_DIR')
    dest = d.getVar('D')
    dvar = d.getVar('PKGD')
    pn = d.getVar('PN')

    if not workdir or not outdir or not dest or not dvar or not pn:
        msg = "WORKDIR, DEPLOY_DIR, D, PN and PKGD all must be defined, unable to package"
        package_qa_handle_error("var-undefined", msg, d)
        return

    bb.build.exec_func("package_convert_pr_autoinc", d)

    ###########################################################################
    # Optimisations
    ###########################################################################

    # Continually expanding complex expressions is inefficient, particularly
    # when we write to the datastore and invalidate the expansion cache. This
    # code pre-expands some frequently used variables.

    def expandVar(x, d):
        d.setVar(x, d.getVar(x))

    for x in 'PN', 'PV', 'BPN', 'TARGET_SYS', 'EXTENDPRAUTO':
        expandVar(x, d)
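
    # For example (illustrative): if PN were defined in terms of other
    # variables, d.getVar('PN') would re-expand them on each read; after the
    # loop above it returns a pre-expanded literal string instead.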

    ###########################################################################
    # Setup PKGD (from D)
    ###########################################################################

    for f in (d.getVar('PACKAGEBUILDPKGD') or '').split():
        bb.build.exec_func(f, d)

    ###########################################################################
    # Split up PKGD into PKGDEST
    ###########################################################################

    # Re-init the cached path object: the PACKAGEBUILDPKGD functions above
    # may have modified the filesystem under PKGD, invalidating the cache
    cpath = oe.cachedpath.CachedPath()

    for f in (d.getVar('PACKAGESPLITFUNCS') or '').split():
        bb.build.exec_func(f, d)

    ###########################################################################
    # Process PKGDEST
    ###########################################################################

    # Build global list of files in each split package
    global pkgfiles
    pkgfiles = {}
    packages = d.getVar('PACKAGES').split()
    pkgdest = d.getVar('PKGDEST')
    for pkg in packages:
        pkgfiles[pkg] = []
        for walkroot, dirs, files in cpath.walk(pkgdest + "/" + pkg):
            for file in files:
                pkgfiles[pkg].append(walkroot + os.sep + file)

    for f in (d.getVar('PACKAGEFUNCS') or '').split():
        bb.build.exec_func(f, d)

    qa_sane = d.getVar("QA_SANE")
    if not qa_sane:
        bb.fatal("Fatal QA errors found, failing task.")

do_package(d)

def package_do_pkgconfig(d):
    import re

    packages = d.getVar('PACKAGES')
    workdir = d.getVar('WORKDIR')
    pkgdest = d.getVar('PKGDEST')

    shlibs_dirs = d.getVar('SHLIBSDIRS').split()
    shlibswork_dir = d.getVar('SHLIBSWORKDIR')

    pc_re = re.compile(r'(.*)\.pc$')
    var_re = re.compile(r'(.*)=(.*)')
    field_re = re.compile(r'(.*): (.*)')
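    # Illustrative .pc content these regexes parse (hypothetical module "foo"):
    #   prefix=/usr
    #   libdir=${prefix}/lib
    #   Name: foo
    #   Requires: glib-2.0, zlib
    # var_re matches the variable assignments, field_re the "Name:"/"Requires:"
    # fields, and pc_re the .pc file name itself.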

    pkgconfig_provided = {}
    pkgconfig_needed = {}
    for pkg in packages.split():
        pkgconfig_provided[pkg] = []
        pkgconfig_needed[pkg] = []
        for file in pkgfiles[pkg]:
            m = pc_re.match(file)
            if m:
                pd = bb.data.init()
                name = m.group(1)
                pkgconfig_provided[pkg].append(name)
                if not os.access(file, os.R_OK):
                    continue
                with open(file, 'r') as f:
                    lines = f.readlines()
                for l in lines:
                    m = var_re.match(l)
                    if m:
                        name = m.group(1)
                        val = m.group(2)
                        pd.setVar(name, pd.expand(val))
                        continue
                    m = field_re.match(l)
                    if m:
                        hdr = m.group(1)
                        exp = pd.expand(m.group(2))
                        if hdr == 'Requires':
                            pkgconfig_needed[pkg] += exp.replace(',', ' ').split()

    for pkg in packages.split():
        pkgs_file = os.path.join(shlibswork_dir, pkg + ".pclist")
        if pkgconfig_provided[pkg]:
            with open(pkgs_file, 'w') as f:
                for p in pkgconfig_provided[pkg]:
                    f.write('%s\n' % p)

    # Go from least to most specific since the last one found wins
    for dir in reversed(shlibs_dirs):
        if not os.path.exists(dir):
            continue
        for file in sorted(os.listdir(dir)):
            m = re.match(r'^(.*)\.pclist$', file)
            if m:
                pkg = m.group(1)
                with open(os.path.join(dir, file)) as fd:
                    lines = fd.readlines()
                pkgconfig_provided[pkg] = []
                for l in lines:
                    pkgconfig_provided[pkg].append(l.rstrip())

    for pkg in packages.split():
        deps = []
        for n in pkgconfig_needed[pkg]:
            found = False
            for k in pkgconfig_provided:
                if n in pkgconfig_provided[k]:
                    if k != pkg and k not in deps:
                        deps.append(k)
                    found = True
            if not found:
                bb.note("couldn't find pkgconfig module '%s' in any package" % n)
        deps_file = os.path.join(pkgdest, pkg + ".pcdeps")
        if deps:
            with open(deps_file, 'w') as fd:
                for dep in deps:
                    fd.write(dep + '\n')

def populate_packages(d):
    import glob, re

    workdir = d.getVar('WORKDIR')
    outdir = d.getVar('DEPLOY_DIR')
    dvar = d.getVar('PKGD')
    packages = d.getVar('PACKAGES').split()
    pn = d.getVar('PN')

    bb.utils.mkdirhier(outdir)
    os.chdir(dvar)

    autodebug = not (d.getVar("NOAUTOPACKAGEDEBUG") or False)

    split_source_package = (d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-with-srcpkg')

    # If debug-with-srcpkg mode is enabled then add the source package if it
    # doesn't exist and add the source file contents to the source package.
    if split_source_package:
        src_package_name = ('%s-src' % d.getVar('PN'))
        if src_package_name not in packages:
            packages.append(src_package_name)
        d.setVar('FILES_%s' % src_package_name, '/usr/src/debug')

    # Sanity check PACKAGES for duplicates
    # Sanity should be moved to sanity.bbclass once we have the infrastructure
    package_dict = {}

    for i, pkg in enumerate(packages):
        if pkg in package_dict:
            msg = "%s is listed in PACKAGES multiple times, this leads to packaging errors." % pkg
            package_qa_handle_error("packages-list", msg, d)
        # Ensure the source package gets the chance to pick up the source files
        # before the debug package by ordering it first in PACKAGES. Whether it
        # actually picks up any source files is controlled by
        # PACKAGE_DEBUG_SPLIT_STYLE.
        elif pkg.endswith("-src"):
            package_dict[pkg] = (10, i)
        elif autodebug and pkg.endswith("-dbg"):
            package_dict[pkg] = (30, i)
        else:
            package_dict[pkg] = (50, i)
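    # Worked example (hypothetical): PACKAGES = "foo foo-dev foo-src foo-dbg"
    # gives weights foo-src=10, foo-dbg=30 and foo=foo-dev=50, so the sort
    # below yields "foo-src foo-dbg foo foo-dev" (ties keep their original
    # order via the index)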
    packages = sorted(package_dict.keys(), key=package_dict.get)
    d.setVar('PACKAGES', ' '.join(packages))
    pkgdest = d.getVar('PKGDEST')

    seen = []

    # os.mkdir masks the permissions with umask so we have to unset it first
    oldumask = os.umask(0)

    debug = []
    for root, dirs, files in cpath.walk(dvar):
        dir = root[len(dvar):]
        if not dir:
            dir = os.sep
        for f in (files + dirs):
            path = "." + os.path.join(dir, f)
            if "/.debug/" in path or "/.debug-static/" in path or path.endswith("/.debug"):
                debug.append(path)

    for pkg in packages:
        root = os.path.join(pkgdest, pkg)
        bb.utils.mkdirhier(root)

        filesvar = d.getVar('FILES_%s' % pkg) or ""
        if "//" in filesvar:
            msg = "FILES variable for package %s contains '//' which is invalid. Attempting to fix this but you should correct the metadata.\n" % pkg
            package_qa_handle_error("files-invalid", msg, d)
            filesvar.replace("//", "/")

        origfiles = filesvar.split()
        files, symlink_paths = files_from_filevars(origfiles)

        if autodebug and pkg.endswith("-dbg"):
            files.extend(debug)

        for file in files:
            if (not cpath.islink(file)) and (not cpath.exists(file)):
                continue
            if file in seen:
                continue
            seen.append(file)

            def mkdir(src, dest, p):
                src = os.path.join(src, p)
                dest = os.path.join(dest, p)
                fstat = cpath.stat(src)
                os.mkdir(dest)
                os.chmod(dest, fstat.st_mode)
                os.chown(dest, fstat.st_uid, fstat.st_gid)
                if p not in seen:
                    seen.append(p)
                cpath.updatecache(dest)

            def mkdir_recurse(src, dest, paths):
                if cpath.exists(dest + '/' + paths):
                    return
                while paths.startswith("./"):
                    paths = paths[2:]
                p = "."
                for c in paths.split("/"):
                    p = os.path.join(p, c)
                    if not cpath.exists(os.path.join(dest, p)):
                        mkdir(src, dest, p)

            if cpath.isdir(file) and not cpath.islink(file):
                mkdir_recurse(dvar, root, file)
                continue

            mkdir_recurse(dvar, root, os.path.dirname(file))
            fpath = os.path.join(root,file)
            if not cpath.islink(file):
                os.link(file, fpath)
                continue
            ret = bb.utils.copyfile(file, fpath)
            if ret is False or ret == 0:
                bb.fatal("File population failed")

        # Check if symlink paths exist
        for file in symlink_paths:
            if not os.path.exists(os.path.join(root,file)):
                bb.fatal("File '%s' cannot be packaged into '%s' because its "
                         "parent directory structure does not exist. One of "
                         "its parent directories is a symlink whose target "
                         "directory is not included in the package." %
                         (file, pkg))

    os.umask(oldumask)
    os.chdir(workdir)

    # Handle LICENSE_EXCLUSION
    package_list = []
    for pkg in packages:
        licenses = d.getVar('LICENSE_EXCLUSION-' + pkg)
        if licenses:
            msg = "Excluding %s from packaging as it has incompatible license(s): %s" % (pkg, licenses)
            package_qa_handle_error("incompatible-license", msg, d)
        else:
            package_list.append(pkg)
    d.setVar('PACKAGES', ' '.join(package_list))

    unshipped = []
    for root, dirs, files in cpath.walk(dvar):
        dir = root[len(dvar):]
        if not dir:
            dir = os.sep
        for f in (files + dirs):
            path = os.path.join(dir, f)
            if ('.' + path) not in seen:
                unshipped.append(path)

    if unshipped:
        msg = pn + ": Files/directories were installed but not shipped in any package:"
        if "installed-vs-shipped" in (d.getVar('INSANE_SKIP_' + pn) or "").split():
            bb.note("Package %s skipping QA tests: installed-vs-shipped" % pn)
        else:
            for f in unshipped:
                msg = msg + "\n  " + f
            msg = msg + "\nPlease set FILES such that these items are packaged. Alternatively if they are unneeded, avoid installing them or delete them within do_install.\n"
            msg = msg + "%s: %d installed and not shipped files." % (pn, len(unshipped))
            package_qa_handle_error("installed-vs-shipped", msg, d)

def populate_packages_updatealternatives(d):
    if not update_alternatives_enabled(d):
        return

    # Do actual update alternatives processing
    for pkg in (d.getVar('PACKAGES') or "").split():
        # Create post install/removal scripts
        alt_setup_links = ""
        alt_remove_links = ""
        updates = update_alternatives_alt_targets(d, pkg)
        for alt_name, alt_link, alt_target, alt_priority in updates:
            alt_setup_links  += '\tupdate-alternatives --install %s %s %s %s\n' % (alt_link, alt_name, alt_target, alt_priority)
            alt_remove_links += '\tupdate-alternatives --remove  %s %s\n' % (alt_name, alt_target)
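
        # For example (hypothetical alternative "vi"), one update emits:
        #   update-alternatives --install /usr/bin/vi vi /usr/bin/vim.vim 100
        #   update-alternatives --remove  vi /usr/bin/vim.vim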

        if alt_setup_links:
            # RDEPENDS setup
            provider = d.getVar('VIRTUAL-RUNTIME_update-alternatives')
            if provider:
                #bb.note('adding runtime requirement for update-alternatives for %s' % pkg)
                d.appendVar('RDEPENDS_%s' % pkg, ' ' + d.getVar('MLPREFIX', False) + provider)

            bb.note('adding update-alternatives calls to postinst/prerm for %s' % pkg)
            bb.note('%s' % alt_setup_links)
            postinst = d.getVar('pkg_postinst_%s' % pkg)
            if postinst:
                postinst = alt_setup_links + postinst
            else:
                postinst = '#!/bin/sh\n' + alt_setup_links
            d.setVar('pkg_postinst_%s' % pkg, postinst)

            bb.note('%s' % alt_remove_links)
            prerm = d.getVar('pkg_prerm_%s' % pkg) or '#!/bin/sh\n'
            prerm += alt_remove_links
            d.setVar('pkg_prerm_%s' % pkg, prerm)

def package_qa_handle_error(error_class, error_msg, d):
    if error_class in (d.getVar("ERROR_QA") or "").split():
        package_qa_write_error(error_class, error_msg, d)
        bb.error("QA Issue: %s [%s]" % (error_msg, error_class))
        d.setVar("QA_SANE", False)
        return False
    elif error_class in (d.getVar("WARN_QA") or "").split():
        package_qa_write_error(error_class, error_msg, d)
        bb.warn("QA Issue: %s [%s]" % (error_msg, error_class))
    else:
        bb.note("QA Issue: %s [%s]" % (error_msg, error_class))
    return True

def package_fixsymlinks(d):
    import errno
    pkgdest = d.getVar('PKGDEST')
    packages = d.getVar("PACKAGES", False).split()

    dangling_links = {}
    pkg_files = {}
    for pkg in packages:
        dangling_links[pkg] = []
        pkg_files[pkg] = []
        inst_root = os.path.join(pkgdest, pkg)
        for path in pkgfiles[pkg]:
            rpath = path[len(inst_root):]
            pkg_files[pkg].append(rpath)
            rtarget = cpath.realpath(path, inst_root, True, assume_dir=True)
            if not cpath.lexists(rtarget):
                dangling_links[pkg].append(os.path.normpath(rtarget[len(inst_root):]))

    newrdepends = {}
    for pkg in dangling_links:
        for l in dangling_links[pkg]:
            found = False
            bb.debug(1, "%s contains dangling link %s" % (pkg, l))
            for p in packages:
                if l in pkg_files[p]:
                    found = True
                    bb.debug(1, "target found in %s" % p)
                    if p == pkg:
                        break
                    if pkg not in newrdepends:
                        newrdepends[pkg] = []
                    newrdepends[pkg].append(p)
                    break
            if not found:
                bb.note("%s contains dangling symlink to %s" % (pkg, l))

    for pkg in newrdepends:
        rdepends = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS_' + pkg) or "")
        for p in newrdepends[pkg]:
            if p not in rdepends:
                rdepends[p] = []
        d.setVar('RDEPENDS_' + pkg, bb.utils.join_deps(rdepends, commasep=False))

def package_depchains(d):
    """
    For a given set of prefix and postfix modifiers, make those packages
    RRECOMMENDS the corresponding packages for their RDEPENDS.

    Example: if package A depends upon package B, and A's .bb emits an
    A-dev package, this would make A-dev Recommends: B-dev.

    If only one package with a given suffix is specified, it will take its
    RRECOMMENDS from the RDEPENDS of *all* other packages. If more than one
    package with a given suffix is specified, each will only use the RDEPENDS
    of its own parent package.
    """

    packages  = d.getVar('PACKAGES')
    postfixes = (d.getVar('DEPCHAIN_POST') or '').split()
    prefixes  = (d.getVar('DEPCHAIN_PRE') or '').split()

    def pkg_adddeprrecs(pkg, base, suffix, getname, depends, d):

        #bb.note('depends for %s is %s' % (base, depends))
        rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS_' + pkg) or "")

        for depend in sorted(depends):
            if '-native' in depend or '-cross' in depend or depend.startswith('virtual/'):
                #bb.note("Skipping %s" % depend)
                continue
            if depend.endswith('-dev'):
                depend = depend[:-4]
            if depend.endswith('-dbg'):
                depend = depend[:-4]
            pkgname = getname(depend, suffix)
            #bb.note("Adding %s for %s" % (pkgname, depend))
            if pkgname not in rreclist and pkgname != pkg:
                rreclist[pkgname] = []

        #bb.note('setting: RRECOMMENDS_%s=%s' % (pkg, ' '.join(rreclist)))
        d.setVar('RRECOMMENDS_%s' % pkg, bb.utils.join_deps(rreclist, commasep=False))

    def pkg_addrrecs(pkg, base, suffix, getname, rdepends, d):

        #bb.note('rdepends for %s is %s' % (base, rdepends))
        rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS_' + pkg) or "")

        for depend in sorted(rdepends):
            if 'virtual-locale-' in depend:
                #bb.note("Skipping %s" % depend)
                continue
            if depend.endswith('-dev'):
                depend = depend[:-4]
            if depend.endswith('-dbg'):
                depend = depend[:-4]
            pkgname = getname(depend, suffix)
            #bb.note("Adding %s for %s" % (pkgname, depend))
            if pkgname not in rreclist and pkgname != pkg:
                rreclist[pkgname] = []

        #bb.note('setting: RRECOMMENDS_%s=%s' % (pkg, ' '.join(rreclist)))
        d.setVar('RRECOMMENDS_%s' % pkg, bb.utils.join_deps(rreclist, commasep=False))

    def add_dep(list, dep):
        if dep not in list:
            list.append(dep)

    depends = []
    for dep in bb.utils.explode_deps(d.getVar('DEPENDS') or ""):
        add_dep(depends, dep)

    rdepends = []
    for pkg in packages.split():
        for dep in bb.utils.explode_deps(d.getVar('RDEPENDS_' + pkg) or ""):
            add_dep(rdepends, dep)

    #bb.note('rdepends is %s' % rdepends)

    def post_getname(name, suffix):
        return '%s%s' % (name, suffix)
    def pre_getname(name, suffix):
        return '%s%s' % (suffix, name)
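    # e.g. post_getname('foo', '-dev') -> 'foo-dev';
    #      pre_getname('foo', 'lib32-') -> 'lib32-foo'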

    pkgs = {}
    for pkg in packages.split():
        for postfix in postfixes:
            if pkg.endswith(postfix):
                if postfix not in pkgs:
                    pkgs[postfix] = {}
                pkgs[postfix][pkg] = (pkg[:-len(postfix)], post_getname)

        for prefix in prefixes:
            if pkg.startswith(prefix):
                if prefix not in pkgs:
                    pkgs[prefix] = {}
                pkgs[prefix][pkg] = (pkg[:-len(prefix)], pre_getname)

    if "-dbg" in pkgs:
        pkglibdeps = read_libdep_files(d)
        pkglibdeplist = []
        for pkg in pkglibdeps:
            for k in pkglibdeps[pkg]:
                add_dep(pkglibdeplist, k)
        dbgdefaultdeps = ((d.getVar('DEPCHAIN_DBGDEFAULTDEPS') == '1') or (bb.data.inherits_class('packagegroup', d)))

    for suffix in pkgs:
        for pkg in pkgs[suffix]:
            if d.getVarFlag('RRECOMMENDS_' + pkg, 'nodeprrecs'):
                continue
            (base, func) = pkgs[suffix][pkg]
            if suffix == "-dev":
                pkg_adddeprrecs(pkg, base, suffix, func, depends, d)
            elif suffix == "-dbg":
                if not dbgdefaultdeps:
                    pkg_addrrecs(pkg, base, suffix, func, pkglibdeplist, d)
                    continue
            if len(pkgs[suffix]) == 1:
                pkg_addrrecs(pkg, base, suffix, func, rdepends, d)
            else:
                rdeps = []
                for dep in bb.utils.explode_deps(d.getVar('RDEPENDS_' + base) or ""):
                    add_dep(rdeps, dep)
                pkg_addrrecs(pkg, base, suffix, func, rdeps, d)

def package_do_filedeps(d):
    if d.getVar('SKIP_FILEDEPS') == '1':
        return

    pkgdest = d.getVar('PKGDEST')
    packages = d.getVar('PACKAGES')
    rpmdeps = d.getVar('RPMDEPS')

    def chunks(files, n):
        return [files[i:i+n] for i in range(0, len(files), n)]
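        # e.g. chunks(['a', 'b', 'c', 'd', 'e'], 2) -> [['a', 'b'], ['c', 'd'], ['e']]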

    pkglist = []
    for pkg in packages.split():
        if d.getVar('SKIP_FILEDEPS_' + pkg) == '1':
            continue
        if pkg.endswith('-dbg') or pkg.endswith('-doc') or pkg.endswith('-src') \
                or pkg.startswith('kernel-module-') or '-locale-' in pkg \
                or '-localedata-' in pkg or '-gconv-' in pkg or '-charmap-' in pkg:
            continue
        for files in chunks(pkgfiles[pkg], 100):
            pkglist.append((pkg, files, rpmdeps, pkgdest))

    processed = oe.utils.multiprocess_launch(oe.package.filedeprunner, pkglist, d)

    provides_files = {}
    requires_files = {}

    for result in processed:
        (pkg, provides, requires) = result

        if pkg not in provides_files:
            provides_files[pkg] = []
        if pkg not in requires_files:
            requires_files[pkg] = []

        for file in sorted(provides):
            provides_files[pkg].append(file)
            key = "FILERPROVIDES_" + file + "_" + pkg
            d.appendVar(key, " " + " ".join(provides[file]))

        for file in sorted(requires):
            requires_files[pkg].append(file)
            key = "FILERDEPENDS_" + file + "_" + pkg
            d.appendVar(key, " " + " ".join(requires[file]))

    for pkg in requires_files:
        d.setVar("FILERDEPENDSFLIST_" + pkg, " ".join(requires_files[pkg]))
    for pkg in provides_files:
        d.setVar("FILERPROVIDESFLIST_" + pkg, " ".join(provides_files[pkg]))
    if update_alternatives_enabled(d):
        apply_update_alternative_provides(d)

def emit_pkgdata(d):
    from glob import glob
    import json

    def process_postinst_on_target(pkg, mlprefix):
        pkgval = d.getVar('PKG_%s' % pkg)
        if pkgval is None:
            pkgval = pkg

        defer_fragment = """
if [ -n "$D" ]; then
    $INTERCEPT_DIR/postinst_intercept delay_to_first_boot %s mlprefix=%s
    exit 0
fi
""" % (pkgval, mlprefix)

        postinst = d.getVar('pkg_postinst_%s' % pkg)
        postinst_ontarget = d.getVar('pkg_postinst_ontarget_%s' % pkg)

        if postinst_ontarget:
            bb.debug(1, 'adding deferred pkg_postinst_ontarget() to pkg_postinst() for %s' % pkg)
            if not postinst:
                postinst = '#!/bin/sh\n'
            postinst += defer_fragment
            postinst += postinst_ontarget
            d.setVar('pkg_postinst_%s' % pkg, postinst)

    def add_set_e_to_scriptlets(pkg):
        for scriptlet_name in ('pkg_preinst', 'pkg_postinst', 'pkg_prerm', 'pkg_postrm'):
            scriptlet = d.getVar('%s_%s' % (scriptlet_name, pkg))
            if scriptlet:
                scriptlet_split = scriptlet.split('\n')
                if scriptlet_split[0].startswith("#!"):
                    scriptlet = scriptlet_split[0] + "\nset -e\n" + "\n".join(scriptlet_split[1:])
                else:
                    scriptlet = "set -e\n" + "\n".join(scriptlet_split[0:])
            d.setVar('%s_%s' % (scriptlet_name, pkg), scriptlet)

    def write_if_exists(f, pkg, var):
        def encode(s):
            import codecs
            c = codecs.getencoder("unicode_escape")
            return c(s)[0].decode("latin1")

        val = d.getVar('%s_%s' % (var, pkg))
        if val:
            f.write('%s_%s: %s\n' % (var, pkg, encode(val)))
            return val
        val = d.getVar('%s' % (var))
        if val:
            f.write('%s: %s\n' % (var, encode(val)))
        return val

    def write_extra_pkgs(variants, pn, packages, pkgdatadir):
        for variant in variants:
            with open("%s/%s-%s" % (pkgdatadir, variant, pn), 'w') as fd:
                fd.write("PACKAGES: %s\n" % ' '.join(
                            map(lambda pkg: '%s-%s' % (variant, pkg), packages.split())))

    def write_extra_runtime_pkgs(variants, packages, pkgdatadir):
        for variant in variants:
            for pkg in packages.split():
                ml_pkg = "%s-%s" % (variant, pkg)
                subdata_file = "%s/runtime/%s" % (pkgdatadir, ml_pkg)
                with open(subdata_file, 'w') as fd:
                    fd.write("PKG_%s: %s" % (ml_pkg, pkg))

    packages = d.getVar('PACKAGES')
    pkgdest = d.getVar('PKGDEST')
    pkgdatadir = d.getVar('PKGDESTWORK')

    data_file = pkgdatadir + d.expand("/${PN}")
    with open(data_file, 'w') as fd:
        fd.write("PACKAGES: %s\n" % packages)

    pn = d.getVar('PN')
    global_variants = (d.getVar('MULTILIB_GLOBAL_VARIANTS') or "").split()
    variants = (d.getVar('MULTILIB_VARIANTS') or "").split()

    if bb.data.inherits_class('kernel', d) or bb.data.inherits_class('module-base', d):
        write_extra_pkgs(variants, pn, packages, pkgdatadir)

    if bb.data.inherits_class('allarch', d) and not variants \
        and not bb.data.inherits_class('packagegroup', d):
        write_extra_pkgs(global_variants, pn, packages, pkgdatadir)

    workdir = d.getVar('WORKDIR')

    for pkg in packages.split():
        pkgval = d.getVar('PKG_%s' % pkg)
        if pkgval is None:
            pkgval = pkg
            d.setVar('PKG_%s' % pkg, pkg)

        pkgdestpkg = os.path.join(pkgdest, pkg)
        files = {}
        total_size = 0
        seen = set()
        for f in pkgfiles[pkg]:
            relpth = os.path.relpath(f, pkgdestpkg)
            fstat = os.lstat(f)
            files[os.sep + relpth] = fstat.st_size
            if fstat.st_ino not in seen:
                seen.add(fstat.st_ino)
                total_size += fstat.st_size
        d.setVar('FILES_INFO', json.dumps(files, sort_keys=True))

        process_postinst_on_target(pkg, d.getVar("MLPREFIX"))
        add_set_e_to_scriptlets(pkg)

        subdata_file = pkgdatadir + "/runtime/%s" % pkg
        with open(subdata_file, 'w') as sf:
            for var in (d.getVar('PKGDATA_VARS') or "").split():
                val = write_if_exists(sf, pkg, var)

            write_if_exists(sf, pkg, 'FILERPROVIDESFLIST')
            for dfile in (d.getVar('FILERPROVIDESFLIST_' + pkg) or "").split():
                write_if_exists(sf, pkg, 'FILERPROVIDES_' + dfile)

            write_if_exists(sf, pkg, 'FILERDEPENDSFLIST')
            for dfile in (d.getVar('FILERDEPENDSFLIST_' + pkg) or "").split():
                write_if_exists(sf, pkg, 'FILERDEPENDS_' + dfile)

            sf.write('%s_%s: %d\n' % ('PKGSIZE', pkg, total_size))

        # Symlinks needed for rprovides lookup
        rprov = d.getVar('RPROVIDES_%s' % pkg) or d.getVar('RPROVIDES')
        if rprov:
            for p in bb.utils.explode_deps(rprov):
                subdata_sym = pkgdatadir + "/runtime-rprovides/%s/%s" % (p, pkg)
                bb.utils.mkdirhier(os.path.dirname(subdata_sym))
                oe.path.symlink("../../runtime/%s" % pkg, subdata_sym, True)

        allow_empty = d.getVar('ALLOW_EMPTY_%s' % pkg)
        if not allow_empty:
            allow_empty = d.getVar('ALLOW_EMPTY')
        root = "%s/%s" % (pkgdest, pkg)
        os.chdir(root)
        g = glob('*')
        if g or allow_empty == "1":
            # Symlinks needed for reverse lookups (from the final package name)
            subdata_sym = pkgdatadir + "/runtime-reverse/%s" % pkgval
            oe.path.symlink("../runtime/%s" % pkg, subdata_sym, True)

            packagedfile = pkgdatadir + '/runtime/%s.packaged' % pkg
            open(packagedfile, 'w').close()

    if bb.data.inherits_class('kernel', d) or bb.data.inherits_class('module-base', d):
        write_extra_runtime_pkgs(variants, packages, pkgdatadir)

    if bb.data.inherits_class('allarch', d) and not variants \
        and not bb.data.inherits_class('packagegroup', d):
        write_extra_runtime_pkgs(global_variants, packages, pkgdatadir)

def package_prepare_pkgdata(d):
    import copy
    import glob

    taskdepdata = d.getVar("BB_TASKDEPDATA", False)
    mytaskname = d.getVar("BB_RUNTASK")
    if mytaskname.endswith("_setscene"):
        mytaskname = mytaskname.replace("_setscene", "")
    workdir = d.getVar("WORKDIR")
    pn = d.getVar("PN")
    stagingdir = d.getVar("PKGDATA_DIR")
    pkgdatadir = d.getVar("WORKDIR_PKGDATA")

    # Detect bitbake -b usage
    nodeps = d.getVar("BB_LIMITEDDEPS") or False
    if nodeps:
        staging_package_populate_pkgdata_dir(pkgdatadir, d)
        return

    start = None
    configuredeps = []
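    # Each BB_TASKDEPDATA entry is keyed by task id; the fields relied on
    # below are assumed to be data[0] = PN, data[1] = task name,
    # data[2] = recipe file and data[3] = the task's dependencies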
    for dep in taskdepdata:
        data = taskdepdata[dep]
        if data[1] == mytaskname and data[0] == pn:
            start = dep
            break
    if start is None:
        bb.fatal("Couldn't find ourself in BB_TASKDEPDATA?")

    # We need to figure out which sysroot files we need to expose to this task.
    # This needs to match what would get restored from sstate, which is controlled
    # ultimately by calls from bitbake to setscene_depvalid().
    # That function expects a setscene dependency tree. We build a dependency tree
    # condensed to inter-sstate task dependencies, similar to that used by setscene
    # tasks. We can then call into setscene_depvalid() and decide
    # which dependencies we can "see" and should expose in the recipe specific sysroot.
    setscenedeps = copy.deepcopy(taskdepdata)

    start = set([start])

    sstatetasks = d.getVar("SSTATETASKS").split()
    # Add recipe specific tasks referenced by setscene_depvalid()
    sstatetasks.append("do_stash_locale")

    # If start is an sstate task (like do_package) we need to add in its direct dependencies
    # else the code below won't recurse into them.
    for dep in set(start):
        for dep2 in setscenedeps[dep][3]:
            start.add(dep2)
        start.remove(dep)

    # Create collapsed do_populate_sysroot -> do_populate_sysroot tree
    for dep in taskdepdata:
        data = setscenedeps[dep]
        if data[1] not in sstatetasks:
            for dep2 in setscenedeps:
                data2 = setscenedeps[dep2]
                if dep in data2[3]:
                    data2[3].update(setscenedeps[dep][3])
                    data2[3].remove(dep)
            if dep in start:
                start.update(setscenedeps[dep][3])
                start.remove(dep)
            del setscenedeps[dep]

    # Remove circular references
    for dep in setscenedeps:
        if dep in setscenedeps[dep][3]:
            setscenedeps[dep][3].remove(dep)

    # Direct dependencies should be present and can be depended upon
    for dep in set(start):
        if setscenedeps[dep][1] == "do_packagedata":
            if dep not in configuredeps:
                configuredeps.append(dep)

    msgbuf = []
    # Call into setscene_depvalid for each sub-dependency and only copy sysroot files
    # for ones that would be restored from sstate.
    done = list(start)
    next = list(start)
    while next:
        new = []
        for dep in next:
            data = setscenedeps[dep]
            for datadep in data[3]:
                if datadep in done:
                    continue
                taskdeps = {}
                taskdeps[dep] = setscenedeps[dep][:2]
                taskdeps[datadep] = setscenedeps[datadep][:2]
                retval = setscene_depvalid(datadep, taskdeps, [], d, msgbuf)
                done.append(datadep)
                new.append(datadep)
                if retval:
                    msgbuf.append("Skipping setscene dependency %s" % datadep)
                    continue
                if datadep not in configuredeps and setscenedeps[datadep][1] == "do_packagedata":
                    configuredeps.append(datadep)
                    msgbuf.append("Adding dependency on %s" % setscenedeps[datadep][0])
                else:
                    msgbuf.append("Following dependency on %s" % setscenedeps[datadep][0])
        next = new

    # This logging is too verbose for day-to-day use sadly
    #bb.debug(2, "\n".join(msgbuf))

    seendirs = set()
    postinsts = []
    multilibs = {}
    manifests = {}

    msg_adding = []

    for dep in configuredeps:
        c = setscenedeps[dep][0]
        msg_adding.append(c)

        manifest, d2 = oe.sstatesig.find_sstate_manifest(c, setscenedeps[dep][2], "packagedata", d, multilibs)
        destsysroot = pkgdatadir

        if manifest:
            targetdir = destsysroot
            with open(manifest, "r") as f:
                manifests[dep] = manifest
                for l in f:
                    l = l.strip()
                    dest = targetdir + l.replace(stagingdir, "")
                    if l.endswith("/"):
                        staging_copydir(l, targetdir, dest, seendirs)
                        continue
                    staging_copyfile(l, targetdir, dest, postinsts, seendirs)

    bb.note("Installed into pkgdata-sysroot: %s" % str(msg_adding))

def fixup_perms(d):
    import pwd, grp

    # Initialised from a string with the same format as a line in the
    # fs-perms.txt file:
    # <path> <mode> <uid> <gid> <walk> <fmode> <fuid> <fgid>
    # <path> link <link target>
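    # e.g. (illustrative entries):
    #   /usr/src 0755 root root false - - -
    #   ${localstatedir}/run link /run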
    #
    # __str__ can be used to print out an entry in the input format
    #
    # if fs_perms_entry.path is None:
    #    an error occurred
    # if fs_perms_entry.link, you can retrieve:
    #    fs_perms_entry.path = path
    #    fs_perms_entry.link = target of link
    # if not fs_perms_entry.link, you can retrieve:
    #    fs_perms_entry.path = path
    #    fs_perms_entry.mode = expected dir mode or None
    #    fs_perms_entry.uid = expected uid or -1
    #    fs_perms_entry.gid = expected gid or -1
    #    fs_perms_entry.walk = 'true' or something else
    #    fs_perms_entry.fmode = expected file mode or None
    #    fs_perms_entry.fuid = expected file uid or -1
    #    fs_perms_entry.fgid = expected file gid or -1
    class fs_perms_entry():
        def __init__(self, line):
            lsplit = line.split()
            if len(lsplit) == 3 and lsplit[1].lower() == "link":
                self._setlink(lsplit[0], lsplit[2])
            elif len(lsplit) == 8:
                self._setdir(lsplit[0], lsplit[1], lsplit[2], lsplit[3], lsplit[4], lsplit[5], lsplit[6], lsplit[7])
            else:
                msg = "Fixup Perms: invalid config line %s" % line
                package_qa_handle_error("perm-config", msg, d)
                self.path = None
                self.link = None

        def _setdir(self, path, mode, uid, gid, walk, fmode, fuid, fgid):
            self.path = os.path.normpath(path)
            self.link = None
            self.mode = self._procmode(mode)
            self.uid  = self._procuid(uid)
            self.gid  = self._procgid(gid)
            self.walk = walk.lower()
            self.fmode = self._procmode(fmode)
            self.fuid = self._procuid(fuid)
            self.fgid = self._procgid(fgid)

        def _setlink(self, path, link):
            self.path = os.path.normpath(path)
            self.link = link

        def _procmode(self, mode):
            if not mode or mode == "-":
                return None
            else:
                return int(mode, 8)

        # Note uid/gid -1 has special significance in os.lchown
        def _procuid(self, uid):
            if uid is None or uid == "-":
                return -1
            elif uid.isdigit():
                return int(uid)
            else:
                return pwd.getpwnam(uid).pw_uid

        def _procgid(self, gid):
            if gid is None or gid == "-":
                return -1
            elif gid.isdigit():
                return int(gid)
            else:
                return grp.getgrnam(gid).gr_gid

        # Use for debugging the entries
        def __str__(self):
            if self.link:
                return "%s link %s" % (self.path, self.link)
            else:
                mode = "-"
                if self.mode:
                    mode = "0%o" % self.mode
                fmode = "-"
                if self.fmode:
                    fmode = "0%o" % self.fmode
                uid = self._mapugid(self.uid)
                gid = self._mapugid(self.gid)
                fuid = self._mapugid(self.fuid)
                fgid = self._mapugid(self.fgid)
                return "%s %s %s %s %s %s %s %s" % (self.path, mode, uid, gid, self.walk, fmode, fuid, fgid)

        def _mapugid(self, ugid):
            if ugid is None or ugid == -1:
                return "-"
            else:
                return "%d" % ugid

    # Fix the permission, owner and group of path
    def fix_perms(path, mode, uid, gid, dir):
        if mode and not os.path.islink(path):
            #bb.note("Fixup Perms: chmod 0%o %s" % (mode, dir))
            os.chmod(path, mode)
        # -1 is a special value that means don't change the uid/gid
        # if they are BOTH -1, don't bother to lchown
        if not (uid == -1 and gid == -1):
            #bb.note("Fixup Perms: lchown %d:%d %s" % (uid, gid, dir))
            os.lchown(path, uid, gid)

    # Return a list of configuration files based on either the default
    # files/fs-perms.txt or the contents of FILESYSTEM_PERMS_TABLES
    # paths are resolved via BBPATH
    def get_fs_perms_list(d):
        perms_list = ""
        bbpath = d.getVar('BBPATH')
        fs_perms_tables = d.getVar('FILESYSTEM_PERMS_TABLES') or ""
        for conf_file in fs_perms_tables.split():
            confpath = bb.utils.which(bbpath, conf_file)
            if confpath:
                perms_list += " %s" % confpath
            else:
                bb.warn("cannot find %s specified in FILESYSTEM_PERMS_TABLES" % conf_file)
        return perms_list



    dvar = d.getVar('PKGD')

    fs_perms_table = {}
    fs_link_table = {}

    # By default all of the standard directories specified in
    # bitbake.conf will get 0755 root:root.
    target_path_vars = [
        'base_prefix',
        'prefix',
        'exec_prefix',
        'base_bindir',
        'base_sbindir',
        'base_libdir',
        'datadir',
        'sysconfdir',
        'servicedir',
        'sharedstatedir',
        'localstatedir',
        'infodir',
        'mandir',
        'docdir',
        'bindir',
        'sbindir',
        'libexecdir',
        'libdir',
        'includedir',
        'oldincludedir',
    ]

    for path in target_path_vars:
        dir = d.getVar(path) or ""
        if dir == "":
            continue
        fs_perms_table[dir] = fs_perms_entry(d.expand("%s 0755 root root false - - -" % (dir)))

    # Now we actually load from the configuration files
    for conf in get_fs_perms_list(d).split():
        if not os.path.exists(conf):
            continue
        with open(conf) as f:
            for line in f:
                if line.startswith('#'):
                    continue
                lsplit = line.split()
                if len(lsplit) == 0:
                    continue
                if len(lsplit) != 8 and not (len(lsplit) == 3 and lsplit[1].lower() == "link"):
                    msg = "Fixup perms: %s invalid line: %s" % (conf, line)
                    package_qa_handle_error("perm-line", msg, d)
                    continue
                entry = fs_perms_entry(d.expand(line))
                if entry and entry.path:
                    if entry.link:
                        fs_link_table[entry.path] = entry
                        if entry.path in fs_perms_table:
                            fs_perms_table.pop(entry.path)
                    else:
                        fs_perms_table[entry.path] = entry
                        if entry.path in fs_link_table:
                            fs_link_table.pop(entry.path)

    # Debug -- list out in-memory table
    #for dir in fs_perms_table:
    #    bb.note("Fixup Perms: %s: %s" % (dir, str(fs_perms_table[dir])))
    #for link in fs_link_table:
    #    bb.note("Fixup Perms: %s: %s" % (link, str(fs_link_table[link])))

    # We process links first, so we can go back and fixup directory ownership
    # for any newly created directories
    # Process in sorted order so /run gets created before /run/lock, etc.
    for entry in sorted(fs_link_table.values(), key=lambda x: x.link):
        link = entry.link
        dir = entry.path
        origin = dvar + dir
        if not (cpath.exists(origin) and cpath.isdir(origin) and not cpath.islink(origin)):
            continue

        if link[0] == "/":
            target = dvar + link
            ptarget = link
        else:
            target = os.path.join(os.path.dirname(origin), link)
            ptarget = os.path.join(os.path.dirname(dir), link)
        if os.path.exists(target):
            msg = "Fixup Perms: Unable to correct directory link, target already exists: %s -> %s" % (dir, ptarget)
            package_qa_handle_error("perm-link", msg, d)
            continue

        # Create path to move directory to, move it, and then setup the symlink
        bb.utils.mkdirhier(os.path.dirname(target))
        #bb.note("Fixup Perms: Rename %s -> %s" % (dir, ptarget))
        os.rename(origin, target)
        #bb.note("Fixup Perms: Link %s -> %s" % (dir, link))
        os.symlink(link, origin)

    for dir in fs_perms_table:
        origin = dvar + dir
        if not (cpath.exists(origin) and cpath.isdir(origin)):
            continue

        fix_perms(origin, fs_perms_table[dir].mode, fs_perms_table[dir].uid, fs_perms_table[dir].gid, dir)

        if fs_perms_table[dir].walk == 'true':
            for root, dirs, files in os.walk(origin):
                for dr in dirs:
                    each_dir = os.path.join(root, dr)
                    fix_perms(each_dir, fs_perms_table[dir].mode, fs_perms_table[dir].uid, fs_perms_table[dir].gid, dir)
                for f in files:
                    each_file = os.path.join(root, f)
                    fix_perms(each_file, fs_perms_table[dir].fmode, fs_perms_table[dir].fuid, fs_perms_table[dir].fgid, dir)

def package_convert_pr_autoinc(d):
    pkgv = d.getVar("PKGV")

    # Adjust pkgv as necessary...
    if 'AUTOINC' in pkgv:
        d.setVar("PKGV", pkgv.replace("AUTOINC", "${PRSERV_PV_AUTOINC}"))

    # Change PRSERV_PV_AUTOINC and EXTENDPRAUTO usage to special values
    d.setVar('PRSERV_PV_AUTOINC', '@PRSERV_PV_AUTOINC@')
    d.setVar('EXTENDPRAUTO', '@EXTENDPRAUTO@')

def perform_packagecopy(d):
    import subprocess

    enabled = oe.data.typed_value('LICENSE_CREATE_PACKAGE', d)
    if d.getVar('CLASSOVERRIDE') == 'class-target' and enabled:
        lic_files_paths = find_license_files(d)

        # LICENSE_FILES_DIRECTORY starts with '/' so os.path.join cannot be used to join D and LICENSE_FILES_DIRECTORY
        destdir = d.getVar('D') + os.path.join(d.getVar('LICENSE_FILES_DIRECTORY'), d.getVar('PN'))
        copy_license_files(lic_files_paths, destdir)
        add_package_and_files(d)

    dest = d.getVar('D')
    dvar = d.getVar('PKGD')

    # Start package population by taking a copy of the installed files to
    # operate on. Preserve sparse files and hard links.
    cmd = 'tar -cf - -C %s -p -S . | tar -xf - -C %s' % (dest, dvar)
    subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
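
    # e.g. (illustrative paths) the pipeline expands to something like:
    #   tar -cf - -C ${WORKDIR}/image -p -S . | tar -xf - -C ${WORKDIR}/package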

    # replace RPATHs for the nativesdk binaries, to make them relocatable
    if bb.data.inherits_class('nativesdk', d) or bb.data.inherits_class('cross-canadian', d):
        rpath_replace(dvar, d)

def split_and_strip_files(d):
    import stat, errno
    import subprocess

    dvar = d.getVar('PKGD')
    pn = d.getVar('PN')
    hostos = d.getVar('HOST_OS')

    oldcwd = os.getcwd()
    os.chdir(dvar)

    # We default to '.debug' style
    if d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-file-directory':
        # Single debug-file-directory style debug info
        debugappend = ".debug"
        debugstaticappend = ""
        debugdir = ""
        debugstaticdir = ""
        debuglibdir = "/usr/lib/debug"
        debugstaticlibdir = "/usr/lib/debug-static"
        debugsrcdir = "/usr/src/debug"
    elif d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-without-src':
        # Original OE-core, a.k.a. ".debug", style debug info, but without sources in /usr/src/debug
        debugappend = ""
        debugstaticappend = ""
        debugdir = "/.debug"
        debugstaticdir = "/.debug-static"
        debuglibdir = ""
        debugstaticlibdir = ""
        debugsrcdir = ""
    elif d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-with-srcpkg':
        debugappend = ""
        debugstaticappend = ""
        debugdir = "/.debug"
        debugstaticdir = "/.debug-static"
        debuglibdir = ""
        debugstaticlibdir = ""
        debugsrcdir = "/usr/src/debug"
    else:
        # Original OE-core, a.k.a. ".debug", style debug info
        debugappend = ""
        debugstaticappend = ""
        debugdir = "/.debug"
        debugstaticdir = "/.debug-static"
        debuglibdir = ""
        debugstaticlibdir = ""
        debugsrcdir = "/usr/src/debug"

    #
    # First, let's figure out all of the files we may have to process ... do this only once!
    #
    elffiles = {}
    symlinks = {}
    kernmods = []
    staticlibs = []
    inodes = {}
    libdir = os.path.abspath(dvar + os.sep + d.getVar("libdir"))
    baselibdir = os.path.abspath(dvar + os.sep + d.getVar("base_libdir"))
    skipfiles = (d.getVar("INHIBIT_PACKAGE_STRIP_FILES") or "").split()
    if (d.getVar('INHIBIT_PACKAGE_STRIP') != '1' or \
            d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT') != '1'):
        checkelf = {}
        checkelflinks = {}
        for root, dirs, files in cpath.walk(dvar):
            for f in files:
                file = os.path.join(root, f)

                # Skip debug files
                if debugappend and file.endswith(debugappend):
                    continue
                if debugdir and debugdir in os.path.dirname(file[len(dvar):]):
                    continue

                if file in skipfiles:
                    continue

                if file.endswith(".ko") and file.find("/lib/modules/") != -1:
                    kernmods.append(file)
                    continue
                if oe.package.is_static_lib(file):
                    staticlibs.append(file)
                    continue

                try:
                    ltarget = cpath.realpath(file, dvar, False)
                    s = cpath.lstat(ltarget)
                except OSError as e:
                    (err, strerror) = e.args
                    if err != errno.ENOENT:
                        raise
                    # Skip broken symlinks
                    continue
                if not s:
                    continue
                # Check whether it's executable
                if (s[stat.ST_MODE] & stat.S_IXUSR) or (s[stat.ST_MODE] & stat.S_IXGRP) or (s[stat.ST_MODE] & stat.S_IXOTH) \
                        or ((file.startswith(libdir) or file.startswith(baselibdir)) and (".so" in f or ".node" in f)):

                    if cpath.islink(file):
                        checkelflinks[file] = ltarget
                        continue
                    # Use a reference of device ID and inode number to identify files
                    file_reference = "%d_%d" % (s.st_dev, s.st_ino)
                    checkelf[file] = (file, file_reference)

        results = oe.utils.multiprocess_launch(oe.package.is_elf, checkelflinks.values(), d)
        results_map = {}
        for (ltarget, elf_file) in results:
            results_map[ltarget] = elf_file
        for file in checkelflinks:
            ltarget = checkelflinks[file]
            # If it's a symlink, and points to an ELF file, we capture the readlink target
            if results_map[ltarget]:
                target = os.readlink(file)
                #bb.note("Sym: %s (%d)" % (ltarget, results_map[ltarget]))
                symlinks[file] = target

        results = oe.utils.multiprocess_launch(oe.package.is_elf, checkelf.keys(), d)

        # Sort results by file path. This ensures that the files are always
        # processed in the same order, which is important to make sure builds
        # are reproducible when dealing with hardlinks
        results.sort(key=lambda x: x[0])

        for (file, elf_file) in results:
            # It's a file (or hardlink), not a link
            # ...but is it ELF, and is it already stripped?
            if elf_file & 1:
                if elf_file & 2:
                    if 'already-stripped' in (d.getVar('INSANE_SKIP_' + pn) or "").split():
                        bb.note("Skipping file %s from %s for already-stripped QA test" % (file[len(dvar):], pn))
                    else:
                        msg = "File '%s' from %s was already stripped, this will prevent future debugging!" % (file[len(dvar):], pn)
                        package_qa_handle_error("already-stripped", msg, d)
                    continue

                # At this point we have an unstripped elf file. We need to:
                #  a) Make sure any file we strip is not hardlinked to anything else outside this tree
                #  b) Only strip any hardlinked file once (no races)
                #  c) Track any hardlinks between files so that we can reconstruct matching debug file hardlinks

                # Use a reference of device ID and inode number to identify files
                file_reference = checkelf[file][1]
                if file_reference in inodes:
                    os.unlink(file)
                    os.link(inodes[file_reference][0], file)
                    inodes[file_reference].append(file)
                else:
                    inodes[file_reference] = [file]
                    # break hardlink
                    bb.utils.break_hardlinks(file)
                    elffiles[file] = elf_file
                # Modified the file so clear the cache
                cpath.updatecache(file)

    #
    # Next, let's process debug splitting
    #
    if (d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT') != '1'):
        results = oe.utils.multiprocess_launch(splitdebuginfo, list(elffiles), d, extraargs=(dvar, debugdir, debuglibdir, debugappend, debugsrcdir, d))

        if debugsrcdir and not hostos.startswith("mingw"):
            if (d.getVar('PACKAGE_DEBUG_STATIC_SPLIT') == '1'):
                results = oe.utils.multiprocess_launch(splitstaticdebuginfo, staticlibs, d, extraargs=(dvar, debugstaticdir, debugstaticlibdir, debugstaticappend, debugsrcdir, d))
            else:
                for file in staticlibs:
                    results.append((file, source_info(file, d)))

        sources = set()
        for r in results:
            sources.update(r[1])

        # Hardlink our debug symbols to the other hardlink copies
        for ref in inodes:
            if len(inodes[ref]) == 1:
                continue

            target = inodes[ref][0][len(dvar):]
            for file in inodes[ref][1:]:
                src = file[len(dvar):]
                dest = debuglibdir + os.path.dirname(src) + debugdir + "/" + os.path.basename(target) + debugappend
                fpath = dvar + dest
                ftarget = dvar + debuglibdir + os.path.dirname(target) + debugdir + "/" + os.path.basename(target) + debugappend
                bb.utils.mkdirhier(os.path.dirname(fpath))
                # Only one hardlink of separated debug info file in each directory
                if not os.access(fpath, os.R_OK):
                    #bb.note("Link %s -> %s" % (fpath, ftarget))
                    os.link(ftarget, fpath)

        # Create symlinks for all cases we were able to split symbols
        for file in symlinks:
            src = file[len(dvar):]
            dest = debuglibdir + os.path.dirname(src) + debugdir + "/" + os.path.basename(src) + debugappend
            fpath = dvar + dest
            # Skip it if the target doesn't exist
            try:
                s = os.stat(fpath)
            except OSError as e:
                (err, strerror) = e.args
                if err != errno.ENOENT:
                    raise
                continue

            ltarget = symlinks[file]
            lpath = os.path.dirname(ltarget)
            lbase = os.path.basename(ltarget)
            ftarget = ""
            if lpath and lpath != ".":
                ftarget += lpath + debugdir + "/"
            ftarget += lbase + debugappend
            if lpath.startswith(".."):
                ftarget = os.path.join("..", ftarget)
            bb.utils.mkdirhier(os.path.dirname(fpath))
            #bb.note("Symlink %s -> %s" % (fpath, ftarget))
            os.symlink(ftarget, fpath)

        # Process the debugsrcdir if requested...
        # This copies and places the referenced sources for later debugging...
        copydebugsources(debugsrcdir, sources, d)
    #
    # End of debug splitting
    #

    #
    # Now let's go back over things and strip them
    #
    if (d.getVar('INHIBIT_PACKAGE_STRIP') != '1'):
        strip = d.getVar("STRIP")
        sfiles = []
        for file in elffiles:
            elf_file = int(elffiles[file])
            #bb.note("Strip %s" % file)
            sfiles.append((file, elf_file, strip))
        for f in kernmods:
            sfiles.append((f, 16, strip))
        if (d.getVar('PACKAGE_STRIP_STATIC') == '1' or d.getVar('PACKAGE_DEBUG_STATIC_SPLIT') == '1'):
            for f in staticlibs:
                sfiles.append((f, 16, strip))

        oe.utils.multiprocess_launch(oe.package.runstrip, sfiles, d)

    #
    # End of strip
    #
    os.chdir(oldcwd)

def package_do_split_locales(d):
    if (d.getVar('PACKAGE_NO_LOCALE') == '1'):
        bb.debug(1, "package requested not splitting locales")
        return

    packages = (d.getVar('PACKAGES') or "").split()

    datadir = d.getVar('datadir')
    if not datadir:
        bb.note("datadir not defined")
        return

    dvar = d.getVar('PKGD')
    pn = d.getVar('LOCALEBASEPN')

    if pn + '-locale' in packages:
        packages.remove(pn + '-locale')

    localedir = os.path.join(dvar + datadir, 'locale')

    if not cpath.isdir(localedir):
        bb.debug(1, "No locale files in this package")
        return

    locales = os.listdir(localedir)

    summary = d.getVar('SUMMARY') or pn
    description = d.getVar('DESCRIPTION') or ""
    locale_section = d.getVar('LOCALE_SECTION')
    mlprefix = d.getVar('MLPREFIX') or ""
    for l in sorted(locales):
        ln = legitimize_package_name(l)
        pkg = pn + '-locale-' + ln
        packages.append(pkg)
        d.setVar('FILES_' + pkg, os.path.join(datadir, 'locale', l))
        d.setVar('RRECOMMENDS_' + pkg, '%svirtual-locale-%s' % (mlprefix, ln))
        d.setVar('RPROVIDES_' + pkg, '%s-locale %s%s-translation' % (pn, mlprefix, ln))
        d.setVar('SUMMARY_' + pkg, '%s - %s translations' % (summary, l))
        d.setVar('DESCRIPTION_' + pkg, '%s  This package contains language translation files for the %s locale.' % (description, l))
        if locale_section:
            d.setVar('SECTION_' + pkg, locale_section)

    d.setVar('PACKAGES', ' '.join(packages))

    # Disabled by RP 18/06/07
    # Wildcards aren't supported in debian
    # They break with ipkg since glibc-locale* will mean that
    # glibc-localedata-translit* won't install as a dependency
    # for some other package which breaks meta-toolchain
    # Probably breaks since virtual-locale- isn't provided anywhere
    #rdep = (d.getVar('RDEPENDS_%s' % pn) or "").split()
    #rdep.append('%s-locale*' % pn)
    #d.setVar('RDEPENDS_%s' % pn, ' '.join(rdep))

def read_shlibdeps(d):
    pkglibdeps = read_libdep_files(d)
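    # bb.utils.explode_dep_versions2() maps each dependency to a list of
    # version constraints, e.g. "foo (>= 1.0) bar" -> {"foo": [">= 1.0"],
    # "bar": []}; the per-library deps read above are merged into that below.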

    packages = d.getVar('PACKAGES').split()
    for pkg in packages:
        rdepends = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS_' + pkg) or "")
        for dep in sorted(pkglibdeps[pkg]):
            # Add the dep if it's not already there, or if no comparison is set
            if dep not in rdepends:
                rdepends[dep] = []
            for v in pkglibdeps[pkg][dep]:
                if v not in rdepends[dep]:
                    rdepends[dep].append(v)
        d.setVar('RDEPENDS_' + pkg, bb.utils.join_deps(rdepends, commasep=False))

def package_do_shlibs(d):
    import itertools
    import re, pipes
    import subprocess

    exclude_shlibs = d.getVar('EXCLUDE_FROM_SHLIBS', False)
    if exclude_shlibs:
        bb.note("not generating shlibs")
        return

    lib_re = re.compile(r"^.*\.so")
    libdir_re = re.compile(r".*/%s$" % d.getVar('baselib'))

    packages = d.getVar('PACKAGES')

    shlib_pkgs = []
    exclusion_list = d.getVar("EXCLUDE_PACKAGES_FROM_SHLIBS")
    if exclusion_list:
        for pkg in packages.split():
            if pkg not in exclusion_list.split():
                shlib_pkgs.append(pkg)
            else:
                bb.note("not generating shlibs for %s" % pkg)
    else:
        shlib_pkgs = packages.split()

    hostos = d.getVar('HOST_OS')

    workdir = d.getVar('WORKDIR')

    ver = d.getVar('PKGV')
    if not ver:
        msg = "PKGV not defined"
        package_qa_handle_error("pkgv-undefined", msg, d)
        return

    pkgdest = d.getVar('PKGDEST')

    shlibswork_dir = d.getVar('SHLIBSWORKDIR')

    def linux_so(file, pkg, pkgver, d):
        needs_ldconfig = False
        needed = set()
        sonames = set()
        renames = []
        ldir = os.path.dirname(file).replace(pkgdest + "/" + pkg, '')
        cmd = d.getVar('OBJDUMP') + " -p " + pipes.quote(file) + " 2>/dev/null"
        fd = os.popen(cmd)
        lines = fd.readlines()
        fd.close()
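        # We parse the dynamic section printed by "objdump -p"; the lines
        # matched below look roughly like:
        #   RPATH                $ORIGIN/../lib
        #   NEEDED               libc.so.6
        #   SONAME               libfoo.so.1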
        rpath = tuple()
        for l in lines:
            m = re.match(r"\s+RPATH\s+([^\s]*)", l)
            if m:
                rpaths = m.group(1).replace("$ORIGIN", ldir).split(":")
                rpath = tuple(map(os.path.normpath, rpaths))
        for l in lines:
            m = re.match(r"\s+NEEDED\s+([^\s]*)", l)
            if m:
                dep = m.group(1)
                # "needed" holds (dep, file, rpath) tuples, so a bare name can
                # never match a membership test; just add and let the set
                # de-duplicate
                needed.add((dep, file, rpath))
            m = re.match(r"\s+SONAME\s+([^\s]*)", l)
            if m:
                this_soname = m.group(1)
                prov = (this_soname, ldir, pkgver)
                if prov not in sonames:
                    # if the library is private (only used by this package),
                    # do not register it as a shlib provider
                    import fnmatch
                    if not private_libs or not any(fnmatch.fnmatch(this_soname, i) for i in private_libs):
                        sonames.add(prov)
                if libdir_re.match(os.path.dirname(file)):
                    needs_ldconfig = True
                if snap_symlinks and (os.path.basename(file) != this_soname):
                    renames.append((file, os.path.join(os.path.dirname(file), this_soname)))
        return (needs_ldconfig, needed, sonames, renames)

    def darwin_so(file, needed, sonames, renames, pkgver):
        if not os.path.exists(file):
            return
        ldir = os.path.dirname(file).replace(pkgdest + "/" + pkg, '')

        def get_combinations(base):
            #
            # Given a base library name, find all combinations of this split by "." and "-"
            #
            combos = []
            options = base.split(".")
            for i in range(1, len(options) + 1):
                combos.append(".".join(options[0:i]))
            options = base.split("-")
            for i in range(1, len(options) + 1):
                combos.append("-".join(options[0:i]))
            return combos
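        # Computed example: get_combinations("libfoo.1") returns
        # ['libfoo', 'libfoo.1', 'libfoo.1']; duplicates are harmless as
        # membership in "sonames" is checked before adding below.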

        if (file.endswith('.dylib') or file.endswith('.so')) and not pkg.endswith('-dev') and not pkg.endswith('-dbg') and not pkg.endswith('-src'):
            # Drop suffix
            name = os.path.basename(file).rsplit(".",1)[0]
            # Find all combinations
            combos = get_combinations(name)
            for combo in combos:
                if combo not in sonames:
                    prov = (combo, ldir, pkgver)
                    sonames.add(prov)
        if file.endswith('.dylib') or file.endswith('.so'):
            rpath = []
            p = subprocess.Popen([d.expand("${HOST_PREFIX}otool"), '-l', file], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            out, err = p.communicate()
            # If returned successfully, process stdout for results
            if p.returncode == 0:
                # communicate() returns bytes; decode before splitting
                for l in out.decode("utf-8").split("\n"):
                    l = l.strip()
                    if l.startswith('path '):
                        rpath.append(l.split()[1])

        p = subprocess.Popen([d.expand("${HOST_PREFIX}otool"), '-L', file], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        out, err = p.communicate()
        # If returned successfully, process stdout for results
        if p.returncode == 0:
            # communicate() returns bytes; decode before splitting
            for l in out.decode("utf-8").split("\n"):
                l = l.strip()
                if not l or l.endswith(":"):
                    continue
                if "is not an object file" in l:
                    continue
                name = os.path.basename(l.split()[0]).rsplit(".", 1)[0]
                if name:
                    # needed[pkg] holds (name, file, rpath) tuples, so the
                    # set itself de-duplicates repeated dependencies
                    needed[pkg].add((name, file, tuple()))

    def mingw_dll(file, needed, sonames, renames, pkgver):
        if not os.path.exists(file):
            return

        if file.endswith(".dll"):
            # assume all dlls are shared objects provided by the package
            sonames.add((os.path.basename(file), os.path.dirname(file).replace(pkgdest + "/" + pkg, ''), pkgver))

        if (file.endswith(".dll") or file.endswith(".exe")):
            # use objdump to search for "DLL Name: .*\.dll"
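            # a typical matching line of "objdump -p" output for PE binaries:
            #   DLL Name: KERNEL32.dll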
            p = subprocess.Popen([d.expand("${HOST_PREFIX}objdump"), "-p", file], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            out, err = p.communicate()
            # process the output, grabbing all .dll names
            if p.returncode == 0:
                for m in re.finditer(r"DLL Name: (.*?\.dll)$", out.decode(), re.MULTILINE | re.IGNORECASE):
                    dllname = m.group(1)
                    if dllname:
                        needed[pkg].add((dllname, file, tuple()))

    snap_symlinks = d.getVar('PACKAGE_SNAP_LIB_SYMLINKS') == "1"

    needed = {}

    shlib_provider = oe.package.read_shlib_providers(d)

    for pkg in shlib_pkgs:
        private_libs = d.getVar('PRIVATE_LIBS_' + pkg) or d.getVar('PRIVATE_LIBS') or ""
        private_libs = private_libs.split()
        needs_ldconfig = False
        bb.debug(2, "calculating shlib provides for %s" % pkg)

        pkgver = d.getVar('PKGV_' + pkg)
        if not pkgver:
            pkgver = d.getVar('PV_' + pkg)
        if not pkgver:
            pkgver = ver

        needed[pkg] = set()
        sonames = set()
        renames = []
        linuxlist = []
        for file in pkgfiles[pkg]:
            if cpath.islink(file):
                continue
            if hostos == "darwin" or hostos == "darwin8":
                darwin_so(file, needed, sonames, renames, pkgver)
            elif hostos.startswith("mingw"):
                mingw_dll(file, needed, sonames, renames, pkgver)
            elif os.access(file, os.X_OK) or lib_re.match(file):
                linuxlist.append(file)

        if linuxlist:
            results = oe.utils.multiprocess_launch(linux_so, linuxlist, d, extraargs=(pkg, pkgver, d))
            for r in results:
                ldconfig = r[0]
                needed[pkg] |= r[1]
                sonames |= r[2]
                renames.extend(r[3])
                needs_ldconfig = needs_ldconfig or ldconfig

        for (old, new) in renames:
            bb.note("Renaming %s to %s" % (old, new))
            os.rename(old, new)
            pkgfiles[pkg].remove(old)

        shlibs_file = os.path.join(shlibswork_dir, pkg + ".list")
        if sonames:
            with open(shlibs_file, 'w') as fd:
                for s in sorted(sonames):
                    if s[0] in shlib_provider and s[1] in shlib_provider[s[0]]:
                        (old_pkg, old_pkgver) = shlib_provider[s[0]][s[1]]
                        if old_pkg != pkg:
                            bb.warn('%s-%s was registered as shlib provider for %s, changing it to %s-%s because it was built later' % (old_pkg, old_pkgver, s[0], pkg, pkgver))
                    bb.debug(1, 'registering %s-%s as shlib provider for %s' % (pkg, pkgver, s[0]))
                    fd.write(s[0] + ':' + s[1] + ':' + s[2] + '\n')
                    if s[0] not in shlib_provider:
                        shlib_provider[s[0]] = {}
                    shlib_provider[s[0]][s[1]] = (pkg, pkgver)
        if needs_ldconfig:
            bb.debug(1, 'adding ldconfig call to postinst for %s' % pkg)
            postinst = d.getVar('pkg_postinst_%s' % pkg)
            if not postinst:
                postinst = '#!/bin/sh\n'
            postinst += d.getVar('ldconfig_postinst_fragment')
            d.setVar('pkg_postinst_%s' % pkg, postinst)
        bb.debug(1, 'LIBNAMES: pkg %s sonames %s' % (pkg, sonames))

    assumed_libs = d.getVar('ASSUME_SHLIBS')
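    # ASSUME_SHLIBS entries take the form <library>:<provider>[_<version>],
    # e.g. ASSUME_SHLIBS = "libEGL.so.1:libegl-implementation" (illustrative
    # value from the variable's documentation).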
    if assumed_libs:
        libdir = d.getVar("libdir")
        for e in assumed_libs.split():
            l, dep_pkg = e.split(":")
            lib_ver = None
            dep_pkg = dep_pkg.rsplit("_", 1)
            if len(dep_pkg) == 2:
                lib_ver = dep_pkg[1]
            dep_pkg = dep_pkg[0]
            if l not in shlib_provider:
                shlib_provider[l] = {}
            shlib_provider[l][libdir] = (dep_pkg, lib_ver)

    libsearchpath = [d.getVar('libdir'), d.getVar('base_libdir')]

    for pkg in shlib_pkgs:
        bb.debug(2, "calculating shlib requirements for %s" % pkg)

        private_libs = d.getVar('PRIVATE_LIBS_' + pkg) or d.getVar('PRIVATE_LIBS') or ""
        private_libs = private_libs.split()

        deps = list()
        for n in needed[pkg]:
            # If n is among the private libraries, don't search for a provider.
            # This can go wrong when e.g. some abc.bb provides a private
            # /opt/abc/lib/libfoo.so.1 and also ships /usr/bin/abc, which
            # depends on the system library libfoo.so.1 -- but skipping the
            # lookup is still a better alternative than providing our own
            # version and then adding a runtime dependency on the same
            # system library.
            import fnmatch
            if private_libs and any(fnmatch.fnmatch(n[0], i) for i in private_libs):
                bb.debug(2, '%s: Dependency %s covered by PRIVATE_LIBS' % (pkg, n[0]))
                continue
            if n[0] in shlib_provider.keys():
                shlib_provider_map = shlib_provider[n[0]]
                matches = set()
                for p in itertools.chain(list(n[2]), sorted(shlib_provider_map.keys()), libsearchpath):
                    if p in shlib_provider_map:
                        matches.add(p)
                if len(matches) > 1:
                    matchpkgs = ', '.join([shlib_provider_map[match][0] for match in matches])
                    bb.error("%s: Multiple shlib providers for %s: %s (used by files: %s)" % (pkg, n[0], matchpkgs, n[1]))
                elif len(matches) == 1:
                    (dep_pkg, ver_needed) = shlib_provider_map[matches.pop()]

                    bb.debug(2, '%s: Dependency %s requires package %s (used by files: %s)' % (pkg, n[0], dep_pkg, n[1]))

                    if dep_pkg == pkg:
                        continue

                    if ver_needed:
                        dep = "%s (>= %s)" % (dep_pkg, ver_needed)
                    else:
                        dep = dep_pkg
                    if dep not in deps:
                        deps.append(dep)
                    continue
            bb.note("Couldn't find shared library provider for %s, used by files: %s" % (n[0], n[1]))

        deps_file = os.path.join(pkgdest, pkg + ".shlibdeps")
        if os.path.exists(deps_file):
            os.remove(deps_file)
        if deps:
            with open(deps_file, 'w') as fd:
                for dep in sorted(deps):
                    fd.write(dep + '\n')

def apply_update_alternative_renames(d):
    if not update_alternatives_enabled(d):
        return

    import re

    def update_files(alt_target, alt_target_rename, pkg, d):
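        # Rewrite whole-word occurrences of alt_target in FILES_<pkg>, e.g.
        # "/usr/bin/vi" -> "/usr/bin/vi.busybox" (illustrative names).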
        f = d.getVar('FILES_' + pkg)
        if f:
            f = re.sub(r'(^|\s)%s(\s|$)' % re.escape(alt_target), r'\1%s\2' % alt_target_rename, f)
            d.setVar('FILES_' + pkg, f)

    # Check for deprecated usage...
    pn = d.getVar('BPN')
    if d.getVar('ALTERNATIVE_LINKS') is not None:
        bb.fatal('%s: Use of ALTERNATIVE_LINKS/ALTERNATIVE_PATH/ALTERNATIVE_NAME is no longer supported, please convert to the updated syntax, see update-alternatives.bbclass for more info.' % pn)

    # Do actual update alternatives processing
    pkgdest = d.getVar('PKGD')
    for pkg in (d.getVar('PACKAGES') or "").split():
        # If the src == dest, we know we need to rename the dest by appending ${BPN}
        link_rename = []
        for alt_name in (d.getVar('ALTERNATIVE_%s' % pkg) or "").split():
            alt_link     = d.getVarFlag('ALTERNATIVE_LINK_NAME', alt_name)
            if not alt_link:
                alt_link = "%s/%s" % (d.getVar('bindir'), alt_name)
                d.setVarFlag('ALTERNATIVE_LINK_NAME', alt_name, alt_link)
            if alt_link.startswith(os.path.join(d.getVar('sysconfdir'), 'init.d')):
                # Managing init scripts does not work (bug #10433), foremost
                # because of a race with update-rc.d
                bb.fatal("Using update-alternatives for managing SysV init scripts is not supported")

            alt_target   = d.getVarFlag('ALTERNATIVE_TARGET_%s' % pkg, alt_name) or d.getVarFlag('ALTERNATIVE_TARGET', alt_name)
            alt_target   = alt_target or d.getVar('ALTERNATIVE_TARGET_%s' % pkg) or d.getVar('ALTERNATIVE_TARGET') or alt_link
            # Sometimes alt_target is specified as relative to the link name.
            alt_target   = os.path.join(os.path.dirname(alt_link), alt_target)

            # If the link and target are the same name, we need to rename the target.
            if alt_link == alt_target:
                src = '%s/%s' % (pkgdest, alt_target)
                alt_target_rename = '%s.%s' % (alt_target, pn)
                dest = '%s/%s' % (pkgdest, alt_target_rename)
                if os.path.lexists(dest):
                    bb.note('%s: Already renamed: %s' % (pn, alt_target_rename))
                elif os.path.lexists(src):
                    if os.path.islink(src):
                        # Delay rename of links
                        link_rename.append((alt_target, alt_target_rename))
                    else:
                        bb.note('%s: Rename %s -> %s' % (pn, alt_target, alt_target_rename))
                        os.rename(src, dest)
                        update_files(alt_target, alt_target_rename, pkg, d)
                else:
                    bb.warn("%s: alternative target (%s or %s) does not exist, skipping..." % (pn, alt_target, alt_target_rename))
                    continue
                d.setVarFlag('ALTERNATIVE_TARGET_%s' % pkg, alt_name, alt_target_rename)

        # Process delayed link names
        # Do these after other renames so we can correct broken links
        for (alt_target, alt_target_rename) in link_rename:
            src = '%s/%s' % (pkgdest, alt_target)
            dest = '%s/%s' % (pkgdest, alt_target_rename)
            link_target = oe.path.realpath(src, pkgdest, True)

            if os.path.lexists(link_target):
                # Ok, the link_target exists, we can rename
                bb.note('%s: Rename (link) %s -> %s' % (pn, alt_target, alt_target_rename))
                os.rename(src, dest)
            else:
                # Try to resolve the broken link to link.${BPN}
                link_maybe = '%s.%s' % (os.readlink(src), pn)
                if os.path.lexists(os.path.join(os.path.dirname(src), link_maybe)):
                    # Ok, the renamed link target exists.. create a new link, and remove the original
                    bb.note('%s: Creating new link %s -> %s' % (pn, alt_target_rename, link_maybe))
                    os.symlink(link_maybe, dest)
                    os.unlink(src)
                else:
                    bb.warn('%s: Unable to resolve dangling symlink: %s' % (pn, alt_target))
                    continue
            update_files(alt_target, alt_target_rename, pkg, d)

def package_name_hook(d):
    bb.build.exec_func('debian_package_name_hook', d)

def copy_license_files(lic_files_paths, destdir):
    import shutil
    import errno

    bb.utils.mkdirhier(destdir)
    for (basename, path, beginline, endline) in lic_files_paths:
        try:
            src = path
            dst = os.path.join(destdir, basename)
            if os.path.exists(dst):
                os.remove(dst)
            if os.path.islink(src):
                src = os.path.realpath(src)
            canlink = os.access(src, os.W_OK) and (os.stat(src).st_dev == os.stat(destdir).st_dev) and beginline is None and endline is None
            if canlink:
                try:
                    os.link(src, dst)
                except OSError as err:
                    if err.errno == errno.EXDEV:
                        # Copy license files if hard-link is not possible even if st_dev is the
                        # same on source and destination (docker container with device-mapper?)
                        canlink = False
                    else:
                        raise
                # Only chown if we hard-linked, and we're running under pseudo
                if canlink and os.environ.get('PSEUDO_DISABLED') == '0':
                    os.chown(dst,0,0)
            if not canlink:
                begin_idx = int(beginline)-1 if beginline is not None else None
                end_idx = int(endline) if endline is not None else None
                if begin_idx is None and end_idx is None:
                    shutil.copyfile(src, dst)
                else:
                    with open(src, 'rb') as src_f:
                        with open(dst, 'wb') as dst_f:
                            dst_f.write(b''.join(src_f.readlines()[begin_idx:end_idx]))

        except Exception as e:
            bb.warn("Could not copy license file %s to %s: %s" % (src, dst, e))

def staging_copyfile(c, target, dest, postinsts, seendirs):
    import errno

    destdir = os.path.dirname(dest)
    if destdir not in seendirs:
        bb.utils.mkdirhier(destdir)
        seendirs.add(destdir)
    if "/usr/bin/postinst-" in c:
        postinsts.append(dest)
    if os.path.islink(c):
        linkto = os.readlink(c)
        if os.path.lexists(dest):
            if not os.path.islink(dest):
                raise OSError(errno.EEXIST, "Link %s already exists as a file" % dest, dest)
            if os.readlink(dest) == linkto:
                return dest
            raise OSError(errno.EEXIST, "Link %s already exists to a different location? (%s vs %s)" % (dest, os.readlink(dest), linkto), dest)
        os.symlink(linkto, dest)
        #bb.warn(c)
    else:
        try:
            os.link(c, dest)
        except OSError as err:
            if err.errno == errno.EXDEV:
                bb.utils.copyfile(c, dest)
            else:
                raise
    return dest

def package_qa_write_error(type, error, d):
    logfile = d.getVar('QA_LOGFILE')
    if logfile:
        p = d.getVar('P')
        with open(logfile, "a+") as f:
            f.write("%s: %s [%s]\n" % (p, error, type))

def setscene_depvalid(task, taskdependees, notneeded, d, log=None):
    # taskdependees is a dict of tasks which depend on task, each being a 3 item list of [PN, TASKNAME, FILENAME]
    # task is included in taskdependees too
    # Return - False - We need this dependency
    #        - True - We can skip this dependency
    import re

    def logit(msg, log):
        if log is not None:
            log.append(msg)
        else:
            bb.debug(2, msg)

    logit("Considering setscene task: %s" % (str(taskdependees[task])), log)

    def isNativeCross(x):
        return x.endswith("-native") or "-cross-" in x or "-crosssdk" in x or x.endswith("-cross")

    # We only need to trigger populate_lic through direct dependencies
    if taskdependees[task][1] == "do_populate_lic":
        return True

    # stash_locale and gcc_stash_builddir are never needed as a dependency for built objects
    if taskdependees[task][1] == "do_stash_locale" or taskdependees[task][1] == "do_gcc_stash_builddir":
        return True

    # We only need to trigger packagedata through direct dependencies
    # but need to preserve packagedata on packagedata links
    if taskdependees[task][1] == "do_packagedata":
        for dep in taskdependees:
            if taskdependees[dep][1] == "do_packagedata":
                return False
        return True

    for dep in taskdependees:
        logit("  considering dependency: %s" % (str(taskdependees[dep])), log)
        if task == dep:
            continue
        if dep in notneeded:
            continue
        # do_package_write_* and do_package don't need do_package
        if taskdependees[task][1] == "do_package" and taskdependees[dep][1] in ['do_package', 'do_package_write_deb', 'do_package_write_ipk', 'do_package_write_rpm', 'do_packagedata', 'do_package_qa']:
            continue
        # do_package_write_* need do_populate_sysroot as they're mainly postinstall dependencies
        if taskdependees[task][1] == "do_populate_sysroot" and taskdependees[dep][1] in ['do_package_write_deb', 'do_package_write_ipk', 'do_package_write_rpm']:
            return False
        # do_package/packagedata/package_qa don't need do_populate_sysroot
        if taskdependees[task][1] == "do_populate_sysroot" and taskdependees[dep][1] in ['do_package', 'do_packagedata', 'do_package_qa']:
            continue
        # Native/Cross packages don't exist and are noexec anyway
        if isNativeCross(taskdependees[dep][0]) and taskdependees[dep][1] in ['do_package_write_deb', 'do_package_write_ipk', 'do_package_write_rpm', 'do_packagedata', 'do_package', 'do_package_qa']:
            continue

        # This is due to the [depends] in useradd.bbclass complicating matters
        # The logic *is* reversed here due to the way hard setscene dependencies are injected
        if (taskdependees[task][1] == 'do_package' or taskdependees[task][1] == 'do_populate_sysroot') and taskdependees[dep][0].endswith(('shadow-native', 'shadow-sysroot', 'base-passwd', 'pseudo-native')) and taskdependees[dep][1] == 'do_populate_sysroot':
            continue

        # Consider sysroot depending on sysroot tasks
        if taskdependees[task][1] == 'do_populate_sysroot' and taskdependees[dep][1] == 'do_populate_sysroot':
            # Allow excluding certain recursive dependencies. If a recipe needs it should add a
            # specific dependency itself, rather than relying on one of its dependees to pull
            # them in.
            # See also http://lists.openembedded.org/pipermail/openembedded-core/2018-January/146324.html
            not_needed = False
            excludedeps = d.getVar('_SSTATE_EXCLUDEDEPS_SYSROOT')
            if excludedeps is None:
                # Cache the regular expressions for speed
                excludedeps = []
                for excl in (d.getVar('SSTATE_EXCLUDEDEPS_SYSROOT') or "").split():
                    excludedeps.append((re.compile(excl.split('->', 1)[0]), re.compile(excl.split('->', 1)[1])))
                d.setVar('_SSTATE_EXCLUDEDEPS_SYSROOT', excludedeps)
            for excl in excludedeps:
                if excl[0].match(taskdependees[dep][0]):
                    if excl[1].match(taskdependees[task][0]):
                        not_needed = True
                        break
            if not_needed:
                continue
            # For meta-extsdk-toolchain we want all sysroot dependencies
            if taskdependees[dep][0] == 'meta-extsdk-toolchain':
                return False
            # Native/Cross populate_sysroot need their dependencies
            if isNativeCross(taskdependees[task][0]) and isNativeCross(taskdependees[dep][0]):
                return False
            # Target populate_sysroot depended on by cross tools need to be installed
            if isNativeCross(taskdependees[dep][0]):
                return False
            # Native/cross tools depended upon by target sysroot are not needed
            # Add an exception for shadow-native as required by useradd.bbclass
            if isNativeCross(taskdependees[task][0]) and taskdependees[task][0] != 'shadow-native':
                continue
            # Target populate_sysroot need their dependencies
            return False

        if taskdependees[task][1] == 'do_shared_workdir':
            continue

        if taskdependees[dep][1] == "do_populate_lic":
            continue


        # Safe fallthrough default
        logit(" Default setscene dependency fall through due to dependency: %s" % (str(taskdependees[dep])), log)
        return False
    return True

def debian_package_name_hook(d):
    import glob, copy, stat, errno, re, pathlib, subprocess

    pkgdest = d.getVar("PKGDEST")
    packages = d.getVar('PACKAGES')
    so_re = re.compile(r"lib.*\.so")

    def socrunch(s):
        s = s.lower().replace('_', '-')
        m = re.match(r"^(.*)(.)\.so\.(.*)$", s)
        if m is None:
            return None
        if m.group(2) in '0123456789':
            bin = '%s%s-%s' % (m.group(1), m.group(2), m.group(3))
        else:
            bin = m.group(1) + m.group(2) + m.group(3)
        dev = m.group(1) + m.group(2)
        return (bin, dev)
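    # Computed examples (Debian-style library package naming):
    #   socrunch("libtic.so.5")  -> ("libtic5", "libtic")
    #   socrunch("libfoo2.so.3") -> ("libfoo2-3", "libfoo2")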

    def isexec(path):
        try:
            s = os.stat(path)
        except (os.error, AttributeError):
            return 0
        return (s[stat.ST_MODE] & stat.S_IEXEC)

    def add_rprovides(pkg, d):
        newpkg = d.getVar('PKG_' + pkg)
        if newpkg and newpkg != pkg:
            provs = (d.getVar('RPROVIDES_' + pkg) or "").split()
            if pkg not in provs:
                d.appendVar('RPROVIDES_' + pkg, " " + pkg + " (=" + d.getVar("PKGV") + ")")

    def auto_libname(packages, orig_pkg):
        p = lambda var: pathlib.PurePath(d.getVar(var))
        libdirs = (p("base_libdir"), p("libdir"))
        bindirs = (p("base_bindir"), p("base_sbindir"), p("bindir"), p("sbindir"))

        sonames = []
        has_bins = 0
        has_libs = 0
        for f in pkgfiles[orig_pkg]:
            # This is .../packages-split/orig_pkg/
            pkgpath = pathlib.PurePath(pkgdest, orig_pkg)
            # Strip pkgpath off the full path to a file in the package, re-root
            # so it is absolute, and then get the parent directory of the file.
            path = pathlib.PurePath("/") / (pathlib.PurePath(f).relative_to(pkgpath).parent)
            if path in bindirs:
                has_bins = 1
            if path in libdirs:
                has_libs = 1
                if so_re.match(os.path.basename(f)):
                    try:
                        cmd = [d.expand("${TARGET_PREFIX}objdump"), "-p", f]
                        output = subprocess.check_output(cmd).decode("utf-8")
                        for m in re.finditer(r"\s+SONAME\s+([^\s]+)", output):
                            if m.group(1) not in sonames:
                                sonames.append(m.group(1))
                    except subprocess.CalledProcessError:
                        pass
        bb.debug(1, 'LIBNAMES: pkg %s libs %d bins %d sonames %s' % (orig_pkg, has_libs, has_bins, sonames))
        soname = None
        if len(sonames) == 1:
            soname = sonames[0]
        elif len(sonames) > 1:
            lead = d.getVar('LEAD_SONAME')
            if lead:
                r = re.compile(lead)
                filtered = []
                for s in sonames:
                    if r.match(s):
                        filtered.append(s)
                if len(filtered) == 1:
                    soname = filtered[0]
                elif len(filtered) > 1:
                    bb.note("Multiple matches (%s) for LEAD_SONAME '%s'" % (", ".join(filtered), lead))
                else:
                    bb.note("Multiple libraries (%s) found, but LEAD_SONAME '%s' doesn't match any of them" % (", ".join(sonames), lead))
            else:
                bb.note("Multiple libraries (%s) found and LEAD_SONAME not defined" % ", ".join(sonames))

        if has_libs and not has_bins and soname:
            soname_result = socrunch(soname)
            if soname_result:
                (pkgname, devname) = soname_result
                for pkg in packages.split():
                    if (d.getVar('PKG_' + pkg, False) or d.getVar('DEBIAN_NOAUTONAME_' + pkg, False)):
                        add_rprovides(pkg, d)
                        continue
                    debian_pn = d.getVar('DEBIANNAME_' + pkg, False)
                    if debian_pn:
                        newpkg = debian_pn
                    elif pkg == orig_pkg:
                        newpkg = pkgname
                    else:
                        newpkg = pkg.replace(orig_pkg, devname, 1)
                    mlpre = d.getVar('MLPREFIX')
                    if mlpre and not newpkg.startswith(mlpre):
                        newpkg = mlpre + newpkg
                    if newpkg != pkg:
                        bb.note("debian: renaming %s to %s" % (pkg, newpkg))
                        d.setVar('PKG_' + pkg, newpkg)
                        add_rprovides(pkg, d)
        else:
            add_rprovides(orig_pkg, d)

    # A reversed sort is needed when one package name is a substring of another,
    # e.g. in ncurses, without the reverse sort we get:
    # DEBUG: LIBNAMES: pkgname libtic5 devname libtic pkg ncurses-libtic orig_pkg ncurses-libtic debian_pn None newpkg libtic5
    # and later
    # DEBUG: LIBNAMES: pkgname libtic5 devname libtic pkg ncurses-libticw orig_pkg ncurses-libtic debian_pn None newpkg libticw
    # so we need to handle ncurses-libticw->libticw5 before ncurses-libtic->libtic5
    for pkg in sorted((d.getVar('AUTO_LIBNAME_PKGS') or "").split(), reverse=True):
        auto_libname(packages, pkg)

def add_package_and_files(d):
    packages = d.getVar('PACKAGES')
    files = d.getVar('LICENSE_FILES_DIRECTORY')
    pn = d.getVar('PN')
    pn_lic = "%s%s" % (pn, d.getVar('LICENSE_PACKAGE_SUFFIX', False))
    if pn_lic in packages.split():
        bb.warn("%s package already existed in %s." % (pn_lic, pn))
    else:
        # first in PACKAGES to be sure that nothing else gets LICENSE_FILES_DIRECTORY
        d.setVar('PACKAGES', "%s %s" % (pn_lic, packages))
        d.setVar('FILES_' + pn_lic, files)
    for pn in packages.split():
        if pn == pn_lic:
            continue
        rrecommends_pn = d.getVar('RRECOMMENDS_' + pn)
        if rrecommends_pn:
            d.setVar('RRECOMMENDS_' + pn, "%s %s" % (pn_lic, rrecommends_pn))
        else:
            d.setVar('RRECOMMENDS_' + pn, "%s" % (pn_lic))

def rpath_replace(path, d):
    bindirs = d.expand("${bindir} ${sbindir} ${base_sbindir} ${base_bindir} ${libdir} ${base_libdir} ${libexecdir} ${PREPROCESS_RELOCATE_DIRS}").split()

    for bindir in bindirs:
        #bb.note ("Processing directory " + bindir)
        directory = path + "/" + bindir
        process_dir(path, directory, d)

def apply_update_alternative_provides(d):
    pn = d.getVar('BPN')
    pkgdest = d.getVar('PKGDEST')

    for pkg in d.getVar('PACKAGES').split():
        for alt_name in (d.getVar('ALTERNATIVE_%s' % pkg) or "").split():
            alt_link     = d.getVarFlag('ALTERNATIVE_LINK_NAME', alt_name)
            alt_target   = d.getVarFlag('ALTERNATIVE_TARGET_%s' % pkg, alt_name) or d.getVarFlag('ALTERNATIVE_TARGET', alt_name)
            alt_target   = alt_target or d.getVar('ALTERNATIVE_TARGET_%s' % pkg) or d.getVar('ALTERNATIVE_TARGET') or alt_link

            if alt_link == alt_target:
                bb.warn('%s: alt_link == alt_target: %s == %s' % (pn, alt_link, alt_target))
                alt_target = '%s.%s' % (alt_target, pn)

            if not os.path.lexists('%s/%s/%s' % (pkgdest, pkg, alt_target)):
                continue

            # Add file provide
            trans_target = oe.package.file_translate(alt_target)
            d.appendVar('FILERPROVIDES_%s_%s' % (trans_target, pkg), " " + alt_link)
            if trans_target not in (d.getVar('FILERPROVIDESFLIST_%s' % pkg) or ""):
                d.appendVar('FILERPROVIDESFLIST_%s' % pkg, " " + trans_target)

def copydebugsources(debugsrcdir, sources, d):
    # The debug src information written out to sourcefile is further processed
    # and copied to the destination here.

    import stat
    import subprocess

    if debugsrcdir and sources:
        sourcefile = d.expand("${WORKDIR}/debugsources.list")
        bb.utils.remove(sourcefile)

        # filenames are null-separated - this is an artefact of the previous use
        # of rpm's debugedit, which was writing them out that way, and the code elsewhere
        # is still assuming that.
        debuglistoutput = '\0'.join(sources) + '\0'
        with open(sourcefile, 'a') as sf:
            sf.write(debuglistoutput)

        dvar = d.getVar('PKGD')
        strip = d.getVar("STRIP")
        objcopy = d.getVar("OBJCOPY")
        workdir = d.getVar("WORKDIR")
        workparentdir = os.path.dirname(os.path.dirname(workdir))
        workbasedir = os.path.basename(os.path.dirname(workdir)) + "/" + os.path.basename(workdir)

        # If the build path appears in sourcefile, the toolchain was not
        # given -fdebug-prefix-map when compiling
        if checkbuildpath(sourcefile, d):
            localsrc_prefix = workparentdir + "/"
        else:
            localsrc_prefix = "/usr/src/debug/"

        nosuchdir = []
        basepath = dvar
        for p in debugsrcdir.split("/"):
            basepath = basepath + "/" + p
            if not cpath.exists(basepath):
                nosuchdir.append(basepath)
        bb.utils.mkdirhier(basepath)
        cpath.updatecache(basepath)

        # Ignore files from the recipe sysroots (target and native)
        processdebugsrc =  "LC_ALL=C ; sort -z -u '%s' | egrep -v -z '((<internal>|<built-in>)$|/.*recipe-sysroot.*/)' | "
        # We need to ignore files that are not actually ours
        # we do this by only paying attention to items from this package
        processdebugsrc += "fgrep -zw '%s' | "
        # Remove prefix in the source paths
        processdebugsrc += "sed 's#%s##g' | "
        processdebugsrc += "(cd '%s' ; cpio -pd0mlL --no-preserve-owner '%s%s' 2>/dev/null)"

        cmd = processdebugsrc % (sourcefile, workbasedir, localsrc_prefix, workparentdir, dvar, debugsrcdir)
        try:
            subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
        except subprocess.CalledProcessError:
            # Can "fail" if internal headers/transient sources are attempted
            pass

        # cpio seems to have a bug when -l and -L are used together: symbolic links are just copied, not dereferenced.
        # Work around this by manually finding and copying any symbolic links that made it through.
        cmd = "find %s%s -type l -print0 -delete | sed s#%s%s/##g | (cd '%s' ; cpio -pd0mL --no-preserve-owner '%s%s')" % \
                (dvar, debugsrcdir, dvar, debugsrcdir, workparentdir, dvar, debugsrcdir)
        subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)

        # The copy by cpio may have resulted in some empty directories!  Remove these
        cmd = "find %s%s -empty -type d -delete" % (dvar, debugsrcdir)
        subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)

        # Also remove debugsrcdir if it's empty
        for p in nosuchdir[::-1]:
            if os.path.exists(p) and not os.listdir(p):
                os.rmdir(p)

#
# Package data handling routines
#

def staging_copydir(c, target, dest, seendirs):
    if dest not in seendirs:
        bb.utils.mkdirhier(dest)
        seendirs.add(dest)

def update_alternatives_alt_targets(d, pkg):
    """
    Returns the update-alternatives metadata for a package.

    The returned format is a list of tuples where the tuple contains:
    alt_name:     The binary name
    alt_link:     The path for the binary (Shared by different packages)
    alt_target:   The path for the renamed binary (Unique per package)
    alt_priority: The priority of the alt_target

    All the alt_targets will be installed into the sysroot. The alt_link is
    a symlink pointing to the alt_target with the highest priority.
    """

    pn = d.getVar('BPN')
    pkgdest = d.getVar('PKGD')
    updates = list()
    for alt_name in (d.getVar('ALTERNATIVE_%s' % pkg) or "").split():
        alt_link     = d.getVarFlag('ALTERNATIVE_LINK_NAME', alt_name)
        alt_target   = d.getVarFlag('ALTERNATIVE_TARGET_%s' % pkg, alt_name) or \
                       d.getVarFlag('ALTERNATIVE_TARGET', alt_name) or \
                       d.getVar('ALTERNATIVE_TARGET_%s' % pkg) or \
                       d.getVar('ALTERNATIVE_TARGET') or \
                       alt_link
        alt_priority = d.getVarFlag('ALTERNATIVE_PRIORITY_%s' % pkg,  alt_name) or \
                       d.getVarFlag('ALTERNATIVE_PRIORITY',  alt_name) or \
                       d.getVar('ALTERNATIVE_PRIORITY_%s' % pkg) or  \
                       d.getVar('ALTERNATIVE_PRIORITY')

        # This shouldn't trigger, as it should have been resolved earlier!
        if alt_link == alt_target:
            bb.note('alt_link == alt_target: %s == %s -- correcting, this should not happen!' % (alt_link, alt_target))
            alt_target = '%s.%s' % (alt_target, pn)

        if not os.path.lexists('%s/%s' % (pkgdest, alt_target)):
            bb.warn('%s: NOT adding alternative provide %s: %s does not exist' % (pn, alt_link, alt_target))
            continue

        alt_target = os.path.normpath(alt_target)
        updates.append( (alt_name, alt_link, alt_target, alt_priority) )

    return updates

def files_from_filevars(filevars):
    import os, glob
    cpath = oe.cachedpath.CachedPath()
    files = []
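    # FILES entries are absolute paths or globs; each is rewritten relative
    # to the package root ("./usr/bin/foo") and globbed, so e.g.
    # "/usr/lib/*.so.*" may expand to several ./usr/lib/... entries
    # (illustrative pattern).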
    for f in filevars:
        if os.path.isabs(f):
            f = '.' + f
        if not f.startswith("./"):
            f = './' + f
        globbed = glob.glob(f)
        if globbed:
            if [f] != globbed:
                files += globbed
                continue
        files.append(f)

    symlink_paths = []
    for ind, f in enumerate(files):
        # Handle directory symlinks. Truncate path to the lowest level symlink
        parent = ''
        for dirname in f.split('/')[:-1]:
            parent = os.path.join(parent, dirname)
            if dirname == '.':
                continue
            if cpath.islink(parent):
                bb.warn("FILES contains file '%s' which resides under a "
                        "directory symlink. Please fix the recipe and use the "
                        "real path for the file." % f[1:])
                symlink_paths.append(f)
                files[ind] = parent
                f = parent
                break

        if not cpath.islink(f):
            if cpath.isdir(f):
                newfiles = [os.path.join(f, x) for x in os.listdir(f)]
                if newfiles:
                    files += newfiles

    return files, symlink_paths

# Called in package_<rpm,ipk,deb>.bbclass to get the correct list of configuration files

def source_info(file, d, fatal=True):
    import subprocess

    cmd = ["dwarfsrcfiles", file]
    try:
        output = subprocess.check_output(cmd, universal_newlines=True, stderr=subprocess.STDOUT)
        retval = 0
    except subprocess.CalledProcessError as exc:
        output = exc.output
        retval = exc.returncode

    # 255 means a specific file wasn't fully parsed to get the debug file list, which is not a fatal failure
    if retval != 0 and retval != 255:
        msg = "dwarfsrcfiles failed with exit code %s (cmd was %s)%s" % (retval, cmd, ":\n%s" % output if output else "")
        if fatal:
            bb.fatal(msg)
        bb.note(msg)

    debugsources = parse_debugsources_from_dwarfsrcfiles_output(output)

    return list(debugsources)

def update_alternatives_enabled(d):
    # Update Alternatives only works on target packages...
    if bb.data.inherits_class('native', d) or \
       bb.data.inherits_class('cross', d) or bb.data.inherits_class('crosssdk', d) or \
       bb.data.inherits_class('cross-canadian', d):
        return False

    # Disable when targeting mingw32 (no target support)
    if d.getVar("TARGET_OS") == "mingw32":
        return False

    return True

def find_license_files(d):
    """
    Creates list of files used in LIC_FILES_CHKSUM and generic LICENSE files.
    """
    import shutil
    import oe.license
    from collections import defaultdict, OrderedDict

    # All the license files for the package
    lic_files = d.getVar('LIC_FILES_CHKSUM') or ""
    pn = d.getVar('PN')
    # The license files are located under S, at the paths given in LIC_FILES_CHKSUM.
    srcdir = d.getVar('S')
    # Directory we store the generic licenses as set in the distro configuration
    generic_directory = d.getVar('COMMON_LICENSE_DIR')
    # List of (basename, path, beginline, endline) tuples
    lic_files_paths = []
    # dict mapping non-generic license files to their license type
    non_generic_lics = {}
    # Entries from LIC_FILES_CHKSUM
    lic_chksums = {}
    license_source_dirs = []
    license_source_dirs.append(generic_directory)
    # LICENSE_PATH may be unset; treat that as no additional license directories
    for lic_dir in (d.getVar('LICENSE_PATH') or "").split():
        license_source_dirs.append(lic_dir)

    class FindVisitor(oe.license.LicenseVisitor):
        def visit_Str(self, node):
            #
            # Until we figure out what to do with the two modifiers we
            # support ("or greater" = "+" and "with exceptions" = "*"),
            # we'll just strip the modifier and use the base license.
            find_license(node.s.replace("+", "").replace("*", ""))
            self.generic_visit(node)
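    # e.g. LICENSE = "GPLv2+ & MIT" visits the string nodes "GPLv2+" and
    # "MIT"; with the modifiers stripped above, find_license() is called
    # with "GPLv2" and then "MIT".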

    def find_license(license_type):
        try:
            bb.utils.mkdirhier(gen_lic_dest)
        except:
            pass
        spdx_generic = None
        license_source = None
        # If the generic does not exist we need to check to see if there is an SPDX mapping to it,
        # unless NO_GENERIC_LICENSE is set.
        for lic_dir in license_source_dirs:
            if not os.path.isfile(os.path.join(lic_dir, license_type)):
                if d.getVarFlag('SPDXLICENSEMAP', license_type) != None:
                    # Great, there is an SPDXLICENSEMAP. We can copy!
                    bb.debug(1, "We need to use a SPDXLICENSEMAP for %s" % (license_type))
                    spdx_generic = d.getVarFlag('SPDXLICENSEMAP', license_type)
                    license_source = lic_dir
                    break
            elif os.path.isfile(os.path.join(lic_dir, license_type)):
                spdx_generic = license_type
                license_source = lic_dir
                break

        non_generic_lic = d.getVarFlag('NO_GENERIC_LICENSE', license_type)
        if spdx_generic and license_source:
            # We really should copy to generic_ + spdx_generic; however, that
            # ends up messing up the manifest audit. This should be fixed in
            # emit_pkgdata (or we actually go and fix all the recipes).

            lic_files_paths.append(("generic_" + license_type, os.path.join(license_source, spdx_generic),
                                    None, None))

            # The user may attempt to use NO_GENERIC_LICENSE for a generic
            # license which doesn't make sense and should not be allowed;
            # warn the user in this case.
            if d.getVarFlag('NO_GENERIC_LICENSE', license_type):
                bb.warn("%s: %s is a generic license, please don't use NO_GENERIC_LICENSE for it." % (pn, license_type))

        elif non_generic_lic and non_generic_lic in lic_chksums:
            # if NO_GENERIC_LICENSE is set, we copy the license files from the fetched source
            # of the package rather than the license_source_dirs.
            lic_files_paths.append(("generic_" + license_type,
                                    os.path.join(srcdir, non_generic_lic), None, None))
            non_generic_lics[non_generic_lic] = license_type
        else:
            # Explicitly skip the CLOSED license, because it isn't generic
            if license_type != 'CLOSED':
                # And here is where we warn people that their licenses are lousy
                bb.warn("%s: No generic license file exists for: %s in any provider" % (pn, license_type))

    if not generic_directory:
        bb.fatal("COMMON_LICENSE_DIR is unset. Please set this in your distro config")

    for url in lic_files.split():
        try:
            (method, host, path, user, pswd, parm) = bb.fetch.decodeurl(url)
            if method != "file" or not path:
                raise bb.fetch.MalformedUrl()
        except bb.fetch.MalformedUrl:
            bb.fatal("%s: LIC_FILES_CHKSUM contains an invalid URL:  %s" % (d.getVar('PF'), url))
        # We want the license filename and path
        chksum = parm.get('md5', None)
        beginline = parm.get('beginline')
        endline = parm.get('endline')
        lic_chksums[path] = (chksum, beginline, endline)

    v = FindVisitor()
    try:
        v.visit_string(d.getVar('LICENSE'))
    except oe.license.InvalidLicense as exc:
        bb.fatal('%s: %s' % (d.getVar('PF'), exc))
    except SyntaxError:
        bb.warn("%s: Failed to parse it's LICENSE field." % (d.getVar('PF')))
    # Add files from LIC_FILES_CHKSUM to list of license files
    lic_chksum_paths = defaultdict(OrderedDict)
    for path, data in sorted(lic_chksums.items()):
        lic_chksum_paths[os.path.basename(path)][data] = (os.path.join(srcdir, path), data[1], data[2])
    for basename, files in lic_chksum_paths.items():
        if len(files) == 1:
            # Don't copy again a LICENSE already handled as non-generic
            if basename in non_generic_lics:
                continue
            data = list(files.values())[0]
            lic_files_paths.append(tuple([basename] + list(data)))
        else:
            # If there are multiple different license files with identical
            # basenames we rename them to <file>.0, <file>.1, ...
            for i, data in enumerate(files.values()):
                lic_files_paths.append(tuple(["%s.%d" % (basename, i)] + list(data)))

    return lic_files_paths

def read_libdep_files(d):
    pkglibdeps = {}
    packages = d.getVar('PACKAGES').split()
    for pkg in packages:
        pkglibdeps[pkg] = {}
        for extension in ".shlibdeps", ".pcdeps", ".clilibdeps":
            depsfile = d.expand("${PKGDEST}/" + pkg + extension)
            if os.access(depsfile, os.R_OK):
                with open(depsfile) as fd:
                    lines = fd.readlines()
                for l in lines:
                    l = l.rstrip()
                    deps = bb.utils.explode_dep_versions2(l)
                    for dep in deps:
                        if dep not in pkglibdeps[pkg]:
                            pkglibdeps[pkg][dep] = deps[dep]
    return pkglibdeps

def legitimize_package_name(s):
    """
    Make sure package names are legitimate strings
    """
    import re

    def fixutf(m):
        cp = m.group(1)
        if cp:
            return ('\\u%s' % cp).encode('latin-1').decode('unicode_escape')

    # Handle unicode codepoints encoded as <U0123>, as in glibc locale files.
    s = re.sub(r'<U([0-9A-Fa-f]{1,4})>', fixutf, s)

    # Remaining package name validity fixes
    return s.lower().replace('_', '-').replace('@', '+').replace(',', '+').replace('/', '-')

def checkbuildpath(file, d):
    tmpdir = d.getVar('TMPDIR')
    with open(file) as f:
        file_content = f.read()
        if tmpdir in file_content:
            return True

    return False

def process_dir(rootdir, directory, d, break_hardlinks=False):
    bb.debug(2, "Checking %s for binaries to process" % directory)
    if not os.path.exists(directory):
        return

    import stat

    rootdir = os.path.normpath(rootdir)
    cmd = d.expand('${CHRPATH_BIN}')
    tmpdir = os.path.normpath(d.getVar('TMPDIR', False))
    baseprefix = os.path.normpath(d.expand('${base_prefix}'))
    hostos = d.getVar("HOST_OS")

    if "linux" in hostos:
        process_file = process_file_linux
    elif "darwin" in hostos:
        process_file = process_file_darwin
    else:
        # Relocations not supported
        return

    dirs = os.listdir(directory)
    for file in dirs:
        fpath = directory + "/" + file
        fpath = os.path.normpath(fpath)
        if os.path.islink(fpath):
            # Skip symlinks
            continue

        if os.path.isdir(fpath):
            process_dir(rootdir, fpath, d, break_hardlinks=break_hardlinks)
        else:
            #bb.note("Testing %s for relocatability" % fpath)

            # We need read and write permissions for chrpath, if we don't have
            # them then set them temporarily. Take a copy of the files
            # permissions so that we can restore them afterwards.
            perms = os.stat(fpath)[stat.ST_MODE]
            if os.access(fpath, os.W_OK|os.R_OK):
                perms = None
            else:
                # Temporarily make the file writeable so we can chrpath it
                os.chmod(fpath, perms|stat.S_IRWXU)

            process_file(cmd, fpath, rootdir, baseprefix, tmpdir, d, break_hardlinks=break_hardlinks)

            if perms:
                os.chmod(fpath, perms)

def parse_debugsources_from_dwarfsrcfiles_output(dwarfsrcfiles_output):
    debugfiles = {}
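    # dwarfsrcfiles prints the compilation directory on an unindented line,
    # followed by each referenced source path on a tab-indented line (assumed
    # output format); only the tab-indented lines are collected here.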

    for line in dwarfsrcfiles_output.splitlines():
        if line.startswith("\t"):
            debugfiles[os.path.normpath(line.split()[0])] = ""

    return debugfiles.keys()