[OE-core] Parsing exception with latest oe-core

Barros Pena, Belen belen.barros.pena at intel.com
Fri Feb 12 11:31:10 UTC 2016



On 12/02/2016 10:52, Martin Jansa <martin.jansa at gmail.com> wrote:

>Hi,
>
>
>is anyone else seeing this unhelpful fatal exception?

FWIW, I am 

http://piratepad.net/2E3lnayxi8

Cheers

Belén

>
>
>ERROR: Failure expanding variable __anon_648__home_jenkins_oe_world_shr_core_openembedded_core_meta_classes_base_bbclass, expression was def __anon_648__home_jenkins_oe_world_shr_core_openembedded_core_meta_classes_base_bbclass(d):
>    import string, re
>
>    # Handle PACKAGECONFIG
>    #
>    # These take the form:
>    #
>    # PACKAGECONFIG ??= "<default options>"
>    # PACKAGECONFIG[foo] = "--enable-foo,--disable-foo,foo_depends,foo_runtime_depends"
>    pkgconfigflags = d.getVarFlags("PACKAGECONFIG") or {}
>    if pkgconfigflags:
>        pkgconfig = (d.getVar('PACKAGECONFIG', True) or "").split()
>        pn = d.getVar("PN", True)
>
>        mlprefix = d.getVar("MLPREFIX", True)
>
>        def expandFilter(appends, extension, prefix):
>            appends = bb.utils.explode_deps(d.expand(" ".join(appends)))
>            newappends = []
>            for a in appends:
>                if a.endswith("-native") or ("-cross-" in a):
>                    newappends.append(a)
>                elif a.startswith("virtual/"):
>                    subs = a.split("/", 1)[1]
>                    if subs.startswith(prefix):
>                        newappends.append(a + extension)
>                    else:
>                        newappends.append("virtual/" + prefix + subs + extension)
>                else:
>                    if a.startswith(prefix):
>                        newappends.append(a + extension)
>                    else:
>                        newappends.append(prefix + a + extension)
>            return newappends
>
>        def appendVar(varname, appends):
>            if not appends:
>                return
>            if varname.find("DEPENDS") != -1:
>                if pn.startswith("nativesdk-"):
>                    appends = expandFilter(appends, "", "nativesdk-")
>                if pn.endswith("-native"):
>                    appends = expandFilter(appends, "-native", "")
>                if mlprefix:
>                    appends = expandFilter(appends, "", mlprefix)
>            varname = d.expand(varname)
>            d.appendVar(varname, " " + " ".join(appends))
>
>        extradeps = []
>        extrardeps = []
>        extraconf = []
>        for flag, flagval in sorted(pkgconfigflags.items()):
>            items = flagval.split(",")
>            num = len(items)
>            if num > 4:
>                bb.error("%s: PACKAGECONFIG[%s] Only enable,disable,depend,rdepend can be specified!"
>                    % (d.getVar('PN', True), flag))
>
>            if flag in pkgconfig:
>                if num >= 3 and items[2]:
>                    extradeps.append(items[2])
>                if num >= 4 and items[3]:
>                    extrardeps.append(items[3])
>                if num >= 1 and items[0]:
>                    extraconf.append(items[0])
>            elif num >= 2 and items[1]:
>                    extraconf.append(items[1])
>        appendVar('DEPENDS', extradeps)
>        appendVar('RDEPENDS_defaultpkgname', extrardeps)
>        if bb.data.inherits_class('cmake', d):
>            appendVar('EXTRA_OECMAKE', extraconf)
>        else:
>            appendVar('EXTRA_OECONF', extraconf)
>
>    pn = d.getVar('PN', True)
>    license = d.getVar('LICENSE', True)
>    if license == "INVALID":
>        bb.fatal('This recipe does not have the LICENSE field set (%s)' % pn)
>
>    if bb.data.inherits_class('license', d):
>        check_license_format(d)
>        unmatched_license_flag = check_license_flags(d)
>        if unmatched_license_flag:
>            bb.debug(1, "Skipping %s because it has a restricted license not"
>                 " whitelisted in LICENSE_FLAGS_WHITELIST" % pn)
>            raise bb.parse.SkipPackage("because it has a restricted license not"
>                 " whitelisted in LICENSE_FLAGS_WHITELIST")
>
>    # If we're building a target package we need to use fakeroot (pseudo)
>    # in order to capture permissions, owners, groups and special files
>    if not bb.data.inherits_class('native', d) and not bb.data.inherits_class('cross', d):
>        d.setVarFlag('do_unpack', 'umask', '022')
>        d.setVarFlag('do_configure', 'umask', '022')
>        d.setVarFlag('do_compile', 'umask', '022')
>        d.appendVarFlag('do_install', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
>        d.setVarFlag('do_install', 'fakeroot', '1')
>        d.setVarFlag('do_install', 'umask', '022')
>        d.appendVarFlag('do_package', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
>        d.setVarFlag('do_package', 'fakeroot', '1')
>        d.setVarFlag('do_package', 'umask', '022')
>        d.setVarFlag('do_package_setscene', 'fakeroot', '1')
>        d.appendVarFlag('do_package_setscene', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
>        d.setVarFlag('do_devshell', 'fakeroot', '1')
>        d.appendVarFlag('do_devshell', 'depends', ' virtual/fakeroot-native:do_populate_sysroot')
>
>    need_machine = d.getVar('COMPATIBLE_MACHINE', True)
>    if need_machine:
>        import re
>        compat_machines = (d.getVar('MACHINEOVERRIDES', True) or "").split(":")
>        for m in compat_machines:
>            if re.match(need_machine, m):
>                break
>        else:
>            raise bb.parse.SkipPackage("incompatible with machine %s (not in COMPATIBLE_MACHINE)" % d.getVar('MACHINE', True))
>
>    source_mirror_fetch = d.getVar('SOURCE_MIRROR_FETCH', 0)
>    if not source_mirror_fetch:
>        need_host = d.getVar('COMPATIBLE_HOST', True)
>        if need_host:
>            import re
>            this_host = d.getVar('HOST_SYS', True)
>            if not re.match(need_host, this_host):
>                raise bb.parse.SkipPackage("incompatible with host %s (not in COMPATIBLE_HOST)" % this_host)
>
>        bad_licenses = (d.getVar('INCOMPATIBLE_LICENSE', True) or "").split()
>
>        check_license = False if pn.startswith("nativesdk-") else True
>        for t in ["-native", "-cross-arm", "-cross-initial-arm",
>              "-crosssdk-x86_64", "-crosssdk-initial-x86_64",
>              "-cross-canadian-arm"]:
>            if pn.endswith(d.expand(t)):
>                check_license = False
>        if pn.startswith("gcc-source-"):
>            check_license = False
>
>        if check_license and bad_licenses:
>            bad_licenses = expand_wildcard_licenses(d, bad_licenses)
>
>            whitelist = []
>            incompatwl = []
>            htincompatwl = []
>            for lic in bad_licenses:
>                spdx_license = return_spdx(d, lic)
>                for w in ["HOSTTOOLS_WHITELIST_", "LGPLv2_WHITELIST_", "WHITELIST_"]:
>                    whitelist.extend((d.getVar(w + lic, True) or "").split())
>                    if spdx_license:
>                        whitelist.extend((d.getVar(w + spdx_license, True) or "").split())
>                    '''
>                    We need to track what we are whitelisting and why. If pn is
>                    incompatible and is not HOSTTOOLS_WHITELIST_ we need to be
>                    able to note that the image that is created may in fact
>                    contain incompatible licenses despite INCOMPATIBLE_LICENSE
>                    being set.
>                    '''
>                    if "HOSTTOOLS" in w:
>                        htincompatwl.extend((d.getVar(w + lic, True) or "").split())
>                        if spdx_license:
>                            htincompatwl.extend((d.getVar(w + spdx_license, True) or "").split())
>                    else:
>                        incompatwl.extend((d.getVar(w + lic, True) or "").split())
>                        if spdx_license:
>                            incompatwl.extend((d.getVar(w + spdx_license, True) or "").split())
>
>            if not pn in whitelist:
>                recipe_license = d.getVar('LICENSE', True)
>                pkgs = d.getVar('PACKAGES', True).split()
>                skipped_pkgs = []
>                unskipped_pkgs = []
>                for pkg in pkgs:
>                    if incompatible_license(d, bad_licenses, pkg):
>                        skipped_pkgs.append(pkg)
>                    else:
>                        unskipped_pkgs.append(pkg)
>                all_skipped = skipped_pkgs and not unskipped_pkgs
>                if unskipped_pkgs:
>                    for pkg in skipped_pkgs:
>                        bb.debug(1, "SKIPPING the package " + pkg + " at do_rootfs because it's " + recipe_license)
>                        mlprefix = d.getVar('MLPREFIX', True)
>                        d.setVar('LICENSE_EXCLUSION-' + mlprefix + pkg, 1)
>                    for pkg in unskipped_pkgs:
>                        bb.debug(1, "INCLUDING the package " + pkg)
>                elif all_skipped or incompatible_license(d, bad_licenses):
>                    bb.debug(1, "SKIPPING recipe %s because it's %s" % (pn, recipe_license))
>                    raise bb.parse.SkipPackage("incompatible with license %s" % recipe_license)
>            elif pn in whitelist:
>                if pn in incompatwl:
>                    bb.note("INCLUDING " + pn + " as buildable despite INCOMPATIBLE_LICENSE because it has been whitelisted")
>                elif pn in htincompatwl:
>                    bb.note("INCLUDING " + pn + " as buildable despite INCOMPATIBLE_LICENSE because it has been whitelisted for HOSTTOOLS")
>
>    needsrcrev = False
>    srcuri = d.getVar('SRC_URI', True)
>    for uri in srcuri.split():
>        (scheme, _ , path) = bb.fetch.decodeurl(uri)[:3]
>
>        # HTTP/FTP use the wget fetcher
>        if scheme in ("http", "https", "ftp"):
>            d.appendVarFlag('do_fetch', 'depends', ' wget-native:do_populate_sysroot')
>
>        # Svn packages should DEPEND on subversion-native
>        if scheme == "svn":
>            needsrcrev = True
>            d.appendVarFlag('do_fetch', 'depends', ' subversion-native:do_populate_sysroot')
>
>        # Git packages should DEPEND on git-native
>        elif scheme == "git":
>            needsrcrev = True
>            d.appendVarFlag('do_fetch', 'depends', ' git-native:do_populate_sysroot')
>
>        # Mercurial packages should DEPEND on mercurial-native
>        elif scheme == "hg":
>            needsrcrev = True
>            d.appendVarFlag('do_fetch', 'depends', ' mercurial-native:do_populate_sysroot')
>
>        # OSC packages should DEPEND on osc-native
>        elif scheme == "osc":
>            d.appendVarFlag('do_fetch', 'depends', ' osc-native:do_populate_sysroot')
>
>        # *.lz4 should DEPEND on lz4-native for unpacking
>        if path.endswith('.lz4'):
>            d.appendVarFlag('do_unpack', 'depends', ' lz4-native:do_populate_sysroot')
>
>        # *.lz should DEPEND on lzip-native for unpacking
>        elif path.endswith('.lz'):
>            d.appendVarFlag('do_unpack', 'depends', ' lzip-native:do_populate_sysroot')
>
>        # *.xz should DEPEND on xz-native for unpacking
>        elif path.endswith('.xz'):
>            d.appendVarFlag('do_unpack', 'depends', ' xz-native:do_populate_sysroot')
>
>        # .zip should DEPEND on unzip-native for unpacking
>        elif path.endswith('.zip'):
>            d.appendVarFlag('do_unpack', 'depends', ' unzip-native:do_populate_sysroot')
>
>        # file is needed by rpm2cpio.sh
>        elif path.endswith('.src.rpm'):
>            d.appendVarFlag('do_unpack', 'depends', ' file-native:do_populate_sysroot')
>
>    if needsrcrev:
>        d.setVar("SRCPV", "${@bb.fetch2.get_srcrev(d)}")
>
>    set_packagetriplet(d)
>
>    # 'multimachine' handling
>    mach_arch = d.getVar('MACHINE_ARCH', True)
>    pkg_arch = d.getVar('PACKAGE_ARCH', True)
>
>    if (pkg_arch == mach_arch):
>        # Already machine specific - nothing further to do
>        return
>
>    #
>    # We always try to scan SRC_URI for urls with machine overrides
>    # unless the package sets SRC_URI_OVERRIDES_PACKAGE_ARCH=0
>    #
>    override = d.getVar('SRC_URI_OVERRIDES_PACKAGE_ARCH', True)
>    if override != '0':
>        paths = []
>        fpaths = (d.getVar('FILESPATH', True) or '').split(':')
>        machine = d.getVar('MACHINE', True)
>        for p in fpaths:
>            if os.path.basename(p) == machine and os.path.isdir(p):
>                paths.append(p)
>
>        if len(paths) != 0:
>            for s in srcuri.split():
>                if not s.startswith("file://"):
>                    continue
>                fetcher = bb.fetch2.Fetch([s], d)
>                local = fetcher.localpath(s)
>                for mp in paths:
>                    if local.startswith(mp):
>                        #bb.note("overriding PACKAGE_ARCH from %s to %s for %s" % (pkg_arch, mach_arch, pn))
>                        d.setVar('PACKAGE_ARCH', "qemuarm")
>                        return
>
>    packages = d.getVar('PACKAGES', True).split()
>    for pkg in packages:
>        pkgarch = d.getVar("PACKAGE_ARCH_%s" % pkg, True)
>
>        # We could look for != PACKAGE_ARCH here but how to choose
>        # if multiple differences are present?
>        # Look through PACKAGE_ARCHS for the priority order?
>        if pkgarch and pkgarch == mach_arch:
>            d.setVar('PACKAGE_ARCH', "qemuarm")
>            bb.warn("Recipe %s is marked as only being architecture
>specific but seems to have machine specific packages?! The recipe may as
>well mark itself as machine specific directly." % d.getVar("PN", True))
> which triggered exception FetchError: Fetcher failure: SRCREV was used yet no valid SCM was found in SRC_URI
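
The long variable name in the ERROR line is just how BitBake labels anonymous python blocks: "__anon_" plus the line number plus the defining file's path with every non-identifier character turned into "_". A tiny illustration of the mangling (a guess at the scheme for illustration only; the helper name and example path below are made up, and the real logic lives in BitBake's parser, not here):

# Hypothetical sketch of the "__anon_<lineno>_<mangled path>" naming seen
# in the ERROR line above. Not the actual BitBake implementation.
import re

def anon_name(lineno, filename):
    # Replace anything that is not a valid identifier character with "_".
    return "__anon_%d_%s" % (lineno, re.sub(r"[^A-Za-z0-9_]", "_", filename))

print(anon_name(648, "/home/user/oe-core/meta/classes/base.bbclass"))
# -> __anon_648__home_user_oe_core_meta_classes_base_bbclass

As for the FetchError itself: AFAICS it comes out of bb.fetch2.get_srcrev(), the same function the anonymous code wires into SRCPV via d.setVar("SRCPV", "${@bb.fetch2.get_srcrev(d)}"), and it fires when a source revision is requested but none of the SRC_URI entries uses a revision-capable fetcher. A rough standalone sketch of that condition (a paraphrase, not the real BitBake code; the function name and scheme list are invented):

# Simplified, hypothetical paraphrase of the failing check. Not the actual
# bb.fetch2.get_srcrev() implementation.

# Schemes whose fetchers can resolve SRCREV to a concrete revision.
SCM_SCHEMES = {"git", "svn", "hg", "bzr", "cvs"}

def sketch_get_srcrev(src_uri):
    """Mimic the error path: fail unless some SRC_URI entry is an SCM URL."""
    scms = [u for u in src_uri.split() if u.split("://", 1)[0] in SCM_SCHEMES]
    if not scms:
        # The situation behind the message above: ${SRCPV} (and therefore
        # get_srcrev) gets expanded, but SRC_URI only holds non-SCM URLs
        # such as http:// or file://.
        raise RuntimeError("SRCREV was used yet no valid SCM was found in SRC_URI")
    return "r0"  # the real code asks the matching fetcher for the revision

print(sketch_get_srcrev("git://example.com/foo.git;branch=master"))  # "r0"
try:
    sketch_get_srcrev("http://example.com/foo-1.0.tar.gz")
except RuntimeError as e:
    print("FetchError equivalent:", e)

Which is presumably why the message feels so unhelpful: it points at the anonymous base.bbclass function rather than at whichever recipe ends up expanding ${SRCPV}/SRCREV against a SRC_URI with no git/svn/hg entry in it.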
>



