Diffstat (limited to 'external/poky/meta/lib/oe')
-rw-r--r--  external/poky/meta/lib/oe/__init__.py                 |   4
-rw-r--r--  external/poky/meta/lib/oe/buildhistory_analysis.py    |  31
-rw-r--r--  external/poky/meta/lib/oe/cachedpath.py               |   2
-rw-r--r--  external/poky/meta/lib/oe/classextend.py              |   8
-rw-r--r--  external/poky/meta/lib/oe/classutils.py               |   3
-rw-r--r--  external/poky/meta/lib/oe/copy_buildsystem.py         |  21
-rw-r--r--  external/poky/meta/lib/oe/data.py                     |   4
-rw-r--r--  external/poky/meta/lib/oe/distro_check.py             |   4
-rw-r--r--  external/poky/meta/lib/oe/elf.py                      |  11
-rw-r--r--  external/poky/meta/lib/oe/gpg_sign.py                 |  49
-rw-r--r--  external/poky/meta/lib/oe/license.py                  |  12
-rw-r--r--  external/poky/meta/lib/oe/lsb.py                      |  12
-rw-r--r--  external/poky/meta/lib/oe/maketype.py                 |   3
-rw-r--r--  external/poky/meta/lib/oe/manifest.py                 |   4
-rw-r--r--  external/poky/meta/lib/oe/package.py                  |  41
-rw-r--r--  external/poky/meta/lib/oe/package_manager.py          | 160
-rw-r--r--  external/poky/meta/lib/oe/packagedata.py              |   9
-rw-r--r--  external/poky/meta/lib/oe/packagegroup.py             |   4
-rw-r--r--  external/poky/meta/lib/oe/patch.py                    |  42
-rw-r--r--  external/poky/meta/lib/oe/path.py                     |  31
-rw-r--r--  external/poky/meta/lib/oe/prservice.py                |  72
-rw-r--r--  external/poky/meta/lib/oe/qa.py                       |   8
-rw-r--r--  external/poky/meta/lib/oe/recipeutils.py              | 214
-rw-r--r--  external/poky/meta/lib/oe/rootfs.py                   |  60
-rw-r--r--  external/poky/meta/lib/oe/sdk.py                      |   8
-rw-r--r--  external/poky/meta/lib/oe/sstatesig.py                | 272
-rw-r--r--  external/poky/meta/lib/oe/terminal.py                 |  22
-rw-r--r--  external/poky/meta/lib/oe/types.py                    |   8
-rw-r--r--  external/poky/meta/lib/oe/useradd.py                  |   5
-rw-r--r--  external/poky/meta/lib/oe/utils.py                    |  85
30 files changed, 854 insertions(+), 355 deletions(-)
diff --git a/external/poky/meta/lib/oe/__init__.py b/external/poky/meta/lib/oe/__init__.py
index 3ad9513f..4e7c09da 100644
--- a/external/poky/meta/lib/oe/__init__.py
+++ b/external/poky/meta/lib/oe/__init__.py
@@ -1,2 +1,6 @@
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
diff --git a/external/poky/meta/lib/oe/buildhistory_analysis.py b/external/poky/meta/lib/oe/buildhistory_analysis.py
index d3cde4f6..5b28774c 100644
--- a/external/poky/meta/lib/oe/buildhistory_analysis.py
+++ b/external/poky/meta/lib/oe/buildhistory_analysis.py
@@ -3,6 +3,8 @@
# Copyright (C) 2012-2013, 2016-2017 Intel Corporation
# Author: Paul Eggleton <paul.eggleton@linux.intel.com>
#
+# SPDX-License-Identifier: GPL-2.0-only
+#
# Note: requires GitPython 0.3.1+
#
# You can use this from the command line by running scripts/buildhistory-diff
@@ -179,7 +181,7 @@ class ChangeRecord:
diff = difflib.unified_diff(alines, blines, self.fieldname, self.fieldname, lineterm='')
out += '\n '.join(list(diff)[2:])
out += '\n --'
- elif self.fieldname in img_monitor_files or '/image-files/' in self.path:
+ elif self.fieldname in img_monitor_files or '/image-files/' in self.path or self.fieldname == "sysroot":
if self.filechanges or (self.oldvalue and self.newvalue):
fieldname = self.fieldname
if '/image-files/' in self.path:
@@ -280,7 +282,7 @@ def file_list_to_dict(lines):
return adict
-def compare_file_lists(alines, blines):
+def compare_file_lists(alines, blines, compare_ownership=True):
adict = file_list_to_dict(alines)
bdict = file_list_to_dict(blines)
filechanges = []
@@ -292,16 +294,20 @@ def compare_file_lists(alines, blines):
newvalue = newsplitv[0][0]
if oldvalue != newvalue:
filechanges.append(FileChange(path, FileChange.changetype_type, oldvalue, newvalue))
+
# Check permissions
oldvalue = splitv[0][1:]
newvalue = newsplitv[0][1:]
if oldvalue != newvalue:
filechanges.append(FileChange(path, FileChange.changetype_perms, oldvalue, newvalue))
- # Check owner/group
- oldvalue = '%s/%s' % (splitv[1], splitv[2])
- newvalue = '%s/%s' % (newsplitv[1], newsplitv[2])
- if oldvalue != newvalue:
- filechanges.append(FileChange(path, FileChange.changetype_ownergroup, oldvalue, newvalue))
+
+ if compare_ownership:
+ # Check owner/group
+ oldvalue = '%s/%s' % (splitv[1], splitv[2])
+ newvalue = '%s/%s' % (newsplitv[1], newsplitv[2])
+ if oldvalue != newvalue:
+ filechanges.append(FileChange(path, FileChange.changetype_ownergroup, oldvalue, newvalue))
+
# Check symlink target
if newsplitv[0][0] == 'l':
if len(splitv) > 3:
@@ -407,7 +413,7 @@ def compare_dict_blobs(path, ablob, bblob, report_all, report_ver):
if abs(percentchg) < monitor_numeric_threshold:
continue
elif (not report_all) and key in list_fields:
- if key == "FILELIST" and path.endswith("-dbg") and bstr.strip() != '':
+ if key == "FILELIST" and (path.endswith("-dbg") or path.endswith("-src")) and bstr.strip() != '':
continue
if key in ['RPROVIDES', 'RDEPENDS', 'RRECOMMENDS', 'RSUGGESTS', 'RREPLACES', 'RCONFLICTS']:
(depvera, depverb) = compare_pkg_lists(astr, bstr)
@@ -569,6 +575,15 @@ def process_changes(repopath, revision1, revision2='HEAD', report_all=False, rep
elif filename.startswith('latest.'):
chg = ChangeRecord(path, filename, d.a_blob.data_stream.read().decode('utf-8'), d.b_blob.data_stream.read().decode('utf-8'), True)
changes.append(chg)
+ elif filename == 'sysroot':
+ alines = d.a_blob.data_stream.read().decode('utf-8').splitlines()
+ blines = d.b_blob.data_stream.read().decode('utf-8').splitlines()
+ filechanges = compare_file_lists(alines,blines, compare_ownership=False)
+ if filechanges:
+ chg = ChangeRecord(path, filename, None, None, True)
+ chg.filechanges = filechanges
+ changes.append(chg)
+
elif path.startswith('images/'):
filename = os.path.basename(d.a_blob.path)
if filename in img_monitor_files:
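
The new compare_ownership flag exists because the same file-list comparison is now reused for sysroot contents, where owner and group reflect the build host rather than the image. A minimal standalone sketch of the skip (illustrative only, not the real compare_file_lists; entries assumed pre-parsed into (perms, user, group) tuples):

    def diff_entries(old, new, compare_ownership=True):
        changes = []
        for path in sorted(old.keys() & new.keys()):
            operms, ouser, ogroup = old[path]
            nperms, nuser, ngroup = new[path]
            if operms != nperms:
                changes.append((path, 'perms', operms, nperms))
            if compare_ownership and (ouser, ogroup) != (nuser, ngroup):
                changes.append((path, 'ownergroup',
                                '%s/%s' % (ouser, ogroup),
                                '%s/%s' % (nuser, ngroup)))
        return changes

    old = {'/usr/bin/foo': ('-rwxr-xr-x', 'builder', 'builder')}
    new = {'/usr/bin/foo': ('-rwxr-xr-x', 'root', 'root')}
    assert diff_entries(old, new, compare_ownership=False) == []
    assert len(diff_entries(old, new)) == 1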
diff --git a/external/poky/meta/lib/oe/cachedpath.py b/external/poky/meta/lib/oe/cachedpath.py
index 0840cc4c..254257a8 100644
--- a/external/poky/meta/lib/oe/cachedpath.py
+++ b/external/poky/meta/lib/oe/cachedpath.py
@@ -1,4 +1,6 @@
#
+# SPDX-License-Identifier: GPL-2.0-only
+#
# Based on standard python library functions but avoid
# repeated stat calls. Its assumed the files will not change from under us
# so we can cache stat calls.
diff --git a/external/poky/meta/lib/oe/classextend.py b/external/poky/meta/lib/oe/classextend.py
index d2eeaf0e..f02fbe9f 100644
--- a/external/poky/meta/lib/oe/classextend.py
+++ b/external/poky/meta/lib/oe/classextend.py
@@ -1,3 +1,7 @@
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
import collections
class ClassExtender(object):
@@ -20,6 +24,8 @@ class ClassExtender(object):
if not subs.startswith(self.extname):
return "virtual/" + self.extname + "-" + subs
return name
+ if name.startswith("/"):
+ return name
if not name.startswith(self.extname):
return self.extname + "-" + name
return name
@@ -114,7 +120,7 @@ class NativesdkClassExtender(ClassExtender):
def map_depends(self, dep):
if dep.startswith(self.extname):
return dep
- if dep.endswith(("-gcc-initial", "-gcc", "-g++")):
+ if dep.endswith(("-gcc", "-g++")):
return dep + "-crosssdk"
elif dep.endswith(("-native", "-native-runtime")) or ('nativesdk-' in dep) or ('-cross-' in dep) or ('-crosssdk-' in dep):
return dep
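
Dropping "-gcc-initial" from the suffix tuple means gcc-initial dependencies no longer receive the -crosssdk suffix; since endswith() matches true suffixes only, they now fall through to the generic rules. A standalone sketch of the rule order (the final fallback is simplified; the real method defers to extend_name()):

    def map_depends(dep, extname='nativesdk'):
        if dep.startswith(extname):
            return dep
        if dep.endswith(("-gcc", "-g++")):
            return dep + "-crosssdk"
        if dep.endswith(("-native", "-native-runtime")) or 'nativesdk-' in dep \
                or '-cross-' in dep or '-crosssdk-' in dep:
            return dep
        return extname + '-' + dep  # simplified stand-in for extend_name()

    assert map_depends("gcc") == "gcc-crosssdk"
    assert map_depends("gcc-initial") == "nativesdk-gcc-initial"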
diff --git a/external/poky/meta/lib/oe/classutils.py b/external/poky/meta/lib/oe/classutils.py
index 45cd5249..08bb66b3 100644
--- a/external/poky/meta/lib/oe/classutils.py
+++ b/external/poky/meta/lib/oe/classutils.py
@@ -1,3 +1,6 @@
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
class ClassRegistryMeta(type):
"""Give each ClassRegistry their own registry"""
diff --git a/external/poky/meta/lib/oe/copy_buildsystem.py b/external/poky/meta/lib/oe/copy_buildsystem.py
index 7cb784cf..31a84f5b 100644
--- a/external/poky/meta/lib/oe/copy_buildsystem.py
+++ b/external/poky/meta/lib/oe/copy_buildsystem.py
@@ -1,3 +1,6 @@
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
# This class should provide easy access to the different aspects of the
# buildsystem such as layers, bitbake location, etc.
#
@@ -42,6 +45,9 @@ class BuildSystem(object):
corebase = os.path.abspath(self.d.getVar('COREBASE'))
layers.append(corebase)
+ # Get relationship between TOPDIR and COREBASE
+ # Layers should respect it
+ corebase_relative = os.path.dirname(os.path.relpath(os.path.abspath(self.d.getVar('TOPDIR')), corebase))
# The bitbake build system uses the meta-skeleton layer as a layout
# for common recipies, e.g: the recipetool script to create kernel recipies
# Add the meta-skeleton layer to be included as part of the eSDK installation
@@ -95,7 +101,10 @@ class BuildSystem(object):
if corebase == os.path.dirname(layer):
layerdestpath += '/' + os.path.basename(corebase)
else:
- layer_relative = os.path.basename(corebase) + '/' + os.path.relpath(layer, corebase)
+ layer_relative = os.path.relpath(layer, corebase)
+ if os.path.dirname(layer_relative) == corebase_relative:
+ layer_relative = os.path.dirname(corebase_relative) + '/' + layernewname
+ layer_relative = os.path.basename(corebase) + '/' + layer_relative
if os.path.dirname(layer_relative) != layernewname:
layerdestpath += '/' + os.path.dirname(layer_relative)
@@ -165,10 +174,10 @@ class BuildSystem(object):
def generate_locked_sigs(sigfile, d):
bb.utils.mkdirhier(os.path.dirname(sigfile))
depd = d.getVar('BB_TASKDEPDATA', False)
- tasks = ['%s.%s' % (v[2], v[1]) for v in depd.values()]
+ tasks = ['%s:%s' % (v[2], v[1]) for v in depd.values()]
bb.parse.siggen.dump_lockedsigs(sigfile, tasks)
-def prune_lockedsigs(excluded_tasks, excluded_targets, lockedsigs, pruned_output):
+def prune_lockedsigs(excluded_tasks, excluded_targets, lockedsigs, onlynative, pruned_output):
with open(lockedsigs, 'r') as infile:
bb.utils.mkdirhier(os.path.dirname(pruned_output))
with open(pruned_output, 'w') as f:
@@ -178,7 +187,11 @@ def prune_lockedsigs(excluded_tasks, excluded_targets, lockedsigs, pruned_output
if line.endswith('\\\n'):
splitval = line.strip().split(':')
if not splitval[1] in excluded_tasks and not splitval[0] in excluded_targets:
- f.write(line)
+ if onlynative:
+ if 'nativesdk' in splitval[0]:
+ f.write(line)
+ else:
+ f.write(line)
else:
f.write(line)
invalue = False
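
Two things change here: generate_locked_sigs() switches task identifiers from recipe.task to recipe:task, and prune_lockedsigs() gains an onlynative flag so a locked-signatures file can be restricted to nativesdk targets. The per-line pruning decision, sketched in isolation (entry lines have the form "recipe:task:hash \", per the split(':') above):

    def keep_line(line, excluded_tasks, excluded_targets, onlynative):
        target, task = line.strip().split(':')[:2]
        if task in excluded_tasks or target in excluded_targets:
            return False
        if onlynative:
            return 'nativesdk' in target
        return True

    assert keep_line('nativesdk-cmake:do_populate_sysroot:ab12 \\\n', [], [], True)
    assert not keep_line('zlib:do_package:cd34 \\\n', [], [], True)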
diff --git a/external/poky/meta/lib/oe/data.py b/external/poky/meta/lib/oe/data.py
index b8901e63..602130a9 100644
--- a/external/poky/meta/lib/oe/data.py
+++ b/external/poky/meta/lib/oe/data.py
@@ -1,3 +1,7 @@
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
import json
import oe.maketype
diff --git a/external/poky/meta/lib/oe/distro_check.py b/external/poky/meta/lib/oe/distro_check.py
index e775c3a6..88e46c35 100644
--- a/external/poky/meta/lib/oe/distro_check.py
+++ b/external/poky/meta/lib/oe/distro_check.py
@@ -1,3 +1,7 @@
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
def create_socket(url, d):
import urllib
from bb.utils import export_proxies
diff --git a/external/poky/meta/lib/oe/elf.py b/external/poky/meta/lib/oe/elf.py
index 0ed59ae0..df0a4593 100644
--- a/external/poky/meta/lib/oe/elf.py
+++ b/external/poky/meta/lib/oe/elf.py
@@ -1,3 +1,7 @@
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
def machine_dict(d):
# TARGET_OS TARGET_ARCH MACHINE, OSABI, ABIVERSION, Little Endian, 32bit?
machdata = {
@@ -11,13 +15,13 @@ def machine_dict(d):
"aarch64" : (183, 0, 0, True, 64),
"aarch64_be" :(183, 0, 0, False, 64),
"i586" : (3, 0, 0, True, 32),
+ "i686" : (3, 0, 0, True, 32),
"x86_64": (62, 0, 0, True, 64),
"epiphany": (4643, 0, 0, True, 32),
"lm32": (138, 0, 0, False, 32),
"mips": ( 8, 0, 0, False, 32),
"mipsel": ( 8, 0, 0, True, 32),
"microblaze": (189, 0, 0, False, 32),
- "microblazeeb":(189, 0, 0, False, 32),
"microblazeel":(189, 0, 0, True, 32),
"powerpc": (20, 0, 0, False, 32),
"riscv32": (243, 0, 0, True, 32),
@@ -30,6 +34,7 @@ def machine_dict(d):
"armeb": (40, 97, 0, False, 32),
"powerpc": (20, 0, 0, False, 32),
"powerpc64": (21, 0, 0, False, 64),
+ "powerpc64le": (21, 0, 0, True, 64),
"i386": ( 3, 0, 0, True, 32),
"i486": ( 3, 0, 0, True, 32),
"i586": ( 3, 0, 0, True, 32),
@@ -54,7 +59,6 @@ def machine_dict(d):
"sh4": (42, 0, 0, True, 32),
"sparc": ( 2, 0, 0, False, 32),
"microblaze": (189, 0, 0, False, 32),
- "microblazeeb":(189, 0, 0, False, 32),
"microblazeel":(189, 0, 0, True, 32),
},
"linux-musl" : {
@@ -63,6 +67,8 @@ def machine_dict(d):
"arm" : ( 40, 97, 0, True, 32),
"armeb": ( 40, 97, 0, False, 32),
"powerpc": ( 20, 0, 0, False, 32),
+ "powerpc64": ( 21, 0, 0, False, 64),
+ "powerpc64le": (21, 0, 0, True, 64),
"i386": ( 3, 0, 0, True, 32),
"i486": ( 3, 0, 0, True, 32),
"i586": ( 3, 0, 0, True, 32),
@@ -73,7 +79,6 @@ def machine_dict(d):
"mips64": ( 8, 0, 0, False, 64),
"mips64el": ( 8, 0, 0, True, 64),
"microblaze": (189, 0, 0, False, 32),
- "microblazeeb":(189, 0, 0, False, 32),
"microblazeel":(189, 0, 0, True, 32),
"riscv32": (243, 0, 0, True, 32),
"riscv64": (243, 0, 0, True, 64),
diff --git a/external/poky/meta/lib/oe/gpg_sign.py b/external/poky/meta/lib/oe/gpg_sign.py
index ccd5aee4..7634d7ef 100644
--- a/external/poky/meta/lib/oe/gpg_sign.py
+++ b/external/poky/meta/lib/oe/gpg_sign.py
@@ -1,8 +1,11 @@
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
"""Helper module for GPG signing"""
import os
import bb
-import oe.utils
import subprocess
import shlex
@@ -11,21 +14,27 @@ class LocalSigner(object):
def __init__(self, d):
self.gpg_bin = d.getVar('GPG_BIN') or \
bb.utils.which(os.getenv('PATH'), 'gpg')
+ self.gpg_cmd = [self.gpg_bin]
+ self.gpg_agent_bin = bb.utils.which(os.getenv('PATH'), "gpg-agent")
+ # Without this we see "Cannot allocate memory" errors when running processes in parallel
+ # It needs to be set for any gpg command since any agent launched can stick around in memory
+ # and this parameter must be set.
+ if self.gpg_agent_bin:
+ self.gpg_cmd += ["--agent-program=%s|--auto-expand-secmem" % (self.gpg_agent_bin)]
self.gpg_path = d.getVar('GPG_PATH')
- self.gpg_version = self.get_gpg_version()
self.rpm_bin = bb.utils.which(os.getenv('PATH'), "rpmsign")
- self.gpg_agent_bin = bb.utils.which(os.getenv('PATH'), "gpg-agent")
+ self.gpg_version = self.get_gpg_version()
+
def export_pubkey(self, output_file, keyid, armor=True):
"""Export GPG public key to a file"""
- cmd = '%s --no-permission-warning --batch --yes --export -o %s ' % \
- (self.gpg_bin, output_file)
+ cmd = self.gpg_cmd + ["--no-permission-warning", "--batch", "--yes", "--export", "-o", output_file]
if self.gpg_path:
- cmd += "--homedir %s " % self.gpg_path
+ cmd += ["--homedir", self.gpg_path]
if armor:
- cmd += "--armor "
- cmd += keyid
- subprocess.check_output(shlex.split(cmd), stderr=subprocess.STDOUT)
+ cmd += ["--armor"]
+ cmd += [keyid]
+ subprocess.check_output(cmd, stderr=subprocess.STDOUT)
def sign_rpms(self, files, keyid, passphrase, digest, sign_chunk, fsk=None, fsk_password=None):
"""Sign RPM files"""
@@ -55,7 +64,7 @@ class LocalSigner(object):
if passphrase_file and passphrase:
raise Exception("You should use either passphrase_file of passphrase, not both")
- cmd = [self.gpg_bin, '--detach-sign', '--no-permission-warning', '--batch',
+ cmd = self.gpg_cmd + ['--detach-sign', '--no-permission-warning', '--batch',
'--no-tty', '--yes', '--passphrase-fd', '0', '-u', keyid]
if self.gpg_path:
@@ -68,9 +77,6 @@ class LocalSigner(object):
if self.gpg_version > (2,1,):
cmd += ['--pinentry-mode', 'loopback']
- if self.gpg_agent_bin:
- cmd += ["--agent-program=%s|--auto-expand-secmem" % (self.gpg_agent_bin)]
-
cmd += [input_file]
try:
@@ -82,8 +88,7 @@ class LocalSigner(object):
(_, stderr) = job.communicate(passphrase.encode("utf-8"))
if job.returncode:
- raise bb.build.FuncFailed("GPG exited with code %d: %s" %
- (job.returncode, stderr.decode("utf-8")))
+ bb.fatal("GPG exited with code %d: %s" % (job.returncode, stderr.decode("utf-8")))
except IOError as e:
bb.error("IO error (%s): %s" % (e.errno, e.strerror))
@@ -97,19 +102,21 @@ class LocalSigner(object):
def get_gpg_version(self):
"""Return the gpg version as a tuple of ints"""
try:
- ver_str = subprocess.check_output((self.gpg_bin, "--version", "--no-permission-warning")).split()[2].decode("utf-8")
+ cmd = self.gpg_cmd + ["--version", "--no-permission-warning"]
+ ver_str = subprocess.check_output(cmd).split()[2].decode("utf-8")
return tuple([int(i) for i in ver_str.split("-")[0].split('.')])
except subprocess.CalledProcessError as e:
- raise bb.build.FuncFailed("Could not get gpg version: %s" % e)
+ bb.fatal("Could not get gpg version: %s" % e)
def verify(self, sig_file):
"""Verify signature"""
- cmd = self.gpg_bin + " --verify --no-permission-warning "
+ cmd = self.gpg_cmd + [" --verify", "--no-permission-warning"]
if self.gpg_path:
- cmd += "--homedir %s " % self.gpg_path
- cmd += sig_file
- status = subprocess.call(shlex.split(cmd))
+ cmd += ["--homedir", self.gpg_path]
+
+ cmd += [sig_file]
+ status = subprocess.call(cmd)
ret = False if status else True
return ret
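
The signer is refactored from shell-style command strings to argument lists (no shlex quoting pitfalls, e.g. a GPG_PATH containing spaces), and the --agent-program workaround moves into self.gpg_cmd so every gpg invocation carries it, not just detach_sign(). A hedged sketch of the pattern (binary paths are assumptions for illustration):

    import subprocess

    gpg_bin = 'gpg'                        # normally resolved via bb.utils.which()
    gpg_agent_bin = '/usr/bin/gpg-agent'   # assumed location
    gpg_cmd = [gpg_bin]
    if gpg_agent_bin:
        # Ride along on every call so any agent gpg spawns gets
        # --auto-expand-secmem, avoiding "Cannot allocate memory" when
        # many signing processes run in parallel.
        gpg_cmd += ["--agent-program=%s|--auto-expand-secmem" % gpg_agent_bin]

    cmd = gpg_cmd + ["--version", "--no-permission-warning"]
    ver_str = subprocess.check_output(cmd).split()[2].decode("utf-8")
    version = tuple(int(i) for i in ver_str.split("-")[0].split('.'))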
diff --git a/external/poky/meta/lib/oe/license.py b/external/poky/meta/lib/oe/license.py
index ca385d51..c1274a61 100644
--- a/external/poky/meta/lib/oe/license.py
+++ b/external/poky/meta/lib/oe/license.py
@@ -1,4 +1,6 @@
-# vi:sts=4:sw=4:et
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
"""Code for parsing OpenEmbedded license strings"""
import ast
@@ -13,8 +15,8 @@ def license_ok(license, dont_want_licenses):
# will exclude a trailing '+' character from LICENSE in
# case INCOMPATIBLE_LICENSE is not a 'X+' license.
lic = license
- if not re.search('\+$', dwl):
- lic = re.sub('\+', '', license)
+ if not re.search(r'\+$', dwl):
+ lic = re.sub(r'\+', '', license)
if fnmatch(lic, dwl):
return False
return True
@@ -40,8 +42,8 @@ class InvalidLicense(LicenseError):
return "invalid characters in license '%s'" % self.license
license_operator_chars = '&|() '
-license_operator = re.compile('([' + license_operator_chars + '])')
-license_pattern = re.compile('[a-zA-Z0-9.+_\-]+$')
+license_operator = re.compile(r'([' + license_operator_chars + '])')
+license_pattern = re.compile(r'[a-zA-Z0-9.+_\-]+$')
class LicenseVisitor(ast.NodeVisitor):
"""Get elements based on OpenEmbedded license strings"""
diff --git a/external/poky/meta/lib/oe/lsb.py b/external/poky/meta/lib/oe/lsb.py
index 71c0992c..43e46380 100644
--- a/external/poky/meta/lib/oe/lsb.py
+++ b/external/poky/meta/lib/oe/lsb.py
@@ -1,3 +1,7 @@
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
def get_os_release():
"""Get all key-value pairs from /etc/os-release as a dict"""
from collections import OrderedDict
@@ -106,12 +110,12 @@ def distro_identifier(adjust_hook=None):
if adjust_hook:
distro_id, release = adjust_hook(distro_id, release)
if not distro_id:
- return "Unknown"
- # Filter out any non-alphanumerics
- distro_id = re.sub(r'\W', '', distro_id)
+ return "unknown"
+ # Filter out any non-alphanumerics and convert to lowercase
+ distro_id = re.sub(r'\W', '', distro_id).lower()
if release:
- id_str = '{0}-{1}'.format(distro_id.lower(), release)
+ id_str = '{0}-{1}'.format(distro_id, release)
else:
id_str = distro_id
return id_str.replace(' ','-').replace('/','-')
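
distro_identifier() now lowercases the whole identifier (not only the release-bearing branch) and returns "unknown" instead of "Unknown", so the result is case-stable however it is composed. The normalization in isolation (adjust-hook handling omitted):

    import re

    def distro_identifier(distro_id, release=None):
        if not distro_id:
            return "unknown"
        distro_id = re.sub(r'\W', '', distro_id).lower()
        id_str = '{0}-{1}'.format(distro_id, release) if release else distro_id
        return id_str.replace(' ', '-').replace('/', '-')

    assert distro_identifier('Ubuntu', '18.04') == 'ubuntu-18.04'
    assert distro_identifier(None) == 'unknown'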
diff --git a/external/poky/meta/lib/oe/maketype.py b/external/poky/meta/lib/oe/maketype.py
index c36e7b56..d929c8b3 100644
--- a/external/poky/meta/lib/oe/maketype.py
+++ b/external/poky/meta/lib/oe/maketype.py
@@ -1,3 +1,6 @@
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
"""OpenEmbedded variable typing support
Types are defined in the metadata by name, using the 'type' flag on a
diff --git a/external/poky/meta/lib/oe/manifest.py b/external/poky/meta/lib/oe/manifest.py
index 674303c8..f7c88f9a 100644
--- a/external/poky/meta/lib/oe/manifest.py
+++ b/external/poky/meta/lib/oe/manifest.py
@@ -1,3 +1,7 @@
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
from abc import ABCMeta, abstractmethod
import os
import re
diff --git a/external/poky/meta/lib/oe/package.py b/external/poky/meta/lib/oe/package.py
index efd36b37..dd700cbb 100644
--- a/external/poky/meta/lib/oe/package.py
+++ b/external/poky/meta/lib/oe/package.py
@@ -1,3 +1,7 @@
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
import stat
import mmap
import subprocess
@@ -255,13 +259,13 @@ def read_shlib_providers(d):
shlib_provider = {}
shlibs_dirs = d.getVar('SHLIBSDIRS').split()
- list_re = re.compile('^(.*)\.list$')
+ list_re = re.compile(r'^(.*)\.list$')
# Go from least to most specific since the last one found wins
for dir in reversed(shlibs_dirs):
bb.debug(2, "Reading shlib providers in %s" % (dir))
if not os.path.exists(dir):
continue
- for file in os.listdir(dir):
+ for file in sorted(os.listdir(dir)):
m = list_re.match(file)
if m:
dep_pkg = m.group(1)
@@ -279,36 +283,3 @@ def read_shlib_providers(d):
shlib_provider[s[0]] = {}
shlib_provider[s[0]][s[1]] = (dep_pkg, s[2])
return shlib_provider
-
-
-def npm_split_package_dirs(pkgdir):
- """
- Work out the packages fetched and unpacked by BitBake's npm fetcher
- Returns a dict of packagename -> (relpath, package.json) ordered
- such that it is suitable for use in PACKAGES and FILES
- """
- from collections import OrderedDict
- import json
- packages = {}
- for root, dirs, files in os.walk(pkgdir):
- if os.path.basename(root) == 'node_modules':
- for dn in dirs:
- relpth = os.path.relpath(os.path.join(root, dn), pkgdir)
- pkgitems = ['${PN}']
- for pathitem in relpth.split('/'):
- if pathitem == 'node_modules':
- continue
- pkgitems.append(pathitem)
- pkgname = '-'.join(pkgitems).replace('_', '-')
- pkgname = pkgname.replace('@', '')
- pkgfile = os.path.join(root, dn, 'package.json')
- data = None
- if os.path.exists(pkgfile):
- with open(pkgfile, 'r') as f:
- data = json.loads(f.read())
- packages[pkgname] = (relpth, data)
- # We want the main package for a module sorted *after* its subpackages
- # (so that it doesn't otherwise steal the files for the subpackage), so
- # this is a cheap way to do that whilst still having an otherwise
- # alphabetical sort
- return OrderedDict((key, packages[key]) for key in sorted(packages, key=lambda pkg: pkg + '~'))
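
read_shlib_providers() resolves duplicate soname entries with a "last one found wins" rule, so wrapping os.listdir() in sorted() pins which .list file wins; unsorted, the winner could change with filesystem directory order. A trivial demonstration of the underlying nondeterminism:

    import os, tempfile

    d = tempfile.mkdtemp()
    for name in ('b.list', 'a.list'):
        open(os.path.join(d, name), 'w').close()

    print(os.listdir(d))           # order is filesystem-dependent
    print(sorted(os.listdir(d)))   # ['a.list', 'b.list'] everywhere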
diff --git a/external/poky/meta/lib/oe/package_manager.py b/external/poky/meta/lib/oe/package_manager.py
index 882e7c42..b0660411 100644
--- a/external/poky/meta/lib/oe/package_manager.py
+++ b/external/poky/meta/lib/oe/package_manager.py
@@ -1,3 +1,7 @@
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
from abc import ABCMeta, abstractmethod
import os
import glob
@@ -29,15 +33,16 @@ def opkg_query(cmd_output):
a dictionary with the information of the packages. This is used
when the packages are in deb or ipk format.
"""
- verregex = re.compile(' \([=<>]* [^ )]*\)')
+ verregex = re.compile(r' \([=<>]* [^ )]*\)')
output = dict()
pkg = ""
arch = ""
ver = ""
filename = ""
dep = []
+ prov = []
pkgarch = ""
- for line in cmd_output.splitlines():
+ for line in cmd_output.splitlines()+['']:
line = line.rstrip()
if ':' in line:
if line.startswith("Package: "):
@@ -60,6 +65,10 @@ def opkg_query(cmd_output):
dep.append("%s [REC]" % recommend)
elif line.startswith("PackageArch: "):
pkgarch = line.split(": ")[1]
+ elif line.startswith("Provides: "):
+ provides = verregex.sub('', line.split(": ")[1])
+ for provide in provides.split(", "):
+ prov.append(provide)
# When there is a blank line save the package information
elif not line:
@@ -68,20 +77,15 @@ def opkg_query(cmd_output):
filename = "%s_%s_%s.ipk" % (pkg, ver, arch)
if pkg:
output[pkg] = {"arch":arch, "ver":ver,
- "filename":filename, "deps": dep, "pkgarch":pkgarch }
+ "filename":filename, "deps": dep, "pkgarch":pkgarch, "provs": prov}
pkg = ""
arch = ""
ver = ""
filename = ""
dep = []
+ prov = []
pkgarch = ""
- if pkg:
- if not filename:
- filename = "%s_%s_%s.ipk" % (pkg, ver, arch)
- output[pkg] = {"arch":arch, "ver":ver,
- "filename":filename, "deps": dep }
-
return output
def failed_postinsts_abort(pkgs, log_path):
@@ -94,6 +98,8 @@ def generate_locale_archive(d, rootfs, target_arch, localedir):
# Pretty sure we don't need this for locale archive generation but
# keeping it to be safe...
locale_arch_options = { \
+ "arc": ["--uint32-align=4", "--little-endian"],
+ "arceb": ["--uint32-align=4", "--big-endian"],
"arm": ["--uint32-align=4", "--little-endian"],
"armeb": ["--uint32-align=4", "--big-endian"],
"aarch64": ["--uint32-align=4", "--little-endian"],
@@ -101,6 +107,7 @@ def generate_locale_archive(d, rootfs, target_arch, localedir):
"sh4": ["--uint32-align=4", "--big-endian"],
"powerpc": ["--uint32-align=4", "--big-endian"],
"powerpc64": ["--uint32-align=4", "--big-endian"],
+ "powerpc64le": ["--uint32-align=4", "--little-endian"],
"mips": ["--uint32-align=4", "--big-endian"],
"mipsisa32r6": ["--uint32-align=4", "--big-endian"],
"mips64": ["--uint32-align=4", "--big-endian"],
@@ -125,7 +132,7 @@ def generate_locale_archive(d, rootfs, target_arch, localedir):
env = dict(os.environ)
env["LOCALEARCHIVE"] = oe.path.join(localedir, "locale-archive")
- for name in os.listdir(localedir):
+ for name in sorted(os.listdir(localedir)):
path = os.path.join(localedir, name)
if os.path.isdir(path):
cmd = ["cross-localedef", "--verbose"]
@@ -211,7 +218,7 @@ class OpkgIndexer(Indexer):
if not os.path.exists(pkgs_file):
open(pkgs_file, "w").close()
- index_cmds.add('%s -r %s -p %s -m %s' %
+ index_cmds.add('%s --checksum md5 --checksum sha256 -r %s -p %s -m %s' %
(opkg_index_cmd, pkgs_file, pkgs_file, pkgs_dir))
index_sign_files.add(pkgs_file)
@@ -250,8 +257,8 @@ class DpkgIndexer(Indexer):
with open(os.path.join(self.d.expand("${STAGING_ETCDIR_NATIVE}"),
"apt", "apt.conf.sample")) as apt_conf_sample:
for line in apt_conf_sample.read().split("\n"):
- line = re.sub("#ROOTFS#", "/dev/null", line)
- line = re.sub("#APTCONF#", self.apt_conf_dir, line)
+ line = re.sub(r"#ROOTFS#", "/dev/null", line)
+ line = re.sub(r"#APTCONF#", self.apt_conf_dir, line)
apt_conf.write(line + "\n")
def write_index(self):
@@ -292,7 +299,7 @@ class DpkgIndexer(Indexer):
release.write("Label: %s\n" % arch)
cmd += "PSEUDO_UNLOAD=1 %s release . >> Release" % apt_ftparchive
-
+
index_cmds.append(cmd)
deb_dirs_found = True
@@ -318,7 +325,7 @@ class PkgsList(object, metaclass=ABCMeta):
class RpmPkgsList(PkgsList):
def list_pkgs(self):
- return RpmPM(self.d, self.rootfs_dir, self.d.getVar('TARGET_VENDOR')).list_installed()
+ return RpmPM(self.d, self.rootfs_dir, self.d.getVar('TARGET_VENDOR'), needfeed=False).list_installed()
class OpkgPkgsList(PkgsList):
def __init__(self, d, rootfs_dir, config_file):
@@ -354,7 +361,7 @@ class DpkgPkgsList(PkgsList):
"--admindir=%s/var/lib/dpkg" % self.rootfs_dir,
"-W"]
- cmd.append("-f=Package: ${Package}\nArchitecture: ${PackageArch}\nVersion: ${Version}\nFile: ${Package}_${Version}_${Architecture}.deb\nDepends: ${Depends}\nRecommends: ${Recommends}\n\n")
+ cmd.append("-f=Package: ${Package}\nArchitecture: ${PackageArch}\nVersion: ${Version}\nFile: ${Package}_${Version}_${Architecture}.deb\nDepends: ${Depends}\nRecommends: ${Recommends}\nProvides: ${Provides}\n\n")
try:
cmd_output = subprocess.check_output(cmd, stderr=subprocess.STDOUT).strip().decode("utf-8")
@@ -406,7 +413,7 @@ class PackageManager(object, metaclass=ABCMeta):
with open(postinst_intercept_hook) as intercept:
registered_pkgs = None
for line in intercept.read().split("\n"):
- m = re.match("^##PKGS:(.*)", line)
+ m = re.match(r"^##PKGS:(.*)", line)
if m is not None:
registered_pkgs = m.group(1).strip()
break
@@ -437,6 +444,11 @@ class PackageManager(object, metaclass=ABCMeta):
self._postpone_to_first_boot(script_full)
continue
+ if populate_sdk == 'host' and self.d.getVar('SDK_OS') == 'mingw32':
+ bb.note("The postinstall intercept hook '%s' could not be executed due to missing wine support, details in %s/log.do_%s"
+ % (script, self.d.getVar('T'), self.d.getVar('BB_CURRENTTASK')))
+ continue
+
bb.note("> Executing %s intercept ..." % script)
try:
@@ -445,10 +457,10 @@ class PackageManager(object, metaclass=ABCMeta):
except subprocess.CalledProcessError as e:
bb.note("Exit code %d. Output:\n%s" % (e.returncode, e.output.decode("utf-8")))
if populate_sdk == 'host':
- bb.warn("The postinstall intercept hook '%s' failed, details in %s/log.do_%s" % (script, self.d.getVar('T'), self.d.getVar('BB_CURRENTTASK')))
+ bb.fatal("The postinstall intercept hook '%s' failed, details in %s/log.do_%s" % (script, self.d.getVar('T'), self.d.getVar('BB_CURRENTTASK')))
elif populate_sdk == 'target':
if "qemuwrapper: qemu usermode is not supported" in e.output.decode("utf-8"):
- bb.warn("The postinstall intercept hook '%s' could not be executed due to missing qemu usermode support, details in %s/log.do_%s"
+ bb.note("The postinstall intercept hook '%s' could not be executed due to missing qemu usermode support, details in %s/log.do_%s"
% (script, self.d.getVar('T'), self.d.getVar('BB_CURRENTTASK')))
else:
bb.fatal("The postinstall intercept hook '%s' failed, details in %s/log.do_%s" % (script, self.d.getVar('T'), self.d.getVar('BB_CURRENTTASK')))
@@ -559,6 +571,8 @@ class PackageManager(object, metaclass=ABCMeta):
for lang in split_linguas:
globs += " *-locale-%s" % lang
+ for complementary_linguas in (self.d.getVar('IMAGE_LINGUAS_COMPLEMENTARY') or "").split():
+ globs += (" " + complementary_linguas) % lang
if globs is None:
return
@@ -567,6 +581,11 @@ class PackageManager(object, metaclass=ABCMeta):
# oe-pkgdata-util reads it from a file
with tempfile.NamedTemporaryFile(mode="w+", prefix="installed-pkgs") as installed_pkgs:
pkgs = self.list_installed()
+
+ provided_pkgs = set()
+ for pkg in pkgs.values():
+ provided_pkgs |= set(pkg.get('provs', []))
+
output = oe.utils.format_pkg_list(pkgs, "arch")
installed_pkgs.write(output)
installed_pkgs.flush()
@@ -578,10 +597,15 @@ class PackageManager(object, metaclass=ABCMeta):
if exclude:
cmd.extend(['--exclude=' + '|'.join(exclude.split())])
try:
- bb.note("Installing complementary packages ...")
bb.note('Running %s' % cmd)
complementary_pkgs = subprocess.check_output(cmd, stderr=subprocess.STDOUT).decode("utf-8")
- self.install(complementary_pkgs.split(), attempt_only=True)
+ complementary_pkgs = set(complementary_pkgs.split())
+ skip_pkgs = sorted(complementary_pkgs & provided_pkgs)
+ install_pkgs = sorted(complementary_pkgs - provided_pkgs)
+ bb.note("Installing complementary packages ... %s (skipped already provided packages %s)" % (
+ ' '.join(install_pkgs),
+ ' '.join(skip_pkgs)))
+ self.install(install_pkgs, attempt_only=True)
except subprocess.CalledProcessError as e:
bb.fatal("Could not compute complementary packages list. Command "
"'%s' returned %d:\n%s" %
@@ -644,7 +668,7 @@ def create_packages_dir(d, subrepo_dir, deploydir, taskname, filterbydependencie
pn = d.getVar("PN")
seendirs = set()
multilibs = {}
-
+
bb.utils.remove(subrepo_dir, recurse=True)
bb.utils.mkdirhier(subrepo_dir)
@@ -722,7 +746,8 @@ class RpmPM(PackageManager):
arch_var=None,
os_var=None,
rpm_repo_workdir="oe-rootfs-repo",
- filterbydependencies=True):
+ filterbydependencies=True,
+ needfeed=True):
super(RpmPM, self).__init__(d, target_rootfs)
self.target_vendor = target_vendor
self.task_name = task_name
@@ -735,8 +760,9 @@ class RpmPM(PackageManager):
else:
self.primary_arch = self.d.getVar('MACHINE_ARCH')
- self.rpm_repo_dir = oe.path.join(self.d.getVar('WORKDIR'), rpm_repo_workdir)
- create_packages_dir(self.d, oe.path.join(self.rpm_repo_dir, "rpm"), d.getVar("DEPLOY_DIR_RPM"), "package_write_rpm", filterbydependencies)
+ if needfeed:
+ self.rpm_repo_dir = oe.path.join(self.d.getVar('WORKDIR'), rpm_repo_workdir)
+ create_packages_dir(self.d, oe.path.join(self.rpm_repo_dir, "rpm"), d.getVar("DEPLOY_DIR_RPM"), "package_write_rpm", filterbydependencies)
self.saved_packaging_data = self.d.expand('${T}/saved_packaging_data/%s' % self.task_name)
if not os.path.exists(self.d.expand('${T}/saved_packaging_data')):
@@ -753,6 +779,8 @@ class RpmPM(PackageManager):
# This prevents accidental matching against libsolv's built-in policies
if len(archs) <= 1:
archs = archs + ["bogusarch"]
+ # This architecture needs to be upfront so that packages using it are properly prioritized
+ archs = ["sdk_provides_dummy_target"] + archs
confdir = "%s/%s" %(self.target_rootfs, "etc/dnf/vars/")
bb.utils.mkdirhier(confdir)
open(confdir + "arch", 'w').write(":".join(archs))
@@ -860,7 +888,7 @@ class RpmPM(PackageManager):
failed_scriptlets_pkgnames = collections.OrderedDict()
for line in output.splitlines():
- if line.startswith("Non-fatal POSTIN scriptlet failure in rpm package"):
+ if line.startswith("Error in POSTIN scriptlet in rpm package"):
failed_scriptlets_pkgnames[line.split()[-1]] = True
if len(failed_scriptlets_pkgnames) > 0:
@@ -962,13 +990,14 @@ class RpmPM(PackageManager):
os.environ['RPM_ETCCONFIGDIR'] = self.target_rootfs
dnf_cmd = bb.utils.which(os.getenv('PATH'), "dnf")
- standard_dnf_args = ["-v", "--rpmverbosity=debug", "-y",
+ standard_dnf_args = ["-v", "--rpmverbosity=info", "-y",
"-c", oe.path.join(self.target_rootfs, "etc/dnf/dnf.conf"),
"--setopt=reposdir=%s" %(oe.path.join(self.target_rootfs, "etc/yum.repos.d")),
- "--repofrompath=oe-repo,%s" % (self.rpm_repo_dir),
"--installroot=%s" % (self.target_rootfs),
"--setopt=logdir=%s" % (self.d.getVar('T'))
]
+ if hasattr(self, "rpm_repo_dir"):
+ standard_dnf_args.append("--repofrompath=oe-repo,%s" % (self.rpm_repo_dir))
cmd = [dnf_cmd] + standard_dnf_args + dnf_args
bb.note('Running %s' % ' '.join(cmd))
try:
@@ -992,8 +1021,8 @@ class RpmPM(PackageManager):
def load_old_install_solution(self):
if not os.path.exists(self.solution_manifest):
return []
-
- return open(self.solution_manifest, 'r').read().split()
+ with open(self.solution_manifest, 'r') as fd:
+ return fd.read().split()
def _script_num_prefix(self, path):
files = os.listdir(path)
@@ -1108,10 +1137,7 @@ class OpkgDpkgPM(PackageManager):
tmp_dir = tempfile.mkdtemp()
current_dir = os.getcwd()
os.chdir(tmp_dir)
- if self.d.getVar('IMAGE_PKGTYPE') == 'deb':
- data_tar = 'data.tar.xz'
- else:
- data_tar = 'data.tar.gz'
+ data_tar = 'data.tar.xz'
try:
cmd = [ar_cmd, 'x', pkg_path]
@@ -1212,7 +1238,7 @@ class OpkgPM(OpkgDpkgPM):
priority += 5
for line in (self.d.getVar('IPK_FEED_URIS') or "").split():
- feed_match = re.match("^[ \t]*(.*)##([^ \t]*)[ \t]*$", line)
+ feed_match = re.match(r"^[ \t]*(.*)##([^ \t]*)[ \t]*$", line)
if feed_match is not None:
feed_name = feed_match.group(1)
@@ -1286,6 +1312,8 @@ class OpkgPM(OpkgDpkgPM):
rootfs_config = os.path.join('%s/etc/opkg/base-feeds.conf'
% self.target_rootfs)
+ os.makedirs('%s/etc/opkg' % self.target_rootfs, exist_ok=True)
+
feed_uris = self.construct_uris(feed_uris.split(), feed_base_paths.split())
archs = self.pkg_archs.split() if feed_archs is None else feed_archs.split()
@@ -1329,6 +1357,8 @@ class OpkgPM(OpkgDpkgPM):
cmd = "%s %s" % (self.opkg_cmd, self.opkg_args)
for exclude in (self.d.getVar("PACKAGE_EXCLUDE") or "").split():
cmd += " --add-exclude %s" % exclude
+ for bad_recommendation in (self.d.getVar("BAD_RECOMMENDATIONS") or "").split():
+ cmd += " --add-ignore-recommends %s" % bad_recommendation
cmd += " install "
cmd += " ".join(pkgs)
@@ -1397,45 +1427,6 @@ class OpkgPM(OpkgDpkgPM):
def list_installed(self):
return OpkgPkgsList(self.d, self.target_rootfs, self.config_file).list_pkgs()
- def handle_bad_recommendations(self):
- bad_recommendations = self.d.getVar("BAD_RECOMMENDATIONS") or ""
- if bad_recommendations.strip() == "":
- return
-
- status_file = os.path.join(self.opkg_dir, "status")
-
- # If status file existed, it means the bad recommendations has already
- # been handled
- if os.path.exists(status_file):
- return
-
- cmd = "%s %s info " % (self.opkg_cmd, self.opkg_args)
-
- with open(status_file, "w+") as status:
- for pkg in bad_recommendations.split():
- pkg_info = cmd + pkg
-
- try:
- output = subprocess.check_output(pkg_info.split(), stderr=subprocess.STDOUT).strip().decode("utf-8")
- except subprocess.CalledProcessError as e:
- bb.fatal("Cannot get package info. Command '%s' "
- "returned %d:\n%s" % (pkg_info, e.returncode, e.output.decode("utf-8")))
-
- if output == "":
- bb.note("Ignored bad recommendation: '%s' is "
- "not a package" % pkg)
- continue
-
- for line in output.split('\n'):
- if line.startswith("Status:"):
- status.write("Status: deinstall hold not-installed\n")
- else:
- status.write(line + "\n")
-
- # Append a blank line after each package entry to ensure that it
- # is separated from the following entry
- status.write("\n")
-
def dummy_install(self, pkgs):
"""
The following function dummy installs pkgs and returns the log of output.
@@ -1520,7 +1511,7 @@ class OpkgPM(OpkgDpkgPM):
"trying to extract the package." % pkg)
tmp_dir = super(OpkgPM, self).extract(pkg, pkg_info)
- bb.utils.remove(os.path.join(tmp_dir, "data.tar.gz"))
+ bb.utils.remove(os.path.join(tmp_dir, "data.tar.xz"))
return tmp_dir
@@ -1592,7 +1583,7 @@ class DpkgPM(OpkgDpkgPM):
with open(status_file, "r") as status:
for line in status.read().split('\n'):
- m = re.match("^Package: (.*)", line)
+ m = re.match(r"^Package: (.*)", line)
if m is not None:
installed_pkgs.append(m.group(1))
@@ -1643,7 +1634,7 @@ class DpkgPM(OpkgDpkgPM):
os.environ['APT_CONFIG'] = self.apt_conf_file
- cmd = "%s %s install --force-yes --allow-unauthenticated %s" % \
+ cmd = "%s %s install --force-yes --allow-unauthenticated --no-remove %s" % \
(self.apt_get_cmd, self.apt_args, ' '.join(pkgs))
try:
@@ -1657,13 +1648,13 @@ class DpkgPM(OpkgDpkgPM):
# rename *.dpkg-new files/dirs
for root, dirs, files in os.walk(self.target_rootfs):
for dir in dirs:
- new_dir = re.sub("\.dpkg-new", "", dir)
+ new_dir = re.sub(r"\.dpkg-new", "", dir)
if dir != new_dir:
os.rename(os.path.join(root, dir),
os.path.join(root, new_dir))
for file in files:
- new_file = re.sub("\.dpkg-new", "", file)
+ new_file = re.sub(r"\.dpkg-new", "", file)
if file != new_file:
os.rename(os.path.join(root, file),
os.path.join(root, new_file))
@@ -1728,7 +1719,7 @@ class DpkgPM(OpkgDpkgPM):
sources_file.write("deb %s ./\n" % uri)
def _create_configs(self, archs, base_archs):
- base_archs = re.sub("_", "-", base_archs)
+ base_archs = re.sub(r"_", r"-", base_archs)
if os.path.exists(self.apt_conf_dir):
bb.utils.remove(self.apt_conf_dir, True)
@@ -1782,7 +1773,7 @@ class DpkgPM(OpkgDpkgPM):
with open(self.apt_conf_file, "w+") as apt_conf:
with open(self.d.expand("${STAGING_ETCDIR_NATIVE}/apt/apt.conf.sample")) as apt_conf_sample:
for line in apt_conf_sample.read().split("\n"):
- match_arch = re.match(" Architecture \".*\";$", line)
+ match_arch = re.match(r" Architecture \".*\";$", line)
architectures = ""
if match_arch:
for base_arch in base_arch_list:
@@ -1790,8 +1781,8 @@ class DpkgPM(OpkgDpkgPM):
apt_conf.write(" Architectures {%s};\n" % architectures);
apt_conf.write(" Architecture \"%s\";\n" % base_archs)
else:
- line = re.sub("#ROOTFS#", self.target_rootfs, line)
- line = re.sub("#APTCONF#", self.apt_conf_dir, line)
+ line = re.sub(r"#ROOTFS#", self.target_rootfs, line)
+ line = re.sub(r"#APTCONF#", self.apt_conf_dir, line)
apt_conf.write(line + "\n")
target_dpkg_dir = "%s/var/lib/dpkg" % self.target_rootfs
@@ -1805,8 +1796,7 @@ class DpkgPM(OpkgDpkgPM):
open(os.path.join(target_dpkg_dir, "available"), "w+").close()
def remove_packaging_data(self):
- bb.utils.remove(os.path.join(self.target_rootfs,
- self.d.getVar('opkglibdir')), True)
+ bb.utils.remove(self.target_rootfs + self.d.getVar('opkglibdir'), True)
bb.utils.remove(self.target_rootfs + "/var/lib/dpkg/", True)
def fix_broken_dependencies(self):
diff --git a/external/poky/meta/lib/oe/packagedata.py b/external/poky/meta/lib/oe/packagedata.py
index 32e5c82a..a82085a7 100644
--- a/external/poky/meta/lib/oe/packagedata.py
+++ b/external/poky/meta/lib/oe/packagedata.py
@@ -1,3 +1,7 @@
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
import codecs
import os
@@ -13,9 +17,8 @@ def read_pkgdatafile(fn):
if os.access(fn, os.R_OK):
import re
- f = open(fn, 'r')
- lines = f.readlines()
- f.close()
+ with open(fn, 'r') as f:
+ lines = f.readlines()
r = re.compile("([^:]+):\s*(.*)")
for l in lines:
m = r.match(l)
diff --git a/external/poky/meta/lib/oe/packagegroup.py b/external/poky/meta/lib/oe/packagegroup.py
index 4bc5d3e4..2419cbb6 100644
--- a/external/poky/meta/lib/oe/packagegroup.py
+++ b/external/poky/meta/lib/oe/packagegroup.py
@@ -1,3 +1,7 @@
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
import itertools
def is_optional(feature, d):
diff --git a/external/poky/meta/lib/oe/patch.py b/external/poky/meta/lib/oe/patch.py
index e0f06042..2b1eee10 100644
--- a/external/poky/meta/lib/oe/patch.py
+++ b/external/poky/meta/lib/oe/patch.py
@@ -1,3 +1,7 @@
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
import oe.path
import oe.types
@@ -38,21 +42,10 @@ def runcmd(args, dir = None):
if exitstatus != 0:
raise CmdError(cmd, exitstatus >> 8, output)
if " fuzz " in output:
- bb.warn("""
-Some of the context lines in patches were ignored. This can lead to incorrectly applied patches.
-The context lines in the patches can be updated with devtool:
-
- devtool modify <recipe>
- devtool finish --force-patch-refresh <recipe> <layer_path>
-
-Then the updated patches and the source tree (in devtool's workspace)
-should be reviewed to make sure the patches apply in the correct place
-and don't introduce duplicate lines (which can, and does happen
-when some of the context is ignored). Further information:
-http://lists.openembedded.org/pipermail/openembedded-core/2018-March/148675.html
-https://bugzilla.yoctoproject.org/show_bug.cgi?id=10450
-Details:
-{}""".format(output))
+ # Drop patch fuzz info with header and footer to log file so
+ # insane.bbclass can handle to throw error/warning
+ bb.note("--- Patch fuzz start ---\n%s\n--- Patch fuzz end ---" % format(output))
+
return output
finally:
@@ -334,8 +327,8 @@ class GitApplyTree(PatchTree):
@staticmethod
def interpretPatchHeader(headerlines):
import re
- author_re = re.compile('[\S ]+ <\S+@\S+\.\S+>')
- from_commit_re = re.compile('^From [a-z0-9]{40} .*')
+ author_re = re.compile(r'[\S ]+ <\S+@\S+\.\S+>')
+ from_commit_re = re.compile(r'^From [a-z0-9]{40} .*')
outlines = []
author = None
date = None
@@ -790,9 +783,11 @@ class UserResolver(Resolver):
def patch_path(url, fetch, workdir, expand=True):
- """Return the local path of a patch, or None if this isn't a patch"""
+ """Return the local path of a patch, or return nothing if this isn't a patch"""
local = fetch.localpath(url)
+ if os.path.isdir(local):
+ return
base, ext = os.path.splitext(os.path.basename(local))
if ext in ('.gz', '.bz2', '.xz', '.Z'):
if expand:
@@ -856,6 +851,7 @@ def src_patches(d, all=False, expand=True):
def should_apply(parm, d):
+ import bb.utils
if "mindate" in parm or "maxdate" in parm:
pn = d.getVar('PN')
srcdate = d.getVar('SRCDATE_%s' % pn)
@@ -892,5 +888,15 @@ def should_apply(parm, d):
if srcrev and parm["notrev"] in srcrev:
return False, "doesn't apply to revision"
+ if "maxver" in parm:
+ pv = d.getVar('PV')
+ if bb.utils.vercmp_string_op(pv, parm["maxver"], ">"):
+ return False, "applies to earlier version"
+
+ if "minver" in parm:
+ pv = d.getVar('PV')
+ if bb.utils.vercmp_string_op(pv, parm["minver"], "<"):
+ return False, "applies to later version"
+
return True, None
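
The new minver/maxver SRC_URI parameters gate a patch on PV using bb.utils.vercmp_string_op(). Outside BitBake the same gate can be sketched with a stand-in comparator (BitBake's real version rules are richer than dotted integers):

    def vercmp(a, b):
        pa = [int(x) for x in a.split('.')]
        pb = [int(x) for x in b.split('.')]
        return (pa > pb) - (pa < pb)

    def should_apply(parm, pv):
        if 'maxver' in parm and vercmp(pv, parm['maxver']) > 0:
            return False, "applies to earlier version"
        if 'minver' in parm and vercmp(pv, parm['minver']) < 0:
            return False, "applies to later version"
        return True, None

    assert should_apply({'minver': '2.0'}, '1.9') == (False, "applies to later version")
    assert should_apply({'maxver': '2.0'}, '1.9') == (True, None)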
diff --git a/external/poky/meta/lib/oe/path.py b/external/poky/meta/lib/oe/path.py
index 1e24d058..08297245 100644
--- a/external/poky/meta/lib/oe/path.py
+++ b/external/poky/meta/lib/oe/path.py
@@ -1,3 +1,7 @@
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
import errno
import glob
import shutil
@@ -90,12 +94,27 @@ def copytree(src, dst):
subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
def copyhardlinktree(src, dst):
- """ Make the hard link when possible, otherwise copy. """
+ """Make a tree of hard links when possible, otherwise copy."""
bb.utils.mkdirhier(dst)
if os.path.isdir(src) and not len(os.listdir(src)):
return
- if (os.stat(src).st_dev == os.stat(dst).st_dev):
+ canhard = False
+ testfile = None
+ for root, dirs, files in os.walk(src):
+ if len(files):
+ testfile = os.path.join(root, files[0])
+ break
+
+ if testfile is not None:
+ try:
+ os.link(testfile, os.path.join(dst, 'testfile'))
+ os.unlink(os.path.join(dst, 'testfile'))
+ canhard = True
+ except Exception as e:
+ bb.debug(2, "Hardlink test failed with " + str(e))
+
+ if (canhard):
# Need to copy directories only with tar first since cp will error if two
# writers try and create a directory at the same time
cmd = "cd %s; find . -type d -print | tar --xattrs --xattrs-include='*' -cf - -S -C %s -p --no-recursion --files-from - | tar --xattrs --xattrs-include='*' -xhf - -C %s" % (src, src, dst)
@@ -114,6 +133,14 @@ def copyhardlinktree(src, dst):
else:
copytree(src, dst)
+def copyhardlink(src, dst):
+ """Make a hard link when possible, otherwise copy."""
+
+ try:
+ os.link(src, dst)
+ except OSError:
+ shutil.copy(src, dst)
+
def remove(path, recurse=True):
"""
Equivalent to rm -f or rm -rf
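
copyhardlinktree() previously assumed hard links work whenever src and dst share st_dev; the rewrite probes by actually linking one real file, since equal device numbers do not guarantee os.link() succeeds on every filesystem arrangement. The new copyhardlink() applies the same link-or-copy idea per file; both patterns, reduced:

    import os, shutil

    def can_hardlink(src_file, dst_dir):
        # Probe as copyhardlinktree() now does: link a real file, then clean up.
        probe = os.path.join(dst_dir, 'testfile')
        try:
            os.link(src_file, probe)
            os.unlink(probe)
            return True
        except OSError:
            return False

    def copyhardlink(src, dst):
        try:
            os.link(src, dst)
        except OSError:
            shutil.copy(src, dst)  # cross-device or unsupported: plain copy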
diff --git a/external/poky/meta/lib/oe/prservice.py b/external/poky/meta/lib/oe/prservice.py
index 32dfc15e..2d3c9c7e 100644
--- a/external/poky/meta/lib/oe/prservice.py
+++ b/external/poky/meta/lib/oe/prservice.py
@@ -1,5 +1,12 @@
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
def prserv_make_conn(d, check = False):
+ # Otherwise this fails when called from recipes which e.g. inherit python3native (which sets _PYTHON_SYSCONFIGDATA_NAME) with:
+ # No module named '_sysconfigdata'
+ if '_PYTHON_SYSCONFIGDATA_NAME' in os.environ:
+ del os.environ['_PYTHON_SYSCONFIGDATA_NAME']
import prserv.serv
host_params = list([_f for _f in (d.getVar("PRSERV_HOST") or '').split(':') if _f])
try:
@@ -76,41 +83,40 @@ def prserv_export_tofile(d, metainfo, datainfo, lockdown, nomax=False):
df = d.getVar('PRSERV_DUMPFILE')
#write data
lf = bb.utils.lockfile("%s.lock" % df)
- f = open(df, "a")
- if metainfo:
- #dump column info
- f.write("#PR_core_ver = \"%s\"\n\n" % metainfo['core_ver']);
- f.write("#Table: %s\n" % metainfo['tbl_name'])
- f.write("#Columns:\n")
- f.write("#name \t type \t notn \t dflt \t pk\n")
- f.write("#----------\t --------\t --------\t --------\t ----\n")
- for i in range(len(metainfo['col_info'])):
- f.write("#%10s\t %8s\t %8s\t %8s\t %4s\n" %
- (metainfo['col_info'][i]['name'],
- metainfo['col_info'][i]['type'],
- metainfo['col_info'][i]['notnull'],
- metainfo['col_info'][i]['dflt_value'],
- metainfo['col_info'][i]['pk']))
- f.write("\n")
+ with open(df, "a") as f:
+ if metainfo:
+ #dump column info
+ f.write("#PR_core_ver = \"%s\"\n\n" % metainfo['core_ver']);
+ f.write("#Table: %s\n" % metainfo['tbl_name'])
+ f.write("#Columns:\n")
+ f.write("#name \t type \t notn \t dflt \t pk\n")
+ f.write("#----------\t --------\t --------\t --------\t ----\n")
+ for i in range(len(metainfo['col_info'])):
+ f.write("#%10s\t %8s\t %8s\t %8s\t %4s\n" %
+ (metainfo['col_info'][i]['name'],
+ metainfo['col_info'][i]['type'],
+ metainfo['col_info'][i]['notnull'],
+ metainfo['col_info'][i]['dflt_value'],
+ metainfo['col_info'][i]['pk']))
+ f.write("\n")
- if lockdown:
- f.write("PRSERV_LOCKDOWN = \"1\"\n\n")
+ if lockdown:
+ f.write("PRSERV_LOCKDOWN = \"1\"\n\n")
- if datainfo:
- idx = {}
- for i in range(len(datainfo)):
- pkgarch = datainfo[i]['pkgarch']
- value = datainfo[i]['value']
- if pkgarch not in idx:
- idx[pkgarch] = i
- elif value > datainfo[idx[pkgarch]]['value']:
- idx[pkgarch] = i
- f.write("PRAUTO$%s$%s$%s = \"%s\"\n" %
- (str(datainfo[i]['version']), pkgarch, str(datainfo[i]['checksum']), str(value)))
- if not nomax:
- for i in idx:
- f.write("PRAUTO_%s_%s = \"%s\"\n" % (str(datainfo[idx[i]]['version']),str(datainfo[idx[i]]['pkgarch']),str(datainfo[idx[i]]['value'])))
- f.close()
+ if datainfo:
+ idx = {}
+ for i in range(len(datainfo)):
+ pkgarch = datainfo[i]['pkgarch']
+ value = datainfo[i]['value']
+ if pkgarch not in idx:
+ idx[pkgarch] = i
+ elif value > datainfo[idx[pkgarch]]['value']:
+ idx[pkgarch] = i
+ f.write("PRAUTO$%s$%s$%s = \"%s\"\n" %
+ (str(datainfo[i]['version']), pkgarch, str(datainfo[i]['checksum']), str(value)))
+ if not nomax:
+ for i in idx:
+ f.write("PRAUTO_%s_%s = \"%s\"\n" % (str(datainfo[idx[i]]['version']),str(datainfo[idx[i]]['pkgarch']),str(datainfo[idx[i]]['value'])))
bb.utils.unlockfile(lf)
def prserv_check_avail(d):
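
The environment guard at the top of prserv_make_conn() works around recipes that inherit python3native: that class exports _PYTHON_SYSCONFIGDATA_NAME for the target interpreter, which makes the host-side import of prserv.serv fail with "No module named '_sysconfigdata'". The pattern in isolation:

    import os

    # Drop a target-Python override before importing host-only modules;
    # equivalent to the 'if ... in os.environ: del ...' form above.
    os.environ.pop('_PYTHON_SYSCONFIGDATA_NAME', None)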
diff --git a/external/poky/meta/lib/oe/qa.py b/external/poky/meta/lib/oe/qa.py
index 59c72ce5..ea831b93 100644
--- a/external/poky/meta/lib/oe/qa.py
+++ b/external/poky/meta/lib/oe/qa.py
@@ -1,3 +1,7 @@
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
import os, struct, mmap
class NotELFFileError(Exception):
@@ -37,13 +41,15 @@ class ELFFile:
def __init__(self, name):
self.name = name
self.objdump_output = {}
+ self.data = None
# Context Manager functions to close the mmap explicitly
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, traceback):
- self.data.close()
+ if self.data:
+ self.data.close()
def open(self):
with open(self.name, "rb") as f:
diff --git a/external/poky/meta/lib/oe/recipeutils.py b/external/poky/meta/lib/oe/recipeutils.py
index aa64553c..fde1ad3d 100644
--- a/external/poky/meta/lib/oe/recipeutils.py
+++ b/external/poky/meta/lib/oe/recipeutils.py
@@ -4,6 +4,8 @@
#
# Copyright (C) 2013-2017 Intel Corporation
#
+# SPDX-License-Identifier: GPL-2.0-only
+#
import sys
import os
@@ -16,40 +18,40 @@ import shutil
import re
import fnmatch
import glob
-from collections import OrderedDict, defaultdict
+import bb.tinfoil
+from collections import OrderedDict, defaultdict
+from bb.utils import vercmp_string
# Help us to find places to insert values
-recipe_progression = ['SUMMARY', 'DESCRIPTION', 'HOMEPAGE', 'BUGTRACKER', 'SECTION', 'LICENSE', 'LICENSE_FLAGS', 'LIC_FILES_CHKSUM', 'PROVIDES', 'DEPENDS', 'PR', 'PV', 'SRCREV', 'SRCPV', 'SRC_URI', 'S', 'do_fetch()', 'do_unpack()', 'do_patch()', 'EXTRA_OECONF', 'EXTRA_OECMAKE', 'EXTRA_OESCONS', 'do_configure()', 'EXTRA_OEMAKE', 'do_compile()', 'do_install()', 'do_populate_sysroot()', 'INITSCRIPT', 'USERADD', 'GROUPADD', 'PACKAGES', 'FILES', 'RDEPENDS', 'RRECOMMENDS', 'RSUGGESTS', 'RPROVIDES', 'RREPLACES', 'RCONFLICTS', 'ALLOW_EMPTY', 'populate_packages()', 'do_package()', 'do_deploy()']
+recipe_progression = ['SUMMARY', 'DESCRIPTION', 'AUTHOR', 'HOMEPAGE', 'BUGTRACKER', 'SECTION', 'LICENSE', 'LICENSE_FLAGS', 'LIC_FILES_CHKSUM', 'PROVIDES', 'DEPENDS', 'PR', 'PV', 'SRCREV', 'SRCPV', 'SRC_URI', 'S', 'do_fetch()', 'do_unpack()', 'do_patch()', 'EXTRA_OECONF', 'EXTRA_OECMAKE', 'EXTRA_OESCONS', 'do_configure()', 'EXTRA_OEMAKE', 'do_compile()', 'do_install()', 'do_populate_sysroot()', 'INITSCRIPT', 'USERADD', 'GROUPADD', 'PACKAGES', 'FILES', 'RDEPENDS', 'RRECOMMENDS', 'RSUGGESTS', 'RPROVIDES', 'RREPLACES', 'RCONFLICTS', 'ALLOW_EMPTY', 'populate_packages()', 'do_package()', 'do_deploy()', 'BBCLASSEXTEND']
# Variables that sometimes are a bit long but shouldn't be wrapped
-nowrap_vars = ['SUMMARY', 'HOMEPAGE', 'BUGTRACKER', 'SRC_URI\[(.+\.)?md5sum\]', 'SRC_URI\[(.+\.)?sha256sum\]']
+nowrap_vars = ['SUMMARY', 'HOMEPAGE', 'BUGTRACKER', r'SRC_URI\[(.+\.)?md5sum\]', r'SRC_URI\[(.+\.)?sha256sum\]']
list_vars = ['SRC_URI', 'LIC_FILES_CHKSUM']
meta_vars = ['SUMMARY', 'DESCRIPTION', 'HOMEPAGE', 'BUGTRACKER', 'SECTION']
-def pn_to_recipe(cooker, pn, mc=''):
- """Convert a recipe name (PN) to the path to the recipe file"""
-
- best = cooker.findBestProvider(pn, mc)
- return best[3]
-
-
-def get_unavailable_reasons(cooker, pn):
- """If a recipe could not be found, find out why if possible"""
- import bb.taskdata
- taskdata = bb.taskdata.TaskData(None, skiplist=cooker.skiplist)
- return taskdata.get_reasons(pn)
-
-
-def parse_recipe(cooker, fn, appendfiles):
+def simplify_history(history, d):
"""
- Parse an individual recipe file, optionally with a list of
- bbappend files.
+ Eliminate any irrelevant events from a variable history
"""
- import bb.cache
- parser = bb.cache.NoCache(cooker.databuilder)
- envdata = parser.loadDataFull(fn, appendfiles)
- return envdata
+ ret_history = []
+ has_set = False
+ # Go backwards through the history and remove any immediate operations
+ # before the most recent set
+ for event in reversed(history):
+ if 'flag' in event or not 'file' in event:
+ continue
+ if event['op'] == 'set':
+ if has_set:
+ continue
+ has_set = True
+ elif event['op'] in ('append', 'prepend', 'postdot', 'predot'):
+ # Reminder: "append" and "prepend" mean += and =+ respectively, NOT _append / _prepend
+ if has_set:
+ continue
+ ret_history.insert(0, event)
+ return ret_history
def get_var_files(fn, varlist, d):
@@ -58,11 +60,19 @@ def get_var_files(fn, varlist, d):
"""
varfiles = {}
for v in varlist:
- history = d.varhistory.variable(v)
files = []
- for event in history:
- if 'file' in event and not 'flag' in event:
- files.append(event['file'])
+ if '[' in v:
+ varsplit = v.split('[')
+ varflag = varsplit[1].split(']')[0]
+ history = d.varhistory.variable(varsplit[0])
+ for event in history:
+ if 'file' in event and event.get('flag', '') == varflag:
+ files.append(event['file'])
+ else:
+ history = d.varhistory.variable(v)
+ for event in history:
+ if 'file' in event and not 'flag' in event:
+ files.append(event['file'])
if files:
actualfile = files[-1]
else:
@@ -153,7 +163,7 @@ def patch_recipe_lines(fromlines, values, trailing_newline=True):
key = item[:-2]
else:
key = item
- restr = '%s(_[a-zA-Z0-9-_$(){}]+|\[[^\]]*\])?' % key
+ restr = r'%s(_[a-zA-Z0-9-_$(){}]+|\[[^\]]*\])?' % key
if item.endswith('()'):
recipe_progression_restrs.append(restr + '()')
else:
@@ -176,7 +186,14 @@ def patch_recipe_lines(fromlines, values, trailing_newline=True):
def outputvalue(name, lines, rewindcomments=False):
if values[name] is None:
return
- rawtext = '%s = "%s"%s' % (name, values[name], newline)
+ if isinstance(values[name], tuple):
+ op, value = values[name]
+ if op == '+=' and value.strip() == '':
+ return
+ else:
+ value = values[name]
+ op = '='
+ rawtext = '%s %s "%s"%s' % (name, op, value, newline)
addlines = []
nowrap = False
for nowrap_re in nowrap_vars_res:
@@ -186,10 +203,10 @@ def patch_recipe_lines(fromlines, values, trailing_newline=True):
if nowrap:
addlines.append(rawtext)
elif name in list_vars:
- splitvalue = split_var_value(values[name], assignment=False)
+ splitvalue = split_var_value(value, assignment=False)
if len(splitvalue) > 1:
linesplit = ' \\\n' + (' ' * (len(name) + 4))
- addlines.append('%s = "%s%s"%s' % (name, linesplit.join(splitvalue), linesplit, newline))
+ addlines.append('%s %s "%s%s"%s' % (name, op, linesplit.join(splitvalue), linesplit, newline))
else:
addlines.append(rawtext)
else:
@@ -321,12 +338,47 @@ def patch_recipe(d, fn, varvalues, patch=False, relpath='', redirect_output=None
"""Modify a list of variable values in the specified recipe. Handles inc files if
used by the recipe.
"""
+ overrides = d.getVar('OVERRIDES').split(':')
+ def override_applicable(hevent):
+ op = hevent['op']
+ if '[' in op:
+ opoverrides = op.split('[')[1].split(']')[0].split('_')
+ for opoverride in opoverrides:
+ if not opoverride in overrides:
+ return False
+ return True
+
varlist = varvalues.keys()
+ fn = os.path.abspath(fn)
varfiles = get_var_files(fn, varlist, d)
locs = localise_file_vars(fn, varfiles, varlist)
patches = []
for f,v in locs.items():
vals = {k: varvalues[k] for k in v}
+ f = os.path.abspath(f)
+ if f == fn:
+ extravals = {}
+ for var, value in vals.items():
+ if var in list_vars:
+ history = simplify_history(d.varhistory.variable(var), d)
+ recipe_set = False
+ for event in history:
+ if os.path.abspath(event['file']) == fn:
+ if event['op'] == 'set':
+ recipe_set = True
+ if not recipe_set:
+ for event in history:
+ if event['op'].startswith('_remove'):
+ continue
+ if not override_applicable(event):
+ continue
+ newvalue = value.replace(event['detail'], '')
+ if newvalue == value and os.path.abspath(event['file']) == fn and event['op'].startswith('_'):
+ op = event['op'].replace('[', '_').replace(']', '')
+ extravals[var + op] = None
+ value = newvalue
+ vals[var] = ('+=', value)
+ vals.update(extravals)
patchdata = patch_recipe_file(f, vals, patch, relpath, redirect_output)
if patch:
patches.append(patchdata)
@@ -369,6 +421,8 @@ def copy_recipe_files(d, tgt_dir, whole_dir=False, download=True, all_variants=F
# Ensure we handle class-target if we're dealing with one of the variants
variants.append('target')
for variant in variants:
+ if variant.startswith("devupstream"):
+ localdata.setVar('SRCPV', 'git')
localdata.setVar('CLASSOVERRIDE', 'class-%s' % variant)
fetch_urls(localdata)
@@ -432,7 +486,14 @@ def get_recipe_local_files(d, patches=False, archives=False):
unpack = fetch.ud[uri].parm.get('unpack', True)
if unpack:
continue
- ret[fname] = localpath
+ if os.path.isdir(localpath):
+ for root, dirs, files in os.walk(localpath):
+ for fname in files:
+ fileabspath = os.path.join(root,fname)
+ srcdir = os.path.dirname(localpath)
+ ret[os.path.relpath(fileabspath,srcdir)] = fileabspath
+ else:
+ ret[fname] = localpath
return ret
@@ -875,7 +936,7 @@ def get_recipe_pv_without_srcpv(pv, uri_type):
sfx = ''
if uri_type == 'git':
- git_regex = re.compile("(?P<pfx>v?)(?P<ver>[^\+]*)((?P<sfx>\+(git)?r?(AUTOINC\+))(?P<rev>.*))?")
+ git_regex = re.compile(r"(?P<pfx>v?)(?P<ver>.*?)(?P<sfx>\+[^\+]*(git)?r?(AUTOINC\+))(?P<rev>.*)")
m = git_regex.match(pv)
if m:
@@ -883,7 +944,7 @@ def get_recipe_pv_without_srcpv(pv, uri_type):
pfx = m.group('pfx')
sfx = m.group('sfx')
else:
- regex = re.compile("(?P<pfx>(v|r)?)(?P<ver>.*)")
+ regex = re.compile(r"(?P<pfx>(v|r)?)(?P<ver>.*)")
m = regex.match(pv)
if m:
pv = m.group('ver')
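A quick runnable check of the tightened git regex above; the PV value is a made-up AUTOINC-style example:

    import re
    git_regex = re.compile(r"(?P<pfx>v?)(?P<ver>.*?)(?P<sfx>\+[^\+]*(git)?r?(AUTOINC\+))(?P<rev>.*)")
    m = git_regex.match("1.2.3+gitAUTOINC+0a1b2c3d4e")
    assert m.group('ver') == '1.2.3' and m.group('rev') == '0a1b2c3d4e'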
@@ -969,3 +1030,86 @@ def get_recipe_upstream_version(rd):
ru['datetime'] = datetime.now()
return ru
+
+def _get_recipe_upgrade_status(data):
+ uv = get_recipe_upstream_version(data)
+
+ pn = data.getVar('PN')
+ cur_ver = uv['current_version']
+
+ upstream_version_unknown = data.getVar('UPSTREAM_VERSION_UNKNOWN')
+ if not uv['version']:
+ status = "UNKNOWN" if upstream_version_unknown else "UNKNOWN_BROKEN"
+ else:
+ cmp = vercmp_string(uv['current_version'], uv['version'])
+ if cmp == -1:
+ status = "UPDATE" if not upstream_version_unknown else "KNOWN_BROKEN"
+ elif cmp == 0:
+ status = "MATCH" if not upstream_version_unknown else "KNOWN_BROKEN"
+ else:
+ status = "UNKNOWN" if upstream_version_unknown else "UNKNOWN_BROKEN"
+
+ next_ver = uv['version'] if uv['version'] else "N/A"
+ revision = uv['revision'] if uv['revision'] else "N/A"
+ maintainer = data.getVar('RECIPE_MAINTAINER')
+ no_upgrade_reason = data.getVar('RECIPE_NO_UPDATE_REASON')
+
+ return (pn, status, cur_ver, next_ver, maintainer, revision, no_upgrade_reason)
+
+def get_recipe_upgrade_status(recipes=None):
+ pkgs_list = []
+ data_copy_list = []
+ copy_vars = ('SRC_URI',
+ 'PV',
+ 'DL_DIR',
+ 'PN',
+ 'CACHE',
+ 'PERSISTENT_DIR',
+ 'BB_URI_HEADREVS',
+ 'UPSTREAM_CHECK_COMMITS',
+ 'UPSTREAM_CHECK_GITTAGREGEX',
+ 'UPSTREAM_CHECK_REGEX',
+ 'UPSTREAM_CHECK_URI',
+ 'UPSTREAM_VERSION_UNKNOWN',
+ 'RECIPE_MAINTAINER',
+ 'RECIPE_NO_UPDATE_REASON',
+ 'RECIPE_UPSTREAM_VERSION',
+ 'RECIPE_UPSTREAM_DATE',
+ 'CHECK_DATE',
+ )
+
+ with bb.tinfoil.Tinfoil() as tinfoil:
+ tinfoil.prepare(config_only=False)
+
+ if not recipes:
+ recipes = tinfoil.all_recipe_files(variants=False)
+
+ for fn in recipes:
+ try:
+ if fn.startswith("/"):
+ data = tinfoil.parse_recipe_file(fn)
+ else:
+ data = tinfoil.parse_recipe(fn)
+ except bb.providers.NoProvider:
+ bb.note(" No provider for %s" % fn)
+ continue
+
+ unreliable = data.getVar('UPSTREAM_CHECK_UNRELIABLE')
+ if unreliable == "1":
+ bb.note(" Skip package %s as upstream check unreliable" % pn)
+ continue
+
+ data_copy = bb.data.init()
+ for var in copy_vars:
+ data_copy.setVar(var, data.getVar(var))
+ for k in data:
+ if k.startswith('SRCREV'):
+ data_copy.setVar(k, data.getVar(k))
+
+ data_copy_list.append(data_copy)
+
+ from concurrent.futures import ProcessPoolExecutor
+ with ProcessPoolExecutor(max_workers=utils.cpu_count()) as executor:
+ pkgs_list = executor.map(_get_recipe_upgrade_status, data_copy_list)
+
+ return pkgs_list
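A hedged usage sketch: get_recipe_upgrade_status() needs a fully initialised build directory, and each result unpacks in the tuple order returned by _get_recipe_upgrade_status():

    # hypothetical report loop; run from inside a build environment
    from oe.recipeutils import get_recipe_upgrade_status
    for pn, status, cur_ver, next_ver, maintainer, revision, reason in get_recipe_upgrade_status():
        print("%s: %s (%s -> %s)" % (pn, status, cur_ver, next_ver))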
diff --git a/external/poky/meta/lib/oe/rootfs.py b/external/poky/meta/lib/oe/rootfs.py
index e5512d09..cd65e620 100644
--- a/external/poky/meta/lib/oe/rootfs.py
+++ b/external/poky/meta/lib/oe/rootfs.py
@@ -1,3 +1,6 @@
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
from abc import ABCMeta, abstractmethod
from oe.utils import execute_pre_post_process
from oe.package_manager import *
@@ -123,17 +126,16 @@ class Rootfs(object, metaclass=ABCMeta):
bb.utils.mkdirhier(self.image_rootfs + os.path.dirname(dir))
shutil.copytree(self.image_rootfs + '-orig' + dir, self.image_rootfs + dir, symlinks=True)
- cpath = oe.cachedpath.CachedPath()
# Copy files located in /usr/lib/debug or /usr/src/debug
for dir in ["/usr/lib/debug", "/usr/src/debug"]:
src = self.image_rootfs + '-orig' + dir
- if cpath.exists(src):
+ if os.path.exists(src):
dst = self.image_rootfs + dir
bb.utils.mkdirhier(os.path.dirname(dst))
shutil.copytree(src, dst)
# Copy files with suffix '.debug' or located in '.debug' dir.
- for root, dirs, files in cpath.walk(self.image_rootfs + '-orig'):
+ for root, dirs, files in os.walk(self.image_rootfs + '-orig'):
relative_dir = root[len(self.image_rootfs + '-orig'):]
for f in files:
if f.endswith('.debug') or '/.debug' in relative_dir:
@@ -354,9 +356,9 @@ class Rootfs(object, metaclass=ABCMeta):
class RpmRootfs(Rootfs):
def __init__(self, d, manifest_dir, progress_reporter=None, logcatcher=None):
super(RpmRootfs, self).__init__(d, progress_reporter, logcatcher)
- self.log_check_regex = '(unpacking of archive failed|Cannot find package'\
- '|exit 1|ERROR: |Error: |Error |ERROR '\
- '|Failed |Failed: |Failed$|Failed\(\d+\):)'
+ self.log_check_regex = r'(unpacking of archive failed|Cannot find package'\
+ r'|exit 1|ERROR: |Error: |Error |ERROR '\
+ r'|Failed |Failed: |Failed$|Failed\(\d+\):)'
self.manifest = RpmManifest(d, manifest_dir)
self.pm = RpmPM(d,
@@ -499,7 +501,7 @@ class DpkgOpkgRootfs(Rootfs):
pkg_depends_list = []
# filter version requirements like libc (>= 1.1)
for dep in pkg_depends.split(', '):
- m_dep = re.match("^(.*) \(.*\)$", dep)
+ m_dep = re.match(r"^(.*) \(.*\)$", dep)
if m_dep:
dep = m_dep.group(1)
pkg_depends_list.append(dep)
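The version-requirement filter can be checked in isolation; the dependency string is a representative sample:

    import re
    dep = "libc6 (>= 2.28)"
    m_dep = re.match(r"^(.*) \(.*\)$", dep)
    assert m_dep and m_dep.group(1) == "libc6"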
@@ -515,21 +517,33 @@ class DpkgOpkgRootfs(Rootfs):
data = status.read()
status.close()
for line in data.split('\n'):
- m_pkg = re.match("^Package: (.*)", line)
- m_status = re.match("^Status:.*unpacked", line)
- m_depends = re.match("^Depends: (.*)", line)
+ m_pkg = re.match(r"^Package: (.*)", line)
+ m_status = re.match(r"^Status:.*unpacked", line)
+ m_depends = re.match(r"^Depends: (.*)", line)
+ #Only one of m_pkg, m_status or m_depends is not None at a time
+ #If m_pkg is not None, we started a new package
if m_pkg is not None:
- if pkg_name and pkg_status_match:
- pkgs[pkg_name] = _get_pkg_depends_list(pkg_depends)
-
+ #Get Package name
pkg_name = m_pkg.group(1)
+ #Make sure we reset other variables
pkg_status_match = False
pkg_depends = ""
elif m_status is not None:
+ #New status matched
pkg_status_match = True
elif m_depends is not None:
+ #New depends matched
pkg_depends = m_depends.group(1)
+ else:
+ pass
+
+ #Now check if we can process package depends and postinst
+ if "" != pkg_name and pkg_status_match:
+ pkgs[pkg_name] = _get_pkg_depends_list(pkg_depends)
+ else:
+ #Not enough information
+ pass
# remove package dependencies not in postinsts
pkg_names = list(pkgs.keys())
@@ -632,6 +646,7 @@ class DpkgRootfs(DpkgOpkgRootfs):
if pkg_type in pkgs_to_install:
self.pm.install(pkgs_to_install[pkg_type],
[False, True][pkg_type == Manifest.PKG_TYPE_ATTEMPT_ONLY])
+ self.pm.fix_broken_dependencies()
if self.progress_reporter:
# Don't support attemptonly, so skip that
@@ -735,15 +750,16 @@ class OpkgRootfs(DpkgOpkgRootfs):
if filecmp.cmp(f1, f2):
return True
- if self.image_rootfs not in f1:
- self._prelink_file(f1.replace(key, ''), f1)
+ if bb.data.inherits_class('image-prelink', self.d):
+ if self.image_rootfs not in f1:
+ self._prelink_file(f1.replace(key, ''), f1)
- if self.image_rootfs not in f2:
- self._prelink_file(f2.replace(key, ''), f2)
+ if self.image_rootfs not in f2:
+ self._prelink_file(f2.replace(key, ''), f2)
- # Both of them are prelinked
- if filecmp.cmp(f1, f2):
- return True
+ # Both of them are prelinked
+ if filecmp.cmp(f1, f2):
+ return True
# Not equal
return False
@@ -759,7 +775,7 @@ class OpkgRootfs(DpkgOpkgRootfs):
if allow_replace is None:
allow_replace = ""
- allow_rep = re.compile(re.sub("\|$", "", allow_replace))
+ allow_rep = re.compile(re.sub(r"\|$", r"", allow_replace))
error_prompt = "Multilib check error:"
files = {}
@@ -879,8 +895,6 @@ class OpkgRootfs(DpkgOpkgRootfs):
self.pm.update()
- self.pm.handle_bad_recommendations()
-
if self.progress_reporter:
self.progress_reporter.next_stage()
diff --git a/external/poky/meta/lib/oe/sdk.py b/external/poky/meta/lib/oe/sdk.py
index ef81f8cf..d02a2748 100644
--- a/external/poky/meta/lib/oe/sdk.py
+++ b/external/poky/meta/lib/oe/sdk.py
@@ -1,3 +1,7 @@
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
from abc import ABCMeta, abstractmethod
from oe.utils import execute_pre_post_process
from oe.manifest import *
@@ -91,8 +95,8 @@ class Sdk(object, metaclass=ABCMeta):
if linguas == "all":
pm.install_glob("nativesdk-glibc-binary-localedata-*.utf-8", sdk=True)
else:
- for lang in linguas.split():
- pm.install("nativesdk-glibc-binary-localedata-%s.utf-8" % lang)
+ pm.install(["nativesdk-glibc-binary-localedata-%s.utf-8" % \
+ lang for lang in linguas.split()])
# Generate a locale archive of them
target_arch = self.d.getVar('SDK_ARCH')
rootfs = oe.path.join(self.sdk_host_sysroot, self.sdk_native_path)
diff --git a/external/poky/meta/lib/oe/sstatesig.py b/external/poky/meta/lib/oe/sstatesig.py
index 18c5a353..d24e3738 100644
--- a/external/poky/meta/lib/oe/sstatesig.py
+++ b/external/poky/meta/lib/oe/sstatesig.py
@@ -1,3 +1,6 @@
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
import bb.siggen
import oe
@@ -56,7 +59,7 @@ def sstate_rundepfilter(siggen, fn, recipename, task, dep, depname, dataCache):
# is machine specific.
# Therefore if we're not a kernel or a module recipe (inheriting the kernel classes)
# and we recommend a kernel-module, we exclude the dependency.
- depfn = dep.rsplit(".", 1)[0]
+ depfn = dep.rsplit(":", 1)[0]
if dataCache and isKernel(depfn) and not isKernel(fn):
for pkg in dataCache.runrecs[fn]:
if " ".join(dataCache.runrecs[fn][pkg]).find("kernel-module-") != -1:
@@ -87,8 +90,7 @@ class SignatureGeneratorOEBasic(bb.siggen.SignatureGeneratorBasic):
def rundep_check(self, fn, recipename, task, dep, depname, dataCache = None):
return sstate_rundepfilter(self, fn, recipename, task, dep, depname, dataCache)
-class SignatureGeneratorOEBasicHash(bb.siggen.SignatureGeneratorBasicHash):
- name = "OEBasicHash"
+class SignatureGeneratorOEBasicHashMixIn(object):
def init_rundepcheck(self, data):
self.abisaferecipes = (data.getVar("SIGGEN_EXCLUDERECIPES_ABISAFE") or "").split()
self.saferecipedeps = (data.getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS") or "").split()
@@ -101,6 +103,8 @@ class SignatureGeneratorOEBasicHash(bb.siggen.SignatureGeneratorBasicHash):
self.unlockedrecipes = (data.getVar("SIGGEN_UNLOCKED_RECIPES") or
"").split()
self.unlockedrecipes = { k: "" for k in self.unlockedrecipes }
+ self.buildarch = data.getVar('BUILD_ARCH')
+ self._internal = False
pass
def tasks_resolved(self, virtmap, virtpnmap, dataCache):
@@ -126,12 +130,11 @@ class SignatureGeneratorOEBasicHash(bb.siggen.SignatureGeneratorBasicHash):
return sstate_rundepfilter(self, fn, recipename, task, dep, depname, dataCache)
def get_taskdata(self):
- data = super(bb.siggen.SignatureGeneratorBasicHash, self).get_taskdata()
- return (data, self.lockedpnmap, self.lockedhashfn)
+ return (self.lockedpnmap, self.lockedhashfn, self.lockedhashes) + super().get_taskdata()
def set_taskdata(self, data):
- coredata, self.lockedpnmap, self.lockedhashfn = data
- super(bb.siggen.SignatureGeneratorBasicHash, self).set_taskdata(coredata)
+ self.lockedpnmap, self.lockedhashfn, self.lockedhashes = data[:3]
+ super().set_taskdata(data[3:])
def dump_sigs(self, dataCache, options):
sigfile = os.getcwd() + "/locked-sigs.inc"
@@ -139,8 +142,29 @@ class SignatureGeneratorOEBasicHash(bb.siggen.SignatureGeneratorBasicHash):
self.dump_lockedsigs(sigfile)
return super(bb.siggen.SignatureGeneratorBasicHash, self).dump_sigs(dataCache, options)
- def get_taskhash(self, fn, task, deps, dataCache):
- h = super(bb.siggen.SignatureGeneratorBasicHash, self).get_taskhash(fn, task, deps, dataCache)
+ def prep_taskhash(self, tid, deps, dataCache):
+ super().prep_taskhash(tid, deps, dataCache)
+ if hasattr(self, "extramethod"):
+ (_, _, _, fn) = bb.runqueue.split_tid_mcfn(tid)
+ inherits = " ".join(dataCache.inherits[fn])
+ if inherits.find("/native.bbclass") != -1 or inherits.find("/cross.bbclass") != -1:
+ self.extramethod[tid] = ":" + self.buildarch
+
+ def get_taskhash(self, tid, deps, dataCache):
+ if tid in self.lockedhashes:
+ if self.lockedhashes[tid]:
+ return self.lockedhashes[tid]
+ else:
+ return super().get_taskhash(tid, deps, dataCache)
+
+ # get_taskhash will call get_unihash internally in the parent class. We
+ # need to disable our filter of it while this runs, else
+ # incorrect hashes can be calculated.
+ self._internal = True
+ h = super().get_taskhash(tid, deps, dataCache)
+ self._internal = False
+
+ (mc, _, task, fn) = bb.runqueue.split_tid_mcfn(tid)
recipename = dataCache.pkg_fn[fn]
self.lockedpnmap[fn] = recipename
@@ -150,75 +174,77 @@ class SignatureGeneratorOEBasicHash(bb.siggen.SignatureGeneratorBasicHash):
if recipename in self.unlockedrecipes:
unlocked = True
else:
- def get_mc(tid):
- tid = tid.rsplit('.', 1)[0]
- if tid.startswith('multiconfig:'):
- elems = tid.split(':')
- return elems[1]
def recipename_from_dep(dep):
- # The dep entry will look something like
- # /path/path/recipename.bb.task, virtual:native:/p/foo.bb.task,
- # ...
-
- fn = dep.rsplit('.', 1)[0]
+ fn = bb.runqueue.fn_from_tid(dep)
return dataCache.pkg_fn[fn]
- mc = get_mc(fn)
# If any unlocked recipe is in the direct dependencies then the
# current recipe should be unlocked as well.
- depnames = [ recipename_from_dep(x) for x in deps if mc == get_mc(x)]
+ depnames = [ recipename_from_dep(x) for x in deps if mc == bb.runqueue.mc_from_tid(x)]
if any(x in y for y in depnames for x in self.unlockedrecipes):
self.unlockedrecipes[recipename] = ''
unlocked = True
if not unlocked and recipename in self.lockedsigs:
if task in self.lockedsigs[recipename]:
- k = fn + "." + task
h_locked = self.lockedsigs[recipename][task][0]
var = self.lockedsigs[recipename][task][1]
- self.lockedhashes[k] = h_locked
- self.taskhash[k] = h_locked
+ self.lockedhashes[tid] = h_locked
+ self._internal = True
+ unihash = self.get_unihash(tid)
+ self._internal = False
#bb.warn("Using %s %s %s" % (recipename, task, h))
- if h != h_locked:
+ if h != h_locked and h_locked != unihash:
self.mismatch_msgs.append('The %s:%s sig is computed to be %s, but the sig is locked to %s in %s'
% (recipename, task, h, h_locked, var))
return h_locked
+
+ self.lockedhashes[tid] = False
#bb.warn("%s %s %s" % (recipename, task, h))
return h
+ def get_stampfile_hash(self, tid):
+ if tid in self.lockedhashes and self.lockedhashes[tid]:
+ return self.lockedhashes[tid]
+ return super().get_stampfile_hash(tid)
+
+ def get_unihash(self, tid):
+ if tid in self.lockedhashes and self.lockedhashes[tid] and not self._internal:
+ return self.lockedhashes[tid]
+ return super().get_unihash(tid)
+
def dump_sigtask(self, fn, task, stampbase, runtime):
- k = fn + "." + task
- if k in self.lockedhashes:
+ tid = fn + ":" + task
+ if tid in self.lockedhashes and self.lockedhashes[tid]:
return
super(bb.siggen.SignatureGeneratorBasicHash, self).dump_sigtask(fn, task, stampbase, runtime)
def dump_lockedsigs(self, sigfile, taskfilter=None):
types = {}
- for k in self.runtaskdeps:
+ for tid in self.runtaskdeps:
if taskfilter:
- if not k in taskfilter:
+ if not tid in taskfilter:
continue
- fn = k.rsplit(".",1)[0]
+ fn = bb.runqueue.fn_from_tid(tid)
t = self.lockedhashfn[fn].split(" ")[1].split(":")[5]
t = 't-' + t.replace('_', '-')
if t not in types:
types[t] = []
- types[t].append(k)
+ types[t].append(tid)
with open(sigfile, "w") as f:
l = sorted(types)
for t in l:
f.write('SIGGEN_LOCKEDSIGS_%s = "\\\n' % t)
types[t].sort()
- sortedk = sorted(types[t], key=lambda k: self.lockedpnmap[k.rsplit(".",1)[0]])
- for k in sortedk:
- fn = k.rsplit(".",1)[0]
- task = k.rsplit(".",1)[1]
- if k not in self.taskhash:
+ sortedtid = sorted(types[t], key=lambda tid: self.lockedpnmap[bb.runqueue.fn_from_tid(tid)])
+ for tid in sortedtid:
+ (_, _, task, fn) = bb.runqueue.split_tid_mcfn(tid)
+ if tid not in self.taskhash:
continue
- f.write(" " + self.lockedpnmap[fn] + ":" + task + ":" + self.taskhash[k] + " \\\n")
+ f.write(" " + self.lockedpnmap[fn] + ":" + task + ":" + self.get_unihash(tid) + " \\\n")
f.write(' "\n')
f.write('SIGGEN_LOCKEDSIGS_TYPES_%s = "%s"' % (self.machine, " ".join(l)))
@@ -226,25 +252,26 @@ class SignatureGeneratorOEBasicHash(bb.siggen.SignatureGeneratorBasicHash):
with open(sigfile, "w") as f:
tasks = []
for taskitem in self.taskhash:
- (fn, task) = taskitem.rsplit(".", 1)
+ (fn, task) = taskitem.rsplit(":", 1)
pn = self.lockedpnmap[fn]
tasks.append((pn, task, fn, self.taskhash[taskitem]))
for (pn, task, fn, taskhash) in sorted(tasks):
- f.write('%s.%s %s %s\n' % (pn, task, fn, taskhash))
+ f.write('%s:%s %s %s\n' % (pn, task, fn, taskhash))
- def checkhashes(self, missed, ret, sq_fn, sq_task, sq_hash, sq_hashfn, d):
+ def checkhashes(self, sq_data, missed, found, d):
warn_msgs = []
error_msgs = []
sstate_missing_msgs = []
- for task in range(len(sq_fn)):
- if task not in ret:
+ for tid in sq_data['hash']:
+ if tid not in found:
for pn in self.lockedsigs:
- if sq_hash[task] in iter(self.lockedsigs[pn].values()):
- if sq_task[task] == 'do_shared_workdir':
+ taskname = bb.runqueue.taskname_from_tid(tid)
+ if sq_data['hash'][tid] in iter(self.lockedsigs[pn].values()):
+ if taskname == 'do_shared_workdir':
continue
sstate_missing_msgs.append("Locked sig is set for %s:%s (%s) yet not in sstate cache?"
- % (pn, sq_task[task], sq_hash[task]))
+ % (pn, taskname, sq_data['hash'][tid]))
checklevel = d.getVar("SIGGEN_LOCKEDSIGS_TASKSIG_CHECK")
if checklevel == 'warn':
@@ -263,10 +290,25 @@ class SignatureGeneratorOEBasicHash(bb.siggen.SignatureGeneratorBasicHash):
if error_msgs:
bb.fatal("\n".join(error_msgs))
+class SignatureGeneratorOEBasicHash(SignatureGeneratorOEBasicHashMixIn, bb.siggen.SignatureGeneratorBasicHash):
+ name = "OEBasicHash"
+
+class SignatureGeneratorOEEquivHash(SignatureGeneratorOEBasicHashMixIn, bb.siggen.SignatureGeneratorUniHashMixIn, bb.siggen.SignatureGeneratorBasicHash):
+ name = "OEEquivHash"
+
+ def init_rundepcheck(self, data):
+ super().init_rundepcheck(data)
+ self.server = data.getVar('BB_HASHSERVE')
+ if not self.server:
+ bb.fatal("OEEquivHash requires BB_HASHSERVE to be set")
+ self.method = data.getVar('SSTATE_HASHEQUIV_METHOD')
+ if not self.method:
+ bb.fatal("OEEquivHash requires SSTATE_HASHEQUIV_METHOD to be set")
# Insert these classes into siggen's namespace so it can see and select them
bb.siggen.SignatureGeneratorOEBasic = SignatureGeneratorOEBasic
bb.siggen.SignatureGeneratorOEBasicHash = SignatureGeneratorOEBasicHash
+bb.siggen.SignatureGeneratorOEEquivHash = SignatureGeneratorOEEquivHash
def find_siginfo(pn, taskname, taskhashlist, d):
@@ -278,7 +320,7 @@ def find_siginfo(pn, taskname, taskhashlist, d):
if not taskname:
# We have to derive pn and taskname
key = pn
- splitit = key.split('.bb.')
+ splitit = key.split('.bb:')
taskname = splitit[1]
pn = os.path.basename(splitit[0]).split('_')[0]
if key.startswith('virtual:native:'):
@@ -327,7 +369,7 @@ def find_siginfo(pn, taskname, taskhashlist, d):
if not taskhashlist or (len(filedates) < 2 and not foundall):
# That didn't work, look in sstate-cache
- hashes = taskhashlist or ['?' * 32]
+ hashes = taskhashlist or ['?' * 64]
localdata = bb.data.createCopy(d)
for hashval in hashes:
localdata.setVar('PACKAGE_ARCH', '*')
@@ -414,4 +456,140 @@ def find_sstate_manifest(taskdata, taskdata2, taskname, d, multilibcache):
bb.warn("Manifest %s not found in %s (variant '%s')?" % (manifest, d2.expand(" ".join(pkgarchs)), variant))
return None, d2
+def OEOuthashBasic(path, sigfile, task, d):
+ """
+ Basic output hash function
+
+ Calculates the output hash of a task by hashing all output file metadata
+ and file contents.
+ """
+ import hashlib
+ import stat
+ import pwd
+ import grp
+
+ def update_hash(s):
+ s = s.encode('utf-8')
+ h.update(s)
+ if sigfile:
+ sigfile.write(s)
+
+ h = hashlib.sha256()
+ prev_dir = os.getcwd()
+ include_owners = os.environ.get('PSEUDO_DISABLED') == '0'
+ extra_content = d.getVar('HASHEQUIV_HASH_VERSION')
+
+ try:
+ os.chdir(path)
+
+ update_hash("OEOuthashBasic\n")
+ if extra_content:
+ update_hash(extra_content + "\n")
+
+ # It is only currently useful to get equivalent hashes for things that
+ # can be restored from sstate. Since the sstate object is named using
+ # SSTATE_PKGSPEC and the task name, those should be included in the
+ # output hash calculation.
+ update_hash("SSTATE_PKGSPEC=%s\n" % d.getVar('SSTATE_PKGSPEC'))
+ update_hash("task=%s\n" % task)
+
+ for root, dirs, files in os.walk('.', topdown=True):
+ # Sort directories to ensure consistent ordering when recursing
+ dirs.sort()
+ files.sort()
+
+ def process(path):
+ s = os.lstat(path)
+
+ if stat.S_ISDIR(s.st_mode):
+ update_hash('d')
+ elif stat.S_ISCHR(s.st_mode):
+ update_hash('c')
+ elif stat.S_ISBLK(s.st_mode):
+ update_hash('b')
+ elif stat.S_ISSOCK(s.st_mode):
+ update_hash('s')
+ elif stat.S_ISLNK(s.st_mode):
+ update_hash('l')
+ elif stat.S_ISFIFO(s.st_mode):
+ update_hash('p')
+ else:
+ update_hash('-')
+
+ def add_perm(mask, on, off='-'):
+ if mask & s.st_mode:
+ update_hash(on)
+ else:
+ update_hash(off)
+
+ add_perm(stat.S_IRUSR, 'r')
+ add_perm(stat.S_IWUSR, 'w')
+ if stat.S_ISUID & s.st_mode:
+ add_perm(stat.S_IXUSR, 's', 'S')
+ else:
+ add_perm(stat.S_IXUSR, 'x')
+
+ add_perm(stat.S_IRGRP, 'r')
+ add_perm(stat.S_IWGRP, 'w')
+ if stat.S_ISGID & s.st_mode:
+ add_perm(stat.S_IXGRP, 's', 'S')
+ else:
+ add_perm(stat.S_IXGRP, 'x')
+
+ add_perm(stat.S_IROTH, 'r')
+ add_perm(stat.S_IWOTH, 'w')
+ if stat.S_ISVTX & s.st_mode:
+ update_hash('t')
+ else:
+ add_perm(stat.S_IXOTH, 'x')
+
+ if include_owners:
+ try:
+ update_hash(" %10s" % pwd.getpwuid(s.st_uid).pw_name)
+ update_hash(" %10s" % grp.getgrgid(s.st_gid).gr_name)
+ except KeyError:
+ bb.warn("KeyError in %s" % path)
+ raise
+
+ update_hash(" ")
+ if stat.S_ISBLK(s.st_mode) or stat.S_ISCHR(s.st_mode):
+ update_hash("%9s" % ("%d.%d" % (os.major(s.st_rdev), os.minor(s.st_rdev))))
+ else:
+ update_hash(" " * 9)
+
+ update_hash(" ")
+ if stat.S_ISREG(s.st_mode):
+ update_hash("%10d" % s.st_size)
+ else:
+ update_hash(" " * 10)
+
+ update_hash(" ")
+ fh = hashlib.sha256()
+ if stat.S_ISREG(s.st_mode):
+ # Hash file contents
+ with open(path, 'rb') as d:
+ for chunk in iter(lambda: d.read(4096), b""):
+ fh.update(chunk)
+ update_hash(fh.hexdigest())
+ else:
+ update_hash(" " * len(fh.hexdigest()))
+
+ update_hash(" %s" % path)
+
+ if stat.S_ISLNK(s.st_mode):
+ update_hash(" -> %s" % os.readlink(path))
+
+ update_hash("\n")
+
+ # Process this directory and all its child files
+ process(root)
+ for f in files:
+ if f == 'fixmepath':
+ continue
+ process(os.path.join(root, f))
+ finally:
+ os.chdir(prev_dir)
+
+ return h.hexdigest()
+
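A hedged sketch of calling OEOuthashBasic(); the path and task name here are assumptions, d is a BitBake datastore, and passing sigfile=None skips writing the signature dump:

    # hypothetical invocation from a task context
    outhash = OEOuthashBasic(d.getVar('D'), None, 'do_install', d)
    bb.note("output hash: %s" % outhash)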
diff --git a/external/poky/meta/lib/oe/terminal.py b/external/poky/meta/lib/oe/terminal.py
index caeb5e31..eb10a6e3 100644
--- a/external/poky/meta/lib/oe/terminal.py
+++ b/external/poky/meta/lib/oe/terminal.py
@@ -1,3 +1,6 @@
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
import logging
import oe.classutils
import shlex
@@ -39,7 +42,7 @@ class Terminal(Popen, metaclass=Registry):
raise
def format_command(self, sh_cmd, title):
- fmt = {'title': title or 'Terminal', 'command': sh_cmd}
+ fmt = {'title': title or 'Terminal', 'command': sh_cmd, 'cwd': os.getcwd() }
if isinstance(self.command, str):
return shlex.split(self.command.format(**fmt))
else:
@@ -52,7 +55,7 @@ class XTerminal(Terminal):
raise UnsupportedTerminal(self.name)
class Gnome(XTerminal):
- command = 'gnome-terminal -t "{title}" -x {command}'
+ command = 'gnome-terminal -t "{title}" -- {command}'
priority = 2
def __init__(self, sh_cmd, title=None, env=None, d=None):
@@ -117,7 +120,7 @@ class Screen(Terminal):
class TmuxRunning(Terminal):
"""Open a new pane in the current running tmux window"""
name = 'tmux-running'
- command = 'tmux split-window "{command}"'
+ command = 'tmux split-window -c "{cwd}" "{command}"'
priority = 2.75
def __init__(self, sh_cmd, title=None, env=None, d=None):
@@ -135,7 +138,7 @@ class TmuxRunning(Terminal):
class TmuxNewWindow(Terminal):
"""Open a new window in the current running tmux session"""
name = 'tmux-new-window'
- command = 'tmux new-window -n "{title}" "{command}"'
+ command = 'tmux new-window -c "{cwd}" -n "{title}" "{command}"'
priority = 2.70
def __init__(self, sh_cmd, title=None, env=None, d=None):
@@ -149,7 +152,7 @@ class TmuxNewWindow(Terminal):
class Tmux(Terminal):
"""Start a new tmux session and window"""
- command = 'tmux new -d -s devshell -n devshell "{command}"'
+ command = 'tmux new -c "{cwd}" -d -s devshell -n devshell "{command}"'
priority = 0.75
def __init__(self, sh_cmd, title=None, env=None, d=None):
@@ -160,7 +163,7 @@ class Tmux(Terminal):
# devshells, if it's already there, add a new window to it.
window_name = 'devshell-%i' % os.getpid()
- self.command = 'tmux new -d -s {0} -n {0} "{{command}}"'.format(window_name)
+ self.command = 'tmux new -c "{{cwd}}" -d -s {0} -n {0} "{{command}}"'.format(window_name)
Terminal.__init__(self, sh_cmd, title, env, d)
attach_cmd = 'tmux att -t {0}'.format(window_name)
@@ -204,7 +207,10 @@ def spawn_preferred(sh_cmd, title=None, env=None, d=None):
spawn(terminal.name, sh_cmd, title, env, d)
break
except UnsupportedTerminal:
- continue
+ pass
+ except:
+ bb.warn("Terminal %s is supported but did not start" % (terminal.name))
+ # when we've run out of options
else:
raise NoSupportedTerminals(get_cmd_list())
@@ -296,6 +302,8 @@ def check_terminal_version(terminalName):
vernum = ver.split(' ')[-1]
if ver.startswith('tmux'):
vernum = ver.split()[-1]
+ if ver.startswith('tmux next-'):
+ vernum = ver.split()[-1][5:]
return vernum
def distro_name():
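The 'tmux next-' handling added above can be exercised standalone; the version banner is a made-up sample:

    ver = "tmux next-3.4"
    vernum = ver.split()[-1]
    if ver.startswith('tmux next-'):
        vernum = ver.split()[-1][5:]
    assert vernum == "3.4"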
diff --git a/external/poky/meta/lib/oe/types.py b/external/poky/meta/lib/oe/types.py
index 1eebba5a..bbbabafb 100644
--- a/external/poky/meta/lib/oe/types.py
+++ b/external/poky/meta/lib/oe/types.py
@@ -1,3 +1,7 @@
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
import errno
import re
import os
@@ -150,7 +154,8 @@ def path(value, relativeto='', normalize='true', mustexist='false'):
if boolean(mustexist):
try:
- open(value, 'r')
+ with open(value, 'r'):
+ pass
except IOError as exc:
if exc.errno == errno.ENOENT:
raise ValueError("{0}: {1}".format(value, os.strerror(errno.ENOENT)))
@@ -179,4 +184,3 @@ def qemu_use_kvm(kvm, target_arch):
elif build_arch == target_arch:
use_kvm = True
return use_kvm
-
diff --git a/external/poky/meta/lib/oe/useradd.py b/external/poky/meta/lib/oe/useradd.py
index 179ac76b..8fc77568 100644
--- a/external/poky/meta/lib/oe/useradd.py
+++ b/external/poky/meta/lib/oe/useradd.py
@@ -1,3 +1,6 @@
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
import argparse
import re
@@ -11,7 +14,7 @@ class myArgumentParser(argparse.ArgumentParser):
error(message)
def error(self, message):
- raise bb.build.FuncFailed(message)
+ bb.fatal(message)
def split_commands(params):
params = re.split('''[ \t]*;[ \t]*(?=(?:[^'"]|'[^']*'|"[^"]*")*$)''', params.strip())
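split_commands() splits on semicolons only outside quotes; a runnable illustration with invented commands:

    import re
    params = "useradd -u 1000 alice; usermod -a -G 'wheel;audio' alice"
    cmds = re.split('''[ \t]*;[ \t]*(?=(?:[^'"]|'[^']*'|"[^"]*")*$)''', params.strip())
    assert cmds == ["useradd -u 1000 alice", "usermod -a -G 'wheel;audio' alice"]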
diff --git a/external/poky/meta/lib/oe/utils.py b/external/poky/meta/lib/oe/utils.py
index 8a584d6d..13f4271d 100644
--- a/external/poky/meta/lib/oe/utils.py
+++ b/external/poky/meta/lib/oe/utils.py
@@ -1,3 +1,7 @@
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
import subprocess
import multiprocessing
import traceback
@@ -78,12 +82,12 @@ def prune_suffix(var, suffixes, d):
# See if var ends with any of the suffixes listed and
# remove it if found
for suffix in suffixes:
- if var.endswith(suffix):
- var = var.replace(suffix, "")
+ if suffix and var.endswith(suffix):
+ var = var[:-len(suffix)]
prefix = d.getVar("MLPREFIX")
if prefix and var.startswith(prefix):
- var = var.replace(prefix, "")
+ var = var[len(prefix):]
return var
@@ -165,7 +169,7 @@ def any_distro_features(d, features, truevalue="1", falsevalue=""):
"""
return bb.utils.contains_any("DISTRO_FEATURES", features, truevalue, falsevalue, d)
-def parallel_make(d):
+def parallel_make(d, makeinst=False):
"""
Return the integer value for the number of parallel threads to use when
building, scraped out of PARALLEL_MAKE. If no parallelization option is
@@ -173,7 +177,10 @@ def parallel_make(d):
e.g. if PARALLEL_MAKE = "-j 10", this will return 10 as an integer.
"""
- pm = (d.getVar('PARALLEL_MAKE') or '').split()
+ if makeinst:
+ pm = (d.getVar('PARALLEL_MAKEINST') or '').split()
+ else:
+ pm = (d.getVar('PARALLEL_MAKE') or '').split()
# look for '-j' and throw other options (e.g. '-l') away
while pm:
opt = pm.pop(0)
@@ -188,7 +195,7 @@ def parallel_make(d):
return None
-def parallel_make_argument(d, fmt, limit=None):
+def parallel_make_argument(d, fmt, limit=None, makeinst=False):
"""
Helper utility to construct a parallel make argument from the number of
parallel threads specified in PARALLEL_MAKE.
@@ -201,7 +208,7 @@ def parallel_make_argument(d, fmt, limit=None):
e.g. if PARALLEL_MAKE = "-j 10", parallel_make_argument(d, "-n %d") will return
"-n 10"
"""
- v = parallel_make(d)
+ v = parallel_make(d, makeinst)
if v:
if limit:
v = min(limit, v)
@@ -214,7 +221,7 @@ def packages_filter_out_system(d):
PN-dbg PN-doc PN-locale-eb-gb removed.
"""
pn = d.getVar('PN')
- blacklist = [pn + suffix for suffix in ('', '-dbg', '-dev', '-doc', '-locale', '-staticdev')]
+ blacklist = [pn + suffix for suffix in ('', '-dbg', '-dev', '-doc', '-locale', '-staticdev', '-src')]
localepkg = pn + "-locale-"
pkgs = []
@@ -241,9 +248,10 @@ def trim_version(version, num_parts=2):
trimmed = ".".join(parts[:num_parts])
return trimmed
-def cpu_count():
+def cpu_count(at_least=1):
import multiprocessing
- return multiprocessing.cpu_count()
+ cpus = multiprocessing.cpu_count()
+ return max(cpus, at_least)
def execute_pre_post_process(d, cmds):
if cmds is None:
@@ -307,6 +315,10 @@ def multiprocess_launch(target, items, d, extraargs=None):
p.start()
launched.append(p)
for q in launched:
+ # Have to manually call update() to avoid deadlocks. The pipe can be full and
+ # transfer stalled until we try to read the results object, but the subprocess won't exit
+ # while it still has data to write (https://bugs.python.org/issue8426)
+ q.update()
# The finished processes are joined when calling is_alive()
if not q.is_alive():
if q.exception:
@@ -320,13 +332,18 @@ def multiprocess_launch(target, items, d, extraargs=None):
if errors:
msg = ""
for (e, tb) in errors:
- msg = msg + str(e) + ": " + str(tb) + "\n"
+ if isinstance(e, subprocess.CalledProcessError) and e.output:
+ msg = msg + str(e) + "\n"
+ msg = msg + "Subprocess output:"
+ msg = msg + e.output.decode("utf-8", errors="ignore")
+ else:
+ msg = msg + str(e) + ": " + str(tb) + "\n"
bb.fatal("Fatal errors occurred in subprocesses:\n%s" % msg)
return results
def squashspaces(string):
import re
- return re.sub("\s+", " ", string).strip()
+ return re.sub(r"\s+", " ", string).strip()
def format_pkg_list(pkg_dict, ret_format=None):
output = []
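A one-liner check of the raw-string fix to squashspaces():

    import re
    def squashspaces(string):
        return re.sub(r"\s+", " ", string).strip()
    assert squashspaces("  foo \t bar\n baz  ") == "foo bar baz"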
@@ -356,6 +373,37 @@ def format_pkg_list(pkg_dict, ret_format=None):
return output_str
+
+# Helper function to get the host compiler version
+# Do not assume the compiler is gcc
+def get_host_compiler_version(d, taskcontextonly=False):
+ import re, subprocess
+
+ if taskcontextonly and d.getVar('BB_WORKERCONTEXT') != '1':
+ return
+
+ compiler = d.getVar("BUILD_CC")
+ # Get rid of ccache since it is not present when parsing.
+ if compiler.startswith('ccache '):
+ compiler = compiler[7:]
+ try:
+ env = os.environ.copy()
+ # datastore PATH does not contain session PATH as set by environment-setup-...
+ # this breaks the install-buildtools use-case
+ # env["PATH"] = d.getVar("PATH")
+ output = subprocess.check_output("%s --version" % compiler, \
+ shell=True, env=env, stderr=subprocess.STDOUT).decode("utf-8")
+ except subprocess.CalledProcessError as e:
+ bb.fatal("Error running %s --version: %s" % (compiler, e.output.decode("utf-8")))
+
+ match = re.match(r".* (\d+\.\d+)\.\d+.*", output.split('\n')[0])
+ if not match:
+ bb.fatal("Can't get compiler version from %s --version output" % compiler)
+
+ version = match.group(1)
+ return compiler, version
+
+
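The version regex above can be exercised standalone; the --version banner is hypothetical:

    import re
    first_line = "clang version 12.0.1 (example)".split('\n')[0]
    match = re.match(r".* (\d+\.\d+)\.\d+.*", first_line)
    assert match and match.group(1) == "12.0"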
def host_gcc_version(d, taskcontextonly=False):
import re, subprocess
@@ -363,14 +411,18 @@ def host_gcc_version(d, taskcontextonly=False):
return
compiler = d.getVar("BUILD_CC")
+ # Get rid of ccache since it is not present when parsing.
+ if compiler.startswith('ccache '):
+ compiler = compiler[7:]
try:
env = os.environ.copy()
env["PATH"] = d.getVar("PATH")
- output = subprocess.check_output("%s --version" % compiler, shell=True, env=env).decode("utf-8")
+ output = subprocess.check_output("%s --version" % compiler, \
+ shell=True, env=env, stderr=subprocess.STDOUT).decode("utf-8")
except subprocess.CalledProcessError as e:
bb.fatal("Error running %s --version: %s" % (compiler, e.output.decode("utf-8")))
- match = re.match(".* (\d\.\d)\.\d.*", output.split('\n')[0])
+ match = re.match(r".* (\d+\.\d+)\.\d+.*", output.split('\n')[0])
if not match:
bb.fatal("Can't get compiler version from %s --version output" % compiler)
@@ -469,7 +521,7 @@ def write_ld_so_conf(d):
f.write(d.getVar("base_libdir") + '\n')
f.write(d.getVar("libdir") + '\n')
-class ImageQAFailed(bb.build.FuncFailed):
+class ImageQAFailed(Exception):
def __init__(self, description, name=None, logfile=None):
self.description = description
self.name = name
@@ -482,3 +534,6 @@ class ImageQAFailed(bb.build.FuncFailed):
return msg
+def sh_quote(string):
+ import shlex
+ return shlex.quote(string)
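For completeness, what the new sh_quote() helper produces (shlex.quote leaves already-safe strings untouched):

    import shlex
    assert shlex.quote("foo bar") == "'foo bar'"
    assert shlex.quote("safe-name") == "safe-name"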