Diffstat (limited to 'external/poky/bitbake/lib/bb/utils.py')
-rw-r--r--  external/poky/bitbake/lib/bb/utils.py | 195
1 file changed, 126 insertions, 69 deletions
diff --git a/external/poky/bitbake/lib/bb/utils.py b/external/poky/bitbake/lib/bb/utils.py
index f5bd816c..5f5767c1 100644
--- a/external/poky/bitbake/lib/bb/utils.py
+++ b/external/poky/bitbake/lib/bb/utils.py
@@ -1,23 +1,11 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
"""
BitBake Utility Functions
"""
# Copyright (C) 2004 Michael Lauer
#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
+# SPDX-License-Identifier: GPL-2.0-only
#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import re, fcntl, os, string, stat, shutil, time
import sys
@@ -27,7 +15,8 @@ import bb
import bb.msg
import multiprocessing
import fcntl
-import imp
+import importlib
+from importlib import machinery
import itertools
import subprocess
import glob
@@ -35,7 +24,6 @@ import fnmatch
import traceback
import errno
import signal
-import ast
import collections
import copy
from subprocess import getstatusoutput
@@ -43,7 +31,7 @@ from contextlib import contextmanager
from ctypes import cdll
logger = logging.getLogger("BitBake.Util")
-python_extensions = [e for e, _, _ in imp.get_suffixes()]
+python_extensions = importlib.machinery.all_suffixes()
def clean_context():
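
The imp module that provided get_suffixes() has been deprecated since Python 3.4 (and removed in 3.12); importlib.machinery.all_suffixes() returns the same information, i.e. the source, bytecode and extension-module suffixes. A minimal stand-alone check, independent of BitBake:

import importlib.machinery

# Typically something like ['.py', '.pyc', '.so'], depending on the interpreter build.
python_extensions = importlib.machinery.all_suffixes()
assert ".py" in python_extensions
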
@@ -68,8 +56,8 @@ class VersionStringException(Exception):
def explode_version(s):
r = []
- alpha_regexp = re.compile('^([a-zA-Z]+)(.*)$')
- numeric_regexp = re.compile('^(\d+)(.*)$')
+ alpha_regexp = re.compile(r'^([a-zA-Z]+)(.*)$')
+ numeric_regexp = re.compile(r'^(\d+)(.*)$')
while (s != ''):
if s[0] in string.digits:
m = numeric_regexp.match(s)
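
The only change in this hunk is the r'' prefix: '\d' in a plain string literal is an invalid escape sequence and triggers a DeprecationWarning (a SyntaxWarning on newer Pythons), while the compiled pattern is unchanged. A quick check:

import re

numeric_regexp = re.compile(r'^(\d+)(.*)$')
assert numeric_regexp.match("10.2.1").groups() == ('10', '.2.1')
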
@@ -321,10 +309,13 @@ def better_compile(text, file, realfile, mode = "exec", lineno = 0):
error = []
# split the text into lines again
body = text.split('\n')
- error.append("Error in compiling python function in %s, line %s:\n" % (realfile, lineno))
+ error.append("Error in compiling python function in %s, line %s:\n" % (realfile, e.lineno))
if hasattr(e, "lineno"):
error.append("The code lines resulting in this error were:")
- error.extend(_print_trace(body, e.lineno))
+ # e.lineno: the failing line's position in realfile
+ # lineno: the function name's position - 1 in realfile
+ # e.lineno - lineno: the failing line's position relative to the function body
+ error.extend(_print_trace(body, e.lineno - lineno))
else:
error.append("The function causing this error was:")
for line in body:
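
A hypothetical illustration of the offset arithmetic, not BitBake's actual compile path: if the compiled text is padded so that SyntaxError line numbers match the recipe file, then subtracting lineno gives the index into the function body that _print_trace() needs.

lineno = 120                      # line before the function body in the recipe
body = ["x = 1", "y = ="]         # the function body, as split from the text
padded = "\n" * lineno + "\n".join(body)
try:
    compile(padded, "recipe.bb", "exec")
except SyntaxError as e:
    # e.lineno is file-relative (122 here); e.lineno - lineno is body-relative.
    assert body[e.lineno - lineno - 1] == "y = ="
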
@@ -402,7 +393,7 @@ def better_exec(code, context, text = None, realfile = "<code>", pythonexception
code = better_compile(code, realfile, realfile)
try:
exec(code, get_context(), context)
- except (bb.BBHandledException, bb.parse.SkipRecipe, bb.build.FuncFailed, bb.data_smart.ExpansionError):
+ except (bb.BBHandledException, bb.parse.SkipRecipe, bb.data_smart.ExpansionError):
# Error already shown so passthrough, no need for traceback
raise
except Exception as e:
@@ -436,10 +427,11 @@ def fileslocked(files):
for lockfile in files:
locks.append(bb.utils.lockfile(lockfile))
- yield
-
- for lock in locks:
- bb.utils.unlockfile(lock)
+ try:
+ yield
+ finally:
+ for lock in locks:
+ bb.utils.unlockfile(lock)
@contextmanager
def timeout(seconds):
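
The generic pattern behind the fileslocked() fix: a @contextmanager must wrap its yield in try/finally, otherwise an exception raised inside the with-body skips everything after the yield and the locks leak. A self-contained sketch with stand-ins for lockfile()/unlockfile():

from contextlib import contextmanager

held = []

def acquire(name):       # stand-in for bb.utils.lockfile()
    held.append(name)
    return name

def release(lock):       # stand-in for bb.utils.unlockfile()
    held.remove(lock)

@contextmanager
def files_locked(names):
    locks = [acquire(n) for n in names]
    try:
        yield
    finally:             # runs even when the with-body raises
        for lock in locks:
            release(lock)

try:
    with files_locked(["a.lock", "b.lock"]):
        raise RuntimeError("task failed")
except RuntimeError:
    pass
assert held == []        # both locks were released despite the exception
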
@@ -528,22 +520,26 @@ def unlockfile(lf):
fcntl.flock(lf.fileno(), fcntl.LOCK_UN)
lf.close()
-def md5_file(filename):
- """
- Return the hex string representation of the MD5 checksum of filename.
- """
- import hashlib, mmap
+def _hasher(method, filename):
+ import mmap
with open(filename, "rb") as f:
- m = hashlib.md5()
try:
with mmap.mmap(f.fileno(), 0, access=mmap.ACCESS_READ) as mm:
for chunk in iter(lambda: mm.read(8192), b''):
- m.update(chunk)
+ method.update(chunk)
except ValueError:
# You can't mmap() an empty file so silence this exception
pass
- return m.hexdigest()
+ return method.hexdigest()
+
+
+def md5_file(filename):
+ """
+ Return the hex string representation of the MD5 checksum of filename.
+ """
+ import hashlib
+ return _hasher(hashlib.md5(), filename)
def sha256_file(filename):
"""
@@ -551,24 +547,28 @@ def sha256_file(filename):
filename.
"""
import hashlib
-
- s = hashlib.sha256()
- with open(filename, "rb") as f:
- for line in f:
- s.update(line)
- return s.hexdigest()
+ return _hasher(hashlib.sha256(), filename)
def sha1_file(filename):
"""
Return the hex string representation of the SHA1 checksum of the filename
"""
import hashlib
+ return _hasher(hashlib.sha1(), filename)
- s = hashlib.sha1()
- with open(filename, "rb") as f:
- for line in f:
- s.update(line)
- return s.hexdigest()
+def sha384_file(filename):
+ """
+ Return the hex string representation of the SHA384 checksum of the filename
+ """
+ import hashlib
+ return _hasher(hashlib.sha384(), filename)
+
+def sha512_file(filename):
+ """
+ Return the hex string representation of the SHA512 checksum of the filename
+ """
+ import hashlib
+ return _hasher(hashlib.sha512(), filename)
def preserved_envvars_exported():
"""Variables which are taken from the environment and placed in and exported
@@ -685,7 +685,7 @@ def _check_unsafe_delete_path(path):
return True
return False
-def remove(path, recurse=False):
+def remove(path, recurse=False, ionice=False):
"""Equivalent to rm -f or rm -rf"""
if not path:
return
@@ -694,7 +694,10 @@ def remove(path, recurse=False):
if _check_unsafe_delete_path(path):
raise Exception('bb.utils.remove: called with dangerous path "%s" and recurse=True, refusing to delete!' % path)
# shutil.rmtree(name) would be ideal but it's too slow
- subprocess.check_call(['rm', '-rf'] + glob.glob(path))
+ cmd = []
+ if ionice:
+ cmd = ['ionice', '-c', '3']
+ subprocess.check_call(cmd + ['rm', '-rf'] + glob.glob(path))
return
for name in glob.glob(path):
try:
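
ionice -c 3 runs the rm in the kernel's idle I/O scheduling class, so a large recursive delete does not starve interactive I/O. A stand-alone sketch of the same command construction (the glob pattern is a made-up example and ionice must be installed):

import glob
import subprocess

def rm_rf(path, ionice=False):
    cmd = ['ionice', '-c', '3'] if ionice else []
    matches = glob.glob(path)
    if matches:
        subprocess.check_call(cmd + ['rm', '-rf'] + matches)

rm_rf('/tmp/example-workdir-*', ionice=True)     # hypothetical pattern
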
@@ -703,20 +706,12 @@ def remove(path, recurse=False):
if exc.errno != errno.ENOENT:
raise
-def prunedir(topdir):
+def prunedir(topdir, ionice=False):
# Delete everything reachable from the directory named in 'topdir'.
# CAUTION: This is dangerous!
if _check_unsafe_delete_path(topdir):
raise Exception('bb.utils.prunedir: called with dangerous path "%s", refusing to delete!' % topdir)
- for root, dirs, files in os.walk(topdir, topdown = False):
- for name in files:
- os.remove(os.path.join(root, name))
- for name in dirs:
- if os.path.islink(os.path.join(root, name)):
- os.remove(os.path.join(root, name))
- else:
- os.rmdir(os.path.join(root, name))
- os.rmdir(topdir)
+ remove(topdir, recurse=True, ionice=ionice)
#
# Could also use return re.compile("(%s)" % "|".join(map(re.escape, suffixes))).sub(lambda mo: "", var)
@@ -726,8 +721,8 @@ def prune_suffix(var, suffixes, d):
# See if var ends with any of the suffixes listed and
# remove it if found
for suffix in suffixes:
- if var.endswith(suffix):
- return var.replace(suffix, "")
+ if suffix and var.endswith(suffix):
+ return var[:-len(suffix)]
return var
def mkdirhier(directory):
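
Two fixes in one line: str.replace() removes every occurrence of the suffix, not just a trailing one, and the extra 'suffix and' guard avoids var[:-0] (an empty string) when a suffix is empty. For example:

suffix = "-native"
assert "gcc-native"[:-len(suffix)] == "gcc"
# The old replace()-based code would also mangle names that merely contain the suffix:
assert "lib-native-extra-native".replace(suffix, "") == "lib-extra"
assert "lib-native-extra-native"[:-len(suffix)] == "lib-native-extra"
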
@@ -738,7 +733,7 @@ def mkdirhier(directory):
try:
os.makedirs(directory)
except OSError as e:
- if e.errno != errno.EEXIST:
+ if e.errno != errno.EEXIST or not os.path.isdir(directory):
raise e
def movefile(src, dest, newmtime = None, sstat = None):
@@ -869,7 +864,7 @@ def copyfile(src, dest, newmtime = None, sstat = None):
if destexists and not stat.S_ISDIR(dstat[stat.ST_MODE]):
os.unlink(dest)
os.symlink(target, dest)
- #os.lchown(dest,sstat[stat.ST_UID],sstat[stat.ST_GID])
+ os.lchown(dest,sstat[stat.ST_UID],sstat[stat.ST_GID])
return os.lstat(dest)
except Exception as e:
logger.warning("copyfile: failed to create symlink %s to %s (%s)" % (dest, target, e))
@@ -1044,6 +1039,43 @@ def filter(variable, checkvalues, d):
checkvalues = set(checkvalues)
return ' '.join(sorted(checkvalues & val))
+
+def get_referenced_vars(start_expr, d):
+ """
+ :return: names of vars referenced in start_expr (recursively), in quasi-BFS order (variables within the same level
+ are ordered arbitrarily)
+ """
+
+ seen = set()
+ ret = []
+
+ # The first entry in the queue is the unexpanded start expression
+ queue = collections.deque([start_expr])
+ # Subsequent entries will be variable names, so we need to track whether or not entry requires getVar
+ is_first = True
+
+ empty_data = bb.data.init()
+ while queue:
+ entry = queue.popleft()
+ if is_first:
+ # Entry is the start expression - no expansion needed
+ is_first = False
+ expression = entry
+ else:
+ # This is a variable name - need to get the value
+ expression = d.getVar(entry, False)
+ ret.append(entry)
+
+ # expandWithRefs is how we actually get the referenced variables in the expression. We call it using an empty
+ # data store because we only want the variables directly used in the expression. It returns a set, which is what
+ # dooms us to only ever be "quasi-BFS" rather than full BFS.
+ new_vars = empty_data.expandWithRefs(expression, None).references - set(seen)
+
+ queue.extend(new_vars)
+ seen.update(new_vars)
+ return ret
+
+
def cpu_count():
return multiprocessing.cpu_count()
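
A usage sketch for the new helper, assuming a BitBake environment where bb.data and bb.utils are importable; the variable names are made up:

import bb.data
import bb.utils

d = bb.data.init()
d.setVar("FOO", "${BAR} ${BAZ}")
d.setVar("BAR", "${QUX}")
d.setVar("BAZ", "x")
d.setVar("QUX", "y")

# Quasi-BFS: FOO first, then BAR and BAZ in arbitrary order, then QUX.
print(bb.utils.get_referenced_vars("${FOO}", d))
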
@@ -1161,14 +1193,14 @@ def edit_metadata(meta_lines, variables, varfunc, match_overrides=False):
var_res = {}
if match_overrides:
- override_re = '(_[a-zA-Z0-9-_$(){}]+)?'
+ override_re = r'(_[a-zA-Z0-9-_$(){}]+)?'
else:
override_re = ''
for var in variables:
if var.endswith('()'):
- var_res[var] = re.compile('^(%s%s)[ \\t]*\([ \\t]*\)[ \\t]*{' % (var[:-2].rstrip(), override_re))
+ var_res[var] = re.compile(r'^(%s%s)[ \\t]*\([ \\t]*\)[ \\t]*{' % (var[:-2].rstrip(), override_re))
else:
- var_res[var] = re.compile('^(%s%s)[ \\t]*[?+:.]*=[+.]*[ \\t]*(["\'])' % (var, override_re))
+ var_res[var] = re.compile(r'^(%s%s)[ \\t]*[?+:.]*=[+.]*[ \\t]*(["\'])' % (var, override_re))
updated = False
varset_start = ''
@@ -1505,6 +1537,8 @@ def ioprio_set(who, cls, value):
NR_ioprio_set = 251
elif _unamearch[0] == "i" and _unamearch[2:3] == "86":
NR_ioprio_set = 289
+ elif _unamearch == "aarch64":
+ NR_ioprio_set = 30
if NR_ioprio_set:
ioprio = value | (cls << IOPRIO_CLASS_SHIFT)
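
30 is the ioprio_set system call number on aarch64 (the asm-generic table). The ioprio argument packs the scheduling class and the priority into one integer via IOPRIO_CLASS_SHIFT, which is 13 as in the kernel's ioprio.h; for example, idle class with priority 0:

IOPRIO_CLASS_SHIFT = 13
IOPRIO_CLASS_IDLE = 3

value = 0                                    # priority within the class
ioprio = value | (IOPRIO_CLASS_IDLE << IOPRIO_CLASS_SHIFT)
assert ioprio == 0x6000                      # 24576, the value handed to the syscall
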
@@ -1548,12 +1582,9 @@ def export_proxies(d):
def load_plugins(logger, plugins, pluginpath):
def load_plugin(name):
logger.debug(1, 'Loading plugin %s' % name)
- fp, pathname, description = imp.find_module(name, [pluginpath])
- try:
- return imp.load_module(name, fp, pathname, description)
- finally:
- if fp:
- fp.close()
+ spec = importlib.machinery.PathFinder.find_spec(name, path=[pluginpath] )
+ if spec:
+ return spec.loader.load_module()
logger.debug(1, 'Loading plugins from %s...' % pluginpath)
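
PathFinder.find_spec() is the importlib replacement for imp.find_module(). Note that Loader.load_module() has itself been deprecated since Python 3.4; a sketch of the non-deprecated spelling, shown here only for comparison:

import importlib.machinery
import importlib.util

def load_plugin(name, pluginpath):
    spec = importlib.machinery.PathFinder.find_spec(name, path=[pluginpath])
    if spec:
        mod = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(mod)
        return mod
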
@@ -1580,3 +1611,29 @@ class LogCatcher(logging.Handler):
self.messages.append(bb.build.logformatter.format(record))
def contains(self, message):
return (message in self.messages)
+
+def is_semver(version):
+ """
+ Is the version string a valid semantic version (semver)?
+
+ https://semver.org/spec/v2.0.0.html
+ """
+ regex = re.compile(
+ r"""
+ ^
+ (0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)
+ (?:-(
+ (?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)
+ (?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*
+ ))?
+ (?:\+(
+ [0-9a-zA-Z-]+
+ (?:\.[0-9a-zA-Z-]+)*
+ ))?
+ $
+ """, re.VERBOSE)
+
+ if regex.match(version) is None:
+ return False
+
+ return True
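
A few checks against the grammar above, assuming bb.utils is importable:

import bb.utils

assert bb.utils.is_semver("1.0.0")
assert bb.utils.is_semver("1.0.0-alpha.1+build.5")
assert not bb.utils.is_semver("1.0")         # major.minor.patch is required
assert not bb.utils.is_semver("01.0.0")      # no leading zeros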