Diffstat (limited to 'external/poky/scripts/lib')
-rw-r--r--  external/poky/scripts/lib/argparse_oe.py | 4
-rw-r--r--  external/poky/scripts/lib/build_perf/__init__.py | 9
-rw-r--r--  external/poky/scripts/lib/build_perf/html.py | 9
-rw-r--r--  external/poky/scripts/lib/build_perf/report.py | 9
-rw-r--r--  external/poky/scripts/lib/buildstats.py | 15
-rw-r--r--  external/poky/scripts/lib/checklayer/__init__.py | 30
-rw-r--r--  external/poky/scripts/lib/checklayer/case.py | 4
-rw-r--r--  external/poky/scripts/lib/checklayer/cases/bsp.py | 4
-rw-r--r--  external/poky/scripts/lib/checklayer/cases/common.py | 6
-rw-r--r--  external/poky/scripts/lib/checklayer/cases/distro.py | 4
-rw-r--r--  external/poky/scripts/lib/checklayer/context.py | 4
-rw-r--r--  external/poky/scripts/lib/devtool/__init__.py | 12
-rw-r--r--  external/poky/scripts/lib/devtool/build.py | 25
-rw-r--r--  external/poky/scripts/lib/devtool/build_image.py | 12
-rw-r--r--  external/poky/scripts/lib/devtool/build_sdk.py | 12
-rw-r--r--  external/poky/scripts/lib/devtool/deploy.py | 40
-rw-r--r--  external/poky/scripts/lib/devtool/export.py | 12
-rw-r--r--  external/poky/scripts/lib/devtool/import.py | 12
-rw-r--r--  external/poky/scripts/lib/devtool/menuconfig.py | 79
-rw-r--r--  external/poky/scripts/lib/devtool/package.py | 12
-rw-r--r--  external/poky/scripts/lib/devtool/runqemu.py | 12
-rw-r--r--  external/poky/scripts/lib/devtool/sdk.py | 12
-rw-r--r--  external/poky/scripts/lib/devtool/search.py | 12
-rw-r--r--  external/poky/scripts/lib/devtool/standard.py | 252
-rw-r--r--  external/poky/scripts/lib/devtool/upgrade.py | 70
-rw-r--r--  external/poky/scripts/lib/devtool/utilcmds.py | 12
-rw-r--r--  external/poky/scripts/lib/recipetool/append.py | 12
-rw-r--r--  external/poky/scripts/lib/recipetool/create.py | 42
-rw-r--r--  external/poky/scripts/lib/recipetool/create_buildsys.py | 29
-rw-r--r--  external/poky/scripts/lib/recipetool/create_buildsys_python.py | 43
-rw-r--r--  external/poky/scripts/lib/recipetool/create_kernel.py | 12
-rw-r--r--  external/poky/scripts/lib/recipetool/create_kmod.py | 12
-rw-r--r--  external/poky/scripts/lib/recipetool/create_npm.py | 526
-rw-r--r--  external/poky/scripts/lib/recipetool/edit.py | 14
-rw-r--r--  external/poky/scripts/lib/recipetool/newappend.py | 12
-rw-r--r--  external/poky/scripts/lib/recipetool/setvar.py | 12
-rw-r--r--  external/poky/scripts/lib/resulttool/log.py | 84
-rwxr-xr-x  external/poky/scripts/lib/resulttool/manualexecution.py | 41
-rw-r--r--  external/poky/scripts/lib/resulttool/merge.py | 30
-rw-r--r--  external/poky/scripts/lib/resulttool/regression.py | 10
-rw-r--r--  external/poky/scripts/lib/resulttool/report.py | 234
-rw-r--r--  external/poky/scripts/lib/resulttool/resultutils.py | 85
-rw-r--r--  external/poky/scripts/lib/resulttool/store.py | 24
-rw-r--r--  external/poky/scripts/lib/resulttool/template/test_report_full_text.txt | 47
-rw-r--r--  external/poky/scripts/lib/scriptpath.py | 12
-rw-r--r--  external/poky/scripts/lib/scriptutils.py | 79
-rw-r--r--  external/poky/scripts/lib/wic/__init__.py | 14
-rw-r--r--  external/poky/scripts/lib/wic/canned-wks/qemuriscv.wks | 3
-rw-r--r--  external/poky/scripts/lib/wic/canned-wks/qemux86-directdisk.wks | 2
-rw-r--r--  external/poky/scripts/lib/wic/engine.py | 129
-rw-r--r--  external/poky/scripts/lib/wic/filemap.py | 93
-rw-r--r--  external/poky/scripts/lib/wic/help.py | 123
-rw-r--r--  external/poky/scripts/lib/wic/ksparser.py | 42
-rw-r--r--  external/poky/scripts/lib/wic/misc.py | 16
-rw-r--r--  external/poky/scripts/lib/wic/partition.py | 47
-rw-r--r--  external/poky/scripts/lib/wic/pluginbase.py | 25
-rw-r--r--  external/poky/scripts/lib/wic/plugins/imager/direct.py | 101
-rw-r--r--  external/poky/scripts/lib/wic/plugins/source/bootimg-biosplusefi.py | 213
-rw-r--r--  external/poky/scripts/lib/wic/plugins/source/bootimg-efi.py | 88
-rw-r--r--  external/poky/scripts/lib/wic/plugins/source/bootimg-partition.py | 15
-rw-r--r--  external/poky/scripts/lib/wic/plugins/source/bootimg-pcbios.py | 26
-rw-r--r--  external/poky/scripts/lib/wic/plugins/source/isoimage-isohybrid.py | 55
-rw-r--r--  external/poky/scripts/lib/wic/plugins/source/rawcopy.py | 20
-rw-r--r--  external/poky/scripts/lib/wic/plugins/source/rootfs.py | 23
64 files changed, 1806 insertions(+), 1273 deletions(-)
diff --git a/external/poky/scripts/lib/argparse_oe.py b/external/poky/scripts/lib/argparse_oe.py
index 9bdfc1ce..94a4ac50 100644
--- a/external/poky/scripts/lib/argparse_oe.py
+++ b/external/poky/scripts/lib/argparse_oe.py
@@ -1,3 +1,7 @@
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
import sys
import argparse
from collections import defaultdict, OrderedDict
diff --git a/external/poky/scripts/lib/build_perf/__init__.py b/external/poky/scripts/lib/build_perf/__init__.py
index 1f8b7290..dcbb7804 100644
--- a/external/poky/scripts/lib/build_perf/__init__.py
+++ b/external/poky/scripts/lib/build_perf/__init__.py
@@ -1,14 +1,7 @@
#
# Copyright (c) 2017, Intel Corporation.
#
-# This program is free software; you can redistribute it and/or modify it
-# under the terms and conditions of the GNU General Public License,
-# version 2, as published by the Free Software Foundation.
-#
-# This program is distributed in the hope it will be useful, but WITHOUT
-# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
-# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
-# more details.
+# SPDX-License-Identifier: GPL-2.0-only
#
"""Build performance test library functions"""
diff --git a/external/poky/scripts/lib/build_perf/html.py b/external/poky/scripts/lib/build_perf/html.py
index 578bb162..d1273c9c 100644
--- a/external/poky/scripts/lib/build_perf/html.py
+++ b/external/poky/scripts/lib/build_perf/html.py
@@ -1,14 +1,7 @@
#
# Copyright (c) 2017, Intel Corporation.
#
-# This program is free software; you can redistribute it and/or modify it
-# under the terms and conditions of the GNU General Public License,
-# version 2, as published by the Free Software Foundation.
-#
-# This program is distributed in the hope it will be useful, but WITHOUT
-# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
-# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
-# more details.
+# SPDX-License-Identifier: GPL-2.0-only
#
"""Helper module for HTML reporting"""
from jinja2 import Environment, PackageLoader
diff --git a/external/poky/scripts/lib/build_perf/report.py b/external/poky/scripts/lib/build_perf/report.py
index d99a3679..4e8e2a8a 100644
--- a/external/poky/scripts/lib/build_perf/report.py
+++ b/external/poky/scripts/lib/build_perf/report.py
@@ -1,14 +1,7 @@
#
# Copyright (c) 2017, Intel Corporation.
#
-# This program is free software; you can redistribute it and/or modify it
-# under the terms and conditions of the GNU General Public License,
-# version 2, as published by the Free Software Foundation.
-#
-# This program is distributed in the hope it will be useful, but WITHOUT
-# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
-# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
-# more details.
+# SPDX-License-Identifier: GPL-2.0-only
#
"""Handling of build perf test reports"""
from collections import OrderedDict, Mapping, namedtuple
diff --git a/external/poky/scripts/lib/buildstats.py b/external/poky/scripts/lib/buildstats.py
index f7db3eaf..c69b5bf4 100644
--- a/external/poky/scripts/lib/buildstats.py
+++ b/external/poky/scripts/lib/buildstats.py
@@ -1,14 +1,7 @@
#
# Copyright (c) 2017, Intel Corporation.
#
-# This program is free software; you can redistribute it and/or modify it
-# under the terms and conditions of the GNU General Public License,
-# version 2, as published by the Free Software Foundation.
-#
-# This program is distributed in the hope it will be useful, but WITHOUT
-# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
-# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
-# more details.
+# SPDX-License-Identifier: GPL-2.0-only
#
"""Functionality for analyzing buildstats"""
import json
@@ -268,13 +261,17 @@ class BuildStats(dict):
self[pkg].aggregate(data)
-def diff_buildstats(bs1, bs2, stat_attr, min_val=None, min_absdiff=None):
+def diff_buildstats(bs1, bs2, stat_attr, min_val=None, min_absdiff=None, only_tasks=[]):
"""Compare the tasks of two buildstats"""
tasks_diff = []
pkgs = set(bs1.keys()).union(set(bs2.keys()))
for pkg in pkgs:
tasks1 = bs1[pkg].tasks if pkg in bs1 else {}
tasks2 = bs2[pkg].tasks if pkg in bs2 else {}
+ if only_tasks:
+ tasks1 = {k: v for k, v in tasks1.items() if k in only_tasks}
+ tasks2 = {k: v for k, v in tasks2.items() if k in only_tasks}
+
if not tasks1:
pkg_op = '+'
elif not tasks2:
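The new only_tasks parameter lets callers narrow a buildstats comparison to specific tasks. A minimal sketch of the call pattern; the snapshot paths, the BuildStats.from_dir() loader and the 'cputime' attribute are assumptions drawn from how the buildstats-diff tooling drives this module, not guarantees of this patch:

    # Sketch only: compare cputime for selected tasks between two snapshots.
    # Paths, the from_dir() loader and 'cputime' are illustrative assumptions.
    from buildstats import BuildStats, diff_buildstats

    bs1 = BuildStats.from_dir('tmp/buildstats/20200101000000')
    bs2 = BuildStats.from_dir('tmp/buildstats/20200102000000')

    for task_diff in diff_buildstats(bs1, bs2, 'cputime',
                                     only_tasks=['do_compile', 'do_package']):
        print(task_diff)

Passing an empty list keeps the previous behaviour of comparing every task.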
diff --git a/external/poky/scripts/lib/checklayer/__init__.py b/external/poky/scripts/lib/checklayer/__init__.py
index 670f0eea..fe545607 100644
--- a/external/poky/scripts/lib/checklayer/__init__.py
+++ b/external/poky/scripts/lib/checklayer/__init__.py
@@ -1,7 +1,9 @@
# Yocto Project layer check tool
#
# Copyright (C) 2017 Intel Corporation
-# Released under the MIT license (see COPYING.MIT)
+#
+# SPDX-License-Identifier: MIT
+#
import os
import re
@@ -57,9 +59,14 @@ def _get_layer_collections(layer_path, lconf=None, data=None):
pattern = ldata.getVar('BBFILE_PATTERN_%s' % name)
depends = ldata.getVar('LAYERDEPENDS_%s' % name)
compat = ldata.getVar('LAYERSERIES_COMPAT_%s' % name)
+ try:
+ depDict = bb.utils.explode_dep_versions2(depends or "")
+ except bb.utils.VersionStringException as vse:
+ bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (name, str(vse)))
+
collections[name]['priority'] = priority
collections[name]['pattern'] = pattern
- collections[name]['depends'] = depends
+ collections[name]['depends'] = ' '.join(depDict.keys())
collections[name]['compat'] = compat
return collections
@@ -141,6 +148,9 @@ def detect_layers(layer_directories, no_auto):
def _find_layer_depends(depend, layers):
for layer in layers:
+ if 'collections' not in layer:
+ continue
+
for collection in layer['collections']:
if depend == collection:
return layer
@@ -219,6 +229,20 @@ def add_layers(bblayersconf, layers, logger):
f.write("\nBBLAYERS += \"%s\"\n" % path)
return True
+def check_bblayers(bblayersconf, layer_path, logger):
+ '''
+ If layer_path found in BBLAYERS return True
+ '''
+ import bb.parse
+ import bb.data
+
+ ldata = bb.parse.handle(bblayersconf, bb.data.init(), include=True)
+ for bblayer in (ldata.getVar('BBLAYERS') or '').split():
+ if os.path.normpath(bblayer) == os.path.normpath(layer_path):
+ return True
+
+ return False
+
def check_command(error_msg, cmd, cwd=None):
'''
Run a command under a shell, capture stdout and stderr in a single stream,
@@ -243,7 +267,7 @@ def get_signatures(builddir, failsafe=False, machine=None):
sigs = {}
tune2tasks = {}
- cmd = ''
+ cmd = 'BB_ENV_EXTRAWHITE="$BB_ENV_EXTRAWHITE BB_SIGNATURE_HANDLER" BB_SIGNATURE_HANDLER="OEBasicHash" '
if machine:
cmd += 'MACHINE=%s ' % machine
cmd += 'bitbake '
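The new check_bblayers() helper lets the layer checking tool detect whether a layer is already listed in BBLAYERS before trying to add it. A rough sketch of the intended call pattern; the conf path and layer dictionary below are placeholders, not values taken from this patch:

    # Sketch only: add a layer to bblayers.conf unless it is already listed.
    import logging
    from checklayer import add_layers, check_bblayers

    logger = logging.getLogger('yocto-check-layer')
    bblayersconf = 'build/conf/bblayers.conf'          # placeholder path
    layer = {'name': 'meta-example',
             'path': '/srv/layers/meta-example'}       # placeholder layer

    if check_bblayers(bblayersconf, layer['path'], logger):
        logger.info('%s already in BBLAYERS, skipping', layer['path'])
    else:
        add_layers(bblayersconf, [layer], logger)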
diff --git a/external/poky/scripts/lib/checklayer/case.py b/external/poky/scripts/lib/checklayer/case.py
index 9dd00412..fa9dee38 100644
--- a/external/poky/scripts/lib/checklayer/case.py
+++ b/external/poky/scripts/lib/checklayer/case.py
@@ -1,5 +1,7 @@
# Copyright (C) 2017 Intel Corporation
-# Released under the MIT license (see COPYING.MIT)
+#
+# SPDX-License-Identifier: MIT
+#
from oeqa.core.case import OETestCase
diff --git a/external/poky/scripts/lib/checklayer/cases/bsp.py b/external/poky/scripts/lib/checklayer/cases/bsp.py
index b6b611be..7fd56f5d 100644
--- a/external/poky/scripts/lib/checklayer/cases/bsp.py
+++ b/external/poky/scripts/lib/checklayer/cases/bsp.py
@@ -1,5 +1,7 @@
# Copyright (C) 2017 Intel Corporation
-# Released under the MIT license (see COPYING.MIT)
+#
+# SPDX-License-Identifier: MIT
+#
import unittest
diff --git a/external/poky/scripts/lib/checklayer/cases/common.py b/external/poky/scripts/lib/checklayer/cases/common.py
index 1bef61b0..b82304e3 100644
--- a/external/poky/scripts/lib/checklayer/cases/common.py
+++ b/external/poky/scripts/lib/checklayer/cases/common.py
@@ -1,5 +1,7 @@
# Copyright (C) 2017 Intel Corporation
-# Released under the MIT license (see COPYING.MIT)
+#
+# SPDX-License-Identifier: MIT
+#
import glob
import os
@@ -10,7 +12,7 @@ from checklayer.case import OECheckLayerTestCase
class CommonCheckLayer(OECheckLayerTestCase):
def test_readme(self):
# The top-level README file may have a suffix (like README.rst or README.txt).
- readme_files = glob.glob(os.path.join(self.tc.layer['path'], 'README*'))
+ readme_files = glob.glob(os.path.join(self.tc.layer['path'], '[Rr][Ee][Aa][Dd][Mm][Ee]*'))
self.assertTrue(len(readme_files) > 0,
msg="Layer doesn't contains README file.")
diff --git a/external/poky/scripts/lib/checklayer/cases/distro.py b/external/poky/scripts/lib/checklayer/cases/distro.py
index df1b3035..f0bee549 100644
--- a/external/poky/scripts/lib/checklayer/cases/distro.py
+++ b/external/poky/scripts/lib/checklayer/cases/distro.py
@@ -1,5 +1,7 @@
# Copyright (C) 2017 Intel Corporation
-# Released under the MIT license (see COPYING.MIT)
+#
+# SPDX-License-Identifier: MIT
+#
import unittest
diff --git a/external/poky/scripts/lib/checklayer/context.py b/external/poky/scripts/lib/checklayer/context.py
index 1bec2c41..4de8f668 100644
--- a/external/poky/scripts/lib/checklayer/context.py
+++ b/external/poky/scripts/lib/checklayer/context.py
@@ -1,5 +1,7 @@
# Copyright (C) 2017 Intel Corporation
-# Released under the MIT license (see COPYING.MIT)
+#
+# SPDX-License-Identifier: MIT
+#
import os
import sys
diff --git a/external/poky/scripts/lib/devtool/__init__.py b/external/poky/scripts/lib/devtool/__init__.py
index 8fc7fffc..d39c474f 100644
--- a/external/poky/scripts/lib/devtool/__init__.py
+++ b/external/poky/scripts/lib/devtool/__init__.py
@@ -4,18 +4,8 @@
#
# Copyright (C) 2014 Intel Corporation
#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
+# SPDX-License-Identifier: GPL-2.0-only
#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""Devtool plugins module"""
import os
diff --git a/external/poky/scripts/lib/devtool/build.py b/external/poky/scripts/lib/devtool/build.py
index ba9593f1..935ffab4 100644
--- a/external/poky/scripts/lib/devtool/build.py
+++ b/external/poky/scripts/lib/devtool/build.py
@@ -2,18 +2,8 @@
#
# Copyright (C) 2014-2015 Intel Corporation
#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
+# SPDX-License-Identifier: GPL-2.0-only
#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""Devtool build plugin"""
import os
@@ -21,7 +11,8 @@ import bb
import logging
import argparse
import tempfile
-from devtool import exec_build_env_command, check_workspace_recipe, DevtoolError
+from devtool import exec_build_env_command, setup_tinfoil, check_workspace_recipe, DevtoolError
+from devtool import parse_recipe
logger = logging.getLogger('devtool')
@@ -53,12 +44,22 @@ def _get_build_tasks(config):
def build(args, config, basepath, workspace):
"""Entry point for the devtool 'build' subcommand"""
workspacepn = check_workspace_recipe(workspace, args.recipename, bbclassextend=True)
+ tinfoil = setup_tinfoil(config_only=False, basepath=basepath)
+ try:
+ rd = parse_recipe(config, tinfoil, args.recipename, appends=True, filter_workspace=False)
+ if not rd:
+ return 1
+ deploytask = 'do_deploy' in rd.getVar('__BBTASKS')
+ finally:
+ tinfoil.shutdown()
if args.clean:
# use clean instead of cleansstate to avoid messing things up in eSDK
build_tasks = ['do_clean']
else:
build_tasks = _get_build_tasks(config)
+ if deploytask:
+ build_tasks.append('do_deploy')
bbappend = workspace[workspacepn]['bbappend']
if args.disable_parallel_make:
diff --git a/external/poky/scripts/lib/devtool/build_image.py b/external/poky/scripts/lib/devtool/build_image.py
index e5810389..9388abba 100644
--- a/external/poky/scripts/lib/devtool/build_image.py
+++ b/external/poky/scripts/lib/devtool/build_image.py
@@ -2,18 +2,8 @@
#
# Copyright (C) 2015 Intel Corporation
#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
+# SPDX-License-Identifier: GPL-2.0-only
#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""Devtool plugin containing the build-image subcommand."""
diff --git a/external/poky/scripts/lib/devtool/build_sdk.py b/external/poky/scripts/lib/devtool/build_sdk.py
index b89d65b0..6fe02fff 100644
--- a/external/poky/scripts/lib/devtool/build_sdk.py
+++ b/external/poky/scripts/lib/devtool/build_sdk.py
@@ -2,18 +2,8 @@
#
# Copyright (C) 2015-2016 Intel Corporation
#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
+# SPDX-License-Identifier: GPL-2.0-only
#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import os
import subprocess
diff --git a/external/poky/scripts/lib/devtool/deploy.py b/external/poky/scripts/lib/devtool/deploy.py
index 886004b5..6a997735 100644
--- a/external/poky/scripts/lib/devtool/deploy.py
+++ b/external/poky/scripts/lib/devtool/deploy.py
@@ -2,18 +2,8 @@
#
# Copyright (C) 2014-2016 Intel Corporation
#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
+# SPDX-License-Identifier: GPL-2.0-only
#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""Devtool plugin containing the deploy subcommands"""
import logging
@@ -211,12 +201,20 @@ def deploy(args, config, basepath, workspace):
if not args.show_status:
extraoptions += ' -q'
+ scp_sshexec = ''
+ ssh_sshexec = 'ssh'
+ if args.ssh_exec:
+ scp_sshexec = "-S %s" % args.ssh_exec
+ ssh_sshexec = args.ssh_exec
scp_port = ''
ssh_port = ''
if args.port:
scp_port = "-P %s" % args.port
ssh_port = "-p %s" % args.port
+ if args.key:
+ extraoptions += ' -i %s' % args.key
+
# In order to delete previously deployed files and have the manifest file on
# the target, we write out a shell script and then copy it to the target
# so we can then run it (piping tar output to it).
@@ -238,7 +236,7 @@ def deploy(args, config, basepath, workspace):
for fpath, fsize in filelist:
f.write('%s %d\n' % (fpath, fsize))
# Copy them to the target
- ret = subprocess.call("scp %s %s %s/* %s:%s" % (scp_port, extraoptions, tmpdir, args.target, os.path.dirname(tmpscript)), shell=True)
+ ret = subprocess.call("scp %s %s %s %s/* %s:%s" % (scp_sshexec, scp_port, extraoptions, tmpdir, args.target, os.path.dirname(tmpscript)), shell=True)
if ret != 0:
raise DevtoolError('Failed to copy script to %s - rerun with -s to '
'get a complete error message' % args.target)
@@ -246,7 +244,7 @@ def deploy(args, config, basepath, workspace):
shutil.rmtree(tmpdir)
# Now run the script
- ret = exec_fakeroot(rd, 'tar cf - . | ssh %s %s %s \'sh %s %s %s %s\'' % (ssh_port, extraoptions, args.target, tmpscript, args.recipename, destdir, tmpfilelist), cwd=recipe_outdir, shell=True)
+ ret = exec_fakeroot(rd, 'tar cf - . | %s %s %s %s \'sh %s %s %s %s\'' % (ssh_sshexec, ssh_port, extraoptions, args.target, tmpscript, args.recipename, destdir, tmpfilelist), cwd=recipe_outdir, shell=True)
if ret != 0:
raise DevtoolError('Deploy failed - rerun with -s to get a complete '
'error message')
@@ -276,6 +274,11 @@ def undeploy(args, config, basepath, workspace):
if not args.show_status:
extraoptions += ' -q'
+ scp_sshexec = ''
+ ssh_sshexec = 'ssh'
+ if args.ssh_exec:
+ scp_sshexec = "-S %s" % args.ssh_exec
+ ssh_sshexec = args.ssh_exec
scp_port = ''
ssh_port = ''
if args.port:
@@ -292,7 +295,7 @@ def undeploy(args, config, basepath, workspace):
with open(os.path.join(tmpdir, os.path.basename(tmpscript)), 'w') as f:
f.write(shellscript)
# Copy it to the target
- ret = subprocess.call("scp %s %s %s/* %s:%s" % (scp_port, extraoptions, tmpdir, args.target, os.path.dirname(tmpscript)), shell=True)
+ ret = subprocess.call("scp %s %s %s %s/* %s:%s" % (scp_sshexec, scp_port, extraoptions, tmpdir, args.target, os.path.dirname(tmpscript)), shell=True)
if ret != 0:
raise DevtoolError('Failed to copy script to %s - rerun with -s to '
'get a complete error message' % args.target)
@@ -300,7 +303,7 @@ def undeploy(args, config, basepath, workspace):
shutil.rmtree(tmpdir)
# Now run the script
- ret = subprocess.call('ssh %s %s %s \'sh %s %s\'' % (ssh_port, extraoptions, args.target, tmpscript, args.recipename), shell=True)
+ ret = subprocess.call('%s %s %s %s \'sh %s %s\'' % (ssh_sshexec, ssh_port, extraoptions, args.target, tmpscript, args.recipename), shell=True)
if ret != 0:
raise DevtoolError('Undeploy failed - rerun with -s to get a complete '
'error message')
@@ -324,7 +327,10 @@ def register_commands(subparsers, context):
parser_deploy.add_argument('-n', '--dry-run', help='List files to be deployed only', action='store_true')
parser_deploy.add_argument('-p', '--no-preserve', help='Do not preserve existing files', action='store_true')
parser_deploy.add_argument('--no-check-space', help='Do not check for available space before deploying', action='store_true')
+ parser_deploy.add_argument('-e', '--ssh-exec', help='Executable to use in place of ssh')
parser_deploy.add_argument('-P', '--port', help='Specify port to use for connection to the target')
+ parser_deploy.add_argument('-I', '--key',
help='Specify ssh private key for connection to the target')
strip_opts = parser_deploy.add_mutually_exclusive_group(required=False)
strip_opts.add_argument('-S', '--strip',
@@ -346,5 +352,9 @@ def register_commands(subparsers, context):
parser_undeploy.add_argument('-s', '--show-status', help='Show progress/status output', action='store_true')
parser_undeploy.add_argument('-a', '--all', help='Undeploy all recipes deployed on the target', action='store_true')
parser_undeploy.add_argument('-n', '--dry-run', help='List files to be undeployed only', action='store_true')
+ parser_undeploy.add_argument('-e', '--ssh-exec', help='Executable to use in place of ssh')
parser_undeploy.add_argument('-P', '--port', help='Specify port to use for connection to the target')
+ parser_undeploy.add_argument('-I', '--key',
help='Specify ssh private key for connection to the target')
+
parser_undeploy.set_defaults(func=undeploy)
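Together, -e/--ssh-exec and -I/--key allow deployment through an alternative ssh client and with a dedicated private key, for example devtool deploy-target -e dbclient -I ~/.ssh/id_devtool somerecipe root@192.168.7.2. A small sketch of how the two flags feed the commands built above; the program name and key path are made-up values, not defaults from this patch:

    # Sketch only: mirrors how deploy()/undeploy() fold the new options into
    # the scp/ssh command strings. 'dbclient' and the key path are examples.
    ssh_exec = 'dbclient'                      # from -e/--ssh-exec
    key = '/home/builder/.ssh/id_devtool'      # from -I/--key

    scp_sshexec = '-S %s' % ssh_exec           # scp selects its transport via -S
    ssh_sshexec = ssh_exec                     # the ssh command itself is replaced
    extraoptions = ' -i %s' % key              # key is appended to extraoptions

    print('scp %s %s <files> <target>:<dir>' % (scp_sshexec, extraoptions))
    print("%s %s <target> 'sh <script>'" % (ssh_sshexec, extraoptions))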
diff --git a/external/poky/scripts/lib/devtool/export.py b/external/poky/scripts/lib/devtool/export.py
index 35349e2c..01174eda 100644
--- a/external/poky/scripts/lib/devtool/export.py
+++ b/external/poky/scripts/lib/devtool/export.py
@@ -2,18 +2,8 @@
#
# Copyright (C) 2014-2017 Intel Corporation
#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
+# SPDX-License-Identifier: GPL-2.0-only
#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""Devtool export plugin"""
import os
diff --git a/external/poky/scripts/lib/devtool/import.py b/external/poky/scripts/lib/devtool/import.py
index 4264b7d8..68298516 100644
--- a/external/poky/scripts/lib/devtool/import.py
+++ b/external/poky/scripts/lib/devtool/import.py
@@ -2,18 +2,8 @@
#
# Copyright (C) 2014-2017 Intel Corporation
#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
+# SPDX-License-Identifier: GPL-2.0-only
#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""Devtool import plugin"""
import os
diff --git a/external/poky/scripts/lib/devtool/menuconfig.py b/external/poky/scripts/lib/devtool/menuconfig.py
new file mode 100644
index 00000000..95384c53
--- /dev/null
+++ b/external/poky/scripts/lib/devtool/menuconfig.py
@@ -0,0 +1,79 @@
+# OpenEmbedded Development tool - menuconfig command plugin
+#
+# Copyright (C) 2018 Xilinx
+# Written by: Chandana Kalluri <ckalluri@xilinx.com>
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+"""Devtool menuconfig plugin"""
+
+import os
+import bb
+import logging
+import argparse
+import re
+import glob
+from devtool import setup_tinfoil, parse_recipe, DevtoolError, standard, exec_build_env_command
+from devtool import check_workspace_recipe
+logger = logging.getLogger('devtool')
+
+def menuconfig(args, config, basepath, workspace):
+ """Entry point for the devtool 'menuconfig' subcommand"""
+
+ rd = ""
+ kconfigpath = ""
+ pn_src = ""
+ localfilesdir = ""
+ workspace_dir = ""
+ tinfoil = setup_tinfoil(basepath=basepath)
+ try:
+ rd = parse_recipe(config, tinfoil, args.component, appends=True, filter_workspace=False)
+ if not rd:
+ return 1
+
+ check_workspace_recipe(workspace, args.component)
+ pn = rd.getVar('PN', True)
+
+ if not rd.getVarFlag('do_menuconfig','task'):
+ raise DevtoolError("This recipe does not support menuconfig option")
+
+ workspace_dir = os.path.join(config.workspace_path,'sources')
+ kconfigpath = rd.getVar('B')
+ pn_src = os.path.join(workspace_dir,pn)
+
+ # add check to see if oe_local_files exists or not
+ localfilesdir = os.path.join(pn_src,'oe-local-files')
+ if not os.path.exists(localfilesdir):
+ bb.utils.mkdirhier(localfilesdir)
+ # Add gitignore to ensure source tree is clean
+ gitignorefile = os.path.join(localfilesdir,'.gitignore')
+ with open(gitignorefile, 'w') as f:
+ f.write('# Ignore local files, by default. Remove this file if you want to commit the directory to Git\n')
+ f.write('*\n')
+
+ finally:
+ tinfoil.shutdown()
+
+ logger.info('Launching menuconfig')
+ exec_build_env_command(config.init_path, basepath, 'bitbake -c menuconfig %s' % pn, watch=True)
+ fragment = os.path.join(localfilesdir, 'devtool-fragment.cfg')
+ res = standard._create_kconfig_diff(pn_src,rd,fragment)
+
+ return 0
+
+def register_commands(subparsers, context):
+ """register devtool subcommands from this plugin"""
+ parser_menuconfig = subparsers.add_parser('menuconfig',help='Alter build-time configuration for a recipe', description='Launches the make menuconfig command (for recipes where do_menuconfig is available), allowing users to make changes to the build-time configuration. Creates a config fragment corresponding to changes made.', group='advanced')
+ parser_menuconfig.add_argument('component', help='component to alter config')
+ parser_menuconfig.set_defaults(func=menuconfig,fixed_setup=context.fixed_setup)
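After running devtool menuconfig <recipe> for a workspace recipe that provides do_menuconfig, the generated fragment is written under the recipe's source tree in the workspace. A minimal sketch of where the plugin places it, built from the same path components as menuconfig() above; the workspace path and recipe name are illustrative:

    # Sketch only: expected location of the generated config fragment, using
    # the path components computed in menuconfig() above. Arguments are examples.
    import os

    def fragment_path(workspace_path, pn):
        """Return <workspace>/sources/<pn>/oe-local-files/devtool-fragment.cfg"""
        pn_src = os.path.join(workspace_path, 'sources', pn)
        return os.path.join(pn_src, 'oe-local-files', 'devtool-fragment.cfg')

    print(fragment_path('/path/to/build/workspace', 'linux-yocto'))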
diff --git a/external/poky/scripts/lib/devtool/package.py b/external/poky/scripts/lib/devtool/package.py
index af9e8f15..c2367342 100644
--- a/external/poky/scripts/lib/devtool/package.py
+++ b/external/poky/scripts/lib/devtool/package.py
@@ -2,18 +2,8 @@
#
# Copyright (C) 2014-2015 Intel Corporation
#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
+# SPDX-License-Identifier: GPL-2.0-only
#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""Devtool plugin containing the package subcommands"""
import os
diff --git a/external/poky/scripts/lib/devtool/runqemu.py b/external/poky/scripts/lib/devtool/runqemu.py
index e26cf28c..ead978aa 100644
--- a/external/poky/scripts/lib/devtool/runqemu.py
+++ b/external/poky/scripts/lib/devtool/runqemu.py
@@ -2,18 +2,8 @@
#
# Copyright (C) 2015 Intel Corporation
#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
+# SPDX-License-Identifier: GPL-2.0-only
#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""Devtool runqemu plugin"""
diff --git a/external/poky/scripts/lib/devtool/sdk.py b/external/poky/scripts/lib/devtool/sdk.py
index 46167537..3aa42a14 100644
--- a/external/poky/scripts/lib/devtool/sdk.py
+++ b/external/poky/scripts/lib/devtool/sdk.py
@@ -2,18 +2,8 @@
#
# Copyright (C) 2015-2016 Intel Corporation
#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
+# SPDX-License-Identifier: GPL-2.0-only
#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import os
import subprocess
diff --git a/external/poky/scripts/lib/devtool/search.py b/external/poky/scripts/lib/devtool/search.py
index b4f209b7..d24040df 100644
--- a/external/poky/scripts/lib/devtool/search.py
+++ b/external/poky/scripts/lib/devtool/search.py
@@ -2,18 +2,8 @@
#
# Copyright (C) 2015 Intel Corporation
#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
+# SPDX-License-Identifier: GPL-2.0-only
#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""Devtool search plugin"""
diff --git a/external/poky/scripts/lib/devtool/standard.py b/external/poky/scripts/lib/devtool/standard.py
index ea09bbff..bab644b8 100644
--- a/external/poky/scripts/lib/devtool/standard.py
+++ b/external/poky/scripts/lib/devtool/standard.py
@@ -2,18 +2,8 @@
#
# Copyright (C) 2014-2017 Intel Corporation
#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
+# SPDX-License-Identifier: GPL-2.0-only
#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""Devtool standard plugins"""
import os
@@ -155,8 +145,8 @@ def add(args, config, basepath, workspace):
extracmdopts += ' --src-subdir "%s"' % args.src_subdir
if args.autorev:
extracmdopts += ' -a'
- if args.fetch_dev:
- extracmdopts += ' --fetch-dev'
+ if args.npm_dev:
+ extracmdopts += ' --npm-dev'
if args.mirrors:
extracmdopts += ' --mirrors'
if args.srcrev:
@@ -270,14 +260,10 @@ def add(args, config, basepath, workspace):
f.write('}\n')
if bb.data.inherits_class('npm', rd):
- f.write('do_install_append() {\n')
- f.write(' # Remove files added to source dir by devtool/externalsrc\n')
- f.write(' rm -f ${NPM_INSTALLDIR}/singletask.lock\n')
- f.write(' rm -rf ${NPM_INSTALLDIR}/.git\n')
- f.write(' rm -rf ${NPM_INSTALLDIR}/oe-local-files\n')
- f.write(' for symlink in ${EXTERNALSRC_SYMLINKS} ; do\n')
- f.write(' rm -f ${NPM_INSTALLDIR}/${symlink%%:*}\n')
- f.write(' done\n')
+ f.write('python do_configure_append() {\n')
+ f.write(' pkgdir = d.getVar("NPM_PACKAGE")\n')
+ f.write(' lockfile = os.path.join(pkgdir, "singletask.lock")\n')
+ f.write(' bb.utils.remove(lockfile)\n')
f.write('}\n')
# Check if the new layer provides recipes whose priorities have been
@@ -471,11 +457,37 @@ def sync(args, config, basepath, workspace):
finally:
tinfoil.shutdown()
+def symlink_oelocal_files_srctree(rd,srctree):
+ import oe.patch
+ if os.path.abspath(rd.getVar('S')) == os.path.abspath(rd.getVar('WORKDIR')):
+ # If recipe extracts to ${WORKDIR}, symlink the files into the srctree
+ # (otherwise the recipe won't build as expected)
+ local_files_dir = os.path.join(srctree, 'oe-local-files')
+ addfiles = []
+ for root, _, files in os.walk(local_files_dir):
+ relpth = os.path.relpath(root, local_files_dir)
+ if relpth != '.':
+ bb.utils.mkdirhier(os.path.join(srctree, relpth))
+ for fn in files:
+ if fn == '.gitignore':
+ continue
+ destpth = os.path.join(srctree, relpth, fn)
+ if os.path.exists(destpth):
+ os.unlink(destpth)
+ os.symlink('oe-local-files/%s' % fn, destpth)
+ addfiles.append(os.path.join(relpth, fn))
+ if addfiles:
+ bb.process.run('git add %s' % ' '.join(addfiles), cwd=srctree)
+ useroptions = []
+ oe.patch.GitApplyTree.gitCommandUserOptions(useroptions, d=rd)
+ bb.process.run('git %s commit -m "Committing local file symlinks\n\n%s"' % (' '.join(useroptions), oe.patch.GitApplyTree.ignore_commit_prefix), cwd=srctree)
+
def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, workspace, fixed_setup, d, tinfoil, no_overrides=False):
"""Extract sources of a recipe"""
import oe.recipeutils
import oe.patch
+ import oe.path
pn = d.getVar('PN')
@@ -572,7 +584,7 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works
with open(preservestampfile, 'w') as f:
f.write(d.getVar('STAMP'))
try:
- if bb.data.inherits_class('kernel-yocto', d):
+ if is_kernel_yocto:
# We need to generate the kernel config
task = 'do_configure'
else:
@@ -599,6 +611,23 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works
raise DevtoolError('Something went wrong with source extraction - the devtool-source class was not active or did not function correctly:\n%s' % str(e))
srcsubdir_rel = os.path.relpath(srcsubdir, os.path.join(tempdir, 'workdir'))
+ # Check if work-shared is empty, if yes
+ # find source and copy to work-shared
+ if is_kernel_yocto:
+ workshareddir = d.getVar('STAGING_KERNEL_DIR')
+ staging_kerVer = get_staging_kver(workshareddir)
+ kernelVersion = d.getVar('LINUX_VERSION')
+
+ # handle dangling symbolic link in work-shared:
+ if os.path.islink(workshareddir):
+ os.unlink(workshareddir)
+
+ if os.path.exists(workshareddir) and (not os.listdir(workshareddir) or kernelVersion != staging_kerVer):
+ shutil.rmtree(workshareddir)
+ oe.path.copyhardlinktree(srcsubdir,workshareddir)
+ elif not os.path.exists(workshareddir):
+ oe.path.copyhardlinktree(srcsubdir,workshareddir)
+
tempdir_localdir = os.path.join(tempdir, 'oe-local-files')
srctree_localdir = os.path.join(srctree, 'oe-local-files')
@@ -627,29 +656,7 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works
shutil.move(tempdir_localdir, srcsubdir)
shutil.move(srcsubdir, srctree)
-
- if os.path.abspath(d.getVar('S')) == os.path.abspath(d.getVar('WORKDIR')):
- # If recipe extracts to ${WORKDIR}, symlink the files into the srctree
- # (otherwise the recipe won't build as expected)
- local_files_dir = os.path.join(srctree, 'oe-local-files')
- addfiles = []
- for root, _, files in os.walk(local_files_dir):
- relpth = os.path.relpath(root, local_files_dir)
- if relpth != '.':
- bb.utils.mkdirhier(os.path.join(srctree, relpth))
- for fn in files:
- if fn == '.gitignore':
- continue
- destpth = os.path.join(srctree, relpth, fn)
- if os.path.exists(destpth):
- os.unlink(destpth)
- os.symlink('oe-local-files/%s' % fn, destpth)
- addfiles.append(os.path.join(relpth, fn))
- if addfiles:
- bb.process.run('git add %s' % ' '.join(addfiles), cwd=srctree)
- useroptions = []
- oe.patch.GitApplyTree.gitCommandUserOptions(useroptions, d=d)
- bb.process.run('git %s commit -a -m "Committing local file symlinks\n\n%s"' % (' '.join(useroptions), oe.patch.GitApplyTree.ignore_commit_prefix), cwd=srctree)
+ symlink_oelocal_files_srctree(d,srctree)
if is_kernel_yocto:
logger.info('Copying kernel config to srctree')
@@ -717,11 +724,31 @@ def _check_preserve(config, recipename):
tf.write(line)
os.rename(newfile, origfile)
+def get_staging_kver(srcdir):
+ # Kernel version from work-shared
+ kerver = []
+ staging_kerVer=""
+ if os.path.exists(srcdir) and os.listdir(srcdir):
+ with open(os.path.join(srcdir,"Makefile")) as f:
+ version = [next(f) for x in range(5)][1:4]
+ for word in version:
+ kerver.append(word.split('= ')[1].split('\n')[0])
+ staging_kerVer = ".".join(kerver)
+ return staging_kerVer
+
+def get_staging_kbranch(srcdir):
+ staging_kbranch = ""
+ if os.path.exists(srcdir) and os.listdir(srcdir):
+ (branch, _) = bb.process.run('git branch | grep \* | cut -d \' \' -f2', cwd=srcdir)
+ staging_kbranch = "".join(branch.split('\n')[0])
+ return staging_kbranch
+
def modify(args, config, basepath, workspace):
"""Entry point for the devtool 'modify' subcommand"""
import bb
import oe.recipeutils
import oe.patch
+ import oe.path
if args.recipename in workspace:
raise DevtoolError("recipe %s is already in your workspace" %
@@ -763,6 +790,59 @@ def modify(args, config, basepath, workspace):
initial_rev = None
commits = []
check_commits = False
+
+ if bb.data.inherits_class('kernel-yocto', rd):
+ # Current set kernel version
+ kernelVersion = rd.getVar('LINUX_VERSION')
+ srcdir = rd.getVar('STAGING_KERNEL_DIR')
+ kbranch = rd.getVar('KBRANCH')
+
+ staging_kerVer = get_staging_kver(srcdir)
+ staging_kbranch = get_staging_kbranch(srcdir)
+ if (os.path.exists(srcdir) and os.listdir(srcdir)) and (kernelVersion in staging_kerVer and staging_kbranch == kbranch):
+ oe.path.copyhardlinktree(srcdir,srctree)
+ workdir = rd.getVar('WORKDIR')
+ srcsubdir = rd.getVar('S')
+ localfilesdir = os.path.join(srctree,'oe-local-files')
+ # Move local source files into separate subdir
+ recipe_patches = [os.path.basename(patch) for patch in oe.recipeutils.get_recipe_patches(rd)]
+ local_files = oe.recipeutils.get_recipe_local_files(rd)
+
+ for key in local_files.copy():
+ if key.endswith('scc'):
+ sccfile = open(local_files[key], 'r')
+ for l in sccfile:
+ line = l.split()
+ if line and line[0] in ('kconf', 'patch'):
+ cfg = os.path.join(os.path.dirname(local_files[key]), line[-1])
+ if not cfg in local_files.values():
+ local_files[line[-1]] = cfg
+ shutil.copy2(cfg, workdir)
+ sccfile.close()
+
+ # Ignore local files with subdir={BP}
+ srcabspath = os.path.abspath(srcsubdir)
+ local_files = [fname for fname in local_files if os.path.exists(os.path.join(workdir, fname)) and (srcabspath == workdir or not os.path.join(workdir, fname).startswith(srcabspath + os.sep))]
+ if local_files:
+ for fname in local_files:
+ _move_file(os.path.join(workdir, fname), os.path.join(srctree, 'oe-local-files', fname))
+ with open(os.path.join(srctree, 'oe-local-files', '.gitignore'), 'w') as f:
+ f.write('# Ignore local files, by default. Remove this file ''if you want to commit the directory to Git\n*\n')
+
+ symlink_oelocal_files_srctree(rd,srctree)
+
+ task = 'do_configure'
+ res = tinfoil.build_targets(pn, task, handle_events=True)
+
+ # Copy .config to workspace
+ kconfpath = rd.getVar('B')
+ logger.info('Copying kernel config to workspace')
+ shutil.copy2(os.path.join(kconfpath, '.config'),srctree)
+
+ # Set this to true, we still need to get initial_rev
+ # by parsing the git repo
+ args.no_extract = True
+
if not args.no_extract:
initial_rev, _ = _extract_source(srctree, args.keep_temp, args.branch, False, config, basepath, workspace, args.fixed_setup, rd, tinfoil, no_overrides=args.no_overrides)
if not initial_rev:
@@ -854,6 +934,13 @@ def modify(args, config, basepath, workspace):
' cp ${B}/.config ${S}/.config.baseline\n'
' ln -sfT ${B}/.config ${S}/.config.new\n'
'}\n')
+ if rd.getVarFlag('do_menuconfig','task'):
+ f.write('\ndo_configure_append() {\n'
+ ' if [ ! ${DEVTOOL_DISABLE_MENUCONFIG} ]; then\n'
+ ' cp ${B}/.config ${S}/.config.baseline\n'
+ ' ln -sfT ${B}/.config ${S}/.config.new\n'
+ ' fi\n'
+ '}\n')
if initial_rev:
f.write('\n# initial_rev: %s\n' % initial_rev)
for commit in commits:
@@ -1328,6 +1415,20 @@ def _export_local_files(srctree, rd, destdir, srctreebase):
if os.path.exists(os.path.join(local_files_dir, fragment_fn)):
os.unlink(os.path.join(local_files_dir, fragment_fn))
+ # Special handling for cml1, ccmake, etc bbclasses that generated
+ # configuration fragment files that are consumed as source files
+ for frag_class, frag_name in [("cml1", "fragment.cfg"), ("ccmake", "site-file.cmake")]:
+ if bb.data.inherits_class(frag_class, rd):
+ srcpath = os.path.join(rd.getVar('WORKDIR'), frag_name)
+ if os.path.exists(srcpath):
+ if frag_name not in new_set:
+ new_set.append(frag_name)
+ # copy fragment into destdir
+ shutil.copy2(srcpath, destdir)
+ # copy fragment into local files if exists
+ if os.path.isdir(local_files_dir):
+ shutil.copy2(srcpath, local_files_dir)
+
if new_set is not None:
for fname in new_set:
if fname in existing_files:
@@ -1516,17 +1617,17 @@ def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wil
patches_dir, changed_revs)
logger.debug('Pre-filtering: update: %s, new: %s' % (dict(upd_p), dict(new_p)))
if filter_patches:
- new_p = {}
- upd_p = {k:v for k,v in upd_p.items() if k in filter_patches}
+ new_p = OrderedDict()
+ upd_p = OrderedDict((k,v) for k,v in upd_p.items() if k in filter_patches)
remove_files = [f for f in remove_files if f in filter_patches]
updatefiles = False
updaterecipe = False
destpath = None
srcuri = (rd.getVar('SRC_URI', False) or '').split()
if appendlayerdir:
- files = dict((os.path.join(local_files_dir, key), val) for
+ files = OrderedDict((os.path.join(local_files_dir, key), val) for
key, val in list(upd_f.items()) + list(new_f.items()))
- files.update(dict((os.path.join(patches_dir, key), val) for
+ files.update(OrderedDict((os.path.join(patches_dir, key), val) for
key, val in list(upd_p.items()) + list(new_p.items())))
if files or remove_files:
removevalues = None
@@ -1749,7 +1850,7 @@ def status(args, config, basepath, workspace):
return 0
-def _reset(recipes, no_clean, config, basepath, workspace):
+def _reset(recipes, no_clean, remove_work, config, basepath, workspace):
"""Reset one or more recipes"""
import oe.path
@@ -1827,10 +1928,15 @@ def _reset(recipes, no_clean, config, basepath, workspace):
srctreebase = workspace[pn]['srctreebase']
if os.path.isdir(srctreebase):
if os.listdir(srctreebase):
- # We don't want to risk wiping out any work in progress
- logger.info('Leaving source tree %s as-is; if you no '
- 'longer need it then please delete it manually'
- % srctreebase)
+ if remove_work:
+ logger.info('-r argument used on %s, removing source tree.'
+ ' You will lose any unsaved work' %pn)
+ shutil.rmtree(srctreebase)
+ else:
+ # We don't want to risk wiping out any work in progress
+ logger.info('Leaving source tree %s as-is; if you no '
+ 'longer need it then please delete it manually'
+ % srctreebase)
else:
# This is unlikely, but if it's empty we can just remove it
os.rmdir(srctreebase)
@@ -1840,6 +1946,10 @@ def _reset(recipes, no_clean, config, basepath, workspace):
def reset(args, config, basepath, workspace):
"""Entry point for the devtool 'reset' subcommand"""
import bb
+ import shutil
+
+ recipes = ""
+
if args.recipename:
if args.all:
raise DevtoolError("Recipe cannot be specified if -a/--all is used")
@@ -1854,7 +1964,7 @@ def reset(args, config, basepath, workspace):
else:
recipes = args.recipename
- _reset(recipes, args.no_clean, config, basepath, workspace)
+ _reset(recipes, args.no_clean, args.remove_work, config, basepath, workspace)
return 0
@@ -1862,13 +1972,27 @@ def reset(args, config, basepath, workspace):
def _get_layer(layername, d):
"""Determine the base layer path for the specified layer name/path"""
layerdirs = d.getVar('BBLAYERS').split()
- layers = {os.path.basename(p): p for p in layerdirs}
+ layers = {} # {basename: layer_paths}
+ for p in layerdirs:
+ bn = os.path.basename(p)
+ if bn not in layers:
+ layers[bn] = [p]
+ else:
+ layers[bn].append(p)
# Provide some shortcuts
if layername.lower() in ['oe-core', 'openembedded-core']:
- layerdir = layers.get('meta', None)
+ layername = 'meta'
+ layer_paths = layers.get(layername, None)
+ if not layer_paths:
+ return os.path.abspath(layername)
+ elif len(layer_paths) == 1:
+ return os.path.abspath(layer_paths[0])
else:
- layerdir = layers.get(layername, None)
- return os.path.abspath(layerdir or layername)
+ # multiple layers having the same base name
+ logger.warning("Multiple layers have the same base name '%s', use the first one '%s'." % (layername, layer_paths[0]))
+ logger.warning("Consider using path instead of base name to specify layer:\n\t\t%s" % '\n\t\t'.join(layer_paths))
+ return os.path.abspath(layer_paths[0])
+
def finish(args, config, basepath, workspace):
"""Entry point for the devtool 'finish' subcommand"""
@@ -1891,7 +2015,8 @@ def finish(args, config, basepath, workspace):
else:
raise DevtoolError('Source tree is not clean:\n\n%s\nEnsure you have committed your changes or use -f/--force if you are sure there\'s nothing that needs to be committed' % dirty)
- no_clean = False
+ no_clean = args.no_clean
+ remove_work=args.remove_work
tinfoil = setup_tinfoil(basepath=basepath, tracking=True)
try:
rd = parse_recipe(config, tinfoil, args.recipename, True)
@@ -2043,7 +2168,7 @@ def finish(args, config, basepath, workspace):
if args.dry_run:
logger.info('Resetting recipe (dry-run)')
else:
- _reset([args.recipename], no_clean=no_clean, config=config, basepath=basepath, workspace=workspace)
+ _reset([args.recipename], no_clean=no_clean, remove_work=remove_work, config=config, basepath=basepath, workspace=workspace)
return 0
@@ -2070,7 +2195,7 @@ def register_commands(subparsers, context):
group.add_argument('--same-dir', '-s', help='Build in same directory as source', action="store_true")
group.add_argument('--no-same-dir', help='Force build in a separate build directory', action="store_true")
parser_add.add_argument('--fetch', '-f', help='Fetch the specified URI and extract it to create the source tree (deprecated - pass as positional argument instead)', metavar='URI')
- parser_add.add_argument('--fetch-dev', help='For npm, also fetch devDependencies', action="store_true")
+ parser_add.add_argument('--npm-dev', help='For npm, also fetch devDependencies', action="store_true")
parser_add.add_argument('--version', '-V', help='Version to use within recipe (PV)')
parser_add.add_argument('--no-git', '-g', help='If fetching source, do not set up source tree as a git repository', action="store_true")
group = parser_add.add_mutually_exclusive_group()
@@ -2155,6 +2280,7 @@ def register_commands(subparsers, context):
parser_reset.add_argument('recipename', nargs='*', help='Recipe to reset')
parser_reset.add_argument('--all', '-a', action="store_true", help='Reset all recipes (clear workspace)')
parser_reset.add_argument('--no-clean', '-n', action="store_true", help='Don\'t clean the sysroot to remove recipe output')
+ parser_reset.add_argument('--remove-work', '-r', action="store_true", help='Clean the sources directory along with append')
parser_reset.set_defaults(func=reset)
parser_finish = subparsers.add_parser('finish', help='Finish working on a recipe in your workspace',
@@ -2165,6 +2291,8 @@ def register_commands(subparsers, context):
parser_finish.add_argument('--mode', '-m', choices=['patch', 'srcrev', 'auto'], default='auto', help='Update mode (where %(metavar)s is %(choices)s; default is %(default)s)', metavar='MODE')
parser_finish.add_argument('--initial-rev', help='Override starting revision for patches')
parser_finish.add_argument('--force', '-f', action="store_true", help='Force continuing even if there are uncommitted changes in the source tree repository')
+ parser_finish.add_argument('--remove-work', '-r', action="store_true", help='Clean the sources directory under workspace')
+ parser_finish.add_argument('--no-clean', '-n', action="store_true", help='Don\'t clean the sysroot to remove recipe output')
parser_finish.add_argument('--no-overrides', '-O', action="store_true", help='Do not handle other override branches (if they exist)')
parser_finish.add_argument('--dry-run', '-N', action="store_true", help='Dry-run (just report changes instead of writing them)')
parser_finish.add_argument('--force-patch-refresh', action="store_true", help='Update patches in the layer even if they have not been modified (useful for refreshing patch context)')
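The get_staging_kver() and get_staging_kbranch() helpers added above let modify() decide whether the shared kernel source in work-shared can be reused instead of being re-extracted. A sketch of querying them directly, assuming an initialised build environment where devtool.standard is importable; the work-shared path is an example for a qemux86-64 build:

    # Sketch only: inspect the shared kernel source the same way modify() does.
    # The path is an example; run inside an initialised build environment.
    from devtool.standard import get_staging_kbranch, get_staging_kver

    srcdir = 'tmp/work-shared/qemux86-64/kernel-source'
    print(get_staging_kver(srcdir))      # version joined from the Makefile's
                                         # VERSION/PATCHLEVEL/SUBLEVEL lines
    print(get_staging_kbranch(srcdir))   # branch currently checked out there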
diff --git a/external/poky/scripts/lib/devtool/upgrade.py b/external/poky/scripts/lib/devtool/upgrade.py
index 20200779..327916ad 100644
--- a/external/poky/scripts/lib/devtool/upgrade.py
+++ b/external/poky/scripts/lib/devtool/upgrade.py
@@ -2,18 +2,7 @@
#
# Copyright (C) 2014-2017 Intel Corporation
#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+# SPDX-License-Identifier: GPL-2.0-only
#
"""Devtool upgrade plugin"""
@@ -43,7 +32,7 @@ def _run(cmd, cwd=''):
def _get_srctree(tmpdir):
srctree = tmpdir
- dirs = os.listdir(tmpdir)
+ dirs = scriptutils.filter_src_subdirs(tmpdir)
if len(dirs) == 1:
srctree = os.path.join(tmpdir, dirs[0])
return srctree
@@ -133,18 +122,22 @@ def _cleanup_on_error(rf, srctree):
rfp = os.path.split(rf)[0] # recipe folder
rfpp = os.path.split(rfp)[0] # recipes folder
if os.path.exists(rfp):
- shutil.rmtree(b)
+ shutil.rmtree(rfp)
if not len(os.listdir(rfpp)):
os.rmdir(rfpp)
srctree = os.path.abspath(srctree)
if os.path.exists(srctree):
shutil.rmtree(srctree)
-def _upgrade_error(e, rf, srctree):
- if rf:
- cleanup_on_error(rf, srctree)
+def _upgrade_error(e, rf, srctree, keep_failure=False, extramsg=None):
+ if rf and not keep_failure:
+ _cleanup_on_error(rf, srctree)
logger.error(e)
- raise DevtoolError(e)
+ if extramsg:
+ logger.error(extramsg)
+ if keep_failure:
+ logger.info('Preserving failed upgrade files (--keep-failure)')
+ sys.exit(1)
def _get_uri(rd):
srcuris = rd.getVar('SRC_URI').split()
@@ -288,6 +281,8 @@ def _extract_new_source(newpv, srctree, no_patch, srcrev, srcbranch, branch, kee
logger.info('Preserving temporary directory %s' % tmpsrctree)
else:
shutil.rmtree(tmpsrctree)
+ if tmpdir != tmpsrctree:
+ shutil.rmtree(tmpdir)
return (rev, md5, sha256, srcbranch, srcsubdir_rel)
@@ -310,7 +305,7 @@ def _add_license_diff_to_recipe(path, diff):
f.write("\n#\n\n".encode())
f.write(orig_content)
-def _create_new_recipe(newpv, md5, sha256, srcrev, srcbranch, srcsubdir_old, srcsubdir_new, workspace, tinfoil, rd, license_diff, new_licenses):
+def _create_new_recipe(newpv, md5, sha256, srcrev, srcbranch, srcsubdir_old, srcsubdir_new, workspace, tinfoil, rd, license_diff, new_licenses, srctree, keep_failure):
"""Creates the new recipe under workspace"""
bpn = rd.getVar('BPN')
@@ -427,7 +422,10 @@ def _create_new_recipe(newpv, md5, sha256, srcrev, srcbranch, srcsubdir_old, src
newvalues["LIC_FILES_CHKSUM"] = newlicchksum
_add_license_diff_to_recipe(fullpath, license_diff)
- rd = tinfoil.parse_recipe_file(fullpath, False)
+ try:
+ rd = tinfoil.parse_recipe_file(fullpath, False)
+ except bb.tinfoil.TinfoilCommandFailed as e:
+ _upgrade_error(e, fullpath, srctree, keep_failure, 'Parsing of upgraded recipe failed')
oe.recipeutils.patch_recipe(rd, fullpath, newvalues)
return fullpath, copied
@@ -552,18 +550,18 @@ def upgrade(args, config, basepath, workspace):
try:
logger.info('Extracting current version source...')
rev1, srcsubdir1 = standard._extract_source(srctree, False, 'devtool-orig', False, config, basepath, workspace, args.fixed_setup, rd, tinfoil, no_overrides=args.no_overrides)
- old_licenses = _extract_licenses(srctree, rd.getVar('LIC_FILES_CHKSUM'))
+ old_licenses = _extract_licenses(srctree, (rd.getVar('LIC_FILES_CHKSUM') or ""))
logger.info('Extracting upgraded version source...')
rev2, md5, sha256, srcbranch, srcsubdir2 = _extract_new_source(args.version, srctree, args.no_patch,
args.srcrev, args.srcbranch, args.branch, args.keep_temp,
tinfoil, rd)
- new_licenses = _extract_licenses(srctree, rd.getVar('LIC_FILES_CHKSUM'))
+ new_licenses = _extract_licenses(srctree, (rd.getVar('LIC_FILES_CHKSUM') or ""))
license_diff = _generate_license_diff(old_licenses, new_licenses)
- rf, copied = _create_new_recipe(args.version, md5, sha256, args.srcrev, srcbranch, srcsubdir1, srcsubdir2, config.workspace_path, tinfoil, rd, license_diff, new_licenses)
+ rf, copied = _create_new_recipe(args.version, md5, sha256, args.srcrev, srcbranch, srcsubdir1, srcsubdir2, config.workspace_path, tinfoil, rd, license_diff, new_licenses, srctree, args.keep_failure)
except bb.process.CmdError as e:
- _upgrade_error(e, rf, srctree)
+ _upgrade_error(e, rf, srctree, args.keep_failure)
except DevtoolError as e:
- _upgrade_error(e, rf, srctree)
+ _upgrade_error(e, rf, srctree, args.keep_failure)
standard._add_md5(config, pn, os.path.dirname(rf))
af = _write_append(rf, srctree, args.same_dir, args.no_same_dir, rev2,
@@ -600,6 +598,20 @@ def latest_version(args, config, basepath, workspace):
tinfoil.shutdown()
return 0
+def check_upgrade_status(args, config, basepath, workspace):
+ if not args.recipe:
+ logger.info("Checking the upstream status for all recipes may take a few minutes")
+ results = oe.recipeutils.get_recipe_upgrade_status(args.recipe)
+ for result in results:
+ # pn, update_status, current, latest, maintainer, latest_commit, no_update_reason
+ if args.all or result[1] != 'MATCH':
+ logger.info("{:25} {:15} {:15} {} {} {}".format( result[0],
+ result[2],
+ result[1] if result[1] != 'UPDATE' else (result[3] if not result[3].endswith("new-commits-available") else "new commits"),
+ result[4],
+ result[5] if result[5] != 'N/A' else "",
+ "cannot be updated due to: %s" %(result[6]) if result[6] else ""))
+
def register_commands(subparsers, context):
"""Register devtool subcommands from this plugin"""
@@ -620,6 +632,7 @@ def register_commands(subparsers, context):
group.add_argument('--same-dir', '-s', help='Build in same directory as source', action="store_true")
group.add_argument('--no-same-dir', help='Force build in a separate build directory', action="store_true")
parser_upgrade.add_argument('--keep-temp', action="store_true", help='Keep temporary directory (for debugging)')
+ parser_upgrade.add_argument('--keep-failure', action="store_true", help='Keep failed upgrade recipe and associated files (for debugging)')
parser_upgrade.set_defaults(func=upgrade, fixed_setup=context.fixed_setup)
parser_latest_version = subparsers.add_parser('latest-version', help='Report the latest version of an existing recipe',
@@ -627,3 +640,10 @@ def register_commands(subparsers, context):
group='info')
parser_latest_version.add_argument('recipename', help='Name of recipe to query (just name - no version, path or extension)')
parser_latest_version.set_defaults(func=latest_version)
+
+ parser_check_upgrade_status = subparsers.add_parser('check-upgrade-status', help="Report upgradability for multiple (or all) recipes",
+ description="Prints a table of recipes together with versions currently provided by recipes, and latest upstream versions, when there is a later version available",
+ group='info')
+ parser_check_upgrade_status.add_argument('recipe', help='Name of the recipe to report (omit to report upgrade info for all recipes)', nargs='*')
+ parser_check_upgrade_status.add_argument('--all', '-a', help='Show all recipes, not just recipes needing upgrade', action="store_true")
+ parser_check_upgrade_status.set_defaults(func=check_upgrade_status)
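
The check_upgrade_status() addition above prints one row per recipe from a tuple of (pn, status, current, latest, maintainer, latest_commit, no_update_reason). As an aid to reading the nested format() call, here is a standalone restatement of the same row logic run on hand-written sample tuples (the sample data is illustrative, not real tool output):

# Illustration only: mirrors the row formatting in check_upgrade_status()
# on hand-written example tuples in the documented order:
# (pn, status, current, latest, maintainer, latest_commit, no_update_reason)
example_results = [
    ("busybox", "UPDATE",  "1.31.0", "1.32.0", "maintainer@example.com", "N/A", ""),
    ("zlib",    "MATCH",   "1.2.11", "1.2.11", "maintainer@example.com", "N/A", ""),
    ("foo",     "UNKNOWN", "0.1",    "0.1",    "maintainer@example.com", "N/A", "manual upgrade only"),
]

show_all = False  # stands in for the -a/--all option

for pn, status, current, latest, maintainer, commit, reason in example_results:
    if show_all or status != 'MATCH':
        # Same column layout as the hunk: the third column shows the status,
        # unless an update is available, in which case the new version (or
        # "new commits") is shown instead.
        print("{:25} {:15} {:15} {} {} {}".format(
            pn,
            current,
            status if status != 'UPDATE' else
                (latest if not latest.endswith("new-commits-available") else "new commits"),
            maintainer,
            commit if commit != 'N/A' else "",
            "cannot be updated due to: %s" % reason if reason else ""))
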
diff --git a/external/poky/scripts/lib/devtool/utilcmds.py b/external/poky/scripts/lib/devtool/utilcmds.py
index 7cd139fb..96481776 100644
--- a/external/poky/scripts/lib/devtool/utilcmds.py
+++ b/external/poky/scripts/lib/devtool/utilcmds.py
@@ -2,18 +2,8 @@
#
# Copyright (C) 2015-2016 Intel Corporation
#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
+# SPDX-License-Identifier: GPL-2.0-only
#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""Devtool utility plugins"""
diff --git a/external/poky/scripts/lib/recipetool/append.py b/external/poky/scripts/lib/recipetool/append.py
index 3f2c134a..e9d52bb6 100644
--- a/external/poky/scripts/lib/recipetool/append.py
+++ b/external/poky/scripts/lib/recipetool/append.py
@@ -2,18 +2,8 @@
#
# Copyright (C) 2015 Intel Corporation
#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
+# SPDX-License-Identifier: GPL-2.0-only
#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import sys
import os
diff --git a/external/poky/scripts/lib/recipetool/create.py b/external/poky/scripts/lib/recipetool/create.py
index dbd74a1c..566c7536 100644
--- a/external/poky/scripts/lib/recipetool/create.py
+++ b/external/poky/scripts/lib/recipetool/create.py
@@ -2,18 +2,8 @@
#
# Copyright (C) 2014-2017 Intel Corporation
#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
+# SPDX-License-Identifier: GPL-2.0-only
#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import sys
import os
@@ -70,11 +60,13 @@ class RecipeHandler(object):
if RecipeHandler.recipelibmap:
return
# First build up library->package mapping
- shlib_providers = oe.package.read_shlib_providers(d)
+ d2 = bb.data.createCopy(d)
+ d2.setVar("WORKDIR_PKGDATA", "${PKGDATA_DIR}")
+ shlib_providers = oe.package.read_shlib_providers(d2)
libdir = d.getVar('libdir')
base_libdir = d.getVar('base_libdir')
libpaths = list(set([base_libdir, libdir]))
- libname_re = re.compile('^lib(.+)\.so.*$')
+ libname_re = re.compile(r'^lib(.+)\.so.*$')
pkglibmap = {}
for lib, item in shlib_providers.items():
for path, pkg in item.items():
@@ -436,7 +428,7 @@ def create_recipe(args):
if scriptutils.is_src_url(source):
# Warn about github archive URLs
- if re.match('https?://github.com/[^/]+/[^/]+/archive/.+(\.tar\..*|\.zip)$', source):
+ if re.match(r'https?://github.com/[^/]+/[^/]+/archive/.+(\.tar\..*|\.zip)$', source):
logger.warning('github archive files are not guaranteed to be stable and may be re-generated over time. If the latter occurs, the checksums will likely change and the recipe will fail at do_fetch. It is recommended that you point to an actual commit or tag in the repository instead (using the repository URL in conjunction with the -S/--srcrev option).')
# Fetch a URL
fetchuri = reformat_git_uri(urldefrag(source)[0])
@@ -468,6 +460,7 @@ def create_recipe(args):
logger.error('branch= parameter and -B/--srcbranch option cannot both be specified - use one or the other')
sys.exit(1)
srcbranch = args.srcbranch
+ params['branch'] = srcbranch
nobranch = params.get('nobranch')
if nobranch and srcbranch:
logger.error('nobranch= cannot be used if you specify a branch')
@@ -485,8 +478,6 @@ def create_recipe(args):
storeTagName = params['tag']
params['nobranch'] = '1'
del params['tag']
- if scheme == 'npm':
- params['noverify'] = '1'
fetchuri = bb.fetch2.encodeurl((scheme, network, path, user, passwd, params))
tmpparent = tinfoil.config_data.getVar('BASE_WORKDIR')
@@ -503,9 +494,7 @@ def create_recipe(args):
if ftmpdir and args.keep_temp:
logger.info('Fetch temp directory is %s' % ftmpdir)
- dirlist = os.listdir(srctree)
- filterout = ['git.indirectionsymlink']
- dirlist = [x for x in dirlist if x not in filterout]
+ dirlist = scriptutils.filter_src_subdirs(srctree)
logger.debug('Directory listing (excluding filtered out):\n %s' % '\n '.join(dirlist))
if len(dirlist) == 1:
singleitem = os.path.join(srctree, dirlist[0])
@@ -724,10 +713,8 @@ def create_recipe(args):
lines_after.append('INSANE_SKIP_${PN} += "already-stripped"')
lines_after.append('')
- if args.fetch_dev:
- extravalues['fetchdev'] = True
- else:
- extravalues['fetchdev'] = None
+ if args.npm_dev:
+ extravalues['NPM_INSTALL_DEV'] = 1
# Find all plugins that want to register handlers
logger.debug('Loading recipe handlers')
@@ -843,7 +830,7 @@ def create_recipe(args):
elif line.startswith('PV = '):
if realpv:
# Replace the first part of the PV value
- line = re.sub('"[^+]*\+', '"%s+' % realpv, line)
+ line = re.sub(r'"[^+]*\+', '"%s+' % realpv, line)
lines_before.append(line)
if args.also_native:
@@ -1063,6 +1050,7 @@ def get_license_md5sums(d, static_only=False):
md5sums['3b83ef96387f14655fc854ddc3c6bd57'] = 'Apache-2.0'
md5sums['385c55653886acac3821999a3ccd17b3'] = 'Artistic-1.0 | GPL-2.0' # some perl modules
md5sums['54c7042be62e169199200bc6477f04d1'] = 'BSD-3-Clause'
+ md5sums['bfe1f75d606912a4111c90743d6c7325'] = 'MPL-1.1'
return md5sums
def crunch_license(licfile):
@@ -1078,8 +1066,8 @@ def crunch_license(licfile):
import oe.utils
# Note: these are carefully constructed!
- license_title_re = re.compile('^\(?(#+ *)?(The )?.{1,10} [Ll]icen[sc]e( \(.{1,10}\))?\)?:?$')
- license_statement_re = re.compile('^(This (project|software) is( free software)? (released|licen[sc]ed)|(Released|Licen[cs]ed)) under the .{1,10} [Ll]icen[sc]e:?$')
+ license_title_re = re.compile(r'^\(?(#+ *)?(The )?.{1,10} [Ll]icen[sc]e( \(.{1,10}\))?\)?:?$')
+ license_statement_re = re.compile(r'^(This (project|software) is( free software)? (released|licen[sc]ed)|(Released|Licen[cs]ed)) under the .{1,10} [Ll]icen[sc]e:?$')
copyright_re = re.compile('^(#+)? *Copyright .*$')
crunched_md5sums = {}
@@ -1322,7 +1310,7 @@ def register_commands(subparsers):
group.add_argument('-S', '--srcrev', help='Source revision to fetch if fetching from an SCM such as git (default latest)')
parser_create.add_argument('-B', '--srcbranch', help='Branch in source repository if fetching from an SCM such as git (default master)')
parser_create.add_argument('--keep-temp', action="store_true", help='Keep temporary directory (for debugging)')
- parser_create.add_argument('--fetch-dev', action="store_true", help='For npm, also fetch devDependencies')
+ parser_create.add_argument('--npm-dev', action="store_true", help='For npm, also fetch devDependencies')
parser_create.add_argument('--devtool', action="store_true", help=argparse.SUPPRESS)
parser_create.add_argument('--mirrors', action="store_true", help='Enable PREMIRRORS and MIRRORS for source tree fetching (disabled by default).')
parser_create.set_defaults(func=create_recipe)
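
Both this hunk and the devtool/upgrade.py hunk above swap an inline os.listdir() filter for scriptutils.filter_src_subdirs(). That helper is defined in scripts/lib/scriptutils.py, which is outside this excerpt; a plausible sketch of its behaviour, inferred from the inline code it replaces (the 'source-date-epoch' entry is an assumption):

import os

def filter_src_subdirs(srctree):
    # Sketch only: drop fetcher artefacts so they are not mistaken for the
    # single real source subdirectory. 'git.indirectionsymlink' comes from
    # the filter removed above; 'source-date-epoch' is assumed.
    excluded = ('git.indirectionsymlink', 'source-date-epoch')
    return [d for d in os.listdir(srctree) if d not in excluded]
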
diff --git a/external/poky/scripts/lib/recipetool/create_buildsys.py b/external/poky/scripts/lib/recipetool/create_buildsys.py
index 4743c740..35a97c93 100644
--- a/external/poky/scripts/lib/recipetool/create_buildsys.py
+++ b/external/poky/scripts/lib/recipetool/create_buildsys.py
@@ -2,18 +2,8 @@
#
# Copyright (C) 2014-2016 Intel Corporation
#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
+# SPDX-License-Identifier: GPL-2.0-only
#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import re
import logging
@@ -236,9 +226,9 @@ class CmakeRecipeHandler(RecipeHandler):
elif pkg == 'PkgConfig':
inherits.append('pkgconfig')
elif pkg == 'PythonInterp':
- inherits.append('pythonnative')
+ inherits.append('python3native')
elif pkg == 'PythonLibs':
- inherits.append('python-dir')
+ inherits.append('python3-dir')
else:
# Try to map via looking at installed CMake packages in pkgdata
dep = find_cmake_package(pkg)
@@ -427,7 +417,7 @@ class AutotoolsRecipeHandler(RecipeHandler):
}
progclassmap = {'gconftool-2': 'gconf',
'pkg-config': 'pkgconfig',
- 'python': 'pythonnative',
+ 'python': 'python3native',
'python3': 'python3native',
'perl': 'perlnative',
'makeinfo': 'texinfo',
@@ -576,16 +566,7 @@ class AutotoolsRecipeHandler(RecipeHandler):
elif keyword == 'AX_PROG_XSLTPROC':
deps.append('libxslt-native')
elif keyword in ['AC_PYTHON_DEVEL', 'AX_PYTHON_DEVEL', 'AM_PATH_PYTHON']:
- pythonclass = 'pythonnative'
- res = version_re.search(value)
- if res:
- if res.group(1).startswith('3'):
- pythonclass = 'python3native'
- # Avoid replacing python3native with pythonnative
- if not pythonclass in inherits and not 'python3native' in inherits:
- if 'pythonnative' in inherits:
- inherits.remove('pythonnative')
- inherits.append(pythonclass)
+ pythonclass = 'python3native'
elif keyword == 'AX_WITH_CURSES':
deps.append('ncurses')
elif keyword == 'AX_PATH_BDB':
diff --git a/external/poky/scripts/lib/recipetool/create_buildsys_python.py b/external/poky/scripts/lib/recipetool/create_buildsys_python.py
index 5bd2aa33..adfa3779 100644
--- a/external/poky/scripts/lib/recipetool/create_buildsys_python.py
+++ b/external/poky/scripts/lib/recipetool/create_buildsys_python.py
@@ -2,18 +2,8 @@
#
# Copyright (C) 2015 Mentor Graphics Corporation
#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
+# SPDX-License-Identifier: GPL-2.0-only
#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import ast
import codecs
@@ -41,11 +31,11 @@ def tinfoil_init(instance):
class PythonRecipeHandler(RecipeHandler):
- base_pkgdeps = ['python-core']
- excluded_pkgdeps = ['python-dbg']
- # os.path is provided by python-core
+ base_pkgdeps = ['python3-core']
+ excluded_pkgdeps = ['python3-dbg']
+ # os.path is provided by python3-core
assume_provided = ['builtins', 'os.path']
- # Assumes that the host python builtin_module_names is sane for target too
+ # Assumes that the host python3 builtin_module_names is sane for target too
assume_provided = assume_provided + list(sys.builtin_module_names)
bbvar_map = {
@@ -164,8 +154,13 @@ class PythonRecipeHandler(RecipeHandler):
if 'buildsystem' in handled:
return False
- if not RecipeHandler.checkfiles(srctree, ['setup.py']):
- return
+ # Check for non-zero size setup.py files
+ setupfiles = RecipeHandler.checkfiles(srctree, ['setup.py'])
+ for fn in setupfiles:
+ if os.path.getsize(fn):
+ break
+ else:
+ return False
# setup.py is always parsed to get at certain required information, such as
# distutils vs setuptools
@@ -225,9 +220,9 @@ class PythonRecipeHandler(RecipeHandler):
self.apply_info_replacements(info)
if uses_setuptools:
- classes.append('setuptools')
+ classes.append('setuptools3')
else:
- classes.append('distutils')
+ classes.append('distutils3')
if license_str:
for i, line in enumerate(lines_before):
@@ -292,7 +287,7 @@ class PythonRecipeHandler(RecipeHandler):
for feature, feature_reqs in extras_req.items():
unmapped_deps.difference_update(feature_reqs)
- feature_req_deps = ('python-' + r.replace('.', '-').lower() for r in sorted(feature_reqs))
+ feature_req_deps = ('python3-' + r.replace('.', '-').lower() for r in sorted(feature_reqs))
lines_after.append('PACKAGECONFIG[{}] = ",,,{}"'.format(feature.lower(), ' '.join(feature_req_deps)))
inst_reqs = set()
@@ -303,7 +298,7 @@ class PythonRecipeHandler(RecipeHandler):
if inst_reqs:
unmapped_deps.difference_update(inst_reqs)
- inst_req_deps = ('python-' + r.replace('.', '-').lower() for r in sorted(inst_reqs))
+ inst_req_deps = ('python3-' + r.replace('.', '-').lower() for r in sorted(inst_reqs))
lines_after.append('# WARNING: the following rdepends are from setuptools install_requires. These')
lines_after.append('# upstream names may not correspond exactly to bitbake package names.')
lines_after.append('RDEPENDS_${{PN}} += "{}"'.format(' '.join(inst_req_deps)))
@@ -366,7 +361,7 @@ class PythonRecipeHandler(RecipeHandler):
return info, 'setuptools' in imported_modules, non_literals, extensions
def get_setup_args_info(self, setupscript='./setup.py'):
- cmd = ['python', setupscript]
+ cmd = ['python3', setupscript]
info = {}
keys = set(self.bbvar_map.keys())
keys |= set(self.setuparg_list_fields)
@@ -400,7 +395,7 @@ class PythonRecipeHandler(RecipeHandler):
def get_setup_byline(self, fields, setupscript='./setup.py'):
info = {}
- cmd = ['python', setupscript]
+ cmd = ['python3', setupscript]
cmd.extend('--' + self.setuparg_map.get(f, f.lower()) for f in fields)
try:
info_lines = self.run_command(cmd, cwd=os.path.dirname(setupscript)).splitlines()
@@ -537,7 +532,7 @@ class PythonRecipeHandler(RecipeHandler):
pkgdata_dir = tinfoil.config_data.getVar('PKGDATA_DIR')
ldata = tinfoil.config_data.createCopy()
- bb.parse.handle('classes/python-dir.bbclass', ldata, True)
+ bb.parse.handle('classes/python3-dir.bbclass', ldata, True)
python_sitedir = ldata.getVar('PYTHON_SITEPACKAGES_DIR')
dynload_dir = os.path.join(os.path.dirname(python_sitedir), 'lib-dynload')
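
The non-zero-size setup.py check above relies on Python's for/else: the else branch runs only when the loop completes without hitting break, i.e. when every discovered setup.py is empty. A more explicit restatement of the same test:

import os

def has_nonempty_setup_py(setupfiles):
    # Equivalent to the for/else in the hunk: accept the source tree only if
    # at least one of the discovered setup.py files has a non-zero size.
    for fn in setupfiles:
        if os.path.getsize(fn):
            return True
    return False
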
diff --git a/external/poky/scripts/lib/recipetool/create_kernel.py b/external/poky/scripts/lib/recipetool/create_kernel.py
index ca4996c7..5740589a 100644
--- a/external/poky/scripts/lib/recipetool/create_kernel.py
+++ b/external/poky/scripts/lib/recipetool/create_kernel.py
@@ -2,18 +2,8 @@
#
# Copyright (C) 2016 Intel Corporation
#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
+# SPDX-License-Identifier: GPL-2.0-only
#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import re
import logging
diff --git a/external/poky/scripts/lib/recipetool/create_kmod.py b/external/poky/scripts/lib/recipetool/create_kmod.py
index 3982537a..85b5c48e 100644
--- a/external/poky/scripts/lib/recipetool/create_kmod.py
+++ b/external/poky/scripts/lib/recipetool/create_kmod.py
@@ -2,18 +2,8 @@
#
# Copyright (C) 2016 Intel Corporation
#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
+# SPDX-License-Identifier: GPL-2.0-only
#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import re
import logging
diff --git a/external/poky/scripts/lib/recipetool/create_npm.py b/external/poky/scripts/lib/recipetool/create_npm.py
index 0b09ed0b..579b7ae4 100644
--- a/external/poky/scripts/lib/recipetool/create_npm.py
+++ b/external/poky/scripts/lib/recipetool/create_npm.py
@@ -1,331 +1,255 @@
-# Recipe creation tool - node.js NPM module support plugin
-#
# Copyright (C) 2016 Intel Corporation
+# Copyright (C) 2020 Savoir-Faire Linux
#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
+# SPDX-License-Identifier: GPL-2.0-only
#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+"""Recipe creation tool - npm module support plugin"""
+import json
import os
+import re
import sys
-import logging
-import subprocess
import tempfile
-import shutil
-import json
-from recipetool.create import RecipeHandler, split_pkg_licenses, handle_license_vars
+import bb
+from bb.fetch2.npm import NpmEnvironment
+from bb.fetch2.npmsw import foreach_dependencies
+from recipetool.create import RecipeHandler
+from recipetool.create import guess_license
+from recipetool.create import split_pkg_licenses
-logger = logging.getLogger('recipetool')
+TINFOIL = None
+def tinfoil_init(instance):
+ """Initialize tinfoil"""
+ global TINFOIL
+ TINFOIL = instance
-tinfoil = None
+class NpmRecipeHandler(RecipeHandler):
+ """Class to handle the npm recipe creation"""
+
+ @staticmethod
+ def _npm_name(name):
+ """Generate a Yocto friendly npm name"""
+ name = re.sub("/", "-", name)
+ name = name.lower()
+ name = re.sub(r"[^\-a-z0-9]", "", name)
+ name = name.strip("-")
+ return name
+
+ @staticmethod
+ def _get_registry(lines):
+ """Get the registry value from the 'npm://registry' url"""
+ registry = None
+
+ def _handle_registry(varname, origvalue, op, newlines):
+ nonlocal registry
+ if origvalue.startswith("npm://"):
+ registry = re.sub(r"^npm://", "http://", origvalue.split(";")[0])
+ return origvalue, None, 0, True
-def tinfoil_init(instance):
- global tinfoil
- tinfoil = instance
+ bb.utils.edit_metadata(lines, ["SRC_URI"], _handle_registry)
+ return registry
-class NpmRecipeHandler(RecipeHandler):
- lockdownpath = None
+ @staticmethod
+ def _ensure_npm():
+ """Check if the 'npm' command is available in the recipes"""
+ if not TINFOIL.recipes_parsed:
+ TINFOIL.parse_recipes()
- def _ensure_npm(self, fixed_setup=False):
- if not tinfoil.recipes_parsed:
- tinfoil.parse_recipes()
try:
- rd = tinfoil.parse_recipe('nodejs-native')
+ d = TINFOIL.parse_recipe("nodejs-native")
except bb.providers.NoProvider:
- if fixed_setup:
- msg = 'nodejs-native is required for npm but is not available within this SDK'
- else:
- msg = 'nodejs-native is required for npm but is not available - you will likely need to add a layer that provides nodejs'
- logger.error(msg)
- return None
- bindir = rd.getVar('STAGING_BINDIR_NATIVE')
- npmpath = os.path.join(bindir, 'npm')
+ bb.error("Nothing provides 'nodejs-native' which is required for the build")
+ bb.note("You will likely need to add a layer that provides nodejs")
+ sys.exit(14)
+
+ bindir = d.getVar("STAGING_BINDIR_NATIVE")
+ npmpath = os.path.join(bindir, "npm")
+
if not os.path.exists(npmpath):
- tinfoil.build_targets('nodejs-native', 'addto_recipe_sysroot')
+ TINFOIL.build_targets("nodejs-native", "addto_recipe_sysroot")
+
if not os.path.exists(npmpath):
- logger.error('npm required to process specified source, but nodejs-native did not seem to populate it')
- return None
+ bb.error("Failed to add 'npm' to sysroot")
+ sys.exit(14)
+
return bindir
- def _handle_license(self, data):
- '''
- Handle the license value from an npm package.json file
- '''
- license = None
- if 'license' in data:
- license = data['license']
- if isinstance(license, dict):
- license = license.get('type', None)
- if license:
- if 'OR' in license:
- license = license.replace('OR', '|')
- license = license.replace('AND', '&')
- license = license.replace(' ', '_')
- if not license[0] == '(':
- license = '(' + license + ')'
- else:
- license = license.replace('AND', '&')
- if license[0] == '(':
- license = license[1:]
- if license[-1] == ')':
- license = license[:-1]
- license = license.replace('MIT/X11', 'MIT')
- license = license.replace('Public Domain', 'PD')
- license = license.replace('SEE LICENSE IN EULA',
- 'SEE-LICENSE-IN-EULA')
- return license
-
- def _shrinkwrap(self, srctree, localfilesdir, extravalues, lines_before, d):
- try:
- runenv = dict(os.environ, PATH=d.getVar('PATH'))
- bb.process.run('npm shrinkwrap', cwd=srctree, stderr=subprocess.STDOUT, env=runenv, shell=True)
- except bb.process.ExecutionError as e:
- logger.warning('npm shrinkwrap failed:\n%s' % e.stdout)
- return
-
- tmpfile = os.path.join(localfilesdir, 'npm-shrinkwrap.json')
- shutil.move(os.path.join(srctree, 'npm-shrinkwrap.json'), tmpfile)
- extravalues.setdefault('extrafiles', {})
- extravalues['extrafiles']['npm-shrinkwrap.json'] = tmpfile
- lines_before.append('NPM_SHRINKWRAP := "${THISDIR}/${PN}/npm-shrinkwrap.json"')
-
- def _lockdown(self, srctree, localfilesdir, extravalues, lines_before, d):
- runenv = dict(os.environ, PATH=d.getVar('PATH'))
- if not NpmRecipeHandler.lockdownpath:
- NpmRecipeHandler.lockdownpath = tempfile.mkdtemp('recipetool-npm-lockdown')
- bb.process.run('npm install lockdown --prefix %s' % NpmRecipeHandler.lockdownpath,
- cwd=srctree, stderr=subprocess.STDOUT, env=runenv, shell=True)
- relockbin = os.path.join(NpmRecipeHandler.lockdownpath, 'node_modules', 'lockdown', 'relock.js')
- if not os.path.exists(relockbin):
- logger.warning('Could not find relock.js within lockdown directory; skipping lockdown')
- return
- try:
- bb.process.run('node %s' % relockbin, cwd=srctree, stderr=subprocess.STDOUT, env=runenv, shell=True)
- except bb.process.ExecutionError as e:
- logger.warning('lockdown-relock failed:\n%s' % e.stdout)
- return
-
- tmpfile = os.path.join(localfilesdir, 'lockdown.json')
- shutil.move(os.path.join(srctree, 'lockdown.json'), tmpfile)
- extravalues.setdefault('extrafiles', {})
- extravalues['extrafiles']['lockdown.json'] = tmpfile
- lines_before.append('NPM_LOCKDOWN := "${THISDIR}/${PN}/lockdown.json"')
-
- def _handle_dependencies(self, d, deps, optdeps, devdeps, lines_before, srctree):
- import scriptutils
- # If this isn't a single module we need to get the dependencies
- # and add them to SRC_URI
- def varfunc(varname, origvalue, op, newlines):
- if varname == 'SRC_URI':
- if not origvalue.startswith('npm://'):
- src_uri = origvalue.split()
- deplist = {}
- for dep, depver in optdeps.items():
- depdata = self.get_npm_data(dep, depver, d)
- if self.check_npm_optional_dependency(depdata):
- deplist[dep] = depdata
- for dep, depver in devdeps.items():
- depdata = self.get_npm_data(dep, depver, d)
- if self.check_npm_optional_dependency(depdata):
- deplist[dep] = depdata
- for dep, depver in deps.items():
- depdata = self.get_npm_data(dep, depver, d)
- deplist[dep] = depdata
-
- extra_urls = []
- for dep, depdata in deplist.items():
- version = depdata.get('version', None)
- if version:
- url = 'npm://registry.npmjs.org;name=%s;version=%s;subdir=node_modules/%s' % (dep, version, dep)
- extra_urls.append(url)
- if extra_urls:
- scriptutils.fetch_url(tinfoil, ' '.join(extra_urls), None, srctree, logger)
- src_uri.extend(extra_urls)
- return src_uri, None, -1, True
- return origvalue, None, 0, True
- updated, newlines = bb.utils.edit_metadata(lines_before, ['SRC_URI'], varfunc)
- if updated:
- del lines_before[:]
- for line in newlines:
- # Hack to avoid newlines that edit_metadata inserts
- if line.endswith('\n'):
- line = line[:-1]
- lines_before.append(line)
- return updated
+ @staticmethod
+ def _npm_global_configs(dev):
+ """Get the npm global configuration"""
+ configs = []
+
+ if dev:
+ configs.append(("also", "development"))
+ else:
+ configs.append(("only", "production"))
+
+ configs.append(("save", "false"))
+ configs.append(("package-lock", "false"))
+ configs.append(("shrinkwrap", "false"))
+ return configs
+
+ def _run_npm_install(self, d, srctree, registry, dev):
+ """Run the 'npm install' command without building the addons"""
+ configs = self._npm_global_configs(dev)
+ configs.append(("ignore-scripts", "true"))
+
+ if registry:
+ configs.append(("registry", registry))
+
+ bb.utils.remove(os.path.join(srctree, "node_modules"), recurse=True)
+
+ env = NpmEnvironment(d, configs=configs)
+ env.run("npm install", workdir=srctree)
+
+ def _generate_shrinkwrap(self, d, srctree, dev):
+ """Check and generate the 'npm-shrinkwrap.json' file if needed"""
+ configs = self._npm_global_configs(dev)
+
+ env = NpmEnvironment(d, configs=configs)
+ env.run("npm shrinkwrap", workdir=srctree)
+
+ return os.path.join(srctree, "npm-shrinkwrap.json")
+
+ def _handle_licenses(self, srctree, shrinkwrap_file, dev):
+ """Return the extra license files and the list of packages"""
+ licfiles = []
+ packages = {}
+
+ def _licfiles_append(licfile):
+ """Append 'licfile' to the license files list"""
+ licfilepath = os.path.join(srctree, licfile)
+ licmd5 = bb.utils.md5_file(licfilepath)
+ licfiles.append("file://%s;md5=%s" % (licfile, licmd5))
+
+ # Handle the parent package
+ _licfiles_append("package.json")
+ packages["${PN}"] = ""
+
+ # Handle the dependencies
+ def _handle_dependency(name, params, deptree):
+ suffix = "-".join([self._npm_name(dep) for dep in deptree])
+ destdirs = [os.path.join("node_modules", dep) for dep in deptree]
+ destdir = os.path.join(*destdirs)
+ _licfiles_append(os.path.join(destdir, "package.json"))
+ packages["${PN}-" + suffix] = destdir
+
+ with open(shrinkwrap_file, "r") as f:
+ shrinkwrap = json.load(f)
+
+ foreach_dependencies(shrinkwrap, _handle_dependency, dev)
+
+ return licfiles, packages
def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
- import bb.utils
- import oe.package
- from collections import OrderedDict
+ """Handle the npm recipe creation"""
- if 'buildsystem' in handled:
+ if "buildsystem" in handled:
return False
- def read_package_json(fn):
- with open(fn, 'r', errors='surrogateescape') as f:
- return json.loads(f.read())
+ files = RecipeHandler.checkfiles(srctree, ["package.json"])
- files = RecipeHandler.checkfiles(srctree, ['package.json'])
- if files:
- d = bb.data.createCopy(tinfoil.config_data)
- npm_bindir = self._ensure_npm()
- if not npm_bindir:
- sys.exit(14)
- d.prependVar('PATH', '%s:' % npm_bindir)
-
- data = read_package_json(files[0])
- if 'name' in data and 'version' in data:
- extravalues['PN'] = data['name']
- extravalues['PV'] = data['version']
- classes.append('npm')
- handled.append('buildsystem')
- if 'description' in data:
- extravalues['SUMMARY'] = data['description']
- if 'homepage' in data:
- extravalues['HOMEPAGE'] = data['homepage']
-
- fetchdev = extravalues['fetchdev'] or None
- deps, optdeps, devdeps = self.get_npm_package_dependencies(data, fetchdev)
- self._handle_dependencies(d, deps, optdeps, devdeps, lines_before, srctree)
-
- # Shrinkwrap
- localfilesdir = tempfile.mkdtemp(prefix='recipetool-npm')
- self._shrinkwrap(srctree, localfilesdir, extravalues, lines_before, d)
-
- # Lockdown
- self._lockdown(srctree, localfilesdir, extravalues, lines_before, d)
-
- # Split each npm module out to is own package
- npmpackages = oe.package.npm_split_package_dirs(srctree)
- licvalues = None
- for item in handled:
- if isinstance(item, tuple):
- if item[0] == 'license':
- licvalues = item[1]
- break
- if not licvalues:
- licvalues = handle_license_vars(srctree, lines_before, handled, extravalues, d)
- if licvalues:
- # Augment the license list with information we have in the packages
- licenses = {}
- license = self._handle_license(data)
- if license:
- licenses['${PN}'] = license
- for pkgname, pkgitem in npmpackages.items():
- _, pdata = pkgitem
- license = self._handle_license(pdata)
- if license:
- licenses[pkgname] = license
- # Now write out the package-specific license values
- # We need to strip out the json data dicts for this since split_pkg_licenses
- # isn't expecting it
- packages = OrderedDict((x,y[0]) for x,y in npmpackages.items())
- packages['${PN}'] = ''
- pkglicenses = split_pkg_licenses(licvalues, packages, lines_after, licenses)
- all_licenses = list(set([item.replace('_', ' ') for pkglicense in pkglicenses.values() for item in pkglicense]))
- if '&' in all_licenses:
- all_licenses.remove('&')
- extravalues['LICENSE'] = ' & '.join(all_licenses)
-
- # Need to move S setting after inherit npm
- for i, line in enumerate(lines_before):
- if line.startswith('S ='):
- lines_before.pop(i)
- lines_after.insert(0, '# Must be set after inherit npm since that itself sets S')
- lines_after.insert(1, line)
- break
-
- return True
-
- return False
-
- # FIXME this is duplicated from lib/bb/fetch2/npm.py
- def _parse_view(self, output):
- '''
- Parse the output of npm view --json; the last JSON result
- is assumed to be the one that we're interested in.
- '''
- pdata = None
- outdeps = {}
- datalines = []
- bracelevel = 0
- for line in output.splitlines():
- if bracelevel:
- datalines.append(line)
- elif '{' in line:
- datalines = []
- datalines.append(line)
- bracelevel = bracelevel + line.count('{') - line.count('}')
- if datalines:
- pdata = json.loads('\n'.join(datalines))
- return pdata
-
- # FIXME this is effectively duplicated from lib/bb/fetch2/npm.py
- # (split out from _getdependencies())
- def get_npm_data(self, pkg, version, d):
- import bb.fetch2
- pkgfullname = pkg
- if version != '*' and not '/' in version:
- pkgfullname += "@'%s'" % version
- logger.debug(2, "Calling getdeps on %s" % pkg)
- runenv = dict(os.environ, PATH=d.getVar('PATH'))
- fetchcmd = "npm view %s --json" % pkgfullname
- output, _ = bb.process.run(fetchcmd, stderr=subprocess.STDOUT, env=runenv, shell=True)
- data = self._parse_view(output)
- return data
-
- # FIXME this is effectively duplicated from lib/bb/fetch2/npm.py
- # (split out from _getdependencies())
- def get_npm_package_dependencies(self, pdata, fetchdev):
- dependencies = pdata.get('dependencies', {})
- optionalDependencies = pdata.get('optionalDependencies', {})
- dependencies.update(optionalDependencies)
- if fetchdev:
- devDependencies = pdata.get('devDependencies', {})
- dependencies.update(devDependencies)
- else:
- devDependencies = {}
- depsfound = {}
- optdepsfound = {}
- devdepsfound = {}
- for dep in dependencies:
- if dep in optionalDependencies:
- optdepsfound[dep] = dependencies[dep]
- elif dep in devDependencies:
- devdepsfound[dep] = dependencies[dep]
- else:
- depsfound[dep] = dependencies[dep]
- return depsfound, optdepsfound, devdepsfound
-
- # FIXME this is effectively duplicated from lib/bb/fetch2/npm.py
- # (split out from _getdependencies())
- def check_npm_optional_dependency(self, pdata):
- pkg_os = pdata.get('os', None)
- if pkg_os:
- if not isinstance(pkg_os, list):
- pkg_os = [pkg_os]
- blacklist = False
- for item in pkg_os:
- if item.startswith('!'):
- blacklist = True
- break
- if (not blacklist and 'linux' not in pkg_os) or '!linux' in pkg_os:
- pkg = pdata.get('name', 'Unnamed package')
- logger.debug(2, "Skipping %s since it's incompatible with Linux" % pkg)
- return False
- return True
+ if not files:
+ return False
+ with open(files[0], "r") as f:
+ data = json.load(f)
+
+ if "name" not in data or "version" not in data:
+ return False
+
+ extravalues["PN"] = self._npm_name(data["name"])
+ extravalues["PV"] = data["version"]
+
+ if "description" in data:
+ extravalues["SUMMARY"] = data["description"]
+
+ if "homepage" in data:
+ extravalues["HOMEPAGE"] = data["homepage"]
+
+ dev = bb.utils.to_boolean(str(extravalues.get("NPM_INSTALL_DEV", "0")), False)
+ registry = self._get_registry(lines_before)
+
+ bb.note("Checking if npm is available ...")
+ # The native npm is used here (and not the host one) to ensure that the
+ # npm version is high enough to ensure an efficient dependency tree
+        # resolution and avoid issues with the shrinkwrap file format.
+ # Moreover the native npm is mandatory for the build.
+ bindir = self._ensure_npm()
+
+ d = bb.data.createCopy(TINFOIL.config_data)
+ d.prependVar("PATH", bindir + ":")
+ d.setVar("S", srctree)
+
+ bb.note("Generating shrinkwrap file ...")
+ # To generate the shrinkwrap file the dependencies have to be installed
+ # first. During the generation process some files may be updated /
+ # deleted. By default devtool tracks the diffs in the srctree and raises
+ # errors when finishing the recipe if some diffs are found.
+ git_exclude_file = os.path.join(srctree, ".git", "info", "exclude")
+ if os.path.exists(git_exclude_file):
+ with open(git_exclude_file, "r+") as f:
+ lines = f.readlines()
+ for line in ["/node_modules/", "/npm-shrinkwrap.json"]:
+ if line not in lines:
+ f.write(line + "\n")
+
+ lock_file = os.path.join(srctree, "package-lock.json")
+ lock_copy = lock_file + ".copy"
+ if os.path.exists(lock_file):
+ bb.utils.copyfile(lock_file, lock_copy)
+
+ self._run_npm_install(d, srctree, registry, dev)
+ shrinkwrap_file = self._generate_shrinkwrap(d, srctree, dev)
+
+ if os.path.exists(lock_copy):
+ bb.utils.movefile(lock_copy, lock_file)
+
+ # Add the shrinkwrap file as 'extrafiles'
+ shrinkwrap_copy = shrinkwrap_file + ".copy"
+ bb.utils.copyfile(shrinkwrap_file, shrinkwrap_copy)
+ extravalues.setdefault("extrafiles", {})
+ extravalues["extrafiles"]["npm-shrinkwrap.json"] = shrinkwrap_copy
+
+ url_local = "npmsw://%s" % shrinkwrap_file
+ url_recipe= "npmsw://${THISDIR}/${BPN}/npm-shrinkwrap.json"
+
+ if dev:
+ url_local += ";dev=1"
+ url_recipe += ";dev=1"
+
+ # Add the npmsw url in the SRC_URI of the generated recipe
+ def _handle_srcuri(varname, origvalue, op, newlines):
+ """Update the version value and add the 'npmsw://' url"""
+ value = origvalue.replace("version=" + data["version"], "version=${PV}")
+ value = value.replace("version=latest", "version=${PV}")
+ values = [line.strip() for line in value.strip('\n').splitlines()]
+ values.append(url_recipe)
+ return values, None, 4, False
+
+ (_, newlines) = bb.utils.edit_metadata(lines_before, ["SRC_URI"], _handle_srcuri)
+ lines_before[:] = [line.rstrip('\n') for line in newlines]
+
+ # In order to generate correct licence checksums in the recipe the
+ # dependencies have to be fetched again using the npmsw url
+ bb.note("Fetching npm dependencies ...")
+ bb.utils.remove(os.path.join(srctree, "node_modules"), recurse=True)
+ fetcher = bb.fetch2.Fetch([url_local], d)
+ fetcher.download()
+ fetcher.unpack(srctree)
+
+ bb.note("Handling licences ...")
+ (licfiles, packages) = self._handle_licenses(srctree, shrinkwrap_file, dev)
+ extravalues["LIC_FILES_CHKSUM"] = licfiles
+ split_pkg_licenses(guess_license(srctree, d), packages, lines_after, [])
+
+ classes.append("npm")
+ handled.append("buildsystem")
+
+ return True
def register_recipe_handlers(handlers):
+ """Register the npm handler"""
handlers.append((NpmRecipeHandler(), 60))
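
The rewritten handler derives Yocto-friendly package names with _npm_name(). The same normalisation, extracted so it can be run on a couple of example names (the names are illustrative):

import re

def npm_name(name):
    # Same steps as _npm_name() in the hunk: a scoped '/' becomes '-', the
    # name is lower-cased, anything outside [-a-z0-9] is dropped, and leading
    # or trailing dashes are stripped.
    name = re.sub("/", "-", name)
    name = name.lower()
    name = re.sub(r"[^\-a-z0-9]", "", name)
    return name.strip("-")

print(npm_name("@angular/core"))  # angular-core
print(npm_name("iconv-lite"))     # iconv-lite

These names become the per-dependency package suffixes ("${PN}-" + suffix) assembled in _handle_licenses() above.
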
diff --git a/external/poky/scripts/lib/recipetool/edit.py b/external/poky/scripts/lib/recipetool/edit.py
index c4789a99..d5b980a1 100644
--- a/external/poky/scripts/lib/recipetool/edit.py
+++ b/external/poky/scripts/lib/recipetool/edit.py
@@ -6,18 +6,8 @@
#
# Copyright (C) 2018 Mentor Graphics Corporation
#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
+# SPDX-License-Identifier: GPL-2.0-only
#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import argparse
import errno
@@ -44,7 +34,7 @@ def edit(args):
recipe_path = tinfoil.get_recipe_file(args.target)
appends = tinfoil.get_file_appends(recipe_path)
- return scriptutils.run_editor([recipe_path] + appends, logger)
+ return scriptutils.run_editor([recipe_path] + list(appends), logger)
def register_commands(subparsers):
diff --git a/external/poky/scripts/lib/recipetool/newappend.py b/external/poky/scripts/lib/recipetool/newappend.py
index 76707b4c..08e2474d 100644
--- a/external/poky/scripts/lib/recipetool/newappend.py
+++ b/external/poky/scripts/lib/recipetool/newappend.py
@@ -7,18 +7,8 @@
#
# Copyright (C) 2015 Christopher Larson <kergoth@gmail.com>
#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
+# SPDX-License-Identifier: GPL-2.0-only
#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import argparse
import errno
diff --git a/external/poky/scripts/lib/recipetool/setvar.py b/external/poky/scripts/lib/recipetool/setvar.py
index 9de315a0..f8e2ee75 100644
--- a/external/poky/scripts/lib/recipetool/setvar.py
+++ b/external/poky/scripts/lib/recipetool/setvar.py
@@ -2,18 +2,8 @@
#
# Copyright (C) 2015 Intel Corporation
#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
+# SPDX-License-Identifier: GPL-2.0-only
#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import sys
import os
diff --git a/external/poky/scripts/lib/resulttool/log.py b/external/poky/scripts/lib/resulttool/log.py
index 49816357..eb3927ec 100644
--- a/external/poky/scripts/lib/resulttool/log.py
+++ b/external/poky/scripts/lib/resulttool/log.py
@@ -2,27 +2,29 @@
#
# Copyright (c) 2019 Garmin International
#
-# This program is free software; you can redistribute it and/or modify it
-# under the terms and conditions of the GNU General Public License,
-# version 2, as published by the Free Software Foundation.
-#
-# This program is distributed in the hope it will be useful, but WITHOUT
-# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
-# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
-# more details.
+# SPDX-License-Identifier: GPL-2.0-only
#
import os
import resulttool.resultutils as resultutils
def show_ptest(result, ptest, logger):
- if 'ptestresult.sections' in result:
- if ptest in result['ptestresult.sections'] and 'log' in result['ptestresult.sections'][ptest]:
- print(result['ptestresult.sections'][ptest]['log'])
- return 0
+ logdata = resultutils.ptestresult_get_log(result, ptest)
+ if logdata is not None:
+ print(logdata)
+ return 0
- print("ptest '%s' not found" % ptest)
+ print("ptest '%s' log not found" % ptest)
return 1
+def show_reproducible(result, reproducible, logger):
+ try:
+ print(result['reproducible'][reproducible]['diffoscope.text'])
+ return 0
+
+ except KeyError:
+ print("reproducible '%s' not found" % reproducible)
+ return 1
+
def log(args, logger):
results = resultutils.load_resultsdata(args.source)
@@ -33,31 +35,49 @@ def log(args, logger):
for _, run_name, _, r in resultutils.test_run_results(results):
if args.dump_ptest:
- if 'ptestresult.sections' in r:
- for name, ptest in r['ptestresult.sections'].items():
- if 'log' in ptest:
- dest_dir = args.dump_ptest
- if args.prepend_run:
- dest_dir = os.path.join(dest_dir, run_name)
+ for sectname in ['ptestresult.sections', 'ltpposixresult.sections', 'ltpresult.sections']:
+ if sectname in r:
+ for name, ptest in r[sectname].items():
+ logdata = resultutils.generic_get_log(sectname, r, name)
+ if logdata is not None:
+ dest_dir = args.dump_ptest
+ if args.prepend_run:
+ dest_dir = os.path.join(dest_dir, run_name)
+ if not sectname.startswith("ptest"):
+ dest_dir = os.path.join(dest_dir, sectname.split(".")[0])
- os.makedirs(dest_dir, exist_ok=True)
+ os.makedirs(dest_dir, exist_ok=True)
+ dest = os.path.join(dest_dir, '%s.log' % name)
+ print(dest)
+ with open(dest, 'w') as f:
+ f.write(logdata)
- dest = os.path.join(dest_dir, '%s.log' % name)
- print(dest)
- with open(dest, 'w') as f:
- f.write(ptest['log'])
+ if args.raw_ptest:
+ found = False
+ for sectname in ['ptestresult.rawlogs', 'ltpposixresult.rawlogs', 'ltpresult.rawlogs']:
+ rawlog = resultutils.generic_get_rawlogs(sectname, r)
+ if rawlog is not None:
+ print(rawlog)
+ found = True
+ if not found:
+ print('Raw ptest logs not found')
+ return 1
- if args.raw:
- if 'ptestresult.rawlogs' in r:
- print(r['ptestresult.rawlogs']['log'])
+ if args.raw_reproducible:
+ if 'reproducible.rawlogs' in r:
+ print(r['reproducible.rawlogs']['log'])
else:
- print('Raw logs not found')
+ print('Raw reproducible logs not found')
return 1
for ptest in args.ptest:
if not show_ptest(r, ptest, logger):
return 1
+ for reproducible in args.reproducible:
+ if not show_reproducible(r, reproducible, logger):
+ return 1
+
def register_commands(subparsers):
"""Register subcommands from this plugin"""
parser = subparsers.add_parser('log', help='show logs',
@@ -70,9 +90,15 @@ def register_commands(subparsers):
help='show logs for a ptest')
parser.add_argument('--dump-ptest', metavar='DIR',
help='Dump all ptest log files to the specified directory.')
+ parser.add_argument('--reproducible', action='append', default=[],
+ help='show logs for a reproducible test')
parser.add_argument('--prepend-run', action='store_true',
help='''Dump ptest results to a subdirectory named after the test run when using --dump-ptest.
Required if more than one test run is present in the result file''')
parser.add_argument('--raw', action='store_true',
- help='show raw logs')
+ help='show raw (ptest) logs. Deprecated. Alias for "--raw-ptest"', dest='raw_ptest')
+ parser.add_argument('--raw-ptest', action='store_true',
+ help='show raw ptest log')
+ parser.add_argument('--raw-reproducible', action='store_true',
+ help='show raw reproducible build logs')
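
show_ptest() now delegates to resultutils.ptestresult_get_log(), and the --dump-ptest path uses resultutils.generic_get_log(); both helpers live in resulttool/resultutils.py, which this excerpt does not include. A simplified sketch of the lookup they appear to perform (the real helpers may additionally handle compressed or encoded log data):

def generic_get_log(sectname, result, section):
    # Simplified sketch: fetch the plain 'log' entry for one section of a
    # results dictionary, or None if it is missing.
    sections = result.get(sectname, {})
    if section not in sections:
        return None
    return sections[section].get('log')

def ptestresult_get_log(result, section):
    return generic_get_log('ptestresult.sections', result, section)

# Example against a minimal results dictionary
result = {'ptestresult.sections': {'glib-2.0': {'log': 'PASS: gvariant'}}}
print(ptestresult_get_log(result, 'glib-2.0'))
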
diff --git a/external/poky/scripts/lib/resulttool/manualexecution.py b/external/poky/scripts/lib/resulttool/manualexecution.py
index dc368f36..ecb27c59 100755
--- a/external/poky/scripts/lib/resulttool/manualexecution.py
+++ b/external/poky/scripts/lib/resulttool/manualexecution.py
@@ -2,15 +2,9 @@
#
# Copyright (c) 2018, Intel Corporation.
#
-# This program is free software; you can redistribute it and/or modify it
-# under the terms and conditions of the GNU General Public License,
-# version 2, as published by the Free Software Foundation.
-#
-# This program is distributed in the hope it will be useful, but WITHOUT
-# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
-# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
-# more details.
+# SPDX-License-Identifier: GPL-2.0-only
#
+
import argparse
import json
import os
@@ -187,11 +181,38 @@ class ManualTestRunner(object):
write_json_file(config_options_file, config_options)
logger.info('Configuration option file created at %s' % config_options_file)
+ def make_testcase_config_file(self, logger, case_file, testcase_config_file):
+ if testcase_config_file:
+ if os.path.exists(testcase_config_file):
+ print('\nTest configuration file with name %s already exists. Please provide a unique file name' % (testcase_config_file))
+ return 0
+
+ if not testcase_config_file:
+ testcase_config_file = os.path.join(self._get_write_dir(), "testconfig_new.json")
+
+ testcase_config = {}
+ cases = load_json_file(case_file)
+ new_test_module = self._get_test_module(case_file)
+ new_testcase_config = {}
+ new_testcase_config['testcases'] = []
+
+ print('\nAdd testcases for this configuration file:')
+ for case in cases:
+ print('\n' + case['test']['@alias'])
+ add_tc_config = self._get_true_false_input('\nDo you want to add this test case to test configuration : (Y)es/(N)o\n')
+ if add_tc_config:
+ new_testcase_config['testcases'].append(case['test']['@alias'])
+ write_json_file(testcase_config_file, new_testcase_config)
+ logger.info('Testcase Configuration file created at %s' % testcase_config_file)
+
def manualexecution(args, logger):
testrunner = ManualTestRunner()
if args.make_config_options_file:
testrunner.make_config_option_file(logger, args.file, args.config_options_file)
return 0
+ if args.make_testcase_config_file:
+ testrunner.make_testcase_config_file(logger, args.file, args.testcase_config_file)
+ return 0
configurations, result_id, write_dir, test_results = testrunner.run_test(args.file, args.config_options_file, args.testcase_config_file)
resultjsonhelper = OETestResultJSONHelper()
resultjsonhelper.dump_testresult_file(write_dir, configurations, result_id, test_results)
@@ -209,4 +230,6 @@ def register_commands(subparsers):
parser_build.add_argument('-m', '--make-config-options-file', action='store_true',
help='make the configuration options file based on provided inputs')
parser_build.add_argument('-t', '--testcase-config-file', default='',
-                              help='the testcase configuration file to enable user to run a selected set of test case')
\ No newline at end of file
+                              help='the testcase configuration file used to run a selected set of test cases or to create a new testcase configuration file')
+ parser_build.add_argument('-d', '--make-testcase-config-file', action='store_true',
+                              help='make the testcase configuration file to run a set of test cases based on user selection')
\ No newline at end of file
diff --git a/external/poky/scripts/lib/resulttool/merge.py b/external/poky/scripts/lib/resulttool/merge.py
index 7159463f..18b4825a 100644
--- a/external/poky/scripts/lib/resulttool/merge.py
+++ b/external/poky/scripts/lib/resulttool/merge.py
@@ -3,30 +3,31 @@
# Copyright (c) 2019, Intel Corporation.
# Copyright (c) 2019, Linux Foundation
#
-# This program is free software; you can redistribute it and/or modify it
-# under the terms and conditions of the GNU General Public License,
-# version 2, as published by the Free Software Foundation.
-#
-# This program is distributed in the hope it will be useful, but WITHOUT
-# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
-# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
-# more details.
+# SPDX-License-Identifier: GPL-2.0-only
#
+
import os
import json
import resulttool.resultutils as resultutils
def merge(args, logger):
+ configvars = {}
+ if not args.not_add_testseries:
+ configvars = resultutils.extra_configvars.copy()
+ if args.executed_by:
+ configvars['EXECUTED_BY'] = args.executed_by
if resultutils.is_url(args.target_results) or os.path.isdir(args.target_results):
- results = resultutils.load_resultsdata(args.target_results, configmap=resultutils.store_map)
- resultutils.append_resultsdata(results, args.base_results, configmap=resultutils.store_map)
+ results = resultutils.load_resultsdata(args.target_results, configmap=resultutils.store_map, configvars=configvars)
+ resultutils.append_resultsdata(results, args.base_results, configmap=resultutils.store_map, configvars=configvars)
resultutils.save_resultsdata(results, args.target_results)
else:
- results = resultutils.load_resultsdata(args.base_results, configmap=resultutils.flatten_map)
+ results = resultutils.load_resultsdata(args.base_results, configmap=resultutils.flatten_map, configvars=configvars)
if os.path.exists(args.target_results):
- resultutils.append_resultsdata(results, args.target_results, configmap=resultutils.flatten_map)
+ resultutils.append_resultsdata(results, args.target_results, configmap=resultutils.flatten_map, configvars=configvars)
resultutils.save_resultsdata(results, os.path.dirname(args.target_results), fn=os.path.basename(args.target_results))
+ logger.info('Merged results to %s' % os.path.dirname(args.target_results))
+
return 0
def register_commands(subparsers):
@@ -39,4 +40,7 @@ def register_commands(subparsers):
help='the results file/directory/URL to import')
parser_build.add_argument('target_results',
help='the target file or directory to merge the base_results with')
-
+ parser_build.add_argument('-t', '--not-add-testseries', action='store_true',
+ help='do not add testseries configuration to results')
+ parser_build.add_argument('-x', '--executed-by', default='',
+ help='add executed-by configuration to each result file')
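
The two new merge options only influence the configvars dictionary passed to load_resultsdata() and append_resultsdata(). A minimal sketch of how that dictionary is assembled; the {'TESTSERIES': 'conf'} default standing in for resultutils.extra_configvars is an assumption, since resultutils.py is not shown here:

def build_configvars(not_add_testseries=False, executed_by=''):
    # Mirrors the start of merge(): begin from the shared defaults unless the
    # user opted out of testseries tagging, then record who ran the tests.
    extra_configvars = {'TESTSERIES': 'conf'}  # assumed default
    configvars = {} if not_add_testseries else extra_configvars.copy()
    if executed_by:
        configvars['EXECUTED_BY'] = executed_by
    return configvars

print(build_configvars(executed_by='autobuilder'))
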
diff --git a/external/poky/scripts/lib/resulttool/regression.py b/external/poky/scripts/lib/resulttool/regression.py
index fa90ab1e..9f952951 100644
--- a/external/poky/scripts/lib/resulttool/regression.py
+++ b/external/poky/scripts/lib/resulttool/regression.py
@@ -3,15 +3,9 @@
# Copyright (c) 2019, Intel Corporation.
# Copyright (c) 2019, Linux Foundation
#
-# This program is free software; you can redistribute it and/or modify it
-# under the terms and conditions of the GNU General Public License,
-# version 2, as published by the Free Software Foundation.
-#
-# This program is distributed in the hope it will be useful, but WITHOUT
-# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
-# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
-# more details.
+# SPDX-License-Identifier: GPL-2.0-only
#
+
import resulttool.resultutils as resultutils
import json
diff --git a/external/poky/scripts/lib/resulttool/report.py b/external/poky/scripts/lib/resulttool/report.py
index 8ae42728..f0ca50eb 100644
--- a/external/poky/scripts/lib/resulttool/report.py
+++ b/external/poky/scripts/lib/resulttool/report.py
@@ -3,15 +3,9 @@
# Copyright (c) 2019, Intel Corporation.
# Copyright (c) 2019, Linux Foundation
#
-# This program is free software; you can redistribute it and/or modify it
-# under the terms and conditions of the GNU General Public License,
-# version 2, as published by the Free Software Foundation.
-#
-# This program is distributed in the hope it will be useful, but WITHOUT
-# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
-# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
-# more details.
+# SPDX-License-Identifier: GPL-2.0-only
#
+
import os
import glob
import json
@@ -23,26 +17,37 @@ import oeqa.utils.gitarchive as gitarchive
class ResultsTextReport(object):
def __init__(self):
self.ptests = {}
- self.result_types = {'passed': ['PASSED', 'passed'],
- 'failed': ['FAILED', 'failed', 'ERROR', 'error', 'UNKNOWN'],
- 'skipped': ['SKIPPED', 'skipped']}
+ self.ltptests = {}
+ self.ltpposixtests = {}
+ self.result_types = {'passed': ['PASSED', 'passed', 'PASS', 'XFAIL'],
+ 'failed': ['FAILED', 'failed', 'FAIL', 'ERROR', 'error', 'UNKNOWN', 'XPASS'],
+ 'skipped': ['SKIPPED', 'skipped', 'UNSUPPORTED', 'UNTESTED', 'UNRESOLVED']}
+
+ def handle_ptest_result(self, k, status, result, machine):
+ if machine not in self.ptests:
+ self.ptests[machine] = {}
- def handle_ptest_result(self, k, status, result):
if k == 'ptestresult.sections':
# Ensure tests without any test results still show up on the report
for suite in result['ptestresult.sections']:
- if suite not in self.ptests:
- self.ptests[suite] = {'passed': 0, 'failed': 0, 'skipped': 0, 'duration' : '-', 'failed_testcases': []}
+ if suite not in self.ptests[machine]:
+ self.ptests[machine][suite] = {
+ 'passed': 0, 'failed': 0, 'skipped': 0, 'duration' : '-',
+ 'failed_testcases': [], "testcases": set(),
+ }
if 'duration' in result['ptestresult.sections'][suite]:
- self.ptests[suite]['duration'] = result['ptestresult.sections'][suite]['duration']
+ self.ptests[machine][suite]['duration'] = result['ptestresult.sections'][suite]['duration']
if 'timeout' in result['ptestresult.sections'][suite]:
- self.ptests[suite]['duration'] += " T"
- return
+ self.ptests[machine][suite]['duration'] += " T"
+ return True
+
+ # process test result
try:
_, suite, test = k.split(".", 2)
except ValueError:
- return
+ return True
+
# Handle 'glib-2.0'
if 'ptestresult.sections' in result and suite not in result['ptestresult.sections']:
try:
@@ -51,24 +56,105 @@ class ResultsTextReport(object):
suite = suite + "." + suite1
except ValueError:
pass
- if suite not in self.ptests:
- self.ptests[suite] = {'passed': 0, 'failed': 0, 'skipped': 0, 'duration' : '-', 'failed_testcases': []}
+
+ if suite not in self.ptests[machine]:
+ self.ptests[machine][suite] = {
+ 'passed': 0, 'failed': 0, 'skipped': 0, 'duration' : '-',
+ 'failed_testcases': [], "testcases": set(),
+ }
+
+ # do not process duplicate results
+ if test in self.ptests[machine][suite]["testcases"]:
+ print("Warning: duplicate ptest result '{}.{}' for {}".format(suite, test, machine))
+ return False
+
for tk in self.result_types:
if status in self.result_types[tk]:
- self.ptests[suite][tk] += 1
+ self.ptests[machine][suite][tk] += 1
+ self.ptests[machine][suite]["testcases"].add(test)
+ return True
+
+ def handle_ltptest_result(self, k, status, result, machine):
+ if machine not in self.ltptests:
+ self.ltptests[machine] = {}
- def get_aggregated_test_result(self, logger, testresult):
+ if k == 'ltpresult.sections':
+ # Ensure tests without any test results still show up on the report
+ for suite in result['ltpresult.sections']:
+ if suite not in self.ltptests[machine]:
+ self.ltptests[machine][suite] = {'passed': 0, 'failed': 0, 'skipped': 0, 'duration' : '-', 'failed_testcases': []}
+ if 'duration' in result['ltpresult.sections'][suite]:
+ self.ltptests[machine][suite]['duration'] = result['ltpresult.sections'][suite]['duration']
+ if 'timeout' in result['ltpresult.sections'][suite]:
+ self.ltptests[machine][suite]['duration'] += " T"
+ return
+ try:
+ _, suite, test = k.split(".", 2)
+ except ValueError:
+ return
+ # Handle 'glib-2.0'
+ if 'ltpresult.sections' in result and suite not in result['ltpresult.sections']:
+ try:
+ _, suite, suite1, test = k.split(".", 3)
+ if suite + "." + suite1 in result['ltpresult.sections']:
+ suite = suite + "." + suite1
+ except ValueError:
+ pass
+ if suite not in self.ltptests[machine]:
+ self.ltptests[machine][suite] = {'passed': 0, 'failed': 0, 'skipped': 0, 'duration' : '-', 'failed_testcases': []}
+ for tk in self.result_types:
+ if status in self.result_types[tk]:
+ self.ltptests[machine][suite][tk] += 1
+
+ def handle_ltpposixtest_result(self, k, status, result, machine):
+ if machine not in self.ltpposixtests:
+ self.ltpposixtests[machine] = {}
+
+ if k == 'ltpposixresult.sections':
+ # Ensure tests without any test results still show up on the report
+ for suite in result['ltpposixresult.sections']:
+ if suite not in self.ltpposixtests[machine]:
+ self.ltpposixtests[machine][suite] = {'passed': 0, 'failed': 0, 'skipped': 0, 'duration' : '-', 'failed_testcases': []}
+ if 'duration' in result['ltpposixresult.sections'][suite]:
+ self.ltpposixtests[machine][suite]['duration'] = result['ltpposixresult.sections'][suite]['duration']
+ return
+ try:
+ _, suite, test = k.split(".", 2)
+ except ValueError:
+ return
+ # Handle 'glib-2.0'
+ if 'ltpposixresult.sections' in result and suite not in result['ltpposixresult.sections']:
+ try:
+ _, suite, suite1, test = k.split(".", 3)
+ if suite + "." + suite1 in result['ltpposixresult.sections']:
+ suite = suite + "." + suite1
+ except ValueError:
+ pass
+ if suite not in self.ltpposixtests[machine]:
+ self.ltpposixtests[machine][suite] = {'passed': 0, 'failed': 0, 'skipped': 0, 'duration' : '-', 'failed_testcases': []}
+ for tk in self.result_types:
+ if status in self.result_types[tk]:
+ self.ltpposixtests[machine][suite][tk] += 1
+
+ def get_aggregated_test_result(self, logger, testresult, machine):
test_count_report = {'passed': 0, 'failed': 0, 'skipped': 0, 'failed_testcases': []}
result = testresult.get('result', [])
for k in result:
test_status = result[k].get('status', [])
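+ # ptest/LTP results are additionally tallied per machine by the handlers
+ # below; a False return from the ptest handler marks a duplicate entry
+ # that must not be counted again.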
+ if k.startswith("ptestresult."):
+ if not self.handle_ptest_result(k, test_status, result, machine):
+ continue
+ elif k.startswith("ltpresult."):
+ self.handle_ltptest_result(k, test_status, result, machine)
+ elif k.startswith("ltpposixresult."):
+ self.handle_ltpposixtest_result(k, test_status, result, machine)
+
+ # process result if it was not skipped by a handler
for tk in self.result_types:
if test_status in self.result_types[tk]:
test_count_report[tk] += 1
if test_status in self.result_types['failed']:
test_count_report['failed_testcases'].append(k)
- if k.startswith("ptestresult."):
- self.handle_ptest_result(k, test_status, result)
return test_count_report
def print_test_report(self, template_file_name, test_count_reports):
@@ -78,10 +164,10 @@ class ResultsTextReport(object):
env = Environment(loader=file_loader, trim_blocks=True)
template = env.get_template(template_file_name)
havefailed = False
- haveptest = bool(self.ptests)
reportvalues = []
+ machines = []
cols = ['passed', 'failed', 'skipped']
- maxlen = {'passed' : 0, 'failed' : 0, 'skipped' : 0, 'result_id': 0, 'testseries' : 0, 'ptest' : 0 }
+ maxlen = {'passed' : 0, 'failed' : 0, 'skipped' : 0, 'result_id': 0, 'testseries' : 0, 'ptest' : 0 ,'ltptest': 0, 'ltpposixtest': 0}
for line in test_count_reports:
total_tested = line['passed'] + line['failed'] + line['skipped']
vals = {}
@@ -97,18 +183,53 @@ class ResultsTextReport(object):
reportvalues.append(vals)
if line['failed_testcases']:
havefailed = True
- for ptest in self.ptests:
- if len(ptest) > maxlen['ptest']:
- maxlen['ptest'] = len(ptest)
+ if line['machine'] not in machines:
+ machines.append(line['machine'])
+ reporttotalvalues = {}
+ for k in cols:
+ reporttotalvalues[k] = '%s' % sum([line[k] for line in test_count_reports])
+ reporttotalvalues['count'] = '%s' % len(test_count_reports)
+ for (machine, report) in self.ptests.items():
+ for ptest in self.ptests[machine]:
+ if len(ptest) > maxlen['ptest']:
+ maxlen['ptest'] = len(ptest)
+ for (machine, report) in self.ltptests.items():
+ for ltptest in self.ltptests[machine]:
+ if len(ltptest) > maxlen['ltptest']:
+ maxlen['ltptest'] = len(ltptest)
+ for (machine, report) in self.ltpposixtests.items():
+ for ltpposixtest in self.ltpposixtests[machine]:
+ if len(ltpposixtest) > maxlen['ltpposixtest']:
+ maxlen['ltpposixtest'] = len(ltpposixtest)
output = template.render(reportvalues=reportvalues,
+ reporttotalvalues=reporttotalvalues,
havefailed=havefailed,
- haveptest=haveptest,
+ machines=machines,
ptests=self.ptests,
+ ltptests=self.ltptests,
+ ltpposixtests=self.ltpposixtests,
maxlen=maxlen)
print(output)
- def view_test_report(self, logger, source_dir, branch, commit, tag):
+ def view_test_report(self, logger, source_dir, branch, commit, tag, use_regression_map, raw_test, selected_test_case_only):
+ def print_selected_testcase_result(testresults, selected_test_case_only):
+ for testsuite in testresults:
+ for resultid in testresults[testsuite]:
+ result = testresults[testsuite][resultid]['result']
+ test_case_result = result.get(selected_test_case_only, {})
+ if test_case_result.get('status'):
+ print('Found selected test case result for %s from %s' % (selected_test_case_only,
+ resultid))
+ print(test_case_result['status'])
+ else:
+ print('Could not find selected test case result for %s from %s' % (selected_test_case_only,
+ resultid))
+ if test_case_result.get('log'):
+ print(test_case_result['log'])
test_count_reports = []
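+ # The configmap decides which configuration variables form the key used to
+ # group results: the store layout by default, the regression layout with -m.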
+ configmap = resultutils.store_map
+ if use_regression_map:
+ configmap = resultutils.regression_map
if commit:
if tag:
logger.warning("Ignoring --tag as --commit was specified")
@@ -116,16 +237,48 @@ class ResultsTextReport(object):
repo = GitRepo(source_dir)
revs = gitarchive.get_test_revs(logger, repo, tag_name, branch=branch)
rev_index = gitarchive.rev_find(revs, 'commit', commit)
- testresults = resultutils.git_get_result(repo, revs[rev_index][2])
+ testresults = resultutils.git_get_result(repo, revs[rev_index][2], configmap=configmap)
elif tag:
repo = GitRepo(source_dir)
- testresults = resultutils.git_get_result(repo, [tag])
+ testresults = resultutils.git_get_result(repo, [tag], configmap=configmap)
else:
- testresults = resultutils.load_resultsdata(source_dir)
+ testresults = resultutils.load_resultsdata(source_dir, configmap=configmap)
+ if raw_test:
+ raw_results = {}
+ for testsuite in testresults:
+ result = testresults[testsuite].get(raw_test, {})
+ if result:
+ raw_results[testsuite] = {raw_test: result}
+ if raw_results:
+ if selected_test_case_only:
+ print_selected_testcase_result(raw_results, selected_test_case_only)
+ else:
+ print(json.dumps(raw_results, sort_keys=True, indent=4))
+ else:
+ print('Could not find raw test result for %s' % raw_test)
+ return 0
+ if selected_test_case_only:
+ print_selected_testcase_result(testresults, selected_test_case_only)
+ return 0
for testsuite in testresults:
for resultid in testresults[testsuite]:
+ skip = False
result = testresults[testsuite][resultid]
- test_count_report = self.get_aggregated_test_result(logger, result)
+ machine = result['configuration']['MACHINE']
+
+ # Check whether there are already results for these kinds of tests for this machine
+ for key in result['result'].keys():
+ testtype = str(key).split('.')[0]
+ if ((machine in self.ltptests and testtype == "ltpresult" and self.ltptests[machine]) or
+ (machine in self.ltpposixtests and testtype == "ltpposixresult" and self.ltpposixtests[machine])):
+ print("Already have test results for %s on %s, skipping %s" %(str(key).split('.')[0], machine, resultid))
+ skip = True
+ break
+ if skip:
+ break
+
+ test_count_report = self.get_aggregated_test_result(logger, result, machine)
+ test_count_report['machine'] = machine
test_count_report['testseries'] = result['configuration']['TESTSERIES']
test_count_report['result_id'] = resultid
test_count_reports.append(test_count_report)
@@ -133,7 +286,8 @@ class ResultsTextReport(object):
def report(args, logger):
report = ResultsTextReport()
- report.view_test_report(logger, args.source_dir, args.branch, args.commit, args.tag)
+ report.view_test_report(logger, args.source_dir, args.branch, args.commit, args.tag, args.use_regression_map,
+ args.raw_test_only, args.selected_test_case_only)
return 0
def register_commands(subparsers):
@@ -148,3 +302,11 @@ def register_commands(subparsers):
parser_build.add_argument('--commit', help="Revision to report")
parser_build.add_argument('-t', '--tag', default='',
help='source_dir is a git repository, report on the tag specified from that repository')
+ parser_build.add_argument('-m', '--use_regression_map', action='store_true',
+ help='use the "regression_map" instead of the default "store_map" when generating the report')
+ parser_build.add_argument('-r', '--raw_test_only', default='',
+ help='output raw test result only for the user provided test result id')
+ parser_build.add_argument('-s', '--selected_test_case_only', default='',
+ help='output selected test case result for the user provided test case id, if both test '
+ 'result id and test case id are provided then output the selected test case result '
+ 'from the provided test result id')
diff --git a/external/poky/scripts/lib/resulttool/resultutils.py b/external/poky/scripts/lib/resulttool/resultutils.py
index 07dab4cb..8917022d 100644
--- a/external/poky/scripts/lib/resulttool/resultutils.py
+++ b/external/poky/scripts/lib/resulttool/resultutils.py
@@ -3,16 +3,12 @@
# Copyright (c) 2019, Intel Corporation.
# Copyright (c) 2019, Linux Foundation
#
-# This program is free software; you can redistribute it and/or modify it
-# under the terms and conditions of the GNU General Public License,
-# version 2, as published by the Free Software Foundation.
-#
-# This program is distributed in the hope it will be useful, but WITHOUT
-# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
-# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
-# more details.
+# SPDX-License-Identifier: GPL-2.0-only
#
+
import os
+import base64
+import zlib
import json
import scriptpath
import copy
@@ -48,10 +44,12 @@ def is_url(p):
"""
return p.startswith('http://') or p.startswith('https://')
+extra_configvars = {'TESTSERIES': ''}
+
#
# Load the json file and append the results data into the provided results dict
#
-def append_resultsdata(results, f, configmap=store_map):
+def append_resultsdata(results, f, configmap=store_map, configvars=extra_configvars):
if type(f) is str:
if is_url(f):
with urllib.request.urlopen(f) as response:
@@ -67,12 +65,15 @@ def append_resultsdata(results, f, configmap=store_map):
for res in data:
if "configuration" not in data[res] or "result" not in data[res]:
raise ValueError("Test results data without configuration or result section?")
- if "TESTSERIES" not in data[res]["configuration"]:
- data[res]["configuration"]["TESTSERIES"] = testseries
+ for config in configvars:
+ if config == "TESTSERIES" and "TESTSERIES" not in data[res]["configuration"]:
+ data[res]["configuration"]["TESTSERIES"] = testseries
+ continue
+ if config not in data[res]["configuration"]:
+ data[res]["configuration"][config] = configvars[config]
testtype = data[res]["configuration"].get("TEST_TYPE")
if testtype not in configmap:
raise ValueError("Unknown test type %s" % testtype)
- configvars = configmap[testtype]
testpath = "/".join(data[res]["configuration"].get(i) for i in configmap[testtype])
if testpath not in results:
results[testpath] = {}
@@ -82,16 +83,16 @@ def append_resultsdata(results, f, configmap=store_map):
# Walk a directory and find/load results data
# or load directly from a file
#
-def load_resultsdata(source, configmap=store_map):
+def load_resultsdata(source, configmap=store_map, configvars=extra_configvars):
results = {}
if is_url(source) or os.path.isfile(source):
- append_resultsdata(results, source, configmap)
+ append_resultsdata(results, source, configmap, configvars)
return results
for root, dirs, files in os.walk(source):
for name in files:
f = os.path.join(root, name)
if name == "testresults.json":
- append_resultsdata(results, f, configmap)
+ append_resultsdata(results, f, configmap, configvars)
return results
def filter_resultsdata(results, resultid):
@@ -118,6 +119,41 @@ def strip_ptestresults(results):
del newresults[res]['result']['ptestresult.sections'][i]['log']
return newresults
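+
+# Logs may be stored either as a plain string or as a dict whose "compressed"
+# member holds base64-encoded, zlib-compressed text; decode_log() returns the
+# decoded string in either case.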
+def decode_log(logdata):
+ if isinstance(logdata, str):
+ return logdata
+ elif isinstance(logdata, dict):
+ if "compressed" in logdata:
+ data = logdata.get("compressed")
+ data = base64.b64decode(data.encode("utf-8"))
+ data = zlib.decompress(data)
+ return data.decode("utf-8", errors='ignore')
+ return None
+
+def generic_get_log(sectionname, results, section):
+ if sectionname not in results:
+ return None
+ if section not in results[sectionname]:
+ return None
+
+ ptest = results[sectionname][section]
+ if 'log' not in ptest:
+ return None
+ return decode_log(ptest['log'])
+
+def ptestresult_get_log(results, section):
+ return generic_get_log('ptestresult.sections', results, section)
+
+def generic_get_rawlogs(sectname, results):
+ if sectname not in results:
+ return None
+ if 'log' not in results[sectname]:
+ return None
+ return decode_log(results[sectname]['log'])
+
+def ptestresult_get_rawlogs(results):
+ return generic_get_rawlogs('ptestresult.rawlogs', results)
+
def save_resultsdata(results, destdir, fn="testresults.json", ptestjson=False, ptestlogs=False):
for res in results:
if res:
@@ -132,16 +168,19 @@ def save_resultsdata(results, destdir, fn="testresults.json", ptestjson=False, p
f.write(json.dumps(resultsout, sort_keys=True, indent=4))
for res2 in results[res]:
if ptestlogs and 'result' in results[res][res2]:
- if 'ptestresult.rawlogs' in results[res][res2]['result']:
+ seriesresults = results[res][res2]['result']
+ rawlogs = ptestresult_get_rawlogs(seriesresults)
+ if rawlogs is not None:
with open(dst.replace(fn, "ptest-raw.log"), "w+") as f:
- f.write(results[res][res2]['result']['ptestresult.rawlogs']['log'])
- if 'ptestresult.sections' in results[res][res2]['result']:
- for i in results[res][res2]['result']['ptestresult.sections']:
- if 'log' in results[res][res2]['result']['ptestresult.sections'][i]:
+ f.write(rawlogs)
+ if 'ptestresult.sections' in seriesresults:
+ for i in seriesresults['ptestresult.sections']:
+ sectionlog = ptestresult_get_log(seriesresults, i)
+ if sectionlog is not None:
with open(dst.replace(fn, "ptest-%s.log" % i), "w+") as f:
- f.write(results[res][res2]['result']['ptestresult.sections'][i]['log'])
+ f.write(sectionlog)
-def git_get_result(repo, tags):
+def git_get_result(repo, tags, configmap=store_map):
git_objs = []
for tag in tags:
files = repo.run_cmd(['ls-tree', "--name-only", "-r", tag]).splitlines()
@@ -164,7 +203,7 @@ def git_get_result(repo, tags):
# Optimize by reading all data with one git command
results = {}
for obj in parse_json_stream(repo.run_cmd(['show'] + git_objs + ['--'])):
- append_resultsdata(results, obj)
+ append_resultsdata(results, obj, configmap=configmap)
return results
diff --git a/external/poky/scripts/lib/resulttool/store.py b/external/poky/scripts/lib/resulttool/store.py
index acdfbd94..e0951f0a 100644
--- a/external/poky/scripts/lib/resulttool/store.py
+++ b/external/poky/scripts/lib/resulttool/store.py
@@ -3,15 +3,9 @@
# Copyright (c) 2019, Intel Corporation.
# Copyright (c) 2019, Linux Foundation
#
-# This program is free software; you can redistribute it and/or modify it
-# under the terms and conditions of the GNU General Public License,
-# version 2, as published by the Free Software Foundation.
-#
-# This program is distributed in the hope it will be useful, but WITHOUT
-# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
-# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
-# more details.
+# SPDX-License-Identifier: GPL-2.0-only
#
+
import tempfile
import os
import subprocess
@@ -27,16 +21,21 @@ import oeqa.utils.gitarchive as gitarchive
def store(args, logger):
tempdir = tempfile.mkdtemp(prefix='testresults.')
try:
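+ # Optional metadata recorded into each result's configuration section when
+ # not already present (applied by resultutils.append_resultsdata).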
+ configvars = resultutils.extra_configvars.copy()
+ if args.executed_by:
+ configvars['EXECUTED_BY'] = args.executed_by
+ if args.extra_test_env:
+ configvars['EXTRA_TEST_ENV'] = args.extra_test_env
results = {}
logger.info('Reading files from %s' % args.source)
if resultutils.is_url(args.source) or os.path.isfile(args.source):
- resultutils.append_resultsdata(results, args.source)
+ resultutils.append_resultsdata(results, args.source, configvars=configvars)
else:
for root, dirs, files in os.walk(args.source):
for name in files:
f = os.path.join(root, name)
if name == "testresults.json":
- resultutils.append_resultsdata(results, f)
+ resultutils.append_resultsdata(results, f, configvars=configvars)
elif args.all:
dst = f.replace(args.source, tempdir + "/")
os.makedirs(os.path.dirname(dst), exist_ok=True)
@@ -99,4 +98,7 @@ def register_commands(subparsers):
help='include all files, not just testresults.json files')
parser_build.add_argument('-e', '--allow-empty', action='store_true',
help='don\'t error if no results to store are found')
-
+ parser_build.add_argument('-x', '--executed-by', default='',
+ help='add executed-by configuration to each result file')
+ parser_build.add_argument('-t', '--extra-test-env', default='',
+ help='add extra test environment data to each result file configuration')
diff --git a/external/poky/scripts/lib/resulttool/template/test_report_full_text.txt b/external/poky/scripts/lib/resulttool/template/test_report_full_text.txt
index 590f35c7..2efba2ef 100644
--- a/external/poky/scripts/lib/resulttool/template/test_report_full_text.txt
+++ b/external/poky/scripts/lib/resulttool/template/test_report_full_text.txt
@@ -8,22 +8,57 @@ Test Result Status Summary (Counts/Percentages sorted by testseries, ID)
{{ report.testseries.ljust(maxlen['testseries']) }} | {{ report.result_id.ljust(maxlen['result_id']) }} | {{ (report.passed|string).ljust(maxlen['passed']) }} | {{ (report.failed|string).ljust(maxlen['failed']) }} | {{ (report.skipped|string).ljust(maxlen['skipped']) }}
{% endfor %}
--------------------------------------------------------------------------------------------------------------
+{{ 'Total'.ljust(maxlen['testseries']) }} | {{ reporttotalvalues['count'].ljust(maxlen['result_id']) }} | {{ reporttotalvalues['passed'].ljust(maxlen['passed']) }} | {{ reporttotalvalues['failed'].ljust(maxlen['failed']) }} | {{ reporttotalvalues['skipped'].ljust(maxlen['skipped']) }}
+--------------------------------------------------------------------------------------------------------------
-{% if haveptest %}
+{% for machine in machines %}
+{% if ptests[machine] %}
==============================================================================================================
-PTest Result Summary
+{{ machine }} PTest Result Summary
==============================================================================================================
--------------------------------------------------------------------------------------------------------------
{{ 'Recipe'.ljust(maxlen['ptest']) }} | {{ 'Passed'.ljust(maxlen['passed']) }} | {{ 'Failed'.ljust(maxlen['failed']) }} | {{ 'Skipped'.ljust(maxlen['skipped']) }} | {{ 'Time(s)'.ljust(10) }}
--------------------------------------------------------------------------------------------------------------
-{% for ptest in ptests |sort %}
-{{ ptest.ljust(maxlen['ptest']) }} | {{ (ptests[ptest]['passed']|string).ljust(maxlen['passed']) }} | {{ (ptests[ptest]['failed']|string).ljust(maxlen['failed']) }} | {{ (ptests[ptest]['skipped']|string).ljust(maxlen['skipped']) }} | {{ (ptests[ptest]['duration']|string) }}
+{% for ptest in ptests[machine] |sort %}
+{{ ptest.ljust(maxlen['ptest']) }} | {{ (ptests[machine][ptest]['passed']|string).ljust(maxlen['passed']) }} | {{ (ptests[machine][ptest]['failed']|string).ljust(maxlen['failed']) }} | {{ (ptests[machine][ptest]['skipped']|string).ljust(maxlen['skipped']) }} | {{ (ptests[machine][ptest]['duration']|string) }}
+{% endfor %}
+--------------------------------------------------------------------------------------------------------------
+
+{% endif %}
+{% endfor %}
+
+{% for machine in machines %}
+{% if ltptests[machine] %}
+==============================================================================================================
+{{ machine }} Ltp Test Result Summary
+==============================================================================================================
+--------------------------------------------------------------------------------------------------------------
+{{ 'Recipe'.ljust(maxlen['ltptest']) }} | {{ 'Passed'.ljust(maxlen['passed']) }} | {{ 'Failed'.ljust(maxlen['failed']) }} | {{ 'Skipped'.ljust(maxlen['skipped']) }} | {{ 'Time(s)'.ljust(10) }}
+--------------------------------------------------------------------------------------------------------------
+{% for ltptest in ltptests[machine] |sort %}
+{{ ltptest.ljust(maxlen['ltptest']) }} | {{ (ltptests[machine][ltptest]['passed']|string).ljust(maxlen['passed']) }} | {{ (ltptests[machine][ltptest]['failed']|string).ljust(maxlen['failed']) }} | {{ (ltptests[machine][ltptest]['skipped']|string).ljust(maxlen['skipped']) }} | {{ (ltptests[machine][ltptest]['duration']|string) }}
{% endfor %}
--------------------------------------------------------------------------------------------------------------
-{% else %}
-There was no ptest data
{% endif %}
+{% endfor %}
+
+{% for machine in machines %}
+{% if ltpposixtests[machine] %}
+==============================================================================================================
+{{ machine }} Ltp Posix Result Summary
+==============================================================================================================
+--------------------------------------------------------------------------------------------------------------
+{{ 'Recipe'.ljust(maxlen['ltpposixtest']) }} | {{ 'Passed'.ljust(maxlen['passed']) }} | {{ 'Failed'.ljust(maxlen['failed']) }} | {{ 'Skipped'.ljust(maxlen['skipped']) }} | {{ 'Time(s)'.ljust(10) }}
+--------------------------------------------------------------------------------------------------------------
+{% for ltpposixtest in ltpposixtests[machine] |sort %}
+{{ ltpposixtest.ljust(maxlen['ltpposixtest']) }} | {{ (ltpposixtests[machine][ltpposixtest]['passed']|string).ljust(maxlen['passed']) }} | {{ (ltpposixtests[machine][ltpposixtest]['failed']|string).ljust(maxlen['failed']) }} | {{ (ltpposixtests[machine][ltpposixtest]['skipped']|string).ljust(maxlen['skipped']) }} | {{ (ltpposixtests[machine][ltpposixtest]['duration']|string) }}
+{% endfor %}
+--------------------------------------------------------------------------------------------------------------
+
+{% endif %}
+{% endfor %}
+
==============================================================================================================
Failed test cases (sorted by testseries, ID)
diff --git a/external/poky/scripts/lib/scriptpath.py b/external/poky/scripts/lib/scriptpath.py
index d00317e1..f32326db 100644
--- a/external/poky/scripts/lib/scriptpath.py
+++ b/external/poky/scripts/lib/scriptpath.py
@@ -3,18 +3,8 @@
# Copyright (C) 2012-2014 Intel Corporation
# Copyright (C) 2011 Mentor Graphics Corporation
#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
+# SPDX-License-Identifier: GPL-2.0-only
#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import sys
import os
diff --git a/external/poky/scripts/lib/scriptutils.py b/external/poky/scripts/lib/scriptutils.py
index 3c60c3a1..f92255d8 100644
--- a/external/poky/scripts/lib/scriptutils.py
+++ b/external/poky/scripts/lib/scriptutils.py
@@ -2,18 +2,8 @@
#
# Copyright (C) 2014 Intel Corporation
#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
+# SPDX-License-Identifier: GPL-2.0-only
#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import argparse
import glob
@@ -26,12 +16,51 @@ import string
import subprocess
import sys
import tempfile
+import threading
import importlib
from importlib import machinery
-def logger_create(name, stream=None):
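+# StreamHandler that emits a periodic "Keepalive message" whenever no other
+# log record has been produced within the keepalive interval, so that
+# long-running quiet operations still generate output.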
+class KeepAliveStreamHandler(logging.StreamHandler):
+ def __init__(self, keepalive=True, **kwargs):
+ super().__init__(**kwargs)
+ if keepalive is True:
+ keepalive = 5000 # default timeout
+ self._timeout = threading.Condition()
+ self._stop = False
+
+ # background thread waits on condition, if the condition does not
+ # happen emit a keep alive message
+ def thread():
+ while not self._stop:
+ with self._timeout:
+ if not self._timeout.wait(keepalive):
+ self.emit(logging.LogRecord("keepalive", logging.INFO,
+ None, None, "Keepalive message", None, None))
+
+ self._thread = threading.Thread(target = thread, daemon = True)
+ self._thread.start()
+
+ def close(self):
+ # mark the thread to stop and notify it
+ self._stop = True
+ with self._timeout:
+ self._timeout.notify()
+ # wait for it to join
+ self._thread.join()
+ super().close()
+
+ def emit(self, record):
+ super().emit(record)
+ # trigger timer reset
+ with self._timeout:
+ self._timeout.notify()
+
+def logger_create(name, stream=None, keepalive=None):
logger = logging.getLogger(name)
- loggerhandler = logging.StreamHandler(stream=stream)
+ if keepalive is not None:
+ loggerhandler = KeepAliveStreamHandler(stream=stream, keepalive=keepalive)
+ else:
+ loggerhandler = logging.StreamHandler(stream=stream)
loggerhandler.setFormatter(logging.Formatter("%(levelname)s: %(message)s"))
logger.addHandler(loggerhandler)
logger.setLevel(logging.INFO)
@@ -39,16 +68,15 @@ def logger_create(name, stream=None):
def logger_setup_color(logger, color='auto'):
from bb.msg import BBLogFormatter
- console = logging.StreamHandler(sys.stdout)
- formatter = BBLogFormatter("%(levelname)s: %(message)s")
- console.setFormatter(formatter)
- logger.handlers = [console]
- if color == 'always' or (color=='auto' and console.stream.isatty()):
- formatter.enable_color()
+
+ for handler in logger.handlers:
+ if (isinstance(handler, logging.StreamHandler) and
+ isinstance(handler.formatter, BBLogFormatter)):
+ if color == 'always' or (color == 'auto' and handler.stream.isatty()):
+ handler.formatter.enable_color()
def load_plugins(logger, plugins, pluginpath):
- import imp
def load_plugin(name):
logger.debug('Loading plugin %s' % name)
@@ -69,6 +97,7 @@ def load_plugins(logger, plugins, pluginpath):
plugin.plugin_init(plugins)
plugins.append(plugin)
+
def git_convert_standalone_clone(repodir):
"""If specified directory is a git repository, ensure it's a standalone clone"""
import bb.process
@@ -238,3 +267,13 @@ def is_src_url(param):
elif param.startswith('git@') or ('@' in param and param.endswith('.git')):
return True
return False
+
+def filter_src_subdirs(pth):
+ """
+ Filter out subdirectories of initial unpacked source trees that we do not care about.
+ Used by devtool and recipetool.
+ """
+ dirlist = os.listdir(pth)
+ filterout = ['git.indirectionsymlink', 'source-date-epoch']
+ dirlist = [x for x in dirlist if x not in filterout]
+ return dirlist
diff --git a/external/poky/scripts/lib/wic/__init__.py b/external/poky/scripts/lib/wic/__init__.py
index 85876b13..85567934 100644
--- a/external/poky/scripts/lib/wic/__init__.py
+++ b/external/poky/scripts/lib/wic/__init__.py
@@ -1,20 +1,10 @@
-#!/usr/bin/env python -tt
+#!/usr/bin/env python3
#
# Copyright (c) 2007 Red Hat, Inc.
# Copyright (c) 2011 Intel, Inc.
#
-# This program is free software; you can redistribute it and/or modify it
-# under the terms of the GNU General Public License as published by the Free
-# Software Foundation; version 2 of the License
+# SPDX-License-Identifier: GPL-2.0-only
#
-# This program is distributed in the hope that it will be useful, but
-# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
-# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
-# for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc., 59
-# Temple Place - Suite 330, Boston, MA 02111-1307, USA.
class WicError(Exception):
pass
diff --git a/external/poky/scripts/lib/wic/canned-wks/qemuriscv.wks b/external/poky/scripts/lib/wic/canned-wks/qemuriscv.wks
new file mode 100644
index 00000000..12c68b70
--- /dev/null
+++ b/external/poky/scripts/lib/wic/canned-wks/qemuriscv.wks
@@ -0,0 +1,3 @@
+# short-description: Create qcow2 image for RISC-V QEMU machines
+
+part / --source rootfs --fstype=ext4 --label root --align 4096 --size 5G
diff --git a/external/poky/scripts/lib/wic/canned-wks/qemux86-directdisk.wks b/external/poky/scripts/lib/wic/canned-wks/qemux86-directdisk.wks
index c8d9f121..22b45217 100644
--- a/external/poky/scripts/lib/wic/canned-wks/qemux86-directdisk.wks
+++ b/external/poky/scripts/lib/wic/canned-wks/qemux86-directdisk.wks
@@ -4,5 +4,5 @@
include common.wks.inc
-bootloader --timeout=0 --append="vga=0 rw oprofile.timer=1 rootfstype=ext4 "
+bootloader --timeout=0 --append="rw oprofile.timer=1 rootfstype=ext4 "
diff --git a/external/poky/scripts/lib/wic/engine.py b/external/poky/scripts/lib/wic/engine.py
index ea600d28..9ff43947 100644
--- a/external/poky/scripts/lib/wic/engine.py
+++ b/external/poky/scripts/lib/wic/engine.py
@@ -1,21 +1,7 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
# Copyright (c) 2013, Intel Corporation.
-# All rights reserved.
#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+# SPDX-License-Identifier: GPL-2.0-only
#
# DESCRIPTION
@@ -33,6 +19,7 @@ import os
import tempfile
import json
import subprocess
+import re
from collections import namedtuple, OrderedDict
from distutils.spawn import find_executable
@@ -89,7 +76,8 @@ def find_canned_image(scripts_path, wks_file):
for fname in files:
if fname.endswith("~") or fname.endswith("#"):
continue
- if fname.endswith(".wks") and wks_file + ".wks" == fname:
+ if ((fname.endswith(".wks") and wks_file + ".wks" == fname) or \
+ (fname.endswith(".wks.in") and wks_file + ".wks.in" == fname)):
fullpath = os.path.join(canned_wks_dir, fname)
return fullpath
return None
@@ -106,7 +94,7 @@ def list_canned_images(scripts_path):
for fname in files:
if fname.endswith("~") or fname.endswith("#"):
continue
- if fname.endswith(".wks"):
+ if fname.endswith(".wks") or fname.endswith(".wks.in"):
fullpath = os.path.join(canned_wks_dir, fname)
with open(fullpath) as wks:
for line in wks:
@@ -115,7 +103,7 @@ def list_canned_images(scripts_path):
if idx != -1:
desc = line[idx + len("short-description:"):].strip()
break
- basename = os.path.splitext(fname)[0]
+ basename = fname.split('.')[0]
print(" %s\t\t%s" % (basename.ljust(30), desc))
@@ -303,7 +291,7 @@ class Disk:
def _get_part_image(self, pnum):
if pnum not in self.partitions:
- raise WicError("Partition %s is not in the image")
+ raise WicError("Partition %s is not in the image" % pnum)
part = self.partitions[pnum]
# check if fstype is supported
for fstype in self.fstypes:
@@ -326,6 +314,9 @@ class Disk:
seek=self.partitions[pnum].start)
def dir(self, pnum, path):
+ if pnum not in self.partitions:
+ raise WicError("Partition %s is not in the image" % pnum)
+
if self.partitions[pnum].fstype.startswith('ext'):
return exec_cmd("{} {} -R 'ls -l {}'".format(self.debugfs,
self._get_part_image(pnum),
@@ -335,38 +326,80 @@ class Disk:
self._get_part_image(pnum),
path))
- def copy(self, src, pnum, path):
+ def copy(self, src, dest):
"""Copy partition image into wic image."""
+ pnum = dest.part if isinstance(src, str) else src.part
+
if self.partitions[pnum].fstype.startswith('ext'):
- cmd = "printf 'cd {}\nwrite {} {}' | {} -w {}".\
- format(path, src, os.path.basename(src),
+ if isinstance(src, str):
+ cmd = "printf 'cd {}\nwrite {} {}\n' | {} -w {}".\
+ format(os.path.dirname(dest.path), src, os.path.basename(src),
self.debugfs, self._get_part_image(pnum))
+ else: # copy from wic
+ # run both dump and rdump to support both files and directory
+ cmd = "printf 'cd {}\ndump /{} {}\nrdump /{} {}\n' | {} {}".\
+ format(os.path.dirname(src.path), src.path,
+ dest, src.path, dest, self.debugfs,
+ self._get_part_image(pnum))
else: # fat
- cmd = "{} -i {} -snop {} ::{}".format(self.mcopy,
+ if isinstance(src, str):
+ cmd = "{} -i {} -snop {} ::{}".format(self.mcopy,
self._get_part_image(pnum),
- src, path)
+ src, dest.path)
+ else:
+ cmd = "{} -i {} -snop ::{} {}".format(self.mcopy,
+ self._get_part_image(pnum),
+ src.path, dest)
+
exec_cmd(cmd, as_shell=True)
self._put_part_image(pnum)
- def remove(self, pnum, path):
+ def remove_ext(self, pnum, path, recursive):
+ """
+ Remove files/dirs and their contents from the partition.
+ This only applies to ext* partition.
+ """
+ abs_path = re.sub(r'//+', '/', path)
+ cmd = "{} {} -wR 'rm \"{}\"'".format(self.debugfs,
+ self._get_part_image(pnum),
+ abs_path)
+ out = exec_cmd(cmd, as_shell=True)
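+ # debugfs reports problems in its output, so scan for 'rm:' error lines;
+ # plain files are removed directly, directories are handled below
+ # (recursively when -r is given).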
+ for line in out.splitlines():
+ if line.startswith("rm:"):
+ if "file is a directory" in line:
+ if recursive:
+ # loop through the contents and delete them one by one
+ # if flagged with -r
+ subdirs = iter(self.dir(pnum, abs_path).splitlines())
+ next(subdirs)
+ for subdir in subdirs:
+ dir = subdir.split(':')[1].split(" ", 1)[1]
+ if not dir == "." and not dir == "..":
+ self.remove_ext(pnum, "%s/%s" % (abs_path, dir), recursive)
+
+ rmdir_out = exec_cmd("{} {} -wR 'rmdir \"{}\"'".format(self.debugfs,
+ self._get_part_image(pnum),
+ abs_path.rstrip('/'))
+ , as_shell=True)
+
+ for rmdir_line in rmdir_out.splitlines():
+ if "directory not empty" in rmdir_line:
+ raise WicError("Could not complete operation: \n%s \n"
+ "use -r to remove non-empty directory" % rmdir_line)
+ if rmdir_line.startswith("rmdir:"):
+ raise WicError("Could not complete operation: \n%s "
+ "\n%s" % (str(line), rmdir_line))
+
+ else:
+ raise WicError("Could not complete operation: \n%s "
+ "\nUnable to remove %s" % (str(line), abs_path))
+
+ def remove(self, pnum, path, recursive):
"""Remove files/dirs from the partition."""
partimg = self._get_part_image(pnum)
if self.partitions[pnum].fstype.startswith('ext'):
- cmd = "{} {} -wR 'rm {}'".format(self.debugfs,
- self._get_part_image(pnum),
- path)
- out = exec_cmd(cmd , as_shell=True)
- for line in out.splitlines():
- if line.startswith("rm:"):
- if "file is a directory" in line:
- # Try rmdir to see if this is an empty directory. This won't delete
- # any non empty directory so let user know about any error that this might
- # generate.
- print(exec_cmd("{} {} -wR 'rmdir {}'".format(self.debugfs,
- self._get_part_image(pnum),
- path), as_shell=True))
- else:
- raise WicError("Could not complete operation: wic %s" % str(line))
+ self.remove_ext(pnum, path, recursive)
+
else: # fat
cmd = "{} -i {} ::{}".format(self.mdel, partimg, path)
try:
@@ -409,7 +442,7 @@ class Disk:
outf.flush()
def read_ptable(path):
- out = exec_cmd("{} -dJ {}".format(self.sfdisk, path))
+ out = exec_cmd("{} -J {}".format(self.sfdisk, path))
return json.loads(out)
def write_ptable(parts, target):
@@ -536,11 +569,15 @@ def wic_ls(args, native_sysroot):
def wic_cp(args, native_sysroot):
"""
- Copy local file or directory to the vfat partition of
+ Copy file or directory to/from the vfat/ext partition of
partitioned image.
"""
- disk = Disk(args.dest.image, native_sysroot)
- disk.copy(args.src, args.dest.part, args.dest.path)
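+ # Whichever of args.src/args.dest is not a plain string is the parsed
+ # <image>:<partition><path> side; open the wic image named on that side.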
+ if isinstance(args.dest, str):
+ disk = Disk(args.src.image, native_sysroot)
+ else:
+ disk = Disk(args.dest.image, native_sysroot)
+ disk.copy(args.src, args.dest)
+
def wic_rm(args, native_sysroot):
"""
@@ -548,13 +585,13 @@ def wic_rm(args, native_sysroot):
partitioned image.
"""
disk = Disk(args.path.image, native_sysroot)
- disk.remove(args.path.part, args.path.path)
+ disk.remove(args.path.part, args.path.path, args.recursive_delete)
def wic_write(args, native_sysroot):
"""
Write image to a target device.
"""
- disk = Disk(args.image, native_sysroot, ('fat', 'ext', 'swap'))
+ disk = Disk(args.image, native_sysroot, ('fat', 'ext', 'linux-swap'))
disk.write(args.target, args.expand)
def find_canned(scripts_path, file_name):
diff --git a/external/poky/scripts/lib/wic/filemap.py b/external/poky/scripts/lib/wic/filemap.py
index abbf958b..4d9da281 100644
--- a/external/poky/scripts/lib/wic/filemap.py
+++ b/external/poky/scripts/lib/wic/filemap.py
@@ -1,13 +1,8 @@
+#
# Copyright (c) 2012 Intel, Inc.
#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License, version 2,
-# as published by the Free Software Foundation.
+# SPDX-License-Identifier: GPL-2.0-only
#
-# This program is distributed in the hope that it will be useful, but
-# WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-# General Public License for more details.
"""
This module implements a way to get the file block map. Two methods
@@ -37,8 +32,13 @@ def get_block_size(file_obj):
"""
# Get the block size of the host file-system for the image file by calling
# the FIGETBSZ ioctl (number 2).
- binary_data = fcntl.ioctl(file_obj, 2, struct.pack('I', 0))
- bsize = struct.unpack('I', binary_data)[0]
+ try:
+ binary_data = fcntl.ioctl(file_obj, 2, struct.pack('I', 0))
+ bsize = struct.unpack('I', binary_data)[0]
+ except OSError:
+ bsize = None
+
+ # If the ioctl fails with OSError or returns a zero block size, fall back to os.fstat
if not bsize:
import os
stat = os.fstat(file_obj.fileno())
@@ -142,15 +142,6 @@ class _FilemapBase(object):
raise Error("the method is not implemented")
- def block_is_unmapped(self, block): # pylint: disable=W0613,R0201
- """
- This method has has to be implemented by child classes. It returns
- 'True' if block number 'block' of the image file is not mapped (hole)
- and 'False' otherwise.
- """
-
- raise Error("the method is not implemented")
-
def get_mapped_ranges(self, start, count): # pylint: disable=W0613,R0201
"""
This method has has to be implemented by child classes. This is a
@@ -164,15 +155,6 @@ class _FilemapBase(object):
raise Error("the method is not implemented")
- def get_unmapped_ranges(self, start, count): # pylint: disable=W0613,R0201
- """
- This method has has to be implemented by child classes. Just like
- 'get_mapped_ranges()', but yields unmapped block ranges instead
- (holes).
- """
-
- raise Error("the method is not implemented")
-
# The 'SEEK_HOLE' and 'SEEK_DATA' options of the file seek system call
_SEEK_DATA = 3
@@ -265,15 +247,10 @@ class FilemapSeek(_FilemapBase):
% (block, result))
return result
- def block_is_unmapped(self, block):
- """Refer the '_FilemapBase' class for the documentation."""
- return not self.block_is_mapped(block)
-
def _get_ranges(self, start, count, whence1, whence2):
"""
- This function implements 'get_mapped_ranges()' and
- 'get_unmapped_ranges()' depending on what is passed in the 'whence1'
- and 'whence2' arguments.
+ This function implements 'get_mapped_ranges()' depending
+ on what is passed in the 'whence1' and 'whence2' arguments.
"""
assert whence1 != whence2
@@ -303,12 +280,6 @@ class FilemapSeek(_FilemapBase):
% (start, count, start + count - 1))
return self._get_ranges(start, count, _SEEK_DATA, _SEEK_HOLE)
- def get_unmapped_ranges(self, start, count):
- """Refer the '_FilemapBase' class for the documentation."""
- self._log.debug("FilemapSeek: get_unmapped_ranges(%d, %d(%d))"
- % (start, count, start + count - 1))
- return self._get_ranges(start, count, _SEEK_HOLE, _SEEK_DATA)
-
# Below goes the FIEMAP ioctl implementation, which is not very readable
# because it deals with the rather complex FIEMAP ioctl. To understand the
@@ -422,10 +393,6 @@ class FilemapFiemap(_FilemapBase):
% (block, result))
return result
- def block_is_unmapped(self, block):
- """Refer the '_FilemapBase' class for the documentation."""
- return not self.block_is_mapped(block)
-
def _unpack_fiemap_extent(self, index):
"""
Unpack a 'struct fiemap_extent' structure object number 'index' from
@@ -502,23 +469,28 @@ class FilemapFiemap(_FilemapBase):
% (first_prev, last_prev))
yield (first_prev, last_prev)
- def get_unmapped_ranges(self, start, count):
+class FilemapNobmap(_FilemapBase):
+ """
+ This class is used when both the 'SEEK_DATA/HOLE' and FIEMAP are not
+ supported by the filesystem or kernel.
+ """
+
+ def __init__(self, image, log=None):
"""Refer the '_FilemapBase' class for the documentation."""
- self._log.debug("FilemapFiemap: get_unmapped_ranges(%d, %d(%d))"
- % (start, count, start + count - 1))
- hole_first = start
- for first, last in self._do_get_mapped_ranges(start, count):
- if first > hole_first:
- self._log.debug("FilemapFiemap: yielding range (%d, %d)"
- % (hole_first, first - 1))
- yield (hole_first, first - 1)
- hole_first = last + 1
+ # Call the base class constructor first
+ _FilemapBase.__init__(self, image, log)
+ self._log.debug("FilemapNobmap: initializing")
- if hole_first < start + count:
- self._log.debug("FilemapFiemap: yielding range (%d, %d)"
- % (hole_first, start + count - 1))
- yield (hole_first, start + count - 1)
+ def block_is_mapped(self, block):
+ """Refer the '_FilemapBase' class for the documentation."""
+ return True
+
+ def get_mapped_ranges(self, start, count):
+ """Refer the '_FilemapBase' class for the documentation."""
+ self._log.debug("FilemapNobmap: get_mapped_ranges(%d, %d(%d))"
+ % (start, count, start + count - 1))
+ yield (start, start + count - 1)
def filemap(image, log=None):
"""
@@ -533,7 +505,10 @@ def filemap(image, log=None):
try:
return FilemapFiemap(image, log)
except ErrorNotSupp:
- return FilemapSeek(image, log)
+ try:
+ return FilemapSeek(image, log)
+ except ErrorNotSupp:
+ return FilemapNobmap(image, log)
def sparse_copy(src_fname, dst_fname, skip=0, seek=0,
length=0, api=None):
diff --git a/external/poky/scripts/lib/wic/help.py b/external/poky/scripts/lib/wic/help.py
index 64f08052..1e3d06a8 100644
--- a/external/poky/scripts/lib/wic/help.py
+++ b/external/poky/scripts/lib/wic/help.py
@@ -1,21 +1,6 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-#
# Copyright (c) 2013, Intel Corporation.
-# All rights reserved.
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+# SPDX-License-Identifier: GPL-2.0-only
#
# DESCRIPTION
# This module implements some basic help invocation functions along
@@ -356,12 +341,15 @@ DESCRIPTION
wic_cp_usage = """
- Copy files and directories to the vfat or ext* partition
+ Copy files and directories to/from the vfat or ext* partition
+
+ usage: wic cp <src> <dest> [--native-sysroot <path>]
- usage: wic cp <src> <image>:<partition>[<path>] [--native-sysroot <path>]
+ source/destination image in format <image>:<partition>[<path>]
- This command copies local files or directories to the vfat or ext* partitions
-of partitioned image.
+ This command copies files or directories either
+ - from the local filesystem to the vfat or ext* partitions of a partitioned image
+ - from the vfat or ext* partitions of a partitioned image to the local filesystem
See 'wic help cp' for more detailed instructions.
@@ -370,16 +358,18 @@ of partitioned image.
wic_cp_help = """
NAME
- wic cp - copy files and directories to the vfat or ext* partitions
+ wic cp - copy files and directories to/from the vfat or ext* partitions
SYNOPSIS
- wic cp <src> <image>:<partition>
- wic cp <src> <image>:<partition><path>
- wic cp <src> <image>:<partition><path> --native-sysroot <path>
+ wic cp <src> <dest>:<partition>
+ wic cp <src>:<partition> <dest>
+ wic cp <src> <dest-image>:<partition><path>
+ wic cp <src> <dest-image>:<partition><path> --native-sysroot <path>
DESCRIPTION
- This command copies files and directories to the vfat or ext* partition of
- the partitioned image.
+ This command copies files or directories either
+    - from the local filesystem to the vfat or ext* partitions of a partitioned image
+    - from the vfat or ext* partitions of a partitioned image to the local filesystem
The first form of it copies file or directory to the root directory of
the partition:
@@ -412,6 +402,10 @@ DESCRIPTION
4 files 0 bytes
15 675 392 bytes free
+    The third form of the command copies a file or directory from the specified
+    location on the partition to the local filesystem:
+ $ wic cp tmp/deploy/images/qemux86-64/core-image-minimal-qemux86-64.wic:1/vmlinuz test
+
The -n option is used to specify the path to the native sysroot
containing the tools(parted and mtools) to use.
"""
@@ -437,6 +431,7 @@ NAME
SYNOPSIS
wic rm <src> <image>:<partition><path>
wic rm <src> <image>:<partition><path> --native-sysroot <path>
+ wic rm -r <image>:<partition><path>
DESCRIPTION
This command removes files or directories from the vfat or ext* partition of the
@@ -471,6 +466,9 @@ DESCRIPTION
The -n option is used to specify the path to the native sysroot
containing the tools(parted and mtools) to use.
+
+ The -r option is used to remove directories and their contents
+    recursively. This only applies to ext* partitions.
"""
wic_write_usage = """
@@ -493,7 +491,7 @@ NAME
SYNOPSIS
wic write <image> <target>
wic write <image> <target> --expand auto
- wic write <image> <target> --expand 1:100M-2:300M
+ wic write <image> <target> --expand 1:100M,2:300M
wic write <image> <target> --native-sysroot <path>
DESCRIPTION
@@ -504,7 +502,7 @@ DESCRIPTION
The --expand option is used to resize image partitions.
--expand auto expands partitions to occupy all free space available on the target device.
It's also possible to specify expansion rules in a format
- <partition>:<size>[-<partition>:<size>...] for one or more partitions.
+ <partition>:<size>[,<partition>:<size>...] for one or more partitions.
Specifying size 0 will keep partition unmodified.
Note: Resizing boot partition can result in non-bootable image for non-EFI images. It is
recommended to use size 0 for boot partition to keep image bootable.
@@ -538,7 +536,8 @@ DESCRIPTION
Source plugins can also be implemented and added by external
layers - any plugins found in a scripts/lib/wic/plugins/source/
- directory in an external layer will also be made available.
+ or lib/wic/plugins/source/ directory in an external layer will
+ also be made available.
When the wic implementation needs to invoke a partition-specific
implementation, it looks for the plugin that has the same name as
@@ -971,6 +970,16 @@ DESCRIPTION
is omitted, not the directory itself. This option only
has an effect with the rootfs source plugin.
+ --include-path: This option is specific to wic. It adds the contents
+ of the given path to the resulting image. The path is
+                 relative to the directory in which wic is running, not
+                 the rootfs itself, so using an absolute path is
+ recommended. This option is most useful when multiple
+ copies of the rootfs are added to an image and it is
+ required to add extra content to only one of these
+ copies. This option only has an effect with the rootfs
+ source plugin.
+
--extra-space: This option is specific to wic. It adds extra
space after the space filled by the content
of the partition. The final size can go
@@ -1061,3 +1070,59 @@ NAME
DESCRIPTION
Specify a help topic to display it. Topics are shown above.
"""
+
+
+wic_help = """
+Creates a customized OpenEmbedded image.
+
+Usage: wic [--version]
+ wic help [COMMAND or TOPIC]
+ wic COMMAND [ARGS]
+
+ usage 1: Returns the current version of Wic
+ usage 2: Returns detailed help for a COMMAND or TOPIC
+ usage 3: Executes COMMAND
+
+
+COMMAND:
+
+ list - List available canned images and source plugins
+ ls - List contents of partitioned image or partition
+ rm - Remove files or directories from the vfat or ext* partitions
+ help - Show help for a wic COMMAND or TOPIC
+ write - Write an image to a device
+ cp - Copy files and directories to the vfat or ext* partitions
+ create - Create a new OpenEmbedded image
+
+
+TOPIC:
+ overview - Presents an overall overview of Wic
+ plugins - Presents an overview and API for Wic plugins
+    kickstart - Presents a Wic kickstart file reference
+
+
+Examples:
+
+ $ wic --version
+
+ Returns the current version of Wic
+
+
+ $ wic help cp
+
+ Returns the SYNOPSIS and DESCRIPTION for the Wic "cp" command.
+
+
+ $ wic list images
+
+ Returns the list of canned images (i.e. *.wks files located in
+    the /scripts/lib/wic/canned-wks directory).
+
+
+ $ wic create mkefidisk -e core-image-minimal
+
+ Creates an EFI disk image from artifacts used in a previous
+ core-image-minimal build in standard BitBake locations
+ (e.g. Cooked Mode).
+
+"""
diff --git a/external/poky/scripts/lib/wic/ksparser.py b/external/poky/scripts/lib/wic/ksparser.py
index 7e5a9c50..650b9762 100644
--- a/external/poky/scripts/lib/wic/ksparser.py
+++ b/external/poky/scripts/lib/wic/ksparser.py
@@ -1,21 +1,8 @@
-#!/usr/bin/env python -tt
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+#!/usr/bin/env python3
#
# Copyright (c) 2016 Intel, Inc.
#
-# This program is free software; you can redistribute it and/or modify it
-# under the terms of the GNU General Public License as published by the Free
-# Software Foundation; version 2 of the License
-#
-# This program is distributed in the hope that it will be useful, but
-# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
-# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
-# for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc., 59
-# Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+# SPDX-License-Identifier: GPL-2.0-only
#
# DESCRIPTION
# This module provides parser for kickstart format
@@ -28,14 +15,30 @@
import os
import shlex
import logging
+import re
from argparse import ArgumentParser, ArgumentError, ArgumentTypeError
from wic.engine import find_canned
from wic.partition import Partition
+from wic.misc import get_bitbake_var
logger = logging.getLogger('wic')
+__expand_var_regexp__ = re.compile(r"\${[^{}@\n\t :]+}")
+
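+# Expand ${VAR} references in a .wks line using BitBake variable values;
+# unresolvable variables stop the expansion with a warning.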
+def expand_line(line):
+ while True:
+ m = __expand_var_regexp__.search(line)
+ if not m:
+ return line
+ key = m.group()[2:-1]
+ val = get_bitbake_var(key)
+ if val is None:
+ logger.warning("cannot expand variable %s" % key)
+ return line
+ line = line[:m.start()] + val + line[m.end():]
+
class KickStartError(Exception):
"""Custom exception."""
pass
@@ -134,6 +137,7 @@ class KickStart():
part.add_argument('--active', action='store_true')
part.add_argument('--align', type=int)
part.add_argument('--exclude-path', nargs='+')
+ part.add_argument('--include-path', nargs='+')
part.add_argument("--extra-space", type=sizetype)
part.add_argument('--fsoptions', dest='fsopts')
part.add_argument('--fstype', default='vfat',
@@ -148,6 +152,8 @@ class KickStart():
part.add_argument('--part-name')
part.add_argument('--part-type')
part.add_argument('--rootfs-dir')
+ part.add_argument('--type', default='primary',
+ choices = ('primary', 'logical'))
# --size and --fixed-size cannot be specified together; options
# ----extra-space and --overhead-factor should also raise a parser
@@ -190,6 +196,7 @@ class KickStart():
line = line.strip()
lineno += 1
if line and line[0] != '#':
+ line = expand_line(line)
try:
line_args = shlex.split(line)
parsed = parser.parse_args(line_args)
@@ -239,6 +246,11 @@ class KickStart():
elif line.startswith('bootloader'):
if not self.bootloader:
self.bootloader = parsed
+ # Concatenate the strings set in APPEND
+ append_var = get_bitbake_var("APPEND")
+ if append_var:
+ self.bootloader.append = ' '.join(filter(None, \
+ (self.bootloader.append, append_var)))
else:
err = "%s:%d: more than one bootloader specified" \
% (confpath, lineno)
diff --git a/external/poky/scripts/lib/wic/misc.py b/external/poky/scripts/lib/wic/misc.py
index ee888b47..1f199b9f 100644
--- a/external/poky/scripts/lib/wic/misc.py
+++ b/external/poky/scripts/lib/wic/misc.py
@@ -1,21 +1,7 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
# Copyright (c) 2013, Intel Corporation.
-# All rights reserved.
#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+# SPDX-License-Identifier: GPL-2.0-only
#
# DESCRIPTION
# This module provides a place to collect various wic-related utils
diff --git a/external/poky/scripts/lib/wic/partition.py b/external/poky/scripts/lib/wic/partition.py
index 3da7e23e..2d95f784 100644
--- a/external/poky/scripts/lib/wic/partition.py
+++ b/external/poky/scripts/lib/wic/partition.py
@@ -1,21 +1,7 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
# Copyright (c) 2013-2016 Intel Corporation.
-# All rights reserved.
#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+# SPDX-License-Identifier: GPL-2.0-only
#
# DESCRIPTION
# This module provides the OpenEmbedded partition object definitions.
@@ -44,6 +30,7 @@ class Partition():
self.device = None
self.extra_space = args.extra_space
self.exclude_path = args.exclude_path
+ self.include_path = args.include_path
self.fsopts = args.fsopts
self.fstype = args.fstype
self.label = args.label
@@ -64,6 +51,7 @@ class Partition():
self.use_uuid = args.use_uuid
self.uuid = args.uuid
self.fsuuid = args.fsuuid
+ self.type = args.type
self.lineno = lineno
self.source_file = ""
@@ -173,7 +161,7 @@ class Partition():
# Split sourceparams string of the form key1=val1[,key2=val2,...]
# into a dict. Also accepts valueless keys i.e. without =
splitted = self.sourceparams.split(',')
- srcparams_dict = dict(par.split('=') for par in splitted if par)
+ srcparams_dict = dict(par.split('=', 1) for par in splitted if par)
plugin = PluginMgr.get_plugins('source')[self.source]
plugin.do_configure_partition(self, srcparams_dict, creator,
@@ -225,19 +213,24 @@ class Partition():
if os.path.isfile(rootfs):
os.remove(rootfs)
- # Get rootfs size from bitbake variable if it's not set in .ks file
if not self.size and real_rootfs:
- # Bitbake variable ROOTFS_SIZE is calculated in
- # Image._get_rootfs_size method from meta/lib/oe/image.py
- # using IMAGE_ROOTFS_SIZE, IMAGE_ROOTFS_ALIGNMENT,
- # IMAGE_OVERHEAD_FACTOR and IMAGE_ROOTFS_EXTRA_SPACE
+            # The rootfs size is not set in the .ks file, so try to get it
+            # from the bitbake variable
rsize_bb = get_bitbake_var('ROOTFS_SIZE')
- if rsize_bb:
- logger.warning('overhead-factor was specified, but size was not,'
- ' so bitbake variables will be used for the size.'
- ' In this case both IMAGE_OVERHEAD_FACTOR and '
- '--overhead-factor will be applied')
+ rdir = get_bitbake_var('IMAGE_ROOTFS')
+ if rsize_bb and rdir == rootfs_dir:
+ # Bitbake variable ROOTFS_SIZE is calculated in
+ # Image._get_rootfs_size method from meta/lib/oe/image.py
+ # using IMAGE_ROOTFS_SIZE, IMAGE_ROOTFS_ALIGNMENT,
+ # IMAGE_OVERHEAD_FACTOR and IMAGE_ROOTFS_EXTRA_SPACE
self.size = int(round(float(rsize_bb)))
+ else:
+ # Bitbake variable ROOTFS_SIZE is not defined so compute it
+ # from the rootfs_dir size using the same logic found in
+ # get_rootfs_size() from meta/classes/image.bbclass
+ du_cmd = "du -ks %s" % rootfs_dir
+ out = exec_cmd(du_cmd)
+ self.size = int(out.split()[0])
prefix = "ext" if self.fstype.startswith("ext") else self.fstype
method = getattr(self, "prepare_rootfs_" + prefix)
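
The fallback above mirrors get_rootfs_size() from image.bbclass: when ROOTFS_SIZE cannot be used, the partition size is taken from `du -ks` on the rootfs directory (KiB). A rough sketch, with subprocess standing in for wic's exec_cmd() helper:

    import subprocess

    def rootfs_size_kb(rootfs_dir):
        # `du -ks DIR` prints "<size-in-KiB>\tDIR"; the first field is the size
        out = subprocess.check_output(["du", "-ks", rootfs_dir], text=True)
        return int(out.split()[0])
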
@@ -322,7 +315,7 @@ class Partition():
dosfs_cmd = "mkdosfs %s -i %s %s %s -C %s %d" % \
(label_str, self.fsuuid, size_str, extraopts, rootfs,
- max(8250, rootfs_size))
+ rootfs_size)
exec_native_cmd(dosfs_cmd, native_sysroot)
mcopy_cmd = "mcopy -i %s -s %s/* ::/" % (rootfs, rootfs_dir)
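
The split('=', 1) change earlier in this file matters because a sourceparams value can itself contain '='. A small sketch of the parsing, using str.partition() as a slight variant of the upstream expression so valueless keys also map to an empty string:

    def parse_sourceparams(sourceparams):
        params = {}
        for par in sourceparams.split(','):
            if not par:
                continue
            key, _, val = par.partition('=')
            params[key] = val
        return params

    print(parse_sourceparams("loader=grub-efi,append=root=/dev/sda2"))
    # {'loader': 'grub-efi', 'append': 'root=/dev/sda2'}
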
diff --git a/external/poky/scripts/lib/wic/pluginbase.py b/external/poky/scripts/lib/wic/pluginbase.py
index 686d2fee..d9b4e577 100644
--- a/external/poky/scripts/lib/wic/pluginbase.py
+++ b/external/poky/scripts/lib/wic/pluginbase.py
@@ -1,19 +1,9 @@
-#!/usr/bin/env python -tt
+#!/usr/bin/env python3
#
# Copyright (c) 2011 Intel, Inc.
#
-# This program is free software; you can redistribute it and/or modify it
-# under the terms of the GNU General Public License as published by the Free
-# Software Foundation; version 2 of the License
+# SPDX-License-Identifier: GPL-2.0-only
#
-# This program is distributed in the hope that it will be useful, but
-# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
-# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
-# for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc., 59
-# Temple Place - Suite 330, Boston, MA 02111-1307, USA.
__all__ = ['ImagerPlugin', 'SourcePlugin']
@@ -28,7 +18,7 @@ from wic.misc import get_bitbake_var
PLUGIN_TYPES = ["imager", "source"]
-SCRIPTS_PLUGIN_DIR = "scripts/lib/wic/plugins"
+SCRIPTS_PLUGIN_DIR = ["scripts/lib/wic/plugins", "lib/wic/plugins"]
logger = logging.getLogger('wic')
@@ -48,10 +38,11 @@ class PluginMgr:
cls._plugin_dirs = [os.path.join(os.path.dirname(__file__), 'plugins')]
layers = get_bitbake_var("BBLAYERS") or ''
for layer_path in layers.split():
- path = os.path.join(layer_path, SCRIPTS_PLUGIN_DIR)
- path = os.path.abspath(os.path.expanduser(path))
- if path not in cls._plugin_dirs and os.path.isdir(path):
- cls._plugin_dirs.insert(0, path)
+ for script_plugin_dir in SCRIPTS_PLUGIN_DIR:
+ path = os.path.join(layer_path, script_plugin_dir)
+ path = os.path.abspath(os.path.expanduser(path))
+ if path not in cls._plugin_dirs and os.path.isdir(path):
+ cls._plugin_dirs.insert(0, path)
if ptype not in PLUGINS:
# load all ptype plugins
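
The loop above now probes each BBLAYERS entry for two candidate plugin locations and prepends any hits, so layer-provided plugins shadow the built-in ones. A standalone sketch (the layer list comes from the caller here rather than from BitBake):

    import os

    SCRIPTS_PLUGIN_DIRS = ["scripts/lib/wic/plugins", "lib/wic/plugins"]

    def collect_plugin_dirs(layers, plugin_dirs):
        for layer_path in layers:
            for subdir in SCRIPTS_PLUGIN_DIRS:
                path = os.path.abspath(os.path.expanduser(
                    os.path.join(layer_path, subdir)))
                if path not in plugin_dirs and os.path.isdir(path):
                    plugin_dirs.insert(0, path)
        return plugin_dirs
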
diff --git a/external/poky/scripts/lib/wic/plugins/imager/direct.py b/external/poky/scripts/lib/wic/plugins/imager/direct.py
index bb14a334..2d06c242 100644
--- a/external/poky/scripts/lib/wic/plugins/imager/direct.py
+++ b/external/poky/scripts/lib/wic/plugins/imager/direct.py
@@ -1,21 +1,7 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
# Copyright (c) 2013, Intel Corporation.
-# All rights reserved.
#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+# SPDX-License-Identifier: GPL-2.0-only
#
# DESCRIPTION
# This implements the 'direct' imager plugin class for 'wic'
@@ -63,7 +49,6 @@ class DirectPlugin(ImagerPlugin):
# parse possible 'rootfs=name' items
self.rootfs_dir = dict(rdir.split('=') for rdir in rootfs_dir.split(' '))
- self.replaced_rootfs_paths = {}
self.bootimg_dir = bootimg_dir
self.kernel_dir = kernel_dir
self.native_sysroot = native_sysroot
@@ -73,6 +58,7 @@ class DirectPlugin(ImagerPlugin):
self.compressor = options.compressor
self.bmap = options.bmap
self.no_fstab_update = options.no_fstab_update
+ self.original_fstab = None
self.name = "%s-%s" % (os.path.splitext(os.path.basename(wks_file))[0],
strftime("%Y%m%d%H%M"))
@@ -118,24 +104,13 @@ class DirectPlugin(ImagerPlugin):
with open(fstab_path) as fstab:
fstab_lines = fstab.readlines()
+ self.original_fstab = fstab_lines.copy()
if self._update_fstab(fstab_lines, self.parts):
- # copy rootfs dir to workdir to update fstab
- # as rootfs can be used by other tasks and can't be modified
- new_pseudo = os.path.realpath(os.path.join(self.workdir, "pseudo"))
- from_dir = os.path.join(os.path.join(image_rootfs, ".."), "pseudo")
- from_dir = os.path.realpath(from_dir)
- copyhardlinktree(from_dir, new_pseudo)
- new_rootfs = os.path.realpath(os.path.join(self.workdir, "rootfs_copy"))
- copyhardlinktree(image_rootfs, new_rootfs)
- fstab_path = os.path.join(new_rootfs, 'etc/fstab')
-
- os.unlink(fstab_path)
-
with open(fstab_path, "w") as fstab:
fstab.writelines(fstab_lines)
-
- return new_rootfs
+ else:
+ self.original_fstab = None
def _update_fstab(self, fstab_lines, parts):
"""Assume partition order same as in wks"""
@@ -184,14 +159,8 @@ class DirectPlugin(ImagerPlugin):
filesystems from the artifacts directly and combine them into
a partitioned image.
"""
- if self.no_fstab_update:
- new_rootfs = None
- else:
- new_rootfs = self._write_fstab(self.rootfs_dir.get("ROOTFS_DIR"))
- if new_rootfs:
- # rootfs was copied to update fstab
- self.replaced_rootfs_paths[new_rootfs] = self.rootfs_dir['ROOTFS_DIR']
- self.rootfs_dir['ROOTFS_DIR'] = new_rootfs
+ if not self.no_fstab_update:
+ self._write_fstab(self.rootfs_dir.get("ROOTFS_DIR"))
for part in self.parts:
# get rootfs size from bitbake variable if it's not set in .ks file
@@ -267,8 +236,6 @@ class DirectPlugin(ImagerPlugin):
else:
suffix = '["%s"]:' % (part.mountpoint or part.label)
rootdir = part.rootfs_dir
- if rootdir in self.replaced_rootfs_paths:
- rootdir = self.replaced_rootfs_paths[rootdir]
msg += ' ROOTFS_DIR%s%s\n' % (suffix.ljust(20), rootdir)
msg += ' BOOTIMG_DIR: %s\n' % self.bootimg_dir
@@ -306,6 +273,12 @@ class DirectPlugin(ImagerPlugin):
if os.path.isfile(path):
shutil.move(path, os.path.join(self.outdir, fname))
+        # Restore original fstab
+ if self.original_fstab:
+ fstab_path = self.rootfs_dir.get("ROOTFS_DIR") + "/etc/fstab"
+ with open(fstab_path, "w") as fstab:
+ fstab.writelines(self.original_fstab)
+
# remove work directory
shutil.rmtree(self.workdir, ignore_errors=True)
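
Taken together, the direct.py hunks above replace the old copy-the-whole-rootfs approach with save/patch/restore of /etc/fstab in place. A condensed sketch of that flow, with placeholder paths and an update callback standing in for _update_fstab():

    def write_fstab(fstab_path, update):
        """Rewrite fstab in place; return the original lines if changed."""
        with open(fstab_path) as fstab:
            original = fstab.readlines()
        updated = update(list(original))
        if updated == original:
            return None
        with open(fstab_path, "w") as fstab:
            fstab.writelines(updated)
        return original

    def restore_fstab(fstab_path, original):
        if original:
            with open(fstab_path, "w") as fstab:
                fstab.writelines(original)
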
@@ -327,6 +300,10 @@ class PartitionedImage():
self.path = path # Path to the image file
self.numpart = 0 # Number of allocated partitions
self.realpart = 0 # Number of partitions in the partition table
+ self.primary_part_num = 0 # Number of primary partitions (msdos)
+ self.extendedpart = 0 # Create extended partition before this logical partition (msdos)
+        self.extended_size_sec = 0 # Size of extended partition (msdos)
+        self.logical_part_cnt = 0 # Number of total logical partitions (msdos)
self.offset = 0 # Offset of next partition (in sectors)
self.min_size = 0 # Minimum required disk size to fit
# all partitions (in bytes)
@@ -339,6 +316,7 @@ class PartitionedImage():
# Size of a sector used in calculations
self.sector_size = SECTOR_SIZE
self.native_sysroot = native_sysroot
+ num_real_partitions = len([p for p in self.partitions if not p.no_table])
# calculate the real partition number, accounting for partitions not
# in the partition table and logical partitions
@@ -348,7 +326,7 @@ class PartitionedImage():
part.realnum = 0
else:
realnum += 1
- if self.ptable_format == 'msdos' and realnum > 3 and len(partitions) > 4:
+ if self.ptable_format == 'msdos' and realnum > 3 and num_real_partitions > 4:
part.realnum = realnum + 1
continue
part.realnum = realnum
@@ -418,12 +396,16 @@ class PartitionedImage():
# Skip one sector required for the partitioning scheme overhead
self.offset += overhead
- if self.realpart > 3 and num_real_partitions > 4:
+ if self.ptable_format == "msdos":
+ if self.primary_part_num > 3 or \
+ (self.extendedpart == 0 and self.primary_part_num >= 3 and num_real_partitions > 4):
+ part.type = 'logical'
# Reserve a sector for EBR for every logical partition
# before alignment is performed.
- if self.ptable_format == "msdos":
- self.offset += 1
+ if part.type == 'logical':
+ self.offset += 2
+ align_sectors = 0
if part.align:
# If not first partition and we do have alignment set we need
# to align the partition.
@@ -449,18 +431,25 @@ class PartitionedImage():
part.start = self.offset
self.offset += part.size_sec
- part.type = 'primary'
if not part.no_table:
part.num = self.realpart
else:
part.num = 0
- if self.ptable_format == "msdos":
- # only count the partitions that are in partition table
- if num_real_partitions > 4:
- if self.realpart > 3:
- part.type = 'logical'
- part.num = self.realpart + 1
+ if self.ptable_format == "msdos" and not part.no_table:
+ if part.type == 'logical':
+ self.logical_part_cnt += 1
+ part.num = self.logical_part_cnt + 4
+ if self.extendedpart == 0:
+ # Create extended partition as a primary partition
+ self.primary_part_num += 1
+ self.extendedpart = part.num
+ else:
+ self.extended_size_sec += align_sectors
+ self.extended_size_sec += part.size_sec + 2
+ else:
+ self.primary_part_num += 1
+ part.num = self.primary_part_num
logger.debug("Assigned %s to %s%d, sectors range %d-%d size %d "
"sectors (%d bytes).", part.mountpoint, part.disk,
@@ -510,7 +499,7 @@ class PartitionedImage():
if part.num == 0:
continue
- if self.ptable_format == "msdos" and part.num == 5:
+ if self.ptable_format == "msdos" and part.num == self.extendedpart:
# Create an extended partition (note: extended
# partition is described in MBR and contains all
# logical partitions). The logical partitions save a
@@ -523,8 +512,8 @@ class PartitionedImage():
# add a sector at the back, so that there is enough
# room for all logical partitions.
self._create_partition(self.path, "extended",
- None, part.start - 1,
- self.offset - part.start + 1)
+ None, part.start - 2,
+ self.extended_size_sec)
if part.fstype == "swap":
parted_fs_type = "linux-swap"
@@ -591,9 +580,7 @@ class PartitionedImage():
self.native_sysroot)
def cleanup(self):
- # remove partition images
- for image in set(self.partimages):
- os.remove(image)
+ pass
def assemble(self):
logger.debug("Installing partitions")
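
The msdos changes above boil down to: at most three primary data partitions, one primary slot reserved for the extended partition, and every further entry becomes a logical partition numbered from 5. An illustrative sketch of just the numbering, ignoring --no-table entries and the EBR sector accounting:

    def msdos_numbers(num_parts):
        numbers = []
        primaries = logicals = 0
        for _ in range(num_parts):
            if primaries >= 3 and num_parts > 4:
                logicals += 1
                numbers.append(('logical', 4 + logicals))
            else:
                primaries += 1
                numbers.append(('primary', primaries))
        return numbers

    print(msdos_numbers(6))
    # [('primary', 1), ('primary', 2), ('primary', 3),
    #  ('logical', 5), ('logical', 6), ('logical', 7)]
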
diff --git a/external/poky/scripts/lib/wic/plugins/source/bootimg-biosplusefi.py b/external/poky/scripts/lib/wic/plugins/source/bootimg-biosplusefi.py
new file mode 100644
index 00000000..5bd73906
--- /dev/null
+++ b/external/poky/scripts/lib/wic/plugins/source/bootimg-biosplusefi.py
@@ -0,0 +1,213 @@
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+#
+# DESCRIPTION
+# This implements the 'bootimg-biosplusefi' source plugin class for 'wic'
+#
+# AUTHORS
+# William Bourque <wbourque [at) gmail.com>
+
+import os
+import types
+
+from wic.pluginbase import SourcePlugin
+from importlib.machinery import SourceFileLoader
+
+class BootimgBiosPlusEFIPlugin(SourcePlugin):
+ """
+ Create MBR + EFI boot partition
+
+ This plugin creates a boot partition that contains both
+ legacy BIOS and EFI content. It will be able to boot from both.
+    This is useful when managing a PC fleet with some older machines
+ without EFI support.
+
+    Note it is possible to create an image that can boot from both
+    legacy BIOS and EFI by defining two partitions: one with arg
+    --source bootimg-efi and another one with --source bootimg-pcbios.
+    However, this method has the obvious downside that it requires TWO
+    partitions to be created on the storage device.
+    Both partitions will also be marked as "bootable", which does not work on
+    most BIOSes, as the BIOS often uses the "bootable" flag to determine
+    what to boot. If you have such a BIOS, you need to manually remove the
+    "bootable" flag from the EFI partition for the drive to be bootable.
+    Having two partitions also seems to confuse wic: the content of
+    the first partition will be duplicated into the second, even though it
+    will not be used at all.
+
+    Also, unlike "isoimage-isohybrid", which also does BIOS and EFI, this plugin
+    allows you to have more than a single rootfs partition and does
+    not turn the rootfs into an initramfs RAM image.
+
+ This plugin is made to put everything into a single /boot partition so it
+ does not have the limitations listed above.
+
+    The plugin tries not to reimplement what's already
+    been done in other plugins; as such it imports "bootimg-pcbios"
+    and "bootimg-efi".
+    Plugin "bootimg-pcbios" is used to generate the legacy BIOS boot.
+    Plugin "bootimg-efi" is used to generate the UEFI boot. Note that it
+    requires a --sourceparams argument to know which loader to use; refer
+    to the "bootimg-efi" code/documentation for the list of loaders.
+
+    Imports are handled with "SourceFileLoader" from importlib as it is
+    otherwise very difficult to import modules that have a hyphen "-" in
+    their filenames.
+    The SourcePlugin() methods used in the plugins (do_install_disk,
+    do_configure_partition, do_prepare_partition) are then called on both,
+    beginning with "bootimg-efi".
+
+    Plugin options, such as "--sourceparams", can still be passed to a
+    plugin, as long as they do not cause issues in the other plugin.
+
+ Example wic configuration:
+ part /boot --source bootimg-biosplusefi --sourceparams="loader=grub-efi"\\
+ --ondisk sda --label os_boot --active --align 1024 --use-uuid
+ """
+
+ name = 'bootimg-biosplusefi'
+
+ __PCBIOS_MODULE_NAME = "bootimg-pcbios"
+ __EFI_MODULE_NAME = "bootimg-efi"
+
+ __imgEFIObj = None
+ __imgBiosObj = None
+
+ @classmethod
+ def __init__(cls):
+ """
+ Constructor (init)
+ """
+
+ # XXX
+        # For some reason, the __init__ constructor is never called.
+ # Something to do with how pluginbase works?
+ cls.__instanciateSubClasses()
+
+ @classmethod
+ def __instanciateSubClasses(cls):
+ """
+        Import and instantiate the bootimg-pcbios and bootimg-efi source plugins.
+ """
+
+ # Import bootimg-pcbios (class name "BootimgPcbiosPlugin")
+ modulePath = os.path.join(os.path.dirname(os.path.realpath(__file__)),
+ cls.__PCBIOS_MODULE_NAME + ".py")
+ loader = SourceFileLoader(cls.__PCBIOS_MODULE_NAME, modulePath)
+ mod = types.ModuleType(loader.name)
+ loader.exec_module(mod)
+ cls.__imgBiosObj = mod.BootimgPcbiosPlugin()
+
+ # Import bootimg-efi (class name "BootimgEFIPlugin")
+ modulePath = os.path.join(os.path.dirname(os.path.realpath(__file__)),
+ cls.__EFI_MODULE_NAME + ".py")
+ loader = SourceFileLoader(cls.__EFI_MODULE_NAME, modulePath)
+ mod = types.ModuleType(loader.name)
+ loader.exec_module(mod)
+ cls.__imgEFIObj = mod.BootimgEFIPlugin()
+
+ @classmethod
+ def do_install_disk(cls, disk, disk_name, creator, workdir, oe_builddir,
+ bootimg_dir, kernel_dir, native_sysroot):
+ """
+ Called after all partitions have been prepared and assembled into a
+ disk image.
+ """
+
+ if ( (not cls.__imgEFIObj) or (not cls.__imgBiosObj) ):
+ cls.__instanciateSubClasses()
+
+ cls.__imgEFIObj.do_install_disk(
+ disk,
+ disk_name,
+ creator,
+ workdir,
+ oe_builddir,
+ bootimg_dir,
+ kernel_dir,
+ native_sysroot)
+
+ cls.__imgBiosObj.do_install_disk(
+ disk,
+ disk_name,
+ creator,
+ workdir,
+ oe_builddir,
+ bootimg_dir,
+ kernel_dir,
+ native_sysroot)
+
+ @classmethod
+ def do_configure_partition(cls, part, source_params, creator, cr_workdir,
+ oe_builddir, bootimg_dir, kernel_dir,
+ native_sysroot):
+ """
+ Called before do_prepare_partition()
+ """
+
+ if ( (not cls.__imgEFIObj) or (not cls.__imgBiosObj) ):
+ cls.__instanciateSubClasses()
+
+ cls.__imgEFIObj.do_configure_partition(
+ part,
+ source_params,
+ creator,
+ cr_workdir,
+ oe_builddir,
+ bootimg_dir,
+ kernel_dir,
+ native_sysroot)
+
+ cls.__imgBiosObj.do_configure_partition(
+ part,
+ source_params,
+ creator,
+ cr_workdir,
+ oe_builddir,
+ bootimg_dir,
+ kernel_dir,
+ native_sysroot)
+
+ @classmethod
+ def do_prepare_partition(cls, part, source_params, creator, cr_workdir,
+ oe_builddir, bootimg_dir, kernel_dir,
+ rootfs_dir, native_sysroot):
+ """
+ Called to do the actual content population for a partition i.e. it
+ 'prepares' the partition to be incorporated into the image.
+ """
+
+ if ( (not cls.__imgEFIObj) or (not cls.__imgBiosObj) ):
+ cls.__instanciateSubClasses()
+
+ cls.__imgEFIObj.do_prepare_partition(
+ part,
+ source_params,
+ creator,
+ cr_workdir,
+ oe_builddir,
+ bootimg_dir,
+ kernel_dir,
+ rootfs_dir,
+ native_sysroot)
+
+ cls.__imgBiosObj.do_prepare_partition(
+ part,
+ source_params,
+ creator,
+ cr_workdir,
+ oe_builddir,
+ bootimg_dir,
+ kernel_dir,
+ rootfs_dir,
+ native_sysroot)
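
The import trick described in the docstring above, shown standalone: SourceFileLoader lets the plugin load a module whose filename contains a hyphen, which a normal import statement cannot express. The directory argument is whatever holds the wic source plugins:

    import os
    import types
    from importlib.machinery import SourceFileLoader

    def load_hyphenated_module(name, directory):
        loader = SourceFileLoader(name, os.path.join(directory, name + ".py"))
        module = types.ModuleType(loader.name)
        loader.exec_module(module)
        return module

    # e.g. pcbios = load_hyphenated_module("bootimg-pcbios", plugins_dir)
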
diff --git a/external/poky/scripts/lib/wic/plugins/source/bootimg-efi.py b/external/poky/scripts/lib/wic/plugins/source/bootimg-efi.py
index 12698184..2cfdc10e 100644
--- a/external/poky/scripts/lib/wic/plugins/source/bootimg-efi.py
+++ b/external/poky/scripts/lib/wic/plugins/source/bootimg-efi.py
@@ -1,21 +1,7 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
# Copyright (c) 2014, Intel Corporation.
-# All rights reserved.
#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+# SPDX-License-Identifier: GPL-2.0-only
#
# DESCRIPTION
# This implements the 'bootimg-efi' source plugin class for 'wic'
@@ -69,28 +55,44 @@ class BootimgEFIPlugin(SourcePlugin):
if not bootimg_dir:
raise WicError("Couldn't find DEPLOY_DIR_IMAGE, exiting")
- cp_cmd = "cp %s/%s %s" % (bootimg_dir, initrd, hdddir)
- exec_cmd(cp_cmd, True)
+ initrds = initrd.split(';')
+ for rd in initrds:
+ cp_cmd = "cp %s/%s %s" % (bootimg_dir, rd, hdddir)
+ exec_cmd(cp_cmd, True)
else:
logger.debug("Ignoring missing initrd")
if not custom_cfg:
# Create grub configuration using parameters from wks file
bootloader = creator.ks.bootloader
+ title = source_params.get('title')
grubefi_conf = ""
grubefi_conf += "serial --unit=0 --speed=115200 --word=8 --parity=no --stop=1\n"
grubefi_conf += "default=boot\n"
grubefi_conf += "timeout=%s\n" % bootloader.timeout
- grubefi_conf += "menuentry 'boot'{\n"
+ grubefi_conf += "menuentry '%s'{\n" % (title if title else "boot")
- kernel = "/bzImage"
+ kernel = get_bitbake_var("KERNEL_IMAGETYPE")
+ if get_bitbake_var("INITRAMFS_IMAGE_BUNDLE") == "1":
+ if get_bitbake_var("INITRAMFS_IMAGE"):
+ kernel = "%s-%s.bin" % \
+ (get_bitbake_var("KERNEL_IMAGETYPE"), get_bitbake_var("INITRAMFS_LINK_NAME"))
- grubefi_conf += "linux %s root=%s rootwait %s\n" \
- % (kernel, creator.rootdev, bootloader.append)
+ label = source_params.get('label')
+ label_conf = "root=%s" % creator.rootdev
+ if label:
+ label_conf = "LABEL=%s" % label
+
+ grubefi_conf += "linux /%s %s rootwait %s\n" \
+ % (kernel, label_conf, bootloader.append)
if initrd:
- grubefi_conf += "initrd /%s\n" % initrd
+ initrds = initrd.split(';')
+ grubefi_conf += "initrd"
+ for rd in initrds:
+ grubefi_conf += " /%s" % rd
+ grubefi_conf += "\n"
grubefi_conf += "}\n"
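
With the change above, INITRD may name several images separated by ';'; each is copied into the boot partition and all of them end up on a single grub 'initrd' line. A sketch of the line construction (the file names are examples):

    def grub_initrd_line(initrd):
        return "initrd" + "".join(" /%s" % rd for rd in initrd.split(';')) + "\n"

    print(grub_initrd_line("microcode.cpio;core-image-minimal-initramfs.cpio.gz"))
    # initrd /microcode.cpio /core-image-minimal-initramfs.cpio.gz
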
@@ -125,8 +127,10 @@ class BootimgEFIPlugin(SourcePlugin):
if not bootimg_dir:
raise WicError("Couldn't find DEPLOY_DIR_IMAGE, exiting")
- cp_cmd = "cp %s/%s %s" % (bootimg_dir, initrd, hdddir)
- exec_cmd(cp_cmd, True)
+ initrds = initrd.split(';')
+ for rd in initrds:
+ cp_cmd = "cp %s/%s %s" % (bootimg_dir, rd, hdddir)
+ exec_cmd(cp_cmd, True)
else:
logger.debug("Ignoring missing initrd")
@@ -151,16 +155,30 @@ class BootimgEFIPlugin(SourcePlugin):
if not custom_cfg:
# Create systemd-boot configuration using parameters from wks file
- kernel = "/bzImage"
+ kernel = get_bitbake_var("KERNEL_IMAGETYPE")
+ if get_bitbake_var("INITRAMFS_IMAGE_BUNDLE") == "1":
+ if get_bitbake_var("INITRAMFS_IMAGE"):
+ kernel = "%s-%s.bin" % \
+ (get_bitbake_var("KERNEL_IMAGETYPE"), get_bitbake_var("INITRAMFS_LINK_NAME"))
+
+ title = source_params.get('title')
boot_conf = ""
- boot_conf += "title boot\n"
- boot_conf += "linux %s\n" % kernel
- boot_conf += "options LABEL=Boot root=%s %s\n" % \
- (creator.rootdev, bootloader.append)
+ boot_conf += "title %s\n" % (title if title else "boot")
+ boot_conf += "linux /%s\n" % kernel
+
+ label = source_params.get('label')
+ label_conf = "LABEL=Boot root=%s" % creator.rootdev
+ if label:
+ label_conf = "LABEL=%s" % label
+
+ boot_conf += "options %s %s\n" % \
+ (label_conf, bootloader.append)
if initrd:
- boot_conf += "initrd /%s\n" % initrd
+ initrds = initrd.split(';')
+ for rd in initrds:
+ boot_conf += "initrd /%s\n" % rd
logger.debug("Writing systemd-boot config "
"%s/hdd/boot/loader/entries/boot.conf", cr_workdir)
@@ -210,8 +228,14 @@ class BootimgEFIPlugin(SourcePlugin):
hdddir = "%s/hdd/boot" % cr_workdir
- install_cmd = "install -m 0644 %s/bzImage %s/bzImage" % \
- (staging_kernel_dir, hdddir)
+ kernel = get_bitbake_var("KERNEL_IMAGETYPE")
+ if get_bitbake_var("INITRAMFS_IMAGE_BUNDLE") == "1":
+ if get_bitbake_var("INITRAMFS_IMAGE"):
+ kernel = "%s-%s.bin" % \
+ (get_bitbake_var("KERNEL_IMAGETYPE"), get_bitbake_var("INITRAMFS_LINK_NAME"))
+
+ install_cmd = "install -m 0644 %s/%s %s/%s" % \
+ (staging_kernel_dir, kernel, hdddir, kernel)
exec_cmd(install_cmd)
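
The same kernel-name resolution is repeated in the bootimg-efi hunks above and in bootimg-pcbios and isoimage-isohybrid below: the image name comes from KERNEL_IMAGETYPE unless an initramfs is bundled into the kernel, in which case the <type>-<link-name>.bin artifact is used. A sketch with get_bitbake_var replaced by a dict lookup and invented variable values:

    def resolve_kernel_name(get_var):
        kernel = get_var("KERNEL_IMAGETYPE")
        if get_var("INITRAMFS_IMAGE_BUNDLE") == "1" and get_var("INITRAMFS_IMAGE"):
            kernel = "%s-%s.bin" % (get_var("KERNEL_IMAGETYPE"),
                                    get_var("INITRAMFS_LINK_NAME"))
        return kernel

    env = {"KERNEL_IMAGETYPE": "bzImage",
           "INITRAMFS_IMAGE_BUNDLE": "1",
           "INITRAMFS_IMAGE": "core-image-minimal-initramfs",
           "INITRAMFS_LINK_NAME": "core-image-minimal-initramfs-qemux86-64"}
    print(resolve_kernel_name(env.get))
    # bzImage-core-image-minimal-initramfs-qemux86-64.bin
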
diff --git a/external/poky/scripts/lib/wic/plugins/source/bootimg-partition.py b/external/poky/scripts/lib/wic/plugins/source/bootimg-partition.py
index ddc880be..138986a7 100644
--- a/external/poky/scripts/lib/wic/plugins/source/bootimg-partition.py
+++ b/external/poky/scripts/lib/wic/plugins/source/bootimg-partition.py
@@ -1,18 +1,5 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+# SPDX-License-Identifier: GPL-2.0-only
#
# DESCRIPTION
# This implements the 'bootimg-partition' source plugin class for
diff --git a/external/poky/scripts/lib/wic/plugins/source/bootimg-pcbios.py b/external/poky/scripts/lib/wic/plugins/source/bootimg-pcbios.py
index 9347aa7f..f2639e70 100644
--- a/external/poky/scripts/lib/wic/plugins/source/bootimg-pcbios.py
+++ b/external/poky/scripts/lib/wic/plugins/source/bootimg-pcbios.py
@@ -1,21 +1,7 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
# Copyright (c) 2014, Intel Corporation.
-# All rights reserved.
#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+# SPDX-License-Identifier: GPL-2.0-only
#
# DESCRIPTION
# This implements the 'bootimg-pcbios' source plugin class for 'wic'
@@ -163,8 +149,14 @@ class BootimgPcbiosPlugin(SourcePlugin):
hdddir = "%s/hdd/boot" % cr_workdir
- cmds = ("install -m 0644 %s/bzImage %s/vmlinuz" %
- (staging_kernel_dir, hdddir),
+ kernel = get_bitbake_var("KERNEL_IMAGETYPE")
+ if get_bitbake_var("INITRAMFS_IMAGE_BUNDLE") == "1":
+ if get_bitbake_var("INITRAMFS_IMAGE"):
+ kernel = "%s-%s.bin" % \
+ (get_bitbake_var("KERNEL_IMAGETYPE"), get_bitbake_var("INITRAMFS_LINK_NAME"))
+
+ cmds = ("install -m 0644 %s/%s %s/vmlinuz" %
+ (staging_kernel_dir, kernel, hdddir),
"install -m 444 %s/syslinux/ldlinux.sys %s/ldlinux.sys" %
(bootimg_dir, hdddir),
"install -m 0644 %s/syslinux/vesamenu.c32 %s/vesamenu.c32" %
diff --git a/external/poky/scripts/lib/wic/plugins/source/isoimage-isohybrid.py b/external/poky/scripts/lib/wic/plugins/source/isoimage-isohybrid.py
index 170077c2..11326a27 100644
--- a/external/poky/scripts/lib/wic/plugins/source/isoimage-isohybrid.py
+++ b/external/poky/scripts/lib/wic/plugins/source/isoimage-isohybrid.py
@@ -1,18 +1,5 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+# SPDX-License-Identifier: GPL-2.0-only
#
# DESCRIPTION
# This implements the 'isoimage-isohybrid' source plugin class for 'wic'
@@ -83,8 +70,13 @@ class IsoImagePlugin(SourcePlugin):
syslinux_conf += "DEFAULT boot\n"
syslinux_conf += "LABEL boot\n"
- kernel = "/bzImage"
- syslinux_conf += "KERNEL " + kernel + "\n"
+ kernel = get_bitbake_var("KERNEL_IMAGETYPE")
+ if get_bitbake_var("INITRAMFS_IMAGE_BUNDLE") == "1":
+ if get_bitbake_var("INITRAMFS_IMAGE"):
+ kernel = "%s-%s.bin" % \
+ (get_bitbake_var("KERNEL_IMAGETYPE"), get_bitbake_var("INITRAMFS_LINK_NAME"))
+
+ syslinux_conf += "KERNEL /" + kernel + "\n"
syslinux_conf += "APPEND initrd=/initrd LABEL=boot %s\n" \
% bootloader.append
@@ -127,9 +119,13 @@ class IsoImagePlugin(SourcePlugin):
grubefi_conf += "\n"
grubefi_conf += "menuentry 'boot'{\n"
- kernel = "/bzImage"
+ kernel = get_bitbake_var("KERNEL_IMAGETYPE")
+ if get_bitbake_var("INITRAMFS_IMAGE_BUNDLE") == "1":
+ if get_bitbake_var("INITRAMFS_IMAGE"):
+ kernel = "%s-%s.bin" % \
+ (get_bitbake_var("KERNEL_IMAGETYPE"), get_bitbake_var("INITRAMFS_LINK_NAME"))
- grubefi_conf += "linux %s rootwait %s\n" \
+ grubefi_conf += "linux /%s rootwait %s\n" \
% (kernel, bootloader.append)
grubefi_conf += "initrd /initrd \n"
grubefi_conf += "}\n"
@@ -281,9 +277,14 @@ class IsoImagePlugin(SourcePlugin):
if os.path.isfile("%s/initrd.cpio.gz" % cr_workdir):
os.remove("%s/initrd.cpio.gz" % cr_workdir)
- # Install bzImage
- install_cmd = "install -m 0644 %s/bzImage %s/bzImage" % \
- (kernel_dir, isodir)
+ kernel = get_bitbake_var("KERNEL_IMAGETYPE")
+ if get_bitbake_var("INITRAMFS_IMAGE_BUNDLE") == "1":
+ if get_bitbake_var("INITRAMFS_IMAGE"):
+ kernel = "%s-%s.bin" % \
+ (get_bitbake_var("KERNEL_IMAGETYPE"), get_bitbake_var("INITRAMFS_LINK_NAME"))
+
+ install_cmd = "install -m 0644 %s/%s %s/%s" % \
+ (kernel_dir, kernel, isodir, kernel)
exec_cmd(install_cmd)
#Create bootloader for efi boot
@@ -335,19 +336,23 @@ class IsoImagePlugin(SourcePlugin):
(img_iso_dir, isodir)
exec_cmd(install_cmd)
else:
+ # Default to 100 blocks of extra space for file system overhead
+ esp_extra_blocks = int(source_params.get('esp_extra_blocks', '100'))
+
du_cmd = "du -bks %s/EFI" % isodir
out = exec_cmd(du_cmd)
blocks = int(out.split()[0])
- # Add some extra space for file system overhead
- blocks += 100
+ blocks += esp_extra_blocks
logger.debug("Added 100 extra blocks to %s to get to %d "
"total blocks", part.mountpoint, blocks)
# dosfs image for EFI boot
bootimg = "%s/efi.img" % isodir
- dosfs_cmd = 'mkfs.vfat -n "EFIimg" -S 512 -C %s %d' \
- % (bootimg, blocks)
+ esp_label = source_params.get('esp_label', 'EFIimg')
+
+ dosfs_cmd = 'mkfs.vfat -n \'%s\' -S 512 -C %s %d' \
+ % (esp_label, bootimg, blocks)
exec_native_cmd(dosfs_cmd, native_sysroot)
mmd_cmd = "mmd -i %s ::/EFI" % bootimg
diff --git a/external/poky/scripts/lib/wic/plugins/source/rawcopy.py b/external/poky/scripts/lib/wic/plugins/source/rawcopy.py
index e86398ac..3c4997d8 100644
--- a/external/poky/scripts/lib/wic/plugins/source/rawcopy.py
+++ b/external/poky/scripts/lib/wic/plugins/source/rawcopy.py
@@ -1,18 +1,5 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+# SPDX-License-Identifier: GPL-2.0-only
#
import logging
@@ -70,7 +57,10 @@ class RawCopyPlugin(SourcePlugin):
raise WicError("No file specified")
src = os.path.join(kernel_dir, source_params['file'])
- dst = os.path.join(cr_workdir, "%s.%s" % (source_params['file'], part.lineno))
+ dst = os.path.join(cr_workdir, "%s.%s" % (os.path.basename(source_params['file']), part.lineno))
+
+        if not os.path.exists(os.path.dirname(dst)):
+            os.makedirs(os.path.dirname(dst))
if 'skip' in source_params:
sparse_copy(src, dst, skip=int(source_params['skip']))
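
The rawcopy change above keeps the temporary copy flat in the imager workdir even when the --sourceparams 'file' value points into a sub-directory, since only the basename is used for the destination name. A tiny sketch (the paths are invented):

    import os

    def rawcopy_dst(cr_workdir, source_file, lineno):
        return os.path.join(cr_workdir,
                            "%s.%s" % (os.path.basename(source_file), lineno))

    print(rawcopy_dst("/tmp/wic-workdir", "images/u-boot.bin", 3))
    # /tmp/wic-workdir/u-boot.bin.3
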
diff --git a/external/poky/scripts/lib/wic/plugins/source/rootfs.py b/external/poky/scripts/lib/wic/plugins/source/rootfs.py
index aec720fb..705aeb55 100644
--- a/external/poky/scripts/lib/wic/plugins/source/rootfs.py
+++ b/external/poky/scripts/lib/wic/plugins/source/rootfs.py
@@ -1,21 +1,7 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
# Copyright (c) 2014, Intel Corporation.
-# All rights reserved.
#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+# SPDX-License-Identifier: GPL-2.0-only
#
# DESCRIPTION
# This implements the 'rootfs' source plugin class for 'wic'
@@ -85,7 +71,7 @@ class RootfsPlugin(SourcePlugin):
new_rootfs = None
# Handle excluded paths.
- if part.exclude_path is not None:
+ if part.exclude_path or part.include_path:
# We need a new rootfs directory we can delete files from. Copy to
# workdir.
new_rootfs = os.path.realpath(os.path.join(cr_workdir, "rootfs%d" % part.lineno))
@@ -95,7 +81,10 @@ class RootfsPlugin(SourcePlugin):
copyhardlinktree(part.rootfs_dir, new_rootfs)
- for orig_path in part.exclude_path:
+ for path in part.include_path or []:
+ copyhardlinktree(path, new_rootfs)
+
+ for orig_path in part.exclude_path or []:
path = orig_path
if os.path.isabs(path):
logger.error("Must be relative: --exclude-path=%s" % orig_path)