Diffstat (limited to 'external/meta-spdxscanner')
-rw-r--r--  external/meta-spdxscanner/COPYING.MIT (renamed from external/meta-spdxscanner/LICENSE)   32
-rw-r--r--  external/meta-spdxscanner/MAINTAINERS   25
-rw-r--r--  external/meta-spdxscanner/README.md   32
-rw-r--r--  external/meta-spdxscanner/classes/fossdriver-host.bbclass   30
-rw-r--r--  external/meta-spdxscanner/classes/fossology-rest-phased.bbclass   739
-rw-r--r--  external/meta-spdxscanner/classes/fossology-rest.bbclass   32
-rw-r--r--  external/meta-spdxscanner/classes/nopackages.bbclass   1
-rw-r--r--  external/meta-spdxscanner/classes/spdx-common.bbclass   141
-rw-r--r--  external/meta-spdxscanner/conf/layer.conf   2
-rw-r--r--  external/meta-spdxscanner/recipes-devtools/scancode-toolkit/scancode-toolkit-native_3.1.1.bb   18
10 files changed, 963 insertions(+), 89 deletions(-)
diff --git a/external/meta-spdxscanner/LICENSE b/external/meta-spdxscanner/COPYING.MIT
index a9511133..fb950dc6 100644
--- a/external/meta-spdxscanner/LICENSE
+++ b/external/meta-spdxscanner/COPYING.MIT
@@ -1,21 +1,17 @@
-MIT License
-
-Copyright (c) 2016 Jan-Simon Möller
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/external/meta-spdxscanner/MAINTAINERS b/external/meta-spdxscanner/MAINTAINERS
new file mode 100644
index 00000000..23761bf8
--- /dev/null
+++ b/external/meta-spdxscanner/MAINTAINERS
@@ -0,0 +1,25 @@
+This file contains a list of maintainers for the meta-spdxscanner layer.
+
+Please submit any patches against meta-spdxscanner to the Yocto Project
+mailing list (yocto@yoctoproject.org) with '[meta-spdxscanner]' in the
+subject.
+
+You may also contact the maintainers directly.
+
+Descriptions of section entries:
+
+ M: Mail patches to: FullName <address@domain>
+ F: Files and directories with wildcard patterns.
+ A trailing slash includes all files and subdirectory files.
+    F: recipes-devtools/   all files in and below recipes-devtools/
+ One pattern per line. Multiple F: lines acceptable.
+
+Please keep this list in alphabetical order.
+
+Maintainers List (try to look for most precise areas first)
+
+COMMON
+M: Lei Maohui <leimaohui@cn.fujitsu.com>
+F: conf
+F: classes
+F: recipes-*
diff --git a/external/meta-spdxscanner/README.md b/external/meta-spdxscanner/README.md
index a41f8216..bc8da8d8 100644
--- a/external/meta-spdxscanner/README.md
+++ b/external/meta-spdxscanner/README.md
@@ -1,12 +1,9 @@
-# This repository has been moved to http://git.yoctoproject.org/cgit/cgit.cgi/meta-spdxscanner/.
-
# meta-spdxscanner
meta-spdxscanner supports the following SPDX create tools.
1. fossology REST API (Can work with fossology after 3.5.0)
2. fossdriver (Can work with fossology)
3. scancode-toolkit
-4. DoSOCSv2 (Scanner comes from fossology 3.4.0)
# This layer supplys invoking scanners as following:
@@ -18,11 +15,7 @@ meta-spdxscanner supports the following SPDX create tools.
3. scancode-toolkit
- openembedded-core
-
-4. DoSOCSv2
-- openembedded-core
-- meta-openembedded/meta-oe
-- meta-openembedded/meta-python
+- meta-python2
# How to use
@@ -33,8 +26,9 @@ meta-spdxscanner supports the following SPDX create tools.
```
INHERIT += "fossology-rest"
TOKEN = "eyJ0eXAiO..."
- FOSSOLOGY_SERVER = "http://xx.xx.xx.xx:8081/repo" //Optional,by default, it is http://127.0.0.1:8081/repo
- FOLDER_NAME = "xxxx" //Optional,by default, it is the top folder "Software Repository"(folderId=1).
+ FOSSOLOGY_SERVER = "http://xx.xx.xx.xx:8081/repo" //Optional, by default, it is http://127.0.0.1:8081/repo
+ FOLDER_NAME = "xxxx" //Optional, by default, it is the top folder "Software Repository"(folderId=1).
+ SPDX_DEPLOY_DIR = "${DeployDir}" //Optional, by default, spdx files will be deployed to ${BUILD_DIR}/tmp/deploy/spdx/
```
Note
- If you want to use fossology-rest.bbclass, you have to make sure that fossology server on your host and make sure it works well.
@@ -48,32 +42,24 @@ Note
```
INHERIT += "fossdriver-host"
+ SPDX_DEPLOY_DIR = "${DeployDir}" //Optional, by default, spdx files will be deployed to ${BUILD_DIR}/tmp/deploy/spdx/
```
Note
- If you want to use fossdriver-host.bbclass, you have to make sure that fossology server and fossdriver has been installed on your host and make sure it works well.
Please reference to https://hub.docker.com/r/fossology/fossology/ and https://github.com/fossology/fossdriver.
- Please use meta-spdxscanner/classes/nopackages.bbclass instead of oe-core. Because there is no necessary to create spdx files for *-native.
-3. scancode.bbclass
+3. scancode-tk.bbclass
- inherit the folowing class in your conf/local.conf for all of recipes or
in some recipes which you want.
```
INHERIT += "scancode-tk"
+ SPDX_DEPLOY_DIR = "${DeployDir}" //Optional, by default, spdx files will be deployed to ${BUILD_DIR}/tmp/deploy/spdx/
+
```
Note
+- scancode-tk has to be installed on the host development system, so make sure the version of Python on the host development system is 3.6.
- If you want to use scancode.bbclass, There is no need to install anything on your host.
- To aviod loop dependence,please use meta-spdxscanner/classes/nopackages.bbclass instead the file comes from oe-core.
-
-4. dosocs.bbclass
-- inherit the folowing class in your conf/local.conf for all of recipes or
- in some recipes which you want.
-
-```
- INHERIT += "dosocs"
-```
-Note
-- There is no necessary to install any OSS on host.
-- Please use meta-spdxscanner/classes/nopackages.bbclass instead of oe-core. Because there is no necessary to create spdx files for *-native.
-- Default, DoSOCSv2 uses SQLite for database, so dosocs.bbclass doesn't support multi tasks of do_spdx.
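
Before enabling any of the classes above, it can be worth confirming that the FOSSOLOGY_SERVER/TOKEN pair is accepted by the server. The sketch below is not part of the layer: it uses the third-party Python `requests` module and placeholder values, but the endpoint and bearer-token header are the same ones the fossology-rest classes build with curl.

```
# Illustrative pre-flight check only -- not shipped by meta-spdxscanner.
# Verifies that the server URL and token from local.conf are accepted by
# querying the same /api/v1/folders endpoint the classes use.
import requests

server = "http://127.0.0.1:8081/repo"   # same default as FOSSOLOGY_SERVER
token = "eyJ0eXAiO..."                   # the TOKEN value from local.conf (placeholder)

r = requests.get(server + "/api/v1/folders",
                 headers={"Authorization": "Bearer " + token})
print(r.status_code)   # 200 plus a JSON folder list means the token works
print(r.json())
```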
diff --git a/external/meta-spdxscanner/classes/fossdriver-host.bbclass b/external/meta-spdxscanner/classes/fossdriver-host.bbclass
index 0b168a60..971595c8 100644
--- a/external/meta-spdxscanner/classes/fossdriver-host.bbclass
+++ b/external/meta-spdxscanner/classes/fossdriver-host.bbclass
@@ -45,10 +45,6 @@ python do_spdx () {
if (d.getVar('PN') == "shadow-sysroot"):
return
- if d.getVar('BPN') in ['gcc', 'libgcc']:
- bb.debug(1, 'spdx: There is bug in scan of %s is, do nothing' % pn)
- return
-
# We just archive gcc-source for all the gcc related recipes
if d.getVar('BPN') in ['gcc', 'libgcc']:
bb.debug(1, 'spdx: There is bug in scan of %s is, do nothing' % pn)
@@ -60,23 +56,23 @@ python do_spdx () {
temp_dir = os.path.join(d.getVar('WORKDIR'), "temp")
info = {}
- info['workdir'] = (d.getVar('WORKDIR', True) or "")
- info['pn'] = (d.getVar( 'PN', True ) or "")
- info['pv'] = (d.getVar( 'PV', True ) or "")
- info['package_download_location'] = (d.getVar( 'SRC_URI', True ) or "")
+ info['workdir'] = (d.getVar('WORKDIR') or "")
+ info['pn'] = (d.getVar( 'PN') or "")
+ info['pv'] = (d.getVar( 'PV') or "")
+ info['package_download_location'] = (d.getVar( 'SRC_URI') or "")
if info['package_download_location'] != "":
info['package_download_location'] = info['package_download_location'].split()[0]
- info['spdx_version'] = (d.getVar('SPDX_VERSION', True) or '')
- info['data_license'] = (d.getVar('DATA_LICENSE', True) or '')
+ info['spdx_version'] = (d.getVar('SPDX_VERSION') or '')
+ info['data_license'] = (d.getVar('DATA_LICENSE') or '')
info['creator'] = {}
- info['creator']['Tool'] = (d.getVar('CREATOR_TOOL', True) or '')
- info['license_list_version'] = (d.getVar('LICENSELISTVERSION', True) or '')
- info['package_homepage'] = (d.getVar('HOMEPAGE', True) or "")
- info['package_summary'] = (d.getVar('SUMMARY', True) or "")
+ info['creator']['Tool'] = (d.getVar('CREATOR_TOOL') or '')
+ info['license_list_version'] = (d.getVar('LICENSELISTVERSION') or '')
+ info['package_homepage'] = (d.getVar('HOMEPAGE') or "")
+ info['package_summary'] = (d.getVar('SUMMARY') or "")
info['package_summary'] = info['package_summary'].replace("\n","")
info['package_summary'] = info['package_summary'].replace("'"," ")
- info['package_contains'] = (d.getVar('CONTAINED', True) or "")
- info['package_static_link'] = (d.getVar('STATIC_LINK', True) or "")
+ info['package_contains'] = (d.getVar('CONTAINED') or "")
+ info['package_static_link'] = (d.getVar('STATIC_LINK') or "")
info['modified'] = "false"
srcuri = d.getVar("SRC_URI", False).split()
length = len("file://")
@@ -86,7 +82,7 @@ python do_spdx () {
if item.endswith(".patch") or item.endswith(".diff"):
info['modified'] = "true"
- manifest_dir = (d.getVar('SPDX_DEPLOY_DIR', True) or "")
+ manifest_dir = (d.getVar('SPDX_DEPLOY_DIR') or "")
if not os.path.exists( manifest_dir ):
bb.utils.mkdirhier( manifest_dir )
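
The getVar() changes in this hunk are purely mechanical: on current BitBake the expand parameter defaults to True, so dropping the explicit second argument does not change behaviour. A minimal sketch (illustrative task name, assuming a recent BitBake datastore):

```
# Illustrative only -- not part of the layer. Shows why the ", True" argument
# removed above is redundant: both spellings return the expanded value.
python do_getvar_demo() {
    old_style = d.getVar('WORKDIR', True)   # expand flag passed explicitly (old style)
    new_style = d.getVar('WORKDIR')         # expand defaults to True (current style)
    bb.note("WORKDIR values match: %s" % (old_style == new_style))
}
```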
diff --git a/external/meta-spdxscanner/classes/fossology-rest-phased.bbclass b/external/meta-spdxscanner/classes/fossology-rest-phased.bbclass
new file mode 100644
index 00000000..68899b74
--- /dev/null
+++ b/external/meta-spdxscanner/classes/fossology-rest-phased.bbclass
@@ -0,0 +1,739 @@
+# This class integrates real-time license scanning, generation of SPDX standard
+# output and verifying license info during the building process.
+# It is a combination of efforts from the OE-Core, SPDX and DoSOCSv2 projects.
+#
+# For more information on DoSOCSv2:
+# https://github.com/DoSOCSv2
+#
+# For more information on SPDX:
+# http://www.spdx.org
+#
+# Note:
+# 1) Make sure fossdriver has been installed on your host
+# 2) By default, spdx files will be output to the path which is defined as [SPDX_DEPLOY_DIR]
+#    in ./meta/conf/spdx-dosocs.conf.
+inherit spdx-common
+FOSSOLOGY_SERVER ?= "http://127.0.0.1:8081/repo"
+FOSSOLOGY_REUPLOAD ??= "0"
+
+# Upload OSS into the No.1 folder of fossology by default
+FOSSOLOGY_FOLDER_ID ??= "1"
+FOSSOLOGY_FOLDER_NAME ??= "${DISTRO_CODENAME}-${DISTRO_VERSION}"
+
+FOSSOLOGY_EXCLUDE_PACKAGES ??= "glibc-locale libtool-cross libgcc-initial shadow-sysroot"
+FOSSOLOGY_EXCLUDE_NATIVE ??= "1"
+FOSSOLOGY_EXCLUDE_SDK ??= "1"
+# translate to common variables
+SPDX_EXCLUDE_PACKAGES := "${FOSSOLOGY_EXCLUDE_PACKAGES}"
+SPDX_EXCLUDE_NATIVE := "${FOSSOLOGY_EXCLUDE_NATIVE}"
+SPDX_EXCLUDE_SDK := "${FOSSOLOGY_EXCLUDE_SDK}"
+
+
+HOSTTOOLS_NONFATAL += "curl quilt unzip"
+
+CREATOR_TOOL = "fossology-rest-phased.bbclass in meta-spdxscanner"
+
+# If ${S} isn't actually the top-level source directory, set SPDX_S to point at
+# the real top-level directory.
+SPDX_S ?= "${S}"
+
+def populate_info(d, info):
+ info['workdir'] = (d.getVar('WORKDIR', True) or "")
+ info['pn'] = (d.getVar( 'PN', True ) or "")
+ info['pv'] = (d.getVar( 'PV', True ) or "")
+ info['package_download_location'] = (d.getVar( 'SRC_URI', True ) or "")
+ if info['package_download_location'] != "":
+ info['package_download_location'] = info['package_download_location'].split()[0]
+ info['spdx_version'] = (d.getVar('SPDX_VERSION', True) or '')
+ info['data_license'] = (d.getVar('DATA_LICENSE', True) or '')
+ info['creator'] = {}
+ info['creator']['Tool'] = (d.getVar('CREATOR_TOOL', True) or '')
+ info['license_list_version'] = (d.getVar('LICENSELISTVERSION', True) or '')
+ info['package_homepage'] = (d.getVar('HOMEPAGE', True) or "")
+ info['package_summary'] = (d.getVar('SUMMARY', True) or "")
+ info['package_summary'] = info['package_summary'].replace("\n","")
+ info['package_summary'] = info['package_summary'].replace("'"," ")
+ info['package_contains'] = (d.getVar('CONTAINED', True) or "")
+ info['package_static_link'] = (d.getVar('STATIC_LINK', True) or "")
+ info['modified'] = "false"
+ info['token'] = (d.getVar('FOSSOLOGY_TOKEN', True) or "")
+ info['manifest_dir'] = (d.getVar('SPDX_DEPLOY_DIR', True) or "")
+ info['srcuri'] = d.getVar("SRC_URI", False).split()
+ length = len("file://")
+ for item in info['srcuri']:
+ if item.startswith("file://"):
+ item = item[length:]
+ if item.endswith(".patch") or item.endswith(".diff"):
+ info['modified'] = "true"
+ info['spdx_outdir'] = d.getVar('SPDX_OUTDIR')
+ info['spdx_workdir'] = d.getVar('SPDX_WORKDIR')
+ info['spdx_temp_dir'] = os.path.join(info['spdx_workdir'], "temp")
+ info['temp_dir'] = os.path.join(d.getVar('WORKDIR'), "temp")
+ info['outfile'] = os.path.join(info['manifest_dir'], info['pn'] + "-" + info['pv'] + ".spdx" )
+ info['sstatefile'] = os.path.join(info['spdx_outdir'], info['pn'] + "-" + info['pv'] + ".spdx" )
+
+def prepare_folders(info):
+ if not os.path.exists( info['manifest_dir'] ):
+ bb.utils.mkdirhier( info['manifest_dir'] )
+ if not os.path.exists(info['spdx_outdir']):
+ bb.utils.mkdirhier(info['spdx_outdir'])
+
+
+def use_cached_files(info):
+ # if spdx sstate or report already exist
+ if os.path.exists(info['outfile']):
+        bb.warn(info['pn'] + ": The spdx report file already exists, do nothing.")
+ return
+ if os.path.exists( info['sstatefile'] ):
+        bb.warn(info['pn'] + ": The spdx sstate file already exists, do nothing.")
+ create_manifest(info, info['sstatefile'])
+ return
+
+def fossologyupload(d, bb, info):
+ import os, sys, shutil
+
+ pn = d.getVar('PN', True)
+ # exclude packages not necessary (-native, nativesdk-) or on blacklist
+ if excluded_package(d, pn):
+        bb.note("spdx: fossologyupload: excluding " + pn)
+ return
+
+ # retrieve the folder id
+ info['folder_id'] = get_folder_id(d)
+
+ # prepare folders
+ prepare_folders(info)
+
+ # get source
+ spdx_get_src(d)
+
+ bb.note('spdx: Archiving the patched source...')
+ if os.path.isdir(info['spdx_temp_dir']):
+ for f_dir, f in list_files(info['spdx_temp_dir']):
+ temp_file = os.path.join(info['spdx_temp_dir'],f_dir,f)
+ shutil.copy(temp_file, info['temp_dir'])
+ shutil.rmtree(info['spdx_temp_dir'])
+ d.setVar('WORKDIR', info['spdx_workdir'])
+ info['sourcedir'] = info['spdx_workdir']
+ git_path = "%s/git/.git" % info['sourcedir']
+ if os.path.exists(git_path):
+ remove_dir_tree(git_path)
+
+ tar_name = spdx_create_tarball(d, d.getVar('WORKDIR'), 'patched', info['spdx_outdir'])
+
+ ## get everything from cache. use it to decide if
+ ## something needs to be rerun
+ cur_ver_code = get_ver_code(info['spdx_workdir']).split()[0]
+ # upload archive
+ if invoke_rest_api_upload(d, tar_name, info['outfile'], info['folder_id']) == False:
+ bb.error(info['pn'] + ": Upload failed, please check fossology server.")
+ return
+
+
+def fossologyanalyse(d, bb, info):
+ import os, sys, shutil
+
+ pn = d.getVar('PN', True)
+ # exclude packages not necessary (-native, nativesdk-) or on blacklist
+ if excluded_package(d, pn):
+ bb.note("spdx: fossologyanalyze: excluding "+ pn)
+ return
+
+
+ # retrieve the folder id
+ info['folder_id'] = get_folder_id(d)
+
+ # prepare folders
+ prepare_folders(info)
+
+ # TODO: put into common func or info[]
+ filename = '%s-%s.tar.gz' % (d.getVar('PF'), 'patched')
+ tar_name = os.path.join(info['spdx_outdir'], filename)
+
+    # invoke the analysis
+ if invoke_rest_api_analysis(d, tar_name, info['sstatefile'], info['folder_id']) == False:
+ bb.error("spdx: " + info['pn'] + ": Analysis trigger failed, please check fossology server.")
+ return
+
+
+def fossologytrigger(d, bb, info):
+ import os, sys, shutil
+
+ pn = d.getVar('PN', True)
+ # exclude packages not necessary (-native, nativesdk-) or on blacklist
+ if excluded_package(d, pn):
+        bb.note("spdx: fossologytrigger: excluding " + pn)
+ return
+
+ # setup variables
+ info = {}
+ populate_info(d, info)
+
+ # retrieve the folder id
+ info['folder_id'] = get_folder_id(d)
+
+ # TODO: put into common func or info[]
+ filename = '%s-%s.tar.gz' % (d.getVar('PF'), 'patched')
+ tar_name = os.path.join(info['spdx_outdir'], filename)
+
+ report_id = invoke_rest_api_triggerreport(d, tar_name, info['outfile'], info['folder_id'])
+ if report_id == False:
+ bb.error("spdx:" + info['pn'] + ": trigger_spdx failed.")
+ return False
+ else:
+ info['report_id'] = report_id
+ d.setVar('spdx_report_id', info['report_id'])
+ return True
+
+
+def fossologyreport(d, bb, info):
+ import os, sys, shutil
+
+ pn = d.getVar('PN', True)
+ # exclude packages not necessary (-native, nativesdk-) or on blacklist
+ if excluded_package(d, pn):
+        bb.note("spdx: fossologyreport: excluding " + pn)
+ return
+
+ if not info['folder_id']:
+ # retrieve the folder id
+ info['folder_id'] = get_folder_id(d)
+ if not 'report_id' in info:
+ x = d.getVar("spdx_report_id")
+ if not x:
+ bb.error("spdx: no report_id")
+ return False
+ info['report_id'] = x
+
+ # TODO: put into common func or info[]
+ filename = '%s-%s.tar.gz' % (d.getVar('PF'), 'patched')
+ tar_name = os.path.join(info['spdx_outdir'], filename)
+
+ bb.warn("outfile: " + str(info['outfile']))
+
+ if invoke_rest_api_getresult(d, tar_name, info['outfile'], info['folder_id'], info['report_id']) == False:
+ bb.error("spdx: " + info['pn'] + ": get_spdx failed.")
+ return
+
+
+def fossologywaitextracted(d, bb, info):
+ import time
+ import subprocess
+ import os
+ delaytime = 120
+ i = 0
+
+ pn = d.getVar('PN', True)
+ # exclude packages not necessary (-native, nativesdk-) or on blacklist
+ if excluded_package(d, pn):
+        bb.note("spdx: fossologywaitextracted: excluding " + pn)
+ return
+
+ # setup variables
+ info = {}
+ populate_info(d, info)
+
+ # retrieve the folder id
+ info['folder_id'] = get_folder_id(d)
+
+ # TODO: put into common func or info[]
+ filename = '%s-%s.tar.gz' % (d.getVar('PF'), 'patched')
+ tar_name = os.path.join(info['spdx_outdir'], filename)
+
+ server_url = (d.getVar('FOSSOLOGY_SERVER', True) or "")
+ if server_url == "":
+ bb.note("Please set fossology server URL by setting FOSSOLOGY_SERVER!\n")
+ raise OSError(errno.ENOENT, "No setting of FOSSOLOGY_SERVER")
+
+ token = (d.getVar('FOSSOLOGY_TOKEN', True) or "")
+ if token == "":
+ bb.note("Please set token of fossology server by setting FOSSOLOGY_TOKEN!\n")
+ raise OSError(errno.ENOENT, "No setting of FOSSOLOGY_TOKEN comes from fossology server.")
+
+ upload_id = has_upload(d, tar_name, info['folder_id'], True)
+ bb.warn("spdx: upload_id :" + str(upload_id))
+ if upload_id:
+ info['upload_id'] = upload_id
+
+ while i < 360:
+ #api/v1/jobs?upload=id
+ rest_api_cmd = "curl -k -s -S -X GET " + server_url + "/api/v1/jobs?upload=" \
+ + str(upload_id) \
+ + " -H \"Authorization: Bearer " + token + "\"" \
+ + " --noproxy 127.0.0.1"
+ bb.note("Invoke rest_api_cmd = " + rest_api_cmd)
+ try:
+ jobstatus = subprocess.check_output(rest_api_cmd, stderr=subprocess.STDOUT, shell=True)
+ except subprocess.CalledProcessError as e:
+ bb.error(d.getVar('PN', True) + ": Could not get job status: \n%s" % e.output.decode("utf-8"))
+ return
+ jobstatus = str(jobstatus, encoding = "utf-8")
+ jobstatus = eval(jobstatus)
+ bb.note(str(jobstatus))
+ if len(jobstatus) == 0:
+ bb.warn("The jobstatus is 0." + str(jobstatus))
+ return False
+ ret=0
+ for i in range(0, len(jobstatus)):
+ bb.warn(str(jobstatus[i]))
+ # wait for any job to be complete before proceeding
+ if jobstatus[i]["status"] == "Completed":
+ bb.note("Job part complete.")
+ else:
+ ret += 1
+ if ret == 0:
+ bb.warn("Job complete.")
+ return
+ i += 1
+ time.sleep(delaytime)
+ else:
+ bb.error("No upload_id")
+ return
+
+addtask spdxupload
+do_spdxupload[depends] = "${SPDXEPENDENCY}"
+do_spdxupload[deptask] += "do_fetch do_unpack do_patch"
+python do_spdxupload() {
+ # setup variables
+ info = {}
+ populate_info(d, info)
+
+ fossologyupload(d, bb, info)
+}
+
+
+addtask spdxwaitextracted
+python do_spdxwaitextracted() {
+ # setup variables
+ info = {}
+ populate_info(d, info)
+ fossologywaitextracted(d, bb, info)
+}
+
+addtask spdxanalyse
+python do_spdxanalyse() {
+ # setup variables
+ info = {}
+ populate_info(d, info)
+ #fossologywaitextracted(d, bb, info)
+ fossologyanalyse(d, bb, info)
+}
+
+addtask spdxreport
+python do_spdxreport(){
+ import time
+ # setup variables
+ info = {}
+ populate_info(d, info)
+
+ if fossologytrigger(d, bb, info):
+ fossologyreport(d, bb, info)
+}
+
+python do_spdx() {
+
+ pn = d.getVar('PN')
+ info = {}
+ populate_info(d, info)
+
+ fossologyupload(d, bb, info)
+
+    fossologywaitextracted(d, bb, info)
+
+ fossologyanalyse(d, bb, info)
+
+ if fossologytrigger(d, bb, info):
+ fossologyreport(d, bb, info)
+
+}
+
+
+def get_folder_id_by_name(d, folder_name):
+ import os
+ import subprocess
+ import json
+
+ server_url = (d.getVar('FOSSOLOGY_SERVER', True) or "")
+ if server_url == "":
+ bb.note("Please set fossology server URL by setting FOSSOLOGY_SERVER!\n")
+ raise OSError(errno.ENOENT, "No setting of FOSSOLOGY_SERVER")
+
+ token = (d.getVar('FOSSOLOGY_TOKEN', True) or "")
+ if token == "":
+ bb.note("Please set token of fossology server by setting FOSSOLOGY_TOKEN!\n")
+ raise OSError(errno.ENOENT, "No setting of FOSSOLOGY_TOKEN comes from fossology server.")
+
+ rest_api_cmd = "curl -k -s -S -X GET " + server_url + "/api/v1/folders" \
+ + " -H \"Authorization: Bearer " + token + "\"" \
+ + " --noproxy 127.0.0.1"
+ bb.note("Invoke rest_api_cmd = " + rest_api_cmd )
+ try:
+ all_folder = subprocess.check_output(rest_api_cmd, stderr=subprocess.STDOUT, shell=True)
+ except subprocess.CalledProcessError as e:
+ bb.error(d.getVar('PN', True) + ": Get folder list failed: \n%s" % e.output.decode("utf-8"))
+ return False
+ all_folder = str(all_folder, encoding = "utf-8")
+ all_folder = json.loads(all_folder)
+ if len(all_folder) == 0:
+ bb.note("Can not get folder list.")
+ return False
+ bb.note("all_folder[0][name] = " + all_folder[0]["name"])
+ for i in range(0, len(all_folder)):
+ if all_folder[i]["name"] == folder_name:
+            bb.note("Found " + folder_name + " on fossology server.")
+ return all_folder[i]["id"]
+ return False
+
+def create_folder(d, folder_name):
+ import os
+ import subprocess
+
+ server_url = (d.getVar('FOSSOLOGY_SERVER', True) or "")
+ if server_url == "":
+ bb.note("Please set fossology server URL by setting FOSSOLOGY_SERVER!\n")
+ raise OSError(errno.ENOENT, "No setting of FOSSOLOGY_SERVER")
+
+ token = (d.getVar('FOSSOLOGY_TOKEN', True) or "")
+ if token == "":
+ bb.note("Please set token of fossology server by setting FOSSOLOGY_TOKEN!\n")
+ raise OSError(errno.ENOENT, "No setting of FOSSOLOGY_TOKEN comes from fossology server.")
+
+ rest_api_cmd = "curl -k -s -S -X POST " + server_url + "/api/v1/folders" \
+ + " -H \'parentFolder: 1\'" \
+ + " -H \'folderName: " + folder_name + "\'" \
+ + " -H \"Authorization: Bearer " + token + "\"" \
+ + " --noproxy 127.0.0.1"
+ bb.note("Invoke rest_api_cmd = " + rest_api_cmd)
+ try:
+ add_folder = subprocess.check_output(rest_api_cmd, stderr=subprocess.STDOUT, shell=True)
+ except subprocess.CalledProcessError as e:
+ bb.error(d.getVar('PN', True) + ": Added folder failed: \n%s" % e.output.decode("utf-8"))
+ return False
+
+ add_folder = str(add_folder, encoding = "utf-8")
+ add_folder = eval(add_folder)
+ if str(add_folder["code"]) == "201":
+ bb.note("add_folder = " + folder_name)
+ return add_folder["message"]
+ elif str(add_folder["code"]) == "200":
+        bb.note("Folder : " + folder_name + " has been created.")
+ return get_folder_id_by_name(d, folder_name)
+ else:
+ bb.error(d.getVar('PN', True) + ": Added folder failed, please check your fossology server.")
+ return False
+
+def get_folder_id(d):
+ if d.getVar('FOSSOLOGY_FOLDER_NAME', False):
+ folder_name = d.getVar('FOSSOLOGY_FOLDER_NAME')
+ folder_id = create_folder(d, folder_name)
+ else:
+ folder_id = (d.getVar('FOSSOLOGY_FOLDER_ID', True) or "1")
+ bb.note("Folder Id = " + str(folder_id))
+ return str(folder_id)
+
+def has_upload(d, tar_file, folder_id, skipupload=False):
+ import os
+ import subprocess
+
+ (work_dir, file_name) = os.path.split(tar_file)
+
+ server_url = (d.getVar('FOSSOLOGY_SERVER', True) or "")
+ if server_url == "":
+ bb.note("Please set fossology server URL by setting FOSSOLOGY_SERVER!\n")
+ raise OSError(errno.ENOENT, "No setting of FOSSOLOGY_SERVER")
+
+ token = (d.getVar('FOSSOLOGY_TOKEN', True) or "")
+ if token == "":
+ bb.note("Please set token of fossology server by setting FOSSOLOGY_TOKEN!\n")
+ raise OSError(errno.ENOENT, "No setting of TOKEN comes from fossology server.")
+
+ rest_api_cmd = "curl -k -s -S -X GET " + server_url + "/api/v1/uploads" \
+ + " -H \"Authorization: Bearer " + token + "\"" \
+ + " --noproxy 127.0.0.1"
+ bb.note("Invoke rest_api_cmd = " + rest_api_cmd )
+ try:
+ upload_output = subprocess.check_output(rest_api_cmd, stderr=subprocess.STDOUT, shell=True)
+ except subprocess.CalledProcessError as e:
+ bb.error("curl failed: \n%s" % e.output.decode("utf-8"))
+ return False
+
+ upload_output = str(upload_output, encoding = "utf-8")
+ upload_output = eval(upload_output)
+ if len(upload_output) == 0:
+ bb.warn("The upload of fossology is 0.")
+ return False
+ for i in range(0, len(upload_output)):
+ if upload_output[i]["uploadname"] == file_name and str(upload_output[i]["folderid"]) == str(folder_id):
+ if d.getVar('FOSSOLOGY_REUPLOAD') == "1" and not skipupload:
+ bb.warn("#### Reupload triggered ####")
+ return False
+ #bb.warn("########### upload_output[i][uploadname] = " + upload_output[i]["uploadname"])
+ #bb.warn("########### Found " + file_name + " on fossology server \"Software Repository\" folder. Will skip upload.")
+ return upload_output[i]["id"]
+ return False
+
+def upload(d, tar_file, folder):
+ import os
+ import subprocess
+ import time
+ delaytime = 50
+ i = 0
+ j = 0
+
+ server_url = (d.getVar('FOSSOLOGY_SERVER', True) or "")
+ if server_url == "":
+ bb.note("Please set fossology server URL by setting FOSSOLOGY_SERVER!\n")
+ raise OSError(errno.ENOENT, "No setting of FOSSOLOGY_SERVER")
+
+ token = (d.getVar('FOSSOLOGY_TOKEN', True) or "")
+ if token == "":
+ bb.note("Please set token of fossology server by setting FOSSOLOGY_TOKEN!\n")
+ raise OSError(errno.ENOENT, "No setting of TOKEN comes from fossology server.")
+
+ rest_api_cmd = "curl -k -s -S -X POST " + server_url + "/api/v1/uploads" \
+ + " -H \"folderId: " + folder + "\"" \
+ + " -H \"Authorization: Bearer " + token + "\"" \
+ + " -H \'uploadDescription: created by REST\'" \
+ + " -H \'public: public\'" \
+ + " -H \'Content-Type: multipart/form-data\'" \
+ + " -F \'fileInput=@\"" + tar_file + "\";type=application/octet-stream\'" \
+ + " --noproxy 127.0.0.1"
+ bb.note("Upload : Invoke rest_api_cmd = " + rest_api_cmd )
+ while i < 10:
+ try:
+ upload = subprocess.check_output(rest_api_cmd, stderr=subprocess.STDOUT, shell=True)
+ except subprocess.CalledProcessError as e:
+ bb.error(d.getVar('PN', True) + ": Upload failed: \n%s" % e.output.decode("utf-8"))
+ return False
+ upload = str(upload, encoding = "utf-8")
+ if not upload:
+ bb.error("No upload response")
+ time.sleep(delaytime)
+ continue
+ if "ERROR" in str(upload):
+ bb.warn("ERROR in upload")
+ time.sleep(delaytime)
+ continue
+ if "Error" in str(upload):
+ bb.warn("Error in upload")
+ time.sleep(delaytime)
+ continue
+ if "error" in str(upload):
+ bb.warn("error in upload")
+ time.sleep(delaytime)
+ continue
+ if "504 Gateway Time-out" in str(upload):
+ bb.warn("504 Gateway Timeout in upload")
+ time.sleep(delaytime)
+ continue
+ upload = eval(upload)
+ if str(upload["code"]) == "201":
+ return upload["message"]
+ i += 1
+ time.sleep(delaytime)
+ bb.error("spdx: " + d.getVar('PN', True) + ": Upload failed, please check your fossology server connection.")
+ bb.error(str(upload))
+ return False
+
+def analysis(d, folder_id, upload_id):
+ import os
+ import subprocess
+ delaytime = 50
+ i = 0
+
+ server_url = (d.getVar('FOSSOLOGY_SERVER', True) or "")
+ if server_url == "":
+ bb.note("Please set fossology server URL by setting FOSSOLOGY_SERVER!\n")
+ raise OSError(errno.ENOENT, "No setting of FOSSOLOGY_SERVER")
+
+ token = (d.getVar('FOSSOLOGY_TOKEN', True) or "")
+ if token == "":
+ bb.note("Please set token of fossology server by setting FOSSOLOGY_TOKEN!\n")
+ raise OSError(errno.ENOENT, "No setting of FOSSOLOGY_TOKEN comes from fossology server.")
+
+ rest_api_cmd = "curl -k -s -S -X POST " + server_url + "/api/v1/jobs" \
+ + " -H \"folderId: " + str(folder_id) + "\"" \
+ + " -H \"uploadId: " + str(upload_id) + "\"" \
+ + " -H \"Authorization: Bearer " + token + "\"" \
+ + " -H \'Content-Type: application/json\'" \
+ + " --data \'{\"analysis\": {\"bucket\": true,\"copyright_email_author\": true,\"ecc\": true, \"keyword\": true,\"mime\": true,\"monk\": true,\"nomos\": true,\"ojo\": true,\"package\": true},\"decider\": {\"nomos_monk\": true,\"new_scanner\": true}}\'" \
+ + " --noproxy 127.0.0.1"
+ bb.warn("Analysis : Invoke rest_api_cmd = " + rest_api_cmd )
+ try:
+ analysis = subprocess.check_output(rest_api_cmd, stderr=subprocess.STDOUT, shell=True)
+ except subprocess.CalledProcessError as e:
+ bb.error("Analysis failed: \n%s" % e.output.decode("utf-8"))
+ return False
+ analysis = str(analysis, encoding = "utf-8")
+ analysis = eval(analysis)
+ if str(analysis["code"]) == "201":
+ return analysis["message"]
+ elif str(analysis["code"]) == "404":
+ bb.error("spdx: " + d.getVar('PN', True) + ": cannot trigger analysis as extraction failed.")
+ bb.error(str(analysis))
+ return False
+ else:
+ bb.error("spdx: analysis trigger failed: " + str(analysis))
+ return False
+
+def trigger(d, folder_id, upload_id):
+ import os
+    import subprocess
+    import time
+ delaytime = 50
+ i = 0
+
+ server_url = (d.getVar('FOSSOLOGY_SERVER', True) or "")
+ if server_url == "":
+ bb.note("Please set fossology server URL by setting FOSSOLOGY_SERVER!\n")
+ raise OSError(errno.ENOENT, "No setting of FOSSOLOGY_SERVER")
+
+ token = (d.getVar('FOSSOLOGY_TOKEN', True) or "")
+ if token == "":
+ bb.note("Please set token of fossology server by setting FOSSOLOGY_TOKEN!\n")
+ raise OSError(errno.ENOENT, "No setting of FOSSOLOGY_TOKEN comes from fossology server.")
+
+ rest_api_cmd = "curl -k -s -S -X GET " + server_url + "/api/v1/report" \
+ + " -H \"Authorization: Bearer " + token + "\"" \
+ + " -H \"uploadId: " + str(upload_id) + "\"" \
+ + " -H \'reportFormat: spdx2tv\'" \
+ + " --noproxy 127.0.0.1"
+ bb.note("trigger : Invoke rest_api_cmd = " + rest_api_cmd )
+ while i < 10:
+ time.sleep(10)
+ try:
+ trigger = subprocess.check_output(rest_api_cmd, stderr=subprocess.STDOUT, shell=True)
+ except subprocess.CalledProcessError as e:
+ bb.error(d.getVar('PN', True) + ": Trigger failed: \n%s" % e.output.decode("utf-8"))
+ return False
+ trigger = str(trigger, encoding = "utf-8")
+ trigger = eval(trigger)
+ if str(trigger["code"]) == "201":
+ return trigger["message"].split("/")[-1]
+ i += 1
+ time.sleep(delaytime * 2)
+ bb.note("spdx: " + d.getVar('PN', True) + ": Trigger failed, will try again.")
+ bb.note("spdx: " + d.getVar('PN', True) + ": Trigger failed, please check your fossology server.")
+ return False
+
+def get_spdx(d, report_id, spdx_file):
+ import os
+ import subprocess
+ import time
+ delaytime = 10
+ complete = False
+ i = 0
+
+ # Wait 20 seconds for the report to be generated on the server
+ time.sleep(20)
+
+ server_url = (d.getVar('FOSSOLOGY_SERVER', True) or "")
+ if server_url == "":
+ bb.note("Please set fossology server URL by setting FOSSOLOGY_SERVER!\n")
+ raise OSError(errno.ENOENT, "No setting of FOSSOLOGY_SERVER")
+
+ token = (d.getVar('FOSSOLOGY_TOKEN', True) or "")
+ if token == "":
+ bb.note("Please set token of fossology server by setting FOSSOLOGY_TOKEN!\n")
+ raise OSError(errno.ENOENT, "No setting of FOSSOLOGY_TOKEN comes from fossology server.")
+ rest_api_cmd = "curl -k -s -S -X GET " + server_url + "/api/v1/report/" + report_id \
+ + " -H \'accept: text/plain\'" \
+ + " -H \"Authorization: Bearer " + token + "\"" \
+ + " --noproxy 127.0.0.1"
+ bb.note("get_spdx : Invoke rest_api_cmd = " + rest_api_cmd )
+ while i < 24:
+ file = open(spdx_file,'wt')
+ try:
+ p = subprocess.Popen(rest_api_cmd, shell=True, universal_newlines=True, stdout=file).wait()
+ except subprocess.CalledProcessError as e:
+ bb.error("Get spdx failed: \n%s. Please check fossology server." % e.output.decode("utf-8"))
+ file.close()
+ os.remove(spdx_file)
+ return False
+ file.flush()
+ file.close()
+ time.sleep(2)
+ file = open(spdx_file,'r+')
+ first_line = file.readline()
+ bb.note("SPDX_FILE first line: " + str(first_line))
+ if "SPDXVersion" in first_line:
+ complete = True
+ file.close()
+
+ if complete == True:
+ return True
+ else:
+ bb.note("spdx: " + d.getVar('PN', True) + ": Get the first line is " + first_line + ". Will try again.")
+ i += 1
+ time.sleep(delaytime)
+ delaytime = delaytime + 10
+ file.close()
+ bb.error("spdx: " + d.getVar('PN', True) + ": SPDX report could not be downloaded.")
+
+def invoke_rest_api_upload(d, tar_file, spdx_file, folder_id):
+ import os
+ import time
+ i = 0
+ bb.note("invoke fossology REST API : tar_file = %s " % tar_file)
+ upload_id = has_upload(d, tar_file, folder_id, False)
+ if upload_id == False:
+ bb.warn("This OSS has not been scanned. So upload it to fossology server.")
+ upload_id = upload(d, tar_file, folder_id)
+ if upload_id == False:
+ return False
+ return True
+
+def invoke_rest_api_analysis(d, tar_file, spdx_file, folder_id):
+ upload_id = has_upload(d, tar_file, folder_id, True)
+ if upload_id == False:
+ if d.getVar('FOSSOLOGY_REUPLOAD') == "1":
+ bb.note("Reupload.")
+ upload_id = upload(d, tar_file, folder_id)
+ if upload_id == False:
+ return False
+ else:
+ bb.note("This OSS has not been uploaded. Skip it.")
+ return False
+ if analysis(d, folder_id, upload_id) == False:
+ bb.note("Analysis failed.")
+ return False
+ bb.note("Analysis success.")
+ return True
+
+def invoke_rest_api_triggerreport(d, tar_file, spdx_file, folder_id):
+ import time
+ i = 0
+
+ upload_id = has_upload(d, tar_file, folder_id, True)
+ if upload_id == False:
+ bb.error("Could not find the file on the fossology server!")
+ return False
+
+ while i < 3:
+ i += 1
+ report_id = trigger(d, folder_id, upload_id)
+ bb.note("Report_id: " + str(report_id))
+ if report_id:
+ return report_id
+ time.sleep(30)
+
+ bb.error("Could not trigger the report generation for " + d.getVar('PN', True))
+ return False
+
+def invoke_rest_api_getresult(d, tar_file, spdx_file, folder_id, report_id):
+ i = 0
+
+ upload_id = has_upload(d, tar_file, folder_id, True)
+ if upload_id == False:
+ bb.error("No upload of this software found on the fossology server!")
+ return False
+
+ while i < 3:
+ i += 1
+ spdx2tv = get_spdx(d, report_id, spdx_file)
+ if spdx2tv == False:
+ bb.note("spdx : " + d.getVar('PN', True) + ": get_spdx failed. Will try again!")
+ else:
+ return True
+
+    bb.error("spdx: get_spdx of " + d.getVar('PN', True) + " failed. Please confirm!")
+ return False
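
Taken together, the phased tasks above drive four FOSSology REST calls: upload the patched-source tarball, start the scanner jobs, ask for an spdx2tv report, and download it. The sketch below is illustrative only; it condenses the curl command lines from the class into the third-party `requests` module, and the helper name, argument names and lack of retry/error handling are assumptions, not part of the layer.

```
# Condensed, illustrative view of the REST calls built by rest_api_cmd above.
import requests

def fossology_phases(server, token, folder_id, tarball, spdx_out):
    auth = {"Authorization": "Bearer " + token}

    # 1) do_spdxupload: POST the patched-source tarball to api/v1/uploads
    with open(tarball, "rb") as f:
        r = requests.post(server + "/api/v1/uploads",
                          headers={**auth, "folderId": str(folder_id),
                                   "uploadDescription": "created by REST",
                                   "public": "public"},
                          files={"fileInput": f})
    upload_id = r.json()["message"]          # the class keeps this id

    # 2) do_spdxanalyse: POST api/v1/jobs to start the scanner agents
    requests.post(server + "/api/v1/jobs",
                  headers={**auth, "folderId": str(folder_id),
                           "uploadId": str(upload_id)},
                  json={"analysis": {"monk": True, "nomos": True, "ojo": True},
                        "decider": {"nomos_monk": True, "new_scanner": True}})

    # 3) do_spdxreport (trigger): GET api/v1/report with reportFormat spdx2tv
    r = requests.get(server + "/api/v1/report",
                     headers={**auth, "uploadId": str(upload_id),
                              "reportFormat": "spdx2tv"})
    report_id = r.json()["message"].split("/")[-1]

    # 4) do_spdxreport (download): GET api/v1/report/<id> as plain text
    r = requests.get(server + "/api/v1/report/" + report_id,
                     headers={**auth, "accept": "text/plain"})
    with open(spdx_out, "w") as out:
        out.write(r.text)
```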
diff --git a/external/meta-spdxscanner/classes/fossology-rest.bbclass b/external/meta-spdxscanner/classes/fossology-rest.bbclass
index d253853d..4e906093 100644
--- a/external/meta-spdxscanner/classes/fossology-rest.bbclass
+++ b/external/meta-spdxscanner/classes/fossology-rest.bbclass
@@ -22,6 +22,8 @@ HOSTTOOLS_NONFATAL += "curl"
CREATOR_TOOL = "fossology-rest.bbclass in meta-spdxscanner"
+NO_PROXY ?= "127.0.0.1"
+
# If ${S} isn't actually the top-level source directory, set SPDX_S to point at
# the real top-level directory.
SPDX_S ?= "${S}"
@@ -107,7 +109,7 @@ python do_spdx () {
for f_dir, f in list_files(spdx_temp_dir):
temp_file = os.path.join(spdx_temp_dir,f_dir,f)
shutil.copy(temp_file, temp_dir)
- # shutil.rmtree(spdx_temp_dir)
+ shutil.rmtree(spdx_temp_dir)
d.setVar('WORKDIR', spdx_workdir)
info['sourcedir'] = spdx_workdir
git_path = "%s/git/.git" % info['sourcedir']
@@ -144,6 +146,8 @@ def get_folder_id_by_name(d, folder_name):
import subprocess
import json
+ no_proxy = (d.getVar('NO_PROXY', True) or "")
+
server_url = (d.getVar('FOSSOLOGY_SERVER', True) or "")
if server_url == "":
bb.note("Please set fossology server URL by setting FOSSOLOGY_SERVER!\n")
@@ -156,7 +160,7 @@ def get_folder_id_by_name(d, folder_name):
rest_api_cmd = "curl -k -s -S -X GET " + server_url + "/api/v1/folders" \
+ " -H \"Authorization: Bearer " + token + "\"" \
- + " --noproxy 127.0.0.1"
+ + " --noproxy " + no_proxy
bb.note("Invoke rest_api_cmd = " + rest_api_cmd )
try:
all_folder = subprocess.check_output(rest_api_cmd, stderr=subprocess.STDOUT, shell=True)
@@ -183,6 +187,7 @@ def create_folder(d, folder_name):
import os
import subprocess
+ no_proxy = (d.getVar('NO_PROXY', True) or "")
server_url = (d.getVar('FOSSOLOGY_SERVER', True) or "")
if server_url == "":
bb.note("Please set fossology server URL by setting FOSSOLOGY_SERVER!\n")
@@ -197,7 +202,7 @@ def create_folder(d, folder_name):
+ " -H \'parentFolder: 1\'" \
+ " -H \'folderName: " + folder_name + "\'" \
+ " -H \"Authorization: Bearer " + token + "\"" \
- + " --noproxy 127.0.0.1"
+ + " --noproxy " + no_proxy
bb.note("Invoke rest_api_cmd = " + rest_api_cmd)
try:
add_folder = subprocess.check_output(rest_api_cmd, stderr=subprocess.STDOUT, shell=True)
@@ -235,7 +240,7 @@ def has_upload(d, tar_file, folder_id):
import subprocess
(work_dir, file_name) = os.path.split(tar_file)
-
+ no_proxy = (d.getVar('NO_PROXY', True) or "")
server_url = (d.getVar('FOSSOLOGY_SERVER', True) or "")
if server_url == "":
bb.note("Please set fossology server URL by setting FOSSOLOGY_SERVER!\n")
@@ -248,7 +253,7 @@ def has_upload(d, tar_file, folder_id):
rest_api_cmd = "curl -k -s -S -X GET " + server_url + "/api/v1/uploads" \
+ " -H \"Authorization: Bearer " + token + "\"" \
- + " --noproxy 127.0.0.1"
+ + " --noproxy " + no_proxy
bb.note("Invoke rest_api_cmd = " + rest_api_cmd )
try:
@@ -272,7 +277,7 @@ def has_upload(d, tar_file, folder_id):
bb.note(str(len(upload_output)))
for i in range(0, len(upload_output)):
if upload_output[i]["uploadname"] == file_name and str(upload_output[i]["folderid"]) == str(folder_id):
- bb.warn("Find " + file_name + " in fossology server \"Software Repository\" folder. So, will not upload again.")
+ bb.warn("Find " + file_name + " in fossology server. So, will not upload again.")
return upload_output[i]["id"]
return False
@@ -282,6 +287,7 @@ def upload(d, tar_file, folder):
delaytime = 50
i = 0
+ no_proxy = (d.getVar('NO_PROXY', True) or "")
server_url = (d.getVar('FOSSOLOGY_SERVER', True) or "")
if server_url == "":
bb.note("Please set fossology server URL by setting FOSSOLOGY_SERVER!\n")
@@ -299,7 +305,7 @@ def upload(d, tar_file, folder):
+ " -H \'public: public\'" \
+ " -H \'Content-Type: multipart/form-data\'" \
+ " -F \'fileInput=@\"" + tar_file + "\";type=application/octet-stream\'" \
- + " --noproxy 127.0.0.1"
+ + " --noproxy " + no_proxy
bb.note("Upload : Invoke rest_api_cmd = " + rest_api_cmd )
while i < 10:
time.sleep(delaytime)
@@ -324,6 +330,7 @@ def analysis(d, folder_id, upload_id):
delaytime = 50
i = 0
+ no_proxy = (d.getVar('NO_PROXY', True) or "")
server_url = (d.getVar('FOSSOLOGY_SERVER', True) or "")
if server_url == "":
bb.note("Please set fossology server URL by setting FOSSOLOGY_SERVER!\n")
@@ -340,7 +347,7 @@ def analysis(d, folder_id, upload_id):
+ " -H \"Authorization: Bearer " + token + "\"" \
+ " -H \'Content-Type: application/json\'" \
+ " --data \'{\"analysis\": {\"bucket\": true,\"copyright_email_author\": true,\"ecc\": true, \"keyword\": true,\"mime\": true,\"monk\": true,\"nomos\": true,\"package\": true},\"decider\": {\"nomos_monk\": true,\"bulk_reused\": true,\"new_scanner\": true}}\'" \
- + " --noproxy 127.0.0.1"
+ + " --noproxy " + no_proxy
bb.note("Analysis : Invoke rest_api_cmd = " + rest_api_cmd )
while i < 10:
try:
@@ -372,6 +379,7 @@ def trigger(d, folder_id, upload_id):
delaytime = 50
i = 0
+ no_proxy = (d.getVar('NO_PROXY', True) or "")
server_url = (d.getVar('FOSSOLOGY_SERVER', True) or "")
if server_url == "":
bb.note("Please set fossology server URL by setting FOSSOLOGY_SERVER!\n")
@@ -386,7 +394,7 @@ def trigger(d, folder_id, upload_id):
+ " -H \"Authorization: Bearer " + token + "\"" \
+ " -H \"uploadId: " + str(upload_id) + "\"" \
+ " -H \'reportFormat: spdx2tv\'" \
- + " --noproxy 127.0.0.1"
+ + " --noproxy " + no_proxy
bb.note("trigger : Invoke rest_api_cmd = " + rest_api_cmd )
while i < 10:
time.sleep(delaytime)
@@ -416,6 +424,7 @@ def get_spdx(d, report_id, spdx_file):
complete = False
i = 0
+ no_proxy = (d.getVar('NO_PROXY', True) or "")
server_url = (d.getVar('FOSSOLOGY_SERVER', True) or "")
if server_url == "":
bb.note("Please set fossology server URL by setting FOSSOLOGY_SERVER!\n")
@@ -428,7 +437,7 @@ def get_spdx(d, report_id, spdx_file):
rest_api_cmd = "curl -k -s -S -X GET " + server_url + "/api/v1/report/" + report_id \
+ " -H \'accept: text/plain\'" \
+ " -H \"Authorization: Bearer " + token + "\"" \
- + " --noproxy 127.0.0.1"
+ + " --noproxy " + no_proxy
bb.note("get_spdx : Invoke rest_api_cmd = " + rest_api_cmd )
while i < 10:
time.sleep(delaytime)
@@ -461,13 +470,12 @@ def get_spdx(d, report_id, spdx_file):
bb.warn(d.getVar('PN', True) + ": Get the first line is " + first_line + ". Try agin")
file.close()
- os.remove(spdx_file)
i += 1
delaytime = delaytime + 20
time.sleep(delaytime)
file.close()
- bb.warn(d.getVar('PN', True) + ": Get spdx failed, Please check your fossology server.")
+    bb.warn(d.getVar('PN', True) + ": The SPDX file may have something wrong, please confirm.")
def invoke_rest_api(d, tar_file, spdx_file, folder_id):
import os
diff --git a/external/meta-spdxscanner/classes/nopackages.bbclass b/external/meta-spdxscanner/classes/nopackages.bbclass
index eb058a7a..646a1644 100644
--- a/external/meta-spdxscanner/classes/nopackages.bbclass
+++ b/external/meta-spdxscanner/classes/nopackages.bbclass
@@ -10,4 +10,5 @@ deltask do_package_write_ipk_setscene
deltask do_package_write_deb_setscene
deltask do_package_qa_setscene
deltask do_packagedata_setscene
+deltask do_spdx_upload
deltask do_spdx
diff --git a/external/meta-spdxscanner/classes/spdx-common.bbclass b/external/meta-spdxscanner/classes/spdx-common.bbclass
index 0dcd7938..bc9cf2f9 100644
--- a/external/meta-spdxscanner/classes/spdx-common.bbclass
+++ b/external/meta-spdxscanner/classes/spdx-common.bbclass
@@ -15,8 +15,13 @@ SPDXEPENDENCY += " tar-native:do_populate_sysroot"
SPDX_DEPLOY_DIR ??= "${DEPLOY_DIR}/spdx"
SPDX_TOPDIR ?= "${WORKDIR}/spdx_sstate_dir"
-SPDX_OUTDIR = "${SPDX_TOPDIR}/${TARGET_SYS}/${PF}/"
-SPDX_WORKDIR = "${WORKDIR}/spdx_temp/"
+SPDX_OUTDIR ?= "${SPDX_TOPDIR}/${TARGET_SYS}/${PF}/"
+SPDX_WORKDIR ?= "${WORKDIR}/spdx_temp/"
+
+SPDX_EXCLUDE_NATIVE ??= "1"
+SPDX_EXCLUDE_SDK ??= "1"
+SPDX_EXCLUDE_PACKAGES ??= ""
+
do_spdx[dirs] = "${WORKDIR}"
@@ -28,6 +33,46 @@ SPDX_S ?= "${S}"
addtask do_spdx before do_configure after do_patch
+# Exclude package based on variables.
+# SPDX_EXCLUDE_NATIVE ??= "1"
+# SPDX_EXCLUDE_SDK ??= "1"
+# SPDX_EXCLUDE_PACKAGES ??= ""
+def excluded_package(d, pn):
+ assume_provided = (d.getVar("ASSUME_PROVIDED") or "").split()
+ if pn in assume_provided:
+ for p in d.getVar("PROVIDES").split():
+ if p != pn:
+ pn = p
+ break
+ if d.getVar('BPN') in ['gcc', 'libgcc']:
+ #bb.debug(1, 'spdx: There is a bug in the scan of %s, skip it.' % pn)
+ return True
+ # The following: do_fetch, do_unpack and do_patch tasks have been deleted,
+ # so avoid archiving do_spdx here.
+ # -native is for the host aka during the build
+ if pn.endswith('-native') and d.getVar("SPDX_EXCLUDE_NATIVE") == "1":
+ return True
+ # nativesdk- is for the developer SDK
+ if pn.startswith('nativesdk-') and d.getVar("SPDX_EXCLUDE_SDK") == "1":
+ return True
+ # packagegroups have no files to scan
+ if pn.startswith('packagegroup'):
+ return True
+ if pn.startswith('glibc-locale'):
+ return True
+ for p in d.getVar("SPDX_EXCLUDE_PACKAGES").split():
+ if p in pn:
+ return True
+ return False
+
+def exclude_useless_paths(tarinfo):
+ if tarinfo.isdir():
+ if tarinfo.name.endswith('/temp') or tarinfo.name.endswith('/patches') or tarinfo.name.endswith('/.pc'):
+ return None
+ elif tarinfo.name == 'temp' or tarinfo.name == 'patches' or tarinfo.name == '.pc':
+ return None
+ return tarinfo
+
def spdx_create_tarball(d, srcdir, suffix, ar_outdir):
"""
create the tarball from srcdir
@@ -49,9 +94,9 @@ def spdx_create_tarball(d, srcdir, suffix, ar_outdir):
filename = '%s.tar.gz' % d.getVar('PF')
tarname = os.path.join(ar_outdir, filename)
- bb.note('Creating %s' % tarname)
+ bb.warn('Creating %s' % tarname)
tar = tarfile.open(tarname, 'w:gz')
- tar.add(srcdir, arcname=os.path.basename(srcdir))
+ tar.add(srcdir, arcname=os.path.basename(srcdir), filter=exclude_useless_paths)
tar.close()
#shutil.rmtree(srcdir)
return tarname
@@ -118,10 +163,77 @@ def get_cached_spdx( sstatefile ):
cached_spdx_info=output.decode('utf-8').split(': ')
return cached_spdx_info[1]
+# Find the license info in license files and fill it into PackageLicenseInfoInLicenseFile.
+def find_infoinlicensefile(sstatefile):
+ import subprocess
+ import linecache
+ import re
+
+ info_in_license_file = ""
+ line_nums = []
+ key_words = ["NOTICE", "README", "readme", "COPYING", "LICENSE"]
+
+ for key_word in key_words:
+ search_cmd = "grep -n 'FileName: .*" + key_word + "' " + sstatefile
+ search_output = subprocess.Popen(search_cmd, shell=True, stdout=subprocess.PIPE).communicate()[0]
+ bb.note("Search result: " + str(search_output))
+ if search_output:
+ bb.note("Found " + key_word +" file.")
+ for line in search_output.decode('utf-8').splitlines():
+ num = line.split(":")[0]
+ line_nums.append(num)
+ else:
+ bb.note("No license info files found.")
+ for line_num in line_nums:
+ line_spdx = linecache.getline(sstatefile, int(line_num))
+ file_path = line_spdx.split(": ")[1]
+ base_file_name = os.path.basename(file_path)
+ if base_file_name.startswith("NOTICE"):
+ bb.note("Found NOTICE file " + base_file_name)
+ elif base_file_name.startswith("readme"):
+ bb.note("Found readme file " + base_file_name)
+ elif base_file_name.startswith("README"):
+ bb.note("Found README file " + base_file_name)
+ elif base_file_name.startswith("COPYING") or base_file_name.endswith("COPYING"):
+ bb.note("Found COPYING file " + base_file_name)
+ elif base_file_name.startswith("LICENSE"):
+ bb.note("Found LICENSE file: " + base_file_name)
+ else:
+ continue
+ linecache.clearcache()
+ line_no = int(line_num) + 1
+ line_spdx = linecache.getline(sstatefile, line_no)
+ while not re.match(r'[a-zA-Z]',line_spdx) is None:
+ if not line_spdx.startswith("LicenseInfoInFile"):
+ line_no = line_no + 1
+ linecache.clearcache()
+ line_spdx = linecache.getline(sstatefile, int(line_no))
+ continue
+ license = line_spdx.split(": ")[1]
+ license = license.split("\n")[0]
+ file_path = file_path.split("\n")[0]
+ path_list = file_path.split('/')
+ if len(file_path.split('/')) < 4:
+ file_path_simple = file_path.split('/',1)[1]
+ else:
+ file_path_simple = file_path.split('/',2)[2]
+
+ #license_in_file = file_path + ": " + license
+ license_in_file = "%s%s%s%s" % ("PackageLicenseInfoInLicenseFile: ",file_path_simple,": ",license)
+            license_in_file = license_in_file.replace('\n', '').replace('\r', '')
+ info_in_license_file = info_in_license_file + license_in_file + "\n"
+ line_no = line_no + 1
+ linecache.clearcache()
+ line_spdx = linecache.getline(sstatefile, int(line_no))
+ linecache.clearcache()
+ return info_in_license_file
+
## Add necessary information into spdx file
def write_cached_spdx( info,sstatefile, ver_code ):
import subprocess
+ infoinlicensefile=""
+
def sed_replace(dest_sed_cmd,key_word,replace_info):
dest_sed_cmd = dest_sed_cmd + "-e 's#^" + key_word + ".*#" + \
key_word + replace_info + "#' "
@@ -132,12 +244,16 @@ def write_cached_spdx( info,sstatefile, ver_code ):
+ r"/a\\" + new_line + "' "
return dest_sed_cmd
+    ## Delete ^M in doc format
+ subprocess.call("sed -i -e 's#\r##g' %s" % sstatefile, shell=True)
## Document level information
- sed_cmd = r"sed -i -e 's#\r$##' "
+ sed_cmd = r"sed -i "
spdx_DocumentComment = "<text>SPDX for " + info['pn'] + " version " \
+ info['pv'] + "</text>"
- sed_cmd = sed_replace(sed_cmd,"DocumentComment",spdx_DocumentComment)
-
+ sed_cmd = sed_replace(sed_cmd,"DocumentComment: ",spdx_DocumentComment)
+ sed_cmd = sed_insert(sed_cmd,"SPDXID:","DocumentName: " + info['pn']+"-"+info['pv'])
+ sed_cmd = sed_insert(sed_cmd,"SPDXID:","DocumentNamespace: http://spdx.org/spdxdocs/SPDXRef-" + info['creator']['Tool']+"-"+info['pn']+"_"+info['pv'])
+
## Creator information
sed_cmd = sed_replace(sed_cmd,"Creator: Tool: ",info['creator']['Tool'])
@@ -147,17 +263,24 @@ def write_cached_spdx( info,sstatefile, ver_code ):
sed_cmd = sed_replace(sed_cmd, "PackageDownloadLocation: ",info['package_download_location'])
sed_cmd = sed_insert(sed_cmd, "PackageDownloadLocation: ", "PackageHomePage: " + info['package_homepage'])
sed_cmd = sed_insert(sed_cmd, "PackageDownloadLocation: ", "PackageSummary: " + "<text>" + info['package_summary'] + "</text>")
- sed_cmd = sed_insert(sed_cmd, "PackageCopyrightText: ", "PackageComment: <text>\\nModificationRecord: " + info['modified'] + "\\n</text>")
sed_cmd = sed_replace(sed_cmd, "PackageVerificationCode: ",ver_code)
sed_cmd = sed_insert(sed_cmd, "PackageVerificationCode: ", "PackageDescription: " +
"<text>" + info['pn'] + " version " + info['pv'] + "</text>")
+ sed_cmd = sed_insert(sed_cmd, "PackageVerificationCode: ", "PackageComment: <text>\\nModificationRecord: " + info['modified'] + "\\n" + "</text>")
for contain in info['package_contains'].split( ):
sed_cmd = sed_insert(sed_cmd, "PackageComment:"," \\n\\n## Relationships\\nRelationship: " + info['pn'] + " CONTAINS " + contain)
for static_link in info['package_static_link'].split( ):
sed_cmd = sed_insert(sed_cmd, "PackageComment:"," \\n\\n## Relationships\\nRelationship: " + info['pn'] + " STATIC_LINK " + static_link)
sed_cmd = sed_cmd + sstatefile
-
subprocess.call("%s" % sed_cmd, shell=True)
+
+ infoinlicensefile = find_infoinlicensefile(sstatefile)
+ for oneline_infoinlicensefile in infoinlicensefile.splitlines():
+ bb.note("find_infoinlicensefile: " + oneline_infoinlicensefile)
+ sed_cmd = r"sed -i -e 's#\r$##' "
+ sed_cmd = sed_insert(sed_cmd, "ModificationRecord: ", oneline_infoinlicensefile)
+ sed_cmd = sed_cmd + sstatefile
+ subprocess.call("%s" % sed_cmd, shell=True)
def is_work_shared(d):
pn = d.getVar('PN')
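
One non-obvious part of the spdx-common changes is the filter= hook now passed to tar.add() in spdx_create_tarball: the callable receives every TarInfo, returning None drops that entry, and for a directory it also stops recursion into it, which is how temp/, patches/ and .pc/ are kept out of the tarball sent to the scanner. A small standalone sketch of the same technique (the paths below are made up for illustration):

```
# Standalone illustration of the tarfile filter used by spdx_create_tarball.
import tarfile

def exclude_useless_paths(tarinfo):
    if tarinfo.isdir():
        name = tarinfo.name
        if name in ('temp', 'patches', '.pc') or \
           name.endswith(('/temp', '/patches', '/.pc')):
            return None          # drop the directory; recursion stops here
    return tarinfo               # keep everything else

with tarfile.open('/tmp/example-patched.tar.gz', 'w:gz') as tar:
    tar.add('/path/to/spdx_workdir', arcname='spdx_workdir',
            filter=exclude_useless_paths)
```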
diff --git a/external/meta-spdxscanner/conf/layer.conf b/external/meta-spdxscanner/conf/layer.conf
index f1c4329a..7f65b897 100644
--- a/external/meta-spdxscanner/conf/layer.conf
+++ b/external/meta-spdxscanner/conf/layer.conf
@@ -9,4 +9,4 @@ BBFILE_COLLECTIONS += "spdxscanner"
BBFILE_PATTERN_spdxscanner = "^${LAYERDIR}/"
BBFILE_PRIORITY_spdxscanner = "6"
-LAYERSERIES_COMPAT_spdxscanner = "sumo thud warrior zeus master"
+LAYERSERIES_COMPAT_spdxscanner = "thud zeus dunfell master"
diff --git a/external/meta-spdxscanner/recipes-devtools/scancode-toolkit/scancode-toolkit-native_3.1.1.bb b/external/meta-spdxscanner/recipes-devtools/scancode-toolkit/scancode-toolkit-native_3.1.1.bb
index 8f6f4667..66492987 100644
--- a/external/meta-spdxscanner/recipes-devtools/scancode-toolkit/scancode-toolkit-native_3.1.1.bb
+++ b/external/meta-spdxscanner/recipes-devtools/scancode-toolkit/scancode-toolkit-native_3.1.1.bb
@@ -8,21 +8,21 @@ SECTION = "devel"
LICENSE = "Apache-2.0"
LIC_FILES_CHKSUM = "file://NOTICE;md5=8aedb84647f637c585e71f8f2e96e5c8"
-EXTRANATIVEPATH_remove = "python-native"
+inherit native
-inherit setuptools pypi distutils native
-
-DEPENDS = "python-setuptools-native xz-native zlib-native libxml2-native \
+DEPENDS = "xz-native zlib-native libxml2-native \
libxslt-native bzip2-native \
"
-SRC_URI = "git://github.com/nexB/scancode-toolkit;branch=master \
+SRC_URI = "git://github.com/nexB/scancode-toolkit;branch=develop \
"
-SRCREV = "1af5ac8449cbb1ce98a0b461a6d9a5ad42a5d248"
-
+SRCREV = "4a5c596a2f02bb69e7764a8e2641286f5625d85e"
S = "${WORKDIR}/git"
+B = "${S}"
+
+export PYTHON_EXE="${HOSTTOOLS_DIR}/python3"
do_configure(){
./scancode --help
@@ -34,7 +34,7 @@ do_install_append(){
install -d ${D}${bindir}/local
install ${S}/scancode ${D}${bindir}/
- install ${S}/bin/* ${D}${bindir}/bin/
- mv ${S}/include/* ${D}${bindir}/include/
+ cp -rf ${S}/bin/* ${D}${bindir}/bin/
+ cp -rf ${S}/include/* ${D}${bindir}/include/
}