path: root/external/meta-spdxscanner/classes
author	takeshi_hoshina <takeshi_hoshina@mail.toyota.co.jp>	2020-10-22 14:58:56 +0900
committer	takeshi_hoshina <takeshi_hoshina@mail.toyota.co.jp>	2020-10-22 14:58:56 +0900
commit	4204309872da5cb401cbb2729d9e2d4869a87f42 (patch)
tree	c7415e8600205e40ff7e91e8e5f4c411f30329f2 /external/meta-spdxscanner/classes
parent	5b80bfd7bffd4c20d80b7c70a7130529e9a755dd (diff)
Diffstat (limited to 'external/meta-spdxscanner/classes')
-rw-r--r--  external/meta-spdxscanner/classes/dosocs-host.bbclass      | 262
-rw-r--r--  external/meta-spdxscanner/classes/dosocs.bbclass            | 302
-rw-r--r--  external/meta-spdxscanner/classes/fossdriver-host.bbclass   | 241
-rw-r--r--  external/meta-spdxscanner/classes/fossology-rest.bbclass    | 499
-rw-r--r--  external/meta-spdxscanner/classes/scancode-tk.bbclass       | 139
-rw-r--r--  external/meta-spdxscanner/classes/spdx-common.bbclass       | 221
6 files changed, 888 insertions, 776 deletions
diff --git a/external/meta-spdxscanner/classes/dosocs-host.bbclass b/external/meta-spdxscanner/classes/dosocs-host.bbclass
deleted file mode 100644
index a6ed6917..00000000
--- a/external/meta-spdxscanner/classes/dosocs-host.bbclass
+++ /dev/null
@@ -1,262 +0,0 @@
-# This class integrates real-time license scanning, generation of SPDX standard
-# output and verifying license info during the build process.
-# It is a combination of efforts from the OE-Core, SPDX and DoSOCSv2 projects.
-#
-# For more information on DoSOCSv2:
-# https://github.com/DoSOCSv2
-#
-# For more information on SPDX:
-# http://www.spdx.org
-#
-# Note:
-# 1) Make sure DoSOCSv2 has been installed on your host
-# 2) By default, SPDX files will be output to the path defined as [SPDX_DEPLOY_DIR]
-# in ./meta/conf/spdx-dosocs.conf.
-
-SPDXSSTATEDIR = "${WORKDIR}/spdx_sstate_dir"
-LICENSELISTVERSION = "2.6"
-CREATOR_TOOL = "meta-spdxscanner"
-# If ${S} isn't actually the top-level source directory, set SPDX_S to point at
-# the real top-level directory.
-
-SPDX_S ?= "${S}"
-
-python do_spdx () {
- import os, sys
- import json
-
- import shutil
-
- pn = d.getVar('PN')
- workdir_tmp = d.getVar('WORKDIR')
-
-    ## It's not necessary to get spdx files for *-native recipes
- if pn.find("-native") != -1 or pn.find("binutils-cross") != -1:
- return None
-
- # Forcibly expand the sysroot paths as we're about to change WORKDIR
- d.setVar('RECIPE_SYSROOT', d.getVar('RECIPE_SYSROOT'))
- d.setVar('RECIPE_SYSROOT_NATIVE', d.getVar('RECIPE_SYSROOT_NATIVE'))
-
-    ## gcc and the kernel are too big to generate spdx files for.
- if ('gcc') in d.getVar('PN', True):
- #invoke_dosocs2("/yocto/work002/fnst/leimh/community/gcc-7.3.0/","/yocto/work001/gcc-7.3.spdx",(d.getVar('WORKDIR', True) or ""))
- return None
- if bb.data.inherits_class('kernel', d):
- #invoke_dosocs2("/yocto/work002/fnst/leimh/community/linux-4.14.44","/yocto/work001/linux-4.14.44.spdx",(d.getVar('WORKDIR', True) or ""))
- return None
-
- bb.note('Archiving the configured source...')
- # "gcc-source-${PV}" recipes don't have "do_configure"
- # task, so we need to run "do_preconfigure" instead
- if pn.startswith("gcc-source-"):
- d.setVar('WORKDIR', d.getVar('ARCHIVER_WORKDIR'))
- bb.build.exec_func('do_preconfigure', d)
-
- # Change the WORKDIR to make do_configure run in another dir.
- d.setVar('WORKDIR', d.getVar('SPDX_TEMP_DIR'))
- #if bb.data.inherits_class('kernel-yocto', d):
- # bb.build.exec_func('do_kernel_configme', d)
- #if bb.data.inherits_class('cmake', d):
- # bb.build.exec_func('do_generate_toolchain_file', d)
- bb.build.exec_func('do_unpack', d)
-
- d.setVar('WORKDIR', workdir_tmp)
- info = {}
- info['workdir'] = (d.getVar('WORKDIR', True) or "")
- info['pn'] = (d.getVar( 'PN', True ) or "")
- info['pv'] = (d.getVar( 'PV', True ) or "")
- info['package_download_location'] = (d.getVar( 'SRC_URI', True ) or "")
- if info['package_download_location'] != "":
- info['package_download_location'] = info['package_download_location'].split()[0]
- info['spdx_version'] = (d.getVar('SPDX_VERSION', True) or '')
- info['data_license'] = (d.getVar('DATA_LICENSE', True) or '')
- info['creator'] = {}
- info['creator']['Tool'] = (d.getVar('CREATOR_TOOL', True) or '')
- info['license_list_version'] = (d.getVar('LICENSELISTVERSION', True) or '')
- info['package_homepage'] = (d.getVar('HOMEPAGE', True) or "")
- info['package_summary'] = (d.getVar('SUMMARY', True) or "")
- info['package_summary'] = info['package_summary'].replace("\n","")
- info['package_summary'] = info['package_summary'].replace("'"," ")
- info['package_contains'] = (d.getVar('CONTAINED', True) or "")
- info['package_static_link'] = (d.getVar('STATIC_LINK', True) or "")
-
- spdx_sstate_dir = (d.getVar('SPDXSSTATEDIR', True) or "")
- manifest_dir = (d.getVar('SPDX_DEPLOY_DIR', True) or "")
- info['outfile'] = os.path.join(manifest_dir, info['pn'] + "-" + info['pv'] + ".spdx" )
- sstatefile = os.path.join(spdx_sstate_dir,
- info['pn'] + "-" + info['pv'] + ".spdx" )
-
- ## get everything from cache. use it to decide if
- ## something needs to be rerun
- if not os.path.exists( spdx_sstate_dir ):
- bb.utils.mkdirhier( spdx_sstate_dir )
-
- d.setVar('WORKDIR', d.getVar('SPDX_TEMP_DIR', True))
- info['sourcedir'] = (d.getVar('SPDX_S', True) or "")
- cur_ver_code = get_ver_code( info['sourcedir'] ).split()[0]
- cache_cur = False
- if os.path.exists( sstatefile ):
- ## cache for this package exists. read it in
- cached_spdx = get_cached_spdx( sstatefile )
- if cached_spdx:
- cached_spdx = cached_spdx.split()[0]
- if (cached_spdx == cur_ver_code):
- bb.warn(info['pn'] + "'s ver code same as cache's. do nothing")
- cache_cur = True
- create_manifest(info,sstatefile)
- if not cache_cur:
- git_path = "%s/.git" % info['sourcedir']
- if os.path.exists(git_path):
- remove_dir_tree(git_path)
-
- ## Get spdx file
- #bb.warn(' run_dosocs2 ...... ')
- invoke_dosocs2(info['sourcedir'],sstatefile,info['workdir'])
- if get_cached_spdx( sstatefile ) != None:
- write_cached_spdx( info,sstatefile,cur_ver_code )
- ## CREATE MANIFEST(write to outfile )
- create_manifest(info,sstatefile)
- else:
- bb.warn('Can\'t get the spdx file ' + info['pn'] + '. Please check your dosocs2.')
- d.setVar('WORKDIR', info['workdir'])
-}
-
-addtask spdx after do_patch before do_configure
-
-def invoke_dosocs2( OSS_src_dir, spdx_file, workdir):
- import subprocess
- import string
- import json
- import codecs
-
-
- dosocs2_cmd = "/usr/local/bin/dosocs2"
- dosocs2_oneshot_cmd = dosocs2_cmd + " oneshot " + OSS_src_dir
- print(dosocs2_oneshot_cmd)
- try:
- dosocs2_output = subprocess.check_output(dosocs2_oneshot_cmd,
- stderr=subprocess.STDOUT,
- shell=True)
- except subprocess.CalledProcessError as e:
- bb.warn("Could not invoke dosocs2 oneshot Command "
- "'%s' returned %d:\n%s" % (dosocs2_oneshot_cmd, e.returncode, e.output))
- return None
- dosocs2_output = dosocs2_output.decode('utf-8')
-
- f = codecs.open(spdx_file,'w','utf-8')
- f.write(dosocs2_output)
-
-def create_manifest(info,sstatefile):
- import shutil
- shutil.copyfile(sstatefile,info['outfile'])
-
-def get_cached_spdx( sstatefile ):
- import subprocess
-
- if not os.path.exists( sstatefile ):
- return None
-
- try:
- output = subprocess.check_output(['grep', "PackageVerificationCode", sstatefile])
- except subprocess.CalledProcessError as e:
- bb.error("Index creation command '%s' failed with return code %d:\n%s" % (e.cmd, e.returncode, e.output))
- return None
- cached_spdx_info=output.decode('utf-8').split(': ')
- return cached_spdx_info[1]
-
-## Add necessary information into spdx file
-def write_cached_spdx( info,sstatefile, ver_code ):
- import subprocess
-
- def sed_replace(dest_sed_cmd,key_word,replace_info):
- dest_sed_cmd = dest_sed_cmd + "-e 's#^" + key_word + ".*#" + \
- key_word + replace_info + "#' "
- return dest_sed_cmd
-
- def sed_insert(dest_sed_cmd,key_word,new_line):
- dest_sed_cmd = dest_sed_cmd + "-e '/^" + key_word \
- + r"/a\\" + new_line + "' "
- return dest_sed_cmd
-
- ## Document level information
- sed_cmd = r"sed -i -e 's#\r$##g' "
- spdx_DocumentComment = "<text>SPDX for " + info['pn'] + " version " \
- + info['pv'] + "</text>"
- sed_cmd = sed_replace(sed_cmd,"DocumentComment",spdx_DocumentComment)
-
- ## Creator information
- sed_cmd = sed_replace(sed_cmd,"Creator: ",info['creator']['Tool'])
- sed_cmd = sed_replace(sed_cmd,"LicenseListVersion: ",info['license_list_version'])
-
- ## Package level information
- sed_cmd = sed_replace(sed_cmd,"PackageName: ",info['pn'])
- sed_cmd = sed_insert(sed_cmd,"PackageName: ", "PackageVersion: " + info['pv'])
- sed_cmd = sed_replace(sed_cmd,"PackageDownloadLocation: ",info['package_download_location'])
- sed_cmd = sed_replace(sed_cmd,"PackageHomePage: ",info['package_homepage'])
- sed_cmd = sed_replace(sed_cmd,"PackageSummary: ","<text>" + info['package_summary'] + "</text>")
- sed_cmd = sed_insert(sed_cmd,"PackageVerificationCode: ",ver_code)
- sed_cmd = sed_replace(sed_cmd,"PackageDescription: ",
- "<text>" + info['pn'] + " version " + info['pv'] + "</text>")
- for contain in info['package_contains'].split( ):
- sed_cmd = sed_insert(sed_cmd,"PackageComment:"," \\n\\n## Relationships\\nRelationship: " + info['pn'] + " CONTAINS " + contain)
- for static_link in info['package_static_link'].split( ):
- sed_cmd = sed_insert(sed_cmd,"PackageComment:"," \\n\\n## Relationships\\nRelationship: " + info['pn'] + " STATIC_LINK " + static_link)
- sed_cmd = sed_cmd + sstatefile
-
- subprocess.call("%s" % sed_cmd, shell=True)
-
-def remove_dir_tree( dir_name ):
- import shutil
- try:
- shutil.rmtree( dir_name )
- except:
- pass
-
-def remove_file( file_name ):
- try:
- os.remove( file_name )
- except OSError as e:
- pass
-
-def list_files( dir ):
- for root, subFolders, files in os.walk( dir ):
- for f in files:
- rel_root = os.path.relpath( root, dir )
- yield rel_root, f
- return
-
-def hash_file( file_name ):
- """
- Return the hex string representation of the SHA1 checksum of the filename
- """
- try:
- import hashlib
- except ImportError:
- return None
-
- sha1 = hashlib.sha1()
- with open( file_name, "rb" ) as f:
- for line in f:
- sha1.update(line)
- return sha1.hexdigest()
-
-def hash_string( data ):
- import hashlib
- sha1 = hashlib.sha1()
- sha1.update( data.encode('utf-8') )
- return sha1.hexdigest()
-
-def get_ver_code( dirname ):
- chksums = []
- for f_dir, f in list_files( dirname ):
- try:
- stats = os.stat(os.path.join(dirname,f_dir,f))
- except OSError as e:
- bb.warn( "Stat failed" + str(e) + "\n")
- continue
- chksums.append(hash_file(os.path.join(dirname,f_dir,f)))
- ver_code_string = ''.join( chksums ).lower()
- ver_code = hash_string( ver_code_string )
- return ver_code
-
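The PackageVerificationCode that write_cached_spdx() fills in above is produced by get_ver_code(): every source file is SHA1-hashed, the hex digests are concatenated, lower-cased and hashed again. A minimal standalone sketch of the same computation (the source directory path is only an example, not part of the class):

    import hashlib
    import os

    def file_sha1(path):
        # SHA1 of one file, as in hash_file() above
        sha1 = hashlib.sha1()
        with open(path, "rb") as f:
            for chunk in iter(lambda: f.read(8192), b""):
                sha1.update(chunk)
        return sha1.hexdigest()

    def verification_code(src_dir):
        # Concatenate the per-file digests and hash the result, as in get_ver_code()
        digests = [file_sha1(os.path.join(root, name))
                   for root, _, files in os.walk(src_dir)
                   for name in files]
        return hashlib.sha1("".join(digests).lower().encode("utf-8")).hexdigest()

    print(verification_code("/tmp/example-src"))  # example path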
diff --git a/external/meta-spdxscanner/classes/dosocs.bbclass b/external/meta-spdxscanner/classes/dosocs.bbclass
deleted file mode 100644
index 755e8260..00000000
--- a/external/meta-spdxscanner/classes/dosocs.bbclass
+++ /dev/null
@@ -1,302 +0,0 @@
-# This class integrates real-time license scanning, generation of SPDX standard
-# output and verifying license info during the build process.
-# It is a combination of efforts from the OE-Core, SPDX and DoSOCSv2 projects.
-#
-# For more information on DoSOCSv2:
-# https://github.com/DoSOCSv2
-#
-# For more information on SPDX:
-# http://www.spdx.org
-#
-# Note:
-# 1) Make sure DoSOCSv2 has been installed on your host
-# 2) By default, SPDX files will be output to the path defined as [SPDX_DEPLOY_DIR]
-# in ./meta/conf/spdx-dosocs.conf.
-
-PYTHON_INHERIT = "${@bb.utils.contains('PN', '-native', '', 'python3-dir', d)}"
-PYTHON_INHERIT .= "${@bb.utils.contains('PACKAGECONFIG', 'python3', 'python3native', '', d)}"
-
-inherit ${PYTHON_INHERIT} python3-dir
-
-PYTHON = "${@bb.utils.contains('PN', '-native', '${STAGING_BINDIR_NATIVE}/${PYTHON_PN}-native/${PYTHON_PN}', '', d)}"
-EXTRANATIVEPATH += "${PYTHON_PN}-native"
-
-# python-config and other scripts are using distutils modules
-# which we patch to access these variables
-export STAGING_INCDIR
-export STAGING_LIBDIR
-
-# autoconf macros will use their internal default preference otherwise
-export PYTHON
-
-do_spdx[depends] += "python3-dosocs2-init-native:do_dosocs2_init"
-do_spdx[depends] += "python3-dosocs2-native:do_populate_sysroot"
-
-SPDXSSTATEDIR = "${WORKDIR}/spdx_sstate_dir"
-
-# If ${S} isn't actually the top-level source directory, set SPDX_S to point at
-# the real top-level directory.
-
-SPDX_S ?= "${S}"
-
-python do_spdx () {
- import os, sys
- import json
-
- pn = d.getVar("PN")
- depends = d.getVar("DEPENDS")
-    ## It's not necessary to get spdx files for *-native recipes
- if pn.find("-native") == -1 and pn.find("binutils-cross") == -1:
- PYTHON = "${STAGING_BINDIR_NATIVE}/${PYTHON_PN}-native/${PYTHON_PN}"
- os.environ['PYTHON'] = PYTHON
- depends = "%s python3-dosocs2-init-native" % depends
- d.setVar("DEPENDS", depends)
- else:
- return None
-
-    ## gcc and the kernel are too big to generate spdx files for.
- if ('gcc' or 'linux-yocto') in d.getVar('PN', True):
- return None
-
- info = {}
- info['workdir'] = (d.getVar('WORKDIR', True) or "")
- info['pn'] = (d.getVar( 'PN', True ) or "")
- info['pv'] = (d.getVar( 'PV', True ) or "")
- info['package_download_location'] = (d.getVar( 'SRC_URI', True ) or "")
- if info['package_download_location'] != "":
- info['package_download_location'] = info['package_download_location'].split()[0]
- info['spdx_version'] = (d.getVar('SPDX_VERSION', True) or '')
- info['data_license'] = (d.getVar('DATA_LICENSE', True) or '')
- info['creator'] = {}
- info['creator']['Tool'] = (d.getVar('CREATOR_TOOL', True) or '')
- info['license_list_version'] = (d.getVar('LICENSELISTVERSION', True) or '')
- info['package_homepage'] = (d.getVar('HOMEPAGE', True) or "")
- info['package_summary'] = (d.getVar('SUMMARY', True) or "")
- info['package_summary'] = info['package_summary'].replace("\n","")
- info['package_summary'] = info['package_summary'].replace("'"," ")
- info['package_contains'] = (d.getVar('CONTAINED', True) or "")
- info['package_static_link'] = (d.getVar('STATIC_LINK', True) or "")
-
- spdx_sstate_dir = (d.getVar('SPDXSSTATEDIR', True) or "")
- manifest_dir = (d.getVar('SPDX_DEPLOY_DIR', True) or "")
- info['outfile'] = os.path.join(manifest_dir, info['pn'] + "-" + info['pv'] + ".spdx" )
- sstatefile = os.path.join(spdx_sstate_dir,
- info['pn'] + "-" + info['pv'] + ".spdx" )
-
- ## get everything from cache. use it to decide if
- ## something needs to be rerun
- if not os.path.exists( spdx_sstate_dir ):
- bb.utils.mkdirhier( spdx_sstate_dir )
-
- d.setVar('WORKDIR', d.getVar('SPDX_TEMP_DIR', True))
- info['sourcedir'] = (d.getVar('SPDX_S', True) or "")
- cur_ver_code = get_ver_code( info['sourcedir'] ).split()[0]
- cache_cur = False
- if os.path.exists( sstatefile ):
- ## cache for this package exists. read it in
- cached_spdx = get_cached_spdx( sstatefile )
- if cached_spdx:
- cached_spdx = cached_spdx.split()[0]
- if (cached_spdx == cur_ver_code):
- bb.warn(info['pn'] + "'s ver code same as cache's. do nothing")
- cache_cur = True
- create_manifest(info,sstatefile)
- if not cache_cur:
- git_path = "%s/.git" % info['sourcedir']
- if os.path.exists(git_path):
- remove_dir_tree(git_path)
-
- ## Get spdx file
- #bb.warn(' run_dosocs2 ...... ')
- invoke_dosocs2(info['sourcedir'],sstatefile)
- if get_cached_spdx( sstatefile ) != None:
- write_cached_spdx( info,sstatefile,cur_ver_code )
- ## CREATE MANIFEST(write to outfile )
- create_manifest(info,sstatefile)
- else:
- bb.warn('Can\'t get the spdx file ' + info['pn'] + '. Please check your dosocs2.')
- d.setVar('WORKDIR', info['workdir'])
-}
-#python () {
-# deps = ' python3-dosocs2-native:do_dosocs2_init'
-# d.appendVarFlag('do_spdx', 'depends', deps)
-#}
-
-## Get the src after do_patch.
-python do_get_spdx_s() {
- import shutil
-
- pn = d.getVar('PN')
-    ## It's not necessary to get spdx files for *-native recipes
- if d.getVar('PN', True) == d.getVar('BPN', True) + "-native":
- return None
-
-    ## gcc and the kernel are too big to generate spdx files for.
- if ('gcc' or 'linux-yocto') in d.getVar('PN', True):
- return None
-
- # Forcibly expand the sysroot paths as we're about to change WORKDIR
- d.setVar('RECIPE_SYSROOT', d.getVar('RECIPE_SYSROOT'))
- d.setVar('RECIPE_SYSROOT_NATIVE', d.getVar('RECIPE_SYSROOT_NATIVE'))
-
- bb.note('Archiving the configured source...')
- pn = d.getVar('PN')
- # "gcc-source-${PV}" recipes don't have "do_configure"
- # task, so we need to run "do_preconfigure" instead
- if pn.startswith("gcc-source-"):
- d.setVar('WORKDIR', d.getVar('ARCHIVER_WORKDIR'))
- bb.build.exec_func('do_preconfigure', d)
-
- # Change the WORKDIR to make do_configure run in another dir.
- d.setVar('WORKDIR', d.getVar('SPDX_TEMP_DIR'))
- #if bb.data.inherits_class('kernel-yocto', d):
- # bb.build.exec_func('do_kernel_configme', d)
- #if bb.data.inherits_class('cmake', d):
- # bb.build.exec_func('do_generate_toolchain_file', d)
- bb.build.exec_func('do_unpack', d)
-}
-
-python () {
- pn = d.getVar("PN")
- depends = d.getVar("DEPENDS")
-
- if pn.find("-native") == -1 and pn.find("binutils-cross") == -1:
- depends = "%s python3-dosocs2-native" % depends
- d.setVar("DEPENDS", depends)
- bb.build.addtask('do_get_spdx_s','do_configure','do_patch', d)
- bb.build.addtask('do_spdx','do_package', 'do_get_spdx_s', d)
-}
-#addtask get_spdx_s after do_patch before do_configure
-#addtask spdx after do_get_spdx_s before do_package
-
-def invoke_dosocs2( OSS_src_dir, spdx_file):
- import subprocess
- import string
- import json
- import codecs
-
- path = os.getenv('PATH')
- dosocs2_cmd = bb.utils.which(os.getenv('PATH'), "dosocs2")
- dosocs2_oneshot_cmd = dosocs2_cmd + " oneshot " + OSS_src_dir
- print(dosocs2_oneshot_cmd)
- try:
- dosocs2_output = subprocess.check_output(dosocs2_oneshot_cmd,
- stderr=subprocess.STDOUT,
- shell=True)
- except subprocess.CalledProcessError as e:
- bb.fatal("Could not invoke dosocs2 oneshot Command "
- "'%s' returned %d:\n%s" % (dosocs2_oneshot_cmd, e.returncode, e.output))
- dosocs2_output = dosocs2_output.decode('utf-8')
-
- f = codecs.open(spdx_file,'w','utf-8')
- f.write(dosocs2_output)
-
-def create_manifest(info,sstatefile):
- import shutil
- shutil.copyfile(sstatefile,info['outfile'])
-
-def get_cached_spdx( sstatefile ):
- import subprocess
-
- if not os.path.exists( sstatefile ):
- return None
-
- try:
- output = subprocess.check_output(['grep', "PackageVerificationCode", sstatefile])
- except subprocess.CalledProcessError as e:
- bb.error("Index creation command '%s' failed with return code %d:\n%s" % (e.cmd, e.returncode, e.output))
- return None
- cached_spdx_info=output.decode('utf-8').split(': ')
- return cached_spdx_info[1]
-
-## Add necessary information into spdx file
-def write_cached_spdx( info,sstatefile, ver_code ):
- import subprocess
-
- def sed_replace(dest_sed_cmd,key_word,replace_info):
- dest_sed_cmd = dest_sed_cmd + "-e 's#^" + key_word + ".*#" + \
- key_word + replace_info + "#' "
- return dest_sed_cmd
-
- def sed_insert(dest_sed_cmd,key_word,new_line):
- dest_sed_cmd = dest_sed_cmd + "-e '/^" + key_word \
- + r"/a\\" + new_line + "' "
- return dest_sed_cmd
-
- ## Document level information
- sed_cmd = r"sed -i -e 's#\r$##g' "
- spdx_DocumentComment = "<text>SPDX for " + info['pn'] + " version " \
- + info['pv'] + "</text>"
- sed_cmd = sed_replace(sed_cmd,"DocumentComment",spdx_DocumentComment)
-
- ## Package level information
- sed_cmd = sed_replace(sed_cmd,"PackageName: ",info['pn'])
- sed_cmd = sed_insert(sed_cmd,"PackageVersion: ",info['pv'])
- sed_cmd = sed_replace(sed_cmd,"PackageDownloadLocation: ",info['package_download_location'])
- sed_cmd = sed_replace(sed_cmd,"PackageChecksum: ","PackageHomePage: " + info['package_homepage'])
- sed_cmd = sed_replace(sed_cmd,"PackageSummary: ","<text>" + info['package_summary'] + "</text>")
- sed_cmd = sed_replace(sed_cmd,"PackageVerificationCode: ",ver_code)
- sed_cmd = sed_replace(sed_cmd,"PackageDescription: ",
- "<text>" + info['pn'] + " version " + info['pv'] + "</text>")
- for contain in info['package_contains'].split( ):
- sed_cmd = sed_insert(sed_cmd,"PackageComment:"," \\n\\n## Relationships\\nRelationship: " + info['pn'] + " CONTAINS " + contain)
- for static_link in info['package_static_link'].split( ):
- sed_cmd = sed_insert(sed_cmd,"PackageComment:"," \\n\\n## Relationships\\nRelationship: " + info['pn'] + " STATIC_LINK " + static_link)
- sed_cmd = sed_cmd + sstatefile
-
- subprocess.call("%s" % sed_cmd, shell=True)
-
-def remove_dir_tree( dir_name ):
- import shutil
- try:
- shutil.rmtree( dir_name )
- except:
- pass
-
-def remove_file( file_name ):
- try:
- os.remove( file_name )
- except OSError as e:
- pass
-
-def list_files( dir ):
- for root, subFolders, files in os.walk( dir ):
- for f in files:
- rel_root = os.path.relpath( root, dir )
- yield rel_root, f
- return
-
-def hash_file( file_name ):
- """
- Return the hex string representation of the SHA1 checksum of the filename
- """
- try:
- import hashlib
- except ImportError:
- return None
-
- sha1 = hashlib.sha1()
- with open( file_name, "rb" ) as f:
- for line in f:
- sha1.update(line)
- return sha1.hexdigest()
-
-def hash_string( data ):
- import hashlib
- sha1 = hashlib.sha1()
- sha1.update( data.encode('utf-8') )
- return sha1.hexdigest()
-
-def get_ver_code( dirname ):
- chksums = []
- for f_dir, f in list_files( dirname ):
- try:
- stats = os.stat(os.path.join(dirname,f_dir,f))
- except OSError as e:
- bb.warn( "Stat failed" + str(e) + "\n")
- continue
- chksums.append(hash_file(os.path.join(dirname,f_dir,f)))
- ver_code_string = ''.join( chksums ).lower()
- ver_code = hash_string( ver_code_string )
- return ver_code
-
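Both dosocs classes patch the generated tag/value file with one sed invocation that write_cached_spdx() assembles via the sed_replace()/sed_insert() helpers. A short sketch of how such a command is built up, using hypothetical package values in place of info['pn'], info['pv'] and sstatefile:

    # Hypothetical values; inside the class these come from the recipe metadata.
    def sed_replace(cmd, key, value):
        return cmd + "-e 's#^" + key + ".*#" + key + value + "#' "

    def sed_insert(cmd, key, new_line):
        return cmd + "-e '/^" + key + r"/a\\" + new_line + "' "

    cmd = r"sed -i -e 's#\r$##g' "
    cmd = sed_replace(cmd, "PackageName: ", "busybox")
    cmd = sed_insert(cmd, "PackageName: ", "PackageVersion: 1.31.0")
    cmd += "busybox-1.31.0.spdx"
    print(cmd)  # the single sed command that rewrites the SPDX file in place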
diff --git a/external/meta-spdxscanner/classes/fossdriver-host.bbclass b/external/meta-spdxscanner/classes/fossdriver-host.bbclass
index a279eab1..0b168a60 100644
--- a/external/meta-spdxscanner/classes/fossdriver-host.bbclass
+++ b/external/meta-spdxscanner/classes/fossdriver-host.bbclass
@@ -1,40 +1,23 @@
# This class integrates real-time license scanning, generation of SPDX standard
# output and verifying license info during the build process.
-# It is a combination of efforts from the OE-Core, SPDX and DoSOCSv2 projects.
+# It is a combination of efforts from the OE-Core, SPDX and fossology projects.
#
-# For more information on DoSOCSv2:
-# https://github.com/DoSOCSv2
+# For more information on fossology REST API:
+# https://www.fossology.org/get-started/basic-rest-api-calls/
#
# For more information on SPDX:
# http://www.spdx.org
#
# Note:
-# 1) Make sure fossdriver has been installed on your host
-# 2) By default, SPDX files will be output to the path defined as [SPDX_DEPLOY_DIR]
-# in ./meta/conf/spdx-dosocs.conf.
-
-
-SPDXEPENDENCY += "${PATCHTOOL}-native:do_populate_sysroot"
-SPDXEPENDENCY += " wget-native:do_populate_sysroot"
-SPDXEPENDENCY += " subversion-native:do_populate_sysroot"
-SPDXEPENDENCY += " git-native:do_populate_sysroot"
-SPDXEPENDENCY += " lz4-native:do_populate_sysroot"
-SPDXEPENDENCY += " lzip-native:do_populate_sysroot"
-SPDXEPENDENCY += " xz-native:do_populate_sysroot"
-SPDXEPENDENCY += " unzip-native:do_populate_sysroot"
-SPDXEPENDENCY += " xz-native:do_populate_sysroot"
-SPDXEPENDENCY += " nodejs-native:do_populate_sysroot"
-SPDXEPENDENCY += " quilt-native:do_populate_sysroot"
-SPDXEPENDENCY += " tar-native:do_populate_sysroot"
-
-SPDX_TOPDIR ?= "${WORKDIR}/spdx_sstate_dir"
-SPDX_OUTDIR = "${SPDX_TOPDIR}/${TARGET_SYS}/${PF}/"
-SPDX_WORKDIR = "${WORKDIR}/spdx_temp/"
+# 1) Make sure fossology (after 3.5.0) (https://hub.docker.com/r/fossology/fossology/) has been started on your host
+# 2) SPDX files will be output to the path defined as [SPDX_DEPLOY_DIR].
+#    By default, SPDX_DEPLOY_DIR is tmp/deploy/
+# 3) Make sure TOKEN has been set in conf/local.conf
+#
-do_spdx[dirs] = "${WORKDIR}"
+inherit spdx-common
-LICENSELISTVERSION = "2.6"
-CREATOR_TOOL = "meta-spdxscanner"
+CREATOR_TOOL = "fossdriver-host.bbclass in meta-spdxscanner"
# If ${S} isn't actually the top-level source directory, set SPDX_S to point at
# the real top-level directory.
@@ -55,8 +38,6 @@ python do_spdx () {
# so avoid archiving source here.
if pn.startswith('glibc-locale'):
return
- if (d.getVar('BPN') == "linux-yocto"):
- return
if (d.getVar('PN') == "libtool-cross"):
return
if (d.getVar('PN') == "libgcc-initial"):
@@ -64,6 +45,9 @@ python do_spdx () {
if (d.getVar('PN') == "shadow-sysroot"):
return
+ if d.getVar('BPN') in ['gcc', 'libgcc']:
+        bb.debug(1, 'spdx: There is a bug in the scan of %s, do nothing' % pn)
+ return
# We just archive gcc-source for all the gcc related recipes
if d.getVar('BPN') in ['gcc', 'libgcc']:
@@ -103,6 +87,9 @@ python do_spdx () {
info['modified'] = "true"
manifest_dir = (d.getVar('SPDX_DEPLOY_DIR', True) or "")
+ if not os.path.exists( manifest_dir ):
+ bb.utils.mkdirhier( manifest_dir )
+
info['outfile'] = os.path.join(manifest_dir, info['pn'] + "-" + info['pv'] + ".spdx" )
sstatefile = os.path.join(spdx_outdir, info['pn'] + "-" + info['pv'] + ".spdx" )
@@ -122,8 +109,12 @@ python do_spdx () {
for f_dir, f in list_files(spdx_temp_dir):
temp_file = os.path.join(spdx_temp_dir,f_dir,f)
shutil.copy(temp_file, temp_dir)
- shutil.rmtree(spdx_temp_dir)
+
d.setVar('WORKDIR', spdx_workdir)
+ info['sourcedir'] = spdx_workdir
+ git_path = "%s/git/.git" % info['sourcedir']
+ if os.path.exists(git_path):
+ remove_dir_tree(git_path)
tar_name = spdx_create_tarball(d, d.getVar('WORKDIR'), 'patched', spdx_outdir)
## get everything from cache. use it to decide if
## something needs to be rerun
@@ -142,76 +133,21 @@ python do_spdx () {
create_manifest(info,sstatefile)
else:
             bb.warn('Can\'t get the spdx file ' + info['pn'] + '. Please check your fossology server.')
+ remove_file(tar_name)
}
-addtask do_spdx before do_unpack after do_fetch
-
-def spdx_create_tarball(d, srcdir, suffix, ar_outdir):
- """
- create the tarball from srcdir
- """
- import tarfile, shutil
- # Make sure we are only creating a single tarball for gcc sources
- #if (d.getVar('SRC_URI') == ""):
- # return
-
- # For the kernel archive, srcdir may just be a link to the
- # work-shared location. Use os.path.realpath to make sure
- # that we archive the actual directory and not just the link.
- srcdir = os.path.realpath(srcdir)
-
- bb.utils.mkdirhier(ar_outdir)
- if suffix:
- filename = '%s-%s.tar.gz' % (d.getVar('PF'), suffix)
- else:
- filename = '%s.tar.gz' % d.getVar('PF')
- tarname = os.path.join(ar_outdir, filename)
-
- bb.note('Creating %s' % tarname)
- tar = tarfile.open(tarname, 'w:gz')
- tar.add(srcdir, arcname=os.path.basename(srcdir))
- tar.close()
- shutil.rmtree(srcdir)
- return tarname
-
-# Run do_unpack and do_patch
-def spdx_get_src(d):
- import shutil
- spdx_workdir = d.getVar('SPDX_WORKDIR')
- spdx_sysroot_native = d.getVar('STAGING_DIR_NATIVE')
- pn = d.getVar('PN')
-
- # We just archive gcc-source for all the gcc related recipes
- if d.getVar('BPN') in ['gcc', 'libgcc']:
- bb.debug(1, 'spdx: There is bug in scan of %s is, do nothing' % pn)
- return
-
- # The kernel class functions require it to be on work-shared, so we dont change WORKDIR
- if not is_work_shared(d):
- # Change the WORKDIR to make do_unpack do_patch run in another dir.
- d.setVar('WORKDIR', spdx_workdir)
- # Restore the original path to recipe's native sysroot (it's relative to WORKDIR).
- d.setVar('STAGING_DIR_NATIVE', spdx_sysroot_native)
-
- # The changed 'WORKDIR' also caused 'B' changed, create dir 'B' for the
- # possibly requiring of the following tasks (such as some recipes's
- # do_patch required 'B' existed).
- bb.utils.mkdirhier(d.getVar('B'))
-
- bb.build.exec_func('do_unpack', d)
-
- # Make sure gcc and kernel sources are patched only once
- if not (d.getVar('SRC_URI') == "" or is_work_shared(d)):
- bb.build.exec_func('do_patch', d)
- # Some userland has no source.
- if not os.path.exists( spdx_workdir ):
- bb.utils.mkdirhier(spdx_workdir)
def invoke_fossdriver(tar_file, spdx_file):
import os
import time
delaytime = 20
+ import logging
+
+ logger = logging.getLogger()
+ logger.setLevel(logging.INFO)
+ logging.basicConfig(level=logging.INFO)
+
(work_dir, tar_file) = os.path.split(tar_file)
os.chdir(work_dir)
@@ -238,7 +174,7 @@ def invoke_fossdriver(tar_file, spdx_file):
i = 0
while i < 10:
if (Scanners(server, tar_file, "Software Repository").run() != True):
- bb.warn("%s scanner failed, try again!" % tar_file)
+ bb.warn("%s Scanners failed, try again!" % tar_file)
time.sleep(delaytime)
i+= 1
else:
@@ -270,123 +206,4 @@ def invoke_fossdriver(tar_file, spdx_file):
bb.warn("%s SPDXTV failed, Please check your fossology server." % tar_file)
return False
-def create_manifest(info,sstatefile):
- import shutil
- shutil.copyfile(sstatefile,info['outfile'])
-
-def get_cached_spdx( sstatefile ):
- import subprocess
-
- if not os.path.exists( sstatefile ):
- return None
-
- try:
- output = subprocess.check_output(['grep', "PackageVerificationCode", sstatefile])
- except subprocess.CalledProcessError as e:
- bb.error("Index creation command '%s' failed with return code %d:\n%s" % (e.cmd, e.returncode, e.output))
- return None
- cached_spdx_info=output.decode('utf-8').split(': ')
- return cached_spdx_info[1]
-
-## Add necessary information into spdx file
-def write_cached_spdx( info,sstatefile, ver_code ):
- import subprocess
-
- def sed_replace(dest_sed_cmd,key_word,replace_info):
- dest_sed_cmd = dest_sed_cmd + "-e 's#^" + key_word + ".*#" + \
- key_word + replace_info + "#' "
- return dest_sed_cmd
-
- def sed_insert(dest_sed_cmd,key_word,new_line):
- dest_sed_cmd = dest_sed_cmd + "-e '/^" + key_word \
- + r"/a\\" + new_line + "' "
- return dest_sed_cmd
-
- ## Document level information
- sed_cmd = r"sed -i -e 's#\r$##g' "
- spdx_DocumentComment = "<text>SPDX for " + info['pn'] + " version " \
- + info['pv'] + "</text>"
- sed_cmd = sed_replace(sed_cmd,"DocumentComment",spdx_DocumentComment)
-
- ## Creator information
- sed_cmd = sed_replace(sed_cmd,"Creator: ",info['creator']['Tool'])
-
- ## Package level information
- sed_cmd = sed_replace(sed_cmd, "PackageName: ", info['pn'])
- sed_cmd = sed_insert(sed_cmd, "PackageName: ", "PackageVersion: " + info['pv'])
- sed_cmd = sed_replace(sed_cmd, "PackageDownloadLocation: ",info['package_download_location'])
- sed_cmd = sed_insert(sed_cmd, "PackageDownloadLocation: ", "PackageHomePage: " + info['package_homepage'])
- sed_cmd = sed_insert(sed_cmd, "PackageDownloadLocation: ", "PackageSummary: " + "<text>" + info['package_summary'] + "</text>")
- sed_cmd = sed_insert(sed_cmd, "PackageDownloadLocation: ", "modification record : " + info['modified'])
- sed_cmd = sed_replace(sed_cmd, "PackageVerificationCode: ",ver_code)
- sed_cmd = sed_insert(sed_cmd, "PackageVerificationCode: ", "PackageDescription: " +
- "<text>" + info['pn'] + " version " + info['pv'] + "</text>")
- for contain in info['package_contains'].split( ):
- sed_cmd = sed_insert(sed_cmd, "PackageComment:"," \\n\\n## Relationships\\nRelationship: " + info['pn'] + " CONTAINS " + contain)
- for static_link in info['package_static_link'].split( ):
- sed_cmd = sed_insert(sed_cmd, "PackageComment:"," \\n\\n## Relationships\\nRelationship: " + info['pn'] + " STATIC_LINK " + static_link)
- sed_cmd = sed_cmd + sstatefile
-
- subprocess.call("%s" % sed_cmd, shell=True)
-
-def is_work_shared(d):
- pn = d.getVar('PN')
- return bb.data.inherits_class('kernel', d) or pn.startswith('gcc-source')
-
-def remove_dir_tree(dir_name):
- import shutil
- try:
- shutil.rmtree(dir_name)
- except:
- pass
-
-def remove_file(file_name):
- try:
- os.remove(file_name)
- except OSError as e:
- pass
-
-def list_files(dir ):
- for root, subFolders, files in os.walk(dir):
- for f in files:
- rel_root = os.path.relpath(root, dir)
- yield rel_root, f
- return
-
-def hash_file(file_name):
- """
- Return the hex string representation of the SHA1 checksum of the filename
- """
- try:
- import hashlib
- except ImportError:
- return None
-
- sha1 = hashlib.sha1()
- with open( file_name, "rb" ) as f:
- for line in f:
- sha1.update(line)
- return sha1.hexdigest()
-
-def hash_string(data):
- import hashlib
- sha1 = hashlib.sha1()
- sha1.update(data.encode('utf-8'))
- return sha1.hexdigest()
-
-def get_ver_code(dirname):
- chksums = []
- for f_dir, f in list_files(dirname):
- try:
- stats = os.stat(os.path.join(dirname,f_dir,f))
- except OSError as e:
- bb.warn( "Stat failed" + str(e) + "\n")
- continue
- chksums.append(hash_file(os.path.join(dirname,f_dir,f)))
- ver_code_string = ''.join(chksums).lower()
- ver_code = hash_string(ver_code_string)
- return ver_code
-
-do_spdx[depends] = "${SPDXEPENDENCY}"
-
EXPORT_FUNCTIONS do_spdx
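invoke_fossdriver() above runs every fossdriver step (upload, Scanners, SPDXTV report) inside the same loop: try the step, and on failure sleep delaytime seconds and retry, up to ten attempts. A generic sketch of that pattern, where the step callable is only a stand-in for calls such as Scanners(server, tar_file, "Software Repository").run():

    import time

    def run_with_retries(step, attempts=10, delaytime=20):
        # Retry loop as used by invoke_fossdriver(): rerun a failed step after a delay
        for _ in range(attempts):
            if step():
                return True
            time.sleep(delaytime)
        return False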
diff --git a/external/meta-spdxscanner/classes/fossology-rest.bbclass b/external/meta-spdxscanner/classes/fossology-rest.bbclass
new file mode 100644
index 00000000..d253853d
--- /dev/null
+++ b/external/meta-spdxscanner/classes/fossology-rest.bbclass
@@ -0,0 +1,499 @@
+# This class integrates real-time license scanning, generation of SPDX standard
+# output and verifying license info during the build process.
+# It is a combination of efforts from the OE-Core, SPDX and DoSOCSv2 projects.
+#
+# For more information on DoSOCSv2:
+# https://github.com/DoSOCSv2
+#
+# For more information on SPDX:
+# http://www.spdx.org
+#
+# Note:
+# 1) Make sure fossdriver has been installed on your host
+# 2) By default, SPDX files will be output to the path defined as [SPDX_DEPLOY_DIR]
+# in ./meta/conf/spdx-dosocs.conf.
+inherit spdx-common
+FOSSOLOGY_SERVER ?= "http://127.0.0.1:8081/repo"
+
+# Upload OSS into the No.1 folder of fossology
+FOLDER_ID = "1"
+
+HOSTTOOLS_NONFATAL += "curl"
+
+CREATOR_TOOL = "fossology-rest.bbclass in meta-spdxscanner"
+
+# If ${S} isn't actually the top-level source directory, set SPDX_S to point at
+# the real top-level directory.
+SPDX_S ?= "${S}"
+
+python do_spdx () {
+ import os, sys, shutil
+
+ pn = d.getVar('PN')
+ assume_provided = (d.getVar("ASSUME_PROVIDED") or "").split()
+ if pn in assume_provided:
+ for p in d.getVar("PROVIDES").split():
+ if p != pn:
+ pn = p
+ break
+ if d.getVar('BPN') in ['gcc', 'libgcc']:
+        bb.debug(1, 'spdx: There is a bug in the scan of %s, do nothing' % pn)
+ return
+    # For glibc-locale, the do_fetch, do_unpack and do_patch tasks have been deleted,
+    # so avoid archiving source here.
+ if pn.startswith('glibc-locale'):
+ return
+ if (d.getVar('PN') == "libtool-cross"):
+ return
+ if (d.getVar('PN') == "libgcc-initial"):
+ return
+ if (d.getVar('PN') == "shadow-sysroot"):
+ return
+
+ spdx_outdir = d.getVar('SPDX_OUTDIR')
+ spdx_workdir = d.getVar('SPDX_WORKDIR')
+ spdx_temp_dir = os.path.join(spdx_workdir, "temp")
+ temp_dir = os.path.join(d.getVar('WORKDIR'), "temp")
+
+ info = {}
+ info['workdir'] = (d.getVar('WORKDIR', True) or "")
+ info['pn'] = (d.getVar( 'PN', True ) or "")
+ info['pv'] = (d.getVar( 'PV', True ) or "")
+ info['package_download_location'] = (d.getVar( 'SRC_URI', True ) or "")
+ if info['package_download_location'] != "":
+ info['package_download_location'] = info['package_download_location'].split()[0]
+ info['spdx_version'] = (d.getVar('SPDX_VERSION', True) or '')
+ info['data_license'] = (d.getVar('DATA_LICENSE', True) or '')
+ info['creator'] = {}
+ info['creator']['Tool'] = (d.getVar('CREATOR_TOOL', True) or '')
+ info['license_list_version'] = (d.getVar('LICENSELISTVERSION', True) or '')
+ info['package_homepage'] = (d.getVar('HOMEPAGE', True) or "")
+ info['package_summary'] = (d.getVar('SUMMARY', True) or "")
+ info['package_summary'] = info['package_summary'].replace("\n","")
+ info['package_summary'] = info['package_summary'].replace("'"," ")
+ info['package_contains'] = (d.getVar('CONTAINED', True) or "")
+ info['package_static_link'] = (d.getVar('STATIC_LINK', True) or "")
+ info['modified'] = "false"
+ info['token'] = (d.getVar('TOKEN', True) or "")
+
+ srcuri = d.getVar("SRC_URI", False).split()
+ length = len("file://")
+ for item in srcuri:
+ if item.startswith("file://"):
+ item = item[length:]
+ if item.endswith(".patch") or item.endswith(".diff"):
+ info['modified'] = "true"
+
+ manifest_dir = (d.getVar('SPDX_DEPLOY_DIR', True) or "")
+ if not os.path.exists( manifest_dir ):
+ bb.utils.mkdirhier( manifest_dir )
+
+ info['outfile'] = os.path.join(manifest_dir, info['pn'] + "-" + info['pv'] + ".spdx" )
+ sstatefile = os.path.join(spdx_outdir, info['pn'] + "-" + info['pv'] + ".spdx" )
+
+    # If the spdx file already exists, do nothing
+    if os.path.exists(info['outfile']):
+        bb.note(info['pn'] + ": spdx file already exists, do nothing")
+        return
+    if os.path.exists( sstatefile ):
+        bb.note(info['pn'] + ": spdx file already exists, do nothing")
+ create_manifest(info,sstatefile)
+ return
+
+ spdx_get_src(d)
+
+ bb.note('SPDX: Archiving the patched source...')
+ if os.path.isdir(spdx_temp_dir):
+ for f_dir, f in list_files(spdx_temp_dir):
+ temp_file = os.path.join(spdx_temp_dir,f_dir,f)
+ shutil.copy(temp_file, temp_dir)
+ # shutil.rmtree(spdx_temp_dir)
+ d.setVar('WORKDIR', spdx_workdir)
+ info['sourcedir'] = spdx_workdir
+ git_path = "%s/git/.git" % info['sourcedir']
+ if os.path.exists(git_path):
+ remove_dir_tree(git_path)
+ tar_name = spdx_create_tarball(d, d.getVar('WORKDIR'), 'patched', spdx_outdir)
+
+ ## get everything from cache. use it to decide if
+ ## something needs to be rerun
+ if not os.path.exists(spdx_outdir):
+ bb.utils.mkdirhier(spdx_outdir)
+ cur_ver_code = get_ver_code(spdx_workdir).split()[0]
+ ## Get spdx file
+ bb.note(' run fossology rest api ...... ')
+ if not os.path.isfile(tar_name):
+        bb.warn(info['pn'] + " has no source, do nothing")
+ return
+ folder_id = get_folder_id(d)
+ if invoke_rest_api(d, tar_name, sstatefile, folder_id) == False:
+        bb.warn(info['pn'] + ": Failed to get the spdx file, please check the fossology server.")
+ remove_file(tar_name)
+ return False
+ if get_cached_spdx(sstatefile) != None:
+ write_cached_spdx( info,sstatefile,cur_ver_code )
+ ## CREATE MANIFEST(write to outfile )
+ create_manifest(info,sstatefile)
+ else:
+        bb.warn(info['pn'] + ': Can\'t get the spdx file. Please check the fossology server.')
+ remove_file(tar_name)
+}
+
+def get_folder_id_by_name(d, folder_name):
+    import os
+    import subprocess
+    import json
+    import errno
+
+ server_url = (d.getVar('FOSSOLOGY_SERVER', True) or "")
+ if server_url == "":
+ bb.note("Please set fossology server URL by setting FOSSOLOGY_SERVER!\n")
+ raise OSError(errno.ENOENT, "No setting of FOSSOLOGY_SERVER")
+
+ token = (d.getVar('TOKEN', True) or "")
+ if token == "":
+        bb.note("Please set token of fossology server by setting TOKEN!\n")
+ raise OSError(errno.ENOENT, "No setting of TOKEN comes from fossology server.")
+
+ rest_api_cmd = "curl -k -s -S -X GET " + server_url + "/api/v1/folders" \
+ + " -H \"Authorization: Bearer " + token + "\"" \
+ + " --noproxy 127.0.0.1"
+ bb.note("Invoke rest_api_cmd = " + rest_api_cmd )
+ try:
+ all_folder = subprocess.check_output(rest_api_cmd, stderr=subprocess.STDOUT, shell=True)
+ except subprocess.CalledProcessError as e:
+ bb.error(d.getVar('PN', True) + ": Get folder list failed: \n%s" % e.output.decode("utf-8"))
+ return False
+ all_folder = str(all_folder, encoding = "utf-8")
+ bb.note("all_folder list= " + all_folder)
+ all_folder = json.loads(all_folder)
+ bb.note("len of all_folder = ")
+ bb.note(str(len(all_folder)))
+ if len(all_folder) == 0:
+ bb.note("Can not get folder list.")
+ return False
+ bb.note("all_folder[0][name] = ")
+ bb.note(all_folder[0]["name"])
+ for i in range(0, len(all_folder)):
+ if all_folder[i]["name"] == folder_name:
+            bb.note("Found " + folder_name + " in fossology server")
+ return all_folder[i]["id"]
+ return False
+
+def create_folder(d, folder_name):
+    import os
+    import subprocess
+    import errno
+
+ server_url = (d.getVar('FOSSOLOGY_SERVER', True) or "")
+ if server_url == "":
+ bb.note("Please set fossology server URL by setting FOSSOLOGY_SERVER!\n")
+ raise OSError(errno.ENOENT, "No setting of FOSSOLOGY_SERVER")
+
+ token = (d.getVar('TOKEN', True) or "")
+ if token == "":
+        bb.note("Please set token of fossology server by setting TOKEN!\n")
+ raise OSError(errno.ENOENT, "No setting of TOKEN comes from fossology server.")
+
+ rest_api_cmd = "curl -k -s -S -X POST " + server_url + "/api/v1/folders" \
+ + " -H \'parentFolder: 1\'" \
+ + " -H \'folderName: " + folder_name + "\'" \
+ + " -H \"Authorization: Bearer " + token + "\"" \
+ + " --noproxy 127.0.0.1"
+ bb.note("Invoke rest_api_cmd = " + rest_api_cmd)
+ try:
+ add_folder = subprocess.check_output(rest_api_cmd, stderr=subprocess.STDOUT, shell=True)
+ except subprocess.CalledProcessError as e:
+ bb.error(d.getVar('PN', True) + ": Added folder failed: \n%s" % e.output.decode("utf-8"))
+ return False
+
+ add_folder = str(add_folder, encoding = "utf-8")
+ bb.note("add_folder = ")
+ bb.note(add_folder)
+ add_folder = eval(add_folder)
+ if str(add_folder["code"]) == "201":
+ bb.note("add_folder = " + folder_name)
+ return add_folder["message"]
+ elif str(add_folder["code"]) == "200":
+ bb.note("Folder : " + folder_name + "has been created.")
+ return get_folder_id_by_name(d, folder_name)
+ else:
+ bb.error(d.getVar('PN', True) + ": Added folder failed, please check your fossology server.")
+ return False
+
+def get_folder_id(d):
+
+ if d.getVar('FOLDER_NAME', False):
+ folder_name = d.getVar('FOLDER_NAME')
+ folder_id = create_folder(d, folder_name)
+ else:
+ folder_id = (d.getVar('FOLDER_ID', True) or "1")
+
+ bb.note("Folder Id = " + str(folder_id))
+ return str(folder_id)
+
+def has_upload(d, tar_file, folder_id):
+    import os
+    import subprocess
+    import errno
+
+ (work_dir, file_name) = os.path.split(tar_file)
+
+ server_url = (d.getVar('FOSSOLOGY_SERVER', True) or "")
+ if server_url == "":
+ bb.note("Please set fossology server URL by setting FOSSOLOGY_SERVER!\n")
+ raise OSError(errno.ENOENT, "No setting of FOSSOLOGY_SERVER")
+
+ token = (d.getVar('TOKEN', True) or "")
+ if token == "":
+        bb.note("Please set token of fossology server by setting TOKEN!\n")
+ raise OSError(errno.ENOENT, "No setting of TOKEN comes from fossology server.")
+
+ rest_api_cmd = "curl -k -s -S -X GET " + server_url + "/api/v1/uploads" \
+ + " -H \"Authorization: Bearer " + token + "\"" \
+ + " --noproxy 127.0.0.1"
+ bb.note("Invoke rest_api_cmd = " + rest_api_cmd )
+
+ try:
+ upload_output = subprocess.check_output(rest_api_cmd, stderr=subprocess.STDOUT, shell=True)
+ except subprocess.CalledProcessError as e:
+ bb.error("curl failed: \n%s" % e.output.decode("utf-8"))
+ return False
+
+ upload_output = str(upload_output, encoding = "utf-8")
+ upload_output = eval(upload_output)
+ bb.note("upload_output = ")
+ print(upload_output)
+ bb.note("len of upload_output = ")
+ bb.note(str(len(upload_output)))
+ if len(upload_output) == 0:
+ bb.note("The upload of fossology is 0.")
+ return False
+ bb.note("upload_output[0][uploadname] = ")
+ bb.note(upload_output[0]["uploadname"])
+ bb.note("len of upload_output = ")
+ bb.note(str(len(upload_output)))
+ for i in range(0, len(upload_output)):
+ if upload_output[i]["uploadname"] == file_name and str(upload_output[i]["folderid"]) == str(folder_id):
+            bb.warn("Found " + file_name + " in fossology server \"Software Repository\" folder, so it will not be uploaded again.")
+ return upload_output[i]["id"]
+ return False
+
+def upload(d, tar_file, folder):
+    import os
+    import subprocess
+    import time
+    import errno
+ delaytime = 50
+ i = 0
+
+ server_url = (d.getVar('FOSSOLOGY_SERVER', True) or "")
+ if server_url == "":
+ bb.note("Please set fossology server URL by setting FOSSOLOGY_SERVER!\n")
+ raise OSError(errno.ENOENT, "No setting of FOSSOLOGY_SERVER")
+
+ token = (d.getVar('TOKEN', True) or "")
+ if token == "":
+        bb.note("Please set token of fossology server by setting TOKEN!\n")
+ raise OSError(errno.ENOENT, "No setting of TOKEN comes from fossology server.")
+
+ rest_api_cmd = "curl -k -s -S -X POST " + server_url + "/api/v1/uploads" \
+ + " -H \"folderId: " + folder + "\"" \
+ + " -H \"Authorization: Bearer " + token + "\"" \
+ + " -H \'uploadDescription: created by REST\'" \
+ + " -H \'public: public\'" \
+ + " -H \'Content-Type: multipart/form-data\'" \
+ + " -F \'fileInput=@\"" + tar_file + "\";type=application/octet-stream\'" \
+ + " --noproxy 127.0.0.1"
+ bb.note("Upload : Invoke rest_api_cmd = " + rest_api_cmd )
+ while i < 10:
+ time.sleep(delaytime)
+ try:
+ upload = subprocess.check_output(rest_api_cmd, stderr=subprocess.STDOUT, shell=True)
+ except subprocess.CalledProcessError as e:
+ bb.error(d.getVar('PN', True) + ": Upload failed: \n%s" % e.output.decode("utf-8"))
+ return False
+ upload = str(upload, encoding = "utf-8")
+ bb.note("Upload = ")
+ bb.note(upload)
+ upload = eval(upload)
+ if str(upload["code"]) == "201":
+ return upload["message"]
+ i += 1
+    bb.warn(d.getVar('PN', True) + ": Upload failed, please check your fossology server.")
+ return False
+
+def analysis(d, folder_id, upload_id):
+    import os
+    import subprocess
+    import time
+    import errno
+ delaytime = 50
+ i = 0
+
+ server_url = (d.getVar('FOSSOLOGY_SERVER', True) or "")
+ if server_url == "":
+ bb.note("Please set fossology server URL by setting FOSSOLOGY_SERVER!\n")
+ raise OSError(errno.ENOENT, "No setting of FOSSOLOGY_SERVER")
+
+ token = (d.getVar('TOKEN', True) or "")
+ if token == "":
+        bb.note("Please set token of fossology server by setting TOKEN!\n")
+ raise OSError(errno.ENOENT, "No setting of TOKEN comes from fossology server.")
+
+ rest_api_cmd = "curl -k -s -S -X POST " + server_url + "/api/v1/jobs" \
+ + " -H \"folderId: " + str(folder_id) + "\"" \
+ + " -H \"uploadId: " + str(upload_id) + "\"" \
+ + " -H \"Authorization: Bearer " + token + "\"" \
+ + " -H \'Content-Type: application/json\'" \
+ + " --data \'{\"analysis\": {\"bucket\": true,\"copyright_email_author\": true,\"ecc\": true, \"keyword\": true,\"mime\": true,\"monk\": true,\"nomos\": true,\"package\": true},\"decider\": {\"nomos_monk\": true,\"bulk_reused\": true,\"new_scanner\": true}}\'" \
+ + " --noproxy 127.0.0.1"
+ bb.note("Analysis : Invoke rest_api_cmd = " + rest_api_cmd )
+ while i < 10:
+ try:
+ time.sleep(delaytime)
+ analysis = subprocess.check_output(rest_api_cmd, stderr=subprocess.STDOUT, shell=True)
+ except subprocess.CalledProcessError as e:
+ bb.error("Analysis failed: \n%s" % e.output.decode("utf-8"))
+ return False
+ time.sleep(delaytime)
+ analysis = str(analysis, encoding = "utf-8")
+ bb.note("analysis = ")
+ bb.note(analysis)
+ analysis = eval(analysis)
+ if str(analysis["code"]) == "201":
+ return analysis["message"]
+ elif str(analysis["code"]) == "404":
+ bb.warn(d.getVar('PN', True) + ": analysis is still not complete.")
+ time.sleep(delaytime*2)
+ else:
+ return False
+ i += 1
+    bb.warn(d.getVar('PN', True) + ": Analysis failed, will try again.")
+    bb.warn(d.getVar('PN', True) + ": Analysis failed, please check your fossology server.")
+ return False
+
+def trigger(d, folder_id, upload_id):
+    import os
+    import subprocess
+    import time
+    import errno
+ delaytime = 50
+ i = 0
+
+ server_url = (d.getVar('FOSSOLOGY_SERVER', True) or "")
+ if server_url == "":
+ bb.note("Please set fossology server URL by setting FOSSOLOGY_SERVER!\n")
+ raise OSError(errno.ENOENT, "No setting of FOSSOLOGY_SERVER")
+
+ token = (d.getVar('TOKEN', True) or "")
+ if token == "":
+        bb.note("Please set token of fossology server by setting TOKEN!\n")
+ raise OSError(errno.ENOENT, "No setting of TOKEN comes from fossology server.")
+
+ rest_api_cmd = "curl -k -s -S -X GET " + server_url + "/api/v1/report" \
+ + " -H \"Authorization: Bearer " + token + "\"" \
+ + " -H \"uploadId: " + str(upload_id) + "\"" \
+ + " -H \'reportFormat: spdx2tv\'" \
+ + " --noproxy 127.0.0.1"
+ bb.note("trigger : Invoke rest_api_cmd = " + rest_api_cmd )
+ while i < 10:
+ time.sleep(delaytime)
+ try:
+ trigger = subprocess.check_output(rest_api_cmd, stderr=subprocess.STDOUT, shell=True)
+ except subprocess.CalledProcessError as e:
+ bb.error(d.getVar('PN', True) + ": Trigger failed: \n%s" % e.output.decode("utf-8"))
+ return False
+ time.sleep(delaytime)
+ trigger = str(trigger, encoding = "utf-8")
+ trigger = eval(trigger)
+ bb.note("trigger id = ")
+ bb.note(str(trigger["message"]))
+ if str(trigger["code"]) == "201":
+ return trigger["message"].split("/")[-1]
+ i += 1
+ time.sleep(delaytime * 2)
+    bb.warn(d.getVar('PN', True) + ": Trigger failed, will try again.")
+    bb.warn(d.getVar('PN', True) + ": Trigger failed, please check your fossology server.")
+ return False
+
+def get_spdx(d, report_id, spdx_file):
+    import os
+    import subprocess
+    import time
+    import errno
+ delaytime = 50
+ complete = False
+ i = 0
+
+ server_url = (d.getVar('FOSSOLOGY_SERVER', True) or "")
+ if server_url == "":
+ bb.note("Please set fossology server URL by setting FOSSOLOGY_SERVER!\n")
+ raise OSError(errno.ENOENT, "No setting of FOSSOLOGY_SERVER")
+
+ token = (d.getVar('TOKEN', True) or "")
+ if token == "":
+        bb.note("Please set token of fossology server by setting TOKEN!\n")
+ raise OSError(errno.ENOENT, "No setting of TOKEN comes from fossology server.")
+ rest_api_cmd = "curl -k -s -S -X GET " + server_url + "/api/v1/report/" + report_id \
+ + " -H \'accept: text/plain\'" \
+ + " -H \"Authorization: Bearer " + token + "\"" \
+ + " --noproxy 127.0.0.1"
+ bb.note("get_spdx : Invoke rest_api_cmd = " + rest_api_cmd )
+ while i < 10:
+ time.sleep(delaytime)
+ file = open(spdx_file,'wt')
+ try:
+ p = subprocess.Popen(rest_api_cmd, shell=True, universal_newlines=True, stdout=file).wait()
+ except subprocess.CalledProcessError as e:
+ bb.error("Get spdx failed: \n%s. Please check fossology server." % e.output.decode("utf-8"))
+ file.close()
+ os.remove(spdx_file)
+ return False
+ file.flush()
+ time.sleep(delaytime)
+ file.close()
+ file = open(spdx_file,'r+')
+ first_line = file.readline()
+ if "SPDXVersion" in first_line:
+ line = file.readline()
+ while line:
+ if "LicenseID:" in line:
+ complete = True
+ break
+ line = file.readline()
+ file.close()
+ if complete == False:
+                bb.warn("license info not complete, try again.")
+ else:
+ return True
+ else:
+            bb.warn(d.getVar('PN', True) + ": The first line is " + first_line + ". Try again")
+
+ file.close()
+ os.remove(spdx_file)
+ i += 1
+ delaytime = delaytime + 20
+ time.sleep(delaytime)
+
+ file.close()
+ bb.warn(d.getVar('PN', True) + ": Get spdx failed, Please check your fossology server.")
+
+def invoke_rest_api(d, tar_file, spdx_file, folder_id):
+ import os
+ import time
+ i = 0
+
+ bb.note("invoke fossology REST API : tar_file = %s " % tar_file)
+ upload_id = has_upload(d, tar_file, folder_id)
+ if upload_id == False:
+ bb.note("This OSS has not been scanned. So upload it to fossology server.")
+ upload_id = upload(d, tar_file, folder_id)
+ if upload_id == False:
+ return False
+
+ if analysis(d, folder_id, upload_id) == False:
+ return False
+ while i < 10:
+ i += 1
+ report_id = trigger(d, folder_id, upload_id)
+ if report_id == False:
+ return False
+ spdx2tv = get_spdx(d, report_id, spdx_file)
+ if spdx2tv == False:
+            bb.warn(d.getVar('PN', True) + ": get_spdx failed. Will try again!")
+ else:
+ return True
+
+    bb.warn("get_spdx of %s failed. Please confirm!" % d.getVar('PN', True))
+ return False
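All of the REST helpers above shell out to curl with a bearer token against the fossology v1 API. As a concrete illustration, the folder listing performed by get_folder_id_by_name() reduces to the call below; the server URL and token are placeholders for the FOSSOLOGY_SERVER and TOKEN variables read by the class:

    import json
    import subprocess

    server_url = "http://127.0.0.1:8081/repo"   # placeholder; the class reads FOSSOLOGY_SERVER
    token = "replace-with-your-token"           # placeholder; the class reads TOKEN

    rest_api_cmd = ("curl -k -s -S -X GET " + server_url + "/api/v1/folders"
                    + " -H \"Authorization: Bearer " + token + "\""
                    + " --noproxy 127.0.0.1")
    all_folder = json.loads(subprocess.check_output(rest_api_cmd, shell=True).decode("utf-8"))
    for folder in all_folder:
        print(folder["id"], folder["name"])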
diff --git a/external/meta-spdxscanner/classes/scancode-tk.bbclass b/external/meta-spdxscanner/classes/scancode-tk.bbclass
new file mode 100644
index 00000000..0dc244f9
--- /dev/null
+++ b/external/meta-spdxscanner/classes/scancode-tk.bbclass
@@ -0,0 +1,139 @@
+# This class integrates real-time license scanning, generation of SPDX standard
+# output and verifying license info during the build process.
+# It is a combination of efforts from the OE-Core, SPDX and ScanCode projects.
+#
+# For more information on ScanCode:
+# https://github.com/nexB/scancode-toolkit
+#
+# For more information on SPDX:
+# http://www.spdx.org
+#
+# Note:
+# 1) By default, SPDX files will be output to the path defined as [SPDX_DEPLOY_DIR]
+# 2) By default, SPDX_DEPLOY_DIR is tmp/deploy
+#
+
+inherit spdx-common
+
+SPDXEPENDENCY += "scancode-toolkit-native:do_populate_sysroot"
+
+CREATOR_TOOL = "scancode-tk.bbclass in meta-spdxscanner"
+
+python do_spdx(){
+ import os, sys, json, shutil
+ pn = d.getVar('PN')
+ assume_provided = (d.getVar("ASSUME_PROVIDED") or "").split()
+ if pn in assume_provided:
+ for p in d.getVar("PROVIDES").split():
+ if p != pn:
+ pn = p
+ break
+ if d.getVar('BPN') in ['gcc', 'libgcc']:
+        bb.debug(1, 'spdx: There is a bug in the scan of %s, do nothing' % pn)
+ return
+ # glibc-locale: do_fetch, do_unpack and do_patch tasks have been deleted,
+ # so avoid archiving source here.
+ if pn.startswith('glibc-locale'):
+ return
+ if (d.getVar('PN') == "libtool-cross"):
+ return
+ if (d.getVar('PN') == "libgcc-initial"):
+ return
+ if (d.getVar('PN') == "shadow-sysroot"):
+ return
+
+ spdx_outdir = d.getVar('SPDX_OUTDIR')
+ spdx_workdir = d.getVar('SPDX_WORKDIR')
+ spdx_temp_dir = os.path.join(spdx_workdir, "temp")
+ temp_dir = os.path.join(d.getVar('WORKDIR'), "temp")
+
+ info = {}
+ info['workdir'] = (d.getVar('WORKDIR', True) or "")
+ info['pn'] = (d.getVar( 'PN', True ) or "")
+ info['pv'] = (d.getVar( 'PV', True ) or "")
+ info['package_download_location'] = (d.getVar( 'SRC_URI', True ) or "")
+ if info['package_download_location'] != "":
+ info['package_download_location'] = info['package_download_location'].split()[0]
+ info['spdx_version'] = (d.getVar('SPDX_VERSION', True) or '')
+ info['data_license'] = (d.getVar('DATA_LICENSE', True) or '')
+ info['creator'] = {}
+ info['creator']['Tool'] = (d.getVar('CREATOR_TOOL', True) or '')
+ info['license_list_version'] = (d.getVar('LICENSELISTVERSION', True) or '')
+ info['package_homepage'] = (d.getVar('HOMEPAGE', True) or "")
+ info['package_summary'] = (d.getVar('SUMMARY', True) or "")
+ info['package_summary'] = info['package_summary'].replace("\n","")
+ info['package_summary'] = info['package_summary'].replace("'"," ")
+ info['package_contains'] = (d.getVar('CONTAINED', True) or "")
+ info['package_static_link'] = (d.getVar('STATIC_LINK', True) or "")
+ info['modified'] = "false"
+ srcuri = d.getVar("SRC_URI", False).split()
+ length = len("file://")
+ for item in srcuri:
+ if item.startswith("file://"):
+ item = item[length:]
+ if item.endswith(".patch") or item.endswith(".diff"):
+ info['modified'] = "true"
+
+ manifest_dir = (d.getVar('SPDX_DEPLOY_DIR', True) or "")
+ if not os.path.exists( manifest_dir ):
+ bb.utils.mkdirhier( manifest_dir )
+ info['outfile'] = os.path.join(manifest_dir, info['pn'] + "-" + info['pv'] + ".spdx" )
+ sstatefile = os.path.join(spdx_outdir, info['pn'] + "-" + info['pv'] + ".spdx" )
+    # If the spdx file already exists, do nothing
+    if os.path.exists(info['outfile']):
+        bb.note(info['pn'] + ": spdx file already exists, do nothing")
+        return
+    if os.path.exists( sstatefile ):
+        bb.note(info['pn'] + ": spdx file already exists, do nothing")
+ create_manifest(info,sstatefile)
+ return
+ spdx_get_src(d)
+
+ bb.note('SPDX: Archiving the patched source...')
+ if os.path.isdir(spdx_temp_dir):
+ for f_dir, f in list_files(spdx_temp_dir):
+ temp_file = os.path.join(spdx_temp_dir,f_dir,f)
+ shutil.copy(temp_file, temp_dir)
+ #shutil.rmtree(spdx_temp_dir)
+ if not os.path.exists(spdx_outdir):
+ bb.utils.mkdirhier(spdx_outdir)
+ cur_ver_code = get_ver_code(spdx_workdir).split()[0]
+ ## Get spdx file
+ bb.note(' run scanCode ...... ')
+ d.setVar('WORKDIR', d.getVar('SPDX_WORKDIR', True))
+ info['sourcedir'] = spdx_workdir
+ git_path = "%s/git/.git" % info['sourcedir']
+ if os.path.exists(git_path):
+ remove_dir_tree(git_path)
+ invoke_scancode(info['sourcedir'],sstatefile)
+ bb.warn("source dir = " + info['sourcedir'])
+ if get_cached_spdx(sstatefile) != None:
+ write_cached_spdx( info,sstatefile,cur_ver_code )
+ ## CREATE MANIFEST(write to outfile )
+ create_manifest(info,sstatefile)
+ else:
+        bb.warn('Can\'t get the spdx file ' + info['pn'] + '. Please check your scancode-toolkit install.')
+}
+
+def invoke_scancode( OSS_src_dir, spdx_file):
+ import subprocess
+ import string
+ import json
+ import codecs
+ import logging
+
+ logger = logging.getLogger()
+ logger.setLevel(logging.INFO)
+ logging.basicConfig(level=logging.INFO)
+
+ path = os.getenv('PATH')
+ scancode_cmd = bb.utils.which(os.getenv('PATH'), "scancode")
+ scancode_cmd = scancode_cmd + " -lpci --spdx-tv " + spdx_file + " " + OSS_src_dir
+ print(scancode_cmd)
+ try:
+ subprocess.check_output(scancode_cmd,
+ stderr=subprocess.STDOUT,
+ shell=True)
+ except subprocess.CalledProcessError as e:
+ bb.fatal("Could not invoke scancode Command "
+ "'%s' returned %d:\n%s" % (scancode_cmd, e.returncode, e.output))
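Outside of BitBake, invoke_scancode() above is equivalent to running scancode-toolkit directly with license, package, copyright and info scanning and SPDX tag/value output. A minimal sketch with placeholder paths:

    import subprocess

    src_dir = "/tmp/example-src"     # placeholder source tree
    spdx_file = "/tmp/example.spdx"  # placeholder output file

    # Same options as invoke_scancode(): -l license, -p package, -c copyright, -i info,
    # writing the result as an SPDX tag/value document.
    scancode_cmd = "scancode -lpci --spdx-tv " + spdx_file + " " + src_dir
    subprocess.check_output(scancode_cmd, stderr=subprocess.STDOUT, shell=True)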
diff --git a/external/meta-spdxscanner/classes/spdx-common.bbclass b/external/meta-spdxscanner/classes/spdx-common.bbclass
new file mode 100644
index 00000000..0dcd7938
--- /dev/null
+++ b/external/meta-spdxscanner/classes/spdx-common.bbclass
@@ -0,0 +1,221 @@
+# This class supplies common functions.
+
+
+SPDXEPENDENCY += "${PATCHTOOL}-native:do_populate_sysroot"
+SPDXEPENDENCY += " wget-native:do_populate_sysroot"
+SPDXEPENDENCY += " subversion-native:do_populate_sysroot"
+SPDXEPENDENCY += " git-native:do_populate_sysroot"
+SPDXEPENDENCY += " lz4-native:do_populate_sysroot"
+SPDXEPENDENCY += " lzip-native:do_populate_sysroot"
+SPDXEPENDENCY += " xz-native:do_populate_sysroot"
+SPDXEPENDENCY += " unzip-native:do_populate_sysroot"
+SPDXEPENDENCY += " quilt-native:do_populate_sysroot"
+SPDXEPENDENCY += " tar-native:do_populate_sysroot"
+
+SPDX_DEPLOY_DIR ??= "${DEPLOY_DIR}/spdx"
+SPDX_TOPDIR ?= "${WORKDIR}/spdx_sstate_dir"
+SPDX_OUTDIR = "${SPDX_TOPDIR}/${TARGET_SYS}/${PF}/"
+SPDX_WORKDIR = "${WORKDIR}/spdx_temp/"
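+# The deploy location can be overridden, e.g. from local.conf (illustrative
+# value only):
+#   SPDX_DEPLOY_DIR = "/srv/deploy/spdx"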
+
+do_spdx[dirs] = "${WORKDIR}"
+
+LICENSELISTVERSION = "2.6"
+
+# If ${S} isn't actually the top-level source directory, set SPDX_S to point at
+# the real top-level directory.
+SPDX_S ?= "${S}"
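+# For example, a recipe whose real top-level sources live in a subdirectory
+# could set (illustrative value only):
+#   SPDX_S = "${S}/src"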
+
+addtask do_spdx before do_configure after do_patch
+
+def spdx_create_tarball(d, srcdir, suffix, ar_outdir):
+ """
+ create the tarball from srcdir
+ """
+ import tarfile, shutil
+
+ # Make sure we are only creating a single tarball for gcc sources
+ #if (d.getVar('SRC_URI') == ""):
+ # return
+ # For the kernel archive, srcdir may just be a link to the
+ # work-shared location. Use os.path.realpath to make sure
+ # that we archive the actual directory and not just the link.
+ srcdir = os.path.realpath(srcdir)
+
+ bb.utils.mkdirhier(ar_outdir)
+ if suffix:
+ filename = '%s-%s.tar.gz' % (d.getVar('PF'), suffix)
+ else:
+ filename = '%s.tar.gz' % d.getVar('PF')
+ tarname = os.path.join(ar_outdir, filename)
+
+ bb.note('Creating %s' % tarname)
+ tar = tarfile.open(tarname, 'w:gz')
+ tar.add(srcdir, arcname=os.path.basename(srcdir))
+ tar.close()
+ #shutil.rmtree(srcdir)
+ return tarname
+
+# Run do_unpack and do_patch
+def spdx_get_src(d):
+ import shutil
+ spdx_workdir = d.getVar('SPDX_WORKDIR')
+ spdx_sysroot_native = d.getVar('STAGING_DIR_NATIVE')
+ pn = d.getVar('PN')
+
+ # The kernel class functions require the source to be in work-shared, so we don't change WORKDIR
+ if not is_work_shared(d):
+ # Change the WORKDIR to make do_unpack do_patch run in another dir.
+ d.setVar('WORKDIR', spdx_workdir)
+ # Restore the original path to recipe's native sysroot (it's relative to WORKDIR).
+ d.setVar('STAGING_DIR_NATIVE', spdx_sysroot_native)
+
+ # Changing 'WORKDIR' also changes 'B'; create the 'B' directory because
+ # following tasks may require it to exist (e.g. some recipes' do_patch
+ # expects 'B' to be present).
+ bb.utils.mkdirhier(d.getVar('B'))
+
+ bb.build.exec_func('do_unpack', d)
+ # Copy source of kernel to spdx_workdir
+ if is_work_shared(d):
+ d.setVar('WORKDIR', spdx_workdir)
+ d.setVar('STAGING_DIR_NATIVE', spdx_sysroot_native)
+ src_dir = spdx_workdir + "/" + d.getVar('PN')+ "-" + d.getVar('PV') + "-" + d.getVar('PR')
+ bb.utils.mkdirhier(src_dir)
+ if bb.data.inherits_class('kernel',d):
+ share_src = d.getVar('STAGING_KERNEL_DIR')
+ cmd_copy_share = "cp -rf " + share_src + "/* " + src_dir + "/"
+ cmd_copy_kernel_result = os.popen(cmd_copy_share).read()
+ bb.note("cmd_copy_kernel_result = " + cmd_copy_kernel_result)
+
+ git_path = src_dir + "/.git"
+ if os.path.exists(git_path):
+ remove_dir_tree(git_path)
+
+ # Make sure gcc and kernel sources are patched only once
+ if not (d.getVar('SRC_URI') == "" or is_work_shared(d)):
+ bb.build.exec_func('do_patch', d)
+
+ # Some userland recipes have no source.
+ if not os.path.exists( spdx_workdir ):
+ bb.utils.mkdirhier(spdx_workdir)
+
+def create_manifest(info,sstatefile):
+ import shutil
+ shutil.copyfile(sstatefile,info['outfile'])
+
+def get_cached_spdx( sstatefile ):
+ import subprocess
+
+ if not os.path.exists( sstatefile ):
+ return None
+
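+ # The grep below expects a standard SPDX tag-value line such as
+ # (hypothetical value):
+ #   PackageVerificationCode: da39a3ee5e6b4b0d3255bfef95601890afd80709
+ # and split(': ')[1] returns the value part.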
+ try:
+ output = subprocess.check_output(['grep', "PackageVerificationCode", sstatefile])
+ except subprocess.CalledProcessError as e:
+ bb.error("Getting PackageVerificationCode failed: command '%s' returned %d:\n%s" % (e.cmd, e.returncode, e.output))
+ return None
+ cached_spdx_info=output.decode('utf-8').split(': ')
+ return cached_spdx_info[1]
+
+## Add necessary information into spdx file
+def write_cached_spdx( info,sstatefile, ver_code ):
+ import subprocess
+
+ def sed_replace(dest_sed_cmd,key_word,replace_info):
+ dest_sed_cmd = dest_sed_cmd + "-e 's#^" + key_word + ".*#" + \
+ key_word + replace_info + "#' "
+ return dest_sed_cmd
+
+ def sed_insert(dest_sed_cmd,key_word,new_line):
+ dest_sed_cmd = dest_sed_cmd + "-e '/^" + key_word \
+ + r"/a\\" + new_line + "' "
+ return dest_sed_cmd
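+ # For illustration, these helpers expand to sed expressions such as
+ # (hypothetical values):
+ #   sed_replace("", "PackageName: ", "zlib")
+ #     -> "-e 's#^PackageName: .*#PackageName: zlib#' "
+ #   sed_insert("", "PackageName: ", "PackageVersion: 1.2.11")
+ #     -> "-e '/^PackageName: /a\\PackageVersion: 1.2.11' "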
+
+ ## Document level information
+ sed_cmd = r"sed -i -e 's#\r$##' "
+ spdx_DocumentComment = "<text>SPDX for " + info['pn'] + " version " \
+ + info['pv'] + "</text>"
+ sed_cmd = sed_replace(sed_cmd,"DocumentComment",spdx_DocumentComment)
+
+ ## Creator information
+ sed_cmd = sed_replace(sed_cmd,"Creator: Tool: ",info['creator']['Tool'])
+
+ ## Package level information
+ sed_cmd = sed_replace(sed_cmd, "PackageName: ", info['pn'])
+ sed_cmd = sed_insert(sed_cmd, "PackageName: ", "PackageVersion: " + info['pv'])
+ sed_cmd = sed_replace(sed_cmd, "PackageDownloadLocation: ",info['package_download_location'])
+ sed_cmd = sed_insert(sed_cmd, "PackageDownloadLocation: ", "PackageHomePage: " + info['package_homepage'])
+ sed_cmd = sed_insert(sed_cmd, "PackageDownloadLocation: ", "PackageSummary: " + "<text>" + info['package_summary'] + "</text>")
+ sed_cmd = sed_insert(sed_cmd, "PackageCopyrightText: ", "PackageComment: <text>\\nModificationRecord: " + info['modified'] + "\\n</text>")
+ sed_cmd = sed_replace(sed_cmd, "PackageVerificationCode: ",ver_code)
+ sed_cmd = sed_insert(sed_cmd, "PackageVerificationCode: ", "PackageDescription: " +
+ "<text>" + info['pn'] + " version " + info['pv'] + "</text>")
+ for contain in info['package_contains'].split( ):
+ sed_cmd = sed_insert(sed_cmd, "PackageComment:"," \\n\\n## Relationships\\nRelationship: " + info['pn'] + " CONTAINS " + contain)
+ for static_link in info['package_static_link'].split( ):
+ sed_cmd = sed_insert(sed_cmd, "PackageComment:"," \\n\\n## Relationships\\nRelationship: " + info['pn'] + " STATIC_LINK " + static_link)
+ sed_cmd = sed_cmd + sstatefile
+
+ subprocess.call("%s" % sed_cmd, shell=True)
+
+def is_work_shared(d):
+ pn = d.getVar('PN')
+ return bb.data.inherits_class('kernel', d) or pn.startswith('gcc-source')
+
+def remove_dir_tree(dir_name):
+ import shutil
+ try:
+ shutil.rmtree(dir_name)
+ except OSError:
+ pass
+
+def remove_file(file_name):
+ try:
+ os.remove(file_name)
+ except OSError as e:
+ pass
+
+def list_files(dir ):
+ for root, subFolders, files in os.walk(dir):
+ for f in files:
+ rel_root = os.path.relpath(root, dir)
+ yield rel_root, f
+ return
+
+def hash_file(file_name):
+ """
+ Return the hex string representation of the SHA1 checksum of the filename
+ """
+ try:
+ import hashlib
+ except ImportError:
+ return None
+
+ sha1 = hashlib.sha1()
+ with open( file_name, "rb" ) as f:
+ for line in f:
+ sha1.update(line)
+ return sha1.hexdigest()
+
+def hash_string(data):
+ import hashlib
+ sha1 = hashlib.sha1()
+ sha1.update(data.encode('utf-8'))
+ return sha1.hexdigest()
+
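+# get_ver_code() below derives a package-level verification code by hashing
+# the concatenation of the per-file SHA1 checksums. Minimal sketch, assuming
+# two (hypothetical) files with SHA1s "aaaa..." and "bbbb...":
+#   ver_code = hash_string("aaaa..." + "bbbb...")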
+def get_ver_code(dirname):
+ chksums = []
+ for f_dir, f in list_files(dirname):
+ try:
+ stats = os.stat(os.path.join(dirname,f_dir,f))
+ except OSError as e:
+ bb.warn("Stat failed: " + str(e) + "\n")
+ continue
+ chksums.append(hash_file(os.path.join(dirname,f_dir,f)))
+ ver_code_string = ''.join(chksums).lower()
+ ver_code = hash_string(ver_code_string)
+ return ver_code
+
+do_spdx[depends] = "${SPDXEPENDENCY}"
+