aboutsummaryrefslogtreecommitdiffstats
path: root/conftest.py
diff options
context:
space:
mode:
authorduerpei <duep.fnst@fujitsu.com>2022-07-28 10:34:43 +0800
committerduerpei <duep.fnst@fujitsu.com>2022-07-28 10:34:43 +0800
commitaa5fab53993f29311f1aef83488eb0f759dabca8 (patch)
treea8f561e714feaa48c577c24b062fef8fe9c9b2d3 /conftest.py
parent76665693bf19bdbe159849b43cc42142d3093c2f (diff)
agl-test-framework: demo code submissionneedlefish_13.93.0needlefish/13.93.013.93.0
Submit the demo code of agl-test-framework The "agl-test framework" encapsulates pytest, which aims to provide a unified test set execution entrance. It supports running various test sets, even if these test sets come from different test frameworks, processing their test logs uniformly and generating a complete test report. In this way, it is convenient to test as many targets as possible, over a wide range, so that the testing covers a wider range of objects and is more comprehensive. At present, we plan to support the following test sets in "agl-test": 1. Transplant test sets under Fuego and AGL-JTA 2. Retain the test set under pyagl and agl-ptest (so will depend on "agl-ptest") 3. Migrate new test sets (with upstream) 4. Append new test sets (without upstream) The output of a test run is summarized by levels. The first level is the summary of all test sets, and the second level is the summary of a single test set. Now, they are displayed in HTML format, and other formats can also be considered later. Bug-AGL: SPEC-4345 Signed-off-by: duerpei <duep.fnst@fujitsu.com> Change-Id: I25dfedcf8cdd373544c4fae677330defb5d21840
Diffstat (limited to 'conftest.py')
-rw-r--r--conftest.py170
1 files changed, 170 insertions, 0 deletions
diff --git a/conftest.py b/conftest.py
new file mode 100644
index 0000000..eade841
--- /dev/null
+++ b/conftest.py
@@ -0,0 +1,170 @@
+# -*- coding:utf-8 -*-
+import pytest
+import json
+import shutil
+import subprocess
+
+from plugins.agl_test_conf import BASE_LOGS_DIR
+from plugins.agl_test_conf import TMP_LOGS_DIR
+from plugins.agl_test_conf import REPORT_LOGS_DIR
+from plugins.agl_test_conf import TMP_TEST_REPORT
+
+
@pytest.fixture(scope='session', autouse=True)
def setup_compress_function():
    """Session-wide, autouse fixture.

    Before the run: clean any stale top-level report.json and create the
    report directories. After the run: merge every test set's report.json
    into one summary, render it as HTML, and zip the whole report tree.
    """
    # Local import: only this fixture needs os; keeps the edit self-contained.
    import os

    # Before the tests start, remove a stale top-level report.json if any.
    # (Replaces the original `ls`/`rm` subprocess round-trip.)
    # NOTE(review): this assumes TMP_LOGS_DIR ends with a slash, while the
    # post-run code below adds "/report.json" explicitly — confirm the
    # value of TMP_LOGS_DIR in plugins.agl_test_conf.
    report_json = TMP_LOGS_DIR + "report.json"
    if os.path.exists(report_json):
        os.remove(report_json)

    # Create the report directories (mkdir -p semantics, no subprocess).
    os.makedirs(TMP_TEST_REPORT, exist_ok=True)
    os.makedirs(REPORT_LOGS_DIR, exist_ok=True)

    yield

    # Collect the report.json paths of all test sets into one list file,
    # one path per line, relative to TMP_LOGS_DIR (find prints "./...").
    report_files = TMP_LOGS_DIR + "report_files"
    with open(report_files, 'w') as report_f:
        subprocess.run(['find', '-name', 'report.json'],
                       cwd=TMP_LOGS_DIR, stdout=report_f)

    # Get the summary data and write it to the merged report.json file.
    summary_data = get_summary_data(report_files)
    summary_json = TMP_LOGS_DIR + "/report.json"
    with open(summary_json, 'w') as summary_file:
        json.dump(summary_data, summary_file, indent=4, sort_keys=False)

    # Create the summary report in HTML.
    html = get_summary_report_html(summary_data)
    html_path = TMP_LOGS_DIR + "test-report/summary-report.html"
    with open(html_path, "w") as html_file:
        html_file.write(html)

    # Copy the summary report file next to the packaged logs.
    source_file = TMP_LOGS_DIR + "test-report/summary-report.html"
    target_file = REPORT_LOGS_DIR + "summary-report.html"
    shutil.copyfile(source_file, target_file)

    # Package the test report.
    # TODO The name of the zip file is formatted as follows:
    #      agl-test-log-13.0.1-raspberrypi4-20200808.zip
    base_name = REPORT_LOGS_DIR + "agl-test-log-xxx"
    root_dir = TMP_LOGS_DIR + "test-report"
    shutil.make_archive(base_name, "zip", root_dir)

    # TODO:
    # Upload the final log to Artifactorial
+
# Summarize every per-test-set report.json listed in the report_files file.
def get_summary_data(report_files):
    """Merge all per-test-set report.json files into one summary dict.

    report_files: path to a text file holding one report.json path per
    line as printed by `find` run inside TMP_LOGS_DIR (lines look like
    "./<test-set>/report.json").

    Returns a dict mapping each test-set name to its counters
    (total/passed/failed/skipped/test_status), plus a "summary" entry
    counting how many test *sets* passed, failed or were skipped.
    """
    summary_data = {}
    summary_total = summary_passed = summary_failed = summary_skipped = 0

    # Fix: the original opened this file and never closed it (handle leak).
    with open(report_files) as listing:
        for line in listing:
            # Fix: the original sliced [1:-1], which chops a real character
            # when the final line has no trailing newline, and turns a blank
            # line into a bogus directory path. Strip, skip empties, then
            # drop only the leading "." that `find` prints.
            rel = line.strip()
            if not rel:
                continue
            report_json = TMP_LOGS_DIR + rel[1:]
            with open(report_json, 'r') as f:
                data = json.load(f)

            test_name = data["test_name"]
            test_status = data["test_status"]
            summary_data[test_name] = {
                'total': data["collected"],
                'passed': data["passed"],
                'failed': data["failed"],
                'skipped': data["skipped"],
                'test_status': test_status,
            }

            # The top-level counters count whole test sets, one per report.
            summary_total += 1
            if test_status == "passed":
                summary_passed += 1
            elif test_status == "failed":
                summary_failed += 1
            else:
                summary_skipped += 1

    summary_data["summary"] = {
        "summary_total": summary_total,
        "summary_passed": summary_passed,
        "summary_failed": summary_failed,
        "summary_skipped": summary_skipped,
    }

    return summary_data
+
# Generate the HTML content for the summary report.
def get_summary_report_html(summary_data):
    """Render summary_data (as produced by get_summary_data) into a
    self-contained HTML summary page and return it as one string.

    The overall status is "success" only when every test set passed.
    """
    summary = summary_data["summary"]
    status = "success" if summary["summary_total"] == summary["summary_passed"] else "fail"

    # Fix: build the page in a list and join once at the end — the original
    # repeated `html = html + ...`, which is quadratic in the page size.
    parts = ["<html>"]

    # <head> </head>
    parts += ["<head>", "<title>", "Summary Report", "</title>", "</head>"]

    # <body> </body>
    parts.append("<body>")
    parts.append("<h1>" + "Summary Report" + "</h1>")
    parts.append("<p>" + "Status :" + status + "</p>")
    parts.append("<p>" + "Total: " + str(summary["summary_total"]))
    parts.append(" Pass: " + str(summary["summary_passed"]))
    parts.append(" Fail: " + str(summary["summary_failed"]))
    parts.append(" Skip: " + str(summary["summary_skipped"]) + "</p>")
    parts.append("<p>Details : </p>")

    # <table> </table> — header row.
    parts.append("<table border=\"1\" cellspacing=\"2\" >")
    parts.append("<tr bgcolor = \"2400B0\">")
    for column in ("test suite", "status", "pass", "fail", "skip"):
        parts.append("<th><font color = \"white\">" + column + "</font></th>")
    parts.append("</tr>")

    # One row per test set, alternating the row background colour.
    bgcolor = 0
    for test_suite, result in summary_data.items():
        if test_suite == "summary":
            continue
        if bgcolor == 0:
            parts.append("<tr bgcolor = \"CCCBE4\">")
            bgcolor = 1
        else:
            parts.append("<tr bgcolor = \"E8E7F2\">")
            bgcolor = 0
        parts.append("<th>" + test_suite + "</th>")
        parts.append("<th>" + result["test_status"] + "</th>")
        parts.append("<th>" + str(result["passed"]) + "</th>")
        parts.append("<th>" + str(result["failed"]) + "</th>")
        parts.append("<th>" + str(result["skipped"]) + "</th>")
        parts.append("</tr>")

    parts.append("</table>")
    parts.append("<p></p>")
    parts.append("<font>Detail log :</font>")
    # TODO update the link address for agl-test-log-xxx.zip
    parts.append("<a href=\"" + "address of agl-test-log-xxx.zip ")
    parts.append("\">agl-test-log-13.0.1-raspberrypi4-20200808.zip</a>")
    parts.append("</body>")
    parts.append("</html>")

    return "".join(parts)