-rwxr-xr-x | agl-test                   | 18
-rw-r--r-- | plugins/agl_test_base.py   |  3
-rw-r--r-- | pytest.ini                 |  1
-rw-r--r-- | tests/crashme/__init__.py  |  0
-rw-r--r-- | tests/crashme/run_tests.py | 95
-rw-r--r-- | tests/crashme/spec.json    | 20
6 files changed, 136 insertions(+), 1 deletion(-)
diff --git a/agl-test b/agl-test
--- a/agl-test
+++ b/agl-test
@@ -1,3 +1,19 @@
 #!/bin/sh
+
 cd /usr/AGL/agl-test/
-exec pytest "$@"
+
+# default stress level is low
+if [[ ! $STRESS_LEVEL ]]
+then
+    export STRESS_LEVEL="low"
+fi
+echo "STRESS_LEVEL: $STRESS_LEVEL"
+
+# default test without dangerous marker
+moption="-m "
+if [[ $* =~ ${moption} ]]
+then
+    exec pytest "$@"
+else
+    exec pytest "$@" -m "not dangerous"
+fi
diff --git a/plugins/agl_test_base.py b/plugins/agl_test_base.py
index 0e3a153..bd39335 100644
--- a/plugins/agl_test_base.py
+++ b/plugins/agl_test_base.py
@@ -29,6 +29,9 @@ class AGLBaseTest:
     def get_workdir(self):
         return conf.WORK_DIR + self.name + "/resource/"
 
+    def get_spec_path(self):
+        return conf.WORK_DIR + self.name + "/spec.json"
+
     def append_one_caseinfo(name, value, status):
         self.case_info_list[name] = [name, value, status]
diff --git a/pytest.ini b/pytest.ini
--- a/pytest.ini
+++ b/pytest.ini
@@ -5,3 +5,4 @@
 cache_dir = /var/run/agl-test/.pytest_cache
 markers =
     oss_default: default test suites of OSS like rpm, etc.
+    dangerous: dangerous tests, might crash the platform.
diff --git a/tests/crashme/__init__.py b/tests/crashme/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/tests/crashme/__init__.py
diff --git a/tests/crashme/run_tests.py b/tests/crashme/run_tests.py
new file mode 100644
index 0000000..93f62cd
--- /dev/null
+++ b/tests/crashme/run_tests.py
@@ -0,0 +1,95 @@
+import pytest
+import subprocess
+import pathlib
+import os
+import json
+
+from plugins.agl_test_base import AGLBaseTest
+
+class CrashmeBase(AGLBaseTest):
+    def __init__(self):
+        super().__init__(name="crashme")
+
+    def setup_runtest_params(self):
+        # Get env 'STRESS_LEVEL'
+        env_dist=os.environ
+        local_stress_level = str(env_dist.get("STRESS_LEVEL"))
+
+        # If not set correctly or just not set, set default to "low"
+        if local_stress_level not in ("low", "mid", "high"):
+            local_stress_level = "low"
+
+        # Read dictionary data out of spec.json
+        spec_file_location = self.get_spec_path()
+        fp = open(spec_file_location, 'r')
+
+        json_data = fp.read()
+        spec_dict = json.loads(json_data)
+
+        dict_key="stress_level_" + str(local_stress_level)
+
+        local_param_nbytes = spec_dict[dict_key]['NBYTES']
+        local_param_srand = spec_dict[dict_key]['SRAND']
+        local_param_ntries = spec_dict[dict_key]['NTRIES']
+        local_param_nsub = spec_dict[dict_key]['NSUB']
+
+        param_string = str(local_param_nbytes) + ' ' \
+                       + str(local_param_srand) + ' ' \
+                       + str(local_param_ntries) + ' ' \
+                       + str(local_param_nsub)
+
+        return param_string
+
+    def run_test_fun(self):
+        log_file = self.get_logfile()
+        cwd_buf = self.get_temp_logdir()
+        oss_workdir = self.get_workdir()
+        runtest_param_buf = self.setup_runtest_params()
+
+        run_test_cmd = oss_workdir + 'crashme ' + runtest_param_buf + \
+                       ' | grep "^Test complete\\|^exit status\\|' + \
+                       '^child_kill_count\\|[.]\\{3\\}\\|^Number of distinct cases"'
+
+        with open(log_file, 'w') as log_f:
+            run_test_crashme = subprocess.run(run_test_cmd, shell=True,
+                    cwd=cwd_buf, stdout=log_f, stderr=subprocess.DEVNULL)
+        log_f.close()
+
+        if (run_test_crashme.returncode == 0):
+            self.case_info_list = {'test_crashme':
+                ['test_crashme', str(run_test_crashme.returncode), 'passed']}
+        else:
+            self.case_info_list = {'test_crashme':
+                ['test_crashme', str(run_test_crashme.returncode), 'failed']}
+
+    def precheck(self):
+        test_file_location = self.get_workdir() + "/crashme"
+        path_checker = pathlib.Path(test_file_location)
+        return super().precheck() and path_checker.is_file()
+
+@pytest.fixture(scope='module')
+def testbase():
+    #init instance for test
+    instance = CrashmeBase()
+
+    #run test scripts
+    instance.run_test_fun()
+
+    yield instance
+
+    #package log files and make report file
+    instance.log_report()
+
+def precheck():
+    instance = CrashmeBase()
+    return instance.precheck()
+skip_msg = "The current environment does not match the test requirements."
+pytestmark = pytest.mark.skipif(precheck() == False, reason = skip_msg)
+
+@pytest.mark.dangerous
+@pytest.mark.order("last")
+def test_crashme(testbase: CrashmeBase):
+    assert testbase.case_info_list['test_crashme'][1] == '0'
+
+if __name__ == '__main__':
+    pytest.main("-s run_tests")
diff --git a/tests/crashme/spec.json b/tests/crashme/spec.json
new file mode 100644
index 0000000..0d47b8c
--- /dev/null
+++ b/tests/crashme/spec.json
@@ -0,0 +1,20 @@
+{
+    "stress_level_low": {
+        "NBYTES": "+2000",
+        "SRAND": "666",
+        "NTRIES": "100",
+        "NSUB": "00:15:00"
+    },
+    "stress_level_mid": {
+        "NBYTES": "+3000",
+        "SRAND": "666",
+        "NTRIES": "150",
+        "NSUB": "00:30:00"
+    },
+    "stress_level_high": {
+        "NBYTES": "+8000",
+        "SRAND": "666",
+        "NTRIES": "800",
+        "NSUB": "01:00:00"
+    }
+}
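
A minimal invocation sketch of the wrapper behavior added above. It assumes the agl-test launcher is on the target's PATH and that the crashme binary has already been deployed to the suite's work directory; both are assumptions about the installed image, not part of this patch.

    # Default run: STRESS_LEVEL falls back to "low" and the wrapper appends
    # -m "not dangerous", so the crashme suite is deselected.
    agl-test

    # Opt in to the dangerous tests: passing any -m expression bypasses the
    # default filter, and STRESS_LEVEL selects a parameter set from spec.json.
    STRESS_LEVEL=high agl-test -m dangerous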