import pytest
import subprocess
import pathlib
import os

from plugins.agl_test_base import AGLBaseTest

class AIOBase(AGLBaseTest):
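    """pytest wrapper around the aio-stress binary, built on AGLBaseTest."""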
    def __init__(self):
        super().__init__(name="aio-stress")

    def run_test_fun(self):
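        """Run the aio-stress binary and record the per-case result.

        The command output is written to the suite log file and the
        outcome is stored in self.case_info_list for the test function.
        """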
        log_file = self.get_logfile()
        cwd = self.get_temp_logdir()
        workdir = self.get_workdir()
        run_test = f'{workdir}/aio-stress -s $TEST_FILE_SIZE ./testfile'

        with open(log_file, 'w') as log_f:
            # Run via the shell so $TEST_FILE_SIZE is expanded; keep the
            # parent environment and only add the size variable.
            output = subprocess.run(run_test, shell=True,
                                    cwd=cwd, stdout=log_f, stderr=log_f,
                                    env={**os.environ,
                                         'TEST_FILE_SIZE': '20M'})

        if output.returncode == 0:
            self.case_info_list = {'test_aiostress': ['test_aiostress',
                                   '', 'passed']}
        else:
            self.case_info_list = {'test_aiostress': ['test_aiostress',
                                   '', 'failed']}

    def precheck(self):
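        """Check the base-class preconditions and that the aio-stress binary exists."""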
        run_test = self.get_workdir() + "/aio-stress"
        path_script = pathlib.Path(run_test)
        return super().precheck() and path_script.is_file()

@pytest.fixture(scope='module')
def testbase():
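    """Module-scoped fixture: run aio-stress once, then package logs at teardown."""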
    #init instance for test
    instance = AIOBase()
    #run test scripts
    instance.run_test_fun()
    #write test suite info to file
    instance.write_run_info()

    yield instance

    #package log files and make report file
    instance.log_report()

def setup_module(module):
    print("setup function start")

def precheck():
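    """Run the environment precheck; record skip info if it fails."""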
    instance = AIOBase()
    output = instance.precheck()
    if not output:
        #write test suite info to file
        instance.write_skip_info()
    return output

skip_msg = "The current environment does not match the test requirements."
pytestmark = pytest.mark.skipif(not precheck(), reason=skip_msg)

@pytest.mark.oss_default
def test_aiostress(testbase: AIOBase):
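    """Check that the aio-stress run recorded a 'passed' result."""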
    assert testbase.case_info_list['test_aiostress'][2] == 'passed'

if __name__ == '__main__':
    pytest.main(['-s', 'run_tests'])