X-Git-Url: http://info.iut-bm.univ-fcomte.fr/pub/gitweb/simgrid.git/blobdiff_plain/97e2219ed6c0e511f6165460cec79afadf42f589..896386afac0a53b642ff35bb8467791bb952559f:/teshsuite/smpi/MBI/MBIutils.py

diff --git a/teshsuite/smpi/MBI/MBIutils.py b/teshsuite/smpi/MBI/MBIutils.py
index 43e8789296..9eedad5240 100644
--- a/teshsuite/smpi/MBI/MBIutils.py
+++ b/teshsuite/smpi/MBI/MBIutils.py
@@ -24,7 +24,7 @@ class AbstractTool:
 
     def build(self, rootdir, cached=True):
         """Rebuilds the tool binaries. By default, we try to reuse the existing build."""
-        print ("Nothing to do to rebuild the tool binaries.")
+        print("Nothing to do to rebuild the tool binaries.")
 
     def setup(self, rootdir):
         """
@@ -33,7 +33,7 @@ class AbstractTool:
         """
         # pass
 
-    def run(execcmd, filename, binary, num_id, timeout):
+    def run(self, execcmd, filename, binary, num_id, timeout, batchinfo):
         """Compile that test code and anaylse it with the Tool if needed (a cache system should be used)"""
         # pass
 
@@ -65,7 +65,7 @@ possible_details = {
 
     'GlobalConcurrency':'DGlobalConcurrency',
     # larger scope
-#    'BufferingHazard':'EBufferingHazard',
+    'BufferingHazard':'EBufferingHazard',
     'OK':'FOK'}
 
 error_scope = {
@@ -78,7 +78,7 @@ error_scope = {
     'DRace':'multi-processes',
     'DMatch':'multi-processes',
     'DGlobalConcurrency':'multi-processes',
-#    'EBufferingHazard':'system',
+    'EBufferingHazard':'system',
     'FOK':'correct executions'
 }
 
@@ -95,7 +95,7 @@ displayed_name = {
     'EBufferingHazard':'Buffering hazard',
     'FOK':"Correct execution",
 
-    'aislinn':'Aislinn','civl':'CIVL','hermes':'Hermes', 'isp':'ISP','itac':'ITAC', 'simgrid':'Mc SimGrid', 'smpi':'SMPI','smpivg':'SMPI+VG', 'mpisv':'MPI-SV', 'must':'MUST', 'parcoach':'PARCOACH'
+    'aislinn':'Aislinn', 'civl':'CIVL', 'hermes':'Hermes', 'isp':'ISP', 'itac':'ITAC', 'simgrid':'Mc SimGrid', 'smpi':'SMPI', 'smpivg':'SMPI+VG', 'mpisv':'MPI-SV', 'must':'MUST', 'parcoach':'PARCOACH'
 }
 
 def parse_one_code(filename):
@@ -119,8 +119,8 @@ def parse_one_code(filename):
                 state = 2
             else:
                 raise ValueError(f"Unexpected end of MBI_TESTS header at line {line_num}: \n{line}")
-        if state == 1 and re.match("\s+\$ ?.*", line):
-            m = re.match('\s+\$ ?(.*)', line)
+        if state == 1 and re.match(r'\s+\$ ?.*', line):
+            m = re.match(r'\s+\$ ?(.*)', line)
             cmd = m.group(1)
             nextline = next(input_file)
             detail = 'OK'
@@ -166,32 +166,32 @@ def categorize(tool, toolname, test_id, expected):
     if outcome == 'timeout':
         res_category = 'timeout'
         if elapsed is None:
-            diagnostic = f'hard timeout'
+            diagnostic = 'hard timeout'
         else:
             diagnostic = f'timeout after {elapsed} sec'
     elif outcome == 'failure' or outcome == 'segfault':
         res_category = 'failure'
-        diagnostic = f'tool error, or test not run'
+        diagnostic = 'tool error, or test not run'
     elif outcome == 'UNIMPLEMENTED':
         res_category = 'unimplemented'
-        diagnostic = f'coverage issue'
+        diagnostic = 'coverage issue'
     elif outcome == 'other':
         res_category = 'other'
-        diagnostic = f'inconclusive run'
+        diagnostic = 'inconclusive run'
     elif expected == 'OK':
         if outcome == 'OK':
             res_category = 'TRUE_NEG'
-            diagnostic = f'correctly reported no error'
+            diagnostic = 'correctly reported no error'
         else:
             res_category = 'FALSE_POS'
-            diagnostic = f'reported an error in a correct code'
+            diagnostic = 'reported an error in a correct code'
     elif expected == 'ERROR':
         if outcome == 'OK':
             res_category = 'FALSE_NEG'
-            diagnostic = f'failed to detect an error'
+            diagnostic = 'failed to detect an error'
         else:
             res_category = 'TRUE_POS'
-            diagnostic = f'correctly detected an error'
+            diagnostic = 'correctly detected an error'
     else:
         raise ValueError(f"Unexpected expectation: {expected} (must be OK or ERROR)")
 
@@ -214,7 +214,7 @@ def run_cmd(buildcmd, execcmd, cachefile, filename, binary, timeout, batchinfo,
     """
     if os.path.exists(f'{cachefile}.txt') and os.path.exists(f'{cachefile}.elapsed') and os.path.exists(f'{cachefile}.md5sum'):
         hash_md5 = hashlib.md5()
-        with open(filename, 'rb') as sourcefile :
+        with open(filename, 'rb') as sourcefile:
             for chunk in iter(lambda: sourcefile.read(4096), b""):
                 hash_md5.update(chunk)
         newdigest = hash_md5.hexdigest()
@@ -235,7 +235,7 @@ def run_cmd(buildcmd, execcmd, cachefile, filename, binary, timeout, batchinfo,
 
     output = f"Compiling {binary}.c (batchinfo:{batchinfo})\n\n"
     output += f"$ {buildcmd}\n"
-    compil = subprocess.run(buildcmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+    compil = subprocess.run(buildcmd, shell=True, check=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
     if compil.stdout is not None:
         output += str(compil.stdout, errors='replace')
     if compil.returncode != 0:
@@ -260,7 +260,6 @@ def run_cmd(buildcmd, execcmd, cachefile, filename, binary, timeout, batchinfo,
 
     pid = process.pid
     pgid = os.getpgid(pid)  # We need that to forcefully kill subprocesses when leaving
-    outcome = None
     while True:
         if poll_obj.poll(5):  # Something to read? Do check the timeout status every 5 sec if not
             line = process.stdout.readline()
@@ -271,7 +270,6 @@ def run_cmd(buildcmd, execcmd, cachefile, filename, binary, timeout, batchinfo,
             if read_line_lambda != None:
                 read_line_lambda(line, process)
         if time.time() - start_time > timeout:
-            outcome = 'timeout'
             with open(f'{cachefile}.timeout', 'w') as outfile:
                 outfile.write(f'{time.time() - start_time} seconds')
             break
@@ -314,7 +312,7 @@ def run_cmd(buildcmd, execcmd, cachefile, filename, binary, timeout, batchinfo,
         outfile.write(output)
     with open(f'{cachefile}.md5sum', 'w') as outfile:
         hashed = hashlib.md5()
-        with open(filename, 'rb') as sourcefile :
+        with open(filename, 'rb') as sourcefile:
             for chunk in iter(lambda: sourcefile.read(4096), b""):
                 hashed.update(chunk)
         outfile.write(hashed.hexdigest())