-# Copyright 2021-2022. The MBI project. All rights reserved.
+# Copyright 2021-2022. The MBI project. All rights reserved.
# This program is free software; you can redistribute it and/or modify it under the terms of the license (GNU GPL).
import os
def build(self, rootdir, cached=True):
"""Rebuilds the tool binaries. By default, we try to reuse the existing build."""
- print ("Nothing to do to rebuild the tool binaries.")
+ print("Nothing to do to rebuild the tool binaries.")
def setup(self, rootdir):
"""
"""
# pass
- def run(execcmd, filename, binary, num_id, timeout):
+ def run(self, execcmd, filename, binary, num_id, timeout, batchinfo):
"""Compile that test code and anaylse it with the Tool if needed (a cache system should be used)"""
# pass
possible_details = {
# scope limited to one call
'InvalidBuffer':'AInvalidParam', 'InvalidCommunicator':'AInvalidParam', 'InvalidDatatype':'AInvalidParam', 'InvalidRoot':'AInvalidParam', 'InvalidTag':'AInvalidParam', 'InvalidWindow':'AInvalidParam', 'InvalidOperator':'AInvalidParam', 'InvalidOtherArg':'AInvalidParam', 'ActualDatatype':'AInvalidParam',
- 'InvalidSrcDest':'AInvalidParam',
+ 'InvalidSrcDest':'AInvalidParam',
# scope: Process-wide
-# 'OutOfInitFini':'BInitFini',
+# 'OutOfInitFini':'BInitFini',
'CommunicatorLeak':'BResLeak', 'DatatypeLeak':'BResLeak', 'GroupLeak':'BResLeak', 'OperatorLeak':'BResLeak', 'TypeLeak':'BResLeak', 'RequestLeak':'BResLeak',
'MissingStart':'BReqLifecycle', 'MissingWait':'BReqLifecycle',
'LocalConcurrency':'BLocalConcurrency',
# scope: communicator
- 'CallMatching':'DMatch',
+ 'CallMatching':'DMatch',
'CommunicatorMatching':'CMatch', 'DatatypeMatching':'CMatch', 'OperatorMatching':'CMatch', 'RootMatching':'CMatch', 'TagMatching':'CMatch',
- 'MessageRace':'DRace',
-
+ 'MessageRace':'DRace',
+
'GlobalConcurrency':'DGlobalConcurrency',
# larger scope
-# 'BufferingHazard':'EBufferingHazard',
+ 'BufferingHazard':'EBufferingHazard',
'OK':'FOK'}
error_scope = {
'DRace':'multi-processes',
'DMatch':'multi-processes',
'DGlobalConcurrency':'multi-processes',
-# 'EBufferingHazard':'system',
+ 'EBufferingHazard':'system',
'FOK':'correct executions'
}
'EBufferingHazard':'Buffering hazard',
'FOK':"Correct execution",
- 'aislinn':'Aislinn','civl':'CIVL','hermes':'Hermes', 'isp':'ISP','itac':'ITAC', 'simgrid':'Mc SimGrid', 'smpi':'SMPI','smpivg':'SMPI+VG', 'mpisv':'MPI-SV', 'must':'MUST', 'parcoach':'PARCOACH'
+ 'aislinn':'Aislinn', 'civl':'CIVL', 'hermes':'Hermes', 'isp':'ISP', 'itac':'ITAC', 'simgrid':'Mc SimGrid', 'smpi':'SMPI', 'smpivg':'SMPI+VG', 'mpisv':'MPI-SV', 'must':'MUST', 'parcoach':'PARCOACH'
}
def parse_one_code(filename):
state = 2
else:
raise ValueError(f"Unexpected end of MBI_TESTS header at line {line_num}: \n{line}")
- if state == 1 and re.match("\s+\$ ?.*", line):
- m = re.match('\s+\$ ?(.*)', line)
+ if state == 1 and re.match(r'\s+\$ ?.*', line):
+ m = re.match(r'\s+\$ ?(.*)', line)
cmd = m.group(1)
nextline = next(input_file)
detail = 'OK'
if outcome == 'timeout':
res_category = 'timeout'
if elapsed is None:
- diagnostic = f'hard timeout'
+ diagnostic = 'hard timeout'
else:
diagnostic = f'timeout after {elapsed} sec'
elif outcome == 'failure' or outcome == 'segfault':
res_category = 'failure'
- diagnostic = f'tool error, or test not run'
+ diagnostic = 'tool error, or test not run'
elif outcome == 'UNIMPLEMENTED':
res_category = 'unimplemented'
- diagnostic = f'coverage issue'
+ diagnostic = 'coverage issue'
elif outcome == 'other':
res_category = 'other'
- diagnostic = f'inconclusive run'
+ diagnostic = 'inconclusive run'
elif expected == 'OK':
if outcome == 'OK':
res_category = 'TRUE_NEG'
- diagnostic = f'correctly reported no error'
+ diagnostic = 'correctly reported no error'
else:
res_category = 'FALSE_POS'
- diagnostic = f'reported an error in a correct code'
+ diagnostic = 'reported an error in a correct code'
elif expected == 'ERROR':
if outcome == 'OK':
res_category = 'FALSE_NEG'
- diagnostic = f'failed to detect an error'
+ diagnostic = 'failed to detect an error'
else:
res_category = 'TRUE_POS'
- diagnostic = f'correctly detected an error'
+ diagnostic = 'correctly detected an error'
else:
raise ValueError(f"Unexpected expectation: {expected} (must be OK or ERROR)")
def run_cmd(buildcmd, execcmd, cachefile, filename, binary, timeout, batchinfo, read_line_lambda=None):
"""
    Runs the test if needed. Returns True if the test was run, and False if it was cached.
-
+
The result is cached if possible, and the test is rerun only if the `test.txt` (containing the tool output) or the `test.elapsed` (containing the timing info) do not exist, or if `test.md5sum` (containing the md5sum of the code to compile) does not match.
Parameters:
- - buildcmd and execcmd are shell commands to run. buildcmd can be any shell line (incuding && groups), but execcmd must be a single binary to run.
+    - buildcmd and execcmd are shell commands to run. buildcmd can be any shell line (including && groups), but execcmd must be a single binary to run.
- cachefile is the name of the test
- filename is the source file containing the code
- binary the file name in which to compile the code
"""
if os.path.exists(f'{cachefile}.txt') and os.path.exists(f'{cachefile}.elapsed') and os.path.exists(f'{cachefile}.md5sum'):
hash_md5 = hashlib.md5()
- with open(filename, 'rb') as sourcefile :
+ with open(filename, 'rb') as sourcefile:
for chunk in iter(lambda: sourcefile.read(4096), b""):
hash_md5.update(chunk)
newdigest = hash_md5.hexdigest()
output = f"Compiling {binary}.c (batchinfo:{batchinfo})\n\n"
output += f"$ {buildcmd}\n"
- compil = subprocess.run(buildcmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+ compil = subprocess.run(buildcmd, shell=True, check=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
if compil.stdout is not None:
output += str(compil.stdout, errors='replace')
if compil.returncode != 0:
pid = process.pid
pgid = os.getpgid(pid) # We need that to forcefully kill subprocesses when leaving
- outcome = None
while True:
if poll_obj.poll(5): # Something to read? Do check the timeout status every 5 sec if not
line = process.stdout.readline()
if read_line_lambda != None:
read_line_lambda(line, process)
if time.time() - start_time > timeout:
- outcome = 'timeout'
with open(f'{cachefile}.timeout', 'w') as outfile:
outfile.write(f'{time.time() - start_time} seconds')
break
outfile.write(output)
with open(f'{cachefile}.md5sum', 'w') as outfile:
hashed = hashlib.md5()
- with open(filename, 'rb') as sourcefile :
+ with open(filename, 'rb') as sourcefile:
for chunk in iter(lambda: sourcefile.read(4096), b""):
hashed.update(chunk)
outfile.write(hashed.hexdigest())
-
+
return True