Code review results: refactored the filtering of test files on the server, categorizing them into tests to run, to skip, and "not found"; refactored testlog_to_xml_converter to reduce the number of checks that test_info is not None. (A sketch of the categorization flow follows the change summary below.)
parent 2014a69bdb
commit 3cfb059e91
2 changed files with 75 additions and 58 deletions
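For orientation before the diff: the test-runner script (the first file below) splits the old run_tests() into two steps. categorize_tests() sorts the test binaries found in the workspace into "to run", "skip", and "not found" buckets, and run_tests() then executes only the "to run" bucket. The following is a minimal, self-contained sketch of that flow; the bucket keys mirror the TO_RUN/SKIP/NOT_FOUND constants introduced in the diff, but categorize() itself is an illustrative stand-in, not the committed function.

from os import listdir

TO_RUN, SKIP, NOT_FOUND = "to_run", "skip", "not_found"

def categorize(workspace_path, runlist, skiplist):
    # Test binaries actually present in the workspace.
    on_disk = [f for f in listdir(workspace_path) if f.endswith("_tests")]
    if not runlist:
        # No explicit include list: run everything on disk except the skipped tests.
        skip = [t for t in skiplist if t in on_disk]
        not_found = [t for t in skiplist if t not in on_disk]
        to_run = [t for t in on_disk if t not in skip]
    else:
        # Explicit include list: run only what was requested and exists on disk.
        to_run = [t for t in runlist if t in on_disk]
        not_found = [t for t in runlist if t not in on_disk]
        skip = []
    return {TO_RUN: to_run, SKIP: skip, NOT_FOUND: not_found}

A runner then only needs to iterate over the TO_RUN bucket and report the SKIP and NOT_FOUND buckets, which is the split that main() performs in the diff below.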
@@ -35,6 +35,12 @@ skiplist = []
 runlist = []
 logfile = "testlog.log"
 
+TO_RUN = "to_run"
+SKIP = "skip"
+NOT_FOUND = "not_found"
+FAILED = "failed"
+PASSED = "passed"
+
 PORT = 34568
 
 def print_pretty(result, tests):
@@ -70,6 +76,12 @@ Example
 
 
 def set_global_vars():
+
+    global skiplist
+    global logfile
+    global runlist
+    global workspace_path
+
     try:
         opts, args = getopt.getopt(sys.argv[1:], "he:f:o:i:",
                                    ["help", "exclude=", "include=", "folder=", "output="])
@@ -83,21 +95,15 @@ def set_global_vars():
             usage()
             sys.exit()
         if option in ("-o", "--output"):
-            global logfile
             logfile = argument
         elif option in ("-e", "--exclude"):
-            exclude_tests = argument.split(",")
-            for exclude_test in exclude_tests:
-                global skiplist
-                skiplist.append(exclude_test)
+            skiplist = list(set(argument.split(",")))
        elif option in ("-i", "--include"):
-            print("-i option found, -e option will be ignored!")
+            print("\n-i option found, -e option will be ignored!")
             include_tests = argument.split(",")
             for include_test in include_tests:
-                global runlist
                 runlist.append(include_test)
         elif option in ("-f", "--folder"):
-            global workspace_path
             workspace_path = argument
         else:
             assert False, "unhandled option"
@@ -115,40 +121,45 @@ def stop_server():
         print("Failed to stop the server...")
 
 
-def run_tests():
-    tests_path = "{workspace_path}".format(workspace_path=workspace_path)
+def categorize_tests():
+    global skiplist
 
+    tests_to_run = []
+    local_skiplist = []
+    not_found = []
+
+    test_files_in_dir = filter(lambda x: x.endswith("_tests"), listdir(workspace_path))
+
+    on_disk = lambda x: x in test_files_in_dir
+    not_on_disk = lambda x : not on_disk(x)
+
+    if len(runlist) == 0:
+        local_skiplist = filter(on_disk, skiplist)
+        not_found = filter(not_on_disk, local_skiplist)
+        tests_to_run = filter(lambda x: x not in local_skiplist, test_files_in_dir)
+    else:
+        tests_to_run = filter(on_disk, runlist)
+        not_found = filter(not_on_disk, tests_to_run)
+
+    return {TO_RUN:tests_to_run, SKIP:local_skiplist, NOT_FOUND:not_found}
+
+
+def run_tests(tests_to_run):
+
     failed = []
     passed = []
-    skipped = []
-    not_found = list(runlist)
 
-    if len(runlist) != 0:
-        global skiplist
-        skiplist = []
-
     server = None
-    for file in listdir(tests_path):
 
-        if not file.endswith("_tests"):
-            continue
-        if file in skiplist:
-            skipped.append(skiplist.pop(skiplist.index(file)))
-            continue
-
-        if len(runlist) > 0 and (file not in runlist):
-            continue
-
-        if len(not_found) > 0:
-            not_found.pop(not_found.index(file))
-
+    for file in tests_to_run:
+
         if file == "platform_tests":
             start_server()
 
         process = subprocess.Popen("{tests_path}/{file} 2>> {logfile}".
-                                   format(tests_path=tests_path, file=file, logfile=logfile),
-                                   shell=True,
-                                   stdout=subprocess.PIPE)
+                                   format(tests_path=workspace_path, file=file, logfile=logfile),
+                                   shell=True,
+                                   stdout=subprocess.PIPE)
 
         process.wait()
 
@@ -159,10 +170,8 @@ def run_tests():
             failed.append(file)
         else:
             passed.append(file)
 
-    not_found.extend(skiplist)
-
-    return {"failed": failed, "passed": passed, "skipped": skipped, "not_found": not_found}
+    return {FAILED: failed, PASSED: passed}
 
 
 def rm_log_file():
@@ -176,12 +185,14 @@ def main():
     set_global_vars()
     rm_log_file()
 
-    results = run_tests()
+    categorized_tests = categorize_tests()
 
-    print_pretty("failed", results["failed"])
-    print_pretty("skipped", results["skipped"])
-    print_pretty("passed", results["passed"])
-    print_pretty("not found", results["not_found"])
+    results = run_tests(categorized_tests[TO_RUN])
+
+    print_pretty("failed", results[FAILED])
+    print_pretty("skipped", categorized_tests[SKIP])
+    print_pretty("passed", results[PASSED])
+    print_pretty("not found", categorized_tests[NOT_FOUND])
 
 
 if (__name__ == "__main__"):
@@ -19,17 +19,23 @@ import xml.etree.ElementTree as ElementTree
 class TestInfo:
 
 
-    def __init__(self, test_name):
+    def __init__(self):
+        self.test_name = None
+        self.test_comment = None
+        self.test_result = None
+        self.test_duration = 0.0
+
+    def set_name(self, test_name):
+        self.obj_is_valid = True
         self.test_suite, name = test_name.split("::", 1)
 
         self.test_suite = self.test_suite[0: -4]
         name = name.replace("::", ".")
 
         self.test_name = name
-        self.test_comment = None
-        self.test_result = None
-        self.test_duration = 0.0
-
+
+    def is_valid(self):
+        return self.obj_is_valid
 
 
     def append_comment(self, comment):
@@ -75,32 +81,32 @@ class Parser:
         test_info = None
 
         for line in f.readlines():
+
+            if test_info == None:
+                test_info = TestInfo()
+
             line = line.rstrip().decode('utf-8')
 
             if line.startswith("Running"):
-                test_info = TestInfo(line[len("Running "):])
+                test_info.set_name(line[len("Running "):])
 
             elif line.startswith("Test took"):
-                if test_info is not None:
-                    test_info.set_duration(line[len("Test took "):-3])
+                test_info.set_duration(line[len("Test took "):-3])
+                if test_info.is_valid():
                     self.root.append(test_info.xml())
-
-                    test_info = None
+                test_info = None
 
             elif line == "OK" or line.startswith("FAILED"):
-                if test_info is not None:
-                    test_info.test_result = line
-                    if line.startswith("FAILED"):
-                        test_info.append_comment(line[len("FAILED"):])
+                test_info.test_result = line
+                if line.startswith("FAILED"):
+                    test_info.append_comment(line[len("FAILED"):])
 
             else:
-                if test_info is not None:
-                    test_info.append_comment(line)
+                test_info.append_comment(line)
 
 
     def write_xml_file(self):
         print(">>> Self xml file: {xml_file}".format(xml_file=self.xml_file))
-
         ElementTree.ElementTree(self.root).write(self.xml_file, encoding="UTF-8", xml_declaration=True)
 
 
@@ -157,7 +163,7 @@ def main():
     parser.parse_log_file()
     parser.write_xml_file()
 
-    print("Finished writing the xUnit-style xml file")
+    print("\nFinished writing the xUnit-style xml file\n")
 
 
 if __name__ == '__main__':
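The converter change above follows a construct-then-validate pattern: a TestInfo object is always created at the top of the parsing loop, set_name() flips a validity flag once a "Running ..." line names a test, and is_valid() gates the final append, so the per-branch checks that test_info is not None disappear. Below is a minimal standalone sketch of that pattern; it reuses the method names from the diff but with simplified fields and a hypothetical parse() driver, not the committed Parser class.

class TestInfo:
    def __init__(self):
        self.obj_is_valid = False      # assumed default; flips to True once a test is named
        self.test_name = None
        self.test_comment = None

    def set_name(self, test_name):
        self.obj_is_valid = True
        self.test_name = test_name

    def is_valid(self):
        return self.obj_is_valid


def parse(lines):
    finished = []
    test_info = None
    for line in lines:
        if test_info is None:
            test_info = TestInfo()     # always have an object, so no None checks below
        if line.startswith("Running "):
            test_info.set_name(line[len("Running "):])
        elif line.startswith("Test took"):
            if test_info.is_valid():   # only keep tests that were actually named
                finished.append(test_info.test_name)
            test_info = None           # start fresh for the next test
        else:
            test_info.test_comment = line
    return finished

The design trades an explicit None check in every branch for a single validity check at the one place where a finished test record is emitted.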