forked from organicmaps/organicmaps
[python][generator] Added logs checker.
commit cacd7caaca
parent 068ea7fe86
7 changed files with 126 additions and 36 deletions
tools/python/maps_generator/check_logs.py (new file, 50 lines)

@@ -0,0 +1,50 @@
import argparse
import sys

from maps_generator.checks.default_check_set import CheckType
from maps_generator.checks.default_check_set import get_logs_check_sets_and_filters
from maps_generator.checks.default_check_set import run_checks_and_print_results


def get_args():
    parser = argparse.ArgumentParser(
        description="This script checks maps generation logs and prints results."
    )
    parser.add_argument(
        "--old", type=str, required=True, help="Path to old logs directory.",
    )
    parser.add_argument(
        "--new", type=str, required=True, help="Path to new logs directory.",
    )

    parser.add_argument(
        "--level",
        type=str,
        required=False,
        choices=("low", "medium", "hard", "strict"),
        default="hard",
        help="Messages level.",
    )
    parser.add_argument(
        "--output",
        type=str,
        required=False,
        default="",
        help="Path to output file. stdout by default.",
    )
    return parser.parse_args()


def main():
    args = get_args()

    s = get_logs_check_sets_and_filters(args.old, args.new)
    run_checks_and_print_results(
        s,
        CheckType[args.level],
        file=open(args.output, "w") if args.output else sys.stdout,
    )


if __name__ == "__main__":
    main()
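The script is a thin CLI wrapper; the same comparison can be driven from Python directly. A minimal sketch, assuming the maps_generator package is importable and that the two directories below (placeholder names) hold the generator logs of an old and a new run:

    import sys

    from maps_generator.checks.default_check_set import CheckType
    from maps_generator.checks.default_check_set import get_logs_check_sets_and_filters
    from maps_generator.checks.default_check_set import run_checks_and_print_results

    # Placeholder paths to the log directories of two generator runs.
    checks = get_logs_check_sets_and_filters("logs_old", "logs_new")
    run_checks_and_print_results(checks, CheckType["hard"], file=sys.stdout)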
@@ -30,10 +30,23 @@ def norm(value):
        return abs(value)
    elif hasattr(value, "__len__"):
        return len(value)
    elif hasattr(value, "norm"):
        return value.norm()

    assert False, type(value)


def get_rel(r: ResLine) -> float:
    rel = 0.0
    if r.arrow != Arrow.zero:
        prev = norm(r.previous)
        if prev == 0:
            rel = 100.0
        else:
            rel = norm(r.diff) * 100.0 / prev
    return rel


class Check(ABC):
    def __init__(self, name: str):
        self.name = name
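get_rel expresses the size of a change as a percentage of the previous value, treating any change from zero as 100 %. A standalone restatement of that arithmetic (illustrative only; the real function works on ResLine objects):

    def relative_change(previous, diff):
        # Mirrors get_rel's arithmetic for a non-zero change: percent of the old value.
        if previous == 0:
            return 100.0  # growing from nothing is reported as a 100 % change
        return abs(diff) * 100.0 / abs(previous)

    print(relative_change(200, 60))  # 30.0  -- e.g. warnings went from 200 to 260
    print(relative_change(0, 5))     # 100.0 -- the old run had none at all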
@@ -134,14 +147,7 @@ class CompareCheck(Check):
        if silent_if_no_results and self.result.arrow == Arrow.zero:
            return ""

        rel = 0.0
        if self.result.arrow != Arrow.zero:
            rel = (
                norm(self.result.diff)
                * 100.0
                / max(norm(self.result.previous), norm(self.result.current))
            )

        rel = get_rel(self.result)
        return (
            f"{self.name}: {ROW_TO_STR[self.result.arrow]} {rel:.2f}% "
            f"[{self.format(self.result.previous)} → "
@@ -12,16 +12,6 @@ ADDR_PATTERN = re.compile(
)


class AddrInfo:
    def __init__(self, matched_percent, total, missing):
        self.matched_percent = float(matched_percent)
        self.total = float(total)
        self.matched = self.total - float(missing)

    def __str__(self):
        return f"Matched percent: {self.matched_percent}, total: {self.total}, matched: {self.matched}"


def get_addresses_check_set(old_path: str, new_path: str) -> check.CompareCheckSet:
    def do(path: str):
        log = logs_reader.Log(path)

@@ -33,11 +23,8 @@ def get_addresses_check_set(old_path: str, new_path: str) -> check.CompareCheckSet:
            return None

        d = found[0][0]
        return AddrInfo(**d)

    def op(lhs: AddrInfo, rhs: AddrInfo):
        return lhs.matched_percent - rhs.matched_percent
        return float(d["matched_percent"])

    return check.build_check_set_for_files(
        "Addresses check", old_path, new_path, do=do, op=op
        "Addresses check", old_path, new_path, ext=".log", do=do
    )

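With this change do returns the matched percentage as a plain float rather than an AddrInfo object, which makes the separate op comparator unnecessary, and ext=".log" limits the pairing to log files. The same helper can host other per-file metrics; a hypothetical sketch (the metric and names below are made up, only the call shape follows the diff):

    from maps_generator.checks import check


    def get_warning_lines_check_set(old_path: str, new_path: str) -> check.CompareCheckSet:
        def do(path: str):
            # Hypothetical metric: how many lines of a log file contain "WARNING".
            with open(path, encoding="utf-8") as f:
                return sum(1 for line in f if "WARNING" in line)

        return check.build_check_set_for_files(
            "Warning lines check", old_path, new_path, ext=".log", do=do
        )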
tools/python/maps_generator/checks/check_log_levels.py (new file, 44 lines)

@@ -0,0 +1,44 @@
import logging
from functools import lru_cache

from maps_generator.checks import check
from maps_generator.checks.logs import logs_reader
from maps_generator.generator.stages_declaration import stages


@lru_cache(maxsize=None)
def _get_log_stages(path):
    log = logs_reader.Log(path)
    return logs_reader.normalize_logs(logs_reader.split_into_stages(log))


def get_log_levels_check_set(old_path: str, new_path: str) -> check.CompareCheckSet:
    cs = check.CompareCheckSet("Log levels check")

    def make_do(level, stage_name, cache={}):
        def do(path):
            for s in _get_log_stages(path):
                if s.name == stage_name:
                    k = f"{path}:{stage_name}"
                    if k not in cache:
                        cache[k] = logs_reader.count_levels(s)

                    return cache[k][level]
            return None

        return do

    for stage_name in (
        stages.get_visible_stages_names() + stages.get_invisible_stages_names()
    ):
        for level in (logging.CRITICAL, logging.ERROR, logging.WARNING):
            cs.add_check(
                check.build_check_set_for_files(
                    f"Stage {stage_name} - {logging.getLevelName(level)} check",
                    old_path,
                    new_path,
                    ext=".log",
                    do=make_do(level, stage_name),
                )
            )
    return cs
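Note that make_do relies on the mutable-default-argument idiom: the cache dictionary is created once and shared by every closure make_do returns, so logs_reader.count_levels runs at most once per (path, stage) even though CRITICAL, ERROR and WARNING checks all probe the same stage. A tiny standalone illustration of the idiom (names are made up):

    def make_getter(key, cache={}):  # one dict, created once, shared by all getters
        def get():
            if key not in cache:
                print(f"computing {key}")  # runs only on the first access per key
                cache[key] = len(key)
            return cache[key]
        return get


    first, second = make_getter("stage_a"), make_getter("stage_a")
    first()   # prints "computing stage_a"
    second()  # served from the shared cache, nothing printed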
@@ -1,4 +1,3 @@
import os
import sys
from collections import namedtuple
from enum import Enum
@@ -6,8 +5,9 @@ from typing import Callable
from typing import Mapping

from maps_generator.checks import check
from maps_generator.checks.check_addresses import get_addresses_check_set
from maps_generator.checks.check_categories import get_categories_check_set
from maps_generator.checks.check_mwm_types import count_all_types
from maps_generator.checks.check_log_levels import get_log_levels_check_set
from maps_generator.checks.check_mwm_types import get_mwm_all_types_check_set
from maps_generator.checks.check_mwm_types import get_mwm_type_check_set
from maps_generator.checks.check_sections import get_sections_existence_check_set
@@ -43,17 +43,7 @@ def get_threshold(check_type: CheckType) -> Threshold:

def make_default_filter(threshold: Threshold):
    def default_filter(r: check.ResLine):
        if isinstance(r.diff, (int, float)):
            diff = abs(r.diff)
            previous = r.previous
            current = r.current
        else:
            assert False, type(r.diff)

        return (
            diff > threshold.abs
            and (diff * 100.0 / max(previous, current)) > threshold.rel
        )
        return check.norm(r.diff) > threshold.abs and check.get_rel(r) > threshold.rel

    return default_filter

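The rewritten default_filter reports a result only when both thresholds are exceeded: the normalised difference (check.norm) must be above threshold.abs and the relative change (check.get_rel) above threshold.rel. A small worked example with made-up numbers, assuming Threshold is a simple namedtuple with abs and rel fields:

    from collections import namedtuple

    Threshold = namedtuple("Threshold", ["abs", "rel"])  # assumed shape, for illustration


    def passes(previous, current, threshold):
        diff = abs(current - previous)
        rel = diff * 100.0 / previous if previous else 100.0
        return diff > threshold.abs and rel > threshold.rel


    # Warnings went from 200 to 260: absolute diff 60, relative change 30 %.
    print(passes(200, 260, Threshold(abs=50, rel=20.0)))   # True  -> reported
    print(passes(200, 260, Threshold(abs=100, rel=20.0)))  # False -> filtered out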
@@ -75,6 +65,15 @@ def get_mwm_check_sets_and_filters(
    }


def get_logs_check_sets_and_filters(
    old_path: str, new_path: str
) -> Mapping[check.Check, Callable]:
    return {
        get_addresses_check_set(old_path, new_path): make_default_filter,
        get_log_levels_check_set(old_path, new_path): None,
    }


def _print_header(file, header, width=100, s="="):
    stars = s * ((width - len(header)) // 2)
    rstars = stars
@@ -14,6 +14,7 @@ import maps_generator.generator.env as env
from maps_generator.generator.stages import get_stage_type
from maps_generator.utils.algo import parse_timedelta


logger = logging.getLogger(__name__)

@@ -109,6 +109,9 @@ class Stages:
        for dep in deps:
            self.dependencies[stage].add(dep)

    def get_invisible_stages_names(self) -> List[AnyStr]:
        return [get_stage_name(st) for st in self.helper_stages]

    def get_visible_stages_names(self) -> List[AnyStr]:
        """Returns all stages names except helper stages names."""
        stages = []