From d5f2026301b6b97c7ef9630947060545c5edcc79 Mon Sep 17 00:00:00 2001
From: Alexey Zakharenkov
Date: Sat, 17 Dec 2022 23:39:48 +0300
Subject: [PATCH] Run the code through flake8 and black

---
 .github/workflows/python-app.yml |  10 +-
 checkers/common.py               | 121 +++--
 checkers/compare_city_caches.py  |  21 +-
 checkers/compare_json_outputs.py |  27 +-
 css_colours.py                   | 300 +++++------
 make_all_metro_poly.py           |   2 +-
 mapsme_json_to_cities.py         |  28 +-
 process_subways.py               | 226 ++++----
 processors/__init__.py           |   6 +-
 processors/gtfs.py               |   3 +-
 processors/mapsme.py             | 177 +++----
 stop_areas/make_stop_areas.py    | 186 +++----
 stop_areas/make_tram_areas.py    | 161 +++---
 stop_areas/serve.py              |  22 +-
 subway_io.py                     | 160 +++---
 subway_structure.py              | 868 ++++++++++++++++---------------
 tests/sample_data.py             |  46 +-
 tests/test_build_tracks.py       |   8 +-
 tests/test_gtfs_processor.py     |   4 +-
 tests/test_projection.py         |   6 +-
 v2h_templates.py                 | 116 +++--
 validation_to_html.py            | 152 +++---
 22 files changed, 1377 insertions(+), 1273 deletions(-)

diff --git a/.github/workflows/python-app.yml b/.github/workflows/python-app.yml
index fa8b992..023c314 100644
--- a/.github/workflows/python-app.yml
+++ b/.github/workflows/python-app.yml
@@ -26,14 +26,14 @@ jobs:
     - name: Install dependencies
       run: |
         python -m pip install --upgrade pip
-        pip install flake8
+        pip install flake8 black
        if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
     - name: Lint with flake8
       run: |
-        # stop the build if there are Python syntax errors or undefined names
-        flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
-        # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
-        flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
+        flake8
+    - name: Check with black
+      run: |
+        black --check --line-length 79 .
- name: Test with unittest run: | python -m unittest discover tests diff --git a/checkers/common.py b/checkers/common.py index b760b62..d336435 100644 --- a/checkers/common.py +++ b/checkers/common.py @@ -1,6 +1,6 @@ +import functools import logging import math -import functools """A coordinate of a station precision of which we must take into account @@ -16,42 +16,48 @@ def coords_eq(lon1, lat1, lon2, lat2): def osm_id_comparator(el): """This function is used as key for sorting lists of - OSM-originated objects + OSM-originated objects """ - return (el['osm_type'], el['osm_id']) + return (el["osm_type"], el["osm_id"]) def itinerary_comparator(itinerary): - "This function is used as key for sorting itineraries in a route""" - return (itinerary['stops'], itinerary['interval']) + """This function is used as key for sorting itineraries in a route""" + return (itinerary["stops"], itinerary["interval"]) def compare_stops(stop0, stop1): """Compares json of two stops in route""" - stop_keys = ('name', 'int_name', 'id', 'osm_id', 'osm_type') + stop_keys = ("name", "int_name", "id", "osm_id", "osm_type") stop0_props = tuple(stop0[k] for k in stop_keys) stop1_props = tuple(stop1[k] for k in stop_keys) if stop0_props != stop1_props: - logging.debug("Different stops properties: %s, %s", - stop0_props, stop1_props) + logging.debug( + "Different stops properties: %s, %s", stop0_props, stop1_props + ) return False - if not coords_eq(stop0['lon'], stop0['lat'], - stop1['lon'], stop1['lat']): - logging.debug("Different stops coordinates: %s (%f, %f), %s (%f, %f)", - stop0_props, stop0['lon'], stop0['lat'], - stop1_props, stop1['lon'], stop1['lat']) + if not coords_eq(stop0["lon"], stop0["lat"], stop1["lon"], stop1["lat"]): + logging.debug( + "Different stops coordinates: %s (%f, %f), %s (%f, %f)", + stop0_props, + stop0["lon"], + stop0["lat"], + stop1_props, + stop1["lon"], + stop1["lat"], + ) return False - entrances0 = sorted(stop0['entrances'], key=osm_id_comparator) - entrances1 = sorted(stop1['entrances'], key=osm_id_comparator) + entrances0 = sorted(stop0["entrances"], key=osm_id_comparator) + entrances1 = sorted(stop1["entrances"], key=osm_id_comparator) if entrances0 != entrances1: logging.debug("Different stop entrances") return False - exits0 = sorted(stop0['exits'], key=osm_id_comparator) - exits1 = sorted(stop1['exits'], key=osm_id_comparator) + exits0 = sorted(stop0["exits"], key=osm_id_comparator) + exits1 = sorted(stop1["exits"], key=osm_id_comparator) if exits0 != exits1: logging.debug("Different stop exits") return False @@ -61,21 +67,24 @@ def compare_stops(stop0, stop1): def compare_transfers(transfers0, transfers1): """Compares two arrays of transfers of the form - [(stop1_uid, stop2_uid, time), ...] + [(stop1_uid, stop2_uid, time), ...] 
""" if len(transfers0) != len(transfers1): - logging.debug("Different len(transfers): %d != %d", - len(transfers0), len(transfers1)) + logging.debug( + "Different len(transfers): %d != %d", + len(transfers0), + len(transfers1), + ) return False - transfers0 = [tuple([t[0], t[1], t[2]]) - if t[0] < t[1] else - tuple([t[1], t[0], t[2]]) - for t in transfers0] - transfers1 = [tuple([t[0], t[1], t[2]]) - if t[0] < t[1] else - tuple([t[1], t[0], t[2]]) - for t in transfers1] + transfers0 = [ + tuple([t[0], t[1], t[2]]) if t[0] < t[1] else tuple([t[1], t[0], t[2]]) + for t in transfers0 + ] + transfers1 = [ + tuple([t[0], t[1], t[2]]) if t[0] < t[1] else tuple([t[1], t[0], t[2]]) + for t in transfers1 + ] transfers0.sort() transfers1.sort() @@ -84,8 +93,9 @@ def compare_transfers(transfers0, transfers1): for tr0, tr1 in zip(transfers0, transfers1): if tr0 != tr1: if diff_cnt == 0: - logging.debug("First pair of different transfers: %s, %s", - tr0, tr1) + logging.debug( + "First pair of different transfers: %s, %s", tr0, tr1 + ) diff_cnt += 1 if diff_cnt: logging.debug("Different transfers number = %d", diff_cnt) @@ -95,46 +105,55 @@ def compare_transfers(transfers0, transfers1): def compare_networks(network0, network1): - if network0['agency_id'] != network1['agency_id']: - logging.debug("Different agency_id at route '%s'", - network0['network']) + if network0["agency_id"] != network1["agency_id"]: + logging.debug("Different agency_id at route '%s'", network0["network"]) return False - route_ids0 = sorted(x['route_id'] for x in network0['routes']) - route_ids1 = sorted(x['route_id'] for x in network1['routes']) + route_ids0 = sorted(x["route_id"] for x in network0["routes"]) + route_ids1 = sorted(x["route_id"] for x in network1["routes"]) if route_ids0 != route_ids1: - logging.debug("Different route_ids: %s != %s", - route_ids0, route_ids1) + logging.debug("Different route_ids: %s != %s", route_ids0, route_ids1) return False - routes0 = sorted(network0['routes'], key=lambda x: x['route_id']) - routes1 = sorted(network1['routes'], key=lambda x: x['route_id']) + routes0 = sorted(network0["routes"], key=lambda x: x["route_id"]) + routes1 = sorted(network1["routes"], key=lambda x: x["route_id"]) # Keys to compare routes. 'name' key is omitted since RouteMaster # can get its name from one of its Routes unpredictably. 
- route_keys = ('type', 'ref', 'colour', 'route_id') + route_keys = ("type", "ref", "colour", "route_id") for route0, route1 in zip(routes0, routes1): route0_props = tuple(route0[k] for k in route_keys) route1_props = tuple(route1[k] for k in route_keys) if route0_props != route1_props: - logging.debug("Route props of '%s' are different: %s, %s", - route0['route_id'], route0_props, route1_props) + logging.debug( + "Route props of '%s' are different: %s, %s", + route0["route_id"], + route0_props, + route1_props, + ) return False - itineraries0 = sorted(route0['itineraries'], key=itinerary_comparator) - itineraries1 = sorted(route1['itineraries'], key=itinerary_comparator) + itineraries0 = sorted(route0["itineraries"], key=itinerary_comparator) + itineraries1 = sorted(route1["itineraries"], key=itinerary_comparator) for itin0, itin1 in zip(itineraries0, itineraries1): - if itin0['interval'] != itin1['interval']: - logging.debug("Different interval: %d != %d at route %s '%s'", - itin0['interval'], itin1['interval'], - route0['route_id'], route0['name']) + if itin0["interval"] != itin1["interval"]: + logging.debug( + "Different interval: %d != %d at route %s '%s'", + itin0["interval"], + itin1["interval"], + route0["route_id"], + route0["name"], + ) return False - if itin0['stops'] != itin1['stops']: - logging.debug("Different stops at route %s '%s'", - route0['route_id'], route0['name']) + if itin0["stops"] != itin1["stops"]: + logging.debug( + "Different stops at route %s '%s'", + route0["route_id"], + route0["name"], + ) return False return True diff --git a/checkers/compare_city_caches.py b/checkers/compare_city_caches.py index c6c81bc..edba7d5 100644 --- a/checkers/compare_city_caches.py +++ b/checkers/compare_city_caches.py @@ -10,10 +10,11 @@ affect the process_subways.py output really doesn't change it. 
""" -import sys import json import logging -from common import compare_stops, compare_transfers, compare_networks +import sys + +from common import compare_networks, compare_stops, compare_transfers def compare_jsons(cache0, cache1): @@ -28,21 +29,21 @@ def compare_jsons(cache0, cache1): for name in city_names0: city0 = cache0[name] city1 = cache1[name] - if not compare_networks(city0['network'], city1['network']): + if not compare_networks(city0["network"], city1["network"]): return False - stop_ids0 = sorted(city0['stops'].keys()) - stop_ids1 = sorted(city1['stops'].keys()) + stop_ids0 = sorted(city0["stops"].keys()) + stop_ids1 = sorted(city1["stops"].keys()) if stop_ids0 != stop_ids1: logging.debug("Different stop_ids") return False - stops0 = [v for k, v in sorted(city0['stops'].items())] - stops1 = [v for k, v in sorted(city1['stops'].items())] + stops0 = [v for k, v in sorted(city0["stops"].items())] + stops1 = [v for k, v in sorted(city1["stops"].items())] for stop0, stop1 in zip(stops0, stops1): if not compare_stops(stop0, stop1): return False - if not compare_transfers(city0['transfers'], city1['transfers']): + if not compare_transfers(city0["transfers"], city1["transfers"]): return False return True @@ -57,8 +58,8 @@ if __name__ == "__main__": path0, path1 = sys.argv[1:3] - j0 = json.load(open(path0, encoding='utf-8')) - j1 = json.load(open(path1, encoding='utf-8')) + j0 = json.load(open(path0, encoding="utf-8")) + j1 = json.load(open(path1, encoding="utf-8")) equal = compare_jsons(j0, j1) diff --git a/checkers/compare_json_outputs.py b/checkers/compare_json_outputs.py index 8ded974..2c68f4b 100644 --- a/checkers/compare_json_outputs.py +++ b/checkers/compare_json_outputs.py @@ -10,38 +10,39 @@ affect the process_subways.py output really doesn't change it. 
""" -import sys import json import logging -from common import compare_stops, compare_transfers, compare_networks +import sys + +from common import compare_networks, compare_stops, compare_transfers def compare_jsons(result0, result1): """Compares two objects which are results of subway generation""" - network_names0 = sorted([x['network'] for x in result0['networks']]) - network_names1 = sorted([x['network'] for x in result1['networks']]) + network_names0 = sorted([x["network"] for x in result0["networks"]]) + network_names1 = sorted([x["network"] for x in result1["networks"]]) if network_names0 != network_names1: logging.debug("Different list of network names!") return False - networks0 = sorted(result0['networks'], key=lambda x: x['network']) - networks1 = sorted(result1['networks'], key=lambda x: x['network']) + networks0 = sorted(result0["networks"], key=lambda x: x["network"]) + networks1 = sorted(result1["networks"], key=lambda x: x["network"]) for network0, network1 in zip(networks0, networks1): if not compare_networks(network0, network1): return False - stop_ids0 = sorted(x['id'] for x in result0['stops']) - stop_ids1 = sorted(x['id'] for x in result1['stops']) + stop_ids0 = sorted(x["id"] for x in result0["stops"]) + stop_ids1 = sorted(x["id"] for x in result1["stops"]) if stop_ids0 != stop_ids1: logging.debug("Different stop_ids") return False - stops0 = sorted(result0['stops'], key=lambda x: x['id']) - stops1 = sorted(result1['stops'], key=lambda x: x['id']) + stops0 = sorted(result0["stops"], key=lambda x: x["id"]) + stops1 = sorted(result1["stops"], key=lambda x: x["id"]) for stop0, stop1 in zip(stops0, stops1): if not compare_stops(stop0, stop1): return False - if not compare_transfers(result0['transfers'], result1['transfers']): + if not compare_transfers(result0["transfers"], result1["transfers"]): return False return True @@ -56,8 +57,8 @@ if __name__ == "__main__": path0, path1 = sys.argv[1:3] - j0 = json.load(open(path0, encoding='utf-8')) - j1 = json.load(open(path1, encoding='utf-8')) + j0 = json.load(open(path0, encoding="utf-8")) + j1 = json.load(open(path1, encoding="utf-8")) equal = compare_jsons(j0, j1) diff --git a/css_colours.py b/css_colours.py index 0dea3e9..7218054 100644 --- a/css_colours.py +++ b/css_colours.py @@ -2,153 +2,153 @@ import re # Source: https://www.w3.org/TR/css3-color/#svg-color CSS_COLOURS = { - 'aliceblue': '#f0f8ff', - 'antiquewhite': '#faebd7', - 'aqua': '#00ffff', - 'aquamarine': '#7fffd4', - 'azure': '#f0ffff', - 'beige': '#f5f5dc', - 'bisque': '#ffe4c4', - 'black': '#000000', - 'blanchedalmond': '#ffebcd', - 'blue': '#0000ff', - 'blueviolet': '#8a2be2', - 'brown': '#a52a2a', - 'burlywood': '#deb887', - 'cadetblue': '#5f9ea0', - 'chartreuse': '#7fff00', - 'chocolate': '#d2691e', - 'coral': '#ff7f50', - 'cornflowerblue': '#6495ed', - 'cornsilk': '#fff8dc', - 'crimson': '#dc143c', - 'cyan': '#00ffff', - 'darkblue': '#00008b', - 'darkcyan': '#008b8b', - 'darkgoldenrod': '#b8860b', - 'darkgray': '#a9a9a9', - 'darkgreen': '#006400', - 'darkgrey': '#a9a9a9', - 'darkkhaki': '#bdb76b', - 'darkmagenta': '#8b008b', - 'darkolivegreen': '#556b2f', - 'darkorange': '#ff8c00', - 'darkorchid': '#9932cc', - 'darkred': '#8b0000', - 'darksalmon': '#e9967a', - 'darkseagreen': '#8fbc8f', - 'darkslateblue': '#483d8b', - 'darkslategray': '#2f4f4f', - 'darkslategrey': '#2f4f4f', - 'darkturquoise': '#00ced1', - 'darkviolet': '#9400d3', - 'deeppink': '#ff1493', - 'deepskyblue': '#00bfff', - 'dimgray': '#696969', - 'dimgrey': '#696969', - 'dodgerblue': 
'#1e90ff', - 'firebrick': '#b22222', - 'floralwhite': '#fffaf0', - 'forestgreen': '#228b22', - 'fuchsia': '#ff00ff', - 'gainsboro': '#dcdcdc', - 'ghostwhite': '#f8f8ff', - 'gold': '#ffd700', - 'goldenrod': '#daa520', - 'gray': '#808080', - 'green': '#008000', - 'greenyellow': '#adff2f', - 'grey': '#808080', - 'honeydew': '#f0fff0', - 'hotpink': '#ff69b4', - 'indianred': '#cd5c5c', - 'indigo': '#4b0082', - 'ivory': '#fffff0', - 'khaki': '#f0e68c', - 'lavender': '#e6e6fa', - 'lavenderblush': '#fff0f5', - 'lawngreen': '#7cfc00', - 'lemonchiffon': '#fffacd', - 'lightblue': '#add8e6', - 'lightcoral': '#f08080', - 'lightcyan': '#e0ffff', - 'lightgoldenrodyellow': '#fafad2', - 'lightgray': '#d3d3d3', - 'lightgreen': '#90ee90', - 'lightgrey': '#d3d3d3', - 'lightpink': '#ffb6c1', - 'lightsalmon': '#ffa07a', - 'lightseagreen': '#20b2aa', - 'lightskyblue': '#87cefa', - 'lightslategray': '#778899', - 'lightslategrey': '#778899', - 'lightsteelblue': '#b0c4de', - 'lightyellow': '#ffffe0', - 'lime': '#00ff00', - 'limegreen': '#32cd32', - 'linen': '#faf0e6', - 'magenta': '#ff00ff', - 'maroon': '#800000', - 'mediumaquamarine': '#66cdaa', - 'mediumblue': '#0000cd', - 'mediumorchid': '#ba55d3', - 'mediumpurple': '#9370db', - 'mediumseagreen': '#3cb371', - 'mediumslateblue': '#7b68ee', - 'mediumspringgreen': '#00fa9a', - 'mediumturquoise': '#48d1cc', - 'mediumvioletred': '#c71585', - 'midnightblue': '#191970', - 'mintcream': '#f5fffa', - 'mistyrose': '#ffe4e1', - 'moccasin': '#ffe4b5', - 'navajowhite': '#ffdead', - 'navy': '#000080', - 'oldlace': '#fdf5e6', - 'olive': '#808000', - 'olivedrab': '#6b8e23', - 'orange': '#ffa500', - 'orangered': '#ff4500', - 'orchid': '#da70d6', - 'palegoldenrod': '#eee8aa', - 'palegreen': '#98fb98', - 'paleturquoise': '#afeeee', - 'palevioletred': '#db7093', - 'papayawhip': '#ffefd5', - 'peachpuff': '#ffdab9', - 'peru': '#cd853f', - 'pink': '#ffc0cb', - 'plum': '#dda0dd', - 'powderblue': '#b0e0e6', - 'purple': '#800080', - 'red': '#ff0000', - 'rosybrown': '#bc8f8f', - 'royalblue': '#4169e1', - 'saddlebrown': '#8b4513', - 'salmon': '#fa8072', - 'sandybrown': '#f4a460', - 'seagreen': '#2e8b57', - 'seashell': '#fff5ee', - 'sienna': '#a0522d', - 'silver': '#c0c0c0', - 'skyblue': '#87ceeb', - 'slateblue': '#6a5acd', - 'slategray': '#708090', - 'slategrey': '#708090', - 'snow': '#fffafa', - 'springgreen': '#00ff7f', - 'steelblue': '#4682b4', - 'tan': '#d2b48c', - 'teal': '#008080', - 'thistle': '#d8bfd8', - 'tomato': '#ff6347', - 'turquoise': '#40e0d0', - 'violet': '#ee82ee', - 'wheat': '#f5deb3', - 'white': '#ffffff', - 'whitesmoke': '#f5f5f5', - 'yellow': '#ffff00', - 'yellowgreen': '#9acd32', + "aliceblue": "#f0f8ff", + "antiquewhite": "#faebd7", + "aqua": "#00ffff", + "aquamarine": "#7fffd4", + "azure": "#f0ffff", + "beige": "#f5f5dc", + "bisque": "#ffe4c4", + "black": "#000000", + "blanchedalmond": "#ffebcd", + "blue": "#0000ff", + "blueviolet": "#8a2be2", + "brown": "#a52a2a", + "burlywood": "#deb887", + "cadetblue": "#5f9ea0", + "chartreuse": "#7fff00", + "chocolate": "#d2691e", + "coral": "#ff7f50", + "cornflowerblue": "#6495ed", + "cornsilk": "#fff8dc", + "crimson": "#dc143c", + "cyan": "#00ffff", + "darkblue": "#00008b", + "darkcyan": "#008b8b", + "darkgoldenrod": "#b8860b", + "darkgray": "#a9a9a9", + "darkgreen": "#006400", + "darkgrey": "#a9a9a9", + "darkkhaki": "#bdb76b", + "darkmagenta": "#8b008b", + "darkolivegreen": "#556b2f", + "darkorange": "#ff8c00", + "darkorchid": "#9932cc", + "darkred": "#8b0000", + "darksalmon": "#e9967a", + "darkseagreen": "#8fbc8f", + 
"darkslateblue": "#483d8b", + "darkslategray": "#2f4f4f", + "darkslategrey": "#2f4f4f", + "darkturquoise": "#00ced1", + "darkviolet": "#9400d3", + "deeppink": "#ff1493", + "deepskyblue": "#00bfff", + "dimgray": "#696969", + "dimgrey": "#696969", + "dodgerblue": "#1e90ff", + "firebrick": "#b22222", + "floralwhite": "#fffaf0", + "forestgreen": "#228b22", + "fuchsia": "#ff00ff", + "gainsboro": "#dcdcdc", + "ghostwhite": "#f8f8ff", + "gold": "#ffd700", + "goldenrod": "#daa520", + "gray": "#808080", + "green": "#008000", + "greenyellow": "#adff2f", + "grey": "#808080", + "honeydew": "#f0fff0", + "hotpink": "#ff69b4", + "indianred": "#cd5c5c", + "indigo": "#4b0082", + "ivory": "#fffff0", + "khaki": "#f0e68c", + "lavender": "#e6e6fa", + "lavenderblush": "#fff0f5", + "lawngreen": "#7cfc00", + "lemonchiffon": "#fffacd", + "lightblue": "#add8e6", + "lightcoral": "#f08080", + "lightcyan": "#e0ffff", + "lightgoldenrodyellow": "#fafad2", + "lightgray": "#d3d3d3", + "lightgreen": "#90ee90", + "lightgrey": "#d3d3d3", + "lightpink": "#ffb6c1", + "lightsalmon": "#ffa07a", + "lightseagreen": "#20b2aa", + "lightskyblue": "#87cefa", + "lightslategray": "#778899", + "lightslategrey": "#778899", + "lightsteelblue": "#b0c4de", + "lightyellow": "#ffffe0", + "lime": "#00ff00", + "limegreen": "#32cd32", + "linen": "#faf0e6", + "magenta": "#ff00ff", + "maroon": "#800000", + "mediumaquamarine": "#66cdaa", + "mediumblue": "#0000cd", + "mediumorchid": "#ba55d3", + "mediumpurple": "#9370db", + "mediumseagreen": "#3cb371", + "mediumslateblue": "#7b68ee", + "mediumspringgreen": "#00fa9a", + "mediumturquoise": "#48d1cc", + "mediumvioletred": "#c71585", + "midnightblue": "#191970", + "mintcream": "#f5fffa", + "mistyrose": "#ffe4e1", + "moccasin": "#ffe4b5", + "navajowhite": "#ffdead", + "navy": "#000080", + "oldlace": "#fdf5e6", + "olive": "#808000", + "olivedrab": "#6b8e23", + "orange": "#ffa500", + "orangered": "#ff4500", + "orchid": "#da70d6", + "palegoldenrod": "#eee8aa", + "palegreen": "#98fb98", + "paleturquoise": "#afeeee", + "palevioletred": "#db7093", + "papayawhip": "#ffefd5", + "peachpuff": "#ffdab9", + "peru": "#cd853f", + "pink": "#ffc0cb", + "plum": "#dda0dd", + "powderblue": "#b0e0e6", + "purple": "#800080", + "red": "#ff0000", + "rosybrown": "#bc8f8f", + "royalblue": "#4169e1", + "saddlebrown": "#8b4513", + "salmon": "#fa8072", + "sandybrown": "#f4a460", + "seagreen": "#2e8b57", + "seashell": "#fff5ee", + "sienna": "#a0522d", + "silver": "#c0c0c0", + "skyblue": "#87ceeb", + "slateblue": "#6a5acd", + "slategray": "#708090", + "slategrey": "#708090", + "snow": "#fffafa", + "springgreen": "#00ff7f", + "steelblue": "#4682b4", + "tan": "#d2b48c", + "teal": "#008080", + "thistle": "#d8bfd8", + "tomato": "#ff6347", + "turquoise": "#40e0d0", + "violet": "#ee82ee", + "wheat": "#f5deb3", + "white": "#ffffff", + "whitesmoke": "#f5f5f5", + "yellow": "#ffff00", + "yellowgreen": "#9acd32", } @@ -158,8 +158,8 @@ def normalize_colour(c): c = c.strip().lower() if c in CSS_COLOURS: return CSS_COLOURS[c] - if re.match(r'^#?[0-9a-f]{3}([0-9a-f]{3})?$', c): + if re.match(r"^#?[0-9a-f]{3}([0-9a-f]{3})?$", c): if len(c) == 4: - return c[0]+c[1]+c[1]+c[2]+c[2]+c[3]+c[3] + return c[0] + c[1] + c[1] + c[2] + c[2] + c[3] + c[3] return c - raise ValueError('Unknown colour code: {}'.format(c)) + raise ValueError("Unknown colour code: {}".format(c)) diff --git a/make_all_metro_poly.py b/make_all_metro_poly.py index 610892d..05a01b1 100644 --- a/make_all_metro_poly.py +++ b/make_all_metro_poly.py @@ -31,5 +31,5 @@ def 
make_disjoint_metro_polygons(): print("END") -if __name__ == '__main__': +if __name__ == "__main__": make_disjoint_metro_polygons() diff --git a/mapsme_json_to_cities.py b/mapsme_json_to_cities.py index 4b8fea8..043d0b6 100644 --- a/mapsme_json_to_cities.py +++ b/mapsme_json_to_cities.py @@ -4,25 +4,29 @@ import json from process_subways import download_cities -if __name__ == '__main__': +if __name__ == "__main__": arg_parser = argparse.ArgumentParser( - description=""" - This script generates a list of good/all network names. - It is used by subway render to generate the list of network at frontend. - It uses two sources: a mapsme.json validator output with good networks, and - a google spreadsheet with networks for the process_subways.download_cities() - function.""", + description=( + """This script generates a list of good/all network names. It is + used by subway render to generate the list of network at frontend. + It uses two sources: a mapsme.json validator output with good + networks, and a google spreadsheet with networks for the + process_subways.download_cities() function.""" + ), formatter_class=argparse.RawTextHelpFormatter, ) arg_parser.add_argument( - 'subway_json_file', - type=argparse.FileType('r'), - help="Validator output defined by -o option of process_subways.py script", + "subway_json_file", + type=argparse.FileType("r"), + help=( + "Validator output defined by -o option " + "of process_subways.py script", + ), ) arg_parser.add_argument( - '--with-bad', + "--with-bad", action="store_true", help="Whether to include cities validation of which was failed", ) @@ -34,7 +38,7 @@ if __name__ == '__main__': subway_json = json.load(subway_json_file) good_cities = set( - n.get('network', n.get('title')) for n in subway_json['networks'] + n.get("network", n.get("title")) for n in subway_json["networks"] ) cities = download_cities() diff --git a/process_subways.py b/process_subways.py index 74d74bb..6b71afd 100755 --- a/process_subways.py +++ b/process_subways.py @@ -11,7 +11,6 @@ import urllib.parse import urllib.request import processors - from subway_io import ( dump_yaml, load_xml, @@ -30,30 +29,30 @@ from subway_structure import ( def overpass_request(overground, overpass_api, bboxes): - query = '[out:json][timeout:1000];(' + query = "[out:json][timeout:1000];(" modes = MODES_OVERGROUND if overground else MODES_RAPID for bbox in bboxes: - bbox_part = '({})'.format(','.join(str(coord) for coord in bbox)) - query += '(' + bbox_part = "({})".format(",".join(str(coord) for coord in bbox)) + query += "(" for mode in modes: query += 'rel[route="{}"]{};'.format(mode, bbox_part) - query += ');' - query += 'rel(br)[type=route_master];' + query += ");" + query += "rel(br)[type=route_master];" if not overground: - query += 'node[railway=subway_entrance]{};'.format(bbox_part) - query += 'rel[public_transport=stop_area]{};'.format(bbox_part) + query += "node[railway=subway_entrance]{};".format(bbox_part) + query += "rel[public_transport=stop_area]{};".format(bbox_part) query += ( - 'rel(br)[type=public_transport][public_transport=stop_area_group];' + "rel(br)[type=public_transport][public_transport=stop_area_group];" ) - query += ');(._;>>;);out body center qt;' - logging.debug('Query: %s', query) - url = '{}?data={}'.format(overpass_api, urllib.parse.quote(query)) + query += ");(._;>>;);out body center qt;" + logging.info("Query: %s", query) + url = "{}?data={}".format(overpass_api, urllib.parse.quote(query)) response = urllib.request.urlopen(url, timeout=1000) if response.getcode() != 
200: raise Exception( - 'Failed to query Overpass API: HTTP {}'.format(response.getcode()) + "Failed to query Overpass API: HTTP {}".format(response.getcode()) ) - return json.load(response)['elements'] + return json.load(response)["elements"] def multi_overpass(overground, overpass_api, bboxes): @@ -63,16 +62,13 @@ def multi_overpass(overground, overpass_api, bboxes): for i in range(0, len(bboxes) + SLICE_SIZE - 1, SLICE_SIZE): if i > 0: time.sleep(INTERREQUEST_WAIT) - result.extend( - overpass_request( - overground, overpass_api, bboxes[i : i + SLICE_SIZE] - ) - ) + bboxes_i = bboxes[i : i + SLICE_SIZE] # noqa E203 + result.extend(overpass_request(overground, overpass_api, bboxes_i)) return result def slugify(name): - return re.sub(r'[^a-z0-9_-]+', '', name.lower().replace(' ', '_')) + return re.sub(r"[^a-z0-9_-]+", "", name.lower().replace(" ", "_")) def calculate_centers(elements): @@ -89,13 +85,13 @@ def calculate_centers(elements): def calculate_way_center(el): # If element has been queried via overpass-api with 'out center;' # clause then ways already have 'center' attribute - if 'center' in el: - ways[el['id']] = (el['center']['lat'], el['center']['lon']) + if "center" in el: + ways[el["id"]] = (el["center"]["lat"], el["center"]["lon"]) return center = [0, 0] count = 0 - way_nodes = el['nodes'] - way_nodes_len = len(el['nodes']) + way_nodes = el["nodes"] + way_nodes_len = len(el["nodes"]) for i, nd in enumerate(way_nodes): if nd not in nodes: continue @@ -110,20 +106,20 @@ def calculate_centers(elements): center[1] += nodes[nd][1] count += 1 if count > 0: - el['center'] = {'lat': center[0] / count, 'lon': center[1] / count} - ways[el['id']] = (el['center']['lat'], el['center']['lon']) + el["center"] = {"lat": center[0] / count, "lon": center[1] / count} + ways[el["id"]] = (el["center"]["lat"], el["center"]["lon"]) def calculate_relation_center(el): # If element has been queried via overpass-api with 'out center;' # clause then some relations already have 'center' attribute - if 'center' in el: - relations[el['id']] = (el['center']['lat'], el['center']['lon']) + if "center" in el: + relations[el["id"]] = (el["center"]["lat"], el["center"]["lon"]) return True center = [0, 0] count = 0 - for m in el.get('members', []): - if m['type'] == 'relation' and m['ref'] not in relations: - if m['ref'] in empty_relations: + for m in el.get("members", []): + if m["type"] == "relation" and m["ref"] not in relations: + if m["ref"] in empty_relations: # Ignore empty child relations continue else: @@ -131,31 +127,31 @@ def calculate_centers(elements): return False member_container = ( nodes - if m['type'] == 'node' + if m["type"] == "node" else ways - if m['type'] == 'way' + if m["type"] == "way" else relations ) - if m['ref'] in member_container: - center[0] += member_container[m['ref']][0] - center[1] += member_container[m['ref']][1] + if m["ref"] in member_container: + center[0] += member_container[m["ref"]][0] + center[1] += member_container[m["ref"]][1] count += 1 if count == 0: - empty_relations.add(el['id']) + empty_relations.add(el["id"]) else: - el['center'] = {'lat': center[0] / count, 'lon': center[1] / count} - relations[el['id']] = (el['center']['lat'], el['center']['lon']) + el["center"] = {"lat": center[0] / count, "lon": center[1] / count} + relations[el["id"]] = (el["center"]["lat"], el["center"]["lon"]) return True relations_without_center = [] for el in elements: - if el['type'] == 'node': - nodes[el['id']] = (el['lat'], el['lon']) - elif el['type'] == 'way': - if 'nodes' in el: + 
if el["type"] == "node": + nodes[el["id"]] = (el["lat"], el["lon"]) + elif el["type"] == "way": + if "nodes" in el: calculate_way_center(el) - elif el['type'] == 'relation': + elif el["type"] == "relation": if not calculate_relation_center(el): relations_without_center.append(el) @@ -173,14 +169,14 @@ def calculate_centers(elements): logging.error( "Cannot calculate center for the relations (%d in total): %s%s", len(relations_without_center), - ', '.join(str(rel['id']) for rel in relations_without_center[:20]), + ", ".join(str(rel["id"]) for rel in relations_without_center[:20]), ", ..." if len(relations_without_center) > 20 else "", ) if empty_relations: logging.warning( "Empty relations (%d in total): %s%s", len(empty_relations), - ', '.join(str(x) for x in list(empty_relations)[:20]), + ", ".join(str(x) for x in list(empty_relations)[:20]), ", ..." if len(empty_relations) > 20 else "", ) @@ -223,72 +219,72 @@ def validate_cities(cities): def main(): parser = argparse.ArgumentParser() parser.add_argument( - '-i', - '--source', - help='File to write backup of OSM data, or to read data from', + "-i", + "--source", + help="File to write backup of OSM data, or to read data from", ) parser.add_argument( - '-x', '--xml', help='OSM extract with routes, to read data from' + "-x", "--xml", help="OSM extract with routes, to read data from" ) parser.add_argument( - '--overpass-api', - default='http://overpass-api.de/api/interpreter', + "--overpass-api", + default="http://overpass-api.de/api/interpreter", help="Overpass API URL", ) parser.add_argument( - '-q', - '--quiet', - action='store_true', - help='Show only warnings and errors', + "-q", + "--quiet", + action="store_true", + help="Show only warnings and errors", ) parser.add_argument( - '-c', '--city', help='Validate only a single city or a country' + "-c", "--city", help="Validate only a single city or a country" ) parser.add_argument( - '-t', - '--overground', - action='store_true', - help='Process overground transport instead of subways', + "-t", + "--overground", + action="store_true", + help="Process overground transport instead of subways", ) parser.add_argument( - '-e', - '--entrances', - type=argparse.FileType('w', encoding='utf-8'), - help='Export unused subway entrances as GeoJSON here', + "-e", + "--entrances", + type=argparse.FileType("w", encoding="utf-8"), + help="Export unused subway entrances as GeoJSON here", ) parser.add_argument( - '-l', - '--log', - type=argparse.FileType('w', encoding='utf-8'), - help='Validation JSON file name', + "-l", + "--log", + type=argparse.FileType("w", encoding="utf-8"), + help="Validation JSON file name", ) for processor_name, processor in inspect.getmembers( - processors, inspect.ismodule + processors, inspect.ismodule ): if not processor_name.startswith("_"): parser.add_argument( - f'--output-{processor_name}', + f"--output-{processor_name}", help=( - 'Processed metro systems output filename ' - f'in {processor_name.upper()} format' + "Processed metro systems output filename " + f"in {processor_name.upper()} format" ), ) - parser.add_argument('--cache', help='Cache file name for processed data') + parser.add_argument("--cache", help="Cache file name for processed data") parser.add_argument( - '-r', '--recovery-path', help='Cache file name for error recovery' + "-r", "--recovery-path", help="Cache file name for error recovery" ) parser.add_argument( - '-d', '--dump', help='Make a YAML file for a city data' + "-d", "--dump", help="Make a YAML file for a city data" ) parser.add_argument( - '-j', 
'--geojson', help='Make a GeoJSON file for a city data' + "-j", "--geojson", help="Make a GeoJSON file for a city data" ) parser.add_argument( - '--crude', - action='store_true', - help='Do not use OSM railway geometry for GeoJSON', + "--crude", + action="store_true", + help="Do not use OSM railway geometry for GeoJSON", ) options = parser.parse_args() @@ -298,8 +294,8 @@ def main(): log_level = logging.INFO logging.basicConfig( level=log_level, - datefmt='%H:%M:%S', - format='%(asctime)s %(levelname)-7s %(message)s', + datefmt="%H:%M:%S", + format="%(asctime)s %(levelname)-7s %(message)s", ) # Downloading cities from Google Spreadsheets @@ -311,7 +307,7 @@ def main(): if c.name == options.city or c.country == options.city ] if not cities: - logging.error('No cities to process') + logging.error("No cities to process") sys.exit(2) # Augment cities with recovery data @@ -321,59 +317,59 @@ def main(): for city in cities: city.recovery_data = recovery_data.get(city.name, None) - logging.info('Read %s metro networks', len(cities)) + logging.info("Read %s metro networks", len(cities)) # Reading cached json, loading XML or querying Overpass API if options.source and os.path.exists(options.source): - logging.info('Reading %s', options.source) - with open(options.source, 'r') as f: + logging.info("Reading %s", options.source) + with open(options.source, "r") as f: osm = json.load(f) - if 'elements' in osm: - osm = osm['elements'] + if "elements" in osm: + osm = osm["elements"] calculate_centers(osm) elif options.xml: - logging.info('Reading %s', options.xml) + logging.info("Reading %s", options.xml) osm = load_xml(options.xml) calculate_centers(osm) if options.source: - with open(options.source, 'w', encoding='utf-8') as f: + with open(options.source, "w", encoding="utf-8") as f: json.dump(osm, f) else: if len(cities) > 10: logging.error( - 'Would not download that many cities from Overpass API, ' - 'choose a smaller set' + "Would not download that many cities from Overpass API, " + "choose a smaller set" ) sys.exit(3) bboxes = [c.bbox for c in cities] - logging.info('Downloading data from Overpass API') + logging.info("Downloading data from Overpass API") osm = multi_overpass(options.overground, options.overpass_api, bboxes) calculate_centers(osm) if options.source: - with open(options.source, 'w', encoding='utf-8') as f: + with open(options.source, "w", encoding="utf-8") as f: json.dump(osm, f) - logging.info('Downloaded %s elements', len(osm)) + logging.info("Downloaded %s elements", len(osm)) - logging.info('Sorting elements by city') + logging.info("Sorting elements by city") add_osm_elements_to_cities(osm, cities) - logging.info('Building routes for each city') + logging.info("Building routes for each city") good_cities = validate_cities(cities) - logging.info('Finding transfer stations') + logging.info("Finding transfer stations") transfers = find_transfers(osm, cities) good_city_names = set(c.name for c in good_cities) logging.info( - '%s good cities: %s', + "%s good cities: %s", len(good_city_names), - ', '.join(sorted(good_city_names)), + ", ".join(sorted(good_city_names)), ) bad_city_names = set(c.name for c in cities) - good_city_names logging.info( - '%s bad cities: %s', + "%s bad cities: %s", len(bad_city_names), - ', '.join(sorted(bad_city_names)), + ", ".join(sorted(bad_city_names)), ) if options.recovery_path: @@ -386,46 +382,46 @@ def main(): if os.path.isdir(options.dump): for c in cities: with open( - os.path.join(options.dump, slugify(c.name) + '.yaml'), - 'w', - 
encoding='utf-8', + os.path.join(options.dump, slugify(c.name) + ".yaml"), + "w", + encoding="utf-8", ) as f: dump_yaml(c, f) elif len(cities) == 1: - with open(options.dump, 'w', encoding='utf-8') as f: + with open(options.dump, "w", encoding="utf-8") as f: dump_yaml(cities[0], f) else: - logging.error('Cannot dump %s cities at once', len(cities)) + logging.error("Cannot dump %s cities at once", len(cities)) if options.geojson: if os.path.isdir(options.geojson): for c in cities: with open( os.path.join( - options.geojson, slugify(c.name) + '.geojson' + options.geojson, slugify(c.name) + ".geojson" ), - 'w', - encoding='utf-8', + "w", + encoding="utf-8", ) as f: json.dump(make_geojson(c, not options.crude), f) elif len(cities) == 1: - with open(options.geojson, 'w', encoding='utf-8') as f: + with open(options.geojson, "w", encoding="utf-8") as f: json.dump(make_geojson(cities[0], not options.crude), f) else: logging.error( - 'Cannot make a geojson of %s cities at once', len(cities) + "Cannot make a geojson of %s cities at once", len(cities) ) if options.log: res = [] for c in cities: v = c.get_validation_result() - v['slug'] = slugify(c.name) + v["slug"] = slugify(c.name) res.append(v) json.dump(res, options.log, indent=2, ensure_ascii=False) for processor_name, processor in inspect.getmembers( - processors, inspect.ismodule + processors, inspect.ismodule ): option_name = f"output_{processor_name}" @@ -436,5 +432,5 @@ def main(): processor.process(cities, transfers, filename, options.cache) -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/processors/__init__.py b/processors/__init__.py index 579844b..4f5ed84 100644 --- a/processors/__init__.py +++ b/processors/__init__.py @@ -1,2 +1,4 @@ -# Import only those processors (modules) you want to use -from . import mapsme, gtfs +# Import only those processors (modules) you want to use. +# Ignore F401 "module imported but unused" violation since these modules +# are addressed via introspection. +from . 
import mapsme, gtfs # noqa F401 diff --git a/processors/gtfs.py b/processors/gtfs.py index 54fccb6..32d539a 100644 --- a/processors/gtfs.py +++ b/processors/gtfs.py @@ -328,7 +328,8 @@ def process( transit_data = transit_to_dict(cities, transfers) gtfs_data = transit_data_to_gtfs(transit_data) - # TODO: make universal cache for all processors, and apply the cache to GTFS + # TODO: make universal cache for all processors, + # and apply the cache to GTFS make_gtfs(filename, gtfs_data) diff --git a/processors/mapsme.py b/processors/mapsme.py index 01da216..b8818ea 100755 --- a/processors/mapsme.py +++ b/processors/mapsme.py @@ -1,9 +1,14 @@ import json -import os import logging - +import os from collections import defaultdict +from subway_structure import ( + DISPLACEMENT_TOLERANCE, + distance, + el_center, + Station, +) from ._common import ( DEFAULT_INTERVAL, format_colour, @@ -11,19 +16,12 @@ from ._common import ( SPEED_ON_TRANSFER, TRANSFER_PENALTY, ) -from subway_structure import ( - distance, - el_center, - Station, - DISPLACEMENT_TOLERANCE, -) -OSM_TYPES = {'n': (0, 'node'), 'w': (2, 'way'), 'r': (3, 'relation')} +OSM_TYPES = {"n": (0, "node"), "w": (2, "way"), "r": (3, "relation")} ENTRANCE_PENALTY = 60 # seconds SPEED_TO_ENTRANCE = 5 * KMPH_TO_MPS # m/s SPEED_ON_LINE = 40 * KMPH_TO_MPS # m/s -DEFAULT_INTERVAL = 2.5 * 60 # seconds def uid(elid, typ=None): @@ -32,7 +30,7 @@ def uid(elid, typ=None): if not typ: osm_id = (osm_id << 2) + OSM_TYPES[t][0] elif typ != t: - raise Exception('Got {}, expected {}'.format(elid, typ)) + raise Exception("Got {}, expected {}".format(elid, typ)) return osm_id << 1 @@ -67,7 +65,8 @@ def if_object_is_used(method): class MapsmeCache: def __init__(self, cache_path, cities): if not cache_path: - # cache is not used, all actions with cache must be silently skipped + # Cache is not used, + # all actions with cache must be silently skipped self.is_used = False return self.cache_path = cache_path @@ -75,7 +74,7 @@ class MapsmeCache: self.cache = {} if os.path.exists(cache_path): try: - with open(cache_path, 'r', encoding='utf-8') as f: + with open(cache_path, "r", encoding="utf-8") as f: self.cache = json.load(f) except json.decoder.JSONDecodeError: logging.warning( @@ -94,9 +93,9 @@ class MapsmeCache: not moved far away. 
""" city_cache_data = self.cache[city.name] - for stoparea_id, cached_stoparea in city_cache_data['stops'].items(): - station_id = cached_stoparea['osm_type'][0] + str( - cached_stoparea['osm_id'] + for stoparea_id, cached_stoparea in city_cache_data["stops"].items(): + station_id = cached_stoparea["osm_type"][0] + str( + cached_stoparea["osm_id"] ) city_station = city.elements.get(station_id) if not city_station or not Station.is_station( @@ -105,7 +104,7 @@ class MapsmeCache: return False station_coords = el_center(city_station) cached_station_coords = tuple( - cached_stoparea[coord] for coord in ('lon', 'lat') + cached_stoparea[coord] for coord in ("lon", "lat") ) displacement = distance(station_coords, cached_station_coords) if displacement > DISPLACEMENT_TOLERANCE: @@ -121,8 +120,8 @@ class MapsmeCache: if not city.is_good and city.name in self.cache: city_cached_data = self.cache[city.name] if self._is_cached_city_usable(city): - stops.update(city_cached_data['stops']) - networks.append(city_cached_data['network']) + stops.update(city_cached_data["stops"]) + networks.append(city_cached_data["network"]) logging.info("Taking %s from cache", city.name) self.recovered_city_names.add(city.name) @@ -131,7 +130,7 @@ class MapsmeCache: """Add transfers from usable cached cities to 'transfers' dict passed as argument.""" for city_name in self.recovered_city_names: - city_cached_transfers = self.cache[city_name]['transfers'] + city_cached_transfers = self.cache[city_name]["transfers"] for stop1_uid, stop2_uid, transfer_time in city_cached_transfers: if (stop1_uid, stop2_uid) not in transfers: transfers[(stop1_uid, stop2_uid)] = transfer_time @@ -141,9 +140,10 @@ class MapsmeCache: """Create/replace one cache element with new data container. This should be done for each good city.""" self.cache[city_name] = { - 'network': network, - 'stops': {}, # stoparea el_id -> jsonified stop data - 'transfers': [], # list of tuples (stoparea1_uid, stoparea2_uid, time); uid1 < uid2 + "network": network, + "stops": {}, # stoparea el_id -> jsonified stop data + "transfers": [], # list of tuples + # (stoparea1_uid, stoparea2_uid, time); uid1 < uid2 } @if_object_is_used @@ -157,7 +157,7 @@ class MapsmeCache: """Add stoparea to the cache of each city the stoparea is in.""" stoparea_uid = uid(stoparea_id) for city_name in self.stop_cities[stoparea_uid]: - self.cache[city_name]['stops'][stoparea_id] = st + self.cache[city_name]["stops"][stoparea_id] = st @if_object_is_used def add_transfer(self, stoparea1_uid, stoparea2_uid, transfer_time): @@ -167,14 +167,14 @@ class MapsmeCache: & self.stop_cities[stoparea1_uid] & self.stop_cities[stoparea2_uid] ): - self.cache[city_name]['transfers'].append( + self.cache[city_name]["transfers"].append( (stoparea1_uid, stoparea2_uid, transfer_time) ) @if_object_is_used def save(self): try: - with open(self.cache_path, 'w', encoding='utf-8') as f: + with open(self.cache_path, "w", encoding="utf-8") as f: json.dump(self.cache, f, ensure_ascii=False) except Exception as e: logging.warning("Failed to save cache: %s", str(e)) @@ -192,14 +192,14 @@ def process(cities, transfers, filename, cache_path): exits = [] min_distance = None for n in nodes: - d = distance(center, (n['lon'], n['lat'])) + d = distance(center, (n["lon"], n["lat"])) if not min_distance: min_distance = d * 2 / 3 elif d < min_distance: continue too_close = False for e in exits: - d = distance((e['lon'], e['lat']), (n['lon'], n['lat'])) + d = distance((e["lon"], e["lat"]), (n["lon"], n["lat"])) if d < min_distance: 
too_close = True break @@ -217,20 +217,20 @@ def process(cities, transfers, filename, cache_path): cache.provide_stops_and_networks(stops, networks) for city in good_cities: - network = {'network': city.name, 'routes': [], 'agency_id': city.id} + network = {"network": city.name, "routes": [], "agency_id": city.id} cache.initialize_good_city(city.name, network) for route in city: routes = { - 'type': route.mode, - 'ref': route.ref, - 'name': route.name, - 'colour': format_colour(route.colour), - 'route_id': uid(route.id, 'r'), - 'itineraries': [], + "type": route.mode, + "ref": route.ref, + "name": route.name, + "colour": format_colour(route.colour), + "route_id": uid(route.id, "r"), + "itineraries": [], } if route.infill: - routes['casing'] = routes['colour'] - routes['colour'] = format_colour(route.infill) + routes["casing"] = routes["colour"] + routes["colour"] = format_colour(route.infill) for i, variant in enumerate(route): itin = [] for stop in variant: @@ -242,41 +242,42 @@ def process(cities, transfers, filename, cache_path): round(stop.distance / SPEED_ON_LINE), ] ) - # Make exits from platform nodes, if we don't have proper exits + # Make exits from platform nodes, + # if we don't have proper exits if ( len(stop.stoparea.entrances) + len(stop.stoparea.exits) == 0 ): for pl in stop.stoparea.platforms: pl_el = city.elements[pl] - if pl_el['type'] == 'node': + if pl_el["type"] == "node": pl_nodes = [pl_el] - elif pl_el['type'] == 'way': + elif pl_el["type"] == "way": pl_nodes = [ - city.elements.get('n{}'.format(n)) - for n in pl_el['nodes'] + city.elements.get("n{}".format(n)) + for n in pl_el["nodes"] ] else: pl_nodes = [] - for m in pl_el['members']: - if m['type'] == 'way': + for m in pl_el["members"]: + if m["type"] == "way": if ( - '{}{}'.format( - m['type'][0], m['ref'] + "{}{}".format( + m["type"][0], m["ref"] ) in city.elements ): pl_nodes.extend( [ city.elements.get( - 'n{}'.format(n) + "n{}".format(n) ) for n in city.elements[ - '{}{}'.format( - m['type'][0], - m['ref'], + "{}{}".format( + m["type"][0], + m["ref"], ) - ]['nodes'] + ]["nodes"] ] ) pl_nodes = [n for n in pl_nodes if n] @@ -284,37 +285,39 @@ def process(cities, transfers, filename, cache_path): stop.stoparea.centers[pl], pl_nodes ) - routes['itineraries'].append( + routes["itineraries"].append( { - 'stops': itin, - 'interval': round(variant.interval or DEFAULT_INTERVAL), + "stops": itin, + "interval": round( + variant.interval or DEFAULT_INTERVAL + ), } ) - network['routes'].append(routes) + network["routes"].append(routes) networks.append(network) for stop_id, stop in stop_areas.items(): st = { - 'name': stop.name, - 'int_name': stop.int_name, - 'lat': stop.center[1], - 'lon': stop.center[0], - 'osm_type': OSM_TYPES[stop.station.id[0]][1], - 'osm_id': int(stop.station.id[1:]), - 'id': uid(stop.id), - 'entrances': [], - 'exits': [], + "name": stop.name, + "int_name": stop.int_name, + "lat": stop.center[1], + "lon": stop.center[0], + "osm_type": OSM_TYPES[stop.station.id[0]][1], + "osm_id": int(stop.station.id[1:]), + "id": uid(stop.id), + "entrances": [], + "exits": [], } - for e_l, k in ((stop.entrances, 'entrances'), (stop.exits, 'exits')): + for e_l, k in ((stop.entrances, "entrances"), (stop.exits, "exits")): for e in e_l: - if e[0] == 'n': + if e[0] == "n": st[k].append( { - 'osm_type': 'node', - 'osm_id': int(e[1:]), - 'lon': stop.centers[e][0], - 'lat': stop.centers[e][1], - 'distance': ENTRANCE_PENALTY + "osm_type": "node", + "osm_id": int(e[1:]), + "lon": stop.centers[e][0], + "lat": 
stop.centers[e][1], + "distance": ENTRANCE_PENALTY + round( distance(stop.centers[e], stop.center) / SPEED_TO_ENTRANCE @@ -325,31 +328,31 @@ def process(cities, transfers, filename, cache_path): if stop.platforms: for pl in stop.platforms: for n in platform_nodes[pl]: - for k in ('entrances', 'exits'): + for k in ("entrances", "exits"): st[k].append( { - 'osm_type': n['type'], - 'osm_id': n['id'], - 'lon': n['lon'], - 'lat': n['lat'], - 'distance': ENTRANCE_PENALTY + "osm_type": n["type"], + "osm_id": n["id"], + "lon": n["lon"], + "lat": n["lat"], + "distance": ENTRANCE_PENALTY + round( distance( - (n['lon'], n['lat']), stop.center + (n["lon"], n["lat"]), stop.center ) / SPEED_TO_ENTRANCE ), } ) else: - for k in ('entrances', 'exits'): + for k in ("entrances", "exits"): st[k].append( { - 'osm_type': OSM_TYPES[stop.station.id[0]][1], - 'osm_id': int(stop.station.id[1:]), - 'lon': stop.centers[stop.id][0], - 'lat': stop.centers[stop.id][1], - 'distance': 60, + "osm_type": OSM_TYPES[stop.station.id[0]][1], + "osm_id": int(stop.station.id[1:]), + "lon": stop.centers[stop.id][0], + "lat": stop.centers[stop.id][1], + "distance": 60, } ) @@ -385,9 +388,9 @@ def process(cities, transfers, filename, cache_path): ] result = { - 'stops': list(stops.values()), - 'transfers': pairwise_transfers, - 'networks': networks, + "stops": list(stops.values()), + "transfers": pairwise_transfers, + "networks": networks, } if not filename.lower().endswith("json"): diff --git a/stop_areas/make_stop_areas.py b/stop_areas/make_stop_areas.py index 43699a9..54b0dd5 100755 --- a/stop_areas/make_stop_areas.py +++ b/stop_areas/make_stop_areas.py @@ -1,14 +1,15 @@ #!/usr/bin/env python3 -import json import codecs -from lxml import etree -import sys -import kdtree +import json import math import re +import sys import urllib.parse import urllib.request +import kdtree +from lxml import etree + QUERY = """ [out:json][timeout:250][bbox:{{bbox}}]; @@ -32,17 +33,17 @@ out meta center qt; def el_id(el): - return el['type'][0] + str(el.get('id', el.get('ref', ''))) + return el["type"][0] + str(el.get("id", el.get("ref", ""))) class StationWrapper: def __init__(self, st): - if 'center' in st: - self.coords = (st['center']['lon'], st['center']['lat']) - elif 'lon' in st: - self.coords = (st['lon'], st['lat']) + if "center" in st: + self.coords = (st["center"]["lon"], st["center"]["lat"]) + elif "lon" in st: + self.coords = (st["lon"], st["lat"]) else: - raise Exception('Coordinates not found for station {}'.format(st)) + raise Exception("Coordinates not found for station {}".format(st)) self.station = st def __len__(self): @@ -53,85 +54,85 @@ class StationWrapper: def distance(self, other): """Calculate distance in meters.""" - dx = math.radians(self[0] - other['lon']) * math.cos( - 0.5 * math.radians(self[1] + other['lat']) + dx = math.radians(self[0] - other["lon"]) * math.cos( + 0.5 * math.radians(self[1] + other["lat"]) ) - dy = math.radians(self[1] - other['lat']) + dy = math.radians(self[1] - other["lat"]) return 6378137 * math.sqrt(dx * dx + dy * dy) def overpass_request(bbox): - url = 'http://overpass-api.de/api/interpreter?data={}'.format( - urllib.parse.quote(QUERY.replace('{{bbox}}', bbox)) + url = "http://overpass-api.de/api/interpreter?data={}".format( + urllib.parse.quote(QUERY.replace("{{bbox}}", bbox)) ) response = urllib.request.urlopen(url, timeout=1000) if response.getcode() != 200: raise Exception( - 'Failed to query Overpass API: HTTP {}'.format(response.getcode()) + "Failed to query Overpass API: HTTP 
{}".format(response.getcode()) ) - reader = codecs.getreader('utf-8') - return json.load(reader(response))['elements'] + reader = codecs.getreader("utf-8") + return json.load(reader(response))["elements"] def add_stop_areas(src): if not src: - raise Exception('Empty dataset provided to add_stop_areas') + raise Exception("Empty dataset provided to add_stop_areas") # Add station=* tags to stations in subway and light_rail routes stations = {} for el in src: - if 'tags' in el and el['tags'].get('railway', None) == 'station': + if "tags" in el and el["tags"].get("railway", None) == "station": stations[el_id(el)] = el for el in src: if ( - el['type'] == 'relation' - and 'tags' in el - and el['tags'].get('route', None) in ('subway', 'light_rail') + el["type"] == "relation" + and "tags" in el + and el["tags"].get("route", None) in ("subway", "light_rail") ): - for m in el['members']: + for m in el["members"]: st = stations.get(el_id(m), None) - if st and 'station' not in st['tags']: - st['tags']['station'] = el['tags']['route'] - st['modified'] = True + if st and "station" not in st["tags"]: + st["tags"]["station"] = el["tags"]["route"] + st["modified"] = True # Create a kd-tree out of subway stations stations = kdtree.create(dimensions=2) for el in src: - if 'tags' in el and el['tags'].get('station', None) in ( - 'subway', - 'light_rail', + if "tags" in el and el["tags"].get("station", None) in ( + "subway", + "light_rail", ): stations.add(StationWrapper(el)) if stations.is_leaf: - raise Exception('No stations found') + raise Exception("No stations found") # Populate a list of nearby subway exits and platforms for each station MAX_DISTANCE = 300 # meters stop_areas = {} for el in src: - if 'tags' not in el: + if "tags" not in el: continue - if 'station' in el['tags']: + if "station" in el["tags"]: continue - if el['tags'].get('railway', None) not in ( - 'subway_entrance', - 'platform', - ) and el['tags'].get('public_transport', None) not in ( - 'platform', - 'stop_position', + if el["tags"].get("railway", None) not in ( + "subway_entrance", + "platform", + ) and el["tags"].get("public_transport", None) not in ( + "platform", + "stop_position", ): continue - coords = el.get('center', el) - station = stations.search_nn((coords['lon'], coords['lat']))[0].data + coords = el.get("center", el) + station = stations.search_nn((coords["lon"], coords["lat"]))[0].data if station.distance(coords) < MAX_DISTANCE: k = ( - station.station['id'], - station.station['tags'].get('name', 'station_with_no_name'), + station.station["id"], + station.station["tags"].get("name", "station_with_no_name"), ) # Disregard exits and platforms that are differently named - if el['tags'].get('name', k[1]) == k[1]: + if el["tags"].get("name", k[1]) == k[1]: if k not in stop_areas: stop_areas[k] = {el_id(station.station): station.station} stop_areas[k][el_id(el)] = el @@ -139,11 +140,11 @@ def add_stop_areas(src): # Find existing stop_area relations for stations and remove these stations for el in src: if ( - el['type'] == 'relation' - and el['tags'].get('public_transport', None) == 'stop_area' + el["type"] == "relation" + and el["tags"].get("public_transport", None) == "stop_area" ): found = False - for m in el['members']: + for m in el["members"]: if found: break for st in stop_areas: @@ -153,89 +154,90 @@ def add_stop_areas(src): break # Create OSM XML for new stop_area relations - root = etree.Element('osm', version='0.6') + root = etree.Element("osm", version="0.6") rid = -1 for st, members in stop_areas.items(): - rel = 
etree.SubElement(root, 'relation', id=str(rid)) + rel = etree.SubElement(root, "relation", id=str(rid)) rid -= 1 - etree.SubElement(rel, 'tag', k='type', v='public_transport') - etree.SubElement(rel, 'tag', k='public_transport', v='stop_area') - etree.SubElement(rel, 'tag', k='name', v=st[1]) + etree.SubElement(rel, "tag", k="type", v="public_transport") + etree.SubElement(rel, "tag", k="public_transport", v="stop_area") + etree.SubElement(rel, "tag", k="name", v=st[1]) for m in members.values(): if ( - m['tags'].get( - 'railway', m['tags'].get('public_transport', None) + m["tags"].get( + "railway", m["tags"].get("public_transport", None) ) - == 'platform' + == "platform" ): - role = 'platform' - elif m['tags'].get('public_transport', None) == 'stop_position': - role = 'stop' + role = "platform" + elif m["tags"].get("public_transport", None) == "stop_position": + role = "stop" else: - role = '' + role = "" etree.SubElement( - rel, 'member', ref=str(m['id']), type=m['type'], role=role + rel, "member", ref=str(m["id"]), type=m["type"], role=role ) # Add all downloaded elements for el in src: - obj = etree.SubElement(root, el['type']) + obj = etree.SubElement(root, el["type"]) for a in ( - 'id', - 'type', - 'user', - 'uid', - 'version', - 'changeset', - 'timestamp', - 'lat', - 'lon', + "id", + "type", + "user", + "uid", + "version", + "changeset", + "timestamp", + "lat", + "lon", ): if a in el: obj.set(a, str(el[a])) - if 'modified' in el: - obj.set('action', 'modify') - if 'tags' in el: - for k, v in el['tags'].items(): - etree.SubElement(obj, 'tag', k=k, v=v) - if 'members' in el: - for m in el['members']: + if "modified" in el: + obj.set("action", "modify") + if "tags" in el: + for k, v in el["tags"].items(): + etree.SubElement(obj, "tag", k=k, v=v) + if "members" in el: + for m in el["members"]: etree.SubElement( obj, - 'member', - ref=str(m['ref']), - type=m['type'], - role=m.get('role', ''), + "member", + ref=str(m["ref"]), + type=m["type"], + role=m.get("role", ""), ) - if 'nodes' in el: - for n in el['nodes']: - etree.SubElement(obj, 'nd', ref=str(n)) + if "nodes" in el: + for n in el["nodes"]: + etree.SubElement(obj, "nd", ref=str(n)) return etree.tostring(root, pretty_print=True) -if __name__ == '__main__': +if __name__ == "__main__": if len(sys.argv) < 2: print( - 'Read a JSON from Overpass and output JOSM OSM XML with added stop_area relations' + "Read a JSON from Overpass and output JOSM OSM XML with added " + "stop_area relations" ) print( - 'Usage: {} {{|}} [output.osm]'.format( + "Usage: {} {{|}} [output.osm]".format( sys.argv[0] ) ) sys.exit(1) - if re.match(r'^[-0-9.,]+$', sys.argv[1]): + if re.match(r"^[-0-9.,]+$", sys.argv[1]): src = overpass_request(sys.argv[1]) else: - with open(sys.argv[1], 'r') as f: - src = json.load(f)['elements'] + with open(sys.argv[1], "r") as f: + src = json.load(f)["elements"] result = add_stop_areas(src) if len(sys.argv) < 3: - print(result.decode('utf-8')) + print(result.decode("utf-8")) else: - with open(sys.argv[2], 'wb') as f: + with open(sys.argv[2], "wb") as f: f.write(result) diff --git a/stop_areas/make_tram_areas.py b/stop_areas/make_tram_areas.py index f06fdac..eea244d 100755 --- a/stop_areas/make_tram_areas.py +++ b/stop_areas/make_tram_areas.py @@ -1,14 +1,15 @@ #!/usr/bin/env python3 -import json import codecs -from lxml import etree -import sys -import kdtree +import json import math import re +import sys import urllib.parse import urllib.request +import kdtree +from lxml import etree + QUERY = """ 
[out:json][timeout:250][bbox:{{bbox}}]; @@ -23,17 +24,17 @@ out meta center qt; def el_id(el): - return el['type'][0] + str(el.get('id', el.get('ref', ''))) + return el["type"][0] + str(el.get("id", el.get("ref", ""))) class StationWrapper: def __init__(self, st): - if 'center' in st: - self.coords = (st['center']['lon'], st['center']['lat']) - elif 'lon' in st: - self.coords = (st['lon'], st['lat']) + if "center" in st: + self.coords = (st["center"]["lon"], st["center"]["lat"]) + elif "lon" in st: + self.coords = (st["lon"], st["lat"]) else: - raise Exception('Coordinates not found for station {}'.format(st)) + raise Exception("Coordinates not found for station {}".format(st)) self.station = st def __len__(self): @@ -44,50 +45,50 @@ class StationWrapper: def distance(self, other): """Calculate distance in meters.""" - dx = math.radians(self[0] - other['lon']) * math.cos( - 0.5 * math.radians(self[1] + other['lat']) + dx = math.radians(self[0] - other["lon"]) * math.cos( + 0.5 * math.radians(self[1] + other["lat"]) ) - dy = math.radians(self[1] - other['lat']) + dy = math.radians(self[1] - other["lat"]) return 6378137 * math.sqrt(dx * dx + dy * dy) def overpass_request(bbox): - url = 'http://overpass-api.de/api/interpreter?data={}'.format( - urllib.parse.quote(QUERY.replace('{{bbox}}', bbox)) + url = "http://overpass-api.de/api/interpreter?data={}".format( + urllib.parse.quote(QUERY.replace("{{bbox}}", bbox)) ) response = urllib.request.urlopen(url, timeout=1000) if response.getcode() != 200: raise Exception( - 'Failed to query Overpass API: HTTP {}'.format(response.getcode()) + "Failed to query Overpass API: HTTP {}".format(response.getcode()) ) - reader = codecs.getreader('utf-8') - return json.load(reader(response))['elements'] + reader = codecs.getreader("utf-8") + return json.load(reader(response))["elements"] def is_part_of_stop(tags): - if tags.get('public_transport') in ('platform', 'stop_position'): + if tags.get("public_transport") in ("platform", "stop_position"): return True - if tags.get('railway') == 'platform': + if tags.get("railway") == "platform": return True return False def add_stop_areas(src): if not src: - raise Exception('Empty dataset provided to add_stop_areas') + raise Exception("Empty dataset provided to add_stop_areas") # Create a kd-tree out of tram stations stations = kdtree.create(dimensions=2) for el in src: - if 'tags' in el and el['tags'].get('railway') == 'tram_stop': + if "tags" in el and el["tags"].get("railway") == "tram_stop": stations.add(StationWrapper(el)) if stations.is_leaf: - raise Exception('No stations found') + raise Exception("No stations found") elements = {} for el in src: - if el.get('tags'): + if el.get("tags"): elements[el_id(el)] = el # Populate a list of nearby subway exits and platforms for each station @@ -96,27 +97,27 @@ def add_stop_areas(src): for el in src: # Only tram routes if ( - 'tags' not in el - or el['type'] != 'relation' - or el['tags'].get('route') != 'tram' + "tags" not in el + or el["type"] != "relation" + or el["tags"].get("route") != "tram" ): continue - for m in el['members']: + for m in el["members"]: if el_id(m) not in elements: continue pel = elements[el_id(m)] - if not is_part_of_stop(pel['tags']): + if not is_part_of_stop(pel["tags"]): continue - if pel['tags'].get('railway') == 'tram_stop': + if pel["tags"].get("railway") == "tram_stop": continue - coords = pel.get('center', pel) - station = stations.search_nn( - (coords['lon'], coords['lat']) - )[0].data + coords = pel.get("center", pel) + station = 
stations.search_nn((coords["lon"], coords["lat"]))[ + 0 + ].data if station.distance(coords) < MAX_DISTANCE: k = ( - station.station['id'], - station.station['tags'].get('name', None), + station.station["id"], + station.station["tags"].get("name", None), ) if k not in stop_areas: stop_areas[k] = {el_id(station.station): station.station} @@ -125,11 +126,11 @@ def add_stop_areas(src): # Find existing stop_area relations for stations and remove these stations for el in src: if ( - el['type'] == 'relation' - and el['tags'].get('public_transport', None) == 'stop_area' + el["type"] == "relation" + and el["tags"].get("public_transport", None) == "stop_area" ): found = False - for m in el['members']: + for m in el["members"]: if found: break for st in stop_areas: @@ -139,81 +140,81 @@ def add_stop_areas(src): break # Create OSM XML for new stop_area relations - root = etree.Element('osm', version='0.6') + root = etree.Element("osm", version="0.6") rid = -1 for st, members in stop_areas.items(): - rel = etree.SubElement(root, 'relation', id=str(rid)) + rel = etree.SubElement(root, "relation", id=str(rid)) rid -= 1 - etree.SubElement(rel, 'tag', k='type', v='public_transport') - etree.SubElement(rel, 'tag', k='public_transport', v='stop_area') + etree.SubElement(rel, "tag", k="type", v="public_transport") + etree.SubElement(rel, "tag", k="public_transport", v="stop_area") if st[1]: - etree.SubElement(rel, 'tag', k='name', v=st[1]) + etree.SubElement(rel, "tag", k="name", v=st[1]) for m in members.values(): etree.SubElement( - rel, 'member', ref=str(m['id']), type=m['type'], role='' + rel, "member", ref=str(m["id"]), type=m["type"], role="" ) # Add all downloaded elements for el in src: - obj = etree.SubElement(root, el['type']) + obj = etree.SubElement(root, el["type"]) for a in ( - 'id', - 'type', - 'user', - 'uid', - 'version', - 'changeset', - 'timestamp', - 'lat', - 'lon', + "id", + "type", + "user", + "uid", + "version", + "changeset", + "timestamp", + "lat", + "lon", ): if a in el: obj.set(a, str(el[a])) - if 'modified' in el: - obj.set('action', 'modify') - if 'tags' in el: - for k, v in el['tags'].items(): - etree.SubElement(obj, 'tag', k=k, v=v) - if 'members' in el: - for m in el['members']: + if "modified" in el: + obj.set("action", "modify") + if "tags" in el: + for k, v in el["tags"].items(): + etree.SubElement(obj, "tag", k=k, v=v) + if "members" in el: + for m in el["members"]: etree.SubElement( obj, - 'member', - ref=str(m['ref']), - type=m['type'], - role=m.get('role', ''), + "member", + ref=str(m["ref"]), + type=m["type"], + role=m.get("role", ""), ) - if 'nodes' in el: - for n in el['nodes']: - etree.SubElement(obj, 'nd', ref=str(n)) + if "nodes" in el: + for n in el["nodes"]: + etree.SubElement(obj, "nd", ref=str(n)) return etree.tostring(root, pretty_print=True, encoding="utf-8") -if __name__ == '__main__': +if __name__ == "__main__": if len(sys.argv) < 2: print( - 'Read a JSON from Overpass and output JOSM OSM XML ' - 'with added stop_area relations' + "Read a JSON from Overpass and output JOSM OSM XML " + "with added stop_area relations" ) print( - 'Usage: {} {{|}} [output.osm]'.format( + "Usage: {} {{|}} [output.osm]".format( sys.argv[0] ) ) sys.exit(1) - if re.match(r'^[-0-9.,]+$', sys.argv[1]): - bbox = sys.argv[1].split(',') - src = overpass_request(','.join([bbox[i] for i in (1, 0, 3, 2)])) + if re.match(r"^[-0-9.,]+$", sys.argv[1]): + bbox = sys.argv[1].split(",") + src = overpass_request(",".join([bbox[i] for i in (1, 0, 3, 2)])) else: - with open(sys.argv[1], 'r') as 
f: - src = json.load(f)['elements'] + with open(sys.argv[1], "r") as f: + src = json.load(f)["elements"] result = add_stop_areas(src) if len(sys.argv) < 3: - print(result.decode('utf-8')) + print(result.decode("utf-8")) else: - with open(sys.argv[2], 'wb') as f: + with open(sys.argv[2], "wb") as f: f.write(result) diff --git a/stop_areas/serve.py b/stop_areas/serve.py index e5d695e..3e8dc28 100755 --- a/stop_areas/serve.py +++ b/stop_areas/serve.py @@ -1,28 +1,30 @@ #!/usr/bin/env python3 -from flask import Flask, request, make_response, render_template +from flask import Flask, make_response, render_template, request + from make_stop_areas import add_stop_areas, overpass_request + app = Flask(__name__) app.debug = True -@app.route('/') +@app.route("/") def form(): - return render_template('index.html') + return render_template("index.html") -@app.route('/process', methods=['GET']) +@app.route("/process", methods=["GET"]) def convert(): - src = overpass_request(request.args.get('bbox')) + src = overpass_request(request.args.get("bbox")) if not src: - return 'No data from overpass, sorry.' + return "No data from overpass, sorry." result = add_stop_areas(src) response = make_response(result) - response.headers['Content-Disposition'] = ( - 'attachment; filename="stop_areas.osm"' - ) + response.headers[ + "Content-Disposition" + ] = 'attachment; filename="stop_areas.osm"' return response -if __name__ == '__main__': +if __name__ == "__main__": app.run() diff --git a/subway_io.py b/subway_io.py index f45e367..cbd252a 100644 --- a/subway_io.py +++ b/subway_io.py @@ -12,33 +12,33 @@ def load_xml(f): elements = [] for event, element in etree.iterparse(f): - if element.tag in ('node', 'way', 'relation'): - el = {'type': element.tag, 'id': int(element.get('id'))} - if element.tag == 'node': - for n in ('lat', 'lon'): + if element.tag in ("node", "way", "relation"): + el = {"type": element.tag, "id": int(element.get("id"))} + if element.tag == "node": + for n in ("lat", "lon"): el[n] = float(element.get(n)) tags = {} nd = [] members = [] for sub in element: - if sub.tag == 'tag': - tags[sub.get('k')] = sub.get('v') - elif sub.tag == 'nd': - nd.append(int(sub.get('ref'))) - elif sub.tag == 'member': + if sub.tag == "tag": + tags[sub.get("k")] = sub.get("v") + elif sub.tag == "nd": + nd.append(int(sub.get("ref"))) + elif sub.tag == "member": members.append( { - 'type': sub.get('type'), - 'ref': int(sub.get('ref')), - 'role': sub.get('role', ''), + "type": sub.get("type"), + "ref": int(sub.get("ref")), + "role": sub.get("role", ""), } ) if tags: - el['tags'] = tags + el["tags"] = tags if nd: - el['nodes'] = nd + el["nodes"] = nd if members: - el['members'] = members + el["members"] = members elements.append(el) element.clear() @@ -55,7 +55,7 @@ def _get_yaml_compatible_string(scalar): if string and ( string[0] in _YAML_SPECIAL_CHARACTERS or any(seq in string for seq in _YAML_SPECIAL_SEQUENCES) - or string.endswith(':') + or string.endswith(":") ): string = string.replace("'", "''") string = "'{}'".format(string) @@ -63,25 +63,25 @@ def _get_yaml_compatible_string(scalar): def dump_yaml(city, f): - def write_yaml(data, f, indent=''): + def write_yaml(data, f, indent=""): if isinstance(data, (set, list)): - f.write('\n') + f.write("\n") for i in data: f.write(indent) - f.write('- ') - write_yaml(i, f, indent + ' ') + f.write("- ") + write_yaml(i, f, indent + " ") elif isinstance(data, dict): - f.write('\n') + f.write("\n") for k, v in data.items(): if v is None: continue - f.write(indent + 
_get_yaml_compatible_string(k) + ': ') - write_yaml(v, f, indent + ' ') + f.write(indent + _get_yaml_compatible_string(k) + ": ") + write_yaml(v, f, indent + " ") if isinstance(v, (list, set, dict)): - f.write('\n') + f.write("\n") else: f.write(_get_yaml_compatible_string(data)) - f.write('\n') + f.write("\n") INCLUDE_STOP_AREAS = False stops = set() @@ -91,14 +91,14 @@ def dump_yaml(city, f): [(sa.transfer or sa.id, sa.name) for sa in route.stop_areas()] ) rte = { - 'type': route.mode, - 'ref': route.ref, - 'name': route.name, - 'colour': route.colour, - 'infill': route.infill, - 'station_count': len(stations), - 'stations': list(stations.values()), - 'itineraries': {}, + "type": route.mode, + "ref": route.ref, + "name": route.name, + "colour": route.colour, + "infill": route.infill, + "station_count": len(stations), + "stations": list(stations.values()), + "itineraries": {}, } for variant in route: if INCLUDE_STOP_AREAS: @@ -107,33 +107,33 @@ def dump_yaml(city, f): s = st.stoparea if s.id == s.station.id: v_stops.append( - '{} ({})'.format(s.station.name, s.station.id) + "{} ({})".format(s.station.name, s.station.id) ) else: v_stops.append( - '{} ({}) in {} ({})'.format( + "{} ({}) in {} ({})".format( s.station.name, s.station.id, s.name, s.id ) ) else: v_stops = [ - '{} ({})'.format( + "{} ({})".format( s.stoparea.station.name, s.stoparea.station.id ) for s in variant ] - rte['itineraries'][variant.id] = v_stops + rte["itineraries"][variant.id] = v_stops stops.update(v_stops) routes.append(rte) transfers = [] for t in city.transfers: - v_stops = ['{} ({})'.format(s.name, s.id) for s in t] + v_stops = ["{} ({})".format(s.name, s.id) for s in t] transfers.append(sorted(v_stops)) result = { - 'stations': sorted(stops), - 'transfers': sorted(transfers, key=lambda t: t[0]), - 'routes': sorted(routes, key=lambda r: r['ref']), + "stations": sorted(stops), + "transfers": sorted(transfers, key=lambda t: t[0]), + "routes": sorted(routes, key=lambda r: r["ref"]), } write_yaml(result, f) @@ -154,15 +154,15 @@ def make_geojson(city, include_tracks_geometry=True): ) features.append( { - 'type': 'Feature', - 'geometry': { - 'type': 'LineString', - 'coordinates': tracks, + "type": "Feature", + "geometry": { + "type": "LineString", + "coordinates": tracks, }, - 'properties': { - 'ref': variant.ref, - 'name': variant.name, - 'stroke': variant.colour, + "properties": { + "ref": variant.ref, + "name": variant.name, + "stroke": variant.colour, }, } ) @@ -173,41 +173,41 @@ def make_geojson(city, include_tracks_geometry=True): for stop in stops: features.append( { - 'type': 'Feature', - 'geometry': { - 'type': 'Point', - 'coordinates': stop, + "type": "Feature", + "geometry": { + "type": "Point", + "coordinates": stop, }, - 'properties': { - 'marker-size': 'small', - 'marker-symbol': 'circle', + "properties": { + "marker-size": "small", + "marker-symbol": "circle", }, } ) for stoparea in stopareas: features.append( { - 'type': 'Feature', - 'geometry': { - 'type': 'Point', - 'coordinates': stoparea.center, + "type": "Feature", + "geometry": { + "type": "Point", + "coordinates": stoparea.center, }, - 'properties': { - 'name': stoparea.name, - 'marker-size': 'small', - 'marker-color': '#ff2600' + "properties": { + "name": stoparea.name, + "marker-size": "small", + "marker-color": "#ff2600" if stoparea in transfers - else '#797979', + else "#797979", }, } ) - return {'type': 'FeatureCollection', 'features': features} + return {"type": "FeatureCollection", "features": features} def _dumps_route_id(route_id): - 
"""Argument is a route_id that depends on route colour and ref. Name - can be taken from route_master or can be route's own, we don't take it - into consideration. Some of route attributes can be None. The function makes + """Argument is a route_id that depends on route colour and ref. Name can + be taken from route_master or can be route's own, we don't take it into + consideration. Some of route attributes can be None. The function makes route_id json-compatible - dumps it to a string.""" return json.dumps(route_id, ensure_ascii=False) @@ -224,7 +224,7 @@ def read_recovery_data(path): shuffled stations in routes.""" data = None try: - with open(path, 'r') as f: + with open(path, "r") as f: try: data = json.load(f) except json.decoder.JSONDecodeError as e: @@ -258,21 +258,21 @@ def write_recovery_data(path, current_data, cities): itineraries = [] for variant in route: itin = { - 'stations': [], - 'name': variant.name, - 'from': variant.element['tags'].get('from'), - 'to': variant.element['tags'].get('to'), + "stations": [], + "name": variant.name, + "from": variant.element["tags"].get("from"), + "to": variant.element["tags"].get("to"), } for stop in variant: station = stop.stoparea.station station_name = station.name - if station_name == '?' and station.int_name: + if station_name == "?" and station.int_name: station_name = station.int_name - itin['stations'].append( + itin["stations"].append( { - 'oms_id': station.id, - 'name': station_name, - 'center': station.center, + "oms_id": station.id, + "name": station_name, + "center": station.center, } ) if itin is not None: @@ -293,7 +293,7 @@ def write_recovery_data(path, current_data, cities): } for city_name, routes in data.items() } - with open(path, 'w', encoding='utf-8') as f: + with open(path, "w", encoding="utf-8") as f: json.dump(data, f, ensure_ascii=False, indent=2) except Exception as e: logging.warning("Cannot write recovery data to '%s': %s", path, str(e)) diff --git a/subway_structure.py b/subway_structure.py index ceb17dc..739d9c5 100644 --- a/subway_structure.py +++ b/subway_structure.py @@ -4,11 +4,12 @@ import math import re import urllib.parse import urllib.request -from css_colours import normalize_colour from collections import Counter, defaultdict +from css_colours import normalize_colour -SPREADSHEET_ID = '1SEW1-NiNOnA2qDwievcxYV1FOaQl1mb1fdeyqAxHu3k' + +SPREADSHEET_ID = "1SEW1-NiNOnA2qDwievcxYV1FOaQl1mb1fdeyqAxHu3k" MAX_DISTANCE_TO_ENTRANCES = 300 # in meters MAX_DISTANCE_STOP_TO_LINE = 50 # in meters ALLOWED_STATIONS_MISMATCH = 0.02 # part of total station count @@ -20,33 +21,33 @@ DISALLOWED_ANGLE_BETWEEN_STOPS = 20 # in degrees # it is likely the same object DISPLACEMENT_TOLERANCE = 300 # in meters -MODES_RAPID = set(('subway', 'light_rail', 'monorail', 'train')) -MODES_OVERGROUND = set(('tram', 'bus', 'trolleybus', 'aerialway', 'ferry')) -DEFAULT_MODES_RAPID = set(('subway', 'light_rail')) -DEFAULT_MODES_OVERGROUND = set(('tram',)) # TODO: bus and trolleybus? +MODES_RAPID = set(("subway", "light_rail", "monorail", "train")) +MODES_OVERGROUND = set(("tram", "bus", "trolleybus", "aerialway", "ferry")) +DEFAULT_MODES_RAPID = set(("subway", "light_rail")) +DEFAULT_MODES_OVERGROUND = set(("tram",)) # TODO: bus and trolleybus? 
ALL_MODES = MODES_RAPID | MODES_OVERGROUND RAILWAY_TYPES = set( ( - 'rail', - 'light_rail', - 'subway', - 'narrow_gauge', - 'funicular', - 'monorail', - 'tram', + "rail", + "light_rail", + "subway", + "narrow_gauge", + "funicular", + "monorail", + "tram", ) ) CONSTRUCTION_KEYS = ( - 'construction', - 'proposed', - 'construction:railway', - 'proposed:railway', + "construction", + "proposed", + "construction:railway", + "proposed:railway", ) used_entrances = set() -START_END_TIMES_RE = re.compile(r'.*?(\d{2}):(\d{2})-(\d{2}):(\d{2}).*') +START_END_TIMES_RE = re.compile(r".*?(\d{2}):(\d{2})-(\d{2}):(\d{2}).*") def get_start_end_times(opening_hours): @@ -73,19 +74,19 @@ def osm_interval_to_seconds(interval_str): (https://wiki.openstreetmap.org/wiki/Key:interval#Format) """ hours, minutes, seconds = 0, 0, 0 - semicolon_count = interval_str.count(':') + semicolon_count = interval_str.count(":") try: if semicolon_count == 0: minutes = int(interval_str) elif semicolon_count == 1: - hours, minutes = map(int, interval_str.split(':')) + hours, minutes = map(int, interval_str.split(":")) elif semicolon_count == 2: - hours, minutes, seconds = map(int, interval_str.split(':')) + hours, minutes, seconds = map(int, interval_str.split(":")) else: return None except ValueError: return None - return seconds + 60*minutes + 60*60*hours + return seconds + 60 * minutes + 60 * 60 * hours class CriticalValidationError(Exception): @@ -96,25 +97,25 @@ class CriticalValidationError(Exception): def el_id(el): if not el: return None - if 'type' not in el: - raise Exception('What is this element? {}'.format(el)) - return el['type'][0] + str(el.get('id', el.get('ref', ''))) + if "type" not in el: + raise Exception("What is this element? {}".format(el)) + return el["type"][0] + str(el.get("id", el.get("ref", ""))) def el_center(el): if not el: return None - if 'lat' in el: - return (el['lon'], el['lat']) - elif 'center' in el: - return (el['center']['lon'], el['center']['lat']) + if "lat" in el: + return (el["lon"], el["lat"]) + elif "center" in el: + return (el["center"]["lon"], el["center"]["lat"]) return None def distance(p1, p2): if p1 is None or p2 is None: raise Exception( - 'One of arguments to distance({}, {}) is None'.format(p1, p2) + "One of arguments to distance({}, {}) is None".format(p1, p2) ) dx = math.radians(p1[0] - p2[0]) * math.cos( 0.5 * math.radians(p1[1] + p2[1]) @@ -148,10 +149,11 @@ def project_on_line(p, line): result = { # In the first approximation, position on rails is the index of the # closest vertex of line to the point p. Fractional value means that - # the projected point lies on a segment between two vertices. More than - # one value can occur if a route follows the same tracks more than once. - 'positions_on_line': None, - 'projected_point': None, # (lon, lat) + # the projected point lies on a segment between two vertices. + # More than one value can occur if a route follows the same tracks + # more than once. 
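A few worked conversions for osm_interval_to_seconds() above, which accepts the interval/headway formats from the OSM wiki (bare minutes, H:MM, or H:MM:SS) and returns None for anything it cannot parse. This is a sketch that assumes subway_structure is importable; the sample strings are made up:

    from subway_structure import osm_interval_to_seconds

    assert osm_interval_to_seconds("90") == 90 * 60        # bare minutes
    assert osm_interval_to_seconds("1:30") == 3600 + 1800  # H:MM
    assert osm_interval_to_seconds("00:01:30") == 90       # H:MM:SS
    assert osm_interval_to_seconds("every 5 min") is None  # unparsable
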
+ "positions_on_line": None, + "projected_point": None, # (lon, lat) } if len(line) < 2: @@ -162,13 +164,13 @@ def project_on_line(p, line): for i, vertex in enumerate(line): d = distance(p, vertex) if d < d_min: - result['positions_on_line'] = [i] - result['projected_point'] = vertex + result["positions_on_line"] = [i] + result["projected_point"] = vertex d_min = d closest_to_vertex = True - elif vertex == result['projected_point']: + elif vertex == result["projected_point"]: # Repeated occurrence of the track vertex in line, like Oslo Line 5 - result['positions_on_line'].append(i) + result["positions_on_line"].append(i) # And then calculate distances to each segment for seg in range(len(line) - 1): # Check bbox for speed @@ -195,14 +197,15 @@ def project_on_line(p, line): ) d = distance(p, projected_point) if d < d_min: - result['positions_on_line'] = [seg + u] - result['projected_point'] = projected_point + result["positions_on_line"] = [seg + u] + result["projected_point"] = projected_point d_min = d closest_to_vertex = False - elif projected_point == result['projected_point']: - # Repeated occurrence of the track segment in line, like Oslo Line 5 + elif projected_point == result["projected_point"]: + # Repeated occurrence of the track segment in line, + # like Oslo Line 5 if not closest_to_vertex: - result['positions_on_line'].append(seg + u) + result["positions_on_line"].append(seg + u) return result @@ -253,7 +256,7 @@ def distance_on_line(p1, p2, line, start_vertex=0): return distance(line[seg1], line[seg1 + 1]) * abs(pos2 - pos1), seg1 if seg2 < seg1: # Should not happen - raise Exception('Pos1 %s is after pos2 %s', seg1, seg2) + raise Exception("Pos1 %s is after pos2 %s", seg1, seg2) d = 0 if pos1 < 1: d += distance(line[seg1], line[seg1 + 1]) * (1 - pos1) @@ -277,19 +280,19 @@ def angle_between(p1, c, p2): def format_elid_list(ids): - msg = ', '.join(sorted(ids)[:20]) + msg = ", ".join(sorted(ids)[:20]) if len(ids) > 20: - msg += ', ...' + msg += ", ..." return msg class Station: @staticmethod def get_modes(el): - mode = el['tags'].get('station') + mode = el["tags"].get("station") modes = [] if not mode else [mode] for m in ALL_MODES: - if el['tags'].get(m) == 'yes': + if el["tags"].get(m) == "yes": modes.append(m) return set(modes) @@ -298,17 +301,17 @@ class Station: # public_transport=station is too ambiguous and unspecific to use, # so we expect for it to be backed by railway=station. if ( - 'tram' in modes - and el.get('tags', {}).get('railway') == 'tram_stop' + "tram" in modes + and el.get("tags", {}).get("railway") == "tram_stop" ): return True - if el.get('tags', {}).get('railway') not in ('station', 'halt'): + if el.get("tags", {}).get("railway") not in ("station", "halt"): return False for k in CONSTRUCTION_KEYS: - if k in el['tags']: + if k in el["tags"]: return False # Not checking for station=train, obviously - if 'train' not in modes and Station.get_modes(el).isdisjoint(modes): + if "train" not in modes and Station.get_modes(el).isdisjoint(modes): return False return True @@ -316,58 +319,58 @@ class Station: """Call this with a railway=station node.""" if not Station.is_station(el, city.modes): raise Exception( - 'Station object should be instantiated from a station node. ' - 'Got: {}'.format(el) + "Station object should be instantiated from a station node. 
" + "Got: {}".format(el) ) self.id = el_id(el) self.element = el self.modes = Station.get_modes(el) - self.name = el['tags'].get('name', '?') - self.int_name = el['tags'].get( - 'int_name', el['tags'].get('name:en', None) + self.name = el["tags"].get("name", "?") + self.int_name = el["tags"].get( + "int_name", el["tags"].get("name:en", None) ) try: - self.colour = normalize_colour(el['tags'].get('colour', None)) + self.colour = normalize_colour(el["tags"].get("colour", None)) except ValueError as e: self.colour = None city.warn(str(e), el) self.center = el_center(el) if self.center is None: - raise Exception('Could not find center of {}'.format(el)) + raise Exception("Could not find center of {}".format(el)) def __repr__(self): - return 'Station(id={}, modes={}, name={}, center={})'.format( - self.id, ','.join(self.modes), self.name, self.center + return "Station(id={}, modes={}, name={}, center={})".format( + self.id, ",".join(self.modes), self.name, self.center ) class StopArea: @staticmethod def is_stop(el): - if 'tags' not in el: + if "tags" not in el: return False - if el['tags'].get('railway') == 'stop': + if el["tags"].get("railway") == "stop": return True - if el['tags'].get('public_transport') == 'stop_position': + if el["tags"].get("public_transport") == "stop_position": return True return False @staticmethod def is_platform(el): - if 'tags' not in el: + if "tags" not in el: return False - if el['tags'].get('railway') in ('platform', 'platform_edge'): + if el["tags"].get("railway") in ("platform", "platform_edge"): return True - if el['tags'].get('public_transport') == 'platform': + if el["tags"].get("public_transport") == "platform": return True return False @staticmethod def is_track(el): - if el['type'] != 'way' or 'tags' not in el: + if el["type"] != "way" or "tags" not in el: return False - return el['tags'].get('railway') in RAILWAY_TYPES + return el["tags"].get("railway") in RAILWAY_TYPES def __init__(self, station, city, stop_area=None): """Call this with a Station object.""" @@ -379,7 +382,7 @@ class StopArea: self.platforms = set() # set of el_ids of platforms self.exits = set() # el_id of subway_entrance for leaving the platform self.entrances = set() # el_id of subway_entrance for entering - # the platform + # the platform self.center = None # lon, lat of the station centre point self.centers = {} # el_id -> (lon, lat) for all elements self.transfer = None # el_id of a transfer relation @@ -390,13 +393,13 @@ class StopArea: self.colour = station.colour if stop_area: - self.name = stop_area['tags'].get('name', self.name) - self.int_name = stop_area['tags'].get( - 'int_name', stop_area['tags'].get('name:en', self.int_name) + self.name = stop_area["tags"].get("name", self.name) + self.int_name = stop_area["tags"].get( + "int_name", stop_area["tags"].get("name:en", self.int_name) ) try: self.colour = ( - normalize_colour(stop_area['tags'].get('colour')) + normalize_colour(stop_area["tags"].get("colour")) or self.colour ) except ValueError as e: @@ -404,43 +407,43 @@ class StopArea: # If we have a stop area, add all elements from it warned_about_tracks = False - for m in stop_area['members']: + for m in stop_area["members"]: k = el_id(m) m_el = city.elements.get(k) - if m_el and 'tags' in m_el: + if m_el and "tags" in m_el: if Station.is_station(m_el, city.modes): if k != station.id: city.error( - 'Stop area has multiple stations', stop_area + "Stop area has multiple stations", stop_area ) elif StopArea.is_stop(m_el): self.stops.add(k) elif StopArea.is_platform(m_el): 
self.platforms.add(k) - elif m_el['tags'].get('railway') == 'subway_entrance': - if m_el['type'] != 'node': - city.warn('Subway entrance is not a node', m_el) + elif m_el["tags"].get("railway") == "subway_entrance": + if m_el["type"] != "node": + city.warn("Subway entrance is not a node", m_el) if ( - m_el['tags'].get('entrance') != 'exit' - and m['role'] != 'exit_only' + m_el["tags"].get("entrance") != "exit" + and m["role"] != "exit_only" ): self.entrances.add(k) if ( - m_el['tags'].get('entrance') != 'entrance' - and m['role'] != 'entry_only' + m_el["tags"].get("entrance") != "entrance" + and m["role"] != "entry_only" ): self.exits.add(k) elif StopArea.is_track(m_el): if not warned_about_tracks: city.warn( - 'Tracks in a stop_area relation', stop_area + "Tracks in a stop_area relation", stop_area ) warned_about_tracks = True else: # Otherwise add nearby entrances center = station.center for c_el in city.elements.values(): - if c_el.get('tags', {}).get('railway') == 'subway_entrance': + if c_el.get("tags", {}).get("railway") == "subway_entrance": c_id = el_id(c_el) if c_id not in city.stop_areas: c_center = el_center(c_el) @@ -449,28 +452,28 @@ class StopArea: and distance(center, c_center) <= MAX_DISTANCE_TO_ENTRANCES ): - if c_el['type'] != 'node': + if c_el["type"] != "node": city.warn( - 'Subway entrance is not a node', c_el + "Subway entrance is not a node", c_el ) - etag = c_el['tags'].get('entrance') - if etag != 'exit': + etag = c_el["tags"].get("entrance") + if etag != "exit": self.entrances.add(c_id) - if etag != 'entrance': + if etag != "entrance": self.exits.add(c_id) if self.exits and not self.entrances: city.warn( - 'Only exits for a station, no entrances', + "Only exits for a station, no entrances", stop_area or station.element, ) if self.entrances and not self.exits: - city.warn('No exits for a station', stop_area or station.element) + city.warn("No exits for a station", stop_area or station.element) for el in self.get_elements(): self.centers[el] = el_center(city.elements[el]) - """Calculates the center point of the station. This algorithm + """Calculate the center point of the station. This algorithm cannot rely on a station node, since many stop_areas can share one. 
Basically it averages center points of all platforms and stop positions.""" @@ -494,8 +497,9 @@ class StopArea: return result def __repr__(self): - return 'StopArea(id={}, name={}, station={}, transfer={}, center={})'.format( - self.id, self.name, self.station, self.transfer, self.center + return ( + f"StopArea(id={self.id}, name={self.name}, station={self.station}," + f" transfer={self.transfer}, center={self.center})" ) @@ -520,34 +524,34 @@ class RouteStop: @staticmethod def get_actual_role(el, role, modes): if StopArea.is_stop(el): - return 'stop' + return "stop" elif StopArea.is_platform(el): - return 'platform' + return "platform" elif Station.is_station(el, modes): - if 'platform' in role: - return 'platform' + if "platform" in role: + return "platform" else: - return 'stop' + return "stop" return None def add(self, member, relation, city): el = city.elements[el_id(member)] - role = member['role'] + role = member["role"] if StopArea.is_stop(el): - if 'platform' in role: - city.warn('Stop position in a platform role in a route', el) - if el['type'] != 'node': - city.error('Stop position is not a node', el) + if "platform" in role: + city.warn("Stop position in a platform role in a route", el) + if el["type"] != "node": + city.error("Stop position is not a node", el) self.stop = el_center(el) - if 'entry_only' not in role: + if "entry_only" not in role: self.can_exit = True - if 'exit_only' not in role: + if "exit_only" not in role: self.can_enter = True elif Station.is_station(el, city.modes): - if el['type'] != 'node': - city.notice('Station in route is not a node', el) + if el["type"] != "node": + city.notice("Station in route is not a node", el) if not self.seen_stop and not self.seen_platform: self.stop = el_center(el) @@ -555,12 +559,12 @@ class RouteStop: self.can_exit = True elif StopArea.is_platform(el): - if 'stop' in role: - city.warn('Platform in a stop role in a route', el) - if 'exit_only' not in role: + if "stop" in role: + city.warn("Platform in a stop role in a route", el) + if "exit_only" not in role: self.platform_entry = el_id(el) self.can_enter = True - if 'entry_only' not in role: + if "entry_only" not in role: self.platform_exit = el_id(el) self.can_exit = True if not self.seen_stop: @@ -568,39 +572,38 @@ class RouteStop: multiple_check = False actual_role = RouteStop.get_actual_role(el, role, city.modes) - if actual_role == 'platform': - if role == 'platform_entry_only': + if actual_role == "platform": + if role == "platform_entry_only": multiple_check = self.seen_platform_entry self.seen_platform_entry = True - elif role == 'platform_exit_only': + elif role == "platform_exit_only": multiple_check = self.seen_platform_exit self.seen_platform_exit = True else: - if role != 'platform' and 'stop' not in role: + if role != "platform" and "stop" not in role: city.warn( - "Platform \"{}\" ({}) with invalid role '{}' in route".format( - el['tags'].get('name', ''), el_id(el), role, - ), + f'Platform "{el["tags"].get("name", "")}" ' + f'({el_id(el)}) with invalid role "{role}" in route', relation, ) multiple_check = self.seen_platform self.seen_platform_entry = True self.seen_platform_exit = True - elif actual_role == 'stop': + elif actual_role == "stop": multiple_check = self.seen_stop self.seen_stop = True if multiple_check: - log_function = city.error if actual_role == 'stop' else city.notice + log_function = city.error if actual_role == "stop" else city.notice log_function( - 'Multiple {}s for a station "{}" ({}) in a route relation'.format( - actual_role, 
el['tags'].get('name', ''), el_id(el) - ), + f'Multiple {actual_role}s for a station "' + f'{el["tags"].get("name", "")} ' + f"({el_id(el)}) in a route relation", relation, ) def __repr__(self): return ( - 'RouteStop(stop={}, pl_entry={}, pl_exit={}, stoparea={})'.format( + "RouteStop(stop={}, pl_entry={}, pl_exit={}, stoparea={})".format( self.stop, self.platform_entry, self.platform_exit, @@ -615,38 +618,38 @@ class Route: @staticmethod def is_route(el, modes): if ( - el['type'] != 'relation' - or el.get('tags', {}).get('type') != 'route' + el["type"] != "relation" + or el.get("tags", {}).get("type") != "route" ): return False - if 'members' not in el: + if "members" not in el: return False - if el['tags'].get('route') not in modes: + if el["tags"].get("route") not in modes: return False for k in CONSTRUCTION_KEYS: - if k in el['tags']: + if k in el["tags"]: return False - if 'ref' not in el['tags'] and 'name' not in el['tags']: + if "ref" not in el["tags"] and "name" not in el["tags"]: return False return True @staticmethod def get_network(relation): - for k in ('network:metro', 'network', 'operator'): - if k in relation['tags']: - return relation['tags'][k] + for k in ("network:metro", "network", "operator"): + if k in relation["tags"]: + return relation["tags"][k] return None @staticmethod def get_interval(tags): v = None - for k in ('interval', 'headway'): + for k in ("interval", "headway"): if k in tags: v = tags[k] break else: for kk in tags: - if kk.startswith(k + ':'): + if kk.startswith(k + ":"): v = tags[kk] break if not v: @@ -654,9 +657,9 @@ class Route: return osm_interval_to_seconds(v) def __init__(self, relation, city, master=None): - assert Route.is_route(relation, city.modes), ( - f'The relation does not seem to be a route: {relation}' - ) + assert Route.is_route( + relation, city.modes + ), f"The relation does not seem to be a route: {relation}" self.city = city self.element = relation self.id = el_id(relation) @@ -688,15 +691,15 @@ class Route: last_track = [] track = [] warned_about_holes = False - for m in self.element['members']: + for m in self.element["members"]: el = self.city.elements.get(el_id(m), None) if not el or not StopArea.is_track(el): continue - if 'nodes' not in el or len(el['nodes']) < 2: - self.city.error('Cannot find nodes in a railway', el) + if "nodes" not in el or len(el["nodes"]) < 2: + self.city.error("Cannot find nodes in a railway", el) continue - nodes = ['n{}'.format(n) for n in el['nodes']] - if m['role'] == 'backward': + nodes = ["n{}".format(n) for n in el["nodes"]] + if m["role"] == "backward": nodes.reverse() line_nodes.update(nodes) if not track: @@ -722,7 +725,7 @@ class Route: # Store the track if it is long and clean it if not warned_about_holes: self.city.warn( - 'Hole in route rails near node {}'.format( + "Hole in route rails near node {}".format( track[-1] ), self.element, @@ -744,47 +747,48 @@ class Route: def get_stop_projections(self): projected = [project_on_line(x.stop, self.tracks) for x in self.stops] - stop_near_tracks_criterion = lambda stop_index: ( - projected[stop_index]['projected_point'] is not None - and distance( - self.stops[stop_index].stop, - projected[stop_index]['projected_point'], + + def stop_near_tracks_criterion(stop_index: int): + return ( + projected[stop_index]["projected_point"] is not None + and distance( + self.stops[stop_index].stop, + projected[stop_index]["projected_point"], + ) + <= MAX_DISTANCE_STOP_TO_LINE ) - <= MAX_DISTANCE_STOP_TO_LINE - ) + return projected, stop_near_tracks_criterion 
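The projection helpers above are what project_stops_on_line() builds on: project_on_line() returns, for each stop, a projected point on the track polyline plus its (possibly fractional, possibly repeated) position along it, and stop_near_tracks_criterion() accepts a stop only if that projection lies within MAX_DISTANCE_STOP_TO_LINE meters. A small usage sketch with made-up coordinates, assuming subway_structure is importable:

    from subway_structure import (
        MAX_DISTANCE_STOP_TO_LINE,
        distance,
        project_on_line,
    )

    # A short track polyline; the stop sits exactly on its middle vertex.
    line = [(37.600, 55.700), (37.605, 55.702), (37.610, 55.704)]
    stop = (37.605, 55.702)

    projection = project_on_line(stop, line)
    assert projection["projected_point"] == stop
    assert projection["positions_on_line"] == [1]  # index of the closest vertex

    # The same check stop_near_tracks_criterion() applies per stop:
    assert (
        projection["projected_point"] is not None
        and distance(stop, projection["projected_point"])
        <= MAX_DISTANCE_STOP_TO_LINE
    )
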
def project_stops_on_line(self): projected, stop_near_tracks_criterion = self.get_stop_projections() projected_stops_data = { - 'first_stop_on_rails_index': None, - 'last_stop_on_rails_index': None, - 'stops_on_longest_line': [], # list [{'route_stop': RouteStop, - # 'coords': (lon, lat), - # 'positions_on_rails': [] } + "first_stop_on_rails_index": None, + "last_stop_on_rails_index": None, + "stops_on_longest_line": [], # list [{'route_stop': RouteStop, + # 'coords': (lon, lat), + # 'positions_on_rails': [] } } first_index = 0 - while ( - first_index < len(self.stops) - and not stop_near_tracks_criterion(first_index) + while first_index < len(self.stops) and not stop_near_tracks_criterion( + first_index ): first_index += 1 - projected_stops_data['first_stop_on_rails_index'] = first_index + projected_stops_data["first_stop_on_rails_index"] = first_index last_index = len(self.stops) - 1 - while ( - last_index > projected_stops_data['first_stop_on_rails_index'] - and not stop_near_tracks_criterion(last_index) - ): + while last_index > projected_stops_data[ + "first_stop_on_rails_index" + ] and not stop_near_tracks_criterion(last_index): last_index -= 1 - projected_stops_data['last_stop_on_rails_index'] = last_index + projected_stops_data["last_stop_on_rails_index"] = last_index for i, route_stop in enumerate(self.stops): if not first_index <= i <= last_index: continue - if projected[i]['projected_point'] is None: + if projected[i]["projected_point"] is None: self.city.error( 'Stop "{}" {} is nowhere near the tracks'.format( route_stop.stoparea.name, route_stop.stop @@ -793,11 +797,11 @@ class Route: ) else: stop_data = { - 'route_stop': route_stop, - 'coords': None, - 'positions_on_rails': None, + "route_stop": route_stop, + "coords": None, + "positions_on_rails": None, } - projected_point = projected[i]['projected_point'] + projected_point = projected[i]["projected_point"] # We've got two separate stations with a good stretch of # railway tracks between them. Put these on tracks. 
d = round(distance(route_stop.stop, projected_point)) @@ -809,11 +813,11 @@ class Route: self.element, ) else: - stop_data['coords'] = projected_point - stop_data['positions_on_rails'] = projected[i][ - 'positions_on_line' + stop_data["coords"] = projected_point + stop_data["positions_on_rails"] = projected[i][ + "positions_on_line" ] - projected_stops_data['stops_on_longest_line'].append(stop_data) + projected_stops_data["stops_on_longest_line"].append(stop_data) return projected_stops_data def calculate_distances(self): @@ -824,9 +828,9 @@ class Route: direct = distance(stop.stop, self.stops[i - 1].stop) d_line = None if ( - self.first_stop_on_rails_index - <= i - <= self.last_stop_on_rails_index + self.first_stop_on_rails_index + <= i + <= self.last_stop_on_rails_index ): d_line = distance_on_line( self.stops[i - 1].stop, stop.stop, self.tracks, vertex @@ -840,31 +844,31 @@ class Route: def process_tags(self, master): relation = self.element - master_tags = {} if not master else master['tags'] - if 'ref' not in relation['tags'] and 'ref' not in master_tags: - self.city.notice('Missing ref on a route', relation) - self.ref = relation['tags'].get( - 'ref', master_tags.get('ref', relation['tags'].get('name', None)) + master_tags = {} if not master else master["tags"] + if "ref" not in relation["tags"] and "ref" not in master_tags: + self.city.notice("Missing ref on a route", relation) + self.ref = relation["tags"].get( + "ref", master_tags.get("ref", relation["tags"].get("name", None)) ) - self.name = relation['tags'].get('name', None) - self.mode = relation['tags']['route'] + self.name = relation["tags"].get("name", None) + self.mode = relation["tags"]["route"] if ( - 'colour' not in relation['tags'] - and 'colour' not in master_tags - and self.mode != 'tram' + "colour" not in relation["tags"] + and "colour" not in master_tags + and self.mode != "tram" ): - self.city.notice('Missing colour on a route', relation) + self.city.notice("Missing colour on a route", relation) try: self.colour = normalize_colour( - relation['tags'].get('colour', master_tags.get('colour', None)) + relation["tags"].get("colour", master_tags.get("colour", None)) ) except ValueError as e: self.colour = None self.city.warn(str(e), relation) try: self.infill = normalize_colour( - relation['tags'].get( - 'colour:infill', master_tags.get('colour:infill', None) + relation["tags"].get( + "colour:infill", master_tags.get("colour:infill", None) ) ) except ValueError as e: @@ -872,17 +876,17 @@ class Route: self.city.warn(str(e), relation) self.network = Route.get_network(relation) self.interval = Route.get_interval( - relation['tags'] + relation["tags"] ) or Route.get_interval(master_tags) self.start_time, self.end_time = get_start_end_times( - relation['tags'].get( - 'opening_hours', master_tags.get('opening_hours', '') + relation["tags"].get( + "opening_hours", master_tags.get("opening_hours", "") ) ) - if relation['tags'].get('public_transport:version') == '1': + if relation["tags"].get("public_transport:version") == "1": self.city.warn( - 'Public transport version is 1, which means the route ' - 'is an unsorted pile of objects', + "Public transport version is 1, which means the route " + "is an unsorted pile of objects", relation, ) @@ -892,8 +896,8 @@ class Route: seen_platforms = False repeat_pos = None stop_position_elements = [] - for m in self.element['members']: - if 'inactive' in m['role']: + for m in self.element["members"]: + if "inactive" in m["role"]: continue k = el_id(m) if k in self.city.stations: @@ 
-901,19 +905,19 @@ class Route: st = st_list[0] if len(st_list) > 1: self.city.error( - f'Ambiguous station {st.name} in route. Please ' - 'use stop_position or split interchange stations', + f"Ambiguous station {st.name} in route. Please " + "use stop_position or split interchange stations", self.element, ) el = self.city.elements[k] actual_role = RouteStop.get_actual_role( - el, m['role'], self.city.modes + el, m["role"], self.city.modes ) if actual_role: - if m['role'] and actual_role not in m['role']: + if m["role"] and actual_role not in m["role"]: self.city.warn( "Wrong role '{}' for {} {}".format( - m['role'], actual_role, k + m["role"], actual_role, k ), self.element, ) @@ -929,11 +933,11 @@ class Route: if ( (seen_stops and seen_platforms) or ( - actual_role == 'stop' + actual_role == "stop" and not seen_platforms ) or ( - actual_role == 'platform' + actual_role == "platform" and not seen_stops ) ): @@ -947,12 +951,12 @@ class Route: if repeat_pos >= len(self.stops): continue # Check that the type matches - if (actual_role == 'stop' and seen_stops) or ( - actual_role == 'platform' and seen_platforms + if (actual_role == "stop" and seen_stops) or ( + actual_role == "platform" and seen_platforms ): self.city.error( 'Found an out-of-place {}: "{}" ({})'.format( - actual_role, el['tags'].get('name', ''), k + actual_role, el["tags"].get("name", ""), k ), self.element, ) @@ -965,7 +969,7 @@ class Route: repeat_pos += 1 if repeat_pos >= len(self.stops): self.city.error( - 'Incorrect order of {}s at {}'.format( + "Incorrect order of {}s at {}".format( actual_role, k ), self.element, @@ -984,25 +988,26 @@ class Route: continue if k not in self.city.elements: - if 'stop' in m['role'] or 'platform' in m['role']: + if "stop" in m["role"] or "platform" in m["role"]: raise CriticalValidationError( f"{m['role']} {m['type']} {m['ref']} for route " f"relation {self.element['id']} is not in the dataset" ) continue el = self.city.elements[k] - if 'tags' not in el: - self.city.error(f'Untagged object {k} in a route', self.element) + if "tags" not in el: + self.city.error( + f"Untagged object {k} in a route", self.element + ) continue is_under_construction = False for ck in CONSTRUCTION_KEYS: - if ck in el['tags']: + if ck in el["tags"]: self.city.warn( - 'Under construction {} {} in route. Consider ' - 'setting \'inactive\' role or removing construction attributes'.format( - m['role'] or 'feature', k - ), + f"Under construction {m['role'] or 'feature'} {k} " + "in route. Consider setting 'inactive' role or " + "removing construction attributes", self.element, ) is_under_construction = True @@ -1012,28 +1017,28 @@ class Route: if Station.is_station(el, self.city.modes): # A station may be not included into this route due to previous - # 'stop area has multiple stations' error. No other error message is needed. + # 'stop area has multiple stations' error. No other error + # message is needed. 
pass - elif el['tags'].get('railway') in ('station', 'halt'): + elif el["tags"].get("railway") in ("station", "halt"): self.city.error( - 'Missing station={} on a {}'.format(self.mode, m['role']), + "Missing station={} on a {}".format(self.mode, m["role"]), el, ) else: actual_role = RouteStop.get_actual_role( - el, m['role'], self.city.modes + el, m["role"], self.city.modes ) if actual_role: self.city.error( - '{} {} {} is not connected to a station in route'.format( - actual_role, m['type'], m['ref'] - ), + f"{actual_role} {m['type']} {m['ref']} is not " + "connected to a station in route", self.element, ) elif not StopArea.is_track(el): self.city.warn( - 'Unknown member type for {} {} in route'.format( - m['type'], m['ref'] + "Unknown member type for {} {} in route".format( + m["type"], m["ref"] ), self.element, ) @@ -1048,7 +1053,7 @@ class Route: if stop_id not in line_nodes: self.city.warn( 'Stop position "{}" ({}) is not on tracks'.format( - stop_el['tags'].get('name', ''), stop_id + stop_el["tags"].get("name", ""), stop_id ), self.element, ) @@ -1057,12 +1062,12 @@ class Route: # Can be empty. self.tracks = [el_center(self.city.elements.get(k)) for k in tracks] if ( - None in self.tracks + None in self.tracks ): # usually, extending BBOX for the city is needed self.tracks = [] for n in filter(lambda x: x not in self.city.elements, tracks): self.city.warn( - f'The dataset is missing the railway tracks node {n}', + f"The dataset is missing the railway tracks node {n}", self.element, ) break @@ -1078,7 +1083,7 @@ class Route: ): self.city.warn( "Non-closed rail sequence in a circular route", - self.element + self.element, ) projected_stops_data = self.project_stops_on_line() @@ -1090,13 +1095,13 @@ class Route: """Store better stop coordinates and indexes of first/last stops that lie on a continuous track line, to the instance attributes. 
""" - for attr in ('first_stop_on_rails_index', 'last_stop_on_rails_index'): + for attr in ("first_stop_on_rails_index", "last_stop_on_rails_index"): setattr(self, attr, projected_stops_data[attr]) - for stop_data in projected_stops_data['stops_on_longest_line']: - route_stop = stop_data['route_stop'] - route_stop.positions_on_rails = stop_data['positions_on_rails'] - if stop_coords := stop_data['coords']: + for stop_data in projected_stops_data["stops_on_longest_line"]: + route_stop = stop_data["route_stop"] + route_stop.positions_on_rails = stop_data["positions_on_rails"] + if stop_coords := stop_data["coords"]: route_stop.stop = stop_coords def get_extended_tracks(self): @@ -1141,7 +1146,7 @@ class Route: seg2 -= 1 # u2 = 1.0 if seg2 + 2 < len(tracks): - tracks = tracks[0:seg2 + 2] + tracks = tracks[0 : seg2 + 2] # noqa E203 tracks[-1] = self.stops[-1].stop if first_stop_location != (None, None): @@ -1172,8 +1177,9 @@ class Route: self.stops[si + 2].stop, ) if angle < ALLOWED_ANGLE_BETWEEN_STOPS: - msg = 'Angle between stops around "{}" is too narrow, {} degrees'.format( - self.stops[si + 1].stoparea.name, angle + msg = ( + f'Angle between stops around "{self.stops[si + 1]}" ' + f"is too narrow, {angle} degrees" ) if angle < DISALLOWED_ANGLE_BETWEEN_STOPS: disorder_errors.append(msg) @@ -1192,7 +1198,7 @@ class Route: allowed_order_violations = 1 if self.is_circular else 0 max_position_on_rails = -1 for stop_data in stop_sequence: - positions_on_rails = stop_data['positions_on_rails'] + positions_on_rails = stop_data["positions_on_rails"] suitable_occurrence = 0 while ( suitable_occurrence < len(positions_on_rails) @@ -1205,7 +1211,7 @@ class Route: suitable_occurrence -= 1 allowed_order_violations -= 1 else: - route_stop = stop_data['route_stop'] + route_stop = stop_data["route_stop"] return 'Stops on tracks are unordered near "{}" {}'.format( route_stop.stoparea.name, route_stop.stop ) @@ -1220,16 +1226,16 @@ class Route: :return: error message on the first order violation or None. """ error_message = self.check_stops_order_on_tracks_direct( - projected_stops_data['stops_on_longest_line'] + projected_stops_data["stops_on_longest_line"] ) if error_message: error_message_reversed = self.check_stops_order_on_tracks_direct( - reversed(projected_stops_data['stops_on_longest_line']) + reversed(projected_stops_data["stops_on_longest_line"]) ) if error_message_reversed is None: error_message = None self.city.warn( - 'Tracks seem to go in the opposite direction to stops', + "Tracks seem to go in the opposite direction to stops", self.element, ) self.tracks.reverse() @@ -1284,7 +1290,7 @@ class Route: for stop in self.stops: station = stop.stoparea.station stop_name = station.name - if stop_name == '?' and station.int_name: + if stop_name == "?" 
and station.int_name: stop_name = station.int_name # We won't programmatically recover routes with repeating stations: # such cases are rare and deserves manual verification @@ -1300,7 +1306,7 @@ class Route: suitable_itineraries = [] for itinerary in self.city.recovery_data[route_id]: itinerary_stop_names = [ - stop['name'] for stop in itinerary['stations'] + stop["name"] for stop in itinerary["stations"] ] if not ( len(stop_names) == len(itinerary_stop_names) @@ -1308,9 +1314,9 @@ class Route: ): continue big_station_displacement = False - for it_stop in itinerary['stations']: - name = it_stop['name'] - it_stop_center = it_stop['center'] + for it_stop in itinerary["stations"]: + name = it_stop["name"] + it_stop_center = it_stop["center"] self_stop_center = self_stops[name].stoparea.station.center if ( distance(it_stop_center, self_stop_center) @@ -1326,23 +1332,23 @@ class Route: elif len(suitable_itineraries) == 1: matching_itinerary = suitable_itineraries[0] else: - from_tag = self.element['tags'].get('from') - to_tag = self.element['tags'].get('to') + from_tag = self.element["tags"].get("from") + to_tag = self.element["tags"].get("to") if not from_tag and not to_tag: return False matching_itineraries = [ itin for itin in suitable_itineraries if from_tag - and itin['from'] == from_tag + and itin["from"] == from_tag or to_tag - and itin['to'] == to_tag + and itin["to"] == to_tag ] if len(matching_itineraries) != 1: return False matching_itinerary = matching_itineraries[0] self.stops = [ - self_stops[stop['name']] for stop in matching_itinerary['stations'] + self_stops[stop["name"]] for stop in matching_itinerary["stations"] ] return True @@ -1357,8 +1363,8 @@ class Route: def __repr__(self): return ( - 'Route(id={}, mode={}, ref={}, name={}, network={}, interval={}, ' - 'circular={}, num_stops={}, line_length={} m, from={}, to={}' + "Route(id={}, mode={}, ref={}, name={}, network={}, interval={}, " + "circular={}, num_stops={}, line_length={} m, from={}, to={}" ).format( self.id, self.mode, @@ -1382,27 +1388,27 @@ class RouteMaster: self.has_master = master is not None self.interval_from_master = False if master: - self.ref = master['tags'].get( - 'ref', master['tags'].get('name', None) + self.ref = master["tags"].get( + "ref", master["tags"].get("name", None) ) try: self.colour = normalize_colour( - master['tags'].get('colour', None) + master["tags"].get("colour", None) ) except ValueError: self.colour = None try: self.infill = normalize_colour( - master['tags'].get('colour:infill', None) + master["tags"].get("colour:infill", None) ) except ValueError: self.infill = None self.network = Route.get_network(master) - self.mode = master['tags'].get( - 'route_master', None + self.mode = master["tags"].get( + "route_master", None ) # This tag is required, but okay - self.name = master['tags'].get('name', None) - self.interval = Route.get_interval(master['tags']) + self.name = master["tags"].get("name", None) + self.interval = Route.get_interval(master["tags"]) self.interval_from_master = self.interval is not None else: self.ref = None @@ -1438,8 +1444,9 @@ class RouteMaster: self.infill = route.infill elif route.infill and route.infill != self.infill: city.notice( - 'Route "{}" has different infill colour from master "{}"'.format( - route.infill, self.infill + ( + f'Route "{route.infill}" has different infill colour ' + f'from master "{self.infill}"' ), route.element, ) @@ -1461,7 +1468,7 @@ class RouteMaster: self.mode = route.mode elif route.mode != self.mode: city.error( - 'Incompatible PT 
mode: master has {} and route has {}'.format( + "Incompatible PT mode: master has {} and route has {}".format( self.mode, route.mode ), route.element, @@ -1501,13 +1508,10 @@ class RouteMaster: return iter(self.routes) def __repr__(self): - return 'RouteMaster(id={}, mode={}, ref={}, name={}, network={}, num_variants={}'.format( - self.id, - self.mode, - self.ref, - self.name, - self.network, - len(self.routes), + return ( + f"RouteMaster(id={self.id}, mode={self.mode}, ref={self.ref}, " + f"name={self.name}, network={self.network}, " + f"num_variants={len(self.routes)}" ) @@ -1527,26 +1531,28 @@ class City: self.overground = overground if not overground: self.num_stations = int(city_data["num_stations"]) - self.num_lines = int(city_data["num_lines"] or '0') - self.num_light_lines = int(city_data["num_light_lines"] or '0') - self.num_interchanges = int(city_data["num_interchanges"] or '0') + self.num_lines = int(city_data["num_lines"] or "0") + self.num_light_lines = int(city_data["num_light_lines"] or "0") + self.num_interchanges = int(city_data["num_interchanges"] or "0") else: - self.num_tram_lines = int(city_data["num_tram_lines"] or '0') - self.num_trolleybus_lines = int(city_data["num_trolleybus_lines"] or '0') - self.num_bus_lines = int(city_data["num_bus_lines"] or '0') - self.num_other_lines = int(city_data["num_other_lines"] or '0') + self.num_tram_lines = int(city_data["num_tram_lines"] or "0") + self.num_trolleybus_lines = int( + city_data["num_trolleybus_lines"] or "0" + ) + self.num_bus_lines = int(city_data["num_bus_lines"] or "0") + self.num_other_lines = int(city_data["num_other_lines"] or "0") # Aquiring list of networks and modes networks = ( - None - if not city_data["networks"] - else city_data["networks"].split(':') + None + if not city_data["networks"] + else city_data["networks"].split(":") ) if not networks or len(networks[-1]) == 0: self.networks = [] else: self.networks = set( - filter(None, [x.strip() for x in networks[-1].split(';')]) + filter(None, [x.strip() for x in networks[-1].split(";")]) ) if not networks or len(networks) < 2 or len(networks[0]) == 0: if self.overground: @@ -1554,10 +1560,10 @@ class City: else: self.modes = DEFAULT_MODES_RAPID else: - self.modes = set([x.strip() for x in networks[0].split(',')]) + self.modes = set([x.strip() for x in networks[0].split(",")]) # Reversing bbox so it is (xmin, ymin, xmax, ymax) - bbox = city_data["bbox"].split(',') + bbox = city_data["bbox"].split(",") if len(bbox) == 4: self.bbox = [float(bbox[i]) for i in (1, 0, 3, 2)] else: @@ -1578,11 +1584,11 @@ class City: @staticmethod def log_message(message, el): if el: - tags = el.get('tags', {}) + tags = el.get("tags", {}) message += ' ({} {}, "{}")'.format( - el['type'], - el.get('id', el.get('ref')), - tags.get('name', tags.get('ref', '')), + el["type"], + el.get("id", el.get("ref")), + tags.get("name", tags.get("ref", "")), ) return message @@ -1613,59 +1619,59 @@ class City: return False def add(self, el): - if el['type'] == 'relation' and 'members' not in el: + if el["type"] == "relation" and "members" not in el: return self.elements[el_id(el)] = el - if not (el['type'] == 'relation' and 'tags' in el): + if not (el["type"] == "relation" and "tags" in el): return - relation_type = el['tags'].get('type') - if relation_type == 'route_master': - for m in el['members']: - if m['type'] != 'relation': + relation_type = el["tags"].get("type") + if relation_type == "route_master": + for m in el["members"]: + if m["type"] != "relation": continue if el_id(m) in 
self.masters: - self.error('Route in two route_masters', m) + self.error("Route in two route_masters", m) self.masters[el_id(m)] = el - elif el['tags'].get('public_transport') == 'stop_area': - if relation_type != 'public_transport': + elif el["tags"].get("public_transport") == "stop_area": + if relation_type != "public_transport": self.warn( "stop_area relation with " f"type={relation_type}, needed type=public_transport", - el + el, ) return warned_about_duplicates = False - for m in el['members']: + for m in el["members"]: stop_areas = self.stop_areas[el_id(m)] if el in stop_areas and not warned_about_duplicates: - self.warn('Duplicate element in a stop area', el) + self.warn("Duplicate element in a stop area", el) warned_about_duplicates = True else: stop_areas.append(el) def make_transfer(self, sag): transfer = set() - for m in sag['members']: + for m in sag["members"]: k = el_id(m) el = self.elements.get(k) if not el: # A sag member may validly not belong to the city while # the sag does - near the city bbox boundary continue - if 'tags' not in el: + if "tags" not in el: self.warn( - 'An untagged object {} in a stop_area_group'.format(k), sag + "An untagged object {} in a stop_area_group".format(k), sag ) continue if ( - el['type'] != 'relation' - or el['tags'].get('type') != 'public_transport' - or el['tags'].get('public_transport') != 'stop_area' + el["type"] != "relation" + or el["tags"].get("type") != "public_transport" + or el["tags"].get("public_transport") != "stop_area" ): continue if k in self.stations: @@ -1677,9 +1683,12 @@ class City: # Châtelet subway station <-> # "Châtelet - Les Halles" railway station <-> # Les Halles subway station - # Counterexample 2: Saint-Petersburg, transfers Витебский вокзал <-> Пушкинская <-> Звенигородская + # Counterexample 2: Saint-Petersburg, transfers + # Витебский вокзал <-> + # Пушкинская <-> + # Звенигородская self.warn( - 'Stop area {} belongs to multiple interchanges'.format( + "Stop area {} belongs to multiple interchanges".format( k ) ) @@ -1694,13 +1703,13 @@ class City: if Station.is_station(el, self.modes): # See PR https://github.com/mapsme/subways/pull/98 if ( - el['type'] == 'relation' - and el['tags'].get('type') != 'multipolygon' + el["type"] == "relation" + and el["tags"].get("type") != "multipolygon" ): + rel_type = el["tags"].get("type") self.warn( - "A railway station cannot be a relation of type '{}'".format( - el['tags'].get('type') - ), + "A railway station cannot be a relation of type " + f"{rel_type}", el, ) continue @@ -1719,12 +1728,13 @@ class City: for st_el in station.get_elements(): self.stations[st_el].append(station) - # Check that stops and platforms belong to single stop_area + # Check that stops and platforms belong to + # a single stop_area for sp in station.stops | station.platforms: if sp in self.stops_and_platforms: self.notice( - 'A stop or a platform {} belongs to multiple ' - 'stop areas, might be correct'.format(sp) + f"A stop or a platform {sp} belongs to " + "multiple stop areas, might be correct" ) else: self.stops_and_platforms.add(sp) @@ -1732,7 +1742,7 @@ class City: # Extract routes for el in self.elements.values(): if Route.is_route(el, self.modes): - if el['tags'].get('access') in ('no', 'private'): + if el["tags"].get("access") in ("no", "private"): continue route_id = el_id(el) master = self.masters.get(route_id, None) @@ -1750,10 +1760,10 @@ class City: route = self.route_class(el, self, master) if not route.stops: - self.warn('Route has no stops', el) + self.warn("Route has no stops", 
el) continue elif len(route.stops) == 1: - self.warn('Route has only one stop', el) + self.warn("Route has only one stop", el) continue k = el_id(master) if master else route.ref @@ -1761,15 +1771,16 @@ class City: self.routes[k] = RouteMaster(master) self.routes[k].add(route, self) - # Sometimes adding a route to a newly initialized RouteMaster can fail + # Sometimes adding a route to a newly initialized RouteMaster + # can fail if len(self.routes[k]) == 0: del self.routes[k] # And while we're iterating over relations, find interchanges if ( - el['type'] == 'relation' - and el.get('tags', {}).get('public_transport', None) - == 'stop_area_group' + el["type"] == "relation" + and el.get("tags", {}).get("public_transport", None) + == "stop_area_group" ): self.make_transfer(el) @@ -1799,45 +1810,45 @@ class City: def get_validation_result(self): result = { - 'name': self.name, - 'country': self.country, - 'continent': self.continent, - 'stations_found': getattr(self, 'found_stations', 0), - 'transfers_found': getattr(self, 'found_interchanges', 0), - 'unused_entrances': getattr(self, 'unused_entrances', 0), - 'networks': getattr(self, 'found_networks', 0), + "name": self.name, + "country": self.country, + "continent": self.continent, + "stations_found": getattr(self, "found_stations", 0), + "transfers_found": getattr(self, "found_interchanges", 0), + "unused_entrances": getattr(self, "unused_entrances", 0), + "networks": getattr(self, "found_networks", 0), } if not self.overground: result.update( { - 'subwayl_expected': self.num_lines, - 'lightrl_expected': self.num_light_lines, - 'subwayl_found': getattr(self, 'found_lines', 0), - 'lightrl_found': getattr(self, 'found_light_lines', 0), - 'stations_expected': self.num_stations, - 'transfers_expected': self.num_interchanges, + "subwayl_expected": self.num_lines, + "lightrl_expected": self.num_light_lines, + "subwayl_found": getattr(self, "found_lines", 0), + "lightrl_found": getattr(self, "found_light_lines", 0), + "stations_expected": self.num_stations, + "transfers_expected": self.num_interchanges, } ) else: result.update( { - 'stations_expected': 0, - 'transfers_expected': 0, - 'busl_expected': self.num_bus_lines, - 'trolleybusl_expected': self.num_trolleybus_lines, - 'traml_expected': self.num_tram_lines, - 'otherl_expected': self.num_other_lines, - 'busl_found': getattr(self, 'found_bus_lines', 0), - 'trolleybusl_found': getattr( - self, 'found_trolleybus_lines', 0 + "stations_expected": 0, + "transfers_expected": 0, + "busl_expected": self.num_bus_lines, + "trolleybusl_expected": self.num_trolleybus_lines, + "traml_expected": self.num_tram_lines, + "otherl_expected": self.num_other_lines, + "busl_found": getattr(self, "found_bus_lines", 0), + "trolleybusl_found": getattr( + self, "found_trolleybus_lines", 0 ), - 'traml_found': getattr(self, 'found_tram_lines', 0), - 'otherl_found': getattr(self, 'found_other_lines', 0), + "traml_found": getattr(self, "found_tram_lines", 0), + "otherl_found": getattr(self, "found_other_lines", 0), } ) - result['warnings'] = self.warnings - result['errors'] = self.errors - result['notices'] = self.notices + result["warnings"] = self.warnings + result["errors"] = self.errors + result["notices"] = self.notices return result def count_unused_entrances(self): @@ -1845,19 +1856,19 @@ class City: stop_areas = set() for el in self.elements.values(): if ( - el['type'] == 'relation' - and 'tags' in el - and el['tags'].get('public_transport') == 'stop_area' - and 'members' in el + el["type"] == "relation" + and 
"tags" in el + and el["tags"].get("public_transport") == "stop_area" + and "members" in el ): - stop_areas.update([el_id(m) for m in el['members']]) + stop_areas.update([el_id(m) for m in el["members"]]) unused = [] not_in_sa = [] for el in self.elements.values(): if ( - el['type'] == 'node' - and 'tags' in el - and el['tags'].get('railway') == 'subway_entrance' + el["type"] == "node" + and "tags" in el + and el["tags"].get("railway") == "subway_entrance" ): i = el_id(el) if i in self.stations: @@ -1870,15 +1881,13 @@ class City: self.entrances_not_in_stop_areas = len(not_in_sa) if unused: self.notice( - '{} subway entrances are not connected to a station: {}'.format( - len(unused), format_elid_list(unused) - ) + f"{len(unused)} subway entrances are not connected to a " + f"station: {format_elid_list(unused)}" ) if not_in_sa: self.notice( - '{} subway entrances are not in stop_area relations: {}'.format( - len(not_in_sa), format_elid_list(not_in_sa) - ) + f"{len(not_in_sa)} subway entrances are not in stop_area " + f"relations: {format_elid_list(not_in_sa)}" ) def check_return_routes(self, rmaster): @@ -1887,9 +1896,10 @@ class City: for variant in rmaster: if len(variant) < 2: continue - # Using transfer ids because a train can arrive at different stations within a transfer - # But disregard transfer that may give an impression of a circular route - # (for example, Simonis / Elisabeth station and route 2 in Brussels) + # Using transfer ids because a train can arrive at different + # stations within a transfer. But disregard transfer that may give + # an impression of a circular route (for example, + # Simonis / Elisabeth station and route 2 in Brussels) if variant[0].stoparea.transfer == variant[-1].stoparea.transfer: t = (variant[0].stoparea.id, variant[-1].stoparea.id) else: @@ -1907,60 +1917,64 @@ class City: if len(variants) == 0: self.error( - 'An empty route master {}. Please set construction:route ' - 'if it is under construction'.format(rmaster.id) + "An empty route master {}. Please set construction:route " + "if it is under construction".format(rmaster.id) ) elif len(variants) == 1: - log_function = self.error if not rmaster.best.is_circular else self.notice + log_function = ( + self.error if not rmaster.best.is_circular else self.notice + ) log_function( - 'Only one route in route_master. ' - 'Please check if it needs a return route', + "Only one route in route_master. 
" + "Please check if it needs a return route", rmaster.best.element, ) else: for t, rel in variants.items(): if t not in have_return: - self.notice('Route does not have a return direction', rel) + self.notice("Route does not have a return direction", rel) def validate_lines(self): self.found_light_lines = len( - [x for x in self.routes.values() if x.mode != 'subway'] + [x for x in self.routes.values() if x.mode != "subway"] ) self.found_lines = len(self.routes) - self.found_light_lines if self.found_lines != self.num_lines: self.error( - 'Found {} subway lines, expected {}'.format( + "Found {} subway lines, expected {}".format( self.found_lines, self.num_lines ) ) if self.found_light_lines != self.num_light_lines: self.error( - 'Found {} light rail lines, expected {}'.format( + "Found {} light rail lines, expected {}".format( self.found_light_lines, self.num_light_lines ) ) def validate_overground_lines(self): self.found_tram_lines = len( - [x for x in self.routes.values() if x.mode == 'tram'] + [x for x in self.routes.values() if x.mode == "tram"] ) self.found_bus_lines = len( - [x for x in self.routes.values() if x.mode == 'bus'] + [x for x in self.routes.values() if x.mode == "bus"] ) self.found_trolleybus_lines = len( - [x for x in self.routes.values() if x.mode == 'trolleybus'] + [x for x in self.routes.values() if x.mode == "trolleybus"] ) self.found_other_lines = len( [ x for x in self.routes.values() - if x.mode not in ('bus', 'trolleybus', 'tram') + if x.mode not in ("bus", "trolleybus", "tram") ] ) if self.found_tram_lines != self.num_tram_lines: - log_function = self.error if self.found_tram_lines == 0 else self.notice + log_function = ( + self.error if self.found_tram_lines == 0 else self.notice + ) log_function( - 'Found {} tram lines, expected {}'.format( + "Found {} tram lines, expected {}".format( self.found_tram_lines, self.num_tram_lines ), ) @@ -1981,7 +1995,7 @@ class City: if unused_stations: self.unused_stations = len(unused_stations) self.notice( - '{} unused stations: {}'.format( + "{} unused stations: {}".format( self.unused_stations, format_elid_list(unused_stations) ) ) @@ -1994,43 +2008,43 @@ class City: self.validate_lines() if self.found_stations != self.num_stations: - msg = 'Found {} stations in routes, expected {}'.format( + msg = "Found {} stations in routes, expected {}".format( self.found_stations, self.num_stations ) log_function = ( self.error if not ( - 0 - <= (self.num_stations - self.found_stations) - / self.num_stations - <= ALLOWED_STATIONS_MISMATCH - ) + 0 + <= (self.num_stations - self.found_stations) + / self.num_stations + <= ALLOWED_STATIONS_MISMATCH + ) else self.warn ) log_function(msg) if self.found_interchanges != self.num_interchanges: - msg = 'Found {} interchanges, expected {}'.format( + msg = "Found {} interchanges, expected {}".format( self.found_interchanges, self.num_interchanges ) log_function = ( self.error if self.num_interchanges != 0 - and not ( - (self.num_interchanges - self.found_interchanges) - / self.num_interchanges - <= ALLOWED_TRANSFERS_MISMATCH - ) + and not ( + (self.num_interchanges - self.found_interchanges) + / self.num_interchanges + <= ALLOWED_TRANSFERS_MISMATCH + ) else self.warn ) log_function(msg) self.found_networks = len(networks) if len(networks) > max(1, len(self.networks)): - n_str = '; '.join( - ['{} ({})'.format(k, v) for k, v in networks.items()] + n_str = "; ".join( + ["{} ({})".format(k, v) for k, v in networks.items()] ) - self.notice('More than one network: {}'.format(n_str)) + self.notice("More 
than one network: {}".format(n_str)) self.validate_called = True @@ -2040,14 +2054,14 @@ def find_transfers(elements, cities): stop_area_groups = [] for el in elements: if ( - el['type'] == 'relation' - and 'members' in el - and el.get('tags', {}).get('public_transport') == 'stop_area_group' + el["type"] == "relation" + and "members" in el + and el.get("tags", {}).get("public_transport") == "stop_area_group" ): stop_area_groups.append(el) - # StopArea.id uniquely identifies a StopArea. - # We must ensure StopArea uniqueness since one stop_area relation may result in + # StopArea.id uniquely identifies a StopArea. We must ensure StopArea + # uniqueness since one stop_area relation may result in # several StopArea instances at inter-city interchanges. stop_area_ids = defaultdict(set) # el_id -> set of StopArea.id stop_area_objects = dict() # StopArea.id -> one of StopArea instances @@ -2058,7 +2072,7 @@ def find_transfers(elements, cities): for sag in stop_area_groups: transfer = set() - for m in sag['members']: + for m in sag["members"]: k = el_id(m) if k not in stop_area_ids: continue @@ -2075,42 +2089,42 @@ def get_unused_entrances_geojson(elements): features = [] for el in elements: if ( - el['type'] == 'node' - and 'tags' in el - and el['tags'].get('railway') == 'subway_entrance' + el["type"] == "node" + and "tags" in el + and el["tags"].get("railway") == "subway_entrance" ): if el_id(el) not in used_entrances: - geometry = {'type': 'Point', 'coordinates': el_center(el)} + geometry = {"type": "Point", "coordinates": el_center(el)} properties = { k: v - for k, v in el['tags'].items() - if k not in ('railway', 'entrance') + for k, v in el["tags"].items() + if k not in ("railway", "entrance") } features.append( { - 'type': 'Feature', - 'geometry': geometry, - 'properties': properties, + "type": "Feature", + "geometry": geometry, + "properties": properties, } ) - return {'type': 'FeatureCollection', 'features': features} + return {"type": "FeatureCollection", "features": features} def download_cities(overground=False): assert not overground, "Overground transit not implemented yet" url = ( - 'https://docs.google.com/spreadsheets/d/{}/export?format=csv{}'.format( - SPREADSHEET_ID, '&gid=1881416409' if overground else '' + "https://docs.google.com/spreadsheets/d/{}/export?format=csv{}".format( + SPREADSHEET_ID, "&gid=1881416409" if overground else "" ) ) response = urllib.request.urlopen(url) if response.getcode() != 200: raise Exception( - 'Failed to download cities spreadsheet: HTTP {}'.format( + "Failed to download cities spreadsheet: HTTP {}".format( response.getcode() ) ) - data = response.read().decode('utf-8') + data = response.read().decode("utf-8") reader = csv.DictReader( data.splitlines(), fieldnames=( @@ -2136,8 +2150,8 @@ def download_cities(overground=False): name = city_data["name"].strip() if name in names: logging.warning( - 'Duplicate city name in the google spreadsheet: %s', - city_data + "Duplicate city name in the google spreadsheet: %s", + city_data, ) names.add(name) return cities diff --git a/tests/sample_data.py b/tests/sample_data.py index dca00e1..0fffacd 100644 --- a/tests/sample_data.py +++ b/tests/sample_data.py @@ -55,7 +55,6 @@ sample_networks = { "positions_on_rails": [], }, }, - "Only 2 stations connected with rails": { "xml": """ @@ -125,7 +124,6 @@ sample_networks = { "positions_on_rails": [[0], [1]], }, }, - "Only 6 stations, no rails": { "xml": """ @@ -214,7 +212,6 @@ sample_networks = { "positions_on_rails": [], }, }, - "One rail line connecting all 
stations": { "xml": """ @@ -328,7 +325,6 @@ sample_networks = { "positions_on_rails": [[0], [1], [2], [3], [4], [5]], }, }, - "One rail line connecting all stations except the last": { "xml": """ @@ -439,7 +435,6 @@ sample_networks = { "positions_on_rails": [[0], [1], [2], [3], [4]], }, }, - "One rail line connecting all stations except the fist": { "xml": """ @@ -550,7 +545,6 @@ sample_networks = { "positions_on_rails": [[0], [1], [2], [3], [4]], }, }, - "One rail line connecting all stations except the fist and the last": { "xml": """ @@ -658,7 +652,6 @@ sample_networks = { "positions_on_rails": [[0], [1], [2], [3]], }, }, - "One rail line connecting only 2 first stations": { "xml": """ @@ -760,7 +753,6 @@ sample_networks = { "positions_on_rails": [[0], [1]], }, }, - "One rail line connecting only 2 last stations": { "xml": """ @@ -862,7 +854,6 @@ sample_networks = { "positions_on_rails": [[0], [1]], }, }, - "One rail connecting all stations and protruding at both ends": { "xml": """ @@ -986,8 +977,10 @@ sample_networks = { "positions_on_rails": [[1], [2], [3], [4], [5], [6]], }, }, - - "Several rails with reversed order for backward route, connecting all stations and protruding at both ends": { + ( + "Several rails with reversed order for backward route, " + "connecting all stations and protruding at both ends" + ): { "xml": """ @@ -1116,8 +1109,10 @@ sample_networks = { "positions_on_rails": [[1], [2], [3], [4], [5], [6]], }, }, - - "One rail laying near all stations requiring station projecting, protruding at both ends": { + ( + "One rail laying near all stations requiring station projecting, " + "protruding at both ends" + ): { "xml": """ @@ -1210,15 +1205,28 @@ sample_networks = { "forward": { "first_stop_on_rails_index": 0, "last_stop_on_rails_index": 5, - "positions_on_rails": [[1/7], [2/7], [3/7], [4/7], [5/7], [6/7]], + "positions_on_rails": [ + [1 / 7], + [2 / 7], + [3 / 7], + [4 / 7], + [5 / 7], + [6 / 7], + ], }, "backward": { "first_stop_on_rails_index": 0, "last_stop_on_rails_index": 5, - "positions_on_rails": [[1/7], [2/7], [3/7], [4/7], [5/7], [6/7]], + "positions_on_rails": [ + [1 / 7], + [2 / 7], + [3 / 7], + [4 / 7], + [5 / 7], + [6 / 7], + ], }, }, - "One rail laying near all stations except the first and last": { "xml": """ @@ -1314,15 +1322,14 @@ sample_networks = { "forward": { "first_stop_on_rails_index": 1, "last_stop_on_rails_index": 4, - "positions_on_rails": [[0], [1/3], [2/3], [1]], + "positions_on_rails": [[0], [1 / 3], [2 / 3], [1]], }, "backward": { "first_stop_on_rails_index": 1, "last_stop_on_rails_index": 4, - "positions_on_rails": [[0], [1/3], [2/3], [1]], + "positions_on_rails": [[0], [1 / 3], [2 / 3], [1]], }, }, - "Circle route without rails": { "xml": """ @@ -1391,7 +1398,6 @@ sample_networks = { "positions_on_rails": [], }, }, - "Circle route with closed rail line connecting all stations": { "xml": """ diff --git a/tests/test_build_tracks.py b/tests/test_build_tracks.py index cca10e1..da16780 100644 --- a/tests/test_build_tracks.py +++ b/tests/test_build_tracks.py @@ -25,7 +25,7 @@ class TestOneRouteTracks(unittest.TestCase): "name": "Null Island", "country": "World", "continent": "Africa", - "num_stations": None, # Would be taken from the sample network data under testing + "num_stations": None, # Would be taken from the sample network data "num_lines": 1, "num_light_lines": 0, "num_interchanges": 0, @@ -127,11 +127,11 @@ class TestOneRouteTracks(unittest.TestCase): f"Wrong {attr} for {route_label} route", ) - first_index = 
route_data["first_stop_on_rails_index"] - last_index = route_data["last_stop_on_rails_index"] + first_ind = route_data["first_stop_on_rails_index"] + last_ind = route_data["last_stop_on_rails_index"] positions_on_rails = [ rs.positions_on_rails - for rs in route.stops[first_index : last_index + 1] + for rs in route.stops[first_ind : last_ind + 1] # noqa E203 ] self.assertListAlmostEqual( positions_on_rails, route_data["positions_on_rails"] diff --git a/tests/test_gtfs_processor.py b/tests/test_gtfs_processor.py index ca206f9..5a234e8 100644 --- a/tests/test_gtfs_processor.py +++ b/tests/test_gtfs_processor.py @@ -60,8 +60,8 @@ class TestGTFS(TestCase): ) def test__dict_to_row__numeric_values(self) -> None: - """Test that zero numeric values remain zeros in dict_to_row() function, - and not empty strings or None. + """Test that zero numeric values remain zeros in dict_to_row() + function, and not empty strings or None. """ shapes = [ diff --git a/tests/test_projection.py b/tests/test_projection.py index 44c362f..4ca0c17 100644 --- a/tests/test_projection.py +++ b/tests/test_projection.py @@ -133,7 +133,11 @@ class TestProjection(unittest.TestCase): """The tested function should accept points as any consecutive container with index operator. """ - types = (tuple, list, collections.deque,) + types = ( + tuple, + list, + collections.deque, + ) point = (0, 0.5) segment_end1 = (0, 0) diff --git a/v2h_templates.py b/v2h_templates.py index 8198d9b..3162180 100644 --- a/v2h_templates.py +++ b/v2h_templates.py @@ -1,7 +1,19 @@ +validator_osm_wiki_url = ( + "https://wiki.openstreetmap.org/wiki/Quality_assurance#subway-preprocessor" +) +github_url = "https://github.com/alexey-zakharenkov/subways" +produced_by = f"""Produced by +Subway Preprocessor on {{date}}.""" +metro_mapping_osm_article = "https://wiki.openstreetmap.org/wiki/Metro_Mapping" +list_of_metro_systems_url = ( + "https://en.wikipedia.org/wiki/List_of_metro_systems#List" +) + + # These are templates for validation_to_html.py # Variables should be in curly braces -STYLE = ''' +STYLE = """ -''' +""" -INDEX_HEADER = ''' +INDEX_HEADER = f""" Subway Validator -(s) +{STYLE}

Subway Validation Results

-

{good_cities} of {total_cities} networks validated without errors. -To make a network validate successfully please follow the -metro mapping instructions. -Commit your changes to the OSM and then check back to the updated validation results after the next validation cycle, please. -See the validator instance(s) description -for the schedule and capabilities.

+

{{good_cities}} of {{total_cities}} networks validated without +errors. To make a network validate successfully please follow the +metro mapping +instructions. Commit your changes to the OSM and then check back to the +updated validation results after the next validation cycle, please. +See the validator instance(s) +description for the schedule and capabilities.

View networks on a map

-'''.replace('(s)', STYLE) +""" -INDEX_CONTINENT = ''' +INDEX_CONTINENT = """ @@ -157,9 +170,9 @@ INDEX_CONTINENT = ''' {content} -''' +""" -INDEX_COUNTRY = ''' +INDEX_COUNTRY = """ @@ -172,56 +185,58 @@ INDEX_COUNTRY = ''' -''' +""" -INDEX_FOOTER = ''' +INDEX_FOOTER = f"""
 
Continent{num_notices}
  {country}{num_warnings} {num_notices}
- + -''' +""" -COUNTRY_HEADER = ''' +COUNTRY_HEADER = f""" -Subway Validator: {country} +Subway Validator: {{country}} -(s) +{STYLE}
-

Subway Validation Results for {country}

+

Subway Validation Results for {{country}}

Return to the countries list.

-{?subways} +{{?subways}} -{end}{?overground} +{{end}}{{?overground}} -{end} +{{end}} -'''.replace('(s)', STYLE) +""" -COUNTRY_CITY = ''' +COUNTRY_CITY = """ {?subways} @@ -229,36 +244,55 @@ COUNTRY_CITY = ''' {end}{?overground} - + {end} - + -''' +""" -COUNTRY_FOOTER = ''' +COUNTRY_FOOTER = f"""
CitySubway Lines Light Rail LinesTram Lines Bus Lines T-Bus Lines Other LinesStations Interchanges Unused Entrances
{city} {?yaml}Y{end} {?json}J{end} - {?json}M{end} + {?json}M{end} sub: {subwayl_found} / {subwayl_expected}t: {traml_found} / {traml_expected} b: {busl_found} / {busl_expected}tb: {trolleybusl_found} / {trolleybusl_expected} + tb: {trolleybusl_found} / {trolleybusl_expected} + o: {otherl_found} / {otherl_expected}st: {stations_found} / {stations_expected}int: {transfers_found} / {transfers_expected} + int: {transfers_found} / {transfers_expected} + ent: {unused_entrances}
{?errors} -
🛑 Errors
+
+
+ 🛑 Errors +
{errors}
{end} {?warnings} -
⚠️ Warnings
+
+
+ ⚠️ Warnings +
{warnings}
{end} {?notices} -
ℹ️ Notices
+
+
+ ℹ️ Notices +
{notices} {end}
- +
{produced_by}
-''' +""" diff --git a/validation_to_html.py b/validation_to_html.py index fe21734..9eca75c 100755 --- a/validation_to_html.py +++ b/validation_to_html.py @@ -1,38 +1,47 @@ #!/usr/bin/env python3 import datetime -import re -import os -import sys import json +import os +import re +import sys + from subway_structure import SPREADSHEET_ID -from v2h_templates import * +from v2h_templates import ( + COUNTRY_CITY, + COUNTRY_FOOTER, + COUNTRY_HEADER, + INDEX_CONTINENT, + INDEX_COUNTRY, + INDEX_FOOTER, + INDEX_HEADER, +) class CityData: def __init__(self, city=None): self.city = city is not None self.data = { - 'good_cities': 0, - 'total_cities': 1 if city else 0, - 'num_errors': 0, - 'num_warnings': 0, - 'num_notices': 0 + "good_cities": 0, + "total_cities": 1 if city else 0, + "num_errors": 0, + "num_warnings": 0, + "num_notices": 0, } self.slug = None if city: - self.slug = city['slug'] - self.country = city['country'] - self.continent = city['continent'] - self.errors = city['errors'] - self.warnings = city['warnings'] - self.notices = city['notices'] + self.slug = city["slug"] + self.country = city["country"] + self.continent = city["continent"] + self.errors = city["errors"] + self.warnings = city["warnings"] + self.notices = city["notices"] if not self.errors: - self.data['good_cities'] = 1 - self.data['num_errors'] = len(self.errors) - self.data['num_warnings'] = len(self.warnings) - self.data['num_notices'] = len(self.notices) + self.data["good_cities"] = 1 + self.data["num_errors"] = len(self.errors) + self.data["num_warnings"] = len(self.warnings) + self.data["num_notices"] = len(self.notices) for k, v in city.items(): - if 'found' in k or 'expected' in k or 'unused' in k: + if "found" in k or "expected" in k or "unused" in k: self.data[k] = v def not__get__(self, i): @@ -49,37 +58,37 @@ class CityData: def format(self, s): def test_eq(v1, v2): - return '1' if v1 == v2 else '0' + return "1" if v1 == v2 else "0" for k in self.data: - s = s.replace('{' + k + '}', str(self.data[k])) - s = s.replace('{slug}', self.slug or '') + s = s.replace("{" + k + "}", str(self.data[k])) + s = s.replace("{slug}", self.slug or "") for k in ( - 'subwayl', - 'lightrl', - 'stations', - 'transfers', - 'busl', - 'trolleybusl', - 'traml', - 'otherl', + "subwayl", + "lightrl", + "stations", + "transfers", + "busl", + "trolleybusl", + "traml", + "otherl", ): - if k + '_expected' in self.data: + if k + "_expected" in self.data: s = s.replace( - '{=' + k + '}', + "{=" + k + "}", test_eq( - self.data[k + '_found'], self.data[k + '_expected'] + self.data[k + "_found"], self.data[k + "_expected"] ), ) s = s.replace( - '{=cities}', - test_eq(self.data['good_cities'], self.data['total_cities']), + "{=cities}", + test_eq(self.data["good_cities"], self.data["total_cities"]), ) s = s.replace( - '{=entrances}', test_eq(self.data['unused_entrances'], 0) + "{=entrances}", test_eq(self.data["unused_entrances"], 0) ) - for k in ('errors', 'warnings', 'notices'): - s = s.replace('{=' + k + '}', test_eq(self.data['num_' + k], 0)) + for k in ("errors", "warnings", "notices"): + s = s.replace("{=" + k + "}", test_eq(self.data["num_" + k], 0)) return s @@ -89,27 +98,27 @@ def tmpl(s, data=None, **kwargs): if kwargs: for k, v in kwargs.items(): if v is not None: - s = s.replace('{' + k + '}', str(v)) + s = s.replace("{" + k + "}", str(v)) s = re.sub( - r'\{\?' + k + r'\}(.+?)\{end\}', - r'\1' if v else '', + r"\{\?" 
+ k + r"\}(.+?)\{end\}", + r"\1" if v else "", s, flags=re.DOTALL, ) - s = s.replace('{date}', date) + s = s.replace("{date}", date) google_url = ( - 'https://docs.google.com/spreadsheets/d/{}/edit?usp=sharing'.format( + "https://docs.google.com/spreadsheets/d/{}/edit?usp=sharing".format( SPREADSHEET_ID ) ) - s = s.replace('{google}', google_url) + s = s.replace("{google}", google_url) return s -EXPAND_OSM_TYPE = {'n': 'node', 'w': 'way', 'r': 'relation'} -RE_SHORT = re.compile(r'\b([nwr])(\d+)\b') -RE_FULL = re.compile(r'\b(node|way|relation) (\d+)\b') -RE_COORDS = re.compile(r'\((-?\d+\.\d+), (-?\d+\.\d+)\)') +EXPAND_OSM_TYPE = {"n": "node", "w": "way", "r": "relation"} +RE_SHORT = re.compile(r"\b([nwr])(\d+)\b") +RE_FULL = re.compile(r"\b(node|way|relation) (\d+)\b") +RE_COORDS = re.compile(r"\((-?\d+\.\d+), (-?\d+\.\d+)\)") def osm_links(s): @@ -123,25 +132,26 @@ def osm_links(s): s = RE_SHORT.sub(link, s) s = RE_FULL.sub(link, s) s = RE_COORDS.sub( - r'(pos)', + r'(pos)', s, ) return s def esc(s): - return s.replace('&', '&').replace('<', '<').replace('>', '>') + return s.replace("&", "&").replace("<", "<").replace(">", ">") if len(sys.argv) < 2: - print('Reads a log from subway validator and prepares HTML files.') + print("Reads a log from subway validator and prepares HTML files.") print( - 'Usage: {} []'.format(sys.argv[0]) + "Usage: {} []".format(sys.argv[0]) ) sys.exit(1) -with open(sys.argv[1], 'r', encoding='utf-8') as f: - data = {c['name']: CityData(c) for c in json.load(f)} +with open(sys.argv[1], "r", encoding="utf-8") as f: + data = {c["name"]: CityData(c) for c in json.load(f)} countries = {} continents = {} @@ -154,16 +164,16 @@ for c in data.values(): c_by_c[c.continent].add(c.country) world = sum(continents.values(), CityData()) -overground = 'traml_expected' in next(iter(data.values())).data -date = datetime.datetime.utcnow().strftime('%d.%m.%Y %H:%M UTC') -path = '.' if len(sys.argv) < 3 else sys.argv[2] -index = open(os.path.join(path, 'index.html'), 'w', encoding='utf-8') +overground = "traml_expected" in next(iter(data.values())).data +date = datetime.datetime.utcnow().strftime("%d.%m.%Y %H:%M UTC") +path = "." if len(sys.argv) < 3 else sys.argv[2] +index = open(os.path.join(path, "index.html"), "w", encoding="utf-8") index.write(tmpl(INDEX_HEADER, world)) for continent in sorted(continents.keys()): - content = '' + content = "" for country in sorted(c_by_c[continent]): - country_file_name = country.lower().replace(' ', '-') + '.html' + country_file_name = country.lower().replace(" ", "-") + ".html" content += tmpl( INDEX_COUNTRY, countries[country], @@ -172,7 +182,7 @@ for continent in sorted(continents.keys()): continent=continent, ) country_file = open( - os.path.join(path, country_file_name), 'w', encoding='utf-8' + os.path.join(path, country_file_name), "w", encoding="utf-8" ) country_file.write( tmpl( @@ -187,18 +197,22 @@ for continent in sorted(continents.keys()): if city.country == country: file_base = os.path.join(path, city.slug) yaml_file = ( - city.slug + '.yaml' - if os.path.exists(file_base + '.yaml') + city.slug + ".yaml" + if os.path.exists(file_base + ".yaml") else None ) json_file = ( - city.slug + '.geojson' - if os.path.exists(file_base + '.geojson') + city.slug + ".geojson" + if os.path.exists(file_base + ".geojson") else None ) - errors = '
<br>'.join([osm_links(esc(e)) for e in city.errors]) - warnings = '
<br>'.join([osm_links(esc(w)) for w in city.warnings]) - notices = '
<br>'.join([osm_links(esc(n)) for n in city.notices]) + errors = "
<br>".join([osm_links(esc(e)) for e in city.errors]) + warnings = "
<br>".join( [osm_links(esc(w)) for w in city.warnings] ) + notices = "
<br>".join( [osm_links(esc(n)) for n in city.notices] ) country_file.write( tmpl( COUNTRY_CITY,