PEPify-8 with black -l 79 -S
This commit is contained in:
parent
b422ccb6ad
commit
6596d9789c
11 changed files with 1273 additions and 531 deletions
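This commit applies the Black code formatter across the repository. For context, the flags in the title are Black's documented options: -l 79 caps line length at 79 characters (the PEP 8 limit), and -S (--skip-string-normalization) leaves the existing single-quoted strings alone instead of rewriting them to double quotes. A sketch of the invocation (the target path is an assumption):

    # Reformat the whole tree in place.
    black -l 79 -S .
    # Re-run in check mode to confirm nothing is left to reformat.
    black -l 79 -S --check .

Black verifies that the reformatted source parses to an equivalent AST, so the hunks below change layout only, not behavior.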
@@ -9,12 +9,14 @@ def make_disjoint_metro_polygons():
     polygons = []
     for c in cities:
-        polygon = shapely.geometry.Polygon([
-            (c.bbox[1], c.bbox[0]),
-            (c.bbox[1], c.bbox[2]),
-            (c.bbox[3], c.bbox[2]),
-            (c.bbox[3], c.bbox[0]),
-        ])
+        polygon = shapely.geometry.Polygon(
+            [
+                (c.bbox[1], c.bbox[0]),
+                (c.bbox[1], c.bbox[2]),
+                (c.bbox[3], c.bbox[2]),
+                (c.bbox[3], c.bbox[0]),
+            ]
+        )
         polygons.append(polygon)

     union = shapely.ops.unary_union(polygons)
@@ -18,7 +18,7 @@ if __name__ == '__main__':
     arg_parser.add_argument(
         'subway_json_file',
         type=argparse.FileType('r'),
-        help="Validator output defined by -o option of process_subways.py script"
+        help="Validator output defined by -o option of process_subways.py script",
     )

     arg_parser.add_argument(
@@ -33,7 +33,9 @@ if __name__ == '__main__':
     subway_json_file = args.subway_json_file
     subway_json = json.load(subway_json_file)

-    good_cities = set(n.get('network', n.get('title')) for n in subway_json['networks'])
+    good_cities = set(
+        n.get('network', n.get('title')) for n in subway_json['networks']
+    )
     cities = download_cities()

     lines = []

@@ -39,13 +39,17 @@ def overpass_request(overground, overpass_api, bboxes):
         if not overground:
             query += 'node[railway=subway_entrance]{};'.format(bbox_part)
         query += 'rel[public_transport=stop_area]{};'.format(bbox_part)
-        query += 'rel(br)[type=public_transport][public_transport=stop_area_group];'
+        query += (
+            'rel(br)[type=public_transport][public_transport=stop_area_group];'
+        )
     query += ');(._;>>;);out body center qt;'
     logging.debug('Query: %s', query)
     url = '{}?data={}'.format(overpass_api, urllib.parse.quote(query))
     response = urllib.request.urlopen(url, timeout=1000)
     if response.getcode() != 200:
-        raise Exception('Failed to query Overpass API: HTTP {}'.format(response.getcode()))
+        raise Exception(
+            'Failed to query Overpass API: HTTP {}'.format(response.getcode())
+        )
     return json.load(response)['elements']

@@ -56,7 +60,11 @@ def multi_overpass(overground, overpass_api, bboxes):
     for i in range(0, len(bboxes) + SLICE_SIZE - 1, SLICE_SIZE):
         if i > 0:
             time.sleep(INTERREQUEST_WAIT)
-        result.extend(overpass_request(overground, overpass_api, bboxes[i:i+SLICE_SIZE]))
+        result.extend(
+            overpass_request(
+                overground, overpass_api, bboxes[i : i + SLICE_SIZE]
+            )
+        )
     return result

@@ -66,14 +74,14 @@ def slugify(name):

 def calculate_centers(elements):
     """Adds 'center' key to each way/relation in elements,
-       except for empty ways or relations.
-       Relies on nodes-ways-relations order in the elements list.
+    except for empty ways or relations.
+    Relies on nodes-ways-relations order in the elements list.
     """
-    nodes = {}      # id(int) => (lat, lon)
-    ways = {}       # id(int) => (lat, lon)
+    nodes = {}  # id(int) => (lat, lon)
+    ways = {}  # id(int) => (lat, lon)
     relations = {}  # id(int) => (lat, lon)
     empty_relations = set()  # ids(int) of relations without members
-                             # or containing only empty relations
+    # or containing only empty relations

     def calculate_way_center(el):
         # If element has been queried via overpass-api with 'out center;'
@@ -108,9 +116,13 @@ def calculate_centers(elements):
                 else:
                     # Center of child relation is not known yet
                     return False
-            member_container = (nodes if m['type'] == 'node' else
-                                ways if m['type'] == 'way' else
-                                relations)
+            member_container = (
+                nodes
+                if m['type'] == 'node'
+                else ways
+                if m['type'] == 'way'
+                else relations
+            )
             if m['ref'] in member_container:
                 center[0] += member_container[m['ref']][0]
                 center[1] += member_container[m['ref']][1]
@@ -145,54 +157,104 @@ def calculate_centers(elements):
         relations_without_center = new_relations_without_center

     if relations_without_center:
-        logging.error("Cannot calculate center for the relations (%d in total): %s%s",
-                      len(relations_without_center),
-                      ', '.join(str(rel['id']) for rel in relations_without_center[:20]),
-                      ", ..." if len(relations_without_center) > 20 else "")
+        logging.error(
+            "Cannot calculate center for the relations (%d in total): %s%s",
+            len(relations_without_center),
+            ', '.join(str(rel['id']) for rel in relations_without_center[:20]),
+            ", ..." if len(relations_without_center) > 20 else "",
+        )
     if empty_relations:
-        logging.warning("Empty relations (%d in total): %s%s",
-                        len(empty_relations),
-                        ', '.join(str(x) for x in list(empty_relations)[:20]),
-                        ", ..." if len(empty_relations) > 20 else "")
+        logging.warning(
+            "Empty relations (%d in total): %s%s",
+            len(empty_relations),
+            ', '.join(str(x) for x in list(empty_relations)[:20]),
+            ", ..." if len(empty_relations) > 20 else "",
+        )


 if __name__ == '__main__':
     parser = argparse.ArgumentParser()
     parser.add_argument(
-        '-i', '--source', help='File to write backup of OSM data, or to read data from')
-    parser.add_argument('-x', '--xml', help='OSM extract with routes, to read data from')
-    parser.add_argument('--overpass-api',
-                        default='http://overpass-api.de/api/interpreter',
-                        help="Overpass API URL")
-    parser.add_argument('-q', '--quiet', action='store_true', help='Show only warnings and errors')
-    parser.add_argument('-c', '--city', help='Validate only a single city or a country')
-    parser.add_argument('-t', '--overground', action='store_true',
-                        help='Process overground transport instead of subways')
-    parser.add_argument('-e', '--entrances', type=argparse.FileType('w', encoding='utf-8'),
-                        help='Export unused subway entrances as GeoJSON here')
-    parser.add_argument('-l', '--log', type=argparse.FileType('w', encoding='utf-8'),
-                        help='Validation JSON file name')
-    parser.add_argument('-o', '--output', type=argparse.FileType('w', encoding='utf-8'),
-                        help='Processed metro systems output')
+        '-i',
+        '--source',
+        help='File to write backup of OSM data, or to read data from',
+    )
+    parser.add_argument(
+        '-x', '--xml', help='OSM extract with routes, to read data from'
+    )
+    parser.add_argument(
+        '--overpass-api',
+        default='http://overpass-api.de/api/interpreter',
+        help="Overpass API URL",
+    )
+    parser.add_argument(
+        '-q',
+        '--quiet',
+        action='store_true',
+        help='Show only warnings and errors',
+    )
+    parser.add_argument(
+        '-c', '--city', help='Validate only a single city or a country'
+    )
+    parser.add_argument(
+        '-t',
+        '--overground',
+        action='store_true',
+        help='Process overground transport instead of subways',
+    )
+    parser.add_argument(
+        '-e',
+        '--entrances',
+        type=argparse.FileType('w', encoding='utf-8'),
+        help='Export unused subway entrances as GeoJSON here',
+    )
+    parser.add_argument(
+        '-l',
+        '--log',
+        type=argparse.FileType('w', encoding='utf-8'),
+        help='Validation JSON file name',
+    )
+    parser.add_argument(
+        '-o',
+        '--output',
+        type=argparse.FileType('w', encoding='utf-8'),
+        help='Processed metro systems output',
+    )
     parser.add_argument('--cache', help='Cache file name for processed data')
-    parser.add_argument('-r', '--recovery-path', help='Cache file name for error recovery')
-    parser.add_argument('-d', '--dump', help='Make a YAML file for a city data')
-    parser.add_argument('-j', '--geojson', help='Make a GeoJSON file for a city data')
-    parser.add_argument('--crude', action='store_true',
-                        help='Do not use OSM railway geometry for GeoJSON')
+    parser.add_argument(
+        '-r', '--recovery-path', help='Cache file name for error recovery'
+    )
+    parser.add_argument(
+        '-d', '--dump', help='Make a YAML file for a city data'
+    )
+    parser.add_argument(
+        '-j', '--geojson', help='Make a GeoJSON file for a city data'
+    )
+    parser.add_argument(
+        '--crude',
+        action='store_true',
+        help='Do not use OSM railway geometry for GeoJSON',
+    )
     options = parser.parse_args()

     if options.quiet:
         log_level = logging.WARNING
     else:
         log_level = logging.INFO
-    logging.basicConfig(level=log_level, datefmt='%H:%M:%S',
-                        format='%(asctime)s %(levelname)-7s %(message)s')
+    logging.basicConfig(
+        level=log_level,
+        datefmt='%H:%M:%S',
+        format='%(asctime)s %(levelname)-7s %(message)s',
+    )

     # Downloading cities from Google Spreadsheets
     cities = download_cities(options.overground)
     if options.city:
-        cities = [c for c in cities if c.name == options.city or c.country == options.city]
+        cities = [
+            c
+            for c in cities
+            if c.name == options.city or c.country == options.city
+        ]
     if not cities:
         logging.error('No cities to process')
         sys.exit(2)
@@ -223,8 +285,10 @@ if __name__ == '__main__':
             json.dump(osm, f)
     else:
         if len(cities) > 10:
-            logging.error('Would not download that many cities from Overpass API, '
-                          'choose a smaller set')
+            logging.error(
+                'Would not download that many cities from Overpass API, '
+                'choose a smaller set'
+            )
             sys.exit(3)
         bboxes = [c.bbox for c in cities]
         logging.info('Downloading data from Overpass API')
@@ -247,10 +311,18 @@ if __name__ == '__main__':
         try:
             c.extract_routes()
         except CriticalValidationError as e:
-            logging.error("Critical validation error while processing %s: %s", c.name, str(e))
+            logging.error(
+                "Critical validation error while processing %s: %s",
+                c.name,
+                str(e),
+            )
             c.error(str(e))
         except AssertionError as e:
-            logging.error("Validation logic error while processing %s: %s", c.name, str(e))
+            logging.error(
+                "Validation logic error while processing %s: %s",
+                c.name,
+                str(e),
+            )
             c.error("Validation logic error: {}".format(str(e)))
         else:
             c.validate()
@@ -261,11 +333,17 @@ if __name__ == '__main__':
     transfers = find_transfers(osm, cities)

     good_city_names = set(c.name for c in good_cities)
-    logging.info('%s good cities: %s', len(good_city_names),
-                 ', '.join(sorted(good_city_names)))
+    logging.info(
+        '%s good cities: %s',
+        len(good_city_names),
+        ', '.join(sorted(good_city_names)),
+    )
     bad_city_names = set(c.name for c in cities) - good_city_names
-    logging.info('%s bad cities: %s', len(bad_city_names),
-                 ', '.join(sorted(bad_city_names)))
+    logging.info(
+        '%s bad cities: %s',
+        len(bad_city_names),
+        ', '.join(sorted(bad_city_names)),
+    )

     if options.recovery_path:
         write_recovery_data(options.recovery_path, recovery_data, cities)
@@ -276,8 +354,11 @@ if __name__ == '__main__':
     if options.dump:
         if os.path.isdir(options.dump):
             for c in cities:
-                with open(os.path.join(options.dump, slugify(c.name) + '.yaml'),
-                          'w', encoding='utf-8') as f:
+                with open(
+                    os.path.join(options.dump, slugify(c.name) + '.yaml'),
+                    'w',
+                    encoding='utf-8',
+                ) as f:
                     dump_yaml(c, f)
         elif len(cities) == 1:
             with open(options.dump, 'w', encoding='utf-8') as f:
@@ -288,14 +369,21 @@ if __name__ == '__main__':
     if options.geojson:
         if os.path.isdir(options.geojson):
             for c in cities:
-                with open(os.path.join(options.geojson, slugify(c.name) + '.geojson'),
-                          'w', encoding='utf-8') as f:
+                with open(
+                    os.path.join(
+                        options.geojson, slugify(c.name) + '.geojson'
+                    ),
+                    'w',
+                    encoding='utf-8',
+                ) as f:
                     json.dump(make_geojson(c, not options.crude), f)
         elif len(cities) == 1:
             with open(options.geojson, 'w', encoding='utf-8') as f:
                 json.dump(make_geojson(cities[0], not options.crude), f)
         else:
-            logging.error('Cannot make a geojson of %s cities at once', len(cities))
+            logging.error(
+                'Cannot make a geojson of %s cities at once', len(cities)
+            )

     if options.log:
         res = []
@@ -306,5 +394,9 @@ if __name__ == '__main__':
         json.dump(res, options.log, indent=2, ensure_ascii=False)

     if options.output:
-        json.dump(processor.process(cities, transfers, options.cache),
-                  options.output, indent=1, ensure_ascii=False)
+        json.dump(
+            processor.process(cities, transfers, options.cache),
+            options.output,
+            indent=1,
+            ensure_ascii=False,
+        )

@@ -3,15 +3,17 @@ import os
 import logging
 from collections import defaultdict
 from subway_structure import (
-    distance, el_center, Station,
-    DISPLACEMENT_TOLERANCE
+    distance,
+    el_center,
+    Station,
+    DISPLACEMENT_TOLERANCE,
 )


 OSM_TYPES = {'n': (0, 'node'), 'w': (2, 'way'), 'r': (3, 'relation')}
 ENTRANCE_PENALTY = 60  # seconds
 TRANSFER_PENALTY = 30  # seconds
-KMPH_TO_MPS = 1/3.6  # km/h to m/s conversion multiplier
+KMPH_TO_MPS = 1 / 3.6  # km/h to m/s conversion multiplier
 SPEED_TO_ENTRANCE = 5 * KMPH_TO_MPS  # m/s
 SPEED_ON_TRANSFER = 3.5 * KMPH_TO_MPS  # m/s
 SPEED_ON_LINE = 40 * KMPH_TO_MPS  # m/s
@@ -37,18 +39,22 @@ class DummyCache:
     def __getattr__(self, name):
         """This results in that a call to any method effectively does nothing
         and does not generate exceptions."""
+
         def method(*args, **kwargs):
             return None
+
         return method


 def if_object_is_used(method):
     """Decorator to skip method execution under certain condition.
     Relies on "is_used" object property."""
+
     def inner(self, *args, **kwargs):
         if not self.is_used:
             return
         return method(self, *args, **kwargs)
+
     return inner

@@ -66,8 +72,11 @@ class MapsmeCache:
             with open(cache_path, 'r', encoding='utf-8') as f:
                 self.cache = json.load(f)
         except json.decoder.JSONDecodeError:
-            logging.warning("City cache '%s' is not a valid json file. "
-                            "Building cache from scratch.", cache_path)
+            logging.warning(
+                "City cache '%s' is not a valid json file. "
+                "Building cache from scratch.",
+                cache_path,
+            )
         self.recovered_city_names = set()
         # One stoparea may participate in routes of different cities
         self.stop_cities = defaultdict(set)  # stoparea id -> city names
@@ -80,15 +89,20 @@ class MapsmeCache:
         """
         city_cache_data = self.cache[city.name]
         for stoparea_id, cached_stoparea in city_cache_data['stops'].items():
-            station_id = cached_stoparea['osm_type'][0] + str(cached_stoparea['osm_id'])
+            station_id = cached_stoparea['osm_type'][0] + str(
+                cached_stoparea['osm_id']
+            )
             city_station = city.elements.get(station_id)
-            if (not city_station or
-                    not Station.is_station(city_station, city.modes)):
+            if not city_station or not Station.is_station(
+                city_station, city.modes
+            ):
                 return False
             station_coords = el_center(city_station)
-            cached_station_coords = tuple(cached_stoparea[coord] for coord in ('lon', 'lat'))
+            cached_station_coords = tuple(
+                cached_stoparea[coord] for coord in ('lon', 'lat')
+            )
             displacement = distance(station_coords, cached_station_coords)
             if displacement > DISPLACEMENT_TOLERANCE:
                 return False

         return True
@@ -123,7 +137,7 @@ class MapsmeCache:
         self.cache[city_name] = {
             'network': network,
             'stops': {},  # stoparea el_id -> jsonified stop data
-            'transfers': []  # list of tuples (stoparea1_uid, stoparea2_uid, time); uid1 < uid2
+            'transfers': [],  # list of tuples (stoparea1_uid, stoparea2_uid, time); uid1 < uid2
         }

     @if_object_is_used
@@ -142,9 +156,11 @@ class MapsmeCache:
     @if_object_is_used
     def add_transfer(self, stoparea1_uid, stoparea2_uid, transfer_time):
         """If a transfer is inside a good city, add it to the city's cache."""
-        for city_name in (self.good_city_names &
-                          self.stop_cities[stoparea1_uid] &
-                          self.stop_cities[stoparea2_uid]):
+        for city_name in (
+            self.good_city_names
+            & self.stop_cities[stoparea1_uid]
+            & self.stop_cities[stoparea2_uid]
+        ):
             self.cache[city_name]['transfers'].append(
                 (stoparea1_uid, stoparea2_uid, transfer_time)
             )
@@ -186,7 +202,6 @@ def process(cities, transfers, cache_path):
                 exits.append(n)
         return exits

-
     cache = MapsmeCache(cache_path, cities)

     stop_areas = {}  # stoparea el_id -> StopArea instance
@@ -206,7 +221,7 @@ def process(cities, transfers, cache_path):
                 'name': route.name,
                 'colour': format_colour(route.colour),
                 'route_id': uid(route.id, 'r'),
-                'itineraries': []
+                'itineraries': [],
             }
             if route.infill:
                 routes['casing'] = routes['colour']
@@ -216,33 +231,62 @@ def process(cities, transfers, cache_path):
                 for stop in variant:
                     stop_areas[stop.stoparea.id] = stop.stoparea
                     cache.link_stop_with_city(stop.stoparea.id, city.name)
-                    itin.append([uid(stop.stoparea.id), round(stop.distance/SPEED_ON_LINE)])
+                    itin.append(
+                        [
+                            uid(stop.stoparea.id),
+                            round(stop.distance / SPEED_ON_LINE),
+                        ]
+                    )
                     # Make exits from platform nodes, if we don't have proper exits
-                    if len(stop.stoparea.entrances) + len(stop.stoparea.exits) == 0:
+                    if (
+                        len(stop.stoparea.entrances) + len(stop.stoparea.exits)
+                        == 0
+                    ):
                         for pl in stop.stoparea.platforms:
                             pl_el = city.elements[pl]
                             if pl_el['type'] == 'node':
                                 pl_nodes = [pl_el]
                             elif pl_el['type'] == 'way':
-                                pl_nodes = [city.elements.get('n{}'.format(n))
-                                            for n in pl_el['nodes']]
+                                pl_nodes = [
+                                    city.elements.get('n{}'.format(n))
+                                    for n in pl_el['nodes']
+                                ]
                             else:
                                 pl_nodes = []
                                 for m in pl_el['members']:
                                     if m['type'] == 'way':
-                                        if '{}{}'.format(m['type'][0], m['ref']) in city.elements:
+                                        if (
+                                            '{}{}'.format(
+                                                m['type'][0], m['ref']
+                                            )
+                                            in city.elements
+                                        ):
                                             pl_nodes.extend(
-                                                [city.elements.get('n{}'.format(n))
-                                                 for n in city.elements['{}{}'.format(
-                                                     m['type'][0], m['ref'])]['nodes']])
+                                                [
+                                                    city.elements.get(
+                                                        'n{}'.format(n)
+                                                    )
+                                                    for n in city.elements[
+                                                        '{}{}'.format(
+                                                            m['type'][0],
+                                                            m['ref'],
+                                                        )
+                                                    ]['nodes']
+                                                ]
+                                            )
                             pl_nodes = [n for n in pl_nodes if n]
                             platform_nodes[pl] = find_exits_for_platform(
-                                stop.stoparea.centers[pl], pl_nodes)
+                                stop.stoparea.centers[pl], pl_nodes
+                            )

-                routes['itineraries'].append({
-                    'stops': itin,
-                    'interval': round((variant.interval or DEFAULT_INTERVAL) * 60)
-                })
+                routes['itineraries'].append(
+                    {
+                        'stops': itin,
+                        'interval': round(
+                            (variant.interval or DEFAULT_INTERVAL) * 60
+                        ),
+                    }
+                )
             network['routes'].append(routes)
         networks.append(network)

@@ -261,41 +305,57 @@ def process(cities, transfers, cache_path):
         for e_l, k in ((stop.entrances, 'entrances'), (stop.exits, 'exits')):
             for e in e_l:
                 if e[0] == 'n':
-                    st[k].append({
-                        'osm_type': 'node',
-                        'osm_id': int(e[1:]),
-                        'lon': stop.centers[e][0],
-                        'lat': stop.centers[e][1],
-                        'distance': ENTRANCE_PENALTY + round(distance(
-                            stop.centers[e], stop.center)/SPEED_TO_ENTRANCE)
-                    })
+                    st[k].append(
+                        {
+                            'osm_type': 'node',
+                            'osm_id': int(e[1:]),
+                            'lon': stop.centers[e][0],
+                            'lat': stop.centers[e][1],
+                            'distance': ENTRANCE_PENALTY
+                            + round(
+                                distance(stop.centers[e], stop.center)
+                                / SPEED_TO_ENTRANCE
+                            ),
+                        }
+                    )
         if len(stop.entrances) + len(stop.exits) == 0:
             if stop.platforms:
                 for pl in stop.platforms:
                     for n in platform_nodes[pl]:
                         for k in ('entrances', 'exits'):
-                            st[k].append({
-                                'osm_type': n['type'],
-                                'osm_id': n['id'],
-                                'lon': n['lon'],
-                                'lat': n['lat'],
-                                'distance': ENTRANCE_PENALTY + round(distance(
-                                    (n['lon'], n['lat']), stop.center)/SPEED_TO_ENTRANCE)
-                            })
+                            st[k].append(
+                                {
+                                    'osm_type': n['type'],
+                                    'osm_id': n['id'],
+                                    'lon': n['lon'],
+                                    'lat': n['lat'],
+                                    'distance': ENTRANCE_PENALTY
+                                    + round(
+                                        distance(
+                                            (n['lon'], n['lat']), stop.center
+                                        )
+                                        / SPEED_TO_ENTRANCE
+                                    ),
+                                }
+                            )
             else:
                 for k in ('entrances', 'exits'):
-                    st[k].append({
-                        'osm_type': OSM_TYPES[stop.station.id[0]][1],
-                        'osm_id': int(stop.station.id[1:]),
-                        'lon': stop.centers[stop.id][0],
-                        'lat': stop.centers[stop.id][1],
-                        'distance': 60
-                    })
+                    st[k].append(
+                        {
+                            'osm_type': OSM_TYPES[stop.station.id[0]][1],
+                            'osm_id': int(stop.station.id[1:]),
+                            'lon': stop.centers[stop.id][0],
+                            'lat': stop.centers[stop.id][1],
+                            'distance': 60,
+                        }
+                    )

         stops[stop_id] = st
         cache.add_stop(stop_id, st)

-    pairwise_transfers = {}  # (stoparea1_uid, stoparea2_uid) -> time; uid1 < uid2
+    pairwise_transfers = (
+        {}
+    )  # (stoparea1_uid, stoparea2_uid) -> time; uid1 < uid2
     for t_set in transfers:
         t = list(t_set)
         for t_first in range(len(t) - 1):
@@ -306,23 +366,24 @@ def process(cities, transfers, cache_path):
             uid1 = uid(stoparea1.id)
             uid2 = uid(stoparea2.id)
             uid1, uid2 = sorted([uid1, uid2])
-            transfer_time = (TRANSFER_PENALTY
-                             + round(distance(stoparea1.center,
-                                              stoparea2.center)
-                                     / SPEED_ON_TRANSFER))
+            transfer_time = TRANSFER_PENALTY + round(
+                distance(stoparea1.center, stoparea2.center)
+                / SPEED_ON_TRANSFER
+            )
             pairwise_transfers[(uid1, uid2)] = transfer_time
             cache.add_transfer(uid1, uid2, transfer_time)

     cache.provide_transfers(pairwise_transfers)
     cache.save()

-    pairwise_transfers = [(stop1_uid, stop2_uid, transfer_time)
-                          for (stop1_uid, stop2_uid), transfer_time
-                          in pairwise_transfers.items()]
+    pairwise_transfers = [
+        (stop1_uid, stop2_uid, transfer_time)
+        for (stop1_uid, stop2_uid), transfer_time in pairwise_transfers.items()
+    ]

     result = {
         'stops': list(stops.values()),
         'transfers': pairwise_transfers,
-        'networks': networks
+        'networks': networks,
     }
     return result

@@ -19,7 +19,7 @@ In more detail, the script does the following:
 - Copies results onto remote server, if it is set up.

 During this procedure, as many steps are skipped as possible. Namely:
-- Making metro extract is skipped if $PLANET_METRO variable is set and the file exists.
+- Generation of metro extract is skipped if $PLANET_METRO variable is set and the file exists.
 - Update with osmupdate is skipped if $SKIP_PLANET_UPDATE or $SKIP_FILTERING is set.
 - Filtering is skipped if $SKIP_FILTERING is set and $FILTERED_DATA is set and the file exists.

@@ -54,17 +54,21 @@ class StationWrapper:
     def distance(self, other):
         """Calculate distance in meters."""
         dx = math.radians(self[0] - other['lon']) * math.cos(
-            0.5 * math.radians(self[1] + other['lat']))
+            0.5 * math.radians(self[1] + other['lat'])
+        )
         dy = math.radians(self[1] - other['lat'])
-        return 6378137 * math.sqrt(dx*dx + dy*dy)
+        return 6378137 * math.sqrt(dx * dx + dy * dy)


 def overpass_request(bbox):
     url = 'http://overpass-api.de/api/interpreter?data={}'.format(
-        urllib.parse.quote(QUERY.replace('{{bbox}}', bbox)))
+        urllib.parse.quote(QUERY.replace('{{bbox}}', bbox))
+    )
     response = urllib.request.urlopen(url, timeout=1000)
     if response.getcode() != 200:
-        raise Exception('Failed to query Overpass API: HTTP {}'.format(response.getcode()))
+        raise Exception(
+            'Failed to query Overpass API: HTTP {}'.format(response.getcode())
+        )
     reader = codecs.getreader('utf-8')
     return json.load(reader(response))['elements']

@@ -80,8 +84,11 @@ def add_stop_areas(src):
             stations[el_id(el)] = el

     for el in src:
-        if (el['type'] == 'relation' and 'tags' in el and
-                el['tags'].get('route', None) in ('subway', 'light_rail')):
+        if (
+            el['type'] == 'relation'
+            and 'tags' in el
+            and el['tags'].get('route', None) in ('subway', 'light_rail')
+        ):
             for m in el['members']:
                 st = stations.get(el_id(m), None)
                 if st and 'station' not in st['tags']:
@@ -91,7 +98,10 @@ def add_stop_areas(src):
     # Create a kd-tree out of subway stations
     stations = kdtree.create(dimensions=2)
     for el in src:
-        if 'tags' in el and el['tags'].get('station', None) in ('subway', 'light_rail'):
+        if 'tags' in el and el['tags'].get('station', None) in (
+            'subway',
+            'light_rail',
+        ):
             stations.add(StationWrapper(el))

     if stations.is_leaf:
@@ -105,13 +115,21 @@ def add_stop_areas(src):
             continue
         if 'station' in el['tags']:
             continue
-        if (el['tags'].get('railway', None) not in ('subway_entrance', 'platform') and
-                el['tags'].get('public_transport', None) not in ('platform', 'stop_position')):
+        if el['tags'].get('railway', None) not in (
+            'subway_entrance',
+            'platform',
+        ) and el['tags'].get('public_transport', None) not in (
+            'platform',
+            'stop_position',
+        ):
             continue
         coords = el.get('center', el)
         station = stations.search_nn((coords['lon'], coords['lat']))[0].data
         if station.distance(coords) < MAX_DISTANCE:
-            k = (station.station['id'], station.station['tags'].get('name', 'station_with_no_name'))
+            k = (
+                station.station['id'],
+                station.station['tags'].get('name', 'station_with_no_name'),
+            )
             # Disregard exits and platforms that are differently named
             if el['tags'].get('name', k[1]) == k[1]:
                 if k not in stop_areas:
@@ -120,7 +138,10 @@ def add_stop_areas(src):

     # Find existing stop_area relations for stations and remove these stations
     for el in src:
-        if el['type'] == 'relation' and el['tags'].get('public_transport', None) == 'stop_area':
+        if (
+            el['type'] == 'relation'
+            and el['tags'].get('public_transport', None) == 'stop_area'
+        ):
             found = False
             for m in el['members']:
                 if found:
@@ -141,18 +162,35 @@ def add_stop_areas(src):
         etree.SubElement(rel, 'tag', k='public_transport', v='stop_area')
         etree.SubElement(rel, 'tag', k='name', v=st[1])
         for m in members.values():
-            if m['tags'].get('railway', m['tags'].get('public_transport', None)) == 'platform':
+            if (
+                m['tags'].get(
+                    'railway', m['tags'].get('public_transport', None)
+                )
+                == 'platform'
+            ):
                 role = 'platform'
             elif m['tags'].get('public_transport', None) == 'stop_position':
                 role = 'stop'
             else:
                 role = ''
-            etree.SubElement(rel, 'member', ref=str(m['id']), type=m['type'], role=role)
+            etree.SubElement(
+                rel, 'member', ref=str(m['id']), type=m['type'], role=role
+            )

     # Add all downloaded elements
     for el in src:
         obj = etree.SubElement(root, el['type'])
-        for a in ('id', 'type', 'user', 'uid', 'version', 'changeset', 'timestamp', 'lat', 'lon'):
+        for a in (
+            'id',
+            'type',
+            'user',
+            'uid',
+            'version',
+            'changeset',
+            'timestamp',
+            'lat',
+            'lon',
+        ):
             if a in el:
                 obj.set(a, str(el[a]))
         if 'modified' in el:
@@ -162,8 +200,13 @@ def add_stop_areas(src):
                 etree.SubElement(obj, 'tag', k=k, v=v)
         if 'members' in el:
             for m in el['members']:
-                etree.SubElement(obj, 'member', ref=str(m['ref']),
-                                 type=m['type'], role=m.get('role', ''))
+                etree.SubElement(
+                    obj,
+                    'member',
+                    ref=str(m['ref']),
+                    type=m['type'],
+                    role=m.get('role', ''),
+                )
         if 'nodes' in el:
             for n in el['nodes']:
                 etree.SubElement(obj, 'nd', ref=str(n))
@@ -173,8 +216,14 @@ def add_stop_areas(src):

 if __name__ == '__main__':
     if len(sys.argv) < 2:
-        print('Read a JSON from Overpass and output JOSM OSM XML with added stop_area relations')
-        print('Usage: {} {{<export.json>|<bbox>}} [output.osm]'.format(sys.argv[0]))
+        print(
+            'Read a JSON from Overpass and output JOSM OSM XML with added stop_area relations'
+        )
+        print(
+            'Usage: {} {{<export.json>|<bbox>}} [output.osm]'.format(
+                sys.argv[0]
+            )
+        )
         sys.exit(1)

     if re.match(r'^[-0-9.,]+$', sys.argv[1]):

@@ -45,17 +45,21 @@ class StationWrapper:
     def distance(self, other):
         """Calculate distance in meters."""
         dx = math.radians(self[0] - other['lon']) * math.cos(
-            0.5 * math.radians(self[1] + other['lat']))
+            0.5 * math.radians(self[1] + other['lat'])
+        )
         dy = math.radians(self[1] - other['lat'])
-        return 6378137 * math.sqrt(dx*dx + dy*dy)
+        return 6378137 * math.sqrt(dx * dx + dy * dy)


 def overpass_request(bbox):
     url = 'http://overpass-api.de/api/interpreter?data={}'.format(
-        urllib.parse.quote(QUERY.replace('{{bbox}}', bbox)))
+        urllib.parse.quote(QUERY.replace('{{bbox}}', bbox))
+    )
     response = urllib.request.urlopen(url, timeout=1000)
     if response.getcode() != 200:
-        raise Exception('Failed to query Overpass API: HTTP {}'.format(response.getcode()))
+        raise Exception(
+            'Failed to query Overpass API: HTTP {}'.format(response.getcode())
+        )
     reader = codecs.getreader('utf-8')
     return json.load(reader(response))['elements']

@@ -91,7 +95,11 @@ def add_stop_areas(src):
     stop_areas = {}
     for el in src:
         # Only tram routes
-        if 'tags' not in el or el['type'] != 'relation' or el['tags'].get('route') != 'tram':
+        if (
+            'tags' not in el
+            or el['type'] != 'relation'
+            or el['tags'].get('route') != 'tram'
+        ):
             continue
         for m in el['members']:
             if el_id(m) not in elements:
@@ -102,16 +110,24 @@ def add_stop_areas(src):
             if pel['tags'].get('railway') == 'tram_stop':
                 continue
             coords = pel.get('center', pel)
-            station = stations.search_nn((coords['lon'], coords['lat']))[0].data
+            station = stations.search_nn(
+                (coords['lon'], coords['lat'])
+            )[0].data
             if station.distance(coords) < MAX_DISTANCE:
-                k = (station.station['id'], station.station['tags'].get('name', None))
+                k = (
+                    station.station['id'],
+                    station.station['tags'].get('name', None),
+                )
                 if k not in stop_areas:
                     stop_areas[k] = {el_id(station.station): station.station}
                 stop_areas[k][el_id(m)] = pel

     # Find existing stop_area relations for stations and remove these stations
     for el in src:
-        if el['type'] == 'relation' and el['tags'].get('public_transport', None) == 'stop_area':
+        if (
+            el['type'] == 'relation'
+            and el['tags'].get('public_transport', None) == 'stop_area'
+        ):
             found = False
             for m in el['members']:
                 if found:
@@ -133,12 +149,24 @@ def add_stop_areas(src):
         if st[1]:
             etree.SubElement(rel, 'tag', k='name', v=st[1])
         for m in members.values():
-            etree.SubElement(rel, 'member', ref=str(m['id']), type=m['type'], role='')
+            etree.SubElement(
+                rel, 'member', ref=str(m['id']), type=m['type'], role=''
+            )

     # Add all downloaded elements
     for el in src:
         obj = etree.SubElement(root, el['type'])
-        for a in ('id', 'type', 'user', 'uid', 'version', 'changeset', 'timestamp', 'lat', 'lon'):
+        for a in (
+            'id',
+            'type',
+            'user',
+            'uid',
+            'version',
+            'changeset',
+            'timestamp',
+            'lat',
+            'lon',
+        ):
             if a in el:
                 obj.set(a, str(el[a]))
         if 'modified' in el:
@@ -148,8 +176,13 @@ def add_stop_areas(src):
                 etree.SubElement(obj, 'tag', k=k, v=v)
         if 'members' in el:
             for m in el['members']:
-                etree.SubElement(obj, 'member', ref=str(m['ref']),
-                                 type=m['type'], role=m.get('role', ''))
+                etree.SubElement(
+                    obj,
+                    'member',
+                    ref=str(m['ref']),
+                    type=m['type'],
+                    role=m.get('role', ''),
+                )
         if 'nodes' in el:
             for n in el['nodes']:
                 etree.SubElement(obj, 'nd', ref=str(n))
@@ -159,8 +192,15 @@ def add_stop_areas(src):

 if __name__ == '__main__':
     if len(sys.argv) < 2:
-        print('Read a JSON from Overpass and output JOSM OSM XML with added stop_area relations')
-        print('Usage: {} {{<export.json>|<bbox>}} [output.osm]'.format(sys.argv[0]))
+        print(
+            'Read a JSON from Overpass and output JOSM OSM XML '
+            'with added stop_area relations'
+        )
+        print(
+            'Usage: {} {{<export.json>|<bbox>}} [output.osm]'.format(
+                sys.argv[0]
+            )
+        )
         sys.exit(1)

     if re.match(r'^[-0-9.,]+$', sys.argv[1]):

@@ -18,8 +18,11 @@ def convert():
         return 'No data from overpass, sorry.'
     result = add_stop_areas(src)
     response = make_response(result)
-    response.headers['Content-Disposition'] = 'attachment; filename="stop_areas.osm"'
+    response.headers['Content-Disposition'] = (
+        'attachment; filename="stop_areas.osm"'
+    )
     return response


 if __name__ == '__main__':
     app.run()

subway_io.py: 180 changed lines
@@ -26,9 +26,13 @@ def load_xml(f):
             elif sub.tag == 'nd':
                 nd.append(int(sub.get('ref')))
             elif sub.tag == 'member':
-                members.append({'type': sub.get('type'),
-                                'ref': int(sub.get('ref')),
-                                'role': sub.get('role', '')})
+                members.append(
+                    {
+                        'type': sub.get('type'),
+                        'ref': int(sub.get('ref')),
+                        'role': sub.get('role', ''),
+                    }
+                )
         if tags:
             el['tags'] = tags
         if nd:
@@ -44,13 +48,15 @@ def load_xml(f):
 _YAML_SPECIAL_CHARACTERS = "!&*{}[],#|>@`'\""
 _YAML_SPECIAL_SEQUENCES = ("- ", ": ", "? ")


 def _get_yaml_compatible_string(scalar):
     """Enclose string in single quotes in some cases"""
     string = str(scalar)
-    if (string and
-            (string[0] in _YAML_SPECIAL_CHARACTERS
-             or any(seq in string for seq in _YAML_SPECIAL_SEQUENCES)
-             or string.endswith(':'))):
+    if string and (
+        string[0] in _YAML_SPECIAL_CHARACTERS
+        or any(seq in string for seq in _YAML_SPECIAL_SEQUENCES)
+        or string.endswith(':')
+    ):
         string = string.replace("'", "''")
         string = "'{}'".format(string)
     return string
@@ -81,7 +87,9 @@ def dump_yaml(city, f):
     stops = set()
     routes = []
     for route in city:
-        stations = OrderedDict([(sa.transfer or sa.id, sa.name) for sa in route.stop_areas()])
+        stations = OrderedDict(
+            [(sa.transfer or sa.id, sa.name) for sa in route.stop_areas()]
+        )
         rte = {
             'type': route.mode,
             'ref': route.ref,
@@ -90,7 +98,7 @@ def dump_yaml(city, f):
             'infill': route.infill,
             'station_count': len(stations),
             'stations': list(stations.values()),
-            'itineraries': {}
+            'itineraries': {},
         }
         for variant in route:
             if INCLUDE_STOP_AREAS:
@@ -98,14 +106,22 @@ def dump_yaml(city, f):
                 for st in variant:
                     s = st.stoparea
                     if s.id == s.station.id:
-                        v_stops.append('{} ({})'.format(s.station.name, s.station.id))
+                        v_stops.append(
+                            '{} ({})'.format(s.station.name, s.station.id)
+                        )
                     else:
-                        v_stops.append('{} ({}) in {} ({})'.format(s.station.name, s.station.id,
-                                                                   s.name, s.id))
+                        v_stops.append(
+                            '{} ({}) in {} ({})'.format(
+                                s.station.name, s.station.id, s.name, s.id
+                            )
+                        )
             else:
-                v_stops = ['{} ({})'.format(
-                    s.stoparea.station.name,
-                    s.stoparea.station.id) for s in variant]
+                v_stops = [
+                    '{} ({})'.format(
+                        s.stoparea.station.name, s.stoparea.station.id
+                    )
+                    for s in variant
+                ]
             rte['itineraries'][variant.id] = v_stops
             stops.update(v_stops)
         routes.append(rte)
@@ -132,64 +148,73 @@ def make_geojson(city, tracks=True):
     for rmaster in city:
         for variant in rmaster:
             if not tracks:
-                features.append({
-                    'type': 'Feature',
-                    'geometry': {
-                        'type': 'LineString',
-                        'coordinates': [s.stop for s in variant],
-                    },
-                    'properties': {
-                        'ref': variant.ref,
-                        'name': variant.name,
-                        'stroke': variant.colour
-                    }
-                })
+                features.append(
+                    {
+                        'type': 'Feature',
+                        'geometry': {
+                            'type': 'LineString',
+                            'coordinates': [s.stop for s in variant],
+                        },
+                        'properties': {
+                            'ref': variant.ref,
+                            'name': variant.name,
+                            'stroke': variant.colour,
+                        },
+                    }
+                )
             elif variant.tracks:
-                features.append({
-                    'type': 'Feature',
-                    'geometry': {
-                        'type': 'LineString',
-                        'coordinates': variant.tracks,
-                    },
-                    'properties': {
-                        'ref': variant.ref,
-                        'name': variant.name,
-                        'stroke': variant.colour
-                    }
-                })
+                features.append(
+                    {
+                        'type': 'Feature',
+                        'geometry': {
+                            'type': 'LineString',
+                            'coordinates': variant.tracks,
+                        },
+                        'properties': {
+                            'ref': variant.ref,
+                            'name': variant.name,
+                            'stroke': variant.colour,
+                        },
+                    }
+                )
             for st in variant:
                 stops.add(st.stop)
                 stopareas.add(st.stoparea)

     for stop in stops:
-        features.append({
-            'type': 'Feature',
-            'geometry': {
-                'type': 'Point',
-                'coordinates': stop,
-            },
-            'properties': {
-                'marker-size': 'small',
-                'marker-symbol': 'circle'
-            }
-        })
+        features.append(
+            {
+                'type': 'Feature',
+                'geometry': {
+                    'type': 'Point',
+                    'coordinates': stop,
+                },
+                'properties': {
+                    'marker-size': 'small',
+                    'marker-symbol': 'circle',
+                },
+            }
+        )
     for stoparea in stopareas:
-        features.append({
-            'type': 'Feature',
-            'geometry': {
-                'type': 'Point',
-                'coordinates': stoparea.center,
-            },
-            'properties': {
-                'name': stoparea.name,
-                'marker-size': 'small',
-                'marker-color': '#ff2600' if stoparea in transfers else '#797979'
-            }
-        })
+        features.append(
+            {
+                'type': 'Feature',
+                'geometry': {
+                    'type': 'Point',
+                    'coordinates': stoparea.center,
+                },
+                'properties': {
+                    'name': stoparea.name,
+                    'marker-size': 'small',
+                    'marker-color': '#ff2600'
+                    if stoparea in transfers
+                    else '#797979',
+                },
+            }
+        )
     return {'type': 'FeatureCollection', 'features': features}


 def _dumps_route_id(route_id):
     """Argument is a route_id that depends on route colour and ref. Name
     can be taken from route_master or can be route's own, we don't take it
@@ -223,9 +248,11 @@ def read_recovery_data(path):
         return {}
     else:
         data = {
-            city_name: {_loads_route_id(route_id): route_data
-                        for route_id, route_data in routes.items()}
-            for city_name, routes in data.items()
+            city_name: {
+                _loads_route_id(route_id): route_data
+                for route_id, route_data in routes.items()
+            }
+            for city_name, routes in data.items()
         }
         return data

@@ -241,20 +268,24 @@ def write_recovery_data(path, current_data, cities):
             route_id = (route.colour, route.ref)
             itineraries = []
             for variant in route:
-                itin = {'stations': [],
-                        'name': variant.name,
-                        'from': variant.element['tags'].get('from'),
-                        'to': variant.element['tags'].get('to')}
+                itin = {
+                    'stations': [],
+                    'name': variant.name,
+                    'from': variant.element['tags'].get('from'),
+                    'to': variant.element['tags'].get('to'),
+                }
                 for stop in variant:
                     station = stop.stoparea.station
                     station_name = station.name
                     if station_name == '?' and station.int_name:
                         station_name = station.int_name
-                    itin['stations'].append({
-                        'oms_id': station.id,
-                        'name': station_name,
-                        'center': station.center
-                    })
+                    itin['stations'].append(
+                        {
+                            'oms_id': station.id,
+                            'name': station_name,
+                            'center': station.center,
+                        }
+                    )
                 if itin is not None:
                     itineraries.append(itin)
             routes[route_id] = itineraries
@@ -267,12 +298,13 @@ def write_recovery_data(path, current_data, cities):

     try:
         data = {
-            city_name: {_dumps_route_id(route_id): route_data
-                        for route_id, route_data in routes.items()}
+            city_name: {
+                _dumps_route_id(route_id): route_data
+                for route_id, route_data in routes.items()
+            }
             for city_name, routes in data.items()
         }
         with open(path, 'w', encoding='utf-8') as f:
             json.dump(data, f, ensure_ascii=False, indent=2)
     except Exception as e:
         logging.warning("Cannot write recovery data to '%s': %s", path, str(e))

File diff suppressed because it is too large
@@ -17,7 +17,7 @@ class CityData:
             'good_cities': 0,
             'total_cities': 1 if city else 0,
             'num_errors': 0,
-            'num_warnings': 0
+            'num_warnings': 0,
         }
         self.slug = None
         if city:
@@ -51,18 +51,34 @@ class CityData:
             return '1' if v1 == v2 else '0'

         for k in self.data:
-            s = s.replace('{'+k+'}', str(self.data[k]))
+            s = s.replace('{' + k + '}', str(self.data[k]))
         s = s.replace('{slug}', self.slug or '')
-        for k in ('subwayl', 'lightrl', 'stations', 'transfers', 'busl',
-                  'trolleybusl', 'traml', 'otherl'):
-            if k+'_expected' in self.data:
-                s = s.replace('{='+k+'}',
-                              test_eq(self.data[k+'_found'], self.data[k+'_expected']))
-        s = s.replace('{=cities}',
-                      test_eq(self.data['good_cities'], self.data['total_cities']))
-        s = s.replace('{=entrances}', test_eq(self.data['unused_entrances'], 0))
+        for k in (
+            'subwayl',
+            'lightrl',
+            'stations',
+            'transfers',
+            'busl',
+            'trolleybusl',
+            'traml',
+            'otherl',
+        ):
+            if k + '_expected' in self.data:
+                s = s.replace(
+                    '{=' + k + '}',
+                    test_eq(
+                        self.data[k + '_found'], self.data[k + '_expected']
+                    ),
+                )
+        s = s.replace(
+            '{=cities}',
+            test_eq(self.data['good_cities'], self.data['total_cities']),
+        )
+        s = s.replace(
+            '{=entrances}', test_eq(self.data['unused_entrances'], 0)
+        )
         for k in ('errors', 'warnings'):
-            s = s.replace('{='+k+'}', test_eq(self.data['num_'+k], 0))
+            s = s.replace('{=' + k + '}', test_eq(self.data['num_' + k], 0))
         return s

@@ -72,10 +88,19 @@ def tmpl(s, data=None, **kwargs):
     if kwargs:
         for k, v in kwargs.items():
             if v is not None:
-                s = s.replace('{'+k+'}', str(v))
-            s = re.sub(r'\{\?'+k+r'\}(.+?)\{end\}', r'\1' if v else '', s, flags=re.DOTALL)
+                s = s.replace('{' + k + '}', str(v))
+            s = re.sub(
+                r'\{\?' + k + r'\}(.+?)\{end\}',
+                r'\1' if v else '',
+                s,
+                flags=re.DOTALL,
+            )
     s = s.replace('{date}', date)
-    google_url = 'https://docs.google.com/spreadsheets/d/{}/edit?usp=sharing'.format(SPREADSHEET_ID)
+    google_url = (
+        'https://docs.google.com/spreadsheets/d/{}/edit?usp=sharing'.format(
+            SPREADSHEET_ID
+        )
+    )
     s = s.replace('{google}', google_url)
     return s

@@ -88,13 +113,18 @@ RE_COORDS = re.compile(r'\((-?\d+\.\d+), (-?\d+\.\d+)\)')

 def osm_links(s):
     """Converts object mentions to HTML links."""
+
     def link(m):
         return '<a href="https://www.openstreetmap.org/{}/{}">{}</a>'.format(
-            EXPAND_OSM_TYPE[m.group(1)[0]], m.group(2), m.group(0))
+            EXPAND_OSM_TYPE[m.group(1)[0]], m.group(2), m.group(0)
+        )
+
     s = RE_SHORT.sub(link, s)
     s = RE_FULL.sub(link, s)
     s = RE_COORDS.sub(
-        r'(<a href="https://www.openstreetmap.org/search?query=\2%2C\1#map=18/\2/\1">pos</a>)', s)
+        r'(<a href="https://www.openstreetmap.org/search?query=\2%2C\1#map=18/\2/\1">pos</a>)',
+        s,
+    )
     return s

@@ -104,7 +134,9 @@ def esc(s):

 if len(sys.argv) < 2:
     print('Reads a log from subway validator and prepares HTML files.')
-    print('Usage: {} <validation.log> [<target_directory>]'.format(sys.argv[0]))
+    print(
+        'Usage: {} <validation.log> [<target_directory>]'.format(sys.argv[0])
+    )
     sys.exit(1)

 with open(sys.argv[1], 'r', encoding='utf-8') as f:
@@ -131,27 +163,67 @@ for continent in sorted(continents.keys()):
     content = ''
     for country in sorted(c_by_c[continent]):
         country_file_name = country.lower().replace(' ', '-') + '.html'
-        content += tmpl(INDEX_COUNTRY, countries[country], file=country_file_name,
-                        country=country, continent=continent)
-        country_file = open(os.path.join(path, country_file_name), 'w', encoding='utf-8')
-        country_file.write(tmpl(COUNTRY_HEADER, country=country, continent=continent,
-                                overground=overground, subways=not overground))
+        content += tmpl(
+            INDEX_COUNTRY,
+            countries[country],
+            file=country_file_name,
+            country=country,
+            continent=continent,
+        )
+        country_file = open(
+            os.path.join(path, country_file_name), 'w', encoding='utf-8'
+        )
+        country_file.write(
+            tmpl(
+                COUNTRY_HEADER,
+                country=country,
+                continent=continent,
+                overground=overground,
+                subways=not overground,
+            )
+        )
         for name, city in sorted(data.items()):
             if city.country == country:
                 file_base = os.path.join(path, city.slug)
-                yaml_file = city.slug + '.yaml' if os.path.exists(file_base + '.yaml') else None
-                json_file = city.slug + '.geojson' if os.path.exists(
-                    file_base + '.geojson') else None
+                yaml_file = (
+                    city.slug + '.yaml'
+                    if os.path.exists(file_base + '.yaml')
+                    else None
+                )
+                json_file = (
+                    city.slug + '.geojson'
+                    if os.path.exists(file_base + '.geojson')
+                    else None
+                )
                 e = '<br>'.join([osm_links(esc(e)) for e in city.errors])
                 w = '<br>'.join([osm_links(esc(w)) for w in city.warnings])
-                country_file.write(tmpl(COUNTRY_CITY, city,
-                                        city=name, country=country, continent=continent,
-                                        yaml=yaml_file, json=json_file, subways=not overground,
-                                        errors=e, warnings=w, overground=overground))
-        country_file.write(tmpl(COUNTRY_FOOTER, country=country, continent=continent))
+                country_file.write(
+                    tmpl(
+                        COUNTRY_CITY,
+                        city,
+                        city=name,
+                        country=country,
+                        continent=continent,
+                        yaml=yaml_file,
+                        json=json_file,
+                        subways=not overground,
+                        errors=e,
+                        warnings=w,
+                        overground=overground,
+                    )
+                )
+        country_file.write(
+            tmpl(COUNTRY_FOOTER, country=country, continent=continent)
+        )
         country_file.close()
-    index.write(tmpl(INDEX_CONTINENT, continents[continent],
-                     content=content, continent=continent))
+    index.write(
+        tmpl(
+            INDEX_CONTINENT,
+            continents[continent],
+            content=content,
+            continent=continent,
+        )
+    )

 index.write(tmpl(INDEX_FOOTER))
 index.close()
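A quick way to confirm that a formatting-only commit like this one is behavior-neutral is to compare the syntax trees of a file before and after reformatting. A minimal sketch, assuming the two revisions of a file have been saved to the hypothetical paths before.py and after.py:

    import ast

    # ast.dump() omits line and column info by default, so a pure
    # reformatting produces an identical dump of the parse tree.
    with open('before.py', encoding='utf-8') as f:
        old_tree = ast.dump(ast.parse(f.read()))
    with open('after.py', encoding='utf-8') as f:
        new_tree = ast.dump(ast.parse(f.read()))
    assert old_tree == new_tree

Black performs an equivalent AST check internally on every file it rewrites, so this is a belt-and-suspenders verification rather than a required step.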