Support for writing multiple yaml and geojson files at once, fixes #28
parent 36837b794e · commit ee8b9171c9
3 changed files with 68 additions and 18 deletions
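
In short: -d/--dump and -j/--json no longer take a writable file object from argparse but a plain path, and when that path is a directory the script writes one file per validated city, named from a slugified city name. A minimal sketch of that behaviour, assuming a list of city objects exposing .name (the helper names write_city_files and make_city_doc are illustrative, not part of the patch):

import json
import os
import re


def slugify(name):
    # Same normalization the patch introduces: lowercase, spaces to
    # underscores, anything outside [a-z0-9_-] dropped.
    return re.sub(r'[^a-z0-9_-]+', '', name.lower().replace(' ', '_'))


def write_city_files(cities, target, make_city_doc, ext='.geojson'):
    # Illustrative helper: one JSON document per city when target is a
    # directory, a single document when there is exactly one city.
    if os.path.isdir(target):
        for city in cities:
            fname = os.path.join(target, slugify(city.name) + ext)
            with open(fname, 'w', encoding='utf-8') as f:
                json.dump(make_city_doc(city), f)
    elif len(cities) == 1:
        with open(target, 'w', encoding='utf-8') as f:
            json.dump(make_city_doc(cities[0]), f)
    else:
        raise ValueError('cannot write %d cities to a single file' % len(cities))
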
@@ -3,6 +3,7 @@ import argparse
 import json
 import logging
 import os
+import re
 import sys
 import time
 import urllib.parse
@@ -272,6 +273,10 @@ def make_geojson(city, tracks=True):
     return {'type': 'FeatureCollection', 'features': features}
 
 
+def slugify(name):
+    return re.sub(r'[^a-z0-9_-]+', '', name.lower().replace(' ', '_'))
+
+
 OSM_TYPES = {'n': (0, 'node'), 'w': (2, 'way'), 'r': (3, 'relation')}
 SPEED_TO_ENTRANCE = 4  # km/h
 SPEED_ON_TRANSFER = 3.5
@@ -279,7 +284,7 @@ SPEED_ON_LINE = 40
 DEFAULT_INTERVAL = 150  # seconds
 
 
-def prepare_mapsme_data(transfers, cities):
+def prepare_mapsme_data(transfers, cities, cache_name):
     def uid(elid, typ=None):
         t = elid[0]
         osm_id = int(elid[1:])
@@ -292,8 +297,20 @@ def prepare_mapsme_data(transfers, cities):
     def format_colour(c):
         return c[1:] if c else None
 
+    cache = {}
+    if cache_name:
+        with open(cache_name, 'r', encoding='utf-8') as f:
+            cache = json.load(f)
+
     stops = {}  # el_id -> station data
     networks = []
+
+    good_cities = set([c.name for c in cities])
+    for city_name, data in cache.items():
+        if city_name in good_cities:
+            continue
+        # TODO: get a network, stops and transfers from cache
+
     for city in cities:
         network = {'network': city.name, 'routes': [], 'agency_id': city.id}
         for route in city:
@@ -355,6 +372,10 @@ def prepare_mapsme_data(transfers, cities):
                                 t[t_second].center)*3.6/SPEED_ON_TRANSFER)
             ])
 
+    if cache_name:
+        with open(cache_name, 'w', encoding='utf-8') as f:
+            json.dump(cache, f)
+
     result = {
         'stops': m_stops,
         'transfers': c_transfers,
@@ -379,10 +400,9 @@ if __name__ == '__main__':
                         help='Validation JSON file name')
     parser.add_argument('-o', '--output', type=argparse.FileType('w', encoding='utf-8'),
                         help='JSON file for MAPS.ME')
-    parser.add_argument('-d', '--dump', type=argparse.FileType('w', encoding='utf-8'),
-                        help='Make a YAML file for a city data')
-    parser.add_argument('-j', '--json', type=argparse.FileType('w', encoding='utf-8'),
-                        help='Make a GeoJSON file for a city data')
+    parser.add_argument('--cache', help='Cache file name for MAPS.ME data')
+    parser.add_argument('-d', '--dump', help='Make a YAML file for a city data')
+    parser.add_argument('-j', '--json', help='Make a GeoJSON file for a city data')
     parser.add_argument('--crude', action='store_true',
                         help='Do not use OSM railway geometry for GeoJSON')
     options = parser.parse_args()
@@ -451,26 +471,42 @@ if __name__ == '__main__':
 
     logging.info('%s good cities: %s', len(good_cities), ', '.join([c.name for c in good_cities]))
 
-    if options.log:
-        res = [x.get_validation_result() for x in cities]
-        json.dump(res, options.log)
-
     if options.entrances:
         json.dump(get_unused_entrances_geojson(osm), options.entrances)
 
     if options.dump:
-        if len(cities) == 1:
-            dump_data(cities[0], options.dump)
+        if os.path.isdir(options.dump):
+            for c in cities:
+                with open(os.path.join(options.dump, slugify(c.name) + '.yaml'),
+                          'w', encoding='utf-8') as f:
+                    dump_data(c, f)
+        elif len(cities) == 1:
+            with open(options.dump, 'w', encoding='utf-8') as f:
+                dump_data(cities[0], f)
         else:
             logging.error('Cannot dump %s cities at once', len(cities))
 
     if options.json:
-        if len(cities) == 1:
-            json.dump(make_geojson(cities[0], not options.crude), options.json)
+        if os.path.isdir(options.json):
+            for c in cities:
+                with open(os.path.join(options.dump, slugify(c.name) + '.geojson'),
+                          'w', encoding='utf-8') as f:
+                    json.dump(make_geojson(c, not options.crude), f)
+        elif len(cities) == 1:
+            with open(options.json, 'w', encoding='utf-8') as f:
+                json.dump(make_geojson(cities[0], not options.crude), f)
         else:
            logging.error('Cannot make a json of %s cities at once', len(cities))
 
+    if options.log:
+        res = []
+        for c in cities:
+            v = c.get_validation_result()
+            v['slug'] = slugify(c.name)
+            res.append(v)
+        json.dump(res, options.log)
+
     # Finally, prepare a JSON file for MAPS.ME
     if options.output:
-        json.dump(prepare_mapsme_data(transfers, good_cities), options.output,
-                  indent=1, ensure_ascii=False)
+        json.dump(prepare_mapsme_data(transfers, good_cities, options.cache),
+                  options.output, indent=1, ensure_ascii=False)
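
The new --cache option threads a file name into prepare_mapsme_data(), which now loads it as JSON on entry and dumps it back on exit; the TODO above suggests cached entries will later backfill cities that are missing from the current run. A rough sketch of just that round-trip, assuming the cache is a plain JSON object keyed by city name (load_cache and save_cache are illustrative names, not the project's API):

import json


def load_cache(cache_name):
    # No cache requested: start from an empty mapping.
    if not cache_name:
        return {}
    with open(cache_name, 'r', encoding='utf-8') as f:
        return json.load(f)


def save_cache(cache, cache_name):
    # Persist the (possibly updated) mapping only when a name was given.
    if cache_name:
        with open(cache_name, 'w', encoding='utf-8') as f:
            json.dump(cache, f)
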
@@ -29,6 +29,10 @@ th {
 .color1 {
   background: lightgreen;
 }
+.hlink {
+  color: #888;
+  opacity: 0.5;
+}
 </style>
 '''
 
@@ -119,8 +123,8 @@ COUNTRY_HEADER = '''
 '''.replace('(s)', STYLE)
 
 COUNTRY_CITY = '''
-<tr>
-<td class="bold color{good_cities}">{city}</td>
+<tr id="{slug}">
+<td class="bold color{good_cities}">{city}{?yaml} <a href="{yaml}" class="hlink">Y</a>{end}{?json} <a href="{json}" class="hlink">J</a>{end}</td>
 <td class="color{=subwayl}">sub: {subwayl_found} / {subwayl_expected}</td>
 <td class="color{=lightrl}">lr: {lightrl_found} / {lightrl_expected}</td>
 <td class="color{=stations}">st: {stations_found} / {stations_expected}</td>

@@ -19,7 +19,9 @@ class CityData:
             'num_errors': 0,
             'num_warnings': 0
         }
+        self.slug = None
         if city:
+            self.slug = city['slug']
             self.country = city['country']
             self.continent = city['continent']
             self.errors = city['errors']
@@ -50,6 +52,7 @@ class CityData:
 
         for k in self.data:
             s = s.replace('{'+k+'}', str(self.data[k]))
+        s = s.replace('{slug}', self.slug or '')
         for k in ('subwayl', 'lightrl', 'stations', 'transfers'):
             s = s.replace('{='+k+'}',
                           test_eq(self.data[k+'_found'], self.data[k+'_expected']))
@@ -66,7 +69,9 @@ def tmpl(s, data=None, **kwargs):
         s = data.format(s)
     if kwargs:
         for k, v in kwargs.items():
-            s = s.replace('{'+k+'}', v)
+            if v is not None:
+                s = s.replace('{'+k+'}', str(v))
+            s = re.sub(r'\{\?'+k+r'\}(.+?)\{end\}', r'\1' if v else '', s)
     s = s.replace('{date}', date)
     google_url = 'https://docs.google.com/spreadsheets/d/{}/edit?usp=sharing'.format(SPREADSHEET_ID)
     s = s.replace('{google}', google_url)
@@ -129,10 +134,15 @@ for continent in sorted(continents.keys()):
         country_file.write(tmpl(COUNTRY_HEADER, country=country, continent=continent))
         for name, city in sorted(data.items()):
             if city.country == country:
+                file_base = os.path.join(path, city.slug)
+                yaml_file = city.slug + '.yaml' if os.path.exists(file_base + '.yaml') else None
+                json_file = city.slug + '.geojson' if os.path.exists(
+                    file_base + '.geojson') else None
                 e = '<br>'.join([osm_links(esc(e)) for e in city.errors])
                 w = '<br>'.join([osm_links(esc(w)) for w in city.warnings])
                 country_file.write(tmpl(COUNTRY_CITY, city,
                                         city=name, country=country, continent=continent,
+                                        yaml=yaml_file, json=json_file,
                                         errors=e, warnings=w))
         country_file.write(tmpl(COUNTRY_FOOTER, country=country, continent=continent))
         country_file.close()
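
The report templates gain a small conditional placeholder syntax: a {?key}...{end} block survives only when the corresponding value is truthy, which is how a city row links to its YAML/GeoJSON file only when that file was actually written. A standalone sketch of the expansion performed by the added re.sub in tmpl() (expand_conditional and the sample row are illustrative):

import re


def expand_conditional(s, key, value):
    # Substitute '{key}' when the value is not None, then keep or drop the
    # '{?key}...{end}' block depending on whether the value is truthy.
    if value is not None:
        s = s.replace('{' + key + '}', str(value))
    return re.sub(r'\{\?' + key + r'\}(.+?)\{end\}', r'\1' if value else '', s)


row = '<td>{city}{?yaml} <a href="{yaml}" class="hlink">Y</a>{end}</td>'
row = expand_conditional(row, 'yaml', 'moscow.yaml')
row = expand_conditional(row, 'city', 'Moscow')
print(row)  # <td>Moscow <a href="moscow.yaml" class="hlink">Y</a></td>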