Merge pull request #7 from alexey-zakharenkov/master-az

Autosplit and more
This commit is contained in:
jbenua 2020-12-08 16:08:00 +03:00 committed by GitHub
commit 273f208e75
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
66 changed files with 5467 additions and 2649 deletions

4
.gitignore vendored Normal file
View file

@ -0,0 +1,4 @@
__pycache__
*.pyc
.idea
nohup.out

View file

@ -2,18 +2,42 @@
В этих каталогах лежит набор инструментов для редактирования набора границ
в формате [Osmosis Poly](http://wiki.openstreetmap.org/wiki/Osmosis/Polygon_Filter_File_Format).
Для работы требуется база данных PostgreSQL + PostGIS, инициализированная из
файла `scripts/borders.sql`. Для оценки размера файла MWM нужно заполнить
таблицу `tiles` из файла планеты (см. `scripts/process_planet.sh`).
## Развёртывание в Docker
Самое простое — запустить систему в Docker-контейнерах.
#### Предварительные требования
* Должен быть установлен docker https://docs.docker.com/engine/install/
* и docker-compose https://docs.docker.com/compose/install/
* Для всей планеты во время сборки необходимо ~200 Гб дискового пространства
(после сборки — 30 Гб), разворачивание может длиться около суток.
#### Настройка сборки
В файле docker-compose.yaml нужно выставить желаемый порт, на котором будет
работать веб-интерфейс (сейчас это число 8081 в строке "8081:80"),
и URL с файлом планеты в переменной PLANET_URL. Переменные PLANET_URL_<suffix>
не используются, это просто примеры. Для тестирования рекомендуется подставить
в PLANET_URL небольшой файл, тогда вся сборка займёт несколько минут.
## Развёртывание вручную
Для работы требуется база данных PostgreSQL + PostGIS, инициализированная
скриптами из каталога `db`. Последовательность выполнения скриптов и необходимые
переменные окружения см. в `db/Dockerfile.db` и `docker-compose.yaml`.
Для оценки размера файла MWM нужно заполнить
таблицу `tiles` из файла планеты (см. скрипты `db/*tiles*.sh`).
Также для обновления и замены границ из OpenStreetMap желательно импортировать
таблицу `osm_borders` — см. `scripts/osm_borders.sh`. Начальный набор границ
для редактирования можно либо загрузить скриптом `scripts/poly2postgis.py`,
либо скопировать из таблицы `osm_borders` по, например, `admin_level=2`.
таблицу `osm_borders` — см. `db/prepare_borders.sh` и `db/load_borders.sh`.
Начальный набор границ для редактирования можно либо загрузить скриптом
`scripts/poly2postgis.py`, либо скопировать из таблицы `osm_borders` по,
например, `admin_level=2`.
После редактирования набор файлов `poly` создаст скрипт `scripts/export_poly.py`.
После редактирования набор файлов `poly` создаст скрипт `scripts/export_poly.py`
или ссылка *Скачать poly - всё* в веб-интерфейсе.
## Серверная часть
#### Серверная часть
Два скрипта в каталоге `server` должны работать постоянно на фоне.
@ -25,13 +49,14 @@
в столбце количества данных, и найдя их, пересчитывает. Запустите, если нужна
оценка размера MWM.
## Веб-интерфейс
#### Веб-интерфейс
Файлы в каталоге `www` не требуют каких-либо интерпретаторов или выделенных серверов:
просто откройте `index.html` в браузере. На карте нарисованы границы, по клику
на границу панель справа наполнится кнопками. Оттуда можно разрезать и склеивать
границы, переименовывать их, заменять и дополнять из таблицы `osm_borders`,
а также экспортировать в JOSM для сложных модификаций.
Файлы в каталоге `web/app/static` не требуют каких-либо интерпретаторов или
выделенных серверов: просто откройте `index.html` в браузере. На карте
нарисованы границы, по клику на границу панель справа наполнится кнопками.
Оттуда можно разрезать и склеивать границы, переименовывать их, заменять и
дополнять из таблицы `osm_borders`, а также экспортировать в JOSM для сложных
модификаций.
## Автор и лицензия

35
db/Dockerfile.db Normal file
View file

@ -0,0 +1,35 @@
# Builds a PostgreSQL 12 + PostGIS image that downloads an OSM planet file at
# image build time, pre-filters it, and loads borders/tiles data through the
# standard /docker-entrypoint-initdb.d mechanism on first container start.
FROM postgres:12
WORKDIR /borders/
# osmctools (osmconvert/osmfilter) and osm2pgsql filter and load OSM data;
# python3-psycopg2 is needed by tiles2pg.py.
RUN apt-get update && apt-get install -y \
wget \
postgresql-contrib `# contains hstore extension` \
postgresql-12-postgis-3 \
osmctools \
osm2pgsql \
python3 \
python3-psycopg2
# PLANET_URL must be supplied as a build arg (see docker-compose.yaml).
ARG PLANET_URL=${PLANET_URL}
ENV PLANET=planet-file
# The planet file is baked into the image; per the README the full planet
# needs on the order of 200 GB of disk during the build.
RUN wget "${PLANET_URL}" -O "${PLANET}"
ENV FILTERED_PLANET=${PLANET}-filtered.o5m
COPY prepare_borders.sh tiles2pg.py prepare_tiles.sh ./
# Heavy preprocessing happens at build time so first start only loads data.
RUN ["/bin/bash", "prepare_borders.sh"]
RUN ["/bin/bash", "prepare_tiles.sh"]
RUN chmod a+w /borders/
# Initdb scripts run in lexicographic order on the first container start.
COPY init_databases.sh /docker-entrypoint-initdb.d/00-init_databases.sh
COPY create_extensions.sql /docker-entrypoint-initdb.d/01-create_extensions.sql
COPY load_borders.sh /docker-entrypoint-initdb.d/10-load_borders.sh
COPY create_tables.sql /docker-entrypoint-initdb.d/20-create_tables.sql
COPY load_tiles.sh /docker-entrypoint-initdb.d/30-load_tiles.sh
COPY create_osm_places_table.sql /docker-entrypoint-initdb.d/40-create_osm_places_table.sql
COPY load_osm_places_table.sh /docker-entrypoint-initdb.d/41-load_osm_places_table.sh

7
db/create_extensions.sql Normal file
View file

@ -0,0 +1,7 @@
-- Initdb step 01: enable required extensions in both databases.
-- "gis" receives raw OSM data via osm2pgsql and needs postgis + hstore
-- (osm2pgsql is run with --hstore); "borders" only needs postgis.
\c gis
CREATE EXTENSION postgis;
CREATE EXTENSION hstore;
\c borders
CREATE EXTENSION postgis;

View file

@ -0,0 +1,3 @@
-- Spatial (GiST) index on tiles to speed up bounding-box tile lookups.
-- NOTE(review): load_tiles.sh also creates ${TABLE}_idx after loading;
-- confirm only one of these two runs, or the second CREATE INDEX will fail.
\c borders borders
CREATE INDEX tiles_idx ON tiles USING gist (tile);

View file

@ -0,0 +1,86 @@
\c gis postgres

----------- Collect city polygons
-- One row per (osm_id, place): all polygon parts are collected into a single
-- [multi]polygon; ST_Buffer(..., 0) repairs invalid geometries after ST_Collect.
CREATE TABLE osm_places AS
SELECT
    osm_id,
    place,
    'polygon'::text AS g_type, -- geometry_type
    -- Keep population only when it is purely numeric after stripping
    -- spaces, dots and commas used as digit separators.
    max(CASE
            WHEN regexp_replace(population, '[ .,]+', '', 'g') ~ '^\d+$'
            THEN regexp_replace(population, '[ .,]+', '', 'g')::int
            ELSE NULL
        END
    ) AS population,
    ST_Buffer(ST_Transform(ST_Collect(way),4326), 0) AS way,
    coalesce(max("name"), max("name:en")) AS name
FROM planet_osm_polygon
WHERE place IN ('city', 'town', 'village', 'hamlet', 'isolated_dwelling')
GROUP BY osm_id, place;

----------- Collect city nodes
INSERT INTO osm_places
SELECT
    osm_id,
    place,
    'point'::text AS g_type, -- geometry_type
    CASE
        WHEN regexp_replace(population, '[ .,]+', '', 'g') ~ '^\d+$'
        THEN regexp_replace(population, '[ .,]+', '', 'g')::int
        ELSE NULL
    END AS population,
    ST_Transform(way,4326) AS way,
    coalesce("name", "name:en") AS name
FROM planet_osm_point
WHERE place IN ('city', 'town', 'village', 'hamlet', 'isolated_dwelling');

CREATE INDEX osm_places_gist_idx ON osm_places USING gist (way);

-- Update node population with polygon population where
-- the polygon duplicates the node and node has no population
UPDATE osm_places
SET population = q.max_population
FROM (
    SELECT n.osm_id node_id,
           greatest(p.population, n.population) max_population
    FROM osm_places n, osm_places p
    WHERE p.g_type='polygon'
      AND n.g_type='point'
      AND ST_Contains(p.way, n.way)
      AND (strpos(n.name, p.name) > 0 OR strpos(p.name, n.name) > 0)
) q
WHERE g_type='point' AND osm_id = q.node_id;

-- Delete polygons where exists a node within it with the same name
DELETE FROM osm_places
WHERE g_type='polygon'
  AND osm_id IN (SELECT p.osm_id
                 FROM osm_places n, osm_places p
                 WHERE p.g_type='polygon'
                   AND n.g_type='point'
                   AND ST_Contains(p.way, n.way)
                   AND (strpos(n.name, p.name) > 0 OR strpos(p.name, n.name) > 0));

-- Convert [multi]polygons to points - for further faster requests "is city in region"
ALTER TABLE osm_places ADD COLUMN center geometry;
UPDATE osm_places
SET center = (
    CASE
        WHEN ST_Contains(way, ST_Centroid(way)) -- true for 99% of polygons
        THEN ST_Centroid(way)
        -- Fix: the original fallback took an arbitrary boundary vertex
        -- ((ST_DumpPoints(way)).geom ... LIMIT 1), which lies on the boundary
        -- and can fall outside the region it represents for concave shapes.
        -- ST_PointOnSurface is guaranteed to return a point inside the polygon.
        ELSE ST_PointOnSurface(way)
    END);

CREATE INDEX osm_places_center_gist_idx ON osm_places USING gist (center);
DROP INDEX osm_places_gist_idx;
ALTER TABLE osm_places DROP COLUMN way;

43
db/create_tables.sql Normal file
View file

@ -0,0 +1,43 @@
\c borders borders

-- Node-count tiles (0.01x0.01 degree squares) used to estimate MWM sizes.
CREATE TABLE tiles (
    tile geometry NOT NULL,
    count INTEGER NOT NULL DEFAULT 0
);

-- Editable borders; parent_id links a region to its parent in the hierarchy.
CREATE TABLE borders (
    id BIGINT PRIMARY KEY,
    parent_id BIGINT REFERENCES borders(id),
    name VARCHAR(200),
    geom geometry NOT NULL,
    disabled boolean NOT NULL DEFAULT FALSE,
    count_k INTEGER,
    modified TIMESTAMP NOT NULL,
    cmnt VARCHAR(500),
    mwm_size_est REAL
);
-- Fix: the original index name misspelled "gist" as "gits".
CREATE INDEX borders_geom_gist_idx ON borders USING gist (geom);
CREATE INDEX borders_parent_id_idx ON borders (parent_id);

-- Named snapshots of the borders table; "backup" is the snapshot key.
CREATE TABLE borders_backup (
    backup VARCHAR(30) NOT NULL,
    id BIGINT NOT NULL,
    parent_id BIGINT,
    name VARCHAR(200) NOT NULL,
    geom geometry NOT NULL,
    disabled boolean NOT NULL DEFAULT FALSE,
    count_k INTEGER,
    modified TIMESTAMP NOT NULL,
    cmnt VARCHAR(500),
    mwm_size_est REAL,
    PRIMARY KEY (backup, id)
);

-- Cached auto-splitting results for a parent region at a given size threshold.
-- osm_borders must already exist (it is created by 10-load_borders.sh, which
-- runs before this script in /docker-entrypoint-initdb.d order).
CREATE TABLE splitting (
    osm_border_id BIGINT NOT NULL REFERENCES osm_borders(osm_id), -- reference to parent osm region
    subregion_ids BIGINT[] NOT NULL,
    mwm_size_est REAL NOT NULL,
    mwm_size_thr INTEGER NOT NULL, -- mwm size threshold in Kb, 4-bytes INTEGER is enough
    geom geometry NOT NULL
);
CREATE INDEX splitting_idx ON splitting (osm_border_id, mwm_size_thr);

9
db/init_databases.sh Normal file
View file

@ -0,0 +1,9 @@
#!/bin/bash
# Initdb step 00: creates the application role and the two databases used by
# the stack:
#   gis     - raw OSM data loaded by osm2pgsql
#   borders - the border editor's own tables
# NOTE(review): the 'borders' password is hard-coded; acceptable for a local
# docker-compose setup, but confirm before any shared deployment.
set -e
psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" <<-EOSQL
CREATE USER borders WITH PASSWORD 'borders';
CREATE DATABASE gis;
CREATE DATABASE borders;
GRANT ALL PRIVILEGES ON DATABASE borders TO borders;
EOSQL

70
db/load_borders.sh Executable file
View file

@ -0,0 +1,70 @@
#!/bin/bash
# Initdb step 10: loads the filtered planet extract ($FILTERED_PLANET,
# produced at image build time by prepare_borders.sh) into the "gis" database
# with osm2pgsql, builds the osm_borders table from administrative
# boundaries, and copies that table into the "borders" database.
set -e
OSM2PGSQL=osm2pgsql
DATABASE=gis
DATABASE_BORDERS=borders
OSM2PGSQL_KEYS='--cache 2000 --number-processes 6'
# Leave empty to have a minimal style file generated below.
OSM2PGSQL_STYLE=
# macOS (Darwin) and GNU variants of which/mktemp take different flags.
if [[ "`uname`" == 'Darwin' ]]; then
WHICH='which -s'
MKTEMP='mktemp -t '
else
WHICH=which
MKTEMP='mktemp --suff='
fi
if ! $WHICH $OSM2PGSQL; then
echo "No osm2pgsql found."
exit 1
fi
# Load filtered data into an osm2pgsql database
echo Loading data into the database
# Creating a style file if we weren't provided with one
if [ -z "$OSM2PGSQL_STYLE" ]; then
OSM2PGSQL_STYLE=$(${MKTEMP}osm2pgsql_style)
OSM2PGSQL_STYLE_TMP=1
cat > $OSM2PGSQL_STYLE <<EOSTYLE
way admin_level text polygon
way area text
way boundary text polygon
node,way name text linear
node,way name:en text linear
node,way name:ru text linear
node,way place text polygon
node,way population text linear
EOSTYLE
fi
$OSM2PGSQL --slim --drop --hstore --style $OSM2PGSQL_STYLE -d $DATABASE \
-r o5m $OSM2PGSQL_KEYS $FILTERED_PLANET
# NOTE(review): because of `set -e` above, the script already exits here if
# osm2pgsql fails, so $RET is always 0 and the `exit 3` check below is
# effectively dead code; confirm whether set -e or the RET check is intended.
RET=$?
rm -f $FILTERED_PLANET
if [ "$OSM2PGSQL_STYLE_TMP" == "1" ]; then
rm -f $OSM2PGSQL_STYLE
fi
[ $RET != 0 ] && exit 3
# Make osm_borders table
# Only relation-derived polygons (osm_id < 0 in osm2pgsql output) with a
# name at admin levels 2-7 are kept.
echo Creating osm_borders table
psql $DATABASE -c "
DROP TABLE IF EXISTS osm_borders;
CREATE TABLE osm_borders AS
SELECT osm_id,
ST_Buffer(ST_Transform(ST_Collect(way),4326), 0) AS way,
admin_level::INT AS admin_level,
coalesce(max(\"name:en\"), max(name)) AS name
FROM planet_osm_polygon
WHERE boundary='administrative' AND osm_id < 0 AND admin_level IN ('2', '3', '4', '5', '6', '7')
GROUP BY osm_id, admin_level
HAVING coalesce(max(\"name:en\"), max(name)) IS NOT NULL;
ALTER TABLE osm_borders ADD PRIMARY KEY (osm_id);
" || exit 3
# Copy it to the borders database
echo Copying osm_borders table to the borders database
pg_dump -O -t osm_borders $DATABASE | psql -U borders $DATABASE_BORDERS

View file

@ -0,0 +1,4 @@
DATABASE=gis
DATABASE_BORDERS=borders
pg_dump -O -t osm_places $DATABASE | psql -U borders $DATABASE_BORDERS

17
db/load_tiles.sh Executable file
View file

@ -0,0 +1,17 @@
#!/bin/sh
# Initdb step 30: loads per-tile node counts ($PLANET-tiles.csv, produced at
# image build time by prepare_tiles.sh) into the tiles table, then indexes it.
DATABASE=borders
TABLE=tiles
DB_USER=borders
if [ ! -r "$PLANET-tiles.csv" ]; then
echo "Planet file cannot be found or read."
exit 1
fi
set -e -u
echo Loading tiles into the database
# tiles2pg.py reads "count lat*100 lon*100" lines from stdin.
cat $PLANET-tiles.csv | python3 tiles2pg.py -d $DATABASE -t $TABLE
rm -f $PLANET-tiles.csv
psql -U $DB_USER -d $DATABASE -c "CREATE INDEX ${TABLE}_idx ON ${TABLE} USING gist (tile)"

35
db/prepare_borders.sh Normal file
View file

@ -0,0 +1,35 @@
#!/bin/bash
# Build-time step: filters the downloaded planet file ($PLANET), keeping only
# administrative boundaries and populated places, and writes the result to
# $FILTERED_PLANET (o5m) for load_borders.sh to consume at first start.
set -e
OSMFILTER=osmfilter
OSMCONVERT=osmconvert
if [[ ! -r "$PLANET" ]]; then
echo "Error: planet file cannot be found or read."
exit 1
fi
# NOTE(review): if `which` prints nothing, `[ ! -x ]` becomes a one-argument
# test that evaluates to false, so the "not found" branch is never taken;
# consider `if ! which $OSMFILTER >/dev/null` instead.
if [ ! -x `which $OSMFILTER` ]; then
echo "No osmfilter found."
exit 1
fi
if [ ! -x `which $OSMCONVERT` ]; then
echo "No osmconvert found."
exit 1
fi
# 1. Filter planet file, leaving only administrative borders (and cities)
echo Filtering planet
# osmfilter cannot read pbf input, so convert to o5m first when needed.
if [[ "$PLANET" != *.o5m ]]; then
CONVERTED_PLANET=${PLANET}.o5m
$OSMCONVERT $PLANET --out-o5m -o=$CONVERTED_PLANET
else
CONVERTED_PLANET=$PLANET
fi
$OSMFILTER $CONVERTED_PLANET\
--keep="boundary=administrative or ( place=city =town =hamlet =village =isolated_dwelling )"\
--out-o5m -o=$FILTERED_PLANET\
|| exit 3
# The file must be readable by the postgres user at first container start.
chmod +r $FILTERED_PLANET

21
db/prepare_tiles.sh Executable file
View file

@ -0,0 +1,21 @@
#!/bin/bash
# Build-time step: computes per-tile OSM node counts for the planet file.
# A tile is a 0.01x0.01 degree square; the output file $PLANET-tiles.csv
# contains lines of the form "<count> <lat*100> <lon*100>".
# Fix: the shebang was #!/bin/sh, but `[[ ... ]]` below is a bashism and
# fails under a POSIX /bin/sh (the script only worked because the Dockerfile
# happens to invoke it via bash explicitly).
OSMCONVERT=osmconvert
if [[ ! -r "$PLANET" ]]; then
    echo "Planet file cannot be found or read."
    exit 1
fi
set -e -u
echo Extracting node coordinates
# One "lat*100 lon*100" line per node, truncated to integer hundredths.
$OSMCONVERT --out-osm $PLANET | perl -n -e 'print sprintf "%d %d\n", $1*100, $2*100 if /<node.+lat="([^"]+)".+lon="([^"]+)"/;' > $PLANET-nodes.csv
echo Sorting node list
LC_ALL=C sort -o $PLANET-nodes-sorted.csv $PLANET-nodes.csv
rm $PLANET-nodes.csv
echo Counting unique tiles
# uniq -c INPUT OUTPUT: prefixes each distinct "lat lon" pair with its count.
LC_ALL=C uniq -c $PLANET-nodes-sorted.csv $PLANET-tiles.csv
rm $PLANET-nodes-sorted.csv

62
db/tiles2pg.py Executable file
View file

@ -0,0 +1,62 @@
#!/usr/bin/python3
"""This script takes a file where each line of the form
<count> <lat_x_100> <lon_x_100>
represents the number of OSM nodes in a rectangular tile
[lat, lon, lat + 0.01, lon + 0.01].
lat_x_100 is latitude multiplied by 100 and truncated to an integer.
"""
import argparse
import logging
import sys
import psycopg2
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Import tiles from CSV into a database')
parser.add_argument('-t', '--table', default='tiles', help='Target directory (default=tiles)')
parser.add_argument('-d', '--database', default='borders', help='Database name (default=borders)')
parser.add_argument('-v', dest='verbose', action='store_true', help='Print status messages')
options = parser.parse_args()
log_level = logging.INFO if options.verbose else logging.WARNING
logging.basicConfig(level=log_level, format='%(levelname)s: %(message)s')
TILE_SIDE = 0.01 # degrees
with psycopg2.connect(f'dbname={options.database}') as conn:
with conn.cursor() as cur:
cnt = 0
for line in sys.stdin:
tokens = line.split()
if len(tokens) == 3:
try:
(count, lat, lon) = (int(t) for t in tokens)
except ValueError:
logging.critical(f"Wrong number format at line {cnt}")
conn.rollback()
sys.exit(1)
lat /= 100.0
lon /= 100.0
cur.execute(f"""
INSERT INTO {options.table} (count, tile)
VALUES (%s,
ST_SetSRID(ST_MakeBox2d(ST_Point(%s, %s),
ST_Point(%s, %s)),
4326)
)
""", (count, lon, lat, lon + TILE_SIDE, lat + TILE_SIDE)
)
cnt += 1
else:
logging.warning(f"Incorrect count-lat-lon line '{line}'")
logging.info("Commit")
conn.commit()
logging.info(f"Uploaded {cnt} tiles")

34
docker-compose.yaml Normal file
View file

@ -0,0 +1,34 @@
# Two-service stack for the borders editor:
#   web - UI/API container (built from web/Dockerfile.web), published on host port 8081
#   db  - PostgreSQL+PostGIS; downloads and preprocesses the planet at image build time
version: "3"
services:
web:
build:
context: ./web
dockerfile: Dockerfile.web
container_name: web
restart: always
depends_on:
- db
links:
- "db:dbhost"
ports:
- "8081:80"
environment:
POSTGRES_USER: postgres
POSTGRES_PASSWORD: postgres
db:
build:
context: ./db
dockerfile: Dockerfile.db
args:
# Per the README, PLANET_URL_* variants are examples only; PLANET_URL is
# the one actually used by Dockerfile.db.
PLANET_URL: https://planet.openstreetmap.org/pbf/planet-latest.osm.pbf
PLANET_URL_small: http://download.geofabrik.de/africa/eritrea-latest.osm.pbf
container_name: db
restart: always
environment:
POSTGRES_HOST_AUTH_METHOD: password
POSTGRES_USER: postgres
POSTGRES_PASSWORD: postgres
ports:
- "55432:5432"

View file

@ -1,33 +0,0 @@
-- Legacy schema (pre-Docker layout), superseded by db/create_tables.sql:
-- borders were keyed by name instead of a numeric id, and there was no
-- parent hierarchy or mwm_size_est column.
create table tiles (
tile geometry not null,
count integer not null default 0
);
create table borders (
name varchar(200) not null primary key,
geom geometry not null,
disabled boolean not null default FALSE,
count_k integer,
modified timestamp not null,
cmnt varchar(500)
);
create table borders_backup (
backup varchar(30) not null,
name varchar(200) not null,
geom geometry not null,
disabled boolean not null default FALSE,
count_k integer,
modified timestamp not null,
cmnt varchar(500),
primary key (backup, name)
);
-- Warning points extracted from the map generator log.
create table points (
geom geometry not null,
type integer not null default 0
);
create index border_idx on borders using gist (geom);
create index tiles_idx on tiles using gist (tile);
create index points_ids on points using gist (geom);

View file

@ -1,60 +0,0 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
import psycopg2
import os, argparse
def parse_double_points(line):
if "Double" in line:
words = line.split()
lat = words[9].split("(")[1][:-1]
lon = words[10].split(")")[0]
return float(lon), float(lat), 1
def parse_unknown_outgoing(line):
if "Unknowing" in line:
words = line.split()
lat = words[9]
lon = words[10]
return float(lon), float(lat), 2
filters = (parse_double_points, parse_unknown_outgoing)
parser = argparse.ArgumentParser(description='Extract borders warning points from generator log files to databse.')
parser.add_argument('-s', '--source', help='Generator log file path.')
parser.add_argument('-c', '--connection', help='Database connection string.')
parser.add_argument('-t', '--truncate', action='store_true', help='Truncate old data. WARINIG old data will be lost!')
parser.add_argument('-v', dest='verbose', action='store_true', help='Print status messages.')
options = parser.parse_args()
# Check log file for existance.
if not os.path.exists(options.source):
print "Generator log file", options.source, "does not exists."
exit(1)
# Process the log.
points = []
with open(options.source) as logfile:
for line in logfile.readlines():
for f in filters:
result = f(line)
if result:
points.append(result)
break
# Print stats.
print "Found {0} points".format(len(points))
print "Found {0} ways that do not lead to the external mwm and {1} roads that crossing the border several times.". format(
len(filter(lambda a: a[2] == 2, points)), len(filter(lambda a: a[2] == 1, points))
)
# Commit to the database
conn = psycopg2.connect(options.connection)
cursor = conn.cursor()
if options.truncate:
print "Truncating old data..."
cursor.execute("TRUNCATE TABLE points")
for p in points:
cursor.execute("INSERT into points (geom, type) VALUES (ST_GeomFromText('POINT(%s %s)', 4326), %s)", p)
conn.commit()

View file

@ -1,83 +0,0 @@
#!/bin/sh
# Legacy script (superseded by db/prepare_borders.sh + db/load_borders.sh):
# imports borders and towns from a planet file into the osm_borders table.
# NOTE(review): shebang says /bin/sh but `[[ ... ]]` below is a bashism.
OSM2PGSQL=osm2pgsql
OSMFILTER=./osmfilter
OSMCONVERT=./osmconvert
DATABASE=gis
DATABASE_BORDERS=borders
OSM2PGSQL_KEYS='--cache 2000 --number-processes 6'
OSM2PGSQL_STYLE=
if [[ ! -r "$1" ]]
then
echo Import borders and towns from the planet into osm_borders table
echo Syntax: $0 \<planet_file\>
exit 1
fi
PLANET=$1
# macOS and GNU variants of which/mktemp take different flags.
if [[ "`uname`" == 'Darwin' ]]; then
WHICH='which -s'
MKTEMP='mktemp -t '
else
WHICH=which
MKTEMP='mktemp --suff='
fi
# 0. Test for all required tools and files
if ! $WHICH psql; then
echo "Do you have postgresql installed?"
exit 1
fi
if ! $WHICH $OSM2PGSQL; then
echo "No osm2pgsql found."
exit 1
fi
# Build osmfilter/osmconvert from source on first run.
if [ ! -x "$OSMFILTER" ]; then
wget -O - http://m.m.i24.cc/osmfilter.c |cc -x c - -O3 -o $OSMFILTER
fi
if [ ! -x "$OSMCONVERT" ]; then
wget -O - http://m.m.i24.cc/osmconvert.c | cc -x c - -lz -O3 -o $OSMCONVERT
fi
# 1. Filter planet file, leaving only administrative borders (and cities)
echo Filtering planet
FILTERED=$(${MKTEMP}osmadm)
$OSMFILTER $PLANET --keep="boundary=administrative or place=" --out-o5m -o=$FILTERED || exit 3
# 2. Load filtered data into an osm2pgsql database
echo Loading data into the database
# Creating a style file if we weren't provided with one
if [ -z "$OSM2PGSQL_STYLE" ]; then
OSM2PGSQL_STYLE=$(${MKTEMP}osm2pgsql_style)
OSM2PGSQL_STYLE_TMP=1
cat > $OSM2PGSQL_STYLE <<EOSTYLE
way admin_level text polygon
way area text
way boundary text polygon
node,way name text linear
node,way name:en text linear
node,way name:ru text linear
node,way place text polygon
node,way population text linear
EOSTYLE
fi
$OSM2PGSQL --slim --drop --hstore --style $OSM2PGSQL_STYLE -d $DATABASE -r o5m $OSM2PGSQL_KEYS $FILTERED
RET=$?
rm $FILTERED
if [ "$OSM2PGSQL_STYLE_TMP" == "1" ]; then
rm $OSM2PGSQL_STYLE
fi
[ $RET != 0 ] && exit 3
# 3. Make osm_borders table
echo Creating osm_borders table
psql $DATABASE -c "drop table if exists osm_borders; create table osm_borders as select min(osm_id) as osm_id, ST_Buffer(ST_Transform(ST_Collect(way),4326), 0) as way, admin_level::int as admin_level, coalesce(max(\"name:en\"), name) as name from planet_osm_polygon where boundary='administrative' and admin_level in ('2', '3', '4', '5', '6') group by name, admin_level;" || exit 3
# 4. Copy it to the borders database
echo Copying osm_borders table to the borders database
psql $DATABASE_BORDERS -c "drop table if exists osm_borders;" || exit 3
pg_dump -t osm_borders $DATABASE | psql $DATABASE_BORDERS
echo Done!

View file

@ -1,7 +1,9 @@
#!/usr/bin/python
import psycopg2
import glob
import psycopg2
def read_polygon(f):
"""Reads an array of coordinates with the final 'END' line."""
coords = []
@ -26,6 +28,7 @@ def read_polygon(f):
coords.append(coords[0])
return '({})'.format(','.join(coords))
def read_multipolygon(f):
"""Read the entire poly file and parse in into a WKT."""
polygons = []
@ -53,6 +56,7 @@ def read_multipolygon(f):
else:
return "MULTIPOLYGON({})".format(','.join(polygons))
def convert_poly(input_file, cur):
"""Reads a multipolygon from input_file and inserts it into borders table."""
with open(input_file, 'r') as f:
@ -60,11 +64,12 @@ def convert_poly(input_file, cur):
wkt = read_multipolygon(f)
print ' ', name
try:
cur.execute('insert into borders (name, geom, modified) values (%s, ST_GeomFromText(%s), now())', (name, wkt))
cur.execute('INSERT INTO borders (name, geom, modified) VALUES (%s, ST_GeomFromText(%s), now())', (name, wkt))
except psycopg2.Error as e:
print wkt
raise e
if __name__ == "__main__":
conn = psycopg2.connect('dbname=borders')
cur = conn.cursor()

View file

@ -1,49 +0,0 @@
#!/bin/sh
# Legacy script (superseded by db/prepare_tiles.sh + db/load_tiles.sh):
# computes 0.01x0.01-degree tile node densities for a planet file, loads
# them into the tiles table, indexes it and dumps the result.
# NOTE(review): shebang says /bin/sh but `[[ ... ]]` and `which -s` below
# are bash/BSD-specific.
OSMCONVERT=./osmconvert
DATABASE=borders
TABLE=tiles
if [[ ! -r "$1" ]]
then
echo Calculate tile densities for a planet
echo Syntax: $0 \<planet_file\>
exit 1
fi
set -e -u
if ! which -s psql; then
echo "Do you have postgresql installed?"
exit 1
fi
# Build osmconvert from source on first run.
if [ ! -x "$OSMCONVERT" ]; then
wget -O - http://m.m.i24.cc/osmconvert.c | cc -x c - -lz -O3 -o $OSMCONVERT
fi
# Strip the extension to name intermediate files after the planet file.
PLANET=$(echo $(basename $1) | sed 's/\..*//')
echo Extracting node coordinates
$OSMCONVERT --out-osm $1 | perl -n -e 'print sprintf "%d %d\n", $1*100, $2*100 if /<node.+lat="([^"]+)".+lon="([^"]+)"/;' > $PLANET-nodes.csv
echo Sorting node list
LC_ALL=C sort -o $PLANET-nodes-sorted.csv $PLANET-nodes.csv
rm $PLANET-nodes.csv
echo Counting unique tiles
LC_ALL=C uniq -c $PLANET-nodes-sorted.csv $PLANET-tiles.csv
rm $PLANET-nodes-sorted.csv
echo Cleaning up tiles table and index
psql $DATABASE -c "DELETE FROM $TABLE; DROP INDEX IF EXISTS ${TABLE}_idx;"
echo Loading tiles into the database
pv $PLANET-tiles.csv | python "$(dirname "$0")/tiles2pg.py" -d $DATABASE -t $TABLE
rm $PLANET-tiles.csv
echo Indexing tiles
psql $DATABASE -c "CREATE INDEX ${TABLE}_idx ON $TABLE USING GIST (tile);"
echo Dumping the table
pg_dump -t $TABLE $DATABASE | gzip > $PLANET-tiles.sql.gz
echo Done!

View file

@ -1,29 +0,0 @@
#!/usr/bin/python
import psycopg2, sys, re, argparse
parser = argparse.ArgumentParser(description='Import tiles from CSV into a database')
parser.add_argument('-t', '--table', default='tiles', help='Target directory (default=tiles)')
parser.add_argument('-d', '--database', default='borders', help='Database name (default=borders)')
parser.add_argument('-v', dest='verbose', action='store_true', help='Print status messages')
options = parser.parse_args()
conn = psycopg2.connect("dbname={}".format(options.database))
cur = conn.cursor()
cnt = 0
for line in sys.stdin:
m = re.match(r'^\s*(\d+)\s+(-?\d+)\s+(-?\d+)', line)
if m:
(count, lat, lon) = (int(m.group(1)), float(m.group(2))/100, float(m.group(3))/100)
cur.execute('insert into {} (count, tile) values (%s, ST_SetSRID(ST_MakeBox2d(ST_Point(%s, %s), ST_Point(%s, %s)), 4326));'.format(options.table), (count, lon, lat, lon + 0.01, lat + 0.01))
cnt = cnt + 1
else:
print line
if options.verbose:
print 'Commit'
conn.commit()
if options.verbose:
print 'Uploaded {} tiles'.format(cnt)
cur.close()
conn.close()

View file

@ -1,923 +0,0 @@
#!/usr/bin/python
from flask import Flask, g, request, json, jsonify, abort, Response, send_file, send_from_directory
from flask.ext.cors import CORS
from flask.ext.compress import Compress
import psycopg2
import io, re, zipfile, unicodedata
import config
try:
from lxml import etree
LXML = True
except:
LXML = False
app = Flask(__name__)
app.debug=config.DEBUG
Compress(app)
CORS(app)
@app.route('/')
def hello_world():
return 'Hello <b>World</b>!'
@app.route('/www/<path:path>')
def send_js(path):
if config.DEBUG:
return send_from_directory('../www/', path)
abort(404)
@app.before_request
def before_request():
g.conn = psycopg2.connect(config.CONNECTION)
@app.teardown_request
def teardown(exception):
conn = getattr(g, 'conn', None)
if conn is not None:
conn.close()
@app.route('/bbox')
def query_bbox():
xmin = request.args.get('xmin')
xmax = request.args.get('xmax')
ymin = request.args.get('ymin')
ymax = request.args.get('ymax')
simplify_l = request.args.get('simplify')
if simplify_l == '2':
simplify = 0.1
elif simplify_l == '1':
simplify = 0.01
else:
simplify = 0
table = request.args.get('table')
if table in config.OTHER_TABLES:
table = config.OTHER_TABLES[table]
else:
table = config.TABLE
cur = g.conn.cursor()
cur.execute("""SELECT name, ST_AsGeoJSON({geom}, 7) as geometry, ST_NPoints(geom),
modified, disabled, count_k, cmnt,
round(CASE WHEN ST_Area(geography(geom)) = 'NaN' THEN 0 ELSE ST_Area(geography(geom)) END) as area
FROM {table}
WHERE geom && ST_MakeBox2D(ST_Point(%s, %s), ST_Point(%s, %s))
order by area desc;
""".format(table=table, geom='ST_SimplifyPreserveTopology(geom, {})'.format(simplify) if simplify > 0 else 'geom'),
(xmin, ymin, xmax, ymax))
result = []
for rec in cur:
props = { 'name': rec[0], 'nodes': rec[2], 'modified': rec[3], 'disabled': rec[4], 'count_k': rec[5], 'comment': rec[6], 'area': rec[7] }
feature = { 'type': 'Feature', 'geometry': json.loads(rec[1]), 'properties': props }
result.append(feature)
return jsonify(type='FeatureCollection', features=result)
@app.route('/small')
def query_small_in_bbox():
xmin = request.args.get('xmin')
xmax = request.args.get('xmax')
ymin = request.args.get('ymin')
ymax = request.args.get('ymax')
table = request.args.get('table')
if table in config.OTHER_TABLES:
table = config.OTHER_TABLES[table]
else:
table = config.TABLE
cur = g.conn.cursor()
cur.execute('''SELECT name, round(ST_Area(geography(ring))) as area, ST_X(ST_Centroid(ring)), ST_Y(ST_Centroid(ring))
FROM (
SELECT name, (ST_Dump(geom)).geom as ring
FROM {table}
WHERE geom && ST_MakeBox2D(ST_Point(%s, %s), ST_Point(%s, %s))
) g
WHERE ST_Area(geography(ring)) < %s;'''.format(table=table), (xmin, ymin, xmax, ymax, config.SMALL_KM2 * 1000000))
result = []
for rec in cur:
result.append({ 'name': rec[0], 'area': rec[1], 'lon': float(rec[2]), 'lat': float(rec[3]) })
return jsonify(features=result)
@app.route('/routing')
def query_routing_points():
xmin = request.args.get('xmin')
xmax = request.args.get('xmax')
ymin = request.args.get('ymin')
ymax = request.args.get('ymax')
cur = g.conn.cursor()
try:
cur.execute('''SELECT ST_X(geom), ST_Y(geom), type
FROM points
WHERE geom && ST_MakeBox2D(ST_Point(%s, %s), ST_Point(%s, %s)
);''', (xmin, ymin, xmax, ymax))
except psycopg2.Error, e:
return jsonify(features=[])
result = []
for rec in cur:
result.append({ 'lon': rec[0], 'lat': rec[1], 'type': rec[2] })
return jsonify(features=result)
@app.route('/crossing')
def query_crossing():
xmin = request.args.get('xmin')
xmax = request.args.get('xmax')
ymin = request.args.get('ymin')
ymax = request.args.get('ymax')
region = request.args.get('region', '').encode('utf-8')
points = request.args.get('points') == '1'
rank = request.args.get('rank') or '4'
cur = g.conn.cursor()
sql = """SELECT id, ST_AsGeoJSON({line}, 7) as geometry, region, processed FROM {table}
WHERE line && ST_MakeBox2D(ST_Point(%s, %s), ST_Point(%s, %s)) and processed = 0 {reg} and rank <= %s;
""".format(table=config.CROSSING_TABLE, reg='and region = %s' if region else '', line='line' if not points else 'ST_Centroid(line)')
params = [xmin, ymin, xmax, ymax]
if region:
params.append(region)
params.append(rank)
cur.execute(sql, tuple(params))
result = []
for rec in cur:
props = { 'id': rec[0], 'region': rec[2], 'processed': rec[3] }
feature = { 'type': 'Feature', 'geometry': json.loads(rec[1]), 'properties': props }
result.append(feature)
return jsonify(type='FeatureCollection', features=result)
@app.route('/tables')
def check_osm_table():
osm = False
backup = False
old = []
crossing = False
try:
cur = g.conn.cursor()
cur.execute('select osm_id, ST_Area(way), admin_level, name from {} limit 2;'.format(config.OSM_TABLE))
if cur.rowcount == 2:
osm = True
except psycopg2.Error, e:
pass
try:
cur.execute('select backup, name, ST_Area(geom), modified, disabled, count_k, cmnt from {} limit 2;'.format(config.BACKUP))
backup = True
except psycopg2.Error, e:
pass
for t, tname in config.OTHER_TABLES.iteritems():
try:
cur.execute('select name, ST_Area(geom), modified, disabled, count_k, cmnt from {} limit 2;'.format(tname))
if cur.rowcount == 2:
old.append(t)
except psycopg2.Error, e:
pass
try:
cur = g.conn.cursor()
cur.execute('select id, ST_Length(line), region, processed from {} limit 2;'.format(config.CROSSING_TABLE))
if cur.rowcount == 2:
crossing = True
except psycopg2.Error, e:
pass
return jsonify(osm=osm, tables=old, readonly=config.READONLY, backup=backup, crossing=crossing)
@app.route('/search')
def search():
query = request.args.get('q').encode('utf-8')
cur = g.conn.cursor()
cur.execute('select ST_XMin(geom), ST_YMin(geom), ST_XMax(geom), ST_YMax(geom) from borders where name ilike %s limit 1', ('%{0}%'.format(query),))
if cur.rowcount > 0:
rec = cur.fetchone()
return jsonify(bounds=[rec[0], rec[1], rec[2], rec[3]])
return jsonify(status='not found')
@app.route('/split')
def split():
if config.READONLY:
abort(405)
name = request.args.get('name').encode('utf-8')
line = request.args.get('line')
cur = g.conn.cursor()
# check that we're splitting a single polygon
cur.execute('select ST_NumGeometries(geom) from {} where name = %s;'.format(config.TABLE), (name,))
res = cur.fetchone()
if not res or res[0] != 1:
return jsonify(status='border should have one outer ring')
cur.execute('select ST_AsText((ST_Dump(ST_Split(geom, ST_GeomFromText(%s, 4326)))).geom) from {} where name = %s;'.format(config.TABLE), (line, name))
if cur.rowcount > 1:
# no use of doing anything if the polygon wasn't modified
geometries = []
for res in cur:
geometries.append(res[0])
# get disabled flag and delete old border
cur.execute('select disabled from {} where name = %s;'.format(config.TABLE), (name,))
disabled = cur.fetchone()[0]
cur.execute('delete from {} where name = %s;'.format(config.TABLE), (name,))
# find untaken name series
base_name = name
found = False
while not found:
base_name = base_name + '_'
cur.execute('select count(1) from {} where name like %s;'.format(config.TABLE), (name.replace('_', '\_').replace('%', '\%') + '%',))
found = cur.fetchone()[0] == 0
# insert new geometries
counter = 1
for geom in geometries:
cur.execute('insert into {table} (name, geom, disabled, count_k, modified) values (%s, ST_GeomFromText(%s, 4326), %s, -1, now());'.format(table=config.TABLE), ('{}{}'.format(base_name, counter), geom, disabled))
counter = counter + 1
g.conn.commit()
return jsonify(status='ok')
@app.route('/join')
def join_borders():
if config.READONLY:
abort(405)
name = request.args.get('name').encode('utf-8')
name2 = request.args.get('name2').encode('utf-8')
if name == name2:
return jsonify(status='cannot join region with itself')
cur = g.conn.cursor()
cur.execute('update {table} set geom = ST_Union(geom, b2.g), count_k = -1 from (select geom as g from {table} where name = %s) as b2 where name = %s;'.format(table=config.TABLE), (name2, name))
cur.execute('delete from {} where name = %s;'.format(config.TABLE), (name2,))
g.conn.commit()
return jsonify(status='ok')
@app.route('/point')
def find_osm_borders():
    """List OSM borders containing the point (?lat=&lon=).

    Returns JSON {borders: [{id, name, admin_level, area}]} sorted by
    admin_level descending, then name; area is in km².
    """
    lat = request.args.get('lat')
    lon = request.args.get('lon')
    cursor = g.conn.cursor()
    # The CASE guards against NaN areas of invalid geographies.
    cursor.execute("select osm_id, name, admin_level, (case when ST_Area(geography(way)) = 'NaN' then 0 else ST_Area(geography(way))/1000000 end) as area_km from {table} where ST_Contains(way, ST_SetSRID(ST_Point(%s, %s), 4326)) order by admin_level desc, name asc;".format(table=config.OSM_TABLE), (lon, lat))
    borders = [
        { 'id': rec[0], 'name': rec[1], 'admin_level': rec[2], 'area': rec[3] }
        for rec in cursor
    ]
    return jsonify(borders=borders)
@app.route('/from_osm')
def copy_from_osm():
    """Copy one border from the OSM table into the main table by osm_id.

    If ?name= is non-empty it becomes the new region name; otherwise the
    OSM row's own name is used (the '%s || o.name' form concatenates the
    empty bound value with o.name). Only the placeholder text is chosen at
    format time — the user value itself is still passed as a bound parameter.
    """
    if config.READONLY:
        abort(405)
    osm_id = request.args.get('id')
    name = request.args.get('name').encode('utf-8')
    cur = g.conn.cursor()
    # count_k = -1 marks the new region for size re-estimation.
    cur.execute('insert into {table} (geom, name, modified, count_k) select o.way as way, {name}, now(), -1 from {osm} o where o.osm_id = %s limit 1;'.format(table=config.TABLE, osm=config.OSM_TABLE, name='%s' if name != '' else '%s || o.name'), (name, osm_id))
    g.conn.commit()
    return jsonify(status='ok')
@app.route('/rename')
def set_name():
    """Rename a region: ?name=<current>&newname=<new>."""
    if config.READONLY:
        abort(405)
    old_name = request.args.get('name').encode('utf-8')
    new_name = request.args.get('newname').encode('utf-8')
    cursor = g.conn.cursor()
    cursor.execute('update {} set name = %s where name = %s;'.format(config.TABLE),
                   (new_name, old_name))
    g.conn.commit()
    return jsonify(status='ok')
@app.route('/delete')
def delete_border():
    """Delete the region given by ?name= from the main table."""
    if config.READONLY:
        abort(405)
    region_name = request.args.get('name').encode('utf-8')
    cursor = g.conn.cursor()
    cursor.execute('delete from {} where name = %s;'.format(config.TABLE),
                   (region_name,))
    g.conn.commit()
    return jsonify(status='ok')
@app.route('/disable')
def disable_border():
    """Mark the region given by ?name= as disabled (kept, but inactive)."""
    if config.READONLY:
        abort(405)
    region_name = request.args.get('name').encode('utf-8')
    cursor = g.conn.cursor()
    cursor.execute('update {} set disabled = true where name = %s;'.format(config.TABLE),
                   (region_name,))
    g.conn.commit()
    return jsonify(status='ok')
@app.route('/enable')
def enable_border():
    """Clear the disabled flag of the region given by ?name=."""
    if config.READONLY:
        abort(405)
    region_name = request.args.get('name').encode('utf-8')
    cursor = g.conn.cursor()
    cursor.execute('update {} set disabled = false where name = %s;'.format(config.TABLE),
                   (region_name,))
    g.conn.commit()
    return jsonify(status='ok')
@app.route('/comment', methods=['POST'])
def update_comment():
    """Store a moderator comment (cmnt column) for the named region.

    Form fields: name, comment.
    Consistency fix: like every other mutating endpoint in this file,
    refuse to write when the instance is configured read-only.
    """
    if config.READONLY:
        abort(405)
    name = request.form['name'].encode('utf-8')
    comment = request.form['comment'].encode('utf-8')
    cur = g.conn.cursor()
    cur.execute('update {} set cmnt = %s where name = %s;'.format(config.TABLE), (comment, name))
    g.conn.commit()
    return jsonify(status='ok')
@app.route('/divpreview')
def divide_preview():
    """Preview dividing a region: return simplified geometries of OSM
    borders lying inside the region(s) matched by ?like= and satisfying
    the raw SQL condition ?query=.

    WARNING: `query` is interpolated into the SQL verbatim. This endpoint
    trusts its caller (the admin UI) and is an SQL injection vector if
    exposed to untrusted users.
    """
    like = request.args.get('like').encode('utf-8')
    query = request.args.get('query')
    cur = g.conn.cursor()
    cur.execute('select name, ST_AsGeoJSON(ST_Simplify(way, 0.01)) as way from {table}, (select way as pway from {table} where name like %s) r where ST_Contains(r.pway, way) and {query};'.format(table=config.OSM_TABLE, query=query), (like,))
    result = []
    for rec in cur:
        feature = { 'type': 'Feature', 'geometry': json.loads(rec[1]), 'properties': { 'name': rec[0] } }
        result.append(feature)
    return jsonify(type='FeatureCollection', features=result)
@app.route('/divide')
def divide():
    """Replace region ?name= with OSM subregions: all borders inside the
    region(s) matched by ?like= that satisfy the raw SQL condition ?query=,
    each optionally prefixed with ?prefix=_.

    WARNING: `query` is interpolated into the SQL verbatim (see
    /divpreview); this endpoint trusts the admin UI.
    """
    if config.READONLY:
        abort(405)
    name = request.args.get('name').encode('utf-8')
    like = request.args.get('like').encode('utf-8')
    query = request.args.get('query')
    prefix = request.args.get('prefix', '').encode('utf-8')
    if prefix != '':
        prefix = '{}_'.format(prefix);
    cur = g.conn.cursor()
    # Insert the subregion geometries first, then delete the parent region.
    cur.execute('''insert into {table} (geom, name, modified, count_k)
		select o.way as way, %s || name, now(), -1
		from {osm} o, (
			select way from {osm} where name like %s
		) r
		where ST_Contains(r.way, o.way) and {query};
		'''.format(table=config.TABLE, osm=config.OSM_TABLE, query=query), (prefix, like,))
    cur.execute('delete from {} where name = %s;'.format(config.TABLE), (name,))
    g.conn.commit()
    return jsonify(status='ok')
@app.route('/chop1')
def chop_largest_or_farthest():
    """Split a multipolygon region ?name= into two new regions:
    '<name>_main' with the largest outer ring (by planar area) and
    '<name>_small' with all remaining rings collected together.
    The original region is deleted afterwards.
    """
    if config.READONLY:
        abort(405)
    name = request.args.get('name').encode('utf-8')
    cur = g.conn.cursor()
    cur.execute('select ST_NumGeometries(geom) from {} where name = %s;'.format(config.TABLE), (name,))
    res = cur.fetchone()
    if not res or res[0] < 2:
        return jsonify(status='border should have more than one outer ring')
    # The CTE dumps the multipolygon into individual polygons; the largest
    # one (ORDER BY area DESC LIMIT 1) becomes _main, the rest are
    # re-collected into a single _small geometry.
    cur.execute("""INSERT INTO {table} (name, disabled, modified, geom)
            SELECT name, disabled, modified, geom from
            (
                (WITH w AS (SELECT name, disabled, (ST_Dump(geom)).geom AS g FROM {table} WHERE name = %s)
                (SELECT name||'_main' as name, disabled, now() as modified, g as geom, ST_Area(g) as a FROM w ORDER BY a DESC LIMIT 1)
                UNION ALL
                SELECT name||'_small' as name, disabled, now() as modified, ST_Collect(g) AS geom, ST_Area(ST_Collect(g)) as a
                FROM (SELECT name, disabled, g, ST_Area(g) AS a FROM w ORDER BY a DESC OFFSET 1) ww
                GROUP BY name, disabled)
            ) x;""".format(table=config.TABLE), (name,))
    cur.execute('delete from {} where name = %s;'.format(config.TABLE), (name,))
    g.conn.commit()
    return jsonify(status='ok')
@app.route('/hull')
def draw_hull():
    """Replace a multipolygon region (?name=) with its convex hull."""
    if config.READONLY:
        abort(405)
    region_name = request.args.get('name').encode('utf-8')
    cursor = g.conn.cursor()
    cursor.execute('select ST_NumGeometries(geom) from {} where name = %s;'.format(config.TABLE),
                   (region_name,))
    row = cursor.fetchone()
    # Hulling makes sense only for regions made of several outer rings.
    if not row or row[0] < 2:
        return jsonify(status='border should have more than one outer ring')
    cursor.execute('update {} set geom = ST_ConvexHull(geom) where name = %s;'.format(config.TABLE),
                   (region_name,))
    g.conn.commit()
    return jsonify(status='ok')
@app.route('/fixcrossing')
def fix_crossing():
    """Extend a region's geometry to absorb the given crossing lines.

    Query params:
      preview=1 — return the would-be geometry as GeoJSON without saving;
      region    — name of the region to fix (required);
      ids       — comma-separated crossing-line ids from the crossing table.

    On apply: the region is unioned with a buffer around the lines (only
    exterior rings kept), overlaps with it are subtracted from other
    regions, and the lines are marked processed.
    """
    if config.READONLY:
        abort(405)
    preview = request.args.get('preview') == '1'
    # Bug fix: validate presence BEFORE .encode() — previously a missing
    # 'region' parameter raised AttributeError instead of returning the
    # intended error message.
    region = request.args.get('region')
    if region is None:
        return jsonify(status='Please specify a region')
    region = region.encode('utf-8')
    ids = request.args.get('ids')
    if ids is None or len(ids) == 0:
        return jsonify(status='Please specify a list of line ids')
    ids = tuple(ids.split(','))
    cur = g.conn.cursor()
    if preview:
        cur.execute("""
        WITH lines as (SELECT ST_Buffer(ST_Collect(line), 0.002, 1) as g FROM {cross} WHERE id IN %s)
        SELECT ST_AsGeoJSON(ST_Collect(ST_MakePolygon(er.ring))) FROM
        (
        SELECT ST_ExteriorRing((ST_Dump(ST_Union(ST_Buffer(geom, 0.0), lines.g))).geom) as ring FROM {table}, lines WHERE name = %s
        ) as er
        """.format(table=config.TABLE, cross=config.CROSSING_TABLE), (ids, region))
        res = cur.fetchone()
        if not res:
            return jsonify(status='Failed to extend geometry')
        return jsonify(type="Feature", properties={}, geometry=json.loads(res[0]))
    else:
        cur.execute("""
        WITH lines as (SELECT ST_Buffer(ST_Collect(line), 0.002, 1) as g FROM {cross} WHERE id IN %s)
        UPDATE {table} SET geom = res.g FROM
        (
        SELECT ST_Collect(ST_MakePolygon(er.ring)) as g FROM
        (
        SELECT ST_ExteriorRing((ST_Dump(ST_Union(ST_Buffer(geom, 0.0), lines.g))).geom) as ring FROM {table}, lines WHERE name = %s
        ) as er
        ) as res
        WHERE name = %s
        """.format(table=config.TABLE, cross=config.CROSSING_TABLE), (ids, region, region))
        # Shrink any neighbouring region that now overlaps the extended one.
        cur.execute("""
        UPDATE {table} b SET geom = ST_Difference(b.geom, o.geom)
        FROM {table} o
        WHERE ST_Overlaps(b.geom, o.geom)
        AND o.name = %s
        """.format(table=config.TABLE), (region,))
        cur.execute("UPDATE {cross} SET processed = 1 WHERE id IN %s".format(cross=config.CROSSING_TABLE), (ids,))
        g.conn.commit()
    return jsonify(status='ok')
@app.route('/backup')
def backup_do():
    """Snapshot the whole main table into the backup table, keyed by the
    current timestamp with minute resolution.

    Because the key is truncated to the minute, a second backup within the
    same minute would collide with the latest one — in that case the
    request is refused with 'please try again later'.
    """
    if config.READONLY:
        abort(405)
    cur = g.conn.cursor()
    cur.execute("SELECT to_char(now(), 'IYYY-MM-DD HH24:MI'), max(backup) from {};".format(config.BACKUP))
    (timestamp, tsmax) = cur.fetchone()
    if timestamp == tsmax:
        return jsonify(status='please try again later')
    cur.execute('INSERT INTO {backup} (backup, name, geom, disabled, count_k, modified, cmnt) SELECT %s, name, geom, disabled, count_k, modified, cmnt from {table};'.format(backup=config.BACKUP, table=config.TABLE), (timestamp,))
    g.conn.commit()
    return jsonify(status='ok')
@app.route('/restore')
def backup_restore():
    """Replace the whole main table with the snapshot at ?timestamp=."""
    if config.READONLY:
        abort(405)
    ts = request.args.get('timestamp')
    cursor = g.conn.cursor()
    cursor.execute('SELECT count(1) from {} where backup = %s;'.format(config.BACKUP), (ts,))
    if cursor.fetchone()[0] <= 0:
        return jsonify(status='no such timestamp')
    # Wipe current borders and copy the snapshot rows back; both statements
    # are committed together.
    cursor.execute('DELETE FROM {};'.format(config.TABLE))
    cursor.execute('INSERT INTO {table} (name, geom, disabled, count_k, modified, cmnt) SELECT name, geom, disabled, count_k, modified, cmnt from {backup} where backup = %s;'.format(backup=config.BACKUP, table=config.TABLE), (ts,))
    g.conn.commit()
    return jsonify(status='ok')
@app.route('/backlist')
def backup_list():
    """List backup snapshots (newest first) with their region counts."""
    cursor = g.conn.cursor()
    cursor.execute("SELECT backup, count(1) from {} group by backup order by backup desc;".format(config.BACKUP))
    backups = [
        { 'timestamp': row[0], 'text': row[0], 'count': row[1] }
        for row in cursor
    ]
    # todo: count number of different objects for the last one
    return jsonify(backups=backups)
@app.route('/backdelete')
def backup_delete():
    """Delete the backup snapshot given by ?timestamp=."""
    if config.READONLY:
        abort(405)
    ts = request.args.get('timestamp')
    cursor = g.conn.cursor()
    cursor.execute('SELECT count(1) from {} where backup = %s;'.format(config.BACKUP), (ts,))
    if cursor.fetchone()[0] <= 0:
        return jsonify(status='no such timestamp')
    cursor.execute('DELETE FROM {} WHERE backup = %s;'.format(config.BACKUP), (ts,))
    g.conn.commit()
    return jsonify(status='ok')
@app.route('/josm')
def make_osm():
    """Export regions intersecting the given bbox as a JOSM-loadable OSM XML
    document.

    Query params: xmin, ymin, xmax, ymax (bbox), table (optional alias from
    config.OTHER_TABLES; defaults to the main table). Single-ring regions
    become plain ways unless config.JOSM_FORCE_MULTI is set; everything else
    becomes a type=multipolygon relation. Identical rings are deduplicated
    via ring_hash so shared borders are emitted once.
    """
    xmin = request.args.get('xmin')
    xmax = request.args.get('xmax')
    ymin = request.args.get('ymin')
    ymax = request.args.get('ymax')
    table = request.args.get('table')
    if table in config.OTHER_TABLES:
        table = config.OTHER_TABLES[table]
    else:
        table = config.TABLE
    cur = g.conn.cursor()
    # The bbox is buffered by 0.3 degrees to also catch nearby borders.
    cur.execute('SELECT name, disabled, ST_AsGeoJSON(geom, 7) as geometry FROM {table} WHERE ST_Intersects(ST_SetSRID(ST_Buffer(ST_MakeBox2D(ST_Point(%s, %s), ST_Point(%s, %s)), 0.3), 4326), geom);'.format(table=table), (xmin, ymin, xmax, ymax))
    node_pool = { 'id': 1 } # 'lat_lon': id
    regions = [] # { name: name, rings: [['outer', [ids]], ['inner', [ids]], ...] }
    for rec in cur:
        geometry = json.loads(rec[2])
        rings = []
        if geometry['type'] == 'Polygon':
            parse_polygon(node_pool, rings, geometry['coordinates'])
        elif geometry['type'] == 'MultiPolygon':
            for polygon in geometry['coordinates']:
                parse_polygon(node_pool, rings, polygon)
        if len(rings) > 0:
            regions.append({ 'name': rec[0], 'disabled': rec[1], 'rings': rings })
    xml = '<?xml version="1.0" encoding="UTF-8"?><osm version="0.6" upload="false">'
    # Emit every interned node once; the special 'id' key is the counter.
    for latlon, node_id in node_pool.items():
        if latlon != 'id':
            (lat, lon) = latlon.split()
            xml = xml + '<node id="{id}" visible="true" version="1" lat="{lat}" lon="{lon}" />'.format(id=node_id, lat=lat, lon=lon)
    wrid = 1
    ways = {} # json: id
    for region in regions:
        w1key = ring_hash(region['rings'][0][1])
        if not config.JOSM_FORCE_MULTI and len(region['rings']) == 1 and w1key not in ways:
            # simple case: a way
            ways[w1key] = wrid
            xml = xml + '<way id="{id}" visible="true" version="1">'.format(id=wrid)
            xml = xml + '<tag k="name" v={} />'.format(quoteattr(region['name']))
            if region['disabled']:
                xml = xml + '<tag k="disabled" v="yes" />'
            for nd in region['rings'][0][1]:
                xml = xml + '<nd ref="{ref}" />'.format(ref=nd)
            xml = xml + '</way>'
            wrid = wrid + 1
        else:
            # multipolygon
            rxml = '<relation id="{id}" visible="true" version="1">'.format(id=wrid)
            wrid = wrid + 1
            rxml = rxml + '<tag k="type" v="multipolygon" />'
            rxml = rxml + '<tag k="name" v={} />'.format(quoteattr(region['name']))
            if region['disabled']:
                rxml = rxml + '<tag k="disabled" v="yes" />'
            for ring in region['rings']:
                wkey = ring_hash(ring[1])
                if wkey in ways:
                    # already have that way
                    rxml = rxml + '<member type="way" ref="{ref}" role="{role}" />'.format(ref=ways[wkey], role=ring[0])
                else:
                    # new way: emit it into the way stream, reference it here
                    ways[wkey] = wrid
                    xml = xml + '<way id="{id}" visible="true" version="1">'.format(id=wrid)
                    rxml = rxml + '<member type="way" ref="{ref}" role="{role}" />'.format(ref=wrid, role=ring[0])
                    for nd in ring[1]:
                        xml = xml + '<nd ref="{ref}" />'.format(ref=nd)
                    xml = xml + '</way>'
                    wrid = wrid + 1
            xml = xml + rxml + '</relation>'
    xml = xml + '</osm>'
    return Response(xml, mimetype='application/x-osm+xml')
@app.route('/josmbord')
def josm_borders_along():
    """Export, as OSM XML ways, the pieces of OSM borders that run inside a
    0.2-degree buffer around the given WKT line, clipped to region ?name=.
    """
    name = request.args.get('name')
    line = request.args.get('line')
    cur = g.conn.cursor()
    # select all outer osm borders inside a buffer of the given line
    cur.execute("""
        with linestr as (
            select ST_Intersection(geom, ST_Buffer(ST_GeomFromText(%s, 4326), 0.2)) as line
            from {table} where name = %s
        ), osmborders as (
            select (ST_Dump(way)).geom as g from {osm}, linestr where ST_Intersects(line, way)
        )
        select ST_AsGeoJSON((ST_Dump(ST_LineMerge(ST_Intersection(ST_Collect(ST_ExteriorRing(g)), line)))).geom) from osmborders, linestr group by line
        """.format(table=config.TABLE, osm=config.OSM_TABLE), (line, name))
    node_pool = { 'id': 1 } # 'lat_lon': id
    lines = []
    # NOTE(review): the loop below reuses the name `line`, shadowing the
    # request parameter (harmless — the SQL has already run). Also, if a
    # geometry is neither LineString nor MultiLineString, `nodes` keeps its
    # value from the previous iteration (or is unbound on the first one) —
    # presumably such geometries never occur here; verify against the query.
    for rec in cur:
        geometry = json.loads(rec[0])
        if geometry['type'] == 'LineString':
            nodes = parse_linestring(node_pool, geometry['coordinates'])
        elif geometry['type'] == 'MultiLineString':
            nodes = []
            for line in geometry['coordinates']:
                nodes.extend(parse_linestring(node_pool, line))
        if len(nodes) > 0:
            lines.append(nodes)
    xml = '<?xml version="1.0" encoding="UTF-8"?><osm version="0.6" upload="false">'
    for latlon, node_id in node_pool.items():
        if latlon != 'id':
            (lat, lon) = latlon.split()
            xml = xml + '<node id="{id}" visible="true" version="1" lat="{lat}" lon="{lon}" />'.format(id=node_id, lat=lat, lon=lon)
    wrid = 1
    for line in lines:
        xml = xml + '<way id="{id}" visible="true" version="1">'.format(id=wrid)
        for nd in line:
            xml = xml + '<nd ref="{ref}" />'.format(ref=nd)
        xml = xml + '</way>'
        wrid = wrid + 1
    xml = xml + '</osm>'
    return Response(xml, mimetype='application/x-osm+xml')
def quoteattr(value):
    """Return `value` wrapped in double quotes with XML-special characters
    escaped, suitable for use as an XML attribute value.
    """
    # '&' must be escaped first so later entities are not double-escaped.
    replacements = [
        ('&', '&amp;'), ('>', '&gt;'), ('<', '&lt;'),
        ('\n', '&#10;'), ('\r', '&#13;'), ('\t', '&#9;'),
        ('"', '&quot;'),
    ]
    for char, entity in replacements:
        value = value.replace(char, entity)
    return '"{}"'.format(value)
def ring_hash(refs):
    """Order-independent hash of a ring's node-id sequence, used to detect
    rings shared between regions.
    """
    #return json.dumps(refs)
    canonical = tuple(sorted(refs))
    return hash(canonical)
def parse_polygon(node_pool, rings, polygon):
    """Append [role, node_ids] pairs for a GeoJSON polygon to `rings`.

    The first ring of the polygon is the outer boundary; every subsequent
    ring is a hole ('inner'). Nodes are interned through `node_pool`.
    """
    for index, ring in enumerate(polygon):
        role = 'outer' if index == 0 else 'inner'
        rings.append([role, parse_linestring(node_pool, ring)])
def parse_linestring(node_pool, linestring):
    """Intern each (lon, lat) coordinate of a GeoJSON linestring into
    `node_pool` and return the list of node ids.

    node_pool maps 'lat lon' strings to integer ids; the special key 'id'
    holds the next id to allocate.
    """
    nodes = []
    for lonlat in linestring:
        key = '{} {}'.format(lonlat[1], lonlat[0])  # stored as 'lat lon'
        if key not in node_pool:
            node_pool[key] = node_pool['id']
            node_pool['id'] += 1
        nodes.append(node_pool[key])
    return nodes
def append_way(way, way2):
    """Try to join two open ways that share an endpoint.

    Returns the combined node list, or None if either way is already closed
    or the two ways do not share an endpoint. way2 is reversed if needed.
    The shared endpoint node appears once in the result.
    """
    another = list(way2) # make copy to not modify original list
    if way[0] == way[-1] or another[0] == another[-1]:
        return None  # closed rings cannot be extended
    if way[0] == another[0] or way[-1] == another[-1]:
        another.reverse()
    if way[-1] == another[0]:
        result = list(way)
        result.extend(another[1:])
        return result
    elif way[0] == another[-1]:
        result = another
        # Bug fix: skip the shared node here too — previously `extend(way)`
        # duplicated it, unlike the symmetric branch above.
        result.extend(way[1:])
        return result
    return None
def way_to_wkt(node_pool, refs):
    """Render a list of node ids as a WKT ring: '(lon lat,lon lat,...)'.

    node_pool maps node id -> {'lat': ..., 'lon': ...}.
    """
    pairs = ('{} {}'.format(node_pool[ref]['lon'], node_pool[ref]['lat'])
             for ref in refs)
    return '({})'.format(','.join(pairs))
def import_error(msg):
    # Report an import failure either as a browser alert (for the
    # iframe-based upload form) or as a JSON status, per configuration.
    if config.IMPORT_ERROR_ALERT:
        return '<script>alert("{}");</script>'.format(msg)
    else:
        return jsonify(status=msg)
def extend_bbox(bbox, x, y=None):
    """Grow `bbox` ([xmin, ymin, xmax, ymax], mutated in place) to cover
    either the point (x, y) or another bbox passed as `x` when y is None.
    """
    other = [x, y, x, y] if y is not None else x
    for i in (0, 1):
        bbox[i] = min(bbox[i], other[i])
    for i in (2, 3):
        bbox[i] = max(bbox[i], other[i])
def bbox_contains(outer, inner):
    """Return True if bbox `inner` lies entirely within bbox `outer`
    (both as [xmin, ymin, xmax, ymax]; boundaries count as inside).
    """
    lower_ok = outer[0] <= inner[0] and outer[1] <= inner[1]
    upper_ok = outer[2] >= inner[2] and outer[3] >= inner[3]
    return lower_ok and upper_ok
@app.route('/import', methods=['POST'])
def import_osm():
    """Import an edited OSM XML file (uploaded as form field 'file') back
    into the borders table.

    Pipeline: parse nodes and ways; assemble multipolygon relations by
    stitching member ways into closed rings; sort inner rings into their
    outer rings by bbox containment; finally insert new regions and update
    modified ones. Only elements marked modified (negative id or
    action=modify, possibly transitively through nodes/ways) are written.

    NOTE(review): this function uses Python 2-only constructs
    (dict.iteritems, `except E, e`, the print statement).
    """
    if config.READONLY:
        abort(405)
    if not LXML:
        return import_error('importing is disabled due to absent lxml library')
    f = request.files['file']
    if not f:
        return import_error('failed upload')
    try:
        tree = etree.parse(f)
    except:
        return import_error('malformed xml document')
    if not tree:
        return import_error('bad document')
    root = tree.getroot()
    # read nodes and ways
    nodes = {} # id: { lat, lon, modified }
    for node in root.iter('node'):
        if node.get('action') == 'delete':
            continue
        # JOSM gives new objects negative ids; edits carry action=modify.
        modified = int(node.get('id')) < 0 or node.get('action') == 'modify'
        nodes[node.get('id')] = { 'lat': float(node.get('lat')), 'lon': float(node.get('lon')), 'modified': modified }
    ways = {} # id: { name, disabled, modified, bbox, nodes, used }
    for way in root.iter('way'):
        if way.get('action') == 'delete':
            continue
        way_nodes = []
        bbox = [1e4, 1e4, -1e4, -1e4]
        modified = int(way.get('id')) < 0 or way.get('action') == 'modify'
        for node in way.iter('nd'):
            ref = node.get('ref')
            if not ref in nodes:
                return import_error('missing node {} in way {}'.format(ref, way.get('id')))
            way_nodes.append(ref)
            # A way is modified if any of its nodes moved.
            if nodes[ref]['modified']:
                modified = True
            extend_bbox(bbox, float(nodes[ref]['lon']), float(nodes[ref]['lat']))
        name = None
        disabled = False
        for tag in way.iter('tag'):
            if tag.get('k') == 'name':
                name = tag.get('v')
            if tag.get('k') == 'disabled' and tag.get('v') == 'yes':
                disabled = True
        if len(way_nodes) < 2:
            return import_error('way with less than 2 nodes: {}'.format(way.get('id')))
        ways[way.get('id')] = { 'name': name, 'disabled': disabled, 'modified': modified, 'bbox': bbox, 'nodes': way_nodes, 'used': False }
    # finally we are constructing regions: first, from multipolygons
    regions = {} # name: { modified, disabled, wkt }
    for rel in root.iter('relation'):
        modified = int(rel.get('id')) < 0 or rel.get('action') == 'modify'
        name = None
        disabled = False
        multi = False
        inner = []
        outer = []
        for tag in rel.iter('tag'):
            if tag.get('k') == 'name':
                name = tag.get('v')
            if tag.get('k') == 'disabled' and tag.get('v') == 'yes':
                disabled = True
            if tag.get('k') == 'type' and tag.get('v') == 'multipolygon':
                multi = True
        if not multi:
            return import_error('found non-multipolygon relation: {}'.format(rel.get('id')))
        if not name:
            return import_error('relation {} has no name'.format(rel.get('id')))
        if name in regions:
            return import_error('multiple relations with the same name {}'.format(name))
        for member in rel.iter('member'):
            ref = member.get('ref')
            if not ref in ways:
                return import_error('missing way {} in relation {}'.format(ref, rel.get('id')))
            if ways[ref]['modified']:
                modified = True
            role = member.get('role')
            if role == 'outer':
                outer.append(ways[ref])
            elif role == 'inner':
                inner.append(ways[ref])
            else:
                return import_error('unknown role {} in relation {}'.format(role, rel.get('id')))
            ways[ref]['used'] = True
        # after parsing ways, so 'used' flag is set
        if rel.get('action') == 'delete':
            continue
        if len(outer) == 0:
            continue
            #return import_error('relation {} has no outer ways'.format(rel.get('id')))
        # reconstruct rings in multipolygon
        # Greedily stitch open member ways into closed rings; each
        # successful join merges way j into way i and removes j.
        for multi in (inner, outer):
            i = 0
            while i < len(multi):
                way = multi[i]['nodes']
                while way[0] != way[-1]:
                    productive = False
                    j = i + 1
                    while way[0] != way[-1] and j < len(multi):
                        new_way = append_way(way, multi[j]['nodes'])
                        if new_way:
                            multi[i] = dict(multi[i])
                            multi[i]['nodes'] = new_way
                            way = new_way
                            if multi[j]['modified']:
                                multi[i]['modified'] = True
                            extend_bbox(multi[i]['bbox'], multi[j]['bbox'])
                            del multi[j]
                            productive = True
                        else:
                            j = j + 1
                    if not productive:
                        return import_error('unconnected way in relation {}'.format(rel.get('id')))
                i = i + 1
        # check for 2-node rings
        for multi in (outer, inner):
            for way in multi:
                if len(way['nodes']) < 3:
                    return import_error('Way in relation {} has only {} nodes'.format(rel.get('id'), len(way['nodes'])))
        # sort inner and outer rings
        polygons = []
        for way in outer:
            rings = [way_to_wkt(nodes, way['nodes'])]
            # Assign each remaining inner ring to the first outer ring whose
            # bbox contains it (approximation — bbox, not exact geometry).
            for i in range(len(inner)-1, -1, -1):
                if bbox_contains(way['bbox'], inner[i]['bbox']):
                    rings.append(way_to_wkt(nodes, inner[i]['nodes']))
                    del inner[i]
            polygons.append('({})'.format(','.join(rings)))
        regions[name] = { 'modified': modified, 'disabled': disabled, 'wkt': 'MULTIPOLYGON({})'.format(','.join(polygons)) }
    # make regions from unused named ways
    for wid, w in ways.iteritems():
        if w['used']:
            continue
        if not w['name']:
            continue
            #return import_error('unused in multipolygon way with no name: {}'.format(wid))
        if w['nodes'][0] != w['nodes'][-1]:
            return import_error('non-closed unused in multipolygon way: {}'.format(wid))
        if len(w['nodes']) < 3:
            return import_error('way {} has {} nodes'.format(wid, len(w['nodes'])))
        if w['name'] in regions:
            return import_error('way {} has the same name as other way/multipolygon'.format(wid))
        regions[w['name']] = { 'modified': w['modified'], 'disabled': w['disabled'], 'wkt': 'POLYGON({})'.format(way_to_wkt(nodes, w['nodes'])) }
    # submit modifications to the database
    cur = g.conn.cursor()
    added = 0
    updated = 0
    for name, region in regions.iteritems():
        if not region['modified']:
            continue
        cur.execute('select count(1) from {} where name = %s'.format(config.TABLE), (name,))
        res = cur.fetchone()
        try:
            if res and res[0] > 0:
                # update
                cur.execute('update {table} set disabled = %s, geom = ST_GeomFromText(%s, 4326), modified = now(), count_k = -1 where name = %s'.format(table=config.TABLE), (region['disabled'], region['wkt'], name))
                updated = updated + 1
            else:
                # create
                cur.execute('insert into {table} (name, disabled, geom, modified, count_k) values (%s, %s, ST_GeomFromText(%s, 4326), now(), -1);'.format(table=config.TABLE), (name, region['disabled'], region['wkt']))
                added = added + 1
        except psycopg2.Error, e:
            # Log the offending WKT before re-raising for the 500 handler.
            print 'WKT: {}'.format(region['wkt'])
            raise
    g.conn.commit()
    return jsonify(regions=len(regions), added=added, updated=updated)
@app.route('/poly')
def export_poly():
    """Export enabled regions as a zip of Osmosis .poly files.

    Optional bbox (?xmin=&ymin=&xmax=&ymax=) restricts the export to
    intersecting regions; ?table= may name an alias from
    config.OTHER_TABLES, else the main table is used. The zip entry name is
    an ASCII-sanitized region name; the first line inside each .poly file
    keeps the original (unsanitized) name.
    """
    xmin = request.args.get('xmin')
    xmax = request.args.get('xmax')
    ymin = request.args.get('ymin')
    ymax = request.args.get('ymax')
    table = request.args.get('table')
    if table in config.OTHER_TABLES:
        table = config.OTHER_TABLES[table]
    else:
        table = config.TABLE
    cur = g.conn.cursor()
    if xmin and xmax and ymin and ymax:
        cur.execute("""SELECT name, ST_AsGeoJSON(geom, 7) as geometry FROM {table} WHERE disabled = false
            and ST_Intersects(ST_SetSRID(ST_MakeBox2D(ST_Point(%s, %s), ST_Point(%s, %s)), 4326), geom);
            """.format(table=table), (xmin, ymin, xmax, ymax))
    else:
        cur.execute("""SELECT name, ST_AsGeoJSON(geom, 7) as geometry FROM {table} WHERE disabled = false;""".format(table=table))
    memory_file = io.BytesIO();
    with zipfile.ZipFile(memory_file, 'w', zipfile.ZIP_DEFLATED) as zf:
        for res in cur:
            geometry = json.loads(res[1])
            # Treat a Polygon as a one-element MultiPolygon.
            polygons = [geometry['coordinates']] if geometry['type'] == 'Polygon' else geometry['coordinates']
            # sanitize name, src: http://stackoverflow.com/a/295466/1297601
            name = res[0].decode('utf-8')
            name = unicodedata.normalize('NFKD', name)
            name = name.encode('ascii', 'ignore')
            name = re.sub('[^\w _-]', '', name).strip()
            name = name + '.poly'
            poly = io.BytesIO()
            poly.write(res[0] + '\n')
            pcounter = 1
            for polygon in polygons:
                outer = True
                for ring in polygon:
                    # Osmosis convention: holes get a negative section number.
                    poly.write('{}\n'.format(pcounter if outer else -pcounter))
                    pcounter = pcounter + 1
                    for coord in ring:
                        poly.write('\t{:E}\t{:E}\n'.format(coord[0], coord[1]))
                    poly.write('END\n')
                    outer = False
            poly.write('END\n')
            zf.writestr(name, poly.getvalue())
            poly.close()
    memory_file.seek(0)
    return send_file(memory_file, attachment_filename='borders.zip', as_attachment=True)
@app.route('/stat')
def statistics():
    """Return statistics about regions in the chosen table.

    ?group=total — number of regions;
    ?group=sizes — per-region size estimate, point count, area, flags;
    ?group=topo  — per-region outer-ring count, smallest outer-ring area,
                   inner-ring count and centroid.
    ?table may name an alias from config.OTHER_TABLES, else the main table.
    """
    group = request.args.get('group')
    table = request.args.get('table')
    if table in config.OTHER_TABLES:
        table = config.OTHER_TABLES[table]
    else:
        table = config.TABLE
    cur = g.conn.cursor()
    if group == 'total':
        # Bug fix: count rows of the requested table — previously the table
        # name 'borders' was hard-coded, ignoring the ?table= parameter.
        cur.execute('select count(1) from {};'.format(table))
        return jsonify(total=cur.fetchone()[0])
    elif group == 'sizes':
        cur.execute("select name, count_k, ST_NPoints(geom), ST_AsGeoJSON(ST_Centroid(geom)), (case when ST_Area(geography(geom)) = 'NaN' then 0 else ST_Area(geography(geom)) / 1000000 end) as area, disabled, (case when cmnt is null or cmnt = '' then false else true end) as cmnt from {};".format(table))
        result = []
        for res in cur:
            coord = json.loads(res[3])['coordinates']
            result.append({ 'name': res[0], 'lat': coord[1], 'lon': coord[0], 'size': res[1], 'nodes': res[2], 'area': res[4], 'disabled': res[5], 'commented': res[6] })
        return jsonify(regions=result)
    elif group == 'topo':
        cur.execute("select name, count(1), min(case when ST_Area(geography(g)) = 'NaN' then 0 else ST_Area(geography(g)) end) / 1000000, sum(ST_NumInteriorRings(g)), ST_AsGeoJSON(ST_Centroid(ST_Collect(g))) from (select name, (ST_Dump(geom)).geom as g from {}) a group by name;".format(table))
        result = []
        for res in cur:
            coord = json.loads(res[4])['coordinates']
            result.append({ 'name': res[0], 'outer': res[1], 'min_area': res[2], 'inner': res[3], 'lon': coord[0], 'lat': coord[1] })
        return jsonify(regions=result)
    return jsonify(status='wrong group id')
if __name__ == '__main__':
    # Development entry point; threaded so concurrent requests don't block.
    # NOTE(review): in the Docker setup the app is presumably served by
    # uwsgi instead — this branch is not taken there.
    app.run(threaded=True)

View file

@ -1,73 +0,0 @@
#!/usr/bin/python
import os, sys
import time
import logging
import psycopg2
import config
# python-daemon is optional: without it the process can still be run in
# the foreground (see the __main__ block below).
try:
    from daemon import runner
    HAS_DAEMON = True
except:
    HAS_DAEMON = False
class App():
    """borders-daemon worker: endlessly picks a region whose size estimate
    is stale (count_k < 0 or NULL) and recalculates it from the tiles table.

    The stdin/stdout/stderr/pidfile attributes are the interface required
    by daemon.runner.DaemonRunner.
    """
    def __init__(self):
        self.stdin_path = '/dev/null'
        self.stdout_path = '/dev/tty'
        self.stderr_path = '/dev/tty'
        self.pidfile_path = '/var/run/borders-daemon.pid'
        self.pidfile_timeout = 5

    def process(self, region):
        """Recalculate count_k for one region from intersecting tiles."""
        logger.info('Processing {}'.format(region))
        # Publish the currently processed region name for the web UI;
        # status-file write failures are deliberately ignored (best-effort).
        try:
            f = open(config.DAEMON_STATUS_PATH, 'w')
            f.write(region)
            f.close()
        except:
            pass
        with self.conn.cursor() as cur:
            cur.execute('update {table} set count_k = n.count from (select coalesce(sum(t.count), 0) as count from {table} b, tiles t where ST_Intersects(b.geom, t.tile) and name = %s) as n where name = %s;'.format(table=config.TABLE), (region, region));
        # Clear the status file when done (best-effort again).
        try:
            f = open(config.DAEMON_STATUS_PATH, 'w')
            f.close()
        except:
            pass

    def find_region(self):
        """Return the next region name to process, or None.

        Smallest region with a stale estimate (count_k < 0) first; if none,
        smallest region never counted (count_k IS NULL).
        """
        with self.conn.cursor() as cur:
            cur.execute('select name from {table} where count_k < 0 order by st_area(geom) limit 1;'.format(table=config.TABLE))
            res = cur.fetchone()
            if not res:
                cur.execute('select name from {table} where count_k is null order by st_area(geom) limit 1;'.format(table=config.TABLE))
                res = cur.fetchone()
            return res[0] if res else None

    def run(self):
        """Daemon entry point: connect to the database and poll forever."""
        self.conn = psycopg2.connect(config.CONNECTION)
        self.conn.autocommit = True
        while True:
            region = self.find_region()
            if region:
                self.process(region)
            time.sleep(1) # todo: 10
def init_logger():
    """Create and configure the 'borders-daemon' logger.

    Logs at INFO level to stderr via a StreamHandler; a FileHandler to
    /var/log is kept around commented out as an alternative.
    """
    log = logging.getLogger("borders-daemon")
    log.setLevel(logging.INFO)
    #handler = logging.FileHandler("/var/log/borders-daemon.log")
    handler = logging.StreamHandler()
    handler.setFormatter(
        logging.Formatter("%(asctime)s [%(levelname)s] %(message)s"))
    log.addHandler(handler)
    return log
if __name__ == '__main__':
    app = App()
    logger = init_logger()
    # Foreground mode when python-daemon is missing or 'run' is passed;
    # otherwise DaemonRunner handles start/stop/restart actions from argv.
    if not HAS_DAEMON or (len(sys.argv) > 1 and sys.argv[1] == 'run'):
        app.run()
    else:
        r = runner.DaemonRunner(app)
        r.do_action()

View file

@ -1,24 +0,0 @@
# postgresql connection string (passed to psycopg2.connect)
CONNECTION = 'dbname=borders'
# passed to flask.Debug
DEBUG = False
# if the main table is read-only, all mutating endpoints respond with 405
READONLY = False
# main table name
TABLE = 'borders'
# from where OSM borders are imported
OSM_TABLE = 'osm_borders'
# tables with borders for reference, selectable via the ?table= parameter
OTHER_TABLES = { 'old': 'old_borders' }
# backup table
BACKUP = 'borders_backup'
# table with crossing lines (used by /fixcrossing)
CROSSING_TABLE = 'crossing'
# area of an island (km²) for it to be considered small
SMALL_KM2 = 10
# force multipolygons in JOSM output (single-ring regions become relations too)
JOSM_FORCE_MULTI = True
# alert instead of json on import error (for the iframe-based upload form)
IMPORT_ERROR_ALERT = False
# file to which daemon writes the name of currently processed region
DAEMON_STATUS_PATH = '/var/www/html/borders-daemon-status.txt'

8
web/Dockerfile.web Normal file
View file

@ -0,0 +1,8 @@
# Web frontend image: uWSGI + nginx + Flask base with the app copied in.
FROM tiangolo/uwsgi-nginx-flask:latest

# lxml — OSM XML import; psycopg2 — PostgreSQL access; numpy/sklearn —
# mwm size estimation; flask_cors/flask_compress — API middleware.
RUN pip install lxml flask_cors flask_compress psycopg2 unidecode numpy sklearn

COPY app /app
COPY ./uwsgi.ini /app
COPY ./prestart.sh /app

220
web/app/auto_split.py Normal file
View file

@ -0,0 +1,220 @@
import itertools
from collections import defaultdict
from config import (
AUTOSPLIT_TABLE as autosplit_table,
OSM_TABLE as osm_table,
MWM_SIZE_THRESHOLD,
)
from subregions import get_subregions_info
class DisjointClusterUnion:
    """Disjoint set union (union-find) over administrative subregions.

    Subregions start as singleton clusters and are merged until every
    cluster's mwm size estimate approaches (without exceeding)
    ``mwm_size_thr``.
    """
    def __init__(self, region_id, subregions, mwm_size_thr=None):
        # osm_id of the region being split
        self.region_id = region_id
        # {subregion_id: data dict containing at least 'mwm_size_est'}
        self.subregions = subregions
        # NOTE(review): `or` means a falsy threshold (e.g. 0) silently
        # falls back to the config default MWM_SIZE_THRESHOLD.
        self.mwm_size_thr = mwm_size_thr or MWM_SIZE_THRESHOLD
        # union-find parent pointers: subregion id -> representative id
        self.representatives = {sub_id: sub_id for sub_id in subregions}
        # A cluster is one or more subregions with common borders
        self.clusters = {}  # representative => cluster object
        # At the beginning, each subregion forms a cluster.
        # Then they would be enlarged by merging.
        for subregion_id, data in subregions.items():
            self.clusters[subregion_id] = {
                'representative': subregion_id,
                'subregion_ids': [subregion_id],
                'mwm_size_est': data['mwm_size_est'],
                'finished': False,  # True if the cluster cannot be merged with another
            }

    def get_smallest_cluster(self):
        """Find minimal cluster.

        Returns the representative of the unfinished cluster with the
        smallest size estimate, or None if all clusters are finished.
        """
        smallest_cluster_id = min(
            (cluster_id for cluster_id in self.clusters.keys()
                if not self.clusters[cluster_id]['finished']),
            default=None,
            key=lambda cluster_id: self.clusters[cluster_id]['mwm_size_est']
        )
        return smallest_cluster_id

    def find_cluster(self, subregion_id):
        """Return the cluster representative of a subregion, compressing
        the parent path along the way (recursive path compression)."""
        if self.representatives[subregion_id] == subregion_id:
            return subregion_id
        else:
            representative = self.find_cluster(self.representatives[subregion_id])
            self.representatives[subregion_id] = representative
            return representative

    def union(self, cluster_id1, cluster_id2):
        """Merge two clusters; the one with the greater representative id
        survives. Returns the retained representative id."""
        # To make it more deterministic
        retained_cluster_id = max(cluster_id1, cluster_id2)
        dropped_cluster_id = min(cluster_id1, cluster_id2)
        r_cluster = self.clusters[retained_cluster_id]
        d_cluster = self.clusters[dropped_cluster_id]
        r_cluster['subregion_ids'].extend(d_cluster['subregion_ids'])
        r_cluster['mwm_size_est'] += d_cluster['mwm_size_est']
        del self.clusters[dropped_cluster_id]
        self.representatives[dropped_cluster_id] = retained_cluster_id
        return retained_cluster_id

    def get_cluster_subregion_ids(self, subregion_id):
        """Get all elements in a cluster by subregion_id"""
        representative = self.find_cluster(subregion_id)
        return set(self.clusters[representative]['subregion_ids'])

    def get_all_subregion_ids(self):
        """Return the set of all subregion ids across all clusters."""
        subregion_ids = set(itertools.chain.from_iterable(
            cl['subregion_ids'] for cl in self.clusters.values()
        ))
        return subregion_ids
def get_best_cluster_to_join_with(small_cluster_id,
                                  common_border_matrix,
                                  dcu: DisjointClusterUnion):
    """Among clusters adjacent to `small_cluster_id`, pick the best merge
    candidate and return its representative, or None if no merge is
    possible.

    The criterion prefers neighbours that share a long border with the
    small cluster while penalizing large neighbours; only neighbours whose
    merged size estimate stays within dcu.mwm_size_thr are considered.
    """
    if small_cluster_id not in common_border_matrix:
        # This may be if a subregion is isolated,
        # like Bezirk Lienz inside Tyrol, Austria
        return None
    common_borders = defaultdict(float)  # cluster representative => common border length
    subregion_ids = dcu.get_cluster_subregion_ids(small_cluster_id)
    # Aggregate per-subregion border lengths into per-cluster totals.
    for subregion_id in subregion_ids:
        for other_subregion_id, length in common_border_matrix[subregion_id].items():
            other_cluster_id = dcu.find_cluster(other_subregion_id)
            if other_cluster_id != small_cluster_id:
                common_borders[other_cluster_id] += length
    if not common_borders:
        return None
    total_common_border_length = sum(common_borders.values())
    total_adjacent_mwm_size_est = sum(dcu.clusters[x]['mwm_size_est'] for x in common_borders)
    # Score = normalized shared-border length minus normalized neighbour size.
    if total_adjacent_mwm_size_est:
        choice_criterion = lambda cluster_id: (
            common_borders[cluster_id] / total_common_border_length +
            -dcu.clusters[cluster_id]['mwm_size_est'] / total_adjacent_mwm_size_est
        )
    else:
        choice_criterion = lambda cluster_id: (
            common_borders[cluster_id] / total_common_border_length
        )
    best_cluster_id = max(
        filter(
            # Only neighbours that keep the merged estimate under threshold.
            lambda cluster_id: (
                (dcu.clusters[small_cluster_id]['mwm_size_est']
                 + dcu.clusters[cluster_id]['mwm_size_est']) <= dcu.mwm_size_thr
            ),
            common_borders.keys()
        ),
        default=None,
        key=choice_criterion
    )
    return best_cluster_id
def calculate_common_border_matrix(conn, subregion_ids):
    """Compute pairwise common border lengths for the given subregions.

    Returns a symmetric sparse matrix {subregion_id: {neighbour_id: length}}
    with zero-length pairs omitted; lengths are measured on the geography
    (meters).
    """
    # subregion_ids are integers coming from the database, so direct
    # interpolation into the SQL string is acceptable here.
    subregion_ids_str = ','.join(str(x) for x in subregion_ids)
    # ST_Length returns 0 if its parameter is a geometry other than
    # LINESTRING or MULTILINESTRING
    with conn.cursor() as cursor:
        cursor.execute(f"""
            SELECT b1.osm_id AS osm_id1, b2.osm_id AS osm_id2,
                   ST_Length(geography(ST_Intersection(b1.way, b2.way)))
            FROM {osm_table} b1, {osm_table} b2
            WHERE b1.osm_id IN ({subregion_ids_str})
              AND b2.osm_id IN ({subregion_ids_str})
              AND b1.osm_id < b2.osm_id
            """
        )
        common_border_matrix = {}  # {subregion_id: { subregion_id: float} } where len > 0
        for osm_id1, osm_id2, border_len in cursor:
            if border_len == 0.0:
                continue
            common_border_matrix.setdefault(osm_id1, {})[osm_id2] = border_len
            common_border_matrix.setdefault(osm_id2, {})[osm_id1] = border_len
    return common_border_matrix
def find_golden_splitting(conn, border_id, next_level, mwm_size_thr):
    """Group the admin subregions of `border_id` (at `next_level`) into
    clusters whose estimated mwm size does not exceed `mwm_size_thr`.

    Returns a DisjointClusterUnion with the resulting clustering, or
    None if the region has no subregions at the requested admin level.
    """
    subregions = get_subregions_info(conn, border_id, osm_table,
                                     next_level, need_cities=True)
    if not subregions:
        return
    dcu = DisjointClusterUnion(border_id, subregions, mwm_size_thr)
    all_subregion_ids = dcu.get_all_subregion_ids()
    common_border_matrix = calculate_common_border_matrix(conn, all_subregion_ids)
    # Greedy agglomeration: repeatedly take the smallest cluster and merge it
    # into its best neighbour until no merge fits under the size threshold.
    while True:
        if len(dcu.clusters) == 1:
            return dcu
        smallest_cluster_id = dcu.get_smallest_cluster()
        if not smallest_cluster_id:
            # No cluster left that may be merged.
            return dcu
        best_cluster_id = get_best_cluster_to_join_with(smallest_cluster_id,
                                                        common_border_matrix,
                                                        dcu)
        if not best_cluster_id:
            # No suitable neighbour; mark as finished — presumably so that
            # get_smallest_cluster() skips it on the next iteration
            # (otherwise this loop would never terminate) — TODO confirm.
            dcu.clusters[smallest_cluster_id]['finished'] = True
            continue
        assert (smallest_cluster_id != best_cluster_id), f"{smallest_cluster_id}"
        dcu.union(smallest_cluster_id, best_cluster_id)
    # Unreachable: the while-loop above always returns.
    return dcu
def get_union_sql(subregion_ids):
    """Return an SQL expression computing the union of the subregions'
    geometries.

    The list is split in halves rather than as head/tail so that both the
    Python recursion depth and the SQL nesting depth are O(log n) instead
    of O(n); the head/tail variant hit Python's recursion limit (and built
    unwieldy SQL) for regions with on the order of a thousand subregions.
    """
    assert(len(subregion_ids) > 0)
    if len(subregion_ids) == 1:
        return f"""
            SELECT way FROM {osm_table} WHERE osm_id={subregion_ids[0]}
            """
    else:
        # Balanced split keeps the ST_Union tree shallow.
        middle = len(subregion_ids) // 2
        return f"""
            SELECT ST_Union(
                ({get_union_sql(subregion_ids[:middle])}),
                ({get_union_sql(subregion_ids[middle:])})
            )
            """
def save_splitting_to_db(conn, dcu: DisjointClusterUnion):
    """Store the clusters held by `dcu` into the autosplit table,
    replacing any previous splitting of the same region with the
    same mwm_size_thr.
    """
    with conn.cursor() as cursor:
        # Remove previous splitting of the region.
        # Scalar values are passed as bound parameters instead of being
        # interpolated into the SQL text.
        cursor.execute(f"""
            DELETE FROM {autosplit_table}
            WHERE osm_border_id = %s
              AND mwm_size_thr = %s
            """, (dcu.region_id, dcu.mwm_size_thr)
        )
        for cluster_id, data in dcu.clusters.items():
            subregion_ids = data['subregion_ids']
            # PostgreSQL array literal, e.g. '{1,2,3}'.
            subregion_ids_array_str = (
                '{' + ','.join(str(x) for x in subregion_ids) + '}'
            )
            # The cluster geometry is computed server-side, hence the
            # inlined subquery; all other values are bound parameters.
            cluster_geometry_sql = get_union_sql(subregion_ids)
            cursor.execute(f"""
                INSERT INTO {autosplit_table} (osm_border_id, subregion_ids,
                                               geom, mwm_size_thr, mwm_size_est)
                VALUES (
                    %s,
                    %s,
                    ({cluster_geometry_sql}),
                    %s,
                    %s
                )
                """, (dcu.region_id, subregion_ids_array_str,
                      dcu.mwm_size_thr, data['mwm_size_est'])
            )
    conn.commit()
def split_region(conn, region_id, next_level, mwm_size_thr):
    """Compute an auto-splitting of the region into clusters of subregions
    of admin_level `next_level` and persist it to the database.

    Does nothing if no splitting could be computed (e.g. the region has
    no subregions at the requested level).
    """
    splitting = find_golden_splitting(conn, region_id, next_level, mwm_size_thr)
    if splitting is None:
        return
    save_splitting_to_db(conn, splitting)
    ## May need to debug
    #from auto_split_debug import save_splitting_to_file
    #save_splitting_to_file(conn, splitting)

View file

@ -0,0 +1,63 @@
import json
import os
from auto_split import (
DisjointClusterUnion,
get_union_sql,
)
from subregions import (
get_region_full_name,
)
# Whether to additionally write a separate .poly file per cluster
# besides the single file containing all clusters.
GENERATE_ALL_POLY = False
# Output directory for debug splitting results.
FOLDER = 'split_results'
def save_splitting_to_file(conn, dcu: DisjointClusterUnion):
    """Debug helper: dump the splitting held by `dcu` into FOLDER as
    a combined .poly file, optionally one .poly per cluster, and a JSON
    dump of the cluster data.
    """
    if not os.path.exists(FOLDER):
        os.mkdir(FOLDER)
    region_full_name = get_region_full_name(conn, dcu.region_id)
    filename_prefix = f"{region_full_name}-{dcu.mwm_size_thr}"
    with open(os.path.join(FOLDER, f"{filename_prefix}.poly"), 'w') as poly_file:
        poly_file.write(f"{filename_prefix}\n")
        for cluster_id, data in dcu.clusters.items():
            subregion_ids = data['subregion_ids']
            cluster_geometry_sql = get_union_sql(subregion_ids)
            geojson = get_geojson(conn, cluster_geometry_sql)
            geometry = json.loads(geojson)
            # Normalize Polygon/MultiPolygon to a list of polygons.
            polygons = ([geometry['coordinates']]
                        if geometry['type'] == 'Polygon'
                        else geometry['coordinates'])
            name_prefix = f"{filename_prefix}_{abs(cluster_id)}"
            write_polygons_to_poly(poly_file, polygons, name_prefix)
            if GENERATE_ALL_POLY:
                with open(os.path.join(FOLDER, f"{filename_prefix}{cluster_id}.poly"), 'w') as f:
                    # Fix: the poly-format header line must be terminated with
                    # a newline; previously the first ring name was glued to it.
                    f.write(f"{filename_prefix}_{cluster_id}\n")
                    write_polygons_to_poly(f, polygons, name_prefix)
                    f.write('END\n')
        poly_file.write('END\n')
    with open(os.path.join(FOLDER, f"{filename_prefix}-splitting.json"), 'w') as f:
        json.dump(dcu.clusters, f, ensure_ascii=False, indent=2)
def get_geojson(conn, sql_geometry_expr):
    """Evaluate an SQL geometry expression and return it as a GeoJSON string."""
    with conn.cursor() as cursor:
        cursor.execute(f"""SELECT ST_AsGeoJSON(({sql_geometry_expr}))""")
        row = cursor.fetchone()
    return row[0]
def write_polygons_to_poly(file, polygons, name_prefix):
    """Write the rings of a (multi)polygon in Osmosis poly format.

    The first ring of each polygon is treated as the outer ring; any
    following rings are holes, marked with a leading '!' and a negated
    ring index. The terminating 'END' of the whole file is NOT written.
    """
    ring_index = 1
    for polygon in polygons:
        for position, ring in enumerate(polygon):
            is_outer = (position == 0)
            mark = '' if is_outer else '!'
            label = ring_index if is_outer else -ring_index
            file.write(f"{mark}{name_prefix}_{label}\n")
            ring_index += 1
            for coord in ring:
                file.write('\t{:E}\t{:E}\n'.format(coord[0], coord[1]))
            file.write('END\n')

998
web/app/borders_api.py Executable file
View file

@ -0,0 +1,998 @@
#!/usr/bin/python3
import io
import re
import sys, traceback
import zipfile
from functools import wraps
from unidecode import unidecode
from flask import (
Flask, g,
request, Response, abort,
json, jsonify,
render_template,
send_file, send_from_directory
)
from flask_cors import CORS
from flask_compress import Compress
import psycopg2
import config
from borders_api_utils import *
from countries_structure import (
CountryStructureException,
create_countries_initial_structure,
)
from osm_xml import (
borders_from_xml,
borders_to_xml,
lines_to_xml,
)
from subregions import (
get_child_region_ids,
get_parent_region_id,
get_region_full_name,
get_similar_regions,
is_administrative_region,
update_border_mwm_size_estimation,
)
try:
from lxml import etree
LXML = True
except:
LXML = False
# Flask application setup: debug mode comes from config; responses are
# compressed and CORS-enabled.
app = Flask(__name__)
app.debug = config.DEBUG
Compress(app)
CORS(app)
# Keep non-ASCII characters (e.g. region names) unescaped in JSON responses.
app.config['JSON_AS_ASCII'] = False
def validate_args_types(**expected_types):
    """This decorator does early server method termination
    if some args from request.args cannot be converted into expected types.
    expected_types example: id=int, size=float, parent_id=(None, int)
    Only one type or tuple of the form (None, some_type) is allowed.
    """
    def f_with_validation(f):
        @wraps(f)
        def inner(*args, **kwargs):
            args_ok = True
            for arg, arg_type in expected_types.items():
                if isinstance(arg_type, tuple):
                    # A (None, some_type) tuple marks an optional argument.
                    assert len(arg_type) == 2 and arg_type[0] is None
                    arg_type = arg_type[1]
                    allow_None = True
                else:
                    assert arg_type is not None
                    allow_None = False
                arg_value = request.args.get(arg)
                if allow_None and arg_value is None:
                    continue
                try:
                    # A missing mandatory argument is None here, which fails
                    # conversion with TypeError and thus also yields 400.
                    arg_type(arg_value)
                except (TypeError, ValueError):
                    args_ok = False
                    break
            if not args_ok:
                return abort(400)
            return f(*args, **kwargs)
        return inner
    return f_with_validation
def check_write_access(f):
    """Decorator rejecting modifying requests with 403 when the server
    is configured as read-only."""
    @wraps(f)
    def guarded(*args, **kwargs):
        if not config.READONLY:
            return f(*args, **kwargs)
        abort(403)
    return guarded
@app.route('/static/<path:path>')
def send_js(path):
    """Serve static files from Flask only in debug mode; otherwise 404
    (static content is expected to be served by the web server)."""
    if not config.DEBUG:
        abort(404)
    return send_from_directory('static/', path)
@app.before_request
def before_request():
    # Open a fresh database connection per request; it is closed
    # in the teardown handler below.
    g.conn = psycopg2.connect(config.CONNECTION)
@app.teardown_request
def teardown(exception):
    """Close the per-request database connection if one was opened."""
    conn = getattr(g, 'conn', None)
    if conn is None:
        return
    conn.close()
@app.route('/')
@app.route('/index.html')
def index():
    # Main border-editing page.
    return render_template('index.html')
@app.route('/stat.html')
def stat():
    # Statistics page; data is served by the /stat endpoint.
    return render_template('stat.html')
@app.route('/bbox')
@validate_args_types(xmin=float, xmax=float, ymin=float, ymax=float)
def query_bbox():
    """Return a GeoJSON FeatureCollection of borders intersecting the
    given bounding box, optionally simplified ('simplify' is '1' or '2').
    """
    xmin = request.args.get('xmin')
    xmax = request.args.get('xmax')
    ymin = request.args.get('ymin')
    ymax = request.args.get('ymax')
    simplify_level = request.args.get('simplify')
    simplify = simplify_level_to_postgis_value(simplify_level)
    # 'table' may select one of the alternative borders tables.
    borders_table = request.args.get('table')
    borders_table = config.OTHER_TABLES.get(borders_table, config.BORDERS_TABLE)
    borders = fetch_borders(
        table=borders_table,
        simplify=simplify,
        where_clause=geom_intersects_bbox_sql(xmin, ymin, xmax, ymax)
    )
    return jsonify(
        status='ok',
        geojson={'type': 'FeatureCollection', 'features': borders}
    )
@app.route('/small')
@validate_args_types(xmin=float, xmax=float, ymin=float, ymax=float)
def query_small_in_bbox():
    """List rings smaller than config.SMALL_KM2 (in km2) intersecting the
    given bbox, with their centroid coordinates."""
    xmin = request.args.get('xmin')
    xmax = request.args.get('xmax')
    ymin = request.args.get('ymin')
    ymax = request.args.get('ymax')
    borders_table = request.args.get('table')
    borders_table = config.OTHER_TABLES.get(borders_table, config.BORDERS_TABLE)
    with g.conn.cursor() as cursor:
        # ST_Dump splits each (multi)polygon into individual rings so that
        # every small part of a region is reported separately.
        cursor.execute(f"""
            SELECT id, name, ST_Area(geography(ring))/1E6 AS area,
                   ST_X(ST_Centroid(ring)), ST_Y(ST_Centroid(ring))
            FROM (
                SELECT id, name, (ST_Dump(geom)).geom AS ring
                FROM {borders_table}
                WHERE {geom_intersects_bbox_sql(xmin, ymin, xmax, ymax)}
            ) g
            WHERE ST_Area(geography(ring))/1E6 < %s
            """, (config.SMALL_KM2,)
        )
        rings = []
        for border_id, name, area, lon, lat in cursor:
            rings.append({
                'id': border_id,
                'name': name,
                'area': area,
                'lon': lon,
                'lat': lat
            })
    return jsonify(rings=rings)
@app.route('/config')
def get_server_configuration():
    """Report which optional features are available: the OSM table, the
    backup table, usable alternative borders tables, read-only mode, and
    the mwm size threshold."""
    osm = False
    backup = False
    old = []
    with g.conn.cursor() as cursor:
        # Each probe selects a couple of rows just to verify the table
        # exists and has the expected columns.
        try:
            cursor.execute(f"""SELECT osm_id, ST_Area(way), admin_level, name
                               FROM {config.OSM_TABLE} LIMIT 2""")
            if cursor.rowcount == 2:
                osm = True
        except psycopg2.Error as e:
            # NOTE(review): a failed statement aborts the transaction, so the
            # following probes likely fail too unless autocommit is enabled —
            # confirm the connection mode.
            pass
        try:
            cursor.execute(f"""SELECT backup, id, name, parent_id, cmnt,
                                      modified, disabled, count_k, ST_Area(geom)
                               FROM {config.BACKUP} LIMIT 2""")
            backup = True
        except psycopg2.Error as e:
            pass
        for t, tname in config.OTHER_TABLES.items():
            try:
                cursor.execute(f"""SELECT name, ST_Area(geom), modified,
                                          disabled, count_k, cmnt
                                   FROM {tname} LIMIT 2""")
                if cursor.rowcount == 2:
                    old.append(t)
            except psycopg2.Error as e:
                pass
    return jsonify(osm=osm, tables=old,
                   readonly=config.READONLY,
                   backup=backup,
                   mwm_size_thr=config.MWM_SIZE_THRESHOLD)
@app.route('/search')
def search():
    """Return the bounding box of the smallest border whose name contains
    the query substring (case-insensitive)."""
    query = request.args.get('q')
    with g.conn.cursor() as cursor:
        # Leading-wildcard ILIKE cannot use an index; acceptable for an
        # interactive search over a moderately sized borders table.
        cursor.execute(f"""
            SELECT ST_XMin(geom), ST_YMin(geom), ST_XMax(geom), ST_YMax(geom)
            FROM {config.BORDERS_TABLE}
            WHERE name ILIKE %s
            ORDER BY (ST_Area(geography(geom)))
            LIMIT 1""", (f'%{query}%',)
        )
        if cursor.rowcount > 0:
            rec = cursor.fetchone()
            return jsonify(status='ok', bounds=rec)
    return jsonify(status='not found')
@app.route('/split')
@check_write_access
@validate_args_types(id=int)
def split():
    """Split a single-polygon region by a WKT line ('line' arg).

    If 'save_region' is 'true' the original region is kept and becomes the
    parent of the new pieces; otherwise it is deleted. New regions get
    negative ids and names '<base>_1', '<base>_2', ...
    """
    region_id = int(request.args.get('id'))
    line = request.args.get('line')
    save_region = (request.args.get('save_region') == 'true')
    borders_table = config.BORDERS_TABLE
    with g.conn.cursor() as cursor:
        # check that we're splitting a single polygon
        cursor.execute(f"""
            SELECT ST_NumGeometries(geom) FROM {borders_table} WHERE id = %s
            """, (region_id,)
        )
        res = cursor.fetchone()
        if not res or res[0] != 1:
            return jsonify(status='border should have one outer ring')
        cursor.execute(f"""
            SELECT ST_AsText(
                    (ST_Dump(ST_Split(geom, ST_GeomFromText(%s, 4326)))).geom)
            FROM {borders_table}
            WHERE id = %s
            """, (line, region_id)
        )
        if cursor.rowcount > 1:
            # no use of doing anything if the polygon wasn't modified
            geometries = []
            for res in cursor:
                geometries.append(res[0])
            # get region properties and delete old border
            cursor.execute(f"""
                SELECT name, parent_id, disabled FROM {borders_table} WHERE id = %s
                """, (region_id,))
            name, parent_id, disabled = cursor.fetchone()
            if save_region:
                parent_id = region_id
            else:
                cursor.execute(f"DELETE FROM {borders_table} WHERE id = %s",
                               (region_id,))
            base_name = name
            # insert new geometries
            counter = 1
            new_ids = []
            free_id = get_free_id()
            for geom in geometries:
                cursor.execute(f"""
                    INSERT INTO {borders_table} (id, name, geom, disabled,
                                                 count_k, modified, parent_id)
                    VALUES (%s, %s, ST_GeomFromText(%s, 4326), %s, -1,
                            now(), %s)
                    """, (free_id, f'{base_name}_{counter}', geom,
                          disabled, parent_id)
                )
                new_ids.append(free_id)
                counter += 1
                free_id -= 1
            warnings = []
            for border_id in new_ids:
                try:
                    update_border_mwm_size_estimation(g.conn, border_id)
                except Exception as e:
                    warnings.append(str(e))
    g.conn.commit()
    # NOTE(review): 'warnings' is only assigned when the line actually split
    # the polygon (rowcount > 1); otherwise this line raises NameError —
    # confirm intended behavior for a non-splitting line.
    return jsonify(status='ok', warnings=warnings)
@app.route('/join')
@check_write_access
@validate_args_types(id1=int, id2=int)
def join_borders():
    """Merge region id2 into region id1: the union of geometries and the
    sum of mwm size estimations are stored under a fresh (free) id, and
    region id2 is deleted."""
    region_id1 = int(request.args.get('id1'))
    region_id2 = int(request.args.get('id2'))
    if region_id1 == region_id2:
        return jsonify(status='failed to join region with itself')
    with g.conn.cursor() as cursor:
        try:
            borders_table = config.BORDERS_TABLE
            free_id = get_free_id()
            # count_k = -1 marks the geometry as needing recalculation.
            cursor.execute(f"""
                UPDATE {borders_table}
                SET id = {free_id},
                    geom = ST_Union({borders_table}.geom, b2.geom),
                    mwm_size_est = {borders_table}.mwm_size_est + b2.mwm_size_est,
                    count_k = -1
                FROM (SELECT geom, mwm_size_est FROM {borders_table} WHERE id = %s) AS b2
                WHERE id = %s""", (region_id2, region_id1)
            )
            cursor.execute(f"DELETE FROM {borders_table} WHERE id = %s", (region_id2,))
        except psycopg2.Error as e:
            g.conn.rollback()
            return jsonify(status=str(e))
    g.conn.commit()
    return jsonify(status='ok')
@app.route('/join_to_parent')
@check_write_access
@validate_args_types(id=int)
def join_to_parent():
    """Find all descendants of a region and remove them starting
    from the lowest hierarchical level to not violate 'parent_id'
    foreign key constraint (which is probably not in ON DELETE CASCADE mode)

    The @check_write_access guard is added for consistency with the other
    endpoints that modify the borders table (/split, /join, /delete, ...).
    """
    region_id = int(request.args.get('id'))
    parent_id = get_parent_region_id(g.conn, region_id)
    if not parent_id:
        return jsonify(status=f"Region {region_id} does not exist or has no parent")
    # Breadth-first collection of descendant ids, one list per level.
    # NOTE(review): the traversal starts from the parent, so ALL of the
    # parent's descendants — the region and its siblings included — get
    # deleted; confirm this is intended.
    descendants = [[parent_id]]  # regions ordered by hierarchical level
    while True:
        parent_ids = descendants[-1]
        child_ids = list(itertools.chain.from_iterable(
            get_child_region_ids(g.conn, parent_id) for parent_id in parent_ids
        ))
        if child_ids:
            descendants.append(child_ids)
        else:
            break
    with g.conn.cursor() as cursor:
        borders_table = config.BORDERS_TABLE
        # Delete from the deepest level upwards. The ids come from the
        # database as integers, so embedding them into SQL text is safe.
        while len(descendants) > 1:
            lowest_ids = descendants.pop()
            ids_str = ','.join(str(x) for x in lowest_ids)
            cursor.execute(f"""
                DELETE FROM {borders_table} WHERE id IN ({ids_str})"""
            )
    g.conn.commit()
    return jsonify(status='ok')
@app.route('/set_parent')
@check_write_access
@validate_args_types(id=int, parent_id=(None, int))
def set_parent():
    """Set the parent of a region, or clear it when 'parent_id' is absent.

    The @check_write_access guard is added for consistency with the other
    endpoints that modify the borders table.
    """
    region_id = int(request.args.get('id'))
    parent_id = request.args.get('parent_id')
    parent_id = int(parent_id) if parent_id else None
    borders_table = config.BORDERS_TABLE
    with g.conn.cursor() as cursor:
        cursor.execute(f"""
            UPDATE {borders_table} SET parent_id = %s WHERE id = %s
            """, (parent_id, region_id)
        )
    g.conn.commit()
    return jsonify(status='ok')
@app.route('/point')
@validate_args_types(lat=float, lon=float)
def find_osm_borders():
    """List OSM admin borders containing the given point, deepest
    admin_level first, each with its area in km2."""
    lat = request.args.get('lat')
    lon = request.args.get('lon')
    with g.conn.cursor() as cursor:
        # Invalid geometries may yield NaN area; report 0 instead.
        cursor.execute(f"""
            SELECT osm_id, name, admin_level,
                   (CASE
                         WHEN ST_Area(geography(way)) = 'NaN'::DOUBLE PRECISION
                         THEN 0
                         ELSE ST_Area(geography(way))/1E6
                    END) AS area_km
            FROM {config.OSM_TABLE}
            WHERE ST_Contains(way, ST_SetSRID(ST_Point(%s, %s), 4326))
            ORDER BY admin_level DESC, name ASC
            """, (lon, lat)
        )
        result = []
        for osm_id, name, admin_level, area in cursor:
            border = {'id': osm_id, 'name': name,
                      'admin_level': admin_level, 'area': area}
            result.append(border)
    return jsonify(borders=result)
@app.route('/from_osm')
@check_write_access
@validate_args_types(id=int)
def copy_from_osm():
    """Copy a border from the OSM table into the borders table under the
    same id, optionally overriding its name with the 'name' argument."""
    osm_id = int(request.args.get('id'))
    name = request.args.get('name')
    # The name is passed as a bound parameter instead of being interpolated
    # into the SQL text, which was an SQL injection vector.
    # NOTE(review): the original fallback inserted the literal string 'name'
    # when no name was given; that behavior is preserved here — confirm
    # whether the osm table's "name" column was intended instead.
    new_name = name if name else 'name'
    borders_table = config.BORDERS_TABLE
    osm_table = config.OSM_TABLE
    with g.conn.cursor() as cursor:
        # Check if this id already in use
        cursor.execute(f"SELECT id FROM {borders_table} WHERE id = %s",
                       (osm_id,))
        rec = cursor.fetchone()
        if rec and rec[0]:
            return jsonify(status=f"Region with id={osm_id} already exists")
        cursor.execute(f"""
            INSERT INTO {borders_table} (id, geom, name, modified, count_k)
              SELECT osm_id, way, %s, now(), -1
              FROM {osm_table}
              WHERE osm_id = %s
            """, (new_name, osm_id)
        )
    assign_region_to_lowest_parent(osm_id)
    warnings = []
    try:
        update_border_mwm_size_estimation(g.conn, osm_id)
    except Exception as e:
        warnings.append(str(e))
    g.conn.commit()
    return jsonify(status='ok', warnings=warnings)
@app.route('/rename')
@check_write_access
@validate_args_types(id=int)
def set_name():
    """Rename a region of the borders table."""
    region_id = int(request.args.get('id'))
    new_name = request.args.get('new_name')
    borders_table = config.BORDERS_TABLE
    query = f"UPDATE {borders_table} SET name = %s WHERE id = %s"
    with g.conn.cursor() as cursor:
        cursor.execute(query, (new_name, region_id))
    g.conn.commit()
    return jsonify(status='ok')
@app.route('/delete')
@check_write_access
@validate_args_types(id=int)
def delete_border():
    """Delete a region from the borders table by its id."""
    region_id = int(request.args.get('id'))
    query = f"DELETE FROM {config.BORDERS_TABLE} WHERE id = %s"
    with g.conn.cursor() as cursor:
        cursor.execute(query, (region_id,))
    g.conn.commit()
    return jsonify(status='ok')
@app.route('/disable')
@check_write_access
@validate_args_types(id=int)
def disable_border():
    """Set the 'disabled' flag of a region."""
    region_id = int(request.args.get('id'))
    query = f"""
            UPDATE {config.BORDERS_TABLE}
            SET disabled = true
            WHERE id = %s"""
    with g.conn.cursor() as cursor:
        cursor.execute(query, (region_id,))
    g.conn.commit()
    return jsonify(status='ok')
@app.route('/enable')
@check_write_access
@validate_args_types(id=int)
def enable_border():
    """Clear the 'disabled' flag of a region."""
    region_id = int(request.args.get('id'))
    query = f"""
            UPDATE {config.BORDERS_TABLE}
            SET disabled = false
            WHERE id = %s"""
    with g.conn.cursor() as cursor:
        cursor.execute(query, (region_id,))
    g.conn.commit()
    return jsonify(status='ok')
@app.route('/comment', methods=['POST'])
@check_write_access
@validate_args_types(id=int)
def update_comment():
    """Update the free-text comment (cmnt column) of a region.

    The @check_write_access guard is added for consistency with the other
    endpoints that modify the borders table.
    """
    # NOTE(review): validate_args_types inspects request.args, while this
    # POST handler reads request.form; unless the client duplicates 'id'
    # in the query string, validation always aborts with 400 — confirm
    # against the frontend.
    region_id = int(request.form['id'])
    comment = request.form['comment']
    with g.conn.cursor() as cursor:
        cursor.execute(f"""
            UPDATE {config.BORDERS_TABLE}
            SET cmnt = %s WHERE id = %s
            """, (comment, region_id))
    g.conn.commit()
    return jsonify(status='ok')
@app.route('/divide_preview')
def divide_preview():
    # Show what the division would produce without modifying any data.
    return divide(preview=True)
@app.route('/divide')
@check_write_access
def divide_do():
    # Perform the division and store the results.
    return divide(preview=False)
@validate_args_types(id=int)
def divide(preview=False):
    """Divide a region into subregions of admin_level 'next_level'.

    With 'auto_divide' the subregions are first clustered under the
    'mwm_size_thr' threshold; with 'apply_to_similar' the operation is
    applied to all regions similar to the given one. Both options require
    the region to be administrative.
    """
    region_id = int(request.args.get('id'))
    try:
        # TODO: perform next_level field validation on client-side
        ## and move the parameter check to @validate_args_types decorator
        next_level = int(request.args.get('next_level'))
    except ValueError:
        return jsonify(status="Not a number in next level")
    is_admin_region = is_administrative_region(g.conn, region_id)
    region_ids = [region_id]
    apply_to_similar = (request.args.get('apply_to_similar') == 'true')
    if apply_to_similar:
        if not is_admin_region:
            return jsonify(status="Could not use 'apply to similar' "
                                  "for non-administrative regions")
        region_ids = get_similar_regions(g.conn, region_id, only_leaves=True)
    auto_divide = (request.args.get('auto_divide') == 'true')
    if auto_divide:
        if not is_admin_region:
            return jsonify(status="Could not apply auto-division "
                                  "to non-administrative regions")
        try:
            # TODO: perform mwm_size_thr field validation on client-side
            ## and move the parameter check to @validate_args_types decorator
            mwm_size_thr = int(request.args.get('mwm_size_thr'))
        except ValueError:
            return jsonify(status="Not a number in thresholds")
        divide_into_clusters_func = (
            divide_into_clusters_preview
            if preview
            else divide_into_clusters
        )
        return divide_into_clusters_func(
                region_ids, next_level,
                mwm_size_thr)
    else:
        divide_into_subregions_func = (
            divide_into_subregions_preview
            if preview
            else divide_into_subregions
        )
        return divide_into_subregions_func(region_ids, next_level)
@app.route('/chop1')
@check_write_access
@validate_args_types(id=int)
def chop_largest_or_farthest():
    """Split a multipolygon region into two children: one with the
    largest outer ring ('<name>_main') and one collecting all remaining
    rings ('<name>_small'). The original region stays and becomes
    their parent."""
    region_id = int(request.args.get('id'))
    borders_table = config.BORDERS_TABLE
    with g.conn.cursor() as cursor:
        cursor.execute(f"""SELECT ST_NumGeometries(geom)
                           FROM {borders_table}
                           WHERE id = {region_id}""")
        res = cursor.fetchone()
        if not res or res[0] < 2:
            return jsonify(status='border should have more than one outer ring')
        free_id1 = get_free_id()
        free_id2 = free_id1 - 1
        # ST_Dump splits the multipolygon into rings; the largest-by-area
        # ring becomes the '_main' region, the rest are ST_Collect'ed
        # into the '_small' region.
        cursor.execute(f"""
            INSERT INTO {borders_table} (id, parent_id, name, disabled,
                                         modified, geom)
                SELECT id, region_id, name, disabled, modified, geom FROM
                (
                    (WITH w AS (SELECT name, disabled, (ST_Dump(geom)).geom AS g
                                FROM {borders_table} WHERE id = {region_id})
                    (SELECT {free_id1} id, {region_id} region_id,
                            name||'_main' as name, disabled,
                            now() as modified, g as geom, ST_Area(g) as a
                     FROM w ORDER BY a DESC LIMIT 1)
                    UNION ALL
                    SELECT {free_id2} id, {region_id} region_id,
                           name||'_small' as name, disabled,
                           now() as modified, ST_Collect(g) AS geom,
                           ST_Area(ST_Collect(g)) as a
                    FROM (SELECT name, disabled, g, ST_Area(g) AS a
                          FROM w ORDER BY a DESC OFFSET 1) ww
                    GROUP BY name, disabled)
                ) x"""
        )
        warnings = []
        for border_id in (free_id1, free_id2):
            try:
                update_border_mwm_size_estimation(g.conn, border_id)
            except Exception as e:
                warnings.append(str(e))
    g.conn.commit()
    return jsonify(status='ok', warnings=warnings)
@app.route('/hull')
@check_write_access
@validate_args_types(id=int)
def draw_hull():
    """Replace a multipolygon region's geometry with its convex hull."""
    border_id = int(request.args.get('id'))
    borders_table = config.BORDERS_TABLE
    with g.conn.cursor() as cursor:
        cursor.execute(f"""
            SELECT ST_NumGeometries(geom) FROM {borders_table} WHERE id = %s
            """, (border_id,))
        res = cursor.fetchone()
        if not res or res[0] < 2:
            return jsonify(status='border should have more than one outer ring')
        cursor.execute(f"""
            UPDATE {borders_table} SET geom = ST_ConvexHull(geom)
            WHERE id = %s""", (border_id,)
        )
    g.conn.commit()
    return jsonify(status='ok')
@app.route('/backup')
@check_write_access
def backup_do():
    """Snapshot the whole borders table into the backup table, keyed by
    the current minute-precision timestamp. Refuses to overwrite an
    existing snapshot with the same timestamp."""
    with g.conn.cursor() as cursor:
        cursor.execute(f"""
            SELECT to_char(now(), 'IYYY-MM-DD HH24:MI'), max(backup)
            FROM {config.BACKUP}
            """)
        (timestamp, tsmax) = cursor.fetchone()
        if timestamp == tsmax:
            # A snapshot with this minute-precision timestamp already exists.
            return jsonify(status="please try again later")
        backup_table = config.BACKUP
        borders_table = config.BORDERS_TABLE
        cursor.execute(f"""
            INSERT INTO {backup_table}
                 (backup, id, name, parent_id, geom, disabled, count_k,
                  modified, cmnt, mwm_size_est)
              SELECT %s, id, name, parent_id, geom, disabled, count_k,
                     modified, cmnt, mwm_size_est
              FROM {borders_table}
            """, (timestamp,)
        )
    g.conn.commit()
    return jsonify(status='ok')
@app.route('/restore')
@check_write_access
def backup_restore():
    """Replace the whole borders table with the snapshot identified by
    the 'timestamp' argument."""
    ts = request.args.get('timestamp')
    borders_table = config.BORDERS_TABLE
    backup_table = config.BACKUP
    with g.conn.cursor() as cursor:
        cursor.execute(f"SELECT count(1) FROM {backup_table} WHERE backup = %s",
                       (ts,))
        (count,) = cursor.fetchone()
        if count <= 0:
            return jsonify(status="no such timestamp")
        # Delete + insert run in a single transaction; the commit below
        # makes the swap atomic for other connections.
        cursor.execute(f"DELETE FROM {borders_table}")
        cursor.execute(f"""
            INSERT INTO {borders_table}
                 (id, name, parent_id, geom, disabled, count_k,
                  modified, cmnt, mwm_size_est)
              SELECT id, name, parent_id, geom, disabled, count_k,
                     modified, cmnt, mwm_size_est
              FROM {backup_table}
              WHERE backup = %s
            """, (ts,)
        )
    g.conn.commit()
    return jsonify(status='ok')
@app.route('/backlist')
def backup_list():
    """List available backup snapshots, newest first, with the number of
    borders stored in each."""
    with g.conn.cursor() as cursor:
        cursor.execute(f"""SELECT backup, count(1)
                           FROM {config.BACKUP}
                           GROUP BY backup
                           ORDER BY backup DESC""")
        result = []
        for backup_name, borders_count in cursor:
            result.append({
                'timestamp': backup_name,
                'text': backup_name,
                'count': borders_count
            })
    # todo: count number of different objects for the last one
    return jsonify(backups=result)
@app.route('/backdelete')
@check_write_access
def backup_delete():
    """Delete the backup snapshot identified by the 'timestamp' argument."""
    ts = request.args.get('timestamp')
    with g.conn.cursor() as cursor:
        cursor.execute(f"""
            SELECT count(1) FROM {config.BACKUP} WHERE backup = %s
            """, (ts,))
        (count,) = cursor.fetchone()
        if count <= 0:
            return jsonify(status='no such timestamp')
        cursor.execute(f"DELETE FROM {config.BACKUP} WHERE backup = %s", (ts,))
    g.conn.commit()
    return jsonify(status='ok')
@app.route('/josm')
@validate_args_types(xmin=float, xmax=float, ymin=float, ymax=float)
def make_osm():
    """Export borders intersecting the given bbox as a JOSM-compatible
    OSM XML document."""
    xmin = request.args.get('xmin')
    xmax = request.args.get('xmax')
    ymin = request.args.get('ymin')
    ymax = request.args.get('ymax')
    borders_table = request.args.get('table')
    borders_table = config.OTHER_TABLES.get(borders_table, config.BORDERS_TABLE)
    borders = fetch_borders(
        table=borders_table,
        where_clause=geom_intersects_bbox_sql(xmin, ymin, xmax, ymax)
    )
    xml = borders_to_xml(borders)
    return Response(xml, mimetype='application/x-osm+xml')
@app.route('/josmbord')
@validate_args_types(id=int)
def josm_borders_along():
    """Export, as OSM XML, the pieces of OSM borders lying within a
    0.2-degree buffer around the given WKT line, clipped to region 'id'."""
    region_id = int(request.args.get('id'))
    line = request.args.get('line')
    # select all outer osm borders inside a buffer of the given line
    borders_table = config.BORDERS_TABLE
    osm_table = config.OSM_TABLE
    with g.conn.cursor() as cursor:
        cursor.execute(f"""
            WITH linestr AS (
                SELECT ST_Intersection(
                            geom,
                            ST_Buffer(ST_GeomFromText(%s, 4326), 0.2)
                       ) as line
                FROM {borders_table}
                WHERE id = %s
            ), osmborders AS (
                SELECT (ST_Dump(way)).geom as g
                FROM {osm_table}, linestr
                WHERE ST_Intersects(line, way)
            )
            SELECT ST_AsGeoJSON((
                        ST_Dump(
                            ST_LineMerge(
                                ST_Intersection(
                                    ST_Collect(ST_ExteriorRing(g)),
                                    line
                                )
                            )
                        )
                   ).geom)
            FROM osmborders, linestr
            GROUP BY line
            """, (line, region_id)
        )
        xml = lines_to_xml(rec[0] for rec in cursor)
    return Response(xml, mimetype='application/x-osm+xml')
def import_error(msg):
    """Report an import failure either as a JS alert snippet or as JSON,
    depending on configuration."""
    if not config.IMPORT_ERROR_ALERT:
        return jsonify(status=msg)
    return f'<script>alert("{msg}");</script>'
@app.route('/import', methods=['POST'])
@check_write_access
def import_osm():
    """Import borders from an uploaded OSM XML file, creating new regions
    and updating the modified ones. Returns counts of processed regions."""
    # Though this variable is not used it's necessary to consume request.data
    # so that nginx doesn't produce error like "#[error] 13#13: *65 readv()
    # failed (104: Connection reset by peer) while reading upstream"
    data = request.data
    if not LXML:
        return import_error("importing is disabled due to absent lxml library")
    f = request.files['file']
    if not f:
        return import_error("failed upload")
    try:
        tree = etree.parse(f)
    except Exception:
        return import_error("malformed xml document")
    if not tree:
        return import_error("bad document")
    result = borders_from_xml(tree)
    # On failure borders_from_xml yields an error-message string instead
    # of the regions dict. Fix: the original check `type(result) == 'str'`
    # compared a type object to the string 'str' and was always False.
    if isinstance(result, str):
        return import_error(result)
    regions = result
    # submit modifications to the database
    added = 0
    updated = 0
    free_id = None
    for r_id, region in regions.items():
        if not region['modified']:
            continue
        try:
            region_id = create_or_update_region(region, free_id)
        except psycopg2.Error as e:
            exc_type, exc_value, exc_traceback = sys.exc_info()
            traceback.print_exception(exc_type, exc_value, exc_traceback)
            return import_error("Database error. See server log for details")
        except Exception as e:
            return import_error(f"Import error: {str(e)}")
        if region_id < 0:
            # A negative id means a newly created region; keep allocating
            # successively lower free ids for subsequent creations.
            added += 1
            if free_id is None:
                free_id = region_id - 1
            else:
                free_id -= 1
        else:
            updated += 1
    g.conn.commit()
    return jsonify(regions=len(regions), added=added, updated=updated)
@app.route('/potential_parents')
@validate_args_types(id=int)
def potential_parents():
    # List regions that could serve as the parent of the given region.
    region_id = int(request.args.get('id'))
    parents = find_potential_parents(region_id)
    return jsonify(status='ok', parents=parents)
@app.route('/poly')
@validate_args_types(xmin=(None, float), xmax=(None, float),
                     ymin=(None, float), ymax=(None, float))
def export_poly():
    """Export leaf borders (optionally restricted to a bbox) as a zip
    archive of Osmosis .poly files, one file per region."""
    borders_table = request.args.get('table')
    borders_table = config.OTHER_TABLES.get(borders_table, config.BORDERS_TABLE)
    fetch_borders_args = {'table': borders_table, 'only_leaves': True}
    if 'xmin' in request.args:
        # If one coordinate is given then others are also expected.
        # If at least one coordinate is absent, SQL expressions
        # like ST_Point(NULL, 55.6) would provide NULL, and the
        # whole where_clause would be NULL, and the result set would be empty.
        xmin = request.args.get('xmin') or 'NULL'
        xmax = request.args.get('xmax') or 'NULL'
        ymin = request.args.get('ymin') or 'NULL'
        ymax = request.args.get('ymax') or 'NULL'
        fetch_borders_args['where_clause'] = geom_intersects_bbox_sql(xmin, ymin,
                                                                      xmax, ymax)
    borders = fetch_borders(**fetch_borders_args)
    memory_file = io.BytesIO()
    with zipfile.ZipFile(memory_file, 'w', zipfile.ZIP_DEFLATED) as zf:
        for border in borders:
            # Normalize Polygon/MultiPolygon to a list of polygons.
            geometry = border['geometry']
            polygons = ([geometry['coordinates']]
                        if geometry['type'] == 'Polygon'
                        else geometry['coordinates'])
            # sanitize name, src: http://stackoverflow.com/a/295466/1297601
            name = border['properties']['name'] or str(-border['properties']['id'])
            fullname = get_region_full_name(g.conn, border['properties']['id'])
            filename = unidecode(fullname)
            filename = re.sub('[^\w _-]', '', filename).strip()
            filename = filename + '.poly'
            poly = io.BytesIO()
            poly.write(name.encode() + b'\n')
            pcounter = 1
            for polygon in polygons:
                outer = True
                for ring in polygon:
                    # Inner rings (holes) are marked with '!' and a
                    # negated ring index.
                    poly.write('{inner_mark}{name}\n'.format(
                        inner_mark=('' if outer else '!'),
                        name=(pcounter if outer else -pcounter)
                    ).encode())
                    pcounter = pcounter + 1
                    for coord in ring:
                        poly.write('\t{:E}\t{:E}\n'.format(coord[0], coord[1]).encode())
                    poly.write(b'END\n')
                    outer = False
            poly.write(b'END\n')
            zf.writestr(filename, poly.getvalue())
            poly.close()
    memory_file.seek(0)
    return send_file(memory_file, attachment_filename='borders.zip', as_attachment=True)
@app.route('/stat')
def statistics():
    """Return statistics over the borders table. 'group' selects the kind:
    'total' (row count), 'sizes' (per-region sizes) or 'topo'
    (per-region ring topology)."""
    group = request.args.get('group')
    borders_table = request.args.get('table')
    borders_table = config.OTHER_TABLES.get(borders_table, config.BORDERS_TABLE)
    with g.conn.cursor() as cursor:
        if group == 'total':
            cursor.execute(f"SELECT count(1) FROM {borders_table}")
            return jsonify(total=cursor.fetchone()[0])
        elif group == 'sizes':
            # Invalid geometries may yield NaN area; report 0 instead.
            cursor.execute(f"""
                SELECT
                    name,
                    count_k,
                    ST_NPoints(geom),
                    ST_AsGeoJSON(ST_Centroid(geom)),
                    (CASE
                          WHEN ST_Area(geography(geom)) = 'NaN'::DOUBLE PRECISION
                          THEN 0
                          ELSE ST_Area(geography(geom))/1E6
                     END) AS area,
                    disabled,
                    (CASE
                          WHEN coalesce(cmnt, '') = '' THEN false
                          ELSE true
                     END) AS cmnt
                FROM {borders_table}"""
            )
            result = []
            for res in cursor:
                coord = json.loads(res[3])['coordinates']
                result.append({'name': res[0], 'lat': coord[1], 'lon': coord[0],
                               'size': res[1], 'nodes': res[2], 'area': res[4],
                               'disabled': res[5], 'commented': res[6]})
            return jsonify(regions=result)
        elif group == 'topo':
            # Per region: number of outer rings, the smallest ring area,
            # the total count of inner rings, and the centroid.
            cursor.execute(f"""
                SELECT
                    name,
                    count(1),
                    min(
                        CASE
                             WHEN ST_Area(geography(g)) = 'NaN'::DOUBLE PRECISION
                             THEN 0
                             ELSE ST_Area(geography(g))
                        END
                    ) / 1E6,
                    sum(ST_NumInteriorRings(g)),
                    ST_AsGeoJSON(ST_Centroid(ST_Collect(g)))
                FROM (SELECT name, (ST_Dump(geom)).geom AS g FROM {borders_table}) a
                GROUP BY name"""
            )
            result = []
            for (name, outer, min_area, inner, coords) in cursor:
                coord = json.loads(coords)['coordinates']
                result.append({'name': name, 'outer': outer,
                               'min_area': min_area, 'inner': inner,
                               'lon': coord[0], 'lat': coord[1]})
            return jsonify(regions=result)
    return jsonify(status='wrong group id')
@app.route('/border')
@validate_args_types(id=int)
def border():
    """Return one border as a GeoJSON feature, optionally simplified."""
    region_id = int(request.args.get('id'))
    borders_table = config.BORDERS_TABLE
    simplify_level = request.args.get('simplify')
    simplify = simplify_level_to_postgis_value(simplify_level)
    # region_id is validated as int by the decorator, so embedding it
    # in the where_clause is safe.
    borders = fetch_borders(
        table=borders_table,
        simplify=simplify,
        only_leaves=False,
        where_clause=f'id = {region_id}'
    )
    if not borders:
        return jsonify(status=f'No border with id={region_id} found')
    return jsonify(status='ok', geojson=borders[0])
@app.route('/start_over')
@check_write_access
def start_over():
    """Rebuild the initial hierarchy of country borders and drop all
    cached auto-splittings. Returns at most 10 warnings.

    The @check_write_access guard is added because this endpoint
    rewrites the borders data, like the other mutating endpoints.
    """
    try:
        warnings = create_countries_initial_structure(g.conn)
    except CountryStructureException as e:
        return jsonify(status=str(e))
    # Cached splittings refer to the old borders; invalidate them all.
    autosplit_table = config.AUTOSPLIT_TABLE
    with g.conn.cursor() as cursor:
        cursor.execute(f"DELETE FROM {autosplit_table}")
    g.conn.commit()
    return jsonify(status='ok', warnings=warnings[:10])
if __name__ == '__main__':
    # Development entry point; in deployment the app is served by a WSGI
    # server (an nginx upstream is mentioned in import_osm's comment).
    app.run(threaded=True)

View file

@ -0,0 +1,382 @@
import itertools
import json
from flask import g, jsonify
import config
from auto_split import split_region
from countries_structure import get_osm_border_name_by_osm_id
from subregions import (
get_region_country,
get_subregions_info,
is_administrative_region,
)
def geom_intersects_bbox_sql(xmin, ymin, xmax, ymax):
    """Return an SQL condition that checks whether the 'geom' column
    intersects the given bounding box (via the && bbox operator)."""
    bbox_expr = f'ST_MakeBox2D(ST_Point({xmin}, {ymin}),' \
                f'ST_Point({xmax}, {ymax}))'
    return f'(geom && {bbox_expr})'
def fetch_borders(**kwargs):
    """Fetch borders from the database as a list of GeoJSON Feature dicts,
    ordered by area descending.

    Keyword args:
        table: borders table name (default config.BORDERS_TABLE)
        simplify: tolerance for ST_SimplifyPreserveTopology; 0 disables
            simplification
        where_clause: SQL filter over the borders table (default '1=1')
        only_leaves: if True (default), exclude regions that are a parent
            of some other region
    """
    borders_table = kwargs.get('table', config.BORDERS_TABLE)
    simplify = kwargs.get('simplify', 0)
    where_clause = kwargs.get('where_clause', '1=1')
    only_leaves = kwargs.get('only_leaves', True)
    osm_table = config.OSM_TABLE
    geom = (f'ST_SimplifyPreserveTopology(geom, {simplify})'
            if simplify > 0 else 'geom')
    leaves_filter = (f""" AND id NOT IN (SELECT parent_id FROM {borders_table}
                                         WHERE parent_id IS NOT NULL)"""
                     if only_leaves else '')
    # Admin levels come from the OSM table (joined by id); parent data
    # comes from the borders table itself via correlated subqueries.
    query = f"""
        SELECT name, geometry, nodes, modified, disabled, count_k, cmnt,
               (CASE WHEN area = 'NaN'::DOUBLE PRECISION THEN 0 ELSE area END) AS area,
               id, admin_level, parent_id, parent_name, parent_admin_level,
               mwm_size_est
        FROM (
            SELECT name,
                   ST_AsGeoJSON({geom}, 7) as geometry,
                   ST_NPoints(geom) AS nodes,
                   modified,
                   disabled,
                   count_k,
                   cmnt,
                   round(ST_Area(geography(geom))) AS area,
                   id,
                   ( SELECT admin_level FROM {osm_table}
                     WHERE osm_id = t.id
                   ) AS admin_level,
                   parent_id,
                   ( SELECT name FROM {borders_table}
                     WHERE id = t.parent_id
                   ) AS parent_name,
                   ( SELECT admin_level FROM {osm_table}
                     WHERE osm_id = (SELECT parent_id FROM {borders_table} WHERE id = t.id)
                   ) AS parent_admin_level,
                   mwm_size_est
            FROM {borders_table} t
            WHERE ({where_clause}) {leaves_filter}
        ) q
        ORDER BY area DESC
        """
    with g.conn.cursor() as cursor:
        cursor.execute(query)
        borders = []
        for rec in cursor:
            region_id = rec[8]
            country_id, country_name = get_region_country(g.conn, region_id)
            props = { 'name': rec[0] or '', 'nodes': rec[2], 'modified': rec[3],
                      'disabled': rec[4], 'count_k': rec[5],
                      'comment': rec[6],
                      'area': rec[7],
                      'id': region_id,
                      'admin_level': rec[9],
                      'parent_id': rec[10],
                      'parent_name': rec[11],
                      'parent_admin_level': rec[12],
                      'country_id': country_id,
                      'country_name': country_name,
                      'mwm_size_est': rec[13]
                    }
            feature = {'type': 'Feature',
                       'geometry': json.loads(rec[1]),
                       'properties': props
                      }
            borders.append(feature)
    return borders
def simplify_level_to_postgis_value(simplify_level):
    """Map a UI simplification level ('1'/'2') to a tolerance in degrees
    for ST_SimplifyPreserveTopology; any other value means no simplification."""
    tolerance_by_level = {'2': 0.1, '1': 0.01}
    return tolerance_by_level.get(simplify_level, 0)
def get_subregions_for_preview(region_ids, next_level):
    """Collect preview features of 'next_level' subregions for all the
    given regions into a single flat list."""
    features = []
    for region_id in region_ids:
        features.extend(
            get_subregions_one_for_preview(region_id, next_level))
    return features
def get_subregions_one_for_preview(region_id, next_level):
    """Return GeoJSON Features for OSM borders of 'next_level' admin_level
    contained in the border with the given id.

    Geometries are simplified for a light-weight map preview.
    """
    osm_table = config.OSM_TABLE
    borders_table = config.BORDERS_TABLE
    with g.conn.cursor() as cursor:
        # We use ST_SimplifyPreserveTopology, since ST_Simplify would give NULL
        # for very little regions.
        cursor.execute(f"""
            SELECT name,
                   ST_AsGeoJSON(ST_SimplifyPreserveTopology(way, 0.01)) as way,
                   osm_id
            FROM {osm_table}
            WHERE ST_Contains(
                    (SELECT geom FROM {borders_table} WHERE id = %s), way
                  )
                AND admin_level = %s
            """, (region_id, next_level)
        )
        subregions = []
        for rec in cursor:
            feature = {'type': 'Feature', 'geometry': json.loads(rec[1]),
                       'properties': {'name': rec[0]}}
            subregions.append(feature)
    return subregions
def get_clusters_for_preview(region_ids, next_level, thresholds):
    """Collect autosplit-cluster preview features for all the given
    regions into a single flat list."""
    clusters = []
    for region_id in region_ids:
        clusters.extend(
            get_clusters_for_preview_one(region_id, next_level, thresholds))
    return clusters
def get_clusters_for_preview_one(region_id, next_level, mwm_size_thr):
    """Return GeoJSON Features for the clusters a region would be split
    into with the given mwm size threshold.

    The (expensive) splitting is performed only if its result for this
    (region, threshold) pair is not already cached in the autosplit table.
    """
    autosplit_table = config.AUTOSPLIT_TABLE
    # No user input here: the f-string only embeds placeholders.
    where_clause = f"""
        osm_border_id = %s
        AND mwm_size_thr = %s
        """
    splitting_sql_params = (region_id, mwm_size_thr)
    with g.conn.cursor() as cursor:
        cursor.execute(f"""
            SELECT 1 FROM {autosplit_table}
            WHERE {where_clause}
            """, splitting_sql_params
        )
        # Cache miss: compute and store the splitting.
        if cursor.rowcount == 0:
            split_region(g.conn, region_id, next_level, mwm_size_thr)

        cursor.execute(f"""
            SELECT subregion_ids[1],
                   ST_AsGeoJSON(ST_SimplifyPreserveTopology(geom, 0.01)) as way
            FROM {autosplit_table}
            WHERE {where_clause}
            """, splitting_sql_params
        )
        clusters = []
        for rec in cursor:
            # A cluster is identified by the first of its subregion ids.
            cluster = {
                'type': 'Feature',
                'geometry': json.loads(rec[1]),
                'properties': {'osm_id': int(rec[0])}
            }
            clusters.append(cluster)
    return clusters
def divide_into_subregions_preview(region_ids, next_level):
    """Return a JSON response with subregion geometries for a preview."""
    features = get_subregions_for_preview(region_ids, next_level)
    feature_collection = {'type': 'FeatureCollection', 'features': features}
    return jsonify(status='ok', subregions=feature_collection)
def divide_into_clusters_preview(region_ids, next_level, mwm_size_thr):
    """Return a JSON response with both subregion and cluster geometries
    for a division preview."""
    subregion_features = get_subregions_for_preview(region_ids, next_level)
    cluster_features = get_clusters_for_preview(region_ids, next_level,
                                                mwm_size_thr)
    return jsonify(
        status='ok',
        subregions={'type': 'FeatureCollection',
                    'features': subregion_features},
        clusters={'type': 'FeatureCollection',
                  'features': cluster_features}
    )
def divide_into_subregions(region_ids, next_level):
    """Divide each listed region into its 'next_level' subregions and
    commit the result."""
    for one_region_id in region_ids:
        divide_into_subregions_one(one_region_id, next_level)
    g.conn.commit()
    return jsonify(status='ok')
def divide_into_subregions_one(region_id, next_level):
    """Insert 'next_level' subregions of one region into the borders table.

    For an administrative region the children get the region itself as
    parent; otherwise (e.g. a synthetic cluster) the children are attached
    to the region's parent and the region is removed.
    """
    borders_table = config.BORDERS_TABLE
    osm_table = config.OSM_TABLE
    subregions = get_subregions_info(g.conn, region_id, borders_table,
                                     next_level, need_cities=False)
    with g.conn.cursor() as cursor:
        is_admin_region = is_administrative_region(g.conn, region_id)
        if is_admin_region:
            for subregion_id, data in subregions.items():
                # mwm_size_est is a number computed by this app, not user
                # input, so interpolating it into SQL is tolerated here.
                cursor.execute(f"""
                    INSERT INTO {borders_table}
                        (id, geom, name, parent_id, modified, count_k, mwm_size_est)
                    SELECT osm_id, way, name, %s, now(), -1, {data['mwm_size_est']}
                    FROM {osm_table}
                    WHERE osm_id = %s
                    """, (region_id, subregion_id)
                )
        else:
            for subregion_id, data in subregions.items():
                # Attach children to the region's own parent.
                cursor.execute(f"""
                    INSERT INTO {borders_table}
                        (id, geom, name, parent_id, modified, count_k, mwm_size_est)
                    SELECT osm_id, way, name,
                           (SELECT parent_id FROM {borders_table} WHERE id = %s),
                           now(), -1, {data['mwm_size_est']}
                    FROM {osm_table}
                    WHERE osm_id = %s
                    """, (region_id, subregion_id)
                )
            # NOTE(review): indentation was lost in extraction; the deletion
            # is assumed to apply only to non-administrative regions --
            # deleting an admin region would orphan the children just
            # inserted with parent_id = region_id. Confirm against VCS.
            cursor.execute(f"DELETE FROM {borders_table} WHERE id = %s", (region_id,))
    g.conn.commit()
def divide_into_clusters(region_ids, next_level, mwm_size_thr):
    """Divide each region into clusters of 'next_level' subregions so that
    each cluster's estimated mwm size stays below 'mwm_size_thr'.

    Clusters become new borders with the source region as parent.
    Commits on success and returns a JSON 'ok' response.
    """
    borders_table = config.BORDERS_TABLE
    autosplit_table = config.AUTOSPLIT_TABLE
    # Fix: the original created both cursors without ever closing them;
    # context managers guarantee deterministic cleanup.
    with g.conn.cursor() as cursor, g.conn.cursor() as insert_cursor:
        for region_id in region_ids:
            cursor.execute(f"SELECT name FROM {borders_table} WHERE id = %s",
                           (region_id,))
            base_name = cursor.fetchone()[0]

            where_clause = f"""
                osm_border_id = %s
                AND mwm_size_thr = %s
                """
            splitting_sql_params = (region_id, mwm_size_thr)
            cursor.execute(f"""
                SELECT 1 FROM {autosplit_table}
                WHERE {where_clause}
                """, splitting_sql_params
            )
            # Run the costly splitting only when it is not cached yet.
            if cursor.rowcount == 0:
                split_region(g.conn, region_id, next_level, mwm_size_thr)

            free_id = get_free_id()
            counter = 0
            cursor.execute(f"""
                SELECT subregion_ids
                FROM {autosplit_table} WHERE {where_clause}
                """, splitting_sql_params
            )
            if cursor.rowcount == 1:
                # The whole region fits into a single mwm: leave it as is.
                continue
            for rec in cursor:
                subregion_ids = rec[0]
                cluster_id = subregion_ids[0]
                if len(subregion_ids) == 1:
                    # Single-subregion cluster keeps its OSM id and name.
                    subregion_id = cluster_id
                    name = get_osm_border_name_by_osm_id(g.conn, subregion_id)
                else:
                    # Multi-subregion cluster gets a synthetic negative id
                    # and a derived name.
                    counter += 1
                    free_id -= 1
                    subregion_id = free_id
                    name = f"{base_name}_{counter}"
                insert_cursor.execute(f"""
                    INSERT INTO {borders_table} (id, name, parent_id, geom, modified, count_k, mwm_size_est)
                    SELECT {subregion_id}, %s, osm_border_id, geom, now(), -1, mwm_size_est
                    FROM {autosplit_table} WHERE subregion_ids[1] = %s AND {where_clause}
                    """, (name, cluster_id,) + splitting_sql_params
                )
    g.conn.commit()
    return jsonify(status='ok')
def get_free_id():
    """Return the next unused synthetic border id.

    Synthetic ids live below -1,000,000,000 so they never collide with
    negated OSM relation ids.
    """
    borders_table = config.BORDERS_TABLE
    with g.conn.cursor() as cursor:
        cursor.execute(
            f"SELECT min(id) FROM {borders_table} WHERE id < -1000000000")
        (min_id,) = cursor.fetchone()
    return min_id - 1 if min_id else -1_000_000_001
def assign_region_to_lowest_parent(region_id):
    """Lowest parent is the region with lowest (maximum by absolute value)
    admin_level containing given region. Returns True when a parent was
    found and assigned."""
    candidates = find_potential_parents(region_id)
    if not candidates:
        return False
    # Candidates come sorted by area ascending, so the first one is the
    # smallest (most specific) enclosing region.
    parent_id = candidates[0]['properties']['id']
    borders_table = config.BORDERS_TABLE
    with g.conn.cursor() as cursor:
        cursor.execute(f"""
            UPDATE {borders_table}
            SET parent_id = %s
            WHERE id = %s
            """, (parent_id, region_id)
        )
    return True
def create_or_update_region(region, free_id):
    """Insert a new border (when region['id'] < 0) or update an existing one.

    'region' is a dict with keys 'id', 'name', 'disabled', 'wkt'.
    Returns the id of the affected border. Does not commit.
    Raises Exception when an update targets a nonexistent border.
    """
    borders_table = config.BORDERS_TABLE
    with g.conn.cursor() as cursor:
        if region['id'] < 0:
            # Negative incoming id means "create a new border".
            if not free_id:
                free_id = get_free_id()
            region_id = free_id

            cursor.execute(f"""
                INSERT INTO {borders_table}
                    (id, name, disabled, geom, modified, count_k)
                VALUES (%s, %s, %s, ST_GeomFromText(%s, 4326), now(), -1)
                """, (region_id, region['name'],
                      region['disabled'], region['wkt'])
            )
            assign_region_to_lowest_parent(region_id)
            return region_id
        else:
            # NOTE(review): existing borders are looked up by the *negated*
            # incoming id -- presumably the client sends abs(id) while the
            # table stores relation ids as negatives; confirm with callers.
            cursor.execute(f"SELECT count(1) FROM {borders_table} WHERE id = %s",
                           (-region['id'],))
            rec = cursor.fetchone()
            if rec[0] == 0:
                raise Exception(f"Can't find border ({region['id']}) for update")
            cursor.execute(f"""
                UPDATE {borders_table}
                SET disabled = %s,
                    name = %s,
                    modified = now(),
                    count_k = -1,
                    geom = ST_GeomFromText(%s, 4326)
                WHERE id = %s
                """, (region['disabled'], region['name'],
                      region['wkt'], -region['id'])
            )
            return region['id']
def find_potential_parents(region_id):
    """Return GeoJSON Features of borders that could be the region's parent.

    A potential parent intersects the region, is larger than it, and covers
    more than half of its area. Features are sorted by area ascending, so
    the first one is the most specific parent.
    """
    borders_table = config.BORDERS_TABLE
    osm_table = config.OSM_TABLE
    # Cast to geography so areas are computed in square meters.
    p_geogr = "geography(p.geom)"
    c_geogr = "geography(c.geom)"
    query = f"""
        SELECT
            p.id,
            p.name,
            (SELECT admin_level FROM {osm_table} WHERE osm_id = p.id) admin_level,
            ST_AsGeoJSON(ST_SimplifyPreserveTopology(p.geom, 0.01)) geometry
        FROM {borders_table} p, {borders_table} c
        WHERE c.id = %s
            AND ST_Intersects(p.geom, c.geom)
            AND ST_Area({p_geogr}) > ST_Area({c_geogr})
            AND ST_Area(ST_Intersection({p_geogr}, {c_geogr})) >
                    0.5 * ST_Area({c_geogr})
        ORDER BY ST_Area({p_geogr})
        """
    with g.conn.cursor() as cursor:
        cursor.execute(query, (region_id,))
        parents = []
        for rec in cursor:
            props = {
                'id': rec[0],
                'name': rec[1],
                'admin_level': rec[2],
            }
            feature = {
                'type': 'Feature',
                'geometry': json.loads(rec[3]),
                'properties': props
            }
            parents.append(feature)
    return parents

137
web/app/borders_daemon.py Executable file
View file

@ -0,0 +1,137 @@
#!/usr/bin/python3
import logging
import sys
import time
import psycopg2
import config
try:
from daemon import runner
HAS_DAEMON = True
except:
HAS_DAEMON = False
borders_table = config.BORDERS_TABLE
CONNECT_WAIT_INTERVAL = 5
CHECK_BORDERS_INTERVAL = 10
# For geometries crossing 180th meridian envelope area calculates to
# very small values. Examples area 'United States', 'Chukotka Autonomous Okrug',
# 'Alaska', 'Tuvalu'. For those borders area > env_area.
# Limit on envelope area is imposed due to countries whose bbox covers half the world
# like France or Netherlands with oversea territories for which tile counting
# lasts too long.
no_count_queries = [
f"""
SELECT id, name
FROM (
SELECT id, name,
ST_Area(geography(geom))/1000000.0 area,
ST_Area(geography(ST_Envelope(geom)))/1000000.0 env_area
FROM {borders_table}
WHERE {condition}) q
WHERE area != 'NaN'::double precision
AND area <= env_area
AND env_area < 5000000
-- ORDER BY area -- makes the query too much slower (why?)
LIMIT 1
""" for condition in ('count_k < 0', 'count_k IS NULL')
]
class App():
    """Daemon that finds borders with an unknown tile point count
    (count_k < 0 or NULL) and computes it from the 'tiles' table.

    The std*/pidfile attributes are the interface expected by
    daemon.runner.DaemonRunner.

    NOTE(review): relies on the module-level 'logger' that is created only
    under __main__ -- importing this module and calling methods directly
    would raise NameError; confirm whether that is intended.
    """

    def __init__(self):
        self.stdin_path = '/dev/null'
        self.stdout_path = '/dev/tty'
        self.stderr_path = '/dev/tty'
        self.pidfile_path = config.DAEMON_PID_PATH
        self.pidfile_timeout = 5
        self.conn = None  # lazily (re)created by get_connection()

    def get_connection(self):
        """Return a live autocommit connection, retrying forever on failure."""
        while True:
            try:
                if self.conn is None or self.conn.closed:
                    self.conn = psycopg2.connect(config.CONNECTION)
                    self.conn.autocommit = True
                # Probe the connection with a trivial query.
                with self.conn.cursor() as cur:
                    cur.execute(f"SELECT count_k FROM {borders_table} LIMIT 1")
                return self.conn
            except psycopg2.Error:
                try:
                    self.conn.close()
                except Exception:
                    pass  # best effort: the connection may already be broken
                time.sleep(CONNECT_WAIT_INTERVAL)

    def _write_status(self, text):
        """Best-effort write of the daemon status file; errors are logged."""
        try:
            # Fix: use 'with' so the handle is closed even if write() fails.
            with open(config.DAEMON_STATUS_PATH, 'w') as f:
                f.write(text)
        except Exception as e:
            logger.error(str(e))

    def process(self, region_id, region_name):
        """Count tile points inside one border and store it in count_k."""
        msg = f'Processing {region_name} ({region_id})'
        logger.info(msg)
        self._write_status(msg)
        with self.get_connection().cursor() as cur:
            cur.execute(f"""
                UPDATE {borders_table}
                SET count_k = n.count
                FROM (SELECT coalesce(sum(t.count), 0) AS count
                      FROM {borders_table} b, tiles t
                      WHERE b.id = %s AND ST_Intersects(b.geom, t.tile)
                     ) AS n
                WHERE id = %s
                """, (region_id, region_id)
            )
        # Truncate the status file to signal the daemon is idle.
        self._write_status('')

    def find_region(self):
        """Return (id, name) of one pending border, or (None, None)."""
        with self.get_connection().cursor() as cur:
            cur.execute(no_count_queries[0])
            res = cur.fetchone()
            if not res:
                cur.execute(no_count_queries[1])
                res = cur.fetchone()
        return res if res else (None, None)

    def run(self):
        """Main loop: process pending borders, sleeping when there are none.

        Fix: catch only Exception (the original bare 'except' also swallowed
        KeyboardInterrupt/SystemExit, making the daemon unstoppable when run
        in the foreground).
        """
        while True:
            try:
                region_id, region_name = self.find_region()
                if region_id:
                    self.process(region_id, region_name)
                else:
                    time.sleep(CHECK_BORDERS_INTERVAL)
            except Exception:
                time.sleep(CHECK_BORDERS_INTERVAL)
def init_logger():
    """Create and return the daemon's file logger."""
    log = logging.getLogger("borders-daemon")
    log.setLevel(logging.INFO)
    file_handler = logging.FileHandler(config.DAEMON_LOG_PATH)
    file_handler.setFormatter(
        logging.Formatter("%(asctime)s [%(levelname)s] %(message)s"))
    log.addHandler(file_handler)
    return log
if __name__ == '__main__':
    app = App()
    logger = init_logger()
    # Run in the foreground when python-daemon is unavailable or when 'run'
    # is passed explicitly; otherwise hand control to DaemonRunner, which
    # daemonizes and dispatches start/stop/restart from argv.
    if not HAS_DAEMON or (len(sys.argv) > 1 and sys.argv[1] == 'run'):
        app.run()
    else:
        r = runner.DaemonRunner(app)
        r.do_action()

35
web/app/config.py Normal file
View file

@ -0,0 +1,35 @@
# postgresql connection string (passed verbatim to psycopg2.connect)
CONNECTION = 'dbname=borders user=borders password=borders host=dbhost port=5432'
# passed to flask.Debug
DEBUG = True
# if the main table is read-only
READONLY = False
# main table name
BORDERS_TABLE = 'borders'
# from where OSM borders are imported
OSM_TABLE = 'osm_borders'
# All populated places in OSM
OSM_PLACES_TABLE = 'osm_places'
# transit table for autosplitting results
AUTOSPLIT_TABLE = 'splitting'
# tables with borders for reference
OTHER_TABLES = {
    #'old': 'old_borders'
}
# backup table
BACKUP = 'borders_backup'
# area of an island (in km2) for it to be considered small
SMALL_KM2 = 10
# force multipolygons in JOSM output
JOSM_FORCE_MULTI = True
# alert instead of json on import error
IMPORT_ERROR_ALERT = False
# file to which daemon writes the name of currently processed region
DAEMON_STATUS_PATH = '/tmp/borders-daemon-status.txt'
# pid file and log destination of the borders daemon
DAEMON_PID_PATH = '/tmp/borders-daemon.pid'
DAEMON_LOG_PATH = '/var/log/borders-daemon.log'
# mwm size threshold in Kb (i.e. 70 MB)
MWM_SIZE_THRESHOLD = 70*1024
# Estimated mwm size is predicted by the 'model.pkl' with 'scaler.pkl' for X
MWM_SIZE_PREDICTION_MODEL_PATH = '/app/data/model.pkl'
MWM_SIZE_PREDICTION_MODEL_SCALER_PATH = '/app/data/scaler.pkl'

View file

@ -0,0 +1,243 @@
import itertools
# admin_level => list of countries which should be initially divided at one admin level.
# Names must match the 'name' tag of the level-2 OSM relation exactly.
unilevel_countries = {
    2: [
        'Afghanistan',
        'Albania',
        'Algeria',
        'Andorra',
        'Angola',
        'Antigua and Barbuda',
        'Armenia',
        'Australia',  # need to be divided at level 4 but has many small islands of level 4
        'Azerbaijan',  # has 2 non-covering 3-level regions
        'Bahrain',
        'Barbados',
        'Belize',
        'Benin',
        'Bermuda',
        'Bhutan',
        'Botswana',
        'British Sovereign Base Areas',  # ! include into Cyprus
        'British Virgin Islands',
        'Bulgaria',
        'Burkina Faso',
        'Burundi',
        'Cambodia',
        'Cameroon',
        'Cape Verde',
        'Central African Republic',
        'Chad',
        'Chile',
        'Colombia',
        'Comoros',
        'Congo-Brazzaville',  # BUG whith autodivision at level 4
        'Cook Islands',
        'Costa Rica',
        'Croatia',  # next level = 6
        'Cuba',
        'Cyprus',
        "Côte d'Ivoire",
        'Democratic Republic of the Congo',
        'Djibouti',
        'Dominica',
        'Dominican Republic',
        'East Timor',
        'Ecuador',
        'Egypt',
        'El Salvador',
        'Equatorial Guinea',
        'Eritrea',
        'Estonia',
        'Eswatini',
        'Ethiopia',
        'Falkland Islands',
        'Faroe Islands',
        'Federated States of Micronesia',
        'Fiji',
        'Gabon',
        'Georgia',
        'Ghana',
        'Gibraltar',
        'Greenland',
        'Grenada',
        'Guatemala',
        'Guernsey',
        'Guinea',
        'Guinea-Bissau',
        'Guyana',
        'Haiti',
        'Honduras',
        'Iceland',
        'Indonesia',
        'Iran',
        'Iraq',
        'Isle of Man',
        'Israel',  # ! don't forget to separate Jerusalem
        'Jamaica',
        'Jersey',
        'Jordan',
        'Kazakhstan',
        'Kenya',  # ! level 3 doesn't cover the whole country
        'Kiribati',
        'Kosovo',
        'Kuwait',
        'Kyrgyzstan',
        'Laos',
        'Latvia',
        'Lebanon',
        'Liberia',
        'Libya',
        'Liechtenstein',
        'Lithuania',
        'Luxembourg',
        'Madagascar',
        'Malaysia',
        'Maldives',
        'Mali',
        'Malta',
        'Marshall Islands',
        'Martinique',
        'Mauritania',
        'Mauritius',
        'Mexico',
        'Moldova',
        'Monaco',
        'Mongolia',
        'Montenegro',
        'Montserrat',
        'Mozambique',
        'Myanmar',
        'Namibia',
        'Nauru',
        'Nicaragua',
        'Niger',
        'Nigeria',
        'Niue',
        'North Korea',
        'North Macedonia',
        'Oman',
        'Palau',
        # ! 'Palestina' is not a country in OSM - need make an mwm
        'Panama',
        'Papua New Guinea',
        'Peru',  # need split-merge
        'Philippines',  # split at level 3 and merge or not merge
        'Qatar',
        'Romania',  # need split-merge
        'Rwanda',
        'Saint Helena, Ascension and Tristan da Cunha',
        'Saint Kitts and Nevis',
        'Saint Lucia',
        'Saint Vincent and the Grenadines',
        'San Marino',
        'Samoa',
        'Saudi Arabia',
        'Senegal',
        'Seychelles',
        'Sierra Leone',
        'Singapore',
        'Slovakia',  # ! split at level 3 then 4, and add Bratislava region (4)
        'Slovenia',
        'Solomon Islands',
        'Somalia',
        'South Georgia and the South Sandwich Islands',
        'South Korea',
        'South Sudan',
        'South Ossetia',  # ! don't forget to divide from Georgia
        'Sri Lanka',
        'Sudan',
        'São Tomé and Príncipe',
        'Suriname',
        'Switzerland',
        'Syria',
        'Taiwan',
        'Tajikistan',
        'Thailand',
        'The Bahamas',
        'The Gambia',
        'Togo',
        'Tokelau',
        'Tonga',
        'Trinidad and Tobago',
        'Tunisia',
        'Turkmenistan',
        'Turks and Caicos Islands',
        'Tuvalu',
        'United Arab Emirate',  # NOTE(review): OSM names it 'United Arab Emirates' - probable typo, verify against the osm_borders table
        'Uruguay',
        'Uzbekistan',
        'Vanuatu',
        'Venezuela',  # level 3 not comprehensive
        'Vietnam',
        # ! don't forget 'Wallis and Futuna', belongs to France
        'Yemen',
        'Zambia',
        'Zimbabwe',
    ],
    3: [
        'Malawi',
        'Nepal',  # ! one region is lost after division
        'Pakistan',
        'Paraguay',
        'Tanzania',
        'Turkey',
        'Uganda',
    ],
    4: [
        'Austria',
        'Bangladesh',
        'Belarus',  # maybe need merge capital region with the province
        'Belgium',  # maybe need merge capital region into encompassing province
        'Bolivia',
        'Bosnia and Herzegovina',  # other levels - 5, 6, 7 - are incomplete.
        'Canada',
        'China',  # ! don't forget about Macau and Hong Kong of level 3 not covered by level 4
        'Denmark',
        'Greece',  # ! has one small 3-level subregion!
        'Hungary',  # maybe multilevel division at levels [4, 5] ?
        'India',
        'Italy',
        'Japan',  # ? About 50 4-level subregions, some of which requires further division
        'Morocco',  # ! not all regions appear after substitution with level 4
        'New Zealand',  # ! don't forget islands to the north and south
        'Norway',
        'Poland',  # 380(!) subregions of AL=6
        'Portugal',
        'Russia',
        'Serbia',
        'South Africa',
        'Spain',
        'Ukraine',
        'United States',
    ],
    5: [
        'Ireland',  # ! 5-level don't cover the whole country
    ],
    6: [
        'Czechia',
    ]
}
# Country name => list of admin levels to which it should be initially divided.
# 'Germany': [4, 5] implies that the country is divided at level 4 at first, then all
# 4-level subregions are divided into subregions of level 5 (if any)
multilevel_countries = {
    'Brazil': [3, 4],
    'Finland': [3, 6],  # [3,5,6] in more fresh data? # division by level 6 seems ideal
    'France': [3, 4],
    'Germany': [4, 5],  # not the whole country is covered by units of AL=5
    'Netherlands': [3, 4],  # there are carribean lands of level both 3 and 4
    'Sweden': [3, 4],  # division by level 4 seems ideal
    'United Kingdom': [4, 5],  # level 5 is necessary but not comprehensive
}

# Combined map: country name => list of levels for the initial division.
# Countries divided only at level 2 map to an empty list, i.e. the country
# itself is not subdivided initially.
country_initial_levels = dict(itertools.chain(
    ((country, ([level] if level > 2 else []))
        for level, countries in unilevel_countries.items()
        for country in countries),
    multilevel_countries.items()
))

View file

@ -0,0 +1,140 @@
from config import (
BORDERS_TABLE as borders_table,
OSM_TABLE as osm_table
)
from countries_division import country_initial_levels
from subregions import (
get_subregions_info,
update_border_mwm_size_estimation,
)
class CountryStructureException(Exception):
    """Raised when the initial country/region hierarchy cannot be built."""
    pass
def _clear_borders(conn):
    # Deliberately unfiltered DELETE: the borders table is rebuilt from
    # scratch before creating the initial country structure.
    with conn.cursor() as cursor:
        cursor.execute(f"DELETE FROM {borders_table}")
    conn.commit()
def _find_subregions(conn, osm_ids, next_level, regions):
    """Return subregions of level 'next_level' for regions with osm_ids.

    Side effect: records each subregion's name, mwm_size_est and parent_id
    into the 'regions' dict.
    """
    found_ids = []
    for parent_osm_id in osm_ids:
        found = get_subregions_info(conn, parent_osm_id, borders_table,
                                    next_level, need_cities=False)
        for sub_id, sub_data in found.items():
            entry = regions.setdefault(sub_id, {})
            entry['name'] = sub_data['name']
            entry['mwm_size_est'] = sub_data['mwm_size_est']
            entry['parent_id'] = parent_osm_id
            found_ids.append(sub_id)
    return found_ids
def _create_regions(conn, osm_ids, regions):
    """Bulk-insert the given regions (described in the 'regions' dict)
    into the borders table, copying geometry from the OSM table.

    Does not commit. No-op on an empty osm_ids.
    """
    if not osm_ids:
        return
    osm_ids = list(osm_ids)  # to ensure order
    # osm_id, parent_id and mwm_size_est are interpolated directly: they are
    # integers/floats coming from the database, not user input. Names go
    # through %s placeholders since they are arbitrary text.
    sql_values = ','.join(
        f'({osm_id},'
        '%s,'
        f"{regions[osm_id].get('parent_id', 'NULL')},"
        f"{regions[osm_id].get('mwm_size_est', 'NULL')},"
        f'(SELECT way FROM {osm_table} WHERE osm_id={osm_id}),'
        'now())'
        for osm_id in osm_ids
    )
    with conn.cursor() as cursor:
        cursor.execute(f"""
            INSERT INTO {borders_table} (id, name, parent_id, mwm_size_est,
                                         geom, modified)
            VALUES {sql_values}
            """, tuple(regions[osm_id]['name'] for osm_id in osm_ids)
        )
def _make_country_structure(conn, country_osm_id):
    """Create the country border and its initial subdivision levels as
    prescribed by country_initial_levels.

    Returns a warning string (or None) when mwm size estimation fails for
    an undivided country.
    """
    regions = {}  # osm_id: { 'name': name,
                  #           'mwm_size_est': size,
                  #           'parent_id': parent_id }

    country_name = get_osm_border_name_by_osm_id(conn, country_osm_id)
    country_data = regions.setdefault(country_osm_id, {})
    country_data['name'] = country_name
    # TODO: country_data['mwm_size_est'] = ...

    _create_regions(conn, [country_osm_id], regions)

    if country_initial_levels.get(country_name):
        admin_levels = country_initial_levels[country_name]
        # Each division step takes the regions of the previous level as input;
        # the country itself counts as level 2.
        prev_admin_levels = [2] + admin_levels[:-1]
        prev_region_ids = [country_osm_id]

        for admin_level, prev_level in zip(admin_levels, prev_admin_levels):
            if not prev_region_ids:
                raise CountryStructureException(
                    f"Empty prev_region_ids at {country_name}, "
                    f"AL={admin_level}, prev-AL={prev_level}"
                )
            subregion_ids = _find_subregions(conn, prev_region_ids,
                                             admin_level, regions)
            _create_regions(conn, subregion_ids, regions)
            prev_region_ids = subregion_ids
    warning = None
    # Undivided country: estimate its mwm size directly.
    if len(regions) == 1:
        try:
            update_border_mwm_size_estimation(conn, country_osm_id)
        except Exception as e:
            warning = str(e)
    return warning
def create_countries_initial_structure(conn):
    """Wipe the borders table and rebuild it from all level-2 OSM countries.

    Returns a list of warning strings collected per country.
    """
    _clear_borders(conn)
    with conn.cursor() as cursor:
        # TODO: process overlapping countries, like Ukraine and Russia with common Crimea
        # NOTE(review): Ukraine is excluded here, presumably because of the
        # overlap issue above - confirm this is still intended.
        cursor.execute(f"""
            SELECT osm_id, name
            FROM {osm_table}
            WHERE admin_level = 2 and name != 'Ukraine'
            """
        )
        warnings = []
        for rec in cursor:
            warning = _make_country_structure(conn, rec[0])
            if warning:
                warnings.append(warning)
    conn.commit()
    return warnings
def get_osm_border_name_by_osm_id(conn, osm_id):
    """Return the name of the OSM border with the given osm_id.

    Raises CountryStructureException when no such border exists.
    """
    with conn.cursor() as cursor:
        cursor.execute(f"""
            SELECT name FROM {osm_table}
            WHERE osm_id = %s
            """, (osm_id,))
        row = cursor.fetchone()
    if row is None:
        raise CountryStructureException(
            f'Not found region with osm_id="{osm_id}"'
        )
    return row[0]
def _get_country_osm_id_by_name(conn, name):
    """Return the osm_id of the level-2 country with the given name.

    Raises CountryStructureException when the name is ambiguous or absent.
    """
    with conn.cursor() as cursor:
        cursor.execute(f"""
            SELECT osm_id FROM {osm_table}
            WHERE admin_level = 2 AND name = %s
            """, (name,))
        if cursor.rowcount > 1:
            raise CountryStructureException(f'More than one country "{name}"')
        row = cursor.fetchone()
    if not row:
        raise CountryStructureException(f'Not found country "{name}"')
    return int(row[0])

BIN
web/app/data/model.pkl Normal file

Binary file not shown.

BIN
web/app/data/mwm_data.xlsx Normal file

Binary file not shown.

View file

@ -0,0 +1,119 @@
import pandas as pd
import numpy as np
from sklearn.linear_model import LinearRegression
from sklearn.model_selection import train_test_split
from sklearn.model_selection import (
cross_val_score,
KFold,
GridSearchCV,
)
from sklearn.svm import SVR
from sklearn.preprocessing import StandardScaler
# Load the training sample; rows with exclude != 0 are filtered out.
data = pd.read_excel('mwm_data.xlsx', sheet_name='mwms_all', header=1)
data = data[data['exclude'] == 0]

#data['is_urban2'] = data.apply(lambda row: row['pop_density'] > 260, axis=1) # 260 - median of pop_density

popul_column = 'urban_pop'  # options are 'population' and 'urban_pop' (for population of cities and towns only)
feature_names = [popul_column, 'area', 'city_cnt', 'hamlet_cnt']
target_name = 'size'  # mwm file size - the regression target

# Fill missing feature values with 0 - except 'area': a missing area
# indicates broken input and should fail loudly.
for feature in set(feature_names) - set(['area']):  # if area is None it's an error!
    data[feature] = data[feature].fillna(0)

scoring = 'neg_mean_squared_error'  # another option is 'r2'
def my_cross_validation(sample):
    """Print 5-fold cross-validation scores for three candidate estimators
    (linear regression, linear SVR, RBF SVR) on the given sample."""
    X = sample[feature_names]
    y = sample[target_name]

    # SVR is sensitive to feature scale, so standardize first.
    sc_X = StandardScaler()
    X = sc_X.fit_transform(X)

    lin_regression = LinearRegression(fit_intercept=False)
    svr_linear = SVR(kernel='linear')
    svr_rbf = SVR(kernel='rbf')

    for estimator_name, estimator in zip(
            ('LinRegression', 'SVR_linear', 'SVR_rbf'),
            (lin_regression, svr_linear, svr_rbf)):
        # Fixed random_state keeps folds comparable between estimators.
        cv_scores = cross_val_score(estimator, X, y,
                                    cv=KFold(5, shuffle=True, random_state=1),
                                    scoring=scoring)
        mean_score = np.mean(cv_scores)
        print(f"{estimator_name:15}", cv_scores, mean_score)
def my_grid_search(sample):
    """Run a GridSearchCV over SVR kernels/hyperparameters and print the
    searched grids and the best parameter set found."""
    X = sample[feature_names]
    y = sample[target_name]

    # Standardize features before fitting SVR.
    sc_X = StandardScaler()
    X = sc_X.fit_transform(X)

    X_train, X_test, y_train, y_test = train_test_split(
        X, y, test_size=0.2, random_state=0)

    # Hyperparameter grids; ranges were narrowed by earlier experiments.
    C_array = [10 ** n for n in range(6, 7)]
    gamma_array = [0.009 + i * 0.001 for i in range(-7, 11, 2)] + ['auto', 'scale']
    epsilon_array = [0.5 * i for i in range(0, 15)]
    coef0_array = [-0.1, -0.01, 0, 0.01, 0.1]
    param_grid = [
        {'kernel': ['linear'], 'C': C_array, 'epsilon': epsilon_array},
        {'kernel': ['rbf'], 'C': C_array, 'gamma': gamma_array, 'epsilon': epsilon_array},
        {'kernel': ['poly', 'sigmoid'],
         'C': C_array, 'gamma': gamma_array, 'epsilon': epsilon_array, 'coef0': coef0_array},
    ]

    svr = SVR()
    grid_search = GridSearchCV(svr, param_grid, scoring=scoring)
    grid_search.fit(X_train, y_train)

    #means = grid_search.cv_results_['mean_test_score']
    #stds = grid_search.cv_results_['std_test_score']
    #print("Grid scores on development set:")
    #for mean, std, params in zip(means, stds, grid_search.cv_results_['params']):
    #    print("%0.3f (+/-%0.03f) for %r" % (mean, std, params))

    print("C", C_array)
    print("gamma", gamma_array)
    print("epsilon", epsilon_array)
    print("coef0", coef0_array)
    print("Best_params:", grid_search.best_params_, grid_search.best_score_)
def train_and_serialize_model(sample):
    """Fit the production SVR model on the full sample and pickle both the
    model ('model.pkl') and its feature scaler ('scaler.pkl').

    Also prints predictions for the first 4 rows before and after
    round-tripping through pickle, as a sanity check.
    """
    X = sample[feature_names]
    y = sample[target_name]
    # Keep a few unscaled rows to verify the deserialized scaler+model pair.
    X_head = X[0:4]

    scaler = StandardScaler()
    X = scaler.fit_transform(X)

    # Parameters tuned with GridSearch
    regressor = SVR(kernel='rbf', C=10**6, epsilon=0.0, gamma=0.012)
    regressor.fit(X, y)
    print(regressor.predict(X[0:4]))

    # Serialize model
    import pickle
    with open('model.pkl', 'wb') as f:
        pickle.dump(regressor, f)
    with open('scaler.pkl', 'wb') as f:
        pickle.dump(scaler, f)

    # Deserialize model and test it on X_head samples
    with open('model.pkl', 'rb') as f:
        regressor2 = pickle.load(f)
    with open('scaler.pkl', 'rb') as f:
        scaler2 = pickle.load(f)
    print(regressor2.predict(scaler2.transform(X_head)))
if __name__ == '__main__':
    # Retrain on the spreadsheet data and (re)write model.pkl / scaler.pkl.
    train_and_serialize_model(data)

BIN
web/app/data/scaler.pkl Normal file

Binary file not shown.

View file

@ -0,0 +1,38 @@
import numpy as np
import pickle
import config
class MwmSizePredictor:
    """Lazy singleton around the pickled regression model and its feature
    scaler, used to estimate mwm file sizes."""

    def __init__(self):
        # Load the serialized model and the scaler it was trained with.
        with open(config.MWM_SIZE_PREDICTION_MODEL_PATH, 'rb') as f:
            self.model = pickle.load(f)
        with open(config.MWM_SIZE_PREDICTION_MODEL_SCALER_PATH, 'rb') as f:
            self.scaler = pickle.load(f)

    @classmethod
    def _get_instance(cls):
        # Create the shared instance on first use only.
        if not hasattr(cls, '_instance'):
            cls._instance = cls()
        return cls._instance

    @classmethod
    def predict(cls, features_array):
        """1D or 2D array of feature values for predictions. Features are
        'urban_pop', 'area', 'city_cnt', 'hamlet_cnt' as defined for the
        prediction model.
        """
        X = np.array(features_array)
        is_single_sample = (X.ndim == 1)
        if is_single_sample:
            X = X.reshape(1, -1)
        instance = cls._get_instance()
        predictions = instance.model.predict(instance.scaler.transform(X))
        # Mirror the input shape: scalar for 1D input, list for 2D.
        if is_single_sample:
            return predictions[0]
        return predictions.tolist()

347
web/app/osm_xml.py Normal file
View file

@ -0,0 +1,347 @@
import json
import config
# Replacements making a string safe inside a double-quoted XML attribute.
# '&' must stay first: _quoteattr applies these in dict order, and later
# replacements introduce '&' that must not be escaped again.
XML_ATTR_ESCAPINGS = {
    '&': '&amp;',
    '>': '&gt;',
    '<': '&lt;',
    '\n': '&#10;',
    '\r': '&#13;',
    '\t': '&#9;',
    '"': '&quot;'
}
def _quoteattr(value):
    """Escape a string and wrap it in double quotes so it can be used as
    an XML attribute value ('&' is replaced first by dict order)."""
    escaped = value
    for char in XML_ATTR_ESCAPINGS:
        escaped = escaped.replace(char, XML_ATTR_ESCAPINGS[char])
    return f'"{escaped}"'
def get_xml_header():
    """Return the opening of a JOSM-compatible, non-uploadable OSM XML doc."""
    declaration = '<?xml version="1.0" encoding="UTF-8"?>'
    osm_open_tag = '<osm version="0.6" upload="false">'
    return declaration + osm_open_tag
def _ring_hash(refs):
#return json.dumps(refs)
return hash(tuple(sorted(refs)))
def _parse_polygon(node_pool, rings, polygon):
    """Convert a GeoJSON polygon into ['outer'|'inner', node_ids] pairs
    appended to 'rings'; the first ring is outer, the rest are holes."""
    for index, ring in enumerate(polygon):
        role = 'outer' if index == 0 else 'inner'
        rings.append([role, _parse_linestring(node_pool, ring)])
def _parse_linestring(node_pool, linestring):
nodes = []
for lonlat in linestring:
ref = f'{lonlat[1]} {lonlat[0]}'
if ref in node_pool:
node_id = node_pool[ref]
else:
node_id = node_pool['id']
node_pool[ref] = node_id
node_pool['id'] = node_id + 1
nodes.append(node_id)
return nodes
def _append_way(way, way2):
another = list(way2) # make copy to not modify original list
if way[0] == way[-1] or another[0] == another[-1]:
return None
if way[0] == another[0] or way[-1] == another[-1]:
another.reverse()
if way[-1] == another[0]:
result = list(way)
result.extend(another[1:])
return result
elif way[0] == another[-1]:
result = another
result.extend(way)
return result
return None
def _way_to_wkt(node_pool, refs):
coords_sequence = (f"{node_pool[nd]['lon']} {node_pool[nd]['lat']}"
for nd in refs)
return f"({','.join(coords_sequence)})"
def borders_to_xml(borders):
    """Serialize GeoJSON border features into JOSM-compatible OSM XML.

    Single-ring borders become plain ways (unless config.JOSM_FORCE_MULTI),
    everything else becomes multipolygon relations with shared ways reused.
    """
    node_pool = {'id': 1}  # 'lat_lon': id
    regions = []  # { id: id, name: name, rings: [['outer', [ids]], ['inner', [ids]], ...] }
    for border in borders:
        geometry = border['geometry']
        rings = []
        if geometry['type'] == 'Polygon':
            _parse_polygon(node_pool, rings, geometry['coordinates'])
        elif geometry['type'] == 'MultiPolygon':
            for polygon in geometry['coordinates']:
                _parse_polygon(node_pool, rings, polygon)
        if len(rings) > 0:
            regions.append({
                'id': abs(border['properties']['id']),
                'name': border['properties']['name'],
                'disabled': border['properties']['disabled'],
                'rings': rings
            })

    xml = get_xml_header()

    # Emit the deduplicated node pool ('id' key is the counter, skip it).
    for latlon, node_id in node_pool.items():
        if latlon != 'id':
            (lat, lon) = latlon.split()
            xml += (f'<node id="{node_id}" visible="true" version="1" '
                    f'lat="{lat}" lon="{lon}" />')

    ways = {}  # _ring_hash => id
    wrid = 1
    for region in regions:
        w1key = _ring_hash(region['rings'][0][1])
        if (not config.JOSM_FORCE_MULTI and
                len(region['rings']) == 1 and
                w1key not in ways
        ):
            # simple case: a way
            ways[w1key] = region['id']
            xml += f'''<way id="{region['id']}" visible="true" version="1">'''
            # Fix: the name must be quoted and escaped; the original emitted
            # v={name} bare, producing invalid XML for any real name.
            xml += f'''<tag k="name" v={_quoteattr(region['name'])} />'''
            if region['disabled']:
                xml += '<tag k="disabled" v="yes" />'
            for nd in region['rings'][0][1]:
                xml += f'<nd ref="{nd}" />'
            xml += '</way>'
        else:
            # multipolygon: relation referencing (possibly shared) ways
            rxml = f'''<relation id="{region['id']}" visible="true" version="1">'''
            wrid += 1
            rxml += '<tag k="type" v="multipolygon" />'
            rxml += f'''<tag k="name" v={_quoteattr(region['name'])} />'''
            if region['disabled']:
                rxml += '<tag k="disabled" v="yes" />'
            for ring in region['rings']:
                wkey = _ring_hash(ring[1])
                if wkey in ways:
                    # already have that way
                    rxml += f'<member type="way" ref="{ways[wkey]}" role="{ring[0]}" />'
                else:
                    ways[wkey] = wrid
                    xml += f'<way id="{wrid}" visible="true" version="1">'
                    rxml += f'<member type="way" ref="{wrid}" role="{ring[0]}" />'
                    for nd in ring[1]:
                        xml += f'<nd ref="{nd}" />'
                    xml += '</way>'
                    wrid += 1
            xml += rxml + '</relation>'
    xml += '</osm>'
    return xml
def _extend_bbox(bbox, *args):
"""Extend bbox to include another bbox or point."""
assert len(args) in (1, 2)
if len(args) == 1:
another_bbox = args[0]
else:
another_bbox = [args[0], args[1], args[0], args[1]]
bbox[0] = min(bbox[0], another_bbox[0])
bbox[1] = min(bbox[1], another_bbox[1])
bbox[2] = max(bbox[2], another_bbox[2])
bbox[3] = max(bbox[3], another_bbox[3])
def _bbox_contains(outer, inner):
return (outer[0] <= inner[0] and
outer[1] <= inner[1] and
outer[2] >= inner[2] and
outer[3] >= inner[3])
def borders_from_xml(doc_tree):
    """Parse an OSM XML ElementTree back into border regions.

    Returns a dict {osm_id: {id, type ('r'|'w'), name, modified, disabled,
    wkt}} on success, or an error-message string on malformed input.
    """
    root = doc_tree.getroot()
    # read nodes and ways
    nodes = {}  # id: { lat, lon, modified }
    for node in root.iter('node'):
        if node.get('action') == 'delete':
            continue
        # Negative ids and action=modify both mean "changed in the editor".
        modified = int(node.get('id')) < 0 or node.get('action') == 'modify'
        nodes[node.get('id')] = {'lat': float(node.get('lat')),
                                 'lon': float(node.get('lon')),
                                 'modified': modified }
    ways = {}  # id: { name, disabled, modified, bbox, nodes, used }
    for way in root.iter('way'):
        if way.get('action') == 'delete':
            continue
        way_nodes = []
        bbox = [1e4, 1e4, -1e4, -1e4]
        modified = int(way.get('id')) < 0 or way.get('action') == 'modify'
        for node in way.iter('nd'):
            ref = node.get('ref')
            if not ref in nodes:
                return f"Missing node {ref} in way {way.get('id')}"
            way_nodes.append(ref)
            if nodes[ref]['modified']:
                modified = True
            _extend_bbox(bbox, float(nodes[ref]['lon']), float(nodes[ref]['lat']))
        name = None
        disabled = False
        for tag in way.iter('tag'):
            if tag.get('k') == 'name':
                name = tag.get('v')
            if tag.get('k') == 'disabled' and tag.get('v') == 'yes':
                disabled = True
        if len(way_nodes) < 2:
            return f"Way with less than 2 nodes: {way.get('id')}"
        ways[way.get('id')] = {'name': name, 'disabled': disabled,
                               'modified': modified, 'bbox': bbox,
                               'nodes': way_nodes, 'used': False}

    # finally we are constructing regions: first, from multipolygons
    regions = {}  # id: { modified, disabled, wkt, type: 'r'|'w' }
    for rel in root.iter('relation'):
        if rel.get('action') == 'delete':
            # Fix: member ways of a deleted relation must still be flagged
            # 'used', otherwise they are misreported below as stand-alone
            # border ways. (The original skipped before setting the flag,
            # leaving its later "after parsing ways" check dead code.)
            for member in rel.iter('member'):
                ref = member.get('ref')
                if ref in ways:
                    ways[ref]['used'] = True
            continue
        osm_id = int(rel.get('id'))
        modified = osm_id < 0 or rel.get('action') == 'modify'
        name = None
        disabled = False
        multi = False
        inner = []
        outer = []
        for tag in rel.iter('tag'):
            if tag.get('k') == 'name':
                name = tag.get('v')
            if tag.get('k') == 'disabled' and tag.get('v') == 'yes':
                disabled = True
            if tag.get('k') == 'type' and tag.get('v') == 'multipolygon':
                multi = True
        if not multi:
            return f"Found non-multipolygon relation: {rel.get('id')}"
        for member in rel.iter('member'):
            ref = member.get('ref')
            if not ref in ways:
                return f"Missing way {ref} in relation {rel.get('id')}"
            if ways[ref]['modified']:
                modified = True
            role = member.get('role')
            if role == 'outer':
                outer.append(ways[ref])
            elif role == 'inner':
                inner.append(ways[ref])
            else:
                return f"Unknown role {role} in relation {rel.get('id')}"
            ways[ref]['used'] = True
        if len(outer) == 0:
            return f"Relation {rel.get('id')} has no outer ways"
        # reconstruct rings in multipolygon: repeatedly append connecting
        # way fragments until every ring is closed
        for multi in (inner, outer):
            i = 0
            while i < len(multi):
                way = multi[i]['nodes']
                while way[0] != way[-1]:
                    productive = False
                    j = i + 1
                    while way[0] != way[-1] and j < len(multi):
                        new_way = _append_way(way, multi[j]['nodes'])
                        if new_way:
                            multi[i] = dict(multi[i])
                            multi[i]['nodes'] = new_way
                            way = new_way
                            if multi[j]['modified']:
                                multi[i]['modified'] = True
                            _extend_bbox(multi[i]['bbox'], multi[j]['bbox'])
                            del multi[j]
                            productive = True
                        else:
                            j += 1
                    if not productive:
                        return f"Unconnected way in relation {rel.get('id')}"
                i += 1
        # check for 2-node rings
        for multi in (outer, inner):
            for way in multi:
                if len(way['nodes']) < 3:
                    return f"Way in relation {rel.get('id')} has only {len(way['nodes'])} nodes"
        # sort inner and outer rings: assign each hole to the outer ring
        # whose bbox contains it
        polygons = []
        for way in outer:
            rings = [_way_to_wkt(nodes, way['nodes'])]
            for i in range(len(inner)-1, -1, -1):
                if _bbox_contains(way['bbox'], inner[i]['bbox']):
                    rings.append(_way_to_wkt(nodes, inner[i]['nodes']))
                    del inner[i]
            polygons.append('({})'.format(','.join(rings)))
        regions[osm_id] = {
            'id': osm_id,
            'type': 'r',
            'name': name,
            'modified': modified,
            'disabled': disabled,
            'wkt': 'MULTIPOLYGON({})'.format(','.join(polygons))
        }

    # make regions from unused named ways
    for wid, w in ways.items():
        if w['used']:
            continue
        if not w['name']:
            #continue
            return f"Unused in multipolygon way with no name: {wid}"
        if w['nodes'][0] != w['nodes'][-1]:
            return f"Non-closed unused in multipolygon way: {wid}"
        if len(w['nodes']) < 3:
            return f"Way {wid} has {len(w['nodes'])} nodes"
        regions[wid] = {
            'id': int(wid),
            'type': 'w',
            'name': w['name'],
            'modified': w['modified'],
            'disabled': w['disabled'],
            'wkt': 'POLYGON({})'.format(_way_to_wkt(nodes, w['nodes']))
        }

    return regions
def lines_to_xml(lines_geojson_iterable):
    """Build an OSM XML document from an iterable of GeoJSON geometry strings.

    Accepts LineString and MultiLineString geometries; any other geometry
    type is skipped.  Coordinates are deduplicated through ``node_pool`` so
    a point shared by several lines becomes a single ``<node>``.

    :param lines_geojson_iterable: iterable of GeoJSON geometry strings
    :return: OSM XML document as a string
    """
    # The special 'id' key holds the next free node id;
    # all other keys are 'lat lon' strings mapped to a node id.
    node_pool = {'id': 1}
    lines = []
    for feature in lines_geojson_iterable:
        geometry = json.loads(feature)
        if geometry['type'] == 'LineString':
            nodes = _parse_linestring(node_pool, geometry['coordinates'])
        elif geometry['type'] == 'MultiLineString':
            nodes = []
            for line in geometry['coordinates']:
                nodes.extend(_parse_linestring(node_pool, line))
        else:
            # Bug fix: for unsupported geometry types 'nodes' used to be
            # undefined on the first iteration, or left over from the
            # previous feature on later ones.
            continue
        if len(nodes) > 0:
            lines.append(nodes)
    xml = get_xml_header()
    for latlon, node_id in node_pool.items():
        if latlon != 'id':
            (lat, lon) = latlon.split()
            xml += (f'<node id="{node_id}" visible="true" version="1" '
                    f'lat="{lat}" lon="{lon}" />')
    wrid = 1
    for line in lines:
        xml += f'<way id="{wrid}" visible="true" version="1">'
        for nd in line:
            xml += f'<nd ref="{nd}" />'
        xml += '</way>'
        wrid += 1
    xml += '</osm>'
    return xml

1299
web/app/static/borders.js Normal file

File diff suppressed because it is too large Load diff

18
web/app/static/config.js Normal file
View file

@ -0,0 +1,18 @@
// Address JOSM's remote-control listens on for import requests.
const JOSM_IMPORT_LISTEN_ADDRESS = 'http://127.0.0.1:8111/import';
// Used for rough MWM size estimation; presumably bytes per node — see stat.js.
const BYTES_FOR_NODE = 8;
const IMPORT_ENABLED = true;
const SELF_URL = document.location.origin;
// If the web API does not work at the server's root, you may need something like:
// const API_URL = SELF_URL + '/borders-api';
const API_URL = SELF_URL;
// Build an API URL: optional base (defaults to API_URL) plus optional endpoint.
function getServer(endpoint, base_url) {
    var root = base_url || API_URL;
    return endpoint ? root + '/' + endpoint : root;
}

View file

Before

Width:  |  Height:  |  Size: 2.8 KiB

After

Width:  |  Height:  |  Size: 2.8 KiB

View file

Before

Width:  |  Height:  |  Size: 1.5 KiB

After

Width:  |  Height:  |  Size: 1.5 KiB

View file

Before

Width:  |  Height:  |  Size: 3.9 KiB

After

Width:  |  Height:  |  Size: 3.9 KiB

View file

Before

Width:  |  Height:  |  Size: 1.7 KiB

After

Width:  |  Height:  |  Size: 1.7 KiB

View file

Before

Width:  |  Height:  |  Size: 797 B

After

Width:  |  Height:  |  Size: 797 B

View file

Before

Width:  |  Height:  |  Size: 2 KiB

After

Width:  |  Height:  |  Size: 2 KiB

View file

Before

Width:  |  Height:  |  Size: 1 KiB

After

Width:  |  Height:  |  Size: 1 KiB

206
web/app/static/stat.js Normal file
View file

@ -0,0 +1,206 @@
// Thresholds used for bucketing regions on the statistics page.
var MB_LIMIT = 50,    // MWM size warning threshold, MB
    MB_LIMIT2 = 70;   // MWM size second (critical) threshold, MB
var KM_LIMIT = 50,    // small-region / small-island area threshold, km²
    POINT_LIMIT = 50000;  // contour point count threshold
// Page entry point: show the configured thresholds in the text and start
// the chain of stat queries (total -> sizes -> topo).
function statInit() {
    var placeholders = {
        '.mb_limit': MB_LIMIT,
        '.mb_limit2': MB_LIMIT2,
        '.km_limit': KM_LIMIT,
        '.point_limit': Math.round(POINT_LIMIT / 1000)
    };
    for (var selector in placeholders)
        $(selector).text(placeholders[selector]);
    statQuery('total', statTotal);
}
// Toggle visibility of the list element with the given id.
// Returns false so that the "onclick=return statOpen(...)" handlers in
// stat.html cancel the default action of their href="#" links; the
// original returned undefined, which does not cancel it.
function statOpen(id) {
    var div = document.getElementById(id);
    div.style.display = (div.style.display != 'block') ? 'block' : 'none';
    return false;
}
// Request one group of statistics from the server; on success, pass the
// payload to `callback` and reveal the section with the same id.
function statQuery(id, callback) {
    var settings = {
        data: {'group': id},
        success: function(data) {
            callback(data);
            document.getElementById(id).style.display = 'block';
        },
        error: function() {
            alert('Failed!');
        }
    };
    $.ajax(getServer('stat'), settings);
}
// Round `value` to `digits` decimal places; pass it through untouched
// when `digits` is not given (undefined or null).
function formatNum(value, digits) {
    if (digits == undefined)
        return value;
    var scale = Math.pow(10, digits);
    return Math.round(value * scale) / scale;
}
// Put a (possibly rounded) value into the element with the given id,
// HTML-escaping it.  Bug fix: String.replace with a plain-string pattern
// substitutes only the FIRST occurrence; global regexes escape them all.
function statFill(id, value, digits) {
    document.getElementById(id).innerHTML = ('' + formatNum(value, digits))
        .replace(/&/g, '&amp;').replace(/</g, '&lt;');
}
// Link into the border editor centered on the region; use a lower zoom
// for regions bigger than 1000 (km², per the stat payloads).
function getIndexLink(region) {
    var zoom = (region.area > 1000) ? 8 : 12;
    return 'index.html#' + zoom + '/' + region.lat + '/' + region.lon;
}
// Fill the div `id` with links to the given regions.
// `comment` is an optional property name or function producing a note that
// is appended in parentheses; `count` is an optional id of a counter element.
function statFillList(id, regions, comment, count) {
    var div = document.getElementById(id),
        i, a, html, p;
    if (!div) {
        console.log('Div ' + id + ' not found');
        return;
    }
    if (count)
        statFill(count, regions.length);
    for (i = 0; i < regions.length; i++) {
        a = document.createElement('a');
        a.href = getIndexLink(regions[i]);
        a.target = '_blank';
        html = regions[i].name;
        if (comment) {
            if (typeof comment == 'string')
                p = regions[i][comment];
            else
                p = comment(regions[i]);
            if (p)
                html += ' (' + p + ')';
        }
        // Bug fix: plain-string replace escaped only the first '&' / '<'
        // in the name; global regexes escape every occurrence.
        a.innerHTML = html.replace(/&/g, '&amp;').replace(/</g, '&lt;');
        div.appendChild(a);
        div.appendChild(document.createElement('br'));
    }
}
// Callback for the 'total' stat group: shows the overall border count,
// then chains into the 'sizes' group query.
function statTotal(data) {
    statFill('total_total', data.total);
    statQuery('sizes', statSizes);
}
// Callback for the 'sizes' stat group: buckets regions into the lists
// shown on the stats page, fills them, then chains into the 'topo' group.
function statSizes(data) {
    var list_1mb = [],
        list_50mb = [],
        list_100mb = [];
    var list_spaces = [],
        list_bad = [];
    var list_100km = [],
        list_100kp = [],
        list_zero = [];
    var list_100p = [];
    var list_disabled = [],
        list_commented = [];
    for (var i = 0; i < data.regions.length; i++) {
        // Bug fix: `region` leaked as an implicit global — declare it.
        var region = data.regions[i];
        if (region.area > 0 && region.area < KM_LIMIT)
            list_100km.push(region);
        if (region.area <= 0)
            list_zero.push(region);
        if (region.nodes > POINT_LIMIT)
            list_100kp.push(region);
        if (region.nodes < 50)
            list_100p.push(region);
        // Bug fix: BYTES_FOR_NODE is a top-level `const` in config.js and
        // such constants are not attached to `window`, so the original
        // `window.BYTES_FOR_NODE` was undefined and size_mb came out NaN.
        var size_mb = region.size * BYTES_FOR_NODE / 1024 / 1024;
        region.size_mb = size_mb;
        if (size_mb < 1)
            list_1mb.push(region);
        if (size_mb > MB_LIMIT)
            list_50mb.push(region);
        if (size_mb > MB_LIMIT2)
            list_100mb.push(region);
        if (!/^[\x20-\x7F]*$/.test(region.name))
            list_bad.push(region);
        if (region.name.indexOf(' ') >= 0)
            list_spaces.push(region);
        if (region.disabled)
            list_disabled.push(region);
        if (region.commented)
            list_commented.push(region);
    }
    statFill('names_spaces', list_spaces.length);
    statFillList('names_bad_list', list_bad, null, 'names_bad');
    statFillList('total_disabled_list', list_disabled, null, 'total_disabled');
    statFillList('total_commented_list', list_commented, null,
                 'total_commented');
    list_1mb.sort(function(a, b) {
        return a.size_mb - b.size_mb;
    });
    list_50mb.sort(function(a, b) {
        return a.size_mb - b.size_mb;
    });
    list_100mb.sort(function(a, b) {
        return b.size_mb - a.size_mb;
    });
    statFillList('sizes_1mb_list', list_1mb, function(r) {
        return formatNum(r.size_mb, 2) + ' МБ';
    }, 'sizes_1mb');
    statFillList('sizes_50mb_list', list_50mb, function(r) {
        return formatNum(r.size_mb, 0) + ' МБ';
    }, 'sizes_50mb');
    statFillList('sizes_100mb_list', list_100mb, function(r) {
        return formatNum(r.size_mb, 0) + ' МБ';
    }, 'sizes_100mb');
    list_100km.sort(function(a, b) {
        return a.area - b.area;
    });
    list_100kp.sort(function(a, b) {
        return b.nodes - a.nodes;
    });
    list_100p.sort(function(a, b) {
        return a.nodes - b.nodes;
    });
    statFillList('areas_100km_list', list_100km, function(r) {
        return formatNum(r.area, 2) + ' км²';
    }, 'areas_100km');
    statFillList('areas_50k_points_list', list_100kp, 'nodes',
                 'areas_50k_points');
    statFillList('areas_100_points_list', list_100p, 'nodes',
                 'areas_100_points');
    statFillList('areas_0_list', list_zero, null, 'areas_0');
    statQuery('topo', statTopo);
}
// Callback for the 'topo' stat group: lists regions with holes, with
// multiple outer rings, and with small islands.
function statTopo(data) {
    var list_holed = [],
        list_multi = [],
        list_100km = [];
    for (var i = 0; i < data.regions.length; i++) {
        // Bug fix: `region` leaked as an implicit global — declare it.
        var region = data.regions[i];
        if (region.outer > 1)
            list_multi.push(region);
        if (region.inner > 0)
            list_holed.push(region);
        if (region.outer > 1 && region.min_area > 0 && region.min_area <
                KM_LIMIT)
            list_100km.push(region);
    }
    list_multi.sort(function(a, b) {
        return b.outer - a.outer;
    });
    list_holed.sort(function(a, b) {
        return b.inner - a.inner;
    });
    list_100km.sort(function(a, b) {
        return a.min_area - b.min_area;
    });
    statFillList('topo_holes_list', list_holed, 'inner', 'topo_holes');
    statFillList('topo_multi_list', list_multi, 'outer', 'topo_multi');
    statFillList('topo_100km_list', list_100km, function(r) {
        return formatNum(r.min_area, 2) + ' км²';
    }, 'topo_100km');
}

269
web/app/subregions.py Normal file
View file

@ -0,0 +1,269 @@
import math
from queue import Queue
from config import (
BORDERS_TABLE as borders_table,
OSM_TABLE as osm_table,
OSM_PLACES_TABLE as osm_places_table,
)
from mwm_size_predictor import MwmSizePredictor
def get_subregions_info(conn, region_id, region_table,
                        next_level, need_cities=False):
    """Collect name and mwm size estimation (plus city list, on request)
    for each subregion of the given region.

    :param conn: psycopg2 connection
    :param region_id: id of the region in `region_table`
    :param region_table: either borders table or OSM table from config.py
    :param next_level: admin level of subregions to find
    :param need_cities: when True, include a 'cities' list per subregion
    :return: dict {subregion_id => subregion data}
    """
    subregions = _get_subregions_basic_info(conn, region_id, region_table,
                                            next_level, need_cities)
    _add_population_data(conn, subregions, need_cities)
    _add_mwm_size_estimation(subregions)
    wanted_keys = ['name', 'mwm_size_est']
    if need_cities:
        wanted_keys.append('cities')
    return {
        s_id: {key: s_data[key] for key in wanted_keys}
        for s_id, s_data in subregions.items()
    }
def _get_subregions_basic_info(conn, region_id, region_table,
                               next_level, need_cities):
    """Query osm_id, name and area (km²) of every admin_level=next_level
    boundary contained in the region, and return them as a dict of
    per-subregion data stubs with zeroed population counters.
    """
    # Column names differ between the borders table and the raw OSM table.
    if region_table == borders_table:
        id_col, geom_col = 'id', 'geom'
    else:
        id_col, geom_col = 'osm_id', 'way'
    cursor = conn.cursor()
    cursor.execute(f"""
        SELECT subreg.osm_id, subreg.name,
               ST_Area(geography(subreg.way))/1.0E+6 area
        FROM {region_table} reg, {osm_table} subreg
        WHERE reg.{id_col} = %s AND subreg.admin_level = %s AND
              ST_Contains(reg.{geom_col}, subreg.way)
        """, (region_id, next_level)
    )
    subregions = {}
    for osm_id, name, area in cursor:
        subregion_data = {
            'osm_id': osm_id,
            'name': name,
            'area': area,
            'urban_pop': 0,
            'city_cnt': 0,
            'hamlet_cnt': 0
        }
        if need_cities:
            subregion_data['cities'] = []
        subregions[osm_id] = subregion_data
    return subregions
def _add_population_data(conn, subregions, need_cities):
    """Fill in urban population and city/hamlet counters (and city lists
    when need_cities is True) for each subregion, in place.
    """
    if not subregions:
        return
    cursor = conn.cursor()
    # Pass the ids as a bound array parameter ("= ANY(%s)") instead of
    # interpolating them into the query text.
    cursor.execute(f"""
        SELECT b.osm_id, p.name, coalesce(p.population, 0), p.place
        FROM {osm_table} b, {osm_places_table} p
        WHERE b.osm_id = ANY(%s)
            AND ST_Contains(b.way, p.center)
        """, (list(subregions.keys()),)
    )
    for subregion_id, place_name, place_population, place_type in cursor:
        subregion_data = subregions[subregion_id]
        # 'city'/'town' count as urban, every other place type as a hamlet.
        if place_type in ('city', 'town'):
            subregion_data['city_cnt'] += 1
            subregion_data['urban_pop'] += place_population
            if need_cities:
                subregion_data['cities'].append({
                    'name': place_name,
                    'population': place_population
                })
        else:
            subregion_data['hamlet_cnt'] += 1
def _add_mwm_size_estimation(subregions):
    """Add an 'mwm_size_est' value to each subregion dict, in place.

    Subregions are fed to the predictor in a fixed (sorted-by-id) order so
    that predictions can be zipped back to their ids.
    """
    ordered_ids = sorted(subregions.keys())
    features = ('urban_pop', 'area', 'city_cnt', 'hamlet_cnt')
    feature_array = [
        [subregions[s_id][f] for f in features]
        for s_id in ordered_ids
    ]
    predictions = MwmSizePredictor.predict(feature_array)
    for s_id, size_prediction in zip(ordered_ids, predictions):
        subregions[s_id]['mwm_size_est'] = size_prediction
def update_border_mwm_size_estimation(conn, border_id):
    """Recalculate and persist mwm_size_est for one border.

    Gathers the border's area (km²), urban population and place counters,
    feeds them to MwmSizePredictor and commits the UPDATE.

    :raises Exception: if the stored geometry's area evaluates to NaN
    """
    cursor = conn.cursor()
    cursor.execute(f"""
        SELECT name, ST_Area(geography(geom))/1.0E+6 area
        FROM {borders_table}
        WHERE id = %s""", (border_id, ))
    name, area = cursor.fetchone()
    if math.isnan(area):
        raise Exception(f"Area is NaN for border '{name}' ({border_id})")
    border_data = {
        'area': area,  # km², from ST_Area(geography)/1e6 above
        'urban_pop': 0,
        'city_cnt': 0,
        'hamlet_cnt': 0
    }
    cursor.execute(f"""
        SELECT coalesce(p.population, 0), p.place
        FROM {borders_table} b, {osm_places_table} p
        WHERE b.id = %s
            AND ST_Contains(b.geom, p.center)
        """, (border_id, ))
    for place_population, place_type in cursor:
        # 'city'/'town' count as urban, every other place type as a hamlet.
        if place_type in ('city', 'town'):
            border_data['city_cnt'] += 1
            border_data['urban_pop'] += place_population
        else:
            border_data['hamlet_cnt'] += 1
    # Same feature order as in _add_mwm_size_estimation.
    feature_array = [
        border_data[f] for f in
        ('urban_pop', 'area', 'city_cnt', 'hamlet_cnt')
    ]
    # NOTE(review): predict() receives a single flat feature list here,
    # while _add_mwm_size_estimation passes a list of lists — confirm
    # MwmSizePredictor.predict supports both shapes.
    mwm_size_est = MwmSizePredictor.predict(feature_array)
    cursor.execute(f"UPDATE {borders_table} SET mwm_size_est = %s WHERE id = %s",
                   (mwm_size_est, border_id))
    conn.commit()
def is_administrative_region(conn, region_id):
    """Return True if the OSM table contains a boundary with this osm_id."""
    cursor = conn.cursor()
    cursor.execute(f"""
        SELECT count(1) FROM {osm_table} WHERE osm_id = %s
        """, (region_id,)
    )
    return cursor.fetchone()[0] > 0
def is_leaf(conn, region_id):
    """Return True if the region has no children in the borders table."""
    cursor = conn.cursor()
    cursor.execute(f"""
        SELECT count(1)
        FROM {borders_table}
        WHERE parent_id = %s
        """, (region_id,)
    )
    return cursor.fetchone()[0] == 0
def get_region_country(conn, region_id):
    """Returns the uppermost predecessor of the region in the hierarchy,
    possibly itself.
    """
    ancestry = get_predecessors(conn, region_id)
    return ancestry[-1]
def get_predecessors(conn, region_id):
    """Returns the list of (id, name)-tuples of all predecessors,
    starting from the very region_id.

    :raises Exception: if some id along the chain is missing in the table
    """
    chain = []
    cursor = conn.cursor()
    current_id = region_id
    while True:
        cursor.execute(f"""
            SELECT id, name, parent_id
            FROM {borders_table} WHERE id = %s
            """, (current_id,)
        )
        row = cursor.fetchone()
        if not row:
            raise Exception(
                f"No record in '{borders_table}' table with id = {current_id}"
            )
        chain.append(row[0:2])
        current_id = row[2]
        if not current_id:
            return chain
def get_region_full_name(conn, region_id):
    """Join ancestor names, country first, with '_' separators."""
    ancestry = get_predecessors(conn, region_id)
    names = [name for _, name in reversed(ancestry)]
    return '_'.join(names)
def get_parent_region_id(conn, region_id):
    """Return the parent region id, or None for a missing/top-level region."""
    cursor = conn.cursor()
    cursor.execute(f"""
        SELECT parent_id FROM {borders_table} WHERE id = %s
        """, (region_id,))
    row = cursor.fetchone()
    if row and row[0] is not None:
        return int(row[0])
    return None
def get_child_region_ids(conn, region_id):
    """Return the ids of all direct children of the region."""
    cursor = conn.cursor()
    cursor.execute(f"""
        SELECT id FROM {borders_table} WHERE parent_id = %s
        """, (region_id,))
    return [int(row[0]) for row in cursor]
def get_similar_regions(conn, region_id, only_leaves=False):
    """Returns ids of regions of the same admin_level in the same country.
    Prerequisite: is_administrative_region(region_id) is True.

    Walks the hierarchy down from the country with a BFS, collecting
    regions at the target admin_level.
    """
    cursor = conn.cursor()
    cursor.execute(f"""
        SELECT admin_level FROM {osm_table}
        WHERE osm_id = %s""", (region_id,)
    )
    admin_level = int(cursor.fetchone()[0])
    country_id, country_name = get_region_country(conn, region_id)
    q = Queue()
    q.put({'id': country_id, 'admin_level': 2})
    similar_region_ids = []
    while not q.empty():
        item = q.get()
        if item['admin_level'] == admin_level:
            similar_region_ids.append(item['id'])
        elif item['admin_level'] < admin_level:
            # Bug fix: find_osm_child_regions takes the connection as its
            # first argument; it was called without it (TypeError at runtime).
            children = find_osm_child_regions(conn, item['id'])
            for ch in children:
                q.put(ch)
    if only_leaves:
        similar_region_ids = [r_id for r_id in similar_region_ids
                              if is_leaf(conn, r_id)]
    return similar_region_ids
def find_osm_child_regions(conn, region_id):
    """Return [{'id': .., 'admin_level': ..}] for direct children of the
    region that also exist as boundaries in the OSM table.
    """
    with conn.cursor() as cursor:
        cursor.execute(f"""
            SELECT c.id, oc.admin_level
            FROM {borders_table} c, {borders_table} p, {osm_table} oc
            WHERE p.id = c.parent_id AND c.id = oc.osm_id
                AND p.id = %s
            """, (region_id,)
        )
        return [
            {'id': child_id, 'admin_level': child_level}
            for child_id, child_level in cursor
        ]

View file

@ -0,0 +1,346 @@
<!doctype html>
<html>
<head>
<meta charset="utf-8">
<title>Редактор границ для MAPS.ME</title>
<link rel="stylesheet" href="{{ url_for('static', filename='lib/leaflet.css') }}"/>
<script src="{{ url_for('static', filename='lib/leaflet.js') }}"></script>
<script src="{{ url_for('static', filename='lib/Leaflet.Editable.js') }}"></script>
<script src="{{ url_for('static', filename='lib/leaflet-hash.js') }}"></script>
<script src="{{ url_for('static', filename='lib/jquery-1.11.2.min.js') }}"></script>
<script src="{{ url_for('static', filename='config.js') }}"></script>
<script src="{{ url_for('static', filename='borders.js') }}"></script>
<style>
html,
body,
#map,
#panel {
margin: 0;
height: 100%;
}
#panel {
width: 250px;
float: right;
padding: 1em;
font-family: sans-serif;
font-size: 80%
}
#panel button {
font-size: 9pt;
margin: 4px 0;
background-color: #eee;
}
#map {
margin-right: 250px;
}
#selected_border_actions,
#b_divide,
#unbound_actions,
#backups,
#wait_start_over,
#split,
#join,
#join_to_parent,
#point,
#divide,
#backup,
#filefm,
#old_action,
#josm_old {
display: none;
}
#unbound_actions {
margin-bottom: 1em;
}
#rename,
#potential_parents {
display: none;
margin-left: 10px;
}
#info {
margin-top: 1em;
}
#b_delete,
#b_clear,
.back_del {
font-size: 8pt;
}
.actions input[type='text'],
#search input[type='text'] {
width: 150px;
}
#header {
border-bottom: 1px solid gray;
margin-bottom: 1em;
padding-bottom: 1em;
}
#f_topo,
#f_chars,
#f_comments,
#links {
font-size: 10pt;
}
#backup_saving,
#backup_restoring {
margin-bottom: 1em;
}
#h_iframe {
display: block;
width: 100%;
height: 80px;
}
a,
a:hover,
a:visited {
color: blue;
}
#start_over,
#start_over:hover,
#start_over:visited {
color: red;
}
#population_thresholds {
padding-left: 1.5em;
}
#mwm_size_thr {
max-width: 50px;
}
#r_green,
#r_red {
width: 40px;
}
#b_import {
max-width: 180px;
}
#import_div {
position: relative;
display: none;
}
#hide_import_button {
position: absolute;
width: 20px;
height: 20px;
top: 0;
right: 0;
background-color: #ccc;
text-align: center;
display: flex;
align-items: center;
justify-content: center;
cursor: pointer;
}
</style>
</head>
<body onload="init();">
<div id="panel">
<div id="header">
<div id="filter">
Раскраска по <select size="1" id="f_type" value="size" onchange="filterSelect()">
<option value="country">стране</option>
<option value="nodes_size">размеру по точкам</option>
<option value="predict_size">предсказ. размеру</option>
<option value="topo">топологии</option>
<option value="chars">буквам в назв.</option>
<option value="comments">комментариям</option>
</select>
<div id="f_size">
Цвета: 0<input type="text" size="2" id="r_green"><input type="text" size="3" id="r_red">
<button onclick="bUpdateColors()">&#10003;</button>
</div>
<div id="f_topo">
Красный — есть дыры, синий — больше одного полигона.
Маркерами обозначены мелкие острова.
</div>
<div id="f_chars">
Синий — есть пробелы, красный — символы не из ASCII.
</div>
<div id="f_comments">
Красный — есть комментарий.
</div>
</div>
<div id="b_josm">
<button onclick="bJOSM()">Открыть в JOSM</button>
<button id="josm_old" onclick="bJosmOld()">ст.</button>
<button onclick="bJosmZoom()">&#x1f50d;</button>
</div>
<form action="" enctype="multipart/form-data" method="post" id="filefm" target="import_frame">
Импорт <input type="file" accept=".osm,.xml" name="file" id="b_import" onchange="bImport();">
</form>
<div id="import_div">
<iframe name="import_frame" id="h_iframe" src="about:blank"></iframe>
<div id="hide_import_button">
<div>x</div>
</div>
</div>
<div id="backups">
<button onclick="bBackup()">Архив границ</button>
<br>
</div>
<div id="old_action">
<input type="checkbox" id="old" onchange="bOldBorders()"><label for="old"> старые границы</label>
</div>
<div id="links">
<a href="stat.html">Статистика</a>
<a href="#" id="poly_bbox">Скачать в poly видимое</a>,
<a href="#" id="poly_all">всё</a><br>
<a href="#" id="start_over" onclick="startOver()">Начать заново</a>
<span id="wait_start_over">ожидайте...</span>
</div>
<div id="search">
Поиск <input type="text" id="fsearch">
<button id="b_search" onclick="doSearch()">&#x1f50d;</button>
</div>
</div>
<div id="actions">
<div id="unbound_actions">
<button id="b_point" onclick="bPoint()">Регион из точки</button>
<br>
</div>
<div id="selected_border_actions" class="actions">
<div id="action_buttons">
<!--button onclick="bDisable()" id="b_disable">Убрать</button-->
<button onclick="bDelete()" id="b_delete">Удалить</button>
<br>
<button onclick="bSplit()">Разрезать</button>
<button onclick="bJoin()">Склеить</button>
<br>
<button onclick="bJoinToParent()">Склеить всё до родителя</button>
<br>
<button onclick="bLargest()">Выделить острова</button>
<button onclick="bHull()">Единый контур</button>
<br>
<button id="b_divide" onclick="bDivide()">Заменить регионами</button>
<br>
</div>
<div id="info">
<b><a href="#" id="rename_link" onclick="bToggleRename(); return false;">Название &#9660:</a></b>
<span id="b_name"></span>
<span id="b_al"></span><br>
<div id="rename">
<input type="text" id="b_rename">
<button onclick="bRename()">Переименовать</button>
</div>
<div>
<b><a href="#" id="parent_link" onclick="bTogglePotentialParents(); return false;">Родитель
&#9660:</a></b>
<span id="b_parent_name"></span>
</div>
<div id="potential_parents">
</div>
<b>Оценка размера по точкам:</b> <span id="b_size"></span><br>
<b>Оценка размера по нас+пл:</b> <span id="pa_size"></span><br>
<b>Последняя правка:</b> <span id="b_date"></span><br>
<b>Количество точек:</b> <span id="b_nodes"></span><br>
<b>Площадь:</b> <span id="b_area"></span> км²<br>
<!--b>Статус:</b> <span id="b_status"></span><br-->
<b>Комментарий:</b><br>
<textarea style="width: 240px; height: 100px;" id="b_comment"></textarea><br>
<button id="b_comment_send" onclick="bComment()">Отправить</button>
<a href="#" onclick="bClearComment(); return false;" id="b_clear">Очистить</a>
</div>
</div>
</div>
<div id="split" class="actions">
Выбрана область <span id="s_sel"></span>.<br><br>
Нарисуйте линию через выбранную область (повторный клик на последней точке для завершения), затем нажмите кнопку<br>
<div id="s_do">
<button onclick="bSplitDo()">Разрезать область</button>
<input type="checkbox" id="save_split_region" checked></input>
<label for="save_split_region">Не удалять</label><br>
<button onclick="bSplitJosm()">Границы вдоль — в JOSM</button>
<br>
<button onclick="bSplitAgain()">Нарисовать по-другому</button>
</div>
<br>
<button onclick="bSplitCancel()">Вернуться</button>
</div>
<div id="join" class="actions">
Выбрана область <span id="j_sel"></span>.<br><br>
Выберите область, которую к ней присоединить.<br>
<div id="j_do">
Выбрана: <span id="j_name2"></span><br>
<button onclick="bJoinDo()">Склеить области</button>
</div>
<br>
<button onclick="bJoinCancel()">Вернуться</button>
</div>
<div id="join_to_parent" class="actions">
Выбрана область <span id="j_to_parent_sel"></span>.<br>
Родительская область <span id="j_sel_parent"></span>.<br><br>
<button onclick="bJoinToParentPreview()">Посмотреть результат</button>
<br>
<button onclick="bJoinToParentDo()">Склеить всё до родителя</button>
<br>
<button onclick="bJoinToParentCancel()">Вернуться</button>
</div>
<div id="point" class="actions">
Переместите маркер в нужную точку и нажмите<br>
<button onclick="bPointList()">Получить список областей</button>
<br>
Название <input type="text" id="p_name"><br>
<div id="p_list"></div>
<br>
<button onclick="bPointCancel()">Вернуться</button>
</div>
<div id="divide" class="actions">
Выбранная область <span id="region_to_divide"></span>
будет заменена вложенными областями уровня
<input type="number" id="next_level" min="2" max="12">.<br>
<br>
<div>
<input type="checkbox" id="auto_divide" checked>
<label for="auto_divide">Автослияние по населению</label>
<div id="population_thresholds">
Верхняя граница размера mwm:
<input id="mwm_size_thr" type="number"
min="1" value="70" step="1"> Мб
</div>
</div>
<div>
<input type="checkbox" id="apply_to_similar">
<label for="apply_to_similar">
Для всех областей страны того же уровня
</label>
</div>
<button onclick="bDividePreview()">Посмотреть субобласти</button>
<br>
<div id="d_count"></div>
<button id="b_divide_do" onclick="bDivideDo()">Заменить область</button>
<br>
<button onclick="bDivideCancel()">Вернуться</button>
</div>
<div id="backup" class="actions">
<button onclick="bBackupSave()" id="backup_save">Сохранить границы</button>
<div id="backup_saving">Копирую границы...</div>
<div id="backup_restoring">Восстанавливаю границы...</div>
<div>Или выберите набор границ для восстановления:</div>
<div id="backup_list"></div>
<button onclick="bBackupCancel()">Вернуться</button>
</div>
</div>
<div id="map"></div>
</body>
</html>

View file

@ -0,0 +1,97 @@
<!doctype html>
<html>
<head>
<meta charset="utf-8">
<title>Статистика границ для MAPS.ME</title>
<script src="{{ url_for('static', filename='lib/jquery-1.11.2.min.js') }}">
</script>
<script src="{{ url_for('static', filename='config.js') }}"></script>
<script src="{{ url_for('static', filename='stat.js') }}"></script>
<style>
body>div {
display: none;
margin-bottom: 1em;
}
#sizes>div {
margin-top: 1em;
}
.h {
display: none;
padding-left: 1em;
}
</style>
</head>
<body onload="statInit();">
<h1>Статистика по границам</h1>
<div id="total">
Всего границ: <span id="total_total"></span><br>
</div>
<div id="sizes">
<div>
Отключено из сборки: <span id="total_disabled"></span> (<a href="#"
onclick="return statOpen('total_disabled_list');">список</a>)<br>
<div id="total_disabled_list" class="h"></div>
Прокомментировано: <span id="total_commented"></span> (<a href="#"
onclick="return statOpen('total_commented_list');">список</a>)<br>
<div id="total_commented_list" class="h"></div>
</div>
<div>
Названий с пробелами: <span id="names_spaces"></span><br>
Названий с левыми символами: <span id="names_bad"></span> (<a
href="#"
onclick="return statOpen('names_bad_list');">список</a>)<br>
<div id="names_bad_list" class="h"></div>
</div>
<div>
Размер MWM до 1 МБ: <span id="sizes_1mb"></span> (<a href="#"
onclick="return statOpen('sizes_1mb_list');">список</a>)<br>
<div id="sizes_1mb_list" class="h"></div>
Размер MWM больше <span class="mb_limit"></span> МБ: <span
id="sizes_50mb"></span> (<a href="#"
onclick="return statOpen('sizes_50mb_list');">список</a>)<br>
<div id="sizes_50mb_list" class="h"></div>
Из них больше <span class="mb_limit2"></span> МБ: <span
id="sizes_100mb"></span> (<a href="#"
onclick="return statOpen('sizes_100mb_list');">список</a>)<br>
<div id="sizes_100mb_list" class="h"></div>
</div>
<div>
Регионов меньше <span class="km_limit"></span> км²: <span
id="areas_100km"></span> (<a href="#"
onclick="return statOpen('areas_100km_list');">список</a>)<br>
<div id="areas_100km_list" class="h"></div>
Регионов от <span class="point_limit"></span> тысяч точек в контуре:
<span id="areas_50k_points"></span> (<a href="#"
onclick="return statOpen('areas_50k_points_list');">список</a>)<br>
<div id="areas_50k_points_list" class="h"></div>
Регионов до 50 точек в контуре: <span id="areas_100_points"></span>
(<a href="#"
onclick="return statOpen('areas_100_points_list');">список</a>)<br>
<div id="areas_100_points_list" class="h"></div>
Регионов с неизвестной площадью: <span id="areas_0"></span> (<a
href="#"
onclick="return statOpen('areas_0_list');">список</a>)<br>
<div id="areas_0_list" class="h"></div>
</div>
</div>
<div id="topo">
Регионов с дырками: <span id="topo_holes"></span> (<a href="#"
onclick="return statOpen('topo_holes_list');">список</a>)<br>
<div id="topo_holes_list" class="h"></div>
Регионов из нескольких частей: <span id="topo_multi"></span> (<a
href="#"
onclick="return statOpen('topo_multi_list');">список</a>)<br>
<div id="topo_multi_list" class="h"></div>
Регионов с островами меньше <span class="km_limit"></span> км²: <span
id="topo_100km"></span> (<a href="#"
onclick="return statOpen('topo_100km_list');">список</a>)<br>
<div id="topo_100km_list" class="h"></div>
<hr>
</div>
</body>
</html>

20
web/prestart.sh Normal file
View file

@ -0,0 +1,20 @@
# Inline Python program (kept verbatim in a shell variable, fed to
# `python3 -c` below): exits 0 once PostgreSQL accepts connections,
# 1 on any connection failure.
CHECK_DB_AVAILABILITY="
import sys, time
import psycopg2
import config
try:
    for i in range(6):
        with psycopg2.connect(config.CONNECTION) as conn:
            time.sleep(0.5)
    sys.exit(0)
except Exception as e:
    sys.exit(1)
"
# Wait until postgres is up
until python3 -c "$CHECK_DB_AVAILABILITY"; do
  >&2 echo "Postgres is unavailable - sleeping"
  sleep 2
done
# Start borders_daemon.py in the background.
python3 /app/borders_daemon.py&

6
web/uwsgi.ini Normal file
View file

@ -0,0 +1,6 @@
[uwsgi]
# Workaround for io.BytesIO to work with uWSGI sendfile: https://github.com/unbit/uwsgi/issues/1126
wsgi-disable-file-wrapper = True
module = borders_api
callable = app

View file

@ -1,935 +0,0 @@
// Leaflet style presets for normal and selected borders.
var STYLE_BORDER = { stroke: true, color: '#03f', weight: 3, fill: true, fillOpacity: 0.1 };
var STYLE_SELECTED = { stroke: true, color: '#ff3', weight: 3, fill: true, fillOpacity: 0.1 };
// Fill colours used by the size-based colouring.
var FILL_TOO_SMALL = '#0f0';
var FILL_TOO_BIG = '#800';
var FILL_ZERO = 'black';
var OLD_BORDERS_NAME; // filled in checkHasOSM()
var IMPORT_ENABLED = false;
// Global page state: the map, known borders keyed by id, current selection.
var map, borders = {}, bordersLayer, selectedId, editing = false, readonly = false;
// Size thresholds for colouring; editable through the r_green/r_red inputs.
var size_good = 5, size_bad = 50;
var maxRank = 1;
// Optional overlay layers, created lazily elsewhere.
var tooSmallLayer = null;
var oldBordersLayer = null;
var routingGroup = null;
var crossingLayer = null;
// Page entry point (called from body onload): creates the Leaflet map,
// base tile layers and overlay layer groups, and wires up UI handlers.
function init() {
    map = L.map('map', { editable: true }).setView([30, 0], 3);
    // Mirrors the current zoom/center in the URL hash.
    var hash = new L.Hash(map);
    L.tileLayer('http://tile.openstreetmap.org/{z}/{x}/{y}.png', { attribution: '&copy; OpenStreetMap' }).addTo(map);
    // Administrative-boundaries tile overlay on top of the base map.
    L.tileLayer('http://korona.geog.uni-heidelberg.de/tiles/adminb/x={x}&y={y}&z={z}',
        { attribution: '&copy; GIScience Heidelberg' }).addTo(map);
    bordersLayer = L.layerGroup();
    map.addLayer(bordersLayer);
    routingGroup = L.layerGroup();
    map.addLayer(routingGroup);
    crossingLayer = L.layerGroup();
    map.addLayer(crossingLayer);
    map.on('moveend', function() {
        // Borders are only fetched when zoomed in enough.
        if( map.getZoom() >= 5 )
            updateBorders();
        $('#b_josm').css('visibility', map.getZoom() >= 7 ? 'visible' : 'hidden');
    });
    if( IMPORT_ENABLED ) {
        $('#import_link').css('display', 'none');
        $('#filefm').css('display', 'block');
        $('#filefm').attr('action', getServer('import'));
        var iframe = '<iframe name="import_frame" class="h_iframe" src="about:blank"></iframe>';
        $('#filefm').after(iframe);
    }
    $('#poly_all').attr('href', getPolyDownloadLink());
    // The bbox download link is refreshed right before the click follows it.
    $('#poly_bbox').on('mousedown', function() {
        $(this).attr('href', getPolyDownloadLink(true));
    });
    $('#r_green').val(size_good);
    $('#r_red').val(size_bad);
    // Enter in the search / rename inputs triggers the matching button.
    $('#fsearch').keyup(function(e) {
        if( e.keyCode == 13 )
            $('#b_search').click();
    });
    $('#b_rename').keyup(function(e) {
        if( e.keyCode == 13 )
            $('#do_rename').click();
    });
    checkHasOSM();
    filterSelect(true);
}
// Ask the server which optional tables/features are available and
// show or hide the matching pieces of the UI accordingly.
function checkHasOSM() {
    // Reveal the import form (duplicated logic kept in one place).
    var showImportForm = function() {
        $('#import_link').css('display', 'none');
        $('#filefm').css('display', 'block');
        $('#filefm').attr('action', getServer('import'));
        var iframe = '<iframe name="import_frame" class="h_iframe" src="about:blank"></iframe>';
        $('#filefm').after(iframe);
    };
    $.ajax(getServer('tables'), {
        success: function(res) {
            if( res.osm )
                $('#osm_actions').css('display', 'block');
            if( res.tables && res.tables.length > 0 ) {
                OLD_BORDERS_NAME = res.tables[0];
                $('#old_action').css('display', 'block');
                $('#josm_old').css('display', 'inline');
            }
            if( res.crossing )
                $('#cross_actions').css('display', 'block');
            if( !res.backup )
                $('#backups').css('display', 'none');
            if( res.readonly ) {
                $('#action_buttons').css('display', 'none');
                $('#import_link').css('display', 'none');
                $('#backups').css('display', 'none');
                readonly = true;
            }
            if( !res.readonly && IMPORT_ENABLED )
                showImportForm();
        }
    });
}
// Refresh everything that depends on the current viewport: borders,
// routing problem markers, crossing markers and (if shown) old borders.
function updateBorders() {
    var b = map.getBounds(),
        // More aggressive geometry simplification when zoomed out.
        simplified = map.getZoom() < 7 ? 2 : (map.getZoom() < 11 ? 1 : 0);
    $.ajax(getServer('bbox'), {
        data: {
            'simplify' : simplified,
            'xmin': b.getWest(),
            'xmax': b.getEast(),
            'ymin': b.getSouth(),
            'ymax': b.getNorth()
        },
        success: processResult,
        dataType: 'json',
        // Custom option: processResult reads it back as this.simplified.
        simplified: simplified
    });
    $.ajax(getServer('routing'), {
        data: {
            'xmin': b.getWest(),
            'xmax': b.getEast(),
            'ymin': b.getSouth(),
            'ymax': b.getNorth()
        },
        success: processRouting,
        dataType: 'json'
    });
    // Crossings are only requested when zoomed in enough.
    if (map.getZoom() >= 4) {
        $.ajax(getServer('crossing'), {
            data: {
                'xmin': b.getWest(),
                'xmax': b.getEast(),
                'ymin': b.getSouth(),
                'ymax': b.getNorth(),
                'points': (map.getZoom() < 10 ? 1 : 0),
                'rank': maxRank
            },
            success: processCrossing,
            dataType: 'json'
        });
    } else {
        crossingLayer.clearLayers();
    }
    // Old borders overlay: only when toggled on and the table is known.
    if( oldBordersLayer != null && OLD_BORDERS_NAME ) {
        oldBordersLayer.clearLayers();
        $.ajax(getServer('bbox'), {
            data: {
                'table': OLD_BORDERS_NAME,
                'simplify': simplified,
                'xmin': b.getWest(),
                'xmax': b.getEast(),
                'ymin': b.getSouth(),
                'ymax': b.getNorth()
            },
            success: processOldBorders,
            dataType: 'json'
        });
    }
}
// Popup texts for the routing problem types reported by the server.
var routingTypes = {1: "Border and feature are intersecting several times.",
                    2: "Unknown outgoing feature."};

// Replace the routing problem markers with the freshly downloaded set.
function processRouting(data) {
    routingGroup.clearLayers();
    for( var f = 0; f < data.features.length; f++ ) {
        // Bug fix: `marker` (and `routingTypes` above) leaked as implicit
        // globals — declare them.
        var marker = L.marker([data.features[f]["lat"], data.features[f]["lon"]]);
        marker.bindPopup(routingTypes[data.features[f]["type"]], {showOnMouseOver: true});
        routingGroup.addLayer(marker);
    }
}
// Callback for the 'bbox' request: replaces the set of displayed borders
// (keeping the one currently being edited), restores the selection and,
// when the "too small" overlay is on, reloads its markers.
function processResult(data) {
    // Drop every border except the one being edited.
    for( var id in borders ) {
        if( id != selectedId || !editing ) {
            bordersLayer.removeLayer(borders[id].layer);
            delete borders[id];
        }
    }
    for( var f = 0; f < data.features.length; f++ ) {
        var layer = L.GeoJSON.geometryToLayer(data.features[f].geometry),
            props = data.features[f].properties;
        // `this` is the ajax settings object; `simplified` is the custom
        // option set in updateBorders().
        props.simplified = this.simplified;
        if( 'name' in props && props.name != '' )
            updateBorder(props.name, layer, props);
    }
    // Re-select: the selected border may have disappeared from the bbox.
    if( selectedId in borders ) {
        selectLayer({ target: borders[selectedId].layer });
    } else {
        selectLayer(null);
    }
    var b = map.getBounds();
    if( tooSmallLayer != null ) {
        tooSmallLayer.clearLayers();
        $.ajax(getServer('small'), {
            data: {
                'xmin': b.getWest(),
                'xmax': b.getEast(),
                'ymin': b.getSouth(),
                'ymax': b.getNorth()
            },
            success: processTooSmall,
            dataType: 'json'
        });
    }
}
// Draw the previous border set as thin purple, non-clickable outlines.
function processOldBorders(data) {
	var outline = L.geoJson(data, {
		style: { color: 'purple', weight: 3, fill: false, clickable: false }
	});
	oldBordersLayer.addLayer(outline);
}
// Handle a 'small' response: place a marker on every region that is
// considered too small, with its name and area in the marker title.
// Clicking a marker selects the corresponding border layer.
function processTooSmall(data) {
	if( tooSmallLayer == null || !data || !('features' in data) )
		return;
	tooSmallLayer.clearLayers();
	var i, pt, tsm;
	for( i = 0; i < data.features.length; i++ ) {
		pt = data.features[i];
		// Only mark regions we actually have a layer for.
		if( pt.name in borders ) {
			tsm = L.marker([pt.lat, pt.lon], { title: pt.name + '\n' + 'Площадь: ' + L.Util.formatNum(pt.area / 1000000, 2) + ' км²' });
			// pLayer lets selectLayer() redirect the click to the border.
			tsm.pLayer = borders[pt.name].layer;
			tsm.on('click', selectLayer);
			tooSmallLayer.addLayer(tsm);
		}
	}
}
// Insert or replace one border in the cache and on the map.
// `id` is the border name, `layer` its Leaflet geometry, `props` the
// feature properties from the server (stored as the cache entry itself).
function updateBorder(id, layer, props) {
	if( id in borders ) {
		// Never touch the border that is currently being edited.
		if( id == selectedId && editing )
			return;
		bordersLayer.removeLayer(borders[id].layer);
	}
	borders[id] = props;
	borders[id].layer = layer;
	layer.id = id;
	bordersLayer.addLayer(layer);
	layer.setStyle(STYLE_BORDER);
	// Borders excluded from the build are drawn almost transparent.
	if( borders[id]['disabled'] )
		layer.setStyle({ fillOpacity: 0.01 });
	var color = getColor(borders[id]);
	layer.setStyle({ color: color });
	// Remember the filter color so selectLayer() can restore it later.
	layer.defStyle = color;
	layer.on('click', selectLayer);
}
// Click handler for border layers (and proxy markers with a pLayer field).
// Deselects the previous border, highlights the new one and fills the
// info panel. Passing null clears the selection. While a join operation
// is in progress, a click picks the join partner instead.
function selectLayer(e) {
	// Small-island markers carry the real border layer in pLayer.
	if( e != null && 'pLayer' in e.target )
		e.target = e.target.pLayer;
	if( e != null && joinSelected != null ) {
		bJoinSelect(e.target);
		return;
	}
	// Restore the default style of the previously selected border.
	if( selectedId && selectedId in borders ) {
		borders[selectedId].layer.setStyle(STYLE_BORDER);
		if( borders[selectedId]['disabled'] )
			borders[selectedId].layer.setStyle({ fillOpacity: 0.01 });
		if( 'defStyle' in borders[selectedId].layer )
			borders[selectedId].layer.setStyle({ color: borders[selectedId].layer.defStyle });
	}
	if( e != null && 'id' in e.target && e.target.id in borders ) {
		selectedId = e.target.id;
		e.target.setStyle(STYLE_SELECTED);
		var props = borders[selectedId];
		if( props['disabled'] )
			e.target.setStyle({ fillOpacity: 0.01 });
		// Fill the info panel with the selected border's properties.
		$('#b_name').text(props['name']);
		$('#b_size').text(Math.round(props['count_k'] * window.BYTES_FOR_NODE / 1024 / 1024) + ' MB');
		//$('#b_nodes').text(borders[selectedId].layer.getLatLngs()[0].length);
		$('#b_nodes').text(props['nodes']);
		$('#b_date').text(props['modified']);
		$('#b_area').text(L.Util.formatNum(props['area'] / 1000000, 2));
		$('#b_comment').val(props['comment'] || '');
		$('#b_status').text(props['disabled'] ? 'Отключено' : 'В сборке');
		$('#b_disable').text(props['disabled'] ? 'Вернуть' : 'Убрать');
	} else
		selectedId = null;
	$('#actions').css('visibility', selectedId == null ? 'hidden' : 'visible');
	$('#rename').css('display', 'none');
}
// Handle a change of the colouring filter dropdown (#f_type): show the
// matching options panel, manage the small-islands layer for 'topo' mode,
// and refresh the borders unless noRefresh is truthy.
function filterSelect(noRefresh) {
	// `var` added: `value` previously leaked into the global scope.
	var value = $('#f_type').val();
	$('#f_size').css('display', value == 'size' ? 'block' : 'none');
	$('#f_chars').css('display', value == 'chars' ? 'block' : 'none');
	$('#f_comments').css('display', value == 'comments' ? 'block' : 'none');
	$('#f_topo').css('display', value == 'topo' ? 'block' : 'none');
	if( value == 'topo' ) {
		tooSmallLayer = L.layerGroup();
		map.addLayer(tooSmallLayer);
	} else if( tooSmallLayer != null ) {
		map.removeLayer(tooSmallLayer);
		tooSmallLayer = null;
	}
	if( !noRefresh )
		updateBorders();
}
// Pick the fill colour for a border according to the active filter:
// by estimated mwm size, by ring topology, by characters in the name,
// or by presence of a comment. Falls back to the default border colour.
function getColor(props) {
	var color = STYLE_BORDER.color;
	// `var` added: `fType` previously leaked into the global scope.
	var fType = $('#f_type').val();
	if( fType == 'size' ) {
		if( props['count_k'] <= 0 )
			color = FILL_ZERO;
		else if( props['count_k'] * window.BYTES_FOR_NODE < size_good * 1024 * 1024 )
			color = FILL_TOO_SMALL;
		else if( props['count_k'] * window.BYTES_FOR_NODE > size_bad * 1024 * 1024 )
			color = FILL_TOO_BIG;
	} else if( fType == 'topo' ) {
		// rings = [outer ring count, hole count]
		var rings = countRings([0, 0], props.layer);
		if( rings[1] > 0 )
			color = FILL_TOO_BIG;
		else if( rings[0] == 1 )
			color = FILL_TOO_SMALL;
		else if( rings[0] == 0 )
			color = FILL_ZERO;
	} else if( fType == 'chars' ) {
		// Non-ASCII characters in the name are flagged "too big" (red).
		if( !/^[\x20-\x7F]*$/.test(props['name']) )
			color = FILL_TOO_BIG;
		else if( props['name'].indexOf(' ') < 0 )
			color = FILL_TOO_SMALL;
	} else if( fType == 'comments' ) {
		if( props['comment'] && props['comment'] != '' )
			color = FILL_TOO_BIG;
	}
	return color;
}
// Recursively count polygon rings: rings[0] accumulates outer rings,
// rings[1] accumulates holes. Pass [0, 0] as the initial accumulator.
function countRings( rings, polygon ) {
	if( polygon instanceof L.MultiPolygon ) {
		polygon.eachLayer(function(layer) {
			rings = countRings(rings, layer);
		});
	} else if( polygon instanceof L.Polygon ) {
		rings[0]++;
		// _holes is a Leaflet-internal field; guard against its absence.
		if( '_holes' in polygon && 'length' in polygon._holes )
			rings[1] += polygon._holes.length;
	}
	return rings;
}
// Send the search box query to the server; zoomToFound() handles the reply.
// Queries shorter than two characters are ignored.
function doSearch() {
	var query = $('#fsearch').val();
	if( query.length < 2 )
		return;
	$.ajax(getServer('search'), {
		data: { 'q': query },
		success: zoomToFound
	});
}
// Search reply handler: clear the search box and fit the map to the
// returned bounds, given as [xmin, ymin, xmax, ymax].
function zoomToFound(result) {
	$('#fsearch').val('');
	if( !('bounds' in result))
		return;
	var box = result['bounds'];
	if( box.length != 4 )
		return;
	var southWest = [box[1], box[0]],
	    northEast = [box[3], box[2]];
	map.fitBounds([southWest, northEast]);
}
// Read the size thresholds (MB) from the two inputs, sanitize them,
// write the sanitized values back and recolour the borders.
// size_good / size_bad are globals also read by getColor().
function bUpdateColors() {
	size_good = +$('#r_green').val();
	if( size_good <= 0 )
		size_good = 10;
	size_bad = +$('#r_red').val();
	// The "bad" threshold must stay above the "good" one.
	if( size_bad <= size_good )
		size_bad = size_good * 10;
	$('#r_green').val(size_good);
	$('#r_red').val(size_bad);
	updateBorders();
}
// Checkbox handler: create and fill the old-borders layer when checked,
// remove it when unchecked.
function bOldBorders() {
	var wantOld = $('#old').prop('checked');
	if( wantOld ) {
		oldBordersLayer = L.layerGroup();
		map.addLayer(oldBordersLayer);
		updateBorders();
		return;
	}
	if( oldBordersLayer != null ) {
		map.removeLayer(oldBordersLayer);
		oldBordersLayer = null;
	}
}
// Ask a locally running JOSM (remote control on port 8111) to import the
// .osm data produced by our server endpoint `method` with query `data`.
function importInJOSM(method, data ) {
	var url = getServer(method) + '?' + $.param(data);
	$.ajax({
		url: 'http://127.0.0.1:8111/import',
		data: { url: url, new_layer: 'true', format: '.osm' },
		complete: function(t) {
			// Any non-200 response means JOSM is absent or remote control is off.
			if( t.status != 200 )
				window.alert('Please enable remote_control in JOSM');
		}
	});
}
// Open the borders in the current viewport in JOSM.
function bJOSM() {
	var b = map.getBounds();
	importInJOSM('josm', {
		'xmin': b.getWest(),
		'xmax': b.getEast(),
		'ymin': b.getSouth(),
		'ymax': b.getNorth()
	});
}
// Open the OLD border set in the current viewport in JOSM.
function bJosmOld() {
	var b = map.getBounds();
	importInJOSM('josm', {
		'table': OLD_BORDERS_NAME,
		'xmin': b.getWest(),
		'xmax': b.getEast(),
		'ymin': b.getSouth(),
		'ymax': b.getNorth()
	});
}
// Tell JOSM (remote control) to zoom to the map's current viewport.
function bJosmZoom() {
	var b = map.getBounds();
	$.ajax({
		url: 'http://127.0.0.1:8111/zoom',
		data: {
			'left': b.getWest(),
			'right': b.getEast(),
			'bottom': b.getSouth(),
			'top': b.getNorth()
		}
	});
}
// Submit the hidden upload form (triggered by the file input's onchange).
// Uses the native submit() so jQuery submit handlers are not involved.
function bImport() {
	document.getElementById('filefm').submit();
}
// Show the rename box pre-filled with the selected border's name.
// Does nothing in read-only mode or without a valid selection.
function bShowRename() {
	if( !selectedId || !(selectedId in borders) || readonly )
		return;
	$('#b_rename').val(borders[selectedId].name);
	$('#rename').css('display', 'block');
}
// Send the new name from the rename box to the server and refresh.
function bRename() {
	if( !selectedId || !(selectedId in borders) )
		return;
	$('#rename').css('display', 'none');
	$.ajax(getServer('rename'), {
		data: { 'name': selectedId, 'newname': $('#b_rename').val() },
		success: updateBorders
	});
}
// Toggle the selected border's participation in the build
// (calls 'enable' or 'disable' depending on its current state).
function bDisable() {
	if( !selectedId || !(selectedId in borders) )
		return;
	$.ajax(getServer(borders[selectedId].disabled ? 'enable' : 'disable'), {
		data: { 'name': selectedId },
		success: updateBorders
	});
}
// Delete the selected border after an explicit confirmation.
function bDelete() {
	if( !selectedId || !(selectedId in borders) )
		return;
	if( !window.confirm('Точно удалить регион ' + selectedId + '?') )
		return;
	$.ajax(getServer('delete'), {
		data: { 'name': selectedId },
		success: updateBorders
	});
}
// Store `text` as the selected border's comment on the server.
function sendComment( text ) {
	if( !selectedId || !(selectedId in borders) )
		return;
	$.ajax(getServer('comment'), {
		data: { 'name': selectedId, 'comment': text },
		type: 'POST',
		success: updateBorders
	});
}
// Save the comment from the textarea.
function bComment() {
	sendComment($('#b_comment').val());
}
// Clear the textarea and remove the stored comment.
function bClearComment() {
	$('#b_comment').val('');
	sendComment('');
}
// State of the split operation: the drawn polyline and the region name
// captured when the split started.
var splitLayer = null,
    splitSelected = null;
// Enter split mode for the selected border: swap the panels and start
// drawing the cutting line.
function bSplit() {
	if( !selectedId || !(selectedId in borders) )
		return;
	splitSelected = selectedId;
	$('#s_sel').text(selectedId);
	$('#actions').css('display', 'none');
	$('#split').css('display', 'block');
	map.on('editable:drawing:end', bSplitDrawn);
	bSplitStart();
}
// Begin (or restart) drawing the split polyline.
function bSplitStart() {
	$('#s_do').css('display', 'none');
	splitLayer = null;
	map.editTools.startPolyline();
}
// Drawing finished: remember the polyline and reveal the action buttons.
function bSplitDrawn(e) {
	splitLayer = e.layer;
	$('#s_do').css('display', 'block');
}
// Discard the drawn line and let the user draw a new one.
function bSplitAgain() {
	map.editTools.stopDrawing();
	if( splitLayer != null )
		map.removeLayer(splitLayer);
	bSplitStart();
}
// Serialize the drawn split polyline into a WKT LINESTRING
// ("lon lat" pairs rounded to 6 decimal places).
// Shared helper for bSplitDo() and bSplitJosm(), which previously
// duplicated this loop and leaked the loop index `i` into global scope.
function getSplitLineWkt() {
	var lls = splitLayer.getLatLngs();
	var points = [];
	for( var i = 0; i < lls.length; i++ )
		points.push(L.Util.formatNum(lls[i].lng, 6) + ' ' + L.Util.formatNum(lls[i].lat, 6));
	return 'LINESTRING(' + points.join(',') + ')';
}
// Ask the server to split the chosen region along the drawn line,
// then leave split mode.
function bSplitDo() {
	$.ajax(getServer('split'), {
		data: { 'name': splitSelected, 'line': getSplitLineWkt() },
		// was misspelled 'datatype', which jQuery silently ignored
		dataType: 'json',
		success: function(data) { if( data.status != 'ok' ) alert(data.status); else updateBorders(); }
	});
	bSplitCancel();
}
// Load the border fragments along the drawn line into JOSM for editing.
function bSplitJosm() {
	importInJOSM('josmbord', {
		'name': splitSelected,
		'line': getSplitLineWkt()
	});
}
// Leave split mode: stop drawing, drop the drawn line, restore the panel.
function bSplitCancel() {
	map.editTools.stopDrawing();
	if( splitLayer != null ) {
		map.removeLayer(splitLayer);
	}
	$('#actions').css('display', 'block');
	$('#split').css('display', 'none');
}
// State of the join operation: the region the join started from and the
// region the user picked as the partner.
var joinSelected = null, joinAnother = null;
// Enter join mode for the selected border: the next clicked border
// becomes the join partner (handled via selectLayer -> bJoinSelect).
function bJoin() {
	if( !selectedId || !(selectedId in borders) )
		return;
	joinSelected = selectedId;
	joinAnother = null;
	$('#j_sel').text(selectedId);
	$('#actions').css('display', 'none');
	$('#j_do').css('display', 'none');
	$('#join').css('display', 'block');
}
// called from selectLayer() when joinSelected is not null
// Record the clicked border as the join partner (must differ from the
// region the join started from) and reveal the confirm button.
function bJoinSelect(layer) {
	if( 'id' in layer && layer.id in borders && layer.id != joinSelected ) {
		joinAnother = layer.id;
		$('#j_name2').text(joinAnother);
		$('#j_do').css('display', 'block');
	}
}
// Ask the server to merge the two chosen regions, then leave join mode.
function bJoinDo() {
	if( joinSelected != null && joinAnother != null ) {
		$.ajax(getServer('join'), {
			data: { 'name': joinSelected, 'name2': joinAnother },
			success: updateBorders
		});
	}
	bJoinCancel();
}
// Leave join mode and restore the actions panel.
function bJoinCancel() {
	joinSelected = null;
	$('#actions').css('display', 'block');
	$('#join').css('display', 'none');
}
// Draggable marker used to pick a point for the "region from point" tool.
var pMarker = L.marker([0, 0], { draggable: true });
// Enter point mode: show the panel, pre-fill the name input with the
// current selection (if any) and drop the marker at the map centre.
function bPoint() {
	$('#p_name').val(selectedId && selectedId in borders ? selectedId : '');
	selectLayer(null);
	$('#actions').css('display', 'none');
	$('#point').css('display', 'block');
	pMarker.setLatLng(map.getCenter());
	map.addLayer(pMarker);
}
// Ask the server which OSM regions contain the marker's position.
function bPointList() {
	var ll = pMarker.getLatLng();
	$.ajax(getServer('point'), {
		data: { 'lat': ll.lat, 'lon': ll.lng },
		dataType: 'json',
		success: updatePointList
	});
}
// Render the list of OSM regions containing the picked point.
// Each entry is a link that imports that region via pPointSelect().
function updatePointList(data) {
	var list = $('#p_list');
	list.text('');
	if( !data || !('borders' in data) )
		return;
	for( var i = 0; i < data.borders.length; i++ ) {
		var b = data.borders[i];
		var a = document.createElement('a');
		a.href = '#';
		// IIFE captures this iteration's id/name for the click handler.
		a.onclick = (function(id, name) { return function() { pPointSelect(id, name); return false } })(b['id'], b['name']);
		list.append(a, $('<br>'));
		$(a).text(b['admin_level'] + ': ' + b['name'] + ' (' + Math.round(b['area']) + ' км²)');
	}
}
// Import OSM region `id` under the name from the input box; a '*' in the
// input is replaced with the OSM region's own name (`name1`).
function pPointSelect(id, name1) {
	var name = $('#p_name').val();
	name = name.replace('*', name1);
	$.ajax(getServer('from_osm'), {
		data: { 'name': name, 'id': id },
		success: updateBorders
	});
	bPointCancel();
}
// Leave point mode: restore the panel and remove the marker.
function bPointCancel() {
	$('#actions').css('display', 'block');
	$('#point').css('display', 'none');
	$('#p_list').text('');
	map.removeLayer(pMarker);
}
// State of the divide operation: the preview layer and the region name
// captured when the operation started.
var divPreview = null, divSelected = null;
// Enter divide mode: the selected region will be replaced by OSM
// subregions matched by the 'like'/'where' query below.
function bDivide() {
	if( !selectedId || !(selectedId in borders) )
		return;
	divSelected = selectedId;
	$('#actions').css('display', 'none');
	$('#d_do').css('display', 'none');
	$('#d_none').css('display', 'none');
	$('#divide').css('display', 'block');
	// pre-fill 'like' and 'where' fields
	$('#d_like').val(borders[selectedId].name);
	$('#d_prefix').val(borders[selectedId].name);
	$('#d_where').val('admin_level = 4');
}
// Request a preview of the subregions that the divide query would produce.
function bDividePreview() {
	if( divPreview != null ) {
		map.removeLayer(divPreview);
		divPreview = null;
	}
	$('#d_do').css('display', 'none');
	$('#d_none').css('display', 'none');
	$.ajax(getServer('divpreview'), {
		data: {
			'like': $('#d_like').val(),
			'query': $('#d_where').val()
		},
		success: bDivideDrawPreview
	});
}
// Draw the preview polygons in blue and show the confirm button,
// or show the "no regions" notice when the query matched nothing.
function bDivideDrawPreview(geojson) {
	if( !('features' in geojson) || !geojson.features.length ) {
		$('#d_none').css('display', 'block');
		return;
	}
	divPreview = L.geoJson(geojson, {
		style: function(f) {
			return { color: 'blue', weight: 1, fill: false };
		}
	});
	map.addLayer(divPreview);
	$('#d_count').text(geojson.features.length);
	$('#d_do').css('display', 'block');
}
// Perform the divide: replace the chosen region with subregions,
// prefixing their names with the value of #d_prefix.
function bDivideDo() {
	$.ajax(getServer('divide'), {
		data: {
			'name': divSelected,
			'prefix': $('#d_prefix').val(),
			'like': $('#d_like').val(),
			'query': $('#d_where').val()
		},
		success: updateBorders
	});
	bDivideCancel();
}
// Leave divide mode: drop the preview and restore the panel.
function bDivideCancel() {
	if( divPreview != null ) {
		map.removeLayer(divPreview);
		divPreview = null;
	}
	divSelected = null;
	$('#actions').css('display', 'block');
	$('#divide').css('display', 'none');
}
// Split off the islands of the selected region, keeping the largest
// polygon under the original name ('chop1' endpoint).
function bLargest() {
	if( !selectedId || !(selectedId in borders) )
		return;
	var request = { data: { 'name': selectedId }, success: updateBorders };
	$.ajax(getServer('chop1'), request);
}
// Replace the selected region's geometry with its single outer hull.
function bHull() {
	if( !selectedId || !(selectedId in borders) )
		return;
	var request = { data: { 'name': selectedId }, success: updateBorders };
	$.ajax(getServer('hull'), request);
}
// Open the backups panel and request the list of stored border sets.
function bBackup() {
	$('#actions').css('display', 'none');
	$('#backup_saving').css('display', 'none');
	$('#backup_restoring').css('display', 'none');
	$('#backup_save').attr('disabled', false);
	$('#backup_list').text('');
	$('#backup').css('display', 'block');
	$.ajax(getServer('backlist'), {
		success: updateBackupList
	});
}
// Close the backups panel and restore the actions panel.
function bBackupCancel() {
	$('#actions').css('display', 'block');
	$('#backup').css('display', 'none');
}
// Rebuild the backup list: each entry restores its border set on click;
// every entry except the newest (index 0) also gets a small [x] link
// that deletes that backup.
function updateBackupList(data) {
	var container = $('#backup_list');
	container.text('');
	if( !data || !('backups' in data) )
		return;
	for( var i = 0; i < data.backups.length; i++ ) {
		var item = data.backups[i];
		var restoreLink = document.createElement('a');
		restoreLink.href = '#';
		// Capture this iteration's timestamp for the click handler.
		restoreLink.onclick = (function(ts) {
			return function() { bBackupRestore(ts); return false }
		})(item['timestamp']);
		$(restoreLink).text(item['text'] + ' (' + item['count'] + ')');
		if( i == 0 ) {
			container.append(restoreLink, $('<br>'));
		} else {
			var deleteLink = document.createElement('a');
			deleteLink.className = 'back_del';
			deleteLink.href = '#';
			deleteLink.onclick = (function(ts) {
				return function() { bBackupDelete(ts); return false }
			})(item['timestamp']);
			$(deleteLink).text('[x]');
			container.append(restoreLink, document.createTextNode(' '), deleteLink, $('<br>'));
		}
	}
}
// Save the current border set as a new backup; the button is disabled
// while the request is in flight.
function bBackupSave() {
	$.ajax(getServer('backup'), {
		success: bBackupCancel
	});
	$('#backup_save').attr('disabled', true);
	$('#backup_saving').css('display', 'block');
}
// Restore the border set identified by `timestamp`, then refresh the map.
function bBackupRestore(timestamp) {
	$.ajax(getServer('restore'), {
		data: { 'timestamp': timestamp },
		success: function() { bBackupCancel(); updateBorders(); }
	});
	$('#backup_list').text('');
	$('#backup_restoring').css('display', 'block');
}
// Delete the backup identified by `timestamp` and close the panel.
function bBackupDelete(timestamp) {
	$.ajax(getServer('backdelete'), {
		data: { 'timestamp': timestamp }
	});
	bBackupCancel();
}
// Build the download URL for the 'poly' endpoint; when `bbox` is truthy
// the link is restricted to the currently visible viewport.
function getPolyDownloadLink(bbox) {
	var url = getServer('poly');
	if( !bbox )
		return url;
	var b = map.getBounds();
	return url + '?' + $.param({
		'xmin': b.getWest(),
		'xmax': b.getEast(),
		'ymin': b.getSouth(),
		'ymax': b.getNorth()
	});
}
// State of the fix-crossing operation: the region being fixed, the
// preview layer, and the set of selected crossing segment ids.
var crossSelected = null, fcPreview = null;
var selectedCrossings = {};
// Recolour a crossing segment: red when selected, blue otherwise.
function crossingUpdateColor(layer) {
	if( 'setStyle' in layer )
		layer.setStyle({ color: selectedCrossings[layer.crossId] ? 'red' : 'blue' });
}
// Click handler for crossing segments: toggle the segment's selection
// (only while a fix-crossing operation is active).
function crossingClicked(e) {
	if( !crossSelected )
		return;
	var layer = e.target;
	if( 'crossId' in layer ) {
		var id = layer.crossId;
		if( selectedCrossings[id] )
			delete selectedCrossings[id];
		else
			selectedCrossings[id] = true;
		crossingUpdateColor(layer);
	}
}
// Bring all crossing segments to the front so they stay clickable above
// the border polygons.
// NOTE(review): the `selectable` parameter is currently unused —
// presumably a leftover or a hook for future behaviour; verify callers.
function setBordersSelectable(selectable) {
	crossingLayer.eachLayer(function(l) {
		l.bringToFront();
	});
}
// Handle a 'crossing' response: redraw every crossing segment, tagging
// each layer with its id and region so selection can find it later.
function processCrossing(data) {
	crossingLayer.clearLayers();
	for( var f = 0; f < data.features.length; f++ ) {
		var layer = L.GeoJSON.geometryToLayer(data.features[f].geometry),
		    props = data.features[f].properties;
		// Stringify the id: it is used as a key in selectedCrossings.
		layer.crossId = '' + props.id;
		layer.crossRegion = props.region;
		crossingUpdateColor(layer);
		layer.on('click', crossingClicked);
		crossingLayer.addLayer(layer);
	}
}
// With a region name: mark all of that region's crossing segments as
// selected. With a falsy argument: clear every selection instead.
function selectCrossingByRegion(region) {
	crossingLayer.eachLayer(function(l) {
		if( !l.crossId )
			return;
		if( region ) {
			if( l.crossRegion == region ) {
				selectedCrossings[l.crossId] = true;
				crossingUpdateColor(l);
			}
		} else {
			delete selectedCrossings[l.crossId];
			crossingUpdateColor(l);
		}
	});
}
// Enter fix-crossing mode for the selected region: show the panel and
// pre-select all crossing segments belonging to that region.
function bFixCross() {
	if( !selectedId || !(selectedId in borders) )
		return;
	setBordersSelectable(false);
	crossSelected = selectedId;
	fcPreview = null;
	$('#actions').css('display', 'none');
	$('#fc_sel').text(crossSelected);
	$('#fc_do').css('display', 'none');
	$('#fixcross').css('display', 'block');
	selectCrossingByRegion(crossSelected);
}
// Request a preview of the fixed region border (preview=1 asks the
// server not to modify anything).
function bFixCrossPreview() {
	if( fcPreview != null ) {
		map.removeLayer(fcPreview);
		fcPreview = null;
	}
	$('#fc_do').css('display', 'none');
	$.ajax(getServer('fixcrossing'), {
		data: {
			'preview': 1,
			'region': crossSelected,
			'ids': Object.keys(selectedCrossings).join(',')
		},
		success: bFixCrossDrawPreview
	});
}
// Draw the previewed border in red and reveal the confirm button.
function bFixCrossDrawPreview(geojson) {
	if( !('geometry' in geojson) ) {
		return;
	}
	fcPreview = L.geoJson(geojson, {
		style: function(f) {
			return { color: 'red', weight: 1, fill: false };
		}
	});
	map.addLayer(fcPreview);
	$('#fc_do').css('display', 'block');
}
// Apply the crossing fix: extend the region's border to include the
// selected segments, then leave fix-crossing mode.
function bFixCrossDo() {
	$.ajax(getServer('fixcrossing'), {
		data: {
			'region': crossSelected,
			'ids': Object.keys(selectedCrossings).join(',')
		},
		success: updateBorders
	});
	bFixCrossCancel();
}
// Leave fix-crossing mode: drop the preview, clear selections, refresh.
function bFixCrossCancel() {
	if( fcPreview != null ) {
		map.removeLayer(fcPreview);
		fcPreview = null;
	}
	crossSelected = null;
	selectCrossingByRegion(false);
	selectedCrossings = {};
	updateBorders();
	$('#actions').css('display', 'block');
	$('#fixcross').css('display', 'none');
}

View file

@ -1,6 +0,0 @@
// Estimated mwm file bytes per OSM node; used for map size predictions.
window.BYTES_FOR_NODE = 8;
// Build an API URL: the '/borders-api' base plus an optional endpoint.
function getServer(endpoint) {
	var base = '/borders-api';
	if( !endpoint )
		return base;
	return base + '/' + endpoint;
}

View file

@ -1,20 +0,0 @@
<!doctype html>
<!-- Standalone page for uploading a corrected borders .osm file.
     On load, init() rewrites the hard-coded form action to the
     API server configured in config.js. -->
<html>
<head>
<meta charset="utf-8">
<title>Заливка поправленных границ</title>
<script src="config.js"></script>
</head>
<body onload="init();">
<form action="http://127.0.0.1:5000/import" enctype="multipart/form-data" method="post" id="filefm" target="import_frame">
Импорт <input type="file" accept=".osm,.xml" name="file">
<input type="submit">
</form>
<!-- The server's import response is shown inside this iframe. -->
<iframe name="import_frame" width="500" height="200" src="about:blank"></iframe>
<script>
function init() {
	document.getElementById('filefm').action = getServer('import');
}
</script>
</body>
</html>

View file

@ -1,180 +0,0 @@
<!doctype html>
<!-- Main border editor page: a Leaflet map plus a side panel with
     filtering, searching, and per-region editing actions. All logic
     lives in borders.js; server location comes from config.js. -->
<html>
<head>
<meta charset="utf-8">
<title>Редактор границ для MAPS.ME</title>
<link rel="stylesheet" href="lib/leaflet.css" />
<script src="lib/leaflet.js"></script>
<script src="lib/Leaflet.Editable.js"></script>
<script src="lib/leaflet-hash.js"></script>
<script src="lib/jquery-1.11.2.min.js"></script>
<script src="config.js"></script>
<script src="borders.js"></script>
<style>
html, body, #map, #panel { margin: 0; height: 100%; }
#panel { width: 250px; float: right; padding: 1em; font-family: sans-serif; }
#panel button { font-size: 12pt; margin: 4px 0; }
#map { margin-right: 250px; }
#actions { visibility: hidden; }
#osm_actions { display: none; margin-top: 1em; }
#info { margin-top: 2em; }
#b_delete, #b_clear, .back_del { font-size: 8pt; }
#rename, #split, #join, #point, #divide, #backup, #fixcross { display: none; }
.actions input[type='text'], #search input[type='text'] { width: 150px; }
#header { border-bottom: 1px solid gray; margin-bottom: 1em; padding-bottom: 1em; }
#f_topo, #f_chars, #f_comments, #links { font-size: 10pt; }
#backup_saving, #backup_restoring { margin-bottom: 1em; }
#filefm, #old_action, #josm_old, #cross_actions { display: none; }
.h_iframe { display: none; width: 230px; height: 80px; }
</style>
</head>
<body onload="init();">
<div id="panel">
	<!-- Header: colouring filter, JOSM buttons, import/export links, search. -->
	<div id="header">
		<div id="filter">
			Раскраска по <select size="1" id="f_type" value="size" onchange="filterSelect()">
				<option value="size">размеру</option>
				<option value="topo">топологии</option>
				<option value="chars">буквам в назв.</option>
				<option value="comments">комментариям</option>
			</select>
			<div id="f_size">
				Цвета: 0<input type="text" size="2" id="r_green"><input type="text" size="3" id="r_red">
				<button onclick="bUpdateColors()">&#10003;</button>
			</div>
			<div id="f_topo">
				Красный — есть дыры, синий — больше одного полигона.
				Маркерами обозначены мелкие острова.
			</div>
			<div id="f_chars">
				Синий — есть пробелы, красный — символы не из ASCII.
			</div>
			<div id="f_comments">
				Красный — есть комментарий.
			</div>
		</div>
		<div id="b_josm">
			<button onclick="bJOSM()">Открыть в JOSM</button>
			<button id="josm_old" onclick="bJosmOld()">ст.</button>
			<button onclick="bJosmZoom()">&#x1f50d;</button>
		</div>
		<!-- The action URL is filled in by borders.js when import is enabled. -->
		<form action="" enctype="multipart/form-data" method="post" id="filefm" target="import_frame">
			Импорт <input type="file" accept=".osm,.xml" name="file" id="b_import" onchange="bImport();" style="max-width: 100px;">
		</form>
		<div id="backups">
			<button onclick="bBackup()">Архив границ</button><br>
		</div>
		<div id="old_action">
			<input type="checkbox" id="old" onchange="bOldBorders()"><label for="old"> старые границы</label>
		</div>
		<div id="links">
			<a href="stat.html">Статистика</a>
			<a href="import.html" id="import_link">Импорт</a><br>
			<a href="#" id="poly_bbox">Скачать в poly видимое</a>,
			<a href="#" id="poly_all">всё</a>
		</div>
		<div id="search">
			Поиск <input type="text" id="fsearch"><button id="b_search" onclick="doSearch()">&#x1f50d;</button>
		</div>
	</div>
	<!-- Actions panel: shown when a region is selected on the map. -->
	<div id="actions" class="actions">
		<div id="action_buttons">
			<button onclick="bDisable()" id="b_disable">Убрать</button>
			<a href="#" onclick="bDelete(); return false;" id="b_delete">Удалить</a><br>
			<button onclick="bSplit()">Разрезать</button><br>
			<button onclick="bJoin()">Склеить</button><br>
			<button onclick="bLargest()">Выделить острова</button><br>
			<button onclick="bHull()">Единый контур</button><br>
			<div id="osm_actions">
				<button onclick="bDivide()">Заменить регионами</button><br>
				<button onclick="bPoint()">Регион из точки</button><br>
			</div>
			<div id="cross_actions">
				<button onclick="bFixCross()">Исправить меж-mwm</button><br>
			</div>
		</div>
		<!-- Info card for the selected region; filled by selectLayer(). -->
		<div id="info">
			<b><a href="#" id="name_link" onclick="bShowRename(); return false;">Название:</a></b>
			<span id="b_name"></span><br>
			<div id="rename">
				<input type="text" id="b_rename">
				<button id="do_rename" onclick="bRename()">Переименовать</button>
			</div>
			<b>Оценка размера:</b> <span id="b_size"></span><br>
			<b>Последняя правка:</b> <span id="b_date"></span><br>
			<b>Количество точек:</b> <span id="b_nodes"></span><br>
			<b>Площадь:</b> <span id="b_area"></span> км²<br>
			<b>Статус:</b> <span id="b_status"></span><br>
			<b>Комментарий:</b><br>
			<textarea style="width: 240px; height: 200px;" id="b_comment"></textarea><br>
			<button id="b_comment_send" onclick="bComment()">Отправить</button>
			<a href="#" onclick="bClearComment(); return false;" id="b_clear">Очистить</a>
		</div>
	</div>
	<!-- Mode panels below: split / join / point / divide / backup / fixcross.
	     Exactly one of them replaces the actions panel while its mode is active. -->
	<div id="split" class="actions">
		Выбрана область <span id="s_sel"></span>.<br><br>
		Нарисуйте линию через выбранную область, затем нажмите кнопку<br>
		<div id="s_do">
			<button onclick="bSplitDo()">Разрезать область</button><br>
			<button onclick="bSplitJosm()">Границы вдоль — в JOSM</button><br>
			<button onclick="bSplitAgain()">Нарисовать по-другому</button>
		</div>
		<br>
		<button onclick="bSplitCancel()">Вернуться</button>
	</div>
	<div id="join" class="actions">
		Выбрана область <span id="j_sel"></span>.<br><br>
		Выберите область, которую к ней присоединить.<br>
		<div id="j_do">
			Выбрана: <span id="j_name2"></span><br>
			<button onclick="bJoinDo()">Склеить области</button>
		</div>
		<br>
		<button onclick="bJoinCancel()">Вернуться</button>
	</div>
	<div id="point" class="actions">
		Переместите маркер в нужную точку и нажмите<br>
		<button onclick="bPointList()">Получить список областей</button><br>
		Название <input type="text" id="p_name"><br>
		<div id="p_list"></div>
		<br>
		<button onclick="bPointCancel()">Вернуться</button>
	</div>
	<div id="divide" class="actions">
		Выбранная область будет заменена вложенными областями в соответствии с запросом.<br>
		<br>
		Like для названия выбранной области <input type="text" id="d_like"><br>
		Where для субобластей <input type="text" id="d_where"><br>
		<button onclick="bDividePreview()">Посмотреть субобласти</button><br>
		<div id="d_none">Нет областей</div>
		<div id="d_do">
			<span id="d_count"></span> областей<br>
			Префикс <input type="text" id="d_prefix"><br>
			<button onclick="bDivideDo()">Заменить область</button>
		</div>
		<br>
		<button onclick="bDivideCancel()">Вернуться</button>
	</div>
	<div id="backup" class="actions">
		<button onclick="bBackupSave()" id="backup_save">Сохранить границы</button>
		<div id="backup_saving">Копирую границы...</div>
		<div id="backup_restoring">Восстанавливаю границы...</div>
		<div>Или выберите набор границ для восстановления:</div>
		<div id="backup_list"></div>
		<button onclick="bBackupCancel()">Вернуться</button>
	</div>
	<div id="fixcross" class="actions">
		Границы региона <span id="fc_sel"></span> будут поправлены, чтобы включать в себя подсвеченные красным линии.
		Кликайте на линии, чтобы изменять их статус.<br>
		<br>
		<button onclick="bFixCrossPreview()">Посмотреть, что получится</button><br>
		<div id="fc_do">
			<button onclick="bFixCrossDo()">Включить линии в контур</button>
		</div>
		<button onclick="bFixCrossCancel()">Вернуться</button>
	</div>
</div>
<div id="map"></div>
</body>
</html>

View file

@ -1,61 +0,0 @@
<!doctype html>
<!-- Borders statistics page: sections are hidden until stat.js fills
     them from the server's 'stat' endpoint; each "список" link toggles
     the corresponding hidden region list. -->
<html>
<head>
<meta charset="utf-8">
<title>Статистика границ для MAPS.ME</title>
<script src="lib/jquery-1.11.2.min.js"></script>
<script src="config.js"></script>
<script src="stat.js"></script>
<style>
body > div { display: none; margin-bottom: 1em; }
#sizes > div { margin-top: 1em; }
.h { display: none; padding-left: 1em; }
</style>
</head>
<body onload="statInit();">
<h1>Статистика по границам</h1>
<div id="total">
	Всего границ: <span id="total_total"></span><br>
</div>
<div id="sizes">
	<div>
		Отключено из сборки: <span id="total_disabled"></span> (<a href="#" onclick="return statOpen('total_disabled_list');">список</a>)<br>
		<div id="total_disabled_list" class="h"></div>
		Прокомментировано: <span id="total_commented"></span> (<a href="#" onclick="return statOpen('total_commented_list');">список</a>)<br>
		<div id="total_commented_list" class="h"></div>
	</div>
	<div>
		Названий с пробелами: <span id="names_spaces"></span><br>
		Названий с левыми символами: <span id="names_bad"></span> (<a href="#" onclick="return statOpen('names_bad_list');">список</a>)<br>
		<div id="names_bad_list" class="h"></div>
	</div>
	<div>
		Размер MWM до 1 МБ: <span id="sizes_1mb"></span> (<a href="#" onclick="return statOpen('sizes_1mb_list');">список</a>)<br>
		<div id="sizes_1mb_list" class="h"></div>
		Размер MWM больше <span class="mb_limit"></span> МБ: <span id="sizes_50mb"></span> (<a href="#" onclick="return statOpen('sizes_50mb_list');">список</a>)<br>
		<div id="sizes_50mb_list" class="h"></div>
		Из них больше <span class="mb_limit2"></span> МБ: <span id="sizes_100mb"></span> (<a href="#" onclick="return statOpen('sizes_100mb_list');">список</a>)<br>
		<div id="sizes_100mb_list" class="h"></div>
	</div>
	<div>
		Регионов меньше <span class="km_limit"></span> км²: <span id="areas_100km"></span> (<a href="#" onclick="return statOpen('areas_100km_list');">список</a>)<br>
		<div id="areas_100km_list" class="h"></div>
		Регионов от <span class="point_limit"></span> тысяч точек в контуре: <span id="areas_50k_points"></span> (<a href="#" onclick="return statOpen('areas_50k_points_list');">список</a>)<br>
		<div id="areas_50k_points_list" class="h"></div>
		Регионов до 50 точек в контуре: <span id="areas_100_points"></span> (<a href="#" onclick="return statOpen('areas_100_points_list');">список</a>)<br>
		<div id="areas_100_points_list" class="h"></div>
		Регионов с неизвестной площадью: <span id="areas_0"></span> (<a href="#" onclick="return statOpen('areas_0_list');">список</a>)<br>
		<div id="areas_0_list" class="h"></div>
	</div>
</div>
<div id="topo">
	Регионов с дырками: <span id="topo_holes"></span> (<a href="#" onclick="return statOpen('topo_holes_list');">список</a>)<br>
	<div id="topo_holes_list" class="h"></div>
	Регионов из нескольких частей: <span id="topo_multi"></span> (<a href="#" onclick="return statOpen('topo_multi_list');">список</a>)<br>
	<div id="topo_multi_list" class="h"></div>
	Регионов с островами меньше <span class="km_limit"></span> км²: <span id="topo_100km"></span> (<a href="#" onclick="return statOpen('topo_100km_list');">список</a>)<br>
	<div id="topo_100km_list" class="h"></div>
	<hr>
</div>
</body>
</html>

View file

@ -1,157 +0,0 @@
// Thresholds for statistics buckets: mwm size limits in MB,
// region area limit in km², and contour point count limit.
var MB_LIMIT = 50, MB_LIMIT2 = 80;
var KM_LIMIT = 50, POINT_LIMIT = 50000;
// Page entry point: write the limits into the page text and start the
// chain of statistics queries (total -> sizes -> topo).
function statInit() {
	$('.mb_limit').text(MB_LIMIT);
	$('.mb_limit2').text(MB_LIMIT2);
	$('.km_limit').text(KM_LIMIT);
	$('.point_limit').text(Math.round(POINT_LIMIT / 1000));
	statQuery('total', statTotal);
}
// Toggle visibility of the list <div> with the given id.
// Returns false so the inline `onclick="return statOpen(...)"` handlers
// suppress the default '#' navigation (previously returned undefined,
// which does not cancel the click and scrolled the page to the top).
function statOpen(id) {
	var div = document.getElementById(id);
	if( div.style.display != 'block' )
		div.style.display = 'block';
	else
		div.style.display = 'none';
	return false;
}
// Fetch one statistics group from the server, pass the data to
// `callback`, then reveal the matching <div> (its id equals the group).
function statQuery(id, callback) {
	$.ajax(getServer('stat'), {
		data: { 'group': id },
		success: function(data) {
			callback(data);
			document.getElementById(id).style.display = 'block';
		},
		error: function() { alert('Failed!'); }
	});
}
// Round `value` to `digits` decimal places; when `digits` is omitted
// the value is returned unchanged.
function formatNum(value, digits) {
	if( digits == undefined )
		return value;
	var factor = Math.pow(10, digits);
	return Math.round(value * factor) / factor;
}
// Write a (possibly rounded) number into the element with the given id,
// HTML-escaping the text. Uses global regexes: the previous
// String.replace with a string pattern escaped only the FIRST
// occurrence of '&' and '<'.
function statFill(id, value, digits) {
	var text = '' + formatNum(value, digits);
	document.getElementById(id).innerHTML = text.replace(/&/g, '&amp;').replace(/</g, '&lt;');
}
// Build a link into the editor map centred on the region:
// zoom 8 for large regions (area > 1000 km²), zoom 12 otherwise.
function getIndexLink(region) {
	var zoom = region.area > 1000 ? 8 : 12;
	return 'index.html#' + zoom + '/' + region.lat + '/' + region.lon;
}
// Fill <div id> with one map link per region.
// `comment` is either a property name or a function; when it yields a
// truthy value, it is appended to the link text in parentheses.
// `count`, when given, names a counter element filled with regions.length.
function statFillList(id, regions, comment, count) {
	var div = document.getElementById(id), i, a, html, p;
	if( !div ) {
		console.log('Div ' + id + ' not found');
		return;
	}
	if( count )
		statFill(count, regions.length);
	for( i = 0; i < regions.length; i++ ) {
		a = document.createElement('a');
		a.href = getIndexLink(regions[i]);
		a.target = '_blank';
		html = regions[i].name;
		if( comment ) {
			if( typeof comment == 'string' )
				p = regions[i][comment];
			else
				p = comment(regions[i]);
			if( p )
				html += ' (' + p + ')';
		}
		// Global regexes: the previous string-pattern replace escaped
		// only the first occurrence of '&' and '<'.
		a.innerHTML = html.replace(/&/g, '&amp;').replace(/</g, '&lt;');
		div.appendChild(a);
		div.appendChild(document.createElement('br'));
	}
}
// 'total' group handler: show the overall border count and chain the
// 'sizes' query.
function statTotal(data) {
	statFill('total_total', data.total);
	statQuery('sizes', statSizes);
}
// 'sizes' group handler: bucket every region by estimated mwm size,
// area, contour point count, name quality, and disabled/commented
// status, then fill the corresponding lists. Chains the 'topo' query.
function statSizes(data) {
	var list_1mb = [], list_50mb = [], list_100mb = [];
	var list_spaces = [], list_bad = [];
	var list_100km = [], list_100kp = [], list_zero = [];
	var list_100p = [];
	var list_disabled = [], list_commented = [];
	for( var i = 0; i < data.regions.length; i++ ) {
		// `var` added: `region` previously leaked into the global scope.
		var region = data.regions[i];
		if( region.area > 0 && region.area < KM_LIMIT )
			list_100km.push(region);
		if( region.area <= 0 )
			list_zero.push(region);
		if( region.nodes > POINT_LIMIT )
			list_100kp.push(region);
		if( region.nodes < 50 )
			list_100p.push(region);
		// Estimated mwm size in megabytes; cached on the region for sorting.
		var size_mb = region.size * window.BYTES_FOR_NODE / 1024 / 1024;
		region.size_mb = size_mb;
		if( size_mb < 1 )
			list_1mb.push(region);
		if( size_mb > MB_LIMIT )
			list_50mb.push(region);
		if( size_mb > MB_LIMIT2 )
			list_100mb.push(region);
		if( !/^[\x20-\x7F]*$/.test(region.name) )
			list_bad.push(region);
		if( region.name.indexOf(' ') >= 0 )
			list_spaces.push(region);
		if( region.disabled )
			list_disabled.push(region);
		if( region.commented )
			list_commented.push(region);
	}
	statFill('names_spaces', list_spaces.length);
	statFillList('names_bad_list', list_bad, null, 'names_bad');
	statFillList('total_disabled_list', list_disabled, null, 'total_disabled');
	statFillList('total_commented_list', list_commented, null, 'total_commented');
	list_1mb.sort(function(a, b) { return a.size_mb - b.size_mb; });
	list_50mb.sort(function(a, b) { return a.size_mb - b.size_mb; });
	list_100mb.sort(function(a, b) { return b.size_mb - a.size_mb; });
	statFillList('sizes_1mb_list', list_1mb, function(r) { return formatNum(r.size_mb, 2) + ' МБ'; }, 'sizes_1mb');
	statFillList('sizes_50mb_list', list_50mb, function(r) { return formatNum(r.size_mb, 0) + ' МБ'; }, 'sizes_50mb');
	statFillList('sizes_100mb_list', list_100mb, function(r) { return formatNum(r.size_mb, 0) + ' МБ'; }, 'sizes_100mb');
	list_100km.sort(function(a, b) { return a.area - b.area; });
	list_100kp.sort(function(a, b) { return b.nodes - a.nodes; });
	list_100p.sort(function(a, b) { return a.nodes - b.nodes; });
	statFillList('areas_100km_list', list_100km, function(r) { return formatNum(r.area, 2) + ' км²'; }, 'areas_100km');
	statFillList('areas_50k_points_list', list_100kp, 'nodes', 'areas_50k_points');
	statFillList('areas_100_points_list', list_100p, 'nodes', 'areas_100_points');
	statFillList('areas_0_list', list_zero, null, 'areas_0');
	statQuery('topo', statTopo);
}
// 'topo' group handler: bucket regions by ring topology — holes,
// multiple outer rings, and tiny islands — and fill the lists.
function statTopo(data) {
	var list_holed = [], list_multi = [], list_100km = [];
	for( var i = 0; i < data.regions.length; i++ ) {
		// `var` added: `region` previously leaked into the global scope.
		var region = data.regions[i];
		if( region.outer > 1 )
			list_multi.push(region);
		if( region.inner > 0 )
			list_holed.push(region);
		if( region.outer > 1 && region.min_area > 0 && region.min_area < KM_LIMIT )
			list_100km.push(region);
	}
	list_multi.sort(function(a, b) { return b.outer - a.outer; });
	list_holed.sort(function(a, b) { return b.inner - a.inner; });
	list_100km.sort(function(a, b) { return a.min_area - b.min_area; });
	statFillList('topo_holes_list', list_holed, 'inner', 'topo_holes');
	statFillList('topo_multi_list', list_multi, 'outer', 'topo_multi');
	statFillList('topo_100km_list', list_100km, function(r) { return formatNum(r.min_area, 2) + ' км²'; }, 'topo_100km');
}