From 2e90ba48e55baab9d88ff6481709b7e2c57bfd93 Mon Sep 17 00:00:00 2001 From: Alexey Zakharenkov <35913079+alexey-zakharenkov@users.noreply.github.com> Date: Thu, 23 Jul 2020 14:01:48 +0300 Subject: [PATCH] Migrated to python3; id-based border identification instead of name-based; store border hierarchy; auto-split by population; docker --- .gitignore | 2 + README.md | 53 +- db/Dockerfile.db | 37 + db/create_extensions.sql | 7 + db/create_index_on_tiles.sql | 3 + db/create_osm_cities_table.sql | 67 + db/create_tables.sql | 41 + db/init_databases.sh | 11 + db/load_borders.sh | 70 + db/load_osm_cities_table.sh | 4 + db/load_tiles.sh | 17 + db/prepare_borders.sh | 33 + db/prepare_tiles.sh | 21 + db/tiles2pg.py | 45 + docker-compose.yaml | 33 + scripts/borders.sql | 33 - scripts/osm_borders.sh | 83 - scripts/process_planet.sh | 49 - scripts/tiles2pg.py | 29 - server/borders_api.py | 921 ---------- server/borders_daemon.py | 73 - web/Dockerfile.web | 8 + web/app/auto_split.py | 501 ++++++ web/app/borders_api.py | 1569 +++++++++++++++++ {server => web/app}/borders_api.wsgi | 0 web/app/borders_daemon.py | 137 ++ {server => web/app}/config.py | 18 +- web/app/countries_structure.py | 417 +++++ {www => web/app/static}/borders.js | 573 ++++-- web/app/static/config.js | 15 + {www => web/app/static}/import.html | 0 .../app/static}/lib/Leaflet.Editable.js | 0 .../app/static}/lib/images/layers-2x.png | Bin {www => web/app/static}/lib/images/layers.png | Bin .../app/static}/lib/images/marker-icon-2x.png | Bin .../app/static}/lib/images/marker-icon.png | Bin .../app/static}/lib/images/marker-shadow.png | Bin .../app/static}/lib/images/spritesheet-2x.png | Bin .../app/static}/lib/images/spritesheet.png | Bin .../app/static}/lib/jquery-1.11.2.min.js | 0 {www => web/app/static}/lib/leaflet-hash.js | 0 {www => web/app/static}/lib/leaflet.css | 0 {www => web/app/static}/lib/leaflet.js | 0 {www => web/app/static}/stat.js | 0 {www => web/app/templates}/index.html | 132 +- {www => 
web/app/templates}/stat.html | 6 +- web/prestart.sh | 20 + web/uwsgi.ini | 5 + www/config.js | 6 - 49 files changed, 3678 insertions(+), 1361 deletions(-) create mode 100644 .gitignore create mode 100644 db/Dockerfile.db create mode 100644 db/create_extensions.sql create mode 100644 db/create_index_on_tiles.sql create mode 100644 db/create_osm_cities_table.sql create mode 100644 db/create_tables.sql create mode 100644 db/init_databases.sh create mode 100755 db/load_borders.sh create mode 100644 db/load_osm_cities_table.sh create mode 100755 db/load_tiles.sh create mode 100644 db/prepare_borders.sh create mode 100755 db/prepare_tiles.sh create mode 100755 db/tiles2pg.py create mode 100644 docker-compose.yaml delete mode 100644 scripts/borders.sql delete mode 100755 scripts/osm_borders.sh delete mode 100755 scripts/process_planet.sh delete mode 100755 scripts/tiles2pg.py delete mode 100755 server/borders_api.py delete mode 100755 server/borders_daemon.py create mode 100644 web/Dockerfile.web create mode 100644 web/app/auto_split.py create mode 100755 web/app/borders_api.py rename {server => web/app}/borders_api.wsgi (100%) create mode 100755 web/app/borders_daemon.py rename {server => web/app}/config.py (59%) create mode 100644 web/app/countries_structure.py rename {www => web/app/static}/borders.js (56%) create mode 100644 web/app/static/config.js rename {www => web/app/static}/import.html (100%) rename {www => web/app/static}/lib/Leaflet.Editable.js (100%) rename {www => web/app/static}/lib/images/layers-2x.png (100%) rename {www => web/app/static}/lib/images/layers.png (100%) rename {www => web/app/static}/lib/images/marker-icon-2x.png (100%) rename {www => web/app/static}/lib/images/marker-icon.png (100%) rename {www => web/app/static}/lib/images/marker-shadow.png (100%) rename {www => web/app/static}/lib/images/spritesheet-2x.png (100%) rename {www => web/app/static}/lib/images/spritesheet.png (100%) rename {www => web/app/static}/lib/jquery-1.11.2.min.js (100%) 
rename {www => web/app/static}/lib/leaflet-hash.js (100%) rename {www => web/app/static}/lib/leaflet.css (100%) rename {www => web/app/static}/lib/leaflet.js (100%) rename {www => web/app/static}/stat.js (100%) rename {www => web/app/templates}/index.html (52%) rename {www => web/app/templates}/stat.html (94%) create mode 100644 web/prestart.sh create mode 100644 web/uwsgi.ini delete mode 100644 www/config.js diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..8d35cb3 --- /dev/null +++ b/.gitignore @@ -0,0 +1,2 @@ +__pycache__ +*.pyc diff --git a/README.md b/README.md index 02146ec..80c6ac2 100644 --- a/README.md +++ b/README.md @@ -2,18 +2,42 @@ В этих каталогах лежит набор инструментов для редактирования набора границ в формате [Osmosis Poly](http://wiki.openstreetmap.org/wiki/Osmosis/Polygon_Filter_File_Format). -Для работы требуется база данных PostgreSQL + PostGIS, инициализированная из -файла `scripts/borders.sql`. Для оценки размера файла MWM нужно заполнить -таблицу `tiles` из файла планеты (см. `scripts/process_planet.sh`). + +## Развёртывание в Docker + +Самое простое — запустить систему в Docker-контейнерах. + +#### Предварительные требования +* Должен быть установлен docker https://docs.docker.com/engine/install/ +* и docker-compose https://docs.docker.com/compose/install/ +* Для всей планеты во время сборки необходимо ~200 Гб дискового пространства +(после сборки — 30 Гб), разворачивание может длиться около суток. + +#### Настройка сборки +В файле docker-compose.yaml нужно выставить желаемый порт, на котором будет +работать веб-интерфейс (сейчас это число 8081 в строке "8081:80"), +и URL с файлом планеты в переменной PLANET_URL. Переменные PLANET_URL_<suffix> +не используются, это просто примеры. Для тестирования советуется подставить +в PLANET_URL небольшой файл, тогда вся сборка займёт несколько минут. 
+ + +## Развёртывание вручную +Для работы требуется база данных PostgreSQL + PostGIS, инициализированная +скриптами из каталога `db`. Последовательность выполнения скриптов и необходимые +переменные окружения см. в `db/Dockerfile.db` и `docker-compose.yaml`. +Для оценки размера файла MWM нужно заполнить +таблицу `tiles` из файла планеты (см. скрипты `db/*tiles*.sh`). Также для обновления и замены границ из OpenStreetMap желательно импортировать -таблицу `osm_borders` — см. `scripts/osm_borders.sh`. Начальный набор границ -для редактирования можно либо загрузить скриптом `scripts/poly2postgis.py`, -либо скопировать из таблицы `osm_borders` по, например, `admin_level=2`. +таблицу `osm_borders` — см. `db/prepare_borders.sh` и `db/load_borders.sh`. +Начальный набор границ для редактирования можно либо загрузить скриптом +`scripts/poly2postgis.py`, либо скопировать из таблицы `osm_borders` по, +например, `admin_level=2`. -После редактирования набор файлов `poly` создаст скрипт `scripts/export_poly.py`. +После редактирования набор файлов `poly` создаст скрипт `scripts/export_poly.py` +или ссылка *Скачать poly - всё* в веб-интерфейсе. -## Серверная часть +#### Серверная часть Два скрипта в каталоге `server` должны работать постоянно на фоне. @@ -25,13 +49,14 @@ в столбце количества данных, и найдя их, пересчитывает. Запустите, если нужна оценка размера MWM. -## Веб-интерфейс +#### Веб-интерфейс -Файлы в каталоге `www` не требуют каких-либо интерпретаторов или выделенных серверов: -просто откройте `index.html` в браузере. На карте нарисованы границы, по клику -на границу панель справа наполнится кнопками. Оттуда можно разрезать и склеивать -границы, переименовывать их, заменять и дополнять из таблицы `osm_borders`, -а также экспортировать в JOSM для сложных модификаций. +Файлы в каталоге `web/app/static` не требуют каких-либо интерпретаторов или +выделенных серверов: просто откройте `index.html` в браузере. 
На карте +нарисованы границы, по клику на границу панель справа наполнится кнопками. +Оттуда можно разрезать и склеивать границы, переименовывать их, заменять и +дополнять из таблицы `osm_borders`, а также экспортировать в JOSM для сложных +модификаций. ## Автор и лицензия diff --git a/db/Dockerfile.db b/db/Dockerfile.db new file mode 100644 index 0000000..8e8ebba --- /dev/null +++ b/db/Dockerfile.db @@ -0,0 +1,37 @@ +FROM postgres:12 + +WORKDIR /borders/ + +RUN apt-get update && apt-get install -y \ + wget \ + postgresql-contrib `# contains hstore extension` \ + postgresql-12-postgis-3 \ + osmctools \ + osm2pgsql \ + python3 \ + python3-psycopg2 + +##git clone https://github.com/mapsme/borders.git mapsme_borders + +ARG PLANET_URL=http://download.geofabrik.de/europe/andorra-latest.osm.pbf + +ENV PLANET=planet-file + +RUN wget "${PLANET_URL}" -O "${PLANET}" + +ENV FILTERED_PLANET=${PLANET}-filtered.o5m + +COPY prepare_borders.sh tiles2pg.py prepare_tiles.sh ./ +RUN ["/bin/bash", "prepare_borders.sh"] +RUN ["/bin/bash", "prepare_tiles.sh"] + +RUN chmod a+w /borders/ + +COPY init_databases.sh /docker-entrypoint-initdb.d/00-init_databases.sh +COPY create_extensions.sql /docker-entrypoint-initdb.d/01-create_extensions.sql +COPY load_borders.sh /docker-entrypoint-initdb.d/10-load_borders.sh +COPY create_tables.sql /docker-entrypoint-initdb.d/20-create_tables.sql +COPY load_tiles.sh /docker-entrypoint-initdb.d/30-load_tiles.sh +COPY create_osm_cities_table.sql /docker-entrypoint-initdb.d/40-create_osm_cities_table.sql +COPY load_osm_cities_table.sh /docker-entrypoint-initdb.d/41-load_osm_cities_table.sh + diff --git a/db/create_extensions.sql b/db/create_extensions.sql new file mode 100644 index 0000000..674af04 --- /dev/null +++ b/db/create_extensions.sql @@ -0,0 +1,7 @@ +\c gis +CREATE EXTENSION postgis; +CREATE EXTENSION hstore; + +\c borders +CREATE EXTENSION postgis; + diff --git a/db/create_index_on_tiles.sql b/db/create_index_on_tiles.sql new file mode 100644 index 
0000000..317d71a --- /dev/null +++ b/db/create_index_on_tiles.sql @@ -0,0 +1,3 @@ +\c borders borders +CREATE INDEX tiles_idx ON tiles USING gist (tile); + diff --git a/db/create_osm_cities_table.sql b/db/create_osm_cities_table.sql new file mode 100644 index 0000000..6789d12 --- /dev/null +++ b/db/create_osm_cities_table.sql @@ -0,0 +1,67 @@ +\c gis postgres + +----------- Collect city polygons +CREATE TABLE osm_cities AS + SELECT + osm_id, + place, + 'polygon'::text AS g_type, -- geometry_type + max(regexp_replace(population, '[ .,]+', '', 'g')::int) AS population, + ST_Buffer(ST_Transform(ST_Collect(way),4326), 0) AS way, + coalesce(max("name"), max("name:en")) AS name + FROM planet_osm_polygon + WHERE place IN ('city', 'town') + AND regexp_replace(population, '[ .,]+', '', 'g') ~ '^\d+$' + GROUP BY osm_id, place; + +----------- Collect city nodes +INSERT INTO osm_cities + SELECT + osm_id, + place, + 'point'::text AS g_type, -- geometry_type + regexp_replace(population, '[ .,]+', '', 'g')::int AS population, + ST_Transform(way,4326) AS way, + coalesce("name", "name:en") AS name + FROM planet_osm_point + WHERE place IN ('city', 'town') + AND regexp_replace(population, '[ .,]+', '', 'g') ~ '^\d+$'; + + +create index osm_cities_gist_idx on osm_cities using gist(way); + + +-- Delete polygons where exists a node within it with the same name + +DELETE from osm_cities WHERE g_type='polygon' and osm_id IN + ( + SELECT p.osm_id + FROM osm_cities n, osm_cities p + WHERE p.g_type='polygon' AND n.g_type='point' + AND ST_Contains(p.way, n.way) + AND (strpos(n.name, p.name) > 0 OR strpos(p.name, n.name) > 0) + ); + + +-- Convert [multi]polygons to points - for further faster requests "is city in region" + +ALTER TABLE osm_cities ADD COLUMN center geometry; + +UPDATE osm_cities c SET center = + ( + CASE WHEN ST_Contains(way, ST_Centroid(way)) --true for 42972 out of 42999 + THEN ST_Centroid(way) + -- for the rest 27 cities choose arbitrary point as a center + ELSE ( + SELECT 
(ST_DumpPoints(way)).geom + FROM osm_cities + WHERE osm_id = c.osm_id + LIMIT 1 + ) + END + ); + +CREATE INDEX osm_cities_center_gist_idx ON osm_cities USING gist(center); +DROP INDEX osm_cities_gist_idx; +ALTER TABLE osm_cities DROP column way; + diff --git a/db/create_tables.sql b/db/create_tables.sql new file mode 100644 index 0000000..a5ab064 --- /dev/null +++ b/db/create_tables.sql @@ -0,0 +1,41 @@ +\c borders borders + +CREATE TABLE tiles ( + tile geometry NOT NULL, + count INTEGER NOT NULL DEFAULT 0 +); + +CREATE TABLE borders ( + id BIGINT PRIMARY KEY, + parent_id BIGINT REFERENCES borders(id), + name VARCHAR(200), + geom geometry NOT NULL, + disabled boolean NOT NULL DEFAULT FALSE, + count_k INTEGER, + modified TIMESTAMP NOT NULL, + cmnt VARCHAR(500) +); +CREATE INDEX borders_idx ON borders USING gist (geom); +CREATE INDEX borders_parent_id_idx ON borders (parent_id); + +CREATE TABLE borders_backup ( + backup VARCHAR(30) NOT NULL, + id BIGINT NOT NULL, + parent_id BIGINT, + name VARCHAR(200) NOT NULL, + geom geometry NOT NULL, + disabled boolean NOT NULL DEFAULT FALSE, + count_k INTEGER, + modified TIMESTAMP NOT NULL, + cmnt VARCHAR(500), + PRIMARY KEY (backup, id) +); + +CREATE TABLE splitting ( + osm_border_id BIGINT NOT NULL REFERENCES osm_borders(osm_id), -- reference to parent osm region + id BIGINT NOT NULL, -- representative subregion id + city_population_thr INT NOT NULL, + cluster_population_thr INT NOT NULL, + geom geometry NOT NULL +); +CREATE INDEX splitting_idx ON splitting (osm_border_id, city_population_thr, cluster_population_thr); diff --git a/db/init_databases.sh b/db/init_databases.sh new file mode 100644 index 0000000..741ee77 --- /dev/null +++ b/db/init_databases.sh @@ -0,0 +1,11 @@ +#!/bin/bash +set -e + +psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" <<-EOSQL + CREATE USER borders WITH PASSWORD 'borders'; + CREATE DATABASE gis; + CREATE DATABASE borders; + GRANT ALL PRIVILEGES ON DATABASE borders TO 
borders; + + -- GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA public TO borders; +EOSQL diff --git a/db/load_borders.sh b/db/load_borders.sh new file mode 100755 index 0000000..20e8926 --- /dev/null +++ b/db/load_borders.sh @@ -0,0 +1,70 @@ +#!/bin/sh +set -e +OSM2PGSQL=osm2pgsql +DATABASE=gis +DATABASE_BORDERS=borders +OSM2PGSQL_KEYS='--cache 2000 --number-processes 6' +OSM2PGSQL_STYLE= + +if [[ "`uname`" == 'Darwin' ]]; then + WHICH='which -s' + MKTEMP='mktemp -t ' +else + WHICH=which + MKTEMP='mktemp --suff=' +fi + +if ! $WHICH $OSM2PGSQL; then + echo "No osm2pgsql found." + exit 1 +fi + +# Load filtered data into an osm2pgsql database +echo Loading data into the database + +# Creating a style file if we weren't provided with one +if [ -z "$OSM2PGSQL_STYLE" ]; then + OSM2PGSQL_STYLE=$(${MKTEMP}osm2pgsql_style) + OSM2PGSQL_STYLE_TMP=1 + cat > $OSM2PGSQL_STYLE < $PLANET-nodes.csv + +echo Sorting node list +LC_ALL=C sort -o $PLANET-nodes-sorted.csv $PLANET-nodes.csv +rm $PLANET-nodes.csv + +echo Counting unique tiles +LC_ALL=C uniq -c $PLANET-nodes-sorted.csv $PLANET-tiles.csv +rm $PLANET-nodes-sorted.csv + diff --git a/db/tiles2pg.py b/db/tiles2pg.py new file mode 100755 index 0000000..d31b6aa --- /dev/null +++ b/db/tiles2pg.py @@ -0,0 +1,45 @@ +#!/usr/bin/python3 +import argparse +import re +import sys + +import psycopg2 + + +parser = argparse.ArgumentParser(description='Import tiles from CSV into a database') +parser.add_argument('-t', '--table', default='tiles', help='Target directory (default=tiles)') +parser.add_argument('-d', '--database', default='borders', help='Database name (default=borders)') +parser.add_argument('-v', dest='verbose', action='store_true', help='Print status messages') +options = parser.parse_args() + +with psycopg2.connect(f'dbname={options.database}') as conn: + with conn.cursor() as cur: + cnt = 0 + for line in sys.stdin: + m = re.match(r'^\s*(\d+)\s+(-?\d+)\s+(-?\d+)', line) + if m: + (count, lat, lon) = (int(m.group(1)), 
float(m.group(2))/100, float(m.group(3))/100) + cur.execute(f''' + INSERT INTO {options.table} (count, tile) + VALUES ( + %s, + ST_SetSRID( + ST_MakeBox2d( + ST_Point(%s, %s), + ST_Point(%s, %s) + ), + 4326 + ) + ) + ''', (count, lon, lat, lon + 0.01, lat + 0.01) + ) + cnt += 1 + else: + print(line) + + if options.verbose: + print('Commit') + conn.commit() + if options.verbose: + print(f'Uploaded {cnt} tiles') + diff --git a/docker-compose.yaml b/docker-compose.yaml new file mode 100644 index 0000000..2854dc1 --- /dev/null +++ b/docker-compose.yaml @@ -0,0 +1,33 @@ +version: "3" +services: + web: + build: + context: ./web + dockerfile: Dockerfile.web + container_name: web + restart: always + depends_on: + - db + links: + - "db:dbhost" + ports: + - "8081:80" + environment: + POSTGRES_USER: postgres + POSTGRES_PASSWORD: postgres + + db: + build: + context: ./db + dockerfile: Dockerfile.db + args: + PLANET_URL: http://generator.testdata.mapsme.cloud.devmail.ru/planet/planet-latest.o5m + PLANET_URL_external: https://planet.openstreetmap.org/pbf/planet-latest.osm.pbf + PLANET_URL_small: http://download.geofabrik.de/africa/eritrea-latest.osm.pbf + container_name: db + restart: always + environment: + POSTGRES_HOST_AUTH_METHOD: password + POSTGRES_USER: postgres + POSTGRES_PASSWORD: postgres + diff --git a/scripts/borders.sql b/scripts/borders.sql deleted file mode 100644 index 1f99cc2..0000000 --- a/scripts/borders.sql +++ /dev/null @@ -1,33 +0,0 @@ -create table tiles ( - tile geometry not null, - count integer not null default 0 -); - -create table borders ( - name varchar(200) not null primary key, - geom geometry not null, - disabled boolean not null default FALSE, - count_k integer, - modified timestamp not null, - cmnt varchar(500) -); - -create table borders_backup ( - backup varchar(30) not null, - name varchar(200) not null, - geom geometry not null, - disabled boolean not null default FALSE, - count_k integer, - modified timestamp not null, - cmnt varchar(500), 
- primary key (backup, name) -); - -create table points ( - geom geometry not null, - type integer not null default 0 -); - -create index border_idx on borders using gist (geom); -create index tiles_idx on tiles using gist (tile); -create index points_ids on points using gist (geom); diff --git a/scripts/osm_borders.sh b/scripts/osm_borders.sh deleted file mode 100755 index a705c81..0000000 --- a/scripts/osm_borders.sh +++ /dev/null @@ -1,83 +0,0 @@ -#!/bin/sh -OSM2PGSQL=osm2pgsql -OSMFILTER=./osmfilter -OSMCONVERT=./osmconvert -DATABASE=gis -DATABASE_BORDERS=borders -OSM2PGSQL_KEYS='--cache 2000 --number-processes 6' -OSM2PGSQL_STYLE= - -if [[ ! -r "$1" ]] -then - echo Import borders and towns from the planet into osm_borders table - echo Syntax: $0 \ - exit 1 -fi -PLANET=$1 - -if [[ "`uname`" == 'Darwin' ]]; then - WHICH='which -s' - MKTEMP='mktemp -t ' -else - WHICH=which - MKTEMP='mktemp --suff=' -fi - -# 0. Test for all required tools and files -if ! $WHICH psql; then - echo "Do you have postgresql installed?" - exit 1 -fi -if ! $WHICH $OSM2PGSQL; then - echo "No osm2pgsql found." - exit 1 -fi -if [ ! -x "$OSMFILTER" ]; then - wget -O - http://m.m.i24.cc/osmfilter.c |cc -x c - -O3 -o $OSMFILTER -fi -if [ ! -x "$OSMCONVERT" ]; then - wget -O - http://m.m.i24.cc/osmconvert.c | cc -x c - -lz -O3 -o $OSMCONVERT -fi - -# 1. Filter planet file, leaving only administrative borders (and cities) -echo Filtering planet -FILTERED=$(${MKTEMP}osmadm) -$OSMFILTER $PLANET --keep="boundary=administrative or place=" --out-o5m -o=$FILTERED || exit 3 - -# 2. Load filtered data into an osm2pgsql database -echo Loading data into the database - -# Creating a style file if we weren't provided with one -if [ -z "$OSM2PGSQL_STYLE" ]; then - OSM2PGSQL_STYLE=$(${MKTEMP}osm2pgsql_style) - OSM2PGSQL_STYLE_TMP=1 - cat > $OSM2PGSQL_STYLE < - exit 1 -fi - -set -e -u - -if ! which -s psql; then - echo "Do you have postgresql installed?" - exit 1 -fi -if [ ! 
-x "$OSMCONVERT" ]; then - wget -O - http://m.m.i24.cc/osmconvert.c | cc -x c - -lz -O3 -o $OSMCONVERT -fi - -PLANET=$(echo $(basename $1) | sed 's/\..*//') - -echo Extracting node coordinates -$OSMCONVERT --out-osm $1 | perl -n -e 'print sprintf "%d %d\n", $1*100, $2*100 if / $PLANET-nodes.csv - -echo Sorting node list -LC_ALL=C sort -o $PLANET-nodes-sorted.csv $PLANET-nodes.csv -rm $PLANET-nodes.csv - -echo Counting unique tiles -LC_ALL=C uniq -c $PLANET-nodes-sorted.csv $PLANET-tiles.csv -rm $PLANET-nodes-sorted.csv - -echo Cleaning up tiles table and index -psql $DATABASE -c "DELETE FROM $TABLE; DROP INDEX IF EXISTS ${TABLE}_idx;" - -echo Loading tiles into the database -pv $PLANET-tiles.csv | python "$(dirname "$0")/tiles2pg.py" -d $DATABASE -t $TABLE -rm $PLANET-tiles.csv - -echo Indexing tiles -psql $DATABASE -c "CREATE INDEX ${TABLE}_idx ON $TABLE USING GIST (tile);" - -echo Dumping the table -pg_dump -t $TABLE $DATABASE | gzip > $PLANET-tiles.sql.gz - -echo Done! diff --git a/scripts/tiles2pg.py b/scripts/tiles2pg.py deleted file mode 100755 index 0e25adc..0000000 --- a/scripts/tiles2pg.py +++ /dev/null @@ -1,29 +0,0 @@ -#!/usr/bin/python -import psycopg2, sys, re, argparse - -parser = argparse.ArgumentParser(description='Import tiles from CSV into a database') -parser.add_argument('-t', '--table', default='tiles', help='Target directory (default=tiles)') -parser.add_argument('-d', '--database', default='borders', help='Database name (default=borders)') -parser.add_argument('-v', dest='verbose', action='store_true', help='Print status messages') -options = parser.parse_args() - -conn = psycopg2.connect("dbname={}".format(options.database)) -cur = conn.cursor() - -cnt = 0 -for line in sys.stdin: - m = re.match(r'^\s*(\d+)\s+(-?\d+)\s+(-?\d+)', line) - if m: - (count, lat, lon) = (int(m.group(1)), float(m.group(2))/100, float(m.group(3))/100) - cur.execute('insert into {} (count, tile) values (%s, ST_SetSRID(ST_MakeBox2d(ST_Point(%s, %s), ST_Point(%s, %s)), 
4326));'.format(options.table), (count, lon, lat, lon + 0.01, lat + 0.01)) - cnt = cnt + 1 - else: - print line - -if options.verbose: - print 'Commit' -conn.commit() -if options.verbose: - print 'Uploaded {} tiles'.format(cnt) -cur.close() -conn.close() diff --git a/server/borders_api.py b/server/borders_api.py deleted file mode 100755 index 6adfd50..0000000 --- a/server/borders_api.py +++ /dev/null @@ -1,921 +0,0 @@ -#!/usr/bin/python -from flask import Flask, g, request, json, jsonify, abort, Response, send_file, send_from_directory -from flask.ext.cors import CORS -from flask.ext.compress import Compress -import psycopg2 -import io, re, zipfile, unicodedata -import config - -try: - from lxml import etree - LXML = True -except: - LXML = False - -app = Flask(__name__) -app.debug=config.DEBUG -Compress(app) -CORS(app) - -@app.route('/') -def hello_world(): - return 'Hello World!' - -@app.route('/www/') -def send_js(path): - if config.DEBUG: - return send_from_directory('../www/', path) - abort(404) - -@app.before_request -def before_request(): - g.conn = psycopg2.connect(config.CONNECTION) - -@app.teardown_request -def teardown(exception): - conn = getattr(g, 'conn', None) - if conn is not None: - conn.close() - -@app.route('/bbox') -def query_bbox(): - xmin = request.args.get('xmin') - xmax = request.args.get('xmax') - ymin = request.args.get('ymin') - ymax = request.args.get('ymax') - simplify_l = request.args.get('simplify') - if simplify_l == '2': - simplify = 0.1 - elif simplify_l == '1': - simplify = 0.01 - else: - simplify = 0 - table = request.args.get('table') - if table in config.OTHER_TABLES: - table = config.OTHER_TABLES[table] - else: - table = config.TABLE - - cur = g.conn.cursor() - cur.execute("""SELECT name, ST_AsGeoJSON({geom}, 7) as geometry, ST_NPoints(geom), - modified, disabled, count_k, cmnt, - round(CASE WHEN ST_Area(geography(geom)) = 'NaN' THEN 0 ELSE ST_Area(geography(geom)) END) as area - FROM {table} - WHERE geom && 
ST_MakeBox2D(ST_Point(%s, %s), ST_Point(%s, %s)) - order by area desc; - """.format(table=table, geom='ST_SimplifyPreserveTopology(geom, {})'.format(simplify) if simplify > 0 else 'geom'), - (xmin, ymin, xmax, ymax)) - result = [] - for rec in cur: - props = { 'name': rec[0], 'nodes': rec[2], 'modified': rec[3], 'disabled': rec[4], 'count_k': rec[5], 'comment': rec[6], 'area': rec[7] } - feature = { 'type': 'Feature', 'geometry': json.loads(rec[1]), 'properties': props } - result.append(feature) - return jsonify(type='FeatureCollection', features=result) - -@app.route('/small') -def query_small_in_bbox(): - xmin = request.args.get('xmin') - xmax = request.args.get('xmax') - ymin = request.args.get('ymin') - ymax = request.args.get('ymax') - table = request.args.get('table') - if table in config.OTHER_TABLES: - table = config.OTHER_TABLES[table] - else: - table = config.TABLE - cur = g.conn.cursor() - cur.execute('''SELECT name, round(ST_Area(geography(ring))) as area, ST_X(ST_Centroid(ring)), ST_Y(ST_Centroid(ring)) - FROM ( - SELECT name, (ST_Dump(geom)).geom as ring - FROM {table} - WHERE geom && ST_MakeBox2D(ST_Point(%s, %s), ST_Point(%s, %s)) - ) g - WHERE ST_Area(geography(ring)) < %s;'''.format(table=table), (xmin, ymin, xmax, ymax, config.SMALL_KM2 * 1000000)) - result = [] - for rec in cur: - result.append({ 'name': rec[0], 'area': rec[1], 'lon': float(rec[2]), 'lat': float(rec[3]) }) - return jsonify(features=result) - -@app.route('/routing') -def query_routing_points(): - xmin = request.args.get('xmin') - xmax = request.args.get('xmax') - ymin = request.args.get('ymin') - ymax = request.args.get('ymax') - cur = g.conn.cursor() - try: - cur.execute('''SELECT ST_X(geom), ST_Y(geom), type - FROM points - WHERE geom && ST_MakeBox2D(ST_Point(%s, %s), ST_Point(%s, %s) - );''', (xmin, ymin, xmax, ymax)) - except psycopg2.Error, e: - return jsonify(features=[]) - result = [] - for rec in cur: - result.append({ 'lon': rec[0], 'lat': rec[1], 'type': rec[2] }) - 
return jsonify(features=result) - -@app.route('/crossing') -def query_crossing(): - xmin = request.args.get('xmin') - xmax = request.args.get('xmax') - ymin = request.args.get('ymin') - ymax = request.args.get('ymax') - region = request.args.get('region', '').encode('utf-8') - points = request.args.get('points') == '1' - rank = request.args.get('rank') or '4' - cur = g.conn.cursor() - sql = """SELECT id, ST_AsGeoJSON({line}, 7) as geometry, region, processed FROM {table} - WHERE line && ST_MakeBox2D(ST_Point(%s, %s), ST_Point(%s, %s)) and processed = 0 {reg} and rank <= %s; - """.format(table=config.CROSSING_TABLE, reg='and region = %s' if region else '', line='line' if not points else 'ST_Centroid(line)') - params = [xmin, ymin, xmax, ymax] - if region: - params.append(region) - params.append(rank) - cur.execute(sql, tuple(params)) - result = [] - for rec in cur: - props = { 'id': rec[0], 'region': rec[2], 'processed': rec[3] } - feature = { 'type': 'Feature', 'geometry': json.loads(rec[1]), 'properties': props } - result.append(feature) - return jsonify(type='FeatureCollection', features=result) - -@app.route('/tables') -def check_osm_table(): - osm = False - backup = False - old = [] - crossing = False - try: - cur = g.conn.cursor() - cur.execute('select osm_id, ST_Area(way), admin_level, name from {} limit 2;'.format(config.OSM_TABLE)) - if cur.rowcount == 2: - osm = True - except psycopg2.Error, e: - pass - try: - cur.execute('select backup, name, ST_Area(geom), modified, disabled, count_k, cmnt from {} limit 2;'.format(config.BACKUP)) - backup = True - except psycopg2.Error, e: - pass - for t, tname in config.OTHER_TABLES.iteritems(): - try: - cur.execute('select name, ST_Area(geom), modified, disabled, count_k, cmnt from {} limit 2;'.format(tname)) - if cur.rowcount == 2: - old.append(t) - except psycopg2.Error, e: - pass - try: - cur = g.conn.cursor() - cur.execute('select id, ST_Length(line), region, processed from {} limit 
2;'.format(config.CROSSING_TABLE)) - if cur.rowcount == 2: - crossing = True - except psycopg2.Error, e: - pass - return jsonify(osm=osm, tables=old, readonly=config.READONLY, backup=backup, crossing=crossing) - -@app.route('/search') -def search(): - query = request.args.get('q').encode('utf-8') - cur = g.conn.cursor() - cur.execute('select ST_XMin(geom), ST_YMin(geom), ST_XMax(geom), ST_YMax(geom) from borders where name ilike %s limit 1', ('%{0}%'.format(query),)) - if cur.rowcount > 0: - rec = cur.fetchone() - return jsonify(bounds=[rec[0], rec[1], rec[2], rec[3]]) - return jsonify(status='not found') - -@app.route('/split') -def split(): - if config.READONLY: - abort(405) - name = request.args.get('name').encode('utf-8') - line = request.args.get('line') - cur = g.conn.cursor() - # check that we're splitting a single polygon - cur.execute('select ST_NumGeometries(geom) from {} where name = %s;'.format(config.TABLE), (name,)) - res = cur.fetchone() - if not res or res[0] != 1: - return jsonify(status='border should have one outer ring') - cur.execute('select ST_AsText((ST_Dump(ST_Split(geom, ST_GeomFromText(%s, 4326)))).geom) from {} where name = %s;'.format(config.TABLE), (line, name)) - if cur.rowcount > 1: - # no use of doing anything if the polygon wasn't modified - geometries = [] - for res in cur: - geometries.append(res[0]) - # get disabled flag and delete old border - cur.execute('select disabled from {} where name = %s;'.format(config.TABLE), (name,)) - disabled = cur.fetchone()[0] - cur.execute('delete from {} where name = %s;'.format(config.TABLE), (name,)) - # find untaken name series - base_name = name - found = False - while not found: - base_name = base_name + '_' - cur.execute('select count(1) from {} where name like %s;'.format(config.TABLE), (name.replace('_', '\_').replace('%', '\%') + '%',)) - found = cur.fetchone()[0] == 0 - # insert new geometries - counter = 1 - for geom in geometries: - cur.execute('insert into {table} (name, geom, 
disabled, count_k, modified) values (%s, ST_GeomFromText(%s, 4326), %s, -1, now());'.format(table=config.TABLE), ('{}{}'.format(base_name, counter), geom, disabled)) - counter = counter + 1 - g.conn.commit() - - return jsonify(status='ok') - -@app.route('/join') -def join_borders(): - if config.READONLY: - abort(405) - name = request.args.get('name').encode('utf-8') - name2 = request.args.get('name2').encode('utf-8') - cur = g.conn.cursor() - cur.execute('update {table} set geom = ST_Union(geom, b2.g), count_k = -1 from (select geom as g from {table} where name = %s) as b2 where name = %s;'.format(table=config.TABLE), (name2, name)) - cur.execute('delete from {} where name = %s;'.format(config.TABLE), (name2,)) - g.conn.commit() - return jsonify(status='ok') - -@app.route('/point') -def find_osm_borders(): - lat = request.args.get('lat') - lon = request.args.get('lon') - cur = g.conn.cursor() - cur.execute("select osm_id, name, admin_level, (case when ST_Area(geography(way)) = 'NaN' then 0 else ST_Area(geography(way))/1000000 end) as area_km from {table} where ST_Contains(way, ST_SetSRID(ST_Point(%s, %s), 4326)) order by admin_level desc, name asc;".format(table=config.OSM_TABLE), (lon, lat)) - result = [] - for rec in cur: - b = { 'id': rec[0], 'name': rec[1], 'admin_level': rec[2], 'area': rec[3] } - result.append(b) - return jsonify(borders=result) - -@app.route('/from_osm') -def copy_from_osm(): - if config.READONLY: - abort(405) - osm_id = request.args.get('id') - name = request.args.get('name').encode('utf-8') - cur = g.conn.cursor() - cur.execute('insert into {table} (geom, name, modified, count_k) select o.way as way, {name}, now(), -1 from {osm} o where o.osm_id = %s limit 1;'.format(table=config.TABLE, osm=config.OSM_TABLE, name='%s' if name != '' else '%s || o.name'), (name, osm_id)) - g.conn.commit() - return jsonify(status='ok') - -@app.route('/rename') -def set_name(): - if config.READONLY: - abort(405) - name = 
request.args.get('name').encode('utf-8') - new_name = request.args.get('newname').encode('utf-8') - cur = g.conn.cursor() - cur.execute('update {} set name = %s where name = %s;'.format(config.TABLE), (new_name, name)) - g.conn.commit() - return jsonify(status='ok') - -@app.route('/delete') -def delete_border(): - if config.READONLY: - abort(405) - name = request.args.get('name').encode('utf-8') - cur = g.conn.cursor() - cur.execute('delete from {} where name = %s;'.format(config.TABLE), (name,)) - g.conn.commit() - return jsonify(status='ok') - -@app.route('/disable') -def disable_border(): - if config.READONLY: - abort(405) - name = request.args.get('name').encode('utf-8') - cur = g.conn.cursor() - cur.execute('update {} set disabled = true where name = %s;'.format(config.TABLE), (name,)) - g.conn.commit() - return jsonify(status='ok') - -@app.route('/enable') -def enable_border(): - if config.READONLY: - abort(405) - name = request.args.get('name').encode('utf-8') - cur = g.conn.cursor() - cur.execute('update {} set disabled = false where name = %s;'.format(config.TABLE), (name,)) - g.conn.commit() - return jsonify(status='ok') - -@app.route('/comment', methods=['POST']) -def update_comment(): - name = request.form['name'].encode('utf-8') - comment = request.form['comment'].encode('utf-8') - cur = g.conn.cursor() - cur.execute('update {} set cmnt = %s where name = %s;'.format(config.TABLE), (comment, name)) - g.conn.commit() - return jsonify(status='ok') - -@app.route('/divpreview') -def divide_preview(): - like = request.args.get('like').encode('utf-8') - query = request.args.get('query') - cur = g.conn.cursor() - cur.execute('select name, ST_AsGeoJSON(ST_Simplify(way, 0.01)) as way from {table}, (select way as pway from {table} where name like %s) r where ST_Contains(r.pway, way) and {query};'.format(table=config.OSM_TABLE, query=query), (like,)) - result = [] - for rec in cur: - feature = { 'type': 'Feature', 'geometry': json.loads(rec[1]), 'properties': { 
'name': rec[0] } } - result.append(feature) - return jsonify(type='FeatureCollection', features=result) - -@app.route('/divide') -def divide(): - if config.READONLY: - abort(405) - name = request.args.get('name').encode('utf-8') - like = request.args.get('like').encode('utf-8') - query = request.args.get('query') - prefix = request.args.get('prefix', '').encode('utf-8') - if prefix != '': - prefix = '{}_'.format(prefix); - cur = g.conn.cursor() - cur.execute('''insert into {table} (geom, name, modified, count_k) - select o.way as way, %s || name, now(), -1 - from {osm} o, ( - select way from {osm} where name like %s - ) r - where ST_Contains(r.way, o.way) and {query}; - '''.format(table=config.TABLE, osm=config.OSM_TABLE, query=query), (prefix, like,)) - cur.execute('delete from {} where name = %s;'.format(config.TABLE), (name,)) - g.conn.commit() - return jsonify(status='ok') - -@app.route('/chop1') -def chop_largest_or_farthest(): - if config.READONLY: - abort(405) - name = request.args.get('name').encode('utf-8') - cur = g.conn.cursor() - cur.execute('select ST_NumGeometries(geom) from {} where name = %s;'.format(config.TABLE), (name,)) - res = cur.fetchone() - if not res or res[0] < 2: - return jsonify(status='border should have more than one outer ring') - cur.execute("""INSERT INTO {table} (name, disabled, modified, geom) - SELECT name, disabled, modified, geom from - ( - (WITH w AS (SELECT name, disabled, (ST_Dump(geom)).geom AS g FROM {table} WHERE name = %s) - (SELECT name||'_main' as name, disabled, now() as modified, g as geom, ST_Area(g) as a FROM w ORDER BY a DESC LIMIT 1) - UNION ALL - SELECT name||'_small' as name, disabled, now() as modified, ST_Collect(g) AS geom, ST_Area(ST_Collect(g)) as a - FROM (SELECT name, disabled, g, ST_Area(g) AS a FROM w ORDER BY a DESC OFFSET 1) ww - GROUP BY name, disabled) - ) x;""".format(table=config.TABLE), (name,)) - cur.execute('delete from {} where name = %s;'.format(config.TABLE), (name,)) - g.conn.commit() - 
return jsonify(status='ok') - -@app.route('/hull') -def draw_hull(): - if config.READONLY: - abort(405) - name = request.args.get('name').encode('utf-8') - cur = g.conn.cursor() - cur.execute('select ST_NumGeometries(geom) from {} where name = %s;'.format(config.TABLE), (name,)) - res = cur.fetchone() - if not res or res[0] < 2: - return jsonify(status='border should have more than one outer ring') - cur.execute('update {} set geom = ST_ConvexHull(geom) where name = %s;'.format(config.TABLE), (name,)) - g.conn.commit() - return jsonify(status='ok') - -@app.route('/fixcrossing') -def fix_crossing(): - if config.READONLY: - abort(405) - preview = request.args.get('preview') == '1' - region = request.args.get('region').encode('utf-8') - if region is None: - return jsonify(status='Please specify a region') - ids = request.args.get('ids') - if ids is None or len(ids) == 0: - return jsonify(status='Please specify a list of line ids') - ids = tuple(ids.split(',')) - cur = g.conn.cursor() - if preview: - cur.execute(""" - WITH lines as (SELECT ST_Buffer(ST_Collect(line), 0.002, 1) as g FROM {cross} WHERE id IN %s) - SELECT ST_AsGeoJSON(ST_Collect(ST_MakePolygon(er.ring))) FROM - ( - SELECT ST_ExteriorRing((ST_Dump(ST_Union(ST_Buffer(geom, 0.0), lines.g))).geom) as ring FROM {table}, lines WHERE name = %s - ) as er - """.format(table=config.TABLE, cross=config.CROSSING_TABLE), (ids, region)) - res = cur.fetchone() - if not res: - return jsonify(status='Failed to extend geometry') - f = { "type": "Feature", "properties": {}, "geometry": json.loads(res[0]) } - #return jsonify(type="FeatureCollection", features=[f]) - return jsonify(type="Feature", properties={}, geometry=json.loads(res[0])) - else: - cur.execute(""" - WITH lines as (SELECT ST_Buffer(ST_Collect(line), 0.002, 1) as g FROM {cross} WHERE id IN %s) - UPDATE {table} SET geom = res.g FROM - ( - SELECT ST_Collect(ST_MakePolygon(er.ring)) as g FROM - ( - SELECT ST_ExteriorRing((ST_Dump(ST_Union(ST_Buffer(geom, 0.0), 
lines.g))).geom) as ring FROM {table}, lines WHERE name = %s - ) as er - ) as res - WHERE name = %s - """.format(table=config.TABLE, cross=config.CROSSING_TABLE), (ids, region, region)) - cur.execute(""" - UPDATE {table} b SET geom = ST_Difference(b.geom, o.geom) - FROM {table} o - WHERE ST_Overlaps(b.geom, o.geom) - AND o.name = %s - """.format(table=config.TABLE), (region,)) - cur.execute("UPDATE {cross} SET processed = 1 WHERE id IN %s".format(cross=config.CROSSING_TABLE), (ids,)) - g.conn.commit() - return jsonify(status='ok') - - -@app.route('/backup') -def backup_do(): - if config.READONLY: - abort(405) - cur = g.conn.cursor() - cur.execute("SELECT to_char(now(), 'IYYY-MM-DD HH24:MI'), max(backup) from {};".format(config.BACKUP)) - (timestamp, tsmax) = cur.fetchone() - if timestamp == tsmax: - return jsonify(status='please try again later') - cur.execute('INSERT INTO {backup} (backup, name, geom, disabled, count_k, modified, cmnt) SELECT %s, name, geom, disabled, count_k, modified, cmnt from {table};'.format(backup=config.BACKUP, table=config.TABLE), (timestamp,)) - g.conn.commit() - return jsonify(status='ok') - -@app.route('/restore') -def backup_restore(): - if config.READONLY: - abort(405) - ts = request.args.get('timestamp') - cur = g.conn.cursor() - cur.execute('SELECT count(1) from {} where backup = %s;'.format(config.BACKUP), (ts,)) - (count,) = cur.fetchone() - if count <= 0: - return jsonify(status='no such timestamp') - cur.execute('DELETE FROM {};'.format(config.TABLE)) - cur.execute('INSERT INTO {table} (name, geom, disabled, count_k, modified, cmnt) SELECT name, geom, disabled, count_k, modified, cmnt from {backup} where backup = %s;'.format(backup=config.BACKUP, table=config.TABLE), (ts,)) - g.conn.commit() - return jsonify(status='ok') - -@app.route('/backlist') -def backup_list(): - cur = g.conn.cursor() - cur.execute("SELECT backup, count(1) from {} group by backup order by backup desc;".format(config.BACKUP)) - result = [] - for res in cur: 
- result.append({ 'timestamp': res[0], 'text': res[0], 'count': res[1] }) - # todo: count number of different objects for the last one - return jsonify(backups=result) - -@app.route('/backdelete') -def backup_delete(): - if config.READONLY: - abort(405) - ts = request.args.get('timestamp') - cur = g.conn.cursor() - cur.execute('SELECT count(1) from {} where backup = %s;'.format(config.BACKUP), (ts,)) - (count,) = cur.fetchone() - if count <= 0: - return jsonify(status='no such timestamp') - cur.execute('DELETE FROM {} WHERE backup = %s;'.format(config.BACKUP), (ts,)) - g.conn.commit() - return jsonify(status='ok') - -@app.route('/josm') -def make_osm(): - xmin = request.args.get('xmin') - xmax = request.args.get('xmax') - ymin = request.args.get('ymin') - ymax = request.args.get('ymax') - table = request.args.get('table') - if table in config.OTHER_TABLES: - table = config.OTHER_TABLES[table] - else: - table = config.TABLE - - cur = g.conn.cursor() - cur.execute('SELECT name, disabled, ST_AsGeoJSON(geom, 7) as geometry FROM {table} WHERE ST_Intersects(ST_SetSRID(ST_Buffer(ST_MakeBox2D(ST_Point(%s, %s), ST_Point(%s, %s)), 0.3), 4326), geom);'.format(table=table), (xmin, ymin, xmax, ymax)) - - node_pool = { 'id': 1 } # 'lat_lon': id - regions = [] # { name: name, rings: [['outer', [ids]], ['inner', [ids]], ...] 
} - for rec in cur: - geometry = json.loads(rec[2]) - rings = [] - if geometry['type'] == 'Polygon': - parse_polygon(node_pool, rings, geometry['coordinates']) - elif geometry['type'] == 'MultiPolygon': - for polygon in geometry['coordinates']: - parse_polygon(node_pool, rings, polygon) - if len(rings) > 0: - regions.append({ 'name': rec[0], 'disabled': rec[1], 'rings': rings }) - - xml = '' - for latlon, node_id in node_pool.items(): - if latlon != 'id': - (lat, lon) = latlon.split() - xml = xml + ''.format(id=node_id, lat=lat, lon=lon) - - wrid = 1 - ways = {} # json: id - for region in regions: - w1key = ring_hash(region['rings'][0][1]) - if not config.JOSM_FORCE_MULTI and len(region['rings']) == 1 and w1key not in ways: - # simple case: a way - ways[w1key] = wrid - xml = xml + ''.format(id=wrid) - xml = xml + ''.format(quoteattr(region['name'])) - if region['disabled']: - xml = xml + '' - for nd in region['rings'][0][1]: - xml = xml + ''.format(ref=nd) - xml = xml + '' - wrid = wrid + 1 - else: - # multipolygon - rxml = ''.format(id=wrid) - wrid = wrid + 1 - rxml = rxml + '' - rxml = rxml + ''.format(quoteattr(region['name'])) - if region['disabled']: - rxml = rxml + '' - for ring in region['rings']: - wkey = ring_hash(ring[1]) - if wkey in ways: - # already have that way - rxml = rxml + ''.format(ref=ways[wkey], role=ring[0]) - else: - ways[wkey] = wrid - xml = xml + ''.format(id=wrid) - rxml = rxml + ''.format(ref=wrid, role=ring[0]) - for nd in ring[1]: - xml = xml + ''.format(ref=nd) - xml = xml + '' - wrid = wrid + 1 - xml = xml + rxml + '' - xml = xml + '' - return Response(xml, mimetype='application/x-osm+xml') - -@app.route('/josmbord') -def josm_borders_along(): - name = request.args.get('name') - line = request.args.get('line') - cur = g.conn.cursor() - # select all outer osm borders inside a buffer of the given line - cur.execute(""" - with linestr as ( - select ST_Intersection(geom, ST_Buffer(ST_GeomFromText(%s, 4326), 0.2)) as line - from {table} 
where name = %s - ), osmborders as ( - select (ST_Dump(way)).geom as g from {osm}, linestr where ST_Intersects(line, way) - ) - select ST_AsGeoJSON((ST_Dump(ST_LineMerge(ST_Intersection(ST_Collect(ST_ExteriorRing(g)), line)))).geom) from osmborders, linestr group by line - """.format(table=config.TABLE, osm=config.OSM_TABLE), (line, name)) - - node_pool = { 'id': 1 } # 'lat_lon': id - lines = [] - for rec in cur: - geometry = json.loads(rec[0]) - if geometry['type'] == 'LineString': - nodes = parse_linestring(node_pool, geometry['coordinates']) - elif geometry['type'] == 'MultiLineString': - nodes = [] - for line in geometry['coordinates']: - nodes.extend(parse_linestring(node_pool, line)) - if len(nodes) > 0: - lines.append(nodes) - - xml = '' - for latlon, node_id in node_pool.items(): - if latlon != 'id': - (lat, lon) = latlon.split() - xml = xml + ''.format(id=node_id, lat=lat, lon=lon) - - wrid = 1 - for line in lines: - xml = xml + ''.format(id=wrid) - for nd in line: - xml = xml + ''.format(ref=nd) - xml = xml + '' - wrid = wrid + 1 - xml = xml + '' - return Response(xml, mimetype='application/x-osm+xml') - -def quoteattr(value): - value = value.replace('&', '&').replace('>', '>').replace('<', '<') - value = value.replace('\n', ' ').replace('\r', ' ').replace('\t', ' ') - value = value.replace('"', '"') - return '"{}"'.format(value) - -def ring_hash(refs): - #return json.dumps(refs) - return hash(tuple(sorted(refs))) - -def parse_polygon(node_pool, rings, polygon): - role = 'outer' - for ring in polygon: - rings.append([role, parse_linestring(node_pool, ring)]) - role = 'inner' - -def parse_linestring(node_pool, linestring): - nodes = [] - for lonlat in linestring: - ref = '{} {}'.format(lonlat[1], lonlat[0]) - if ref in node_pool: - node_id = node_pool[ref] - else: - node_id = node_pool['id'] - node_pool[ref] = node_id - node_pool['id'] = node_id + 1 - nodes.append(node_id) - return nodes - -def append_way(way, way2): - another = list(way2) # make copy to 
not modify original list - if way[0] == way[-1] or another[0] == another[-1]: - return None - if way[0] == another[0] or way[-1] == another[-1]: - another.reverse() - if way[-1] == another[0]: - result = list(way) - result.extend(another[1:]) - return result - elif way[0] == another[-1]: - result = another - result.extend(way) - return result - return None - -def way_to_wkt(node_pool, refs): - coords = [] - for nd in refs: - coords.append('{} {}'.format(node_pool[nd]['lon'], node_pool[nd]['lat'])) - return '({})'.format(','.join(coords)) - -def import_error(msg): - if config.IMPORT_ERROR_ALERT: - return ''.format(msg) - else: - return jsonify(status=msg) - -def extend_bbox(bbox, x, y=None): - if y is not None: - x = [x, y, x, y] - bbox[0] = min(bbox[0], x[0]) - bbox[1] = min(bbox[1], x[1]) - bbox[2] = max(bbox[2], x[2]) - bbox[3] = max(bbox[3], x[3]) - -def bbox_contains(outer, inner): - return outer[0] <= inner[0] and outer[1] <= inner[1] and outer[2] >= inner[2] and outer[3] >= inner[3] - -@app.route('/import', methods=['POST']) -def import_osm(): - if config.READONLY: - abort(405) - if not LXML: - return import_error('importing is disabled due to absent lxml library') - f = request.files['file'] - if not f: - return import_error('failed upload') - try: - tree = etree.parse(f) - except: - return import_error('malformed xml document') - if not tree: - return import_error('bad document') - root = tree.getroot() - - # read nodes and ways - nodes = {} # id: { lat, lon, modified } - for node in root.iter('node'): - if node.get('action') == 'delete': - continue - modified = int(node.get('id')) < 0 or node.get('action') == 'modify' - nodes[node.get('id')] = { 'lat': float(node.get('lat')), 'lon': float(node.get('lon')), 'modified': modified } - ways = {} # id: { name, disabled, modified, bbox, nodes, used } - for way in root.iter('way'): - if way.get('action') == 'delete': - continue - way_nodes = [] - bbox = [1e4, 1e4, -1e4, -1e4] - modified = int(way.get('id')) < 0 or 
way.get('action') == 'modify' - for node in way.iter('nd'): - ref = node.get('ref') - if not ref in nodes: - return import_error('missing node {} in way {}'.format(ref, way.get('id'))) - way_nodes.append(ref) - if nodes[ref]['modified']: - modified = True - extend_bbox(bbox, float(nodes[ref]['lon']), float(nodes[ref]['lat'])) - name = None - disabled = False - for tag in way.iter('tag'): - if tag.get('k') == 'name': - name = tag.get('v') - if tag.get('k') == 'disabled' and tag.get('v') == 'yes': - disabled = True - if len(way_nodes) < 2: - return import_error('way with less than 2 nodes: {}'.format(way.get('id'))) - ways[way.get('id')] = { 'name': name, 'disabled': disabled, 'modified': modified, 'bbox': bbox, 'nodes': way_nodes, 'used': False } - - # finally we are constructing regions: first, from multipolygons - regions = {} # name: { modified, disabled, wkt } - for rel in root.iter('relation'): - modified = int(rel.get('id')) < 0 or rel.get('action') == 'modify' - name = None - disabled = False - multi = False - inner = [] - outer = [] - for tag in rel.iter('tag'): - if tag.get('k') == 'name': - name = tag.get('v') - if tag.get('k') == 'disabled' and tag.get('v') == 'yes': - disabled = True - if tag.get('k') == 'type' and tag.get('v') == 'multipolygon': - multi = True - if not multi: - return import_error('found non-multipolygon relation: {}'.format(rel.get('id'))) - if not name: - return import_error('relation {} has no name'.format(rel.get('id'))) - if name in regions: - return import_error('multiple relations with the same name {}'.format(name)) - for member in rel.iter('member'): - ref = member.get('ref') - if not ref in ways: - return import_error('missing way {} in relation {}'.format(ref, rel.get('id'))) - if ways[ref]['modified']: - modified = True - role = member.get('role') - if role == 'outer': - outer.append(ways[ref]) - elif role == 'inner': - inner.append(ways[ref]) - else: - return import_error('unknown role {} in relation {}'.format(role, 
rel.get('id'))) - ways[ref]['used'] = True - # after parsing ways, so 'used' flag is set - if rel.get('action') == 'delete': - continue - if len(outer) == 0: - continue - #return import_error('relation {} has no outer ways'.format(rel.get('id'))) - # reconstruct rings in multipolygon - for multi in (inner, outer): - i = 0 - while i < len(multi): - way = multi[i]['nodes'] - while way[0] != way[-1]: - productive = False - j = i + 1 - while way[0] != way[-1] and j < len(multi): - new_way = append_way(way, multi[j]['nodes']) - if new_way: - multi[i] = dict(multi[i]) - multi[i]['nodes'] = new_way - way = new_way - if multi[j]['modified']: - multi[i]['modified'] = True - extend_bbox(multi[i]['bbox'], multi[j]['bbox']) - del multi[j] - productive = True - else: - j = j + 1 - if not productive: - return import_error('unconnected way in relation {}'.format(rel.get('id'))) - i = i + 1 - # check for 2-node rings - for multi in (outer, inner): - for way in multi: - if len(way['nodes']) < 3: - return import_error('Way in relation {} has only {} nodes'.format(rel.get('id'), len(way['nodes']))) - # sort inner and outer rings - polygons = [] - for way in outer: - rings = [way_to_wkt(nodes, way['nodes'])] - for i in range(len(inner)-1, -1, -1): - if bbox_contains(way['bbox'], inner[i]['bbox']): - rings.append(way_to_wkt(nodes, inner[i]['nodes'])) - del inner[i] - polygons.append('({})'.format(','.join(rings))) - regions[name] = { 'modified': modified, 'disabled': disabled, 'wkt': 'MULTIPOLYGON({})'.format(','.join(polygons)) } - - # make regions from unused named ways - for wid, w in ways.iteritems(): - if w['used']: - continue - if not w['name']: - continue - #return import_error('unused in multipolygon way with no name: {}'.format(wid)) - if w['nodes'][0] != w['nodes'][-1]: - return import_error('non-closed unused in multipolygon way: {}'.format(wid)) - if len(w['nodes']) < 3: - return import_error('way {} has {} nodes'.format(wid, len(w['nodes']))) - if w['name'] in regions: - 
return import_error('way {} has the same name as other way/multipolygon'.format(wid)) - regions[w['name']] = { 'modified': w['modified'], 'disabled': w['disabled'], 'wkt': 'POLYGON({})'.format(way_to_wkt(nodes, w['nodes'])) } - - # submit modifications to the database - cur = g.conn.cursor() - added = 0 - updated = 0 - for name, region in regions.iteritems(): - if not region['modified']: - continue - cur.execute('select count(1) from {} where name = %s'.format(config.TABLE), (name,)) - res = cur.fetchone() - try: - if res and res[0] > 0: - # update - cur.execute('update {table} set disabled = %s, geom = ST_GeomFromText(%s, 4326), modified = now(), count_k = -1 where name = %s'.format(table=config.TABLE), (region['disabled'], region['wkt'], name)) - updated = updated + 1 - else: - # create - cur.execute('insert into {table} (name, disabled, geom, modified, count_k) values (%s, %s, ST_GeomFromText(%s, 4326), now(), -1);'.format(table=config.TABLE), (name, region['disabled'], region['wkt'])) - added = added + 1 - except psycopg2.Error, e: - print 'WKT: {}'.format(region['wkt']) - raise - g.conn.commit() - return jsonify(regions=len(regions), added=added, updated=updated) - -@app.route('/poly') -def export_poly(): - xmin = request.args.get('xmin') - xmax = request.args.get('xmax') - ymin = request.args.get('ymin') - ymax = request.args.get('ymax') - table = request.args.get('table') - if table in config.OTHER_TABLES: - table = config.OTHER_TABLES[table] - else: - table = config.TABLE - - cur = g.conn.cursor() - if xmin and xmax and ymin and ymax: - cur.execute("""SELECT name, ST_AsGeoJSON(geom, 7) as geometry FROM {table} WHERE disabled = false - and ST_Intersects(ST_SetSRID(ST_MakeBox2D(ST_Point(%s, %s), ST_Point(%s, %s)), 4326), geom); - """.format(table=table), (xmin, ymin, xmax, ymax)) - else: - cur.execute("""SELECT name, ST_AsGeoJSON(geom, 7) as geometry FROM {table} WHERE disabled = false;""".format(table=table)) - - memory_file = io.BytesIO(); - with 
zipfile.ZipFile(memory_file, 'w', zipfile.ZIP_DEFLATED) as zf: - for res in cur: - geometry = json.loads(res[1]) - polygons = [geometry['coordinates']] if geometry['type'] == 'Polygon' else geometry['coordinates'] - # sanitize name, src: http://stackoverflow.com/a/295466/1297601 - name = res[0].decode('utf-8') - name = unicodedata.normalize('NFKD', name) - name = name.encode('ascii', 'ignore') - name = re.sub('[^\w _-]', '', name).strip() - name = name + '.poly' - - poly = io.BytesIO() - poly.write(res[0] + '\n') - pcounter = 1 - for polygon in polygons: - outer = True - for ring in polygon: - poly.write('{}\n'.format(pcounter if outer else -pcounter)) - pcounter = pcounter + 1 - for coord in ring: - poly.write('\t{:E}\t{:E}\n'.format(coord[0], coord[1])) - poly.write('END\n') - outer = False - poly.write('END\n') - zf.writestr(name, poly.getvalue()) - poly.close() - memory_file.seek(0) - return send_file(memory_file, attachment_filename='borders.zip', as_attachment=True) - -@app.route('/stat') -def statistics(): - group = request.args.get('group') - table = request.args.get('table') - if table in config.OTHER_TABLES: - table = config.OTHER_TABLES[table] - else: - table = config.TABLE - cur = g.conn.cursor() - if group == 'total': - cur.execute('select count(1) from borders;') - return jsonify(total=cur.fetchone()[0]) - elif group == 'sizes': - cur.execute("select name, count_k, ST_NPoints(geom), ST_AsGeoJSON(ST_Centroid(geom)), (case when ST_Area(geography(geom)) = 'NaN' then 0 else ST_Area(geography(geom)) / 1000000 end) as area, disabled, (case when cmnt is null or cmnt = '' then false else true end) as cmnt from {};".format(table)) - result = [] - for res in cur: - coord = json.loads(res[3])['coordinates'] - result.append({ 'name': res[0], 'lat': coord[1], 'lon': coord[0], 'size': res[1], 'nodes': res[2], 'area': res[4], 'disabled': res[5], 'commented': res[6] }) - return jsonify(regions=result) - elif group == 'topo': - cur.execute("select name, count(1), 
min(case when ST_Area(geography(g)) = 'NaN' then 0 else ST_Area(geography(g)) end) / 1000000, sum(ST_NumInteriorRings(g)), ST_AsGeoJSON(ST_Centroid(ST_Collect(g))) from (select name, (ST_Dump(geom)).geom as g from {}) a group by name;".format(table)) - result = [] - for res in cur: - coord = json.loads(res[4])['coordinates'] - result.append({ 'name': res[0], 'outer': res[1], 'min_area': res[2], 'inner': res[3], 'lon': coord[0], 'lat': coord[1] }) - return jsonify(regions=result) - return jsonify(status='wrong group id') - -if __name__ == '__main__': - app.run(threaded=True) diff --git a/server/borders_daemon.py b/server/borders_daemon.py deleted file mode 100755 index 982c19b..0000000 --- a/server/borders_daemon.py +++ /dev/null @@ -1,73 +0,0 @@ -#!/usr/bin/python -import os, sys -import time -import logging -import psycopg2 -import config -try: - from daemon import runner - HAS_DAEMON = True -except: - HAS_DAEMON = False - -class App(): - def __init__(self): - self.stdin_path = '/dev/null' - self.stdout_path = '/dev/tty' - self.stderr_path = '/dev/tty' - self.pidfile_path = '/var/run/borders-daemon.pid' - self.pidfile_timeout = 5 - - def process(self, region): - logger.info('Processing {}'.format(region)) - try: - f = open(config.DAEMON_STATUS_PATH, 'w') - f.write(region) - f.close() - except: - pass - - with self.conn.cursor() as cur: - cur.execute('update {table} set count_k = n.count from (select coalesce(sum(t.count), 0) as count from {table} b, tiles t where ST_Intersects(b.geom, t.tile) and name = %s) as n where name = %s;'.format(table=config.TABLE), (region, region)); - try: - f = open(config.DAEMON_STATUS_PATH, 'w') - f.close() - except: - pass - - def find_region(self): - with self.conn.cursor() as cur: - cur.execute('select name from {table} where count_k < 0 order by st_area(geom) limit 1;'.format(table=config.TABLE)) - res = cur.fetchone() - if not res: - cur.execute('select name from {table} where count_k is null order by st_area(geom) limit 
1;'.format(table=config.TABLE)) - res = cur.fetchone() - return res[0] if res else None - - def run(self): - self.conn = psycopg2.connect(config.CONNECTION) - self.conn.autocommit = True - while True: - region = self.find_region() - if region: - self.process(region) - time.sleep(1) # todo: 10 - -def init_logger(): - logger = logging.getLogger("borders-daemon") - logger.setLevel(logging.INFO) - formatter = logging.Formatter("%(asctime)s [%(levelname)s] %(message)s") - #handler = logging.FileHandler("/var/log/borders-daemon.log") - handler = logging.StreamHandler() - handler.setFormatter(formatter) - logger.addHandler(handler) - return logger - -if __name__ == '__main__': - app = App() - logger = init_logger() - if not HAS_DAEMON or (len(sys.argv) > 1 and sys.argv[1] == 'run'): - app.run() - else: - r = runner.DaemonRunner(app) - r.do_action() diff --git a/web/Dockerfile.web b/web/Dockerfile.web new file mode 100644 index 0000000..5778d0c --- /dev/null +++ b/web/Dockerfile.web @@ -0,0 +1,8 @@ +FROM tiangolo/uwsgi-nginx-flask:latest + +RUN pip install flask_cors flask_compress psycopg2 unidecode + +COPY app /app +COPY ./uwsgi.ini /app +COPY ./prestart.sh /app + diff --git a/web/app/auto_split.py b/web/app/auto_split.py new file mode 100644 index 0000000..e8e121a --- /dev/null +++ b/web/app/auto_split.py @@ -0,0 +1,501 @@ +import itertools +import json +import psycopg2 + +from collections import defaultdict + +from config import AUTOSPLIT_TABLE as autosplit_table + + +class DisjointClusterUnion: + """Disjoint set union implementation for administrative subregions.""" + + def __init__(self, region_id, subregions, thresholds): + self.region_id = region_id + self.subregions = subregions + self.city_population_thr, self.cluster_population_thr = thresholds + self.representatives = {sub_id: sub_id for sub_id in subregions} + # a cluster is one or more subregions with common borders + self.clusters = {} # representative => cluster object + + # At the beginning, each subregion 
forms a cluster. + # Then they would be enlarged by merging. + for subregion_id, data in subregions.items(): + self.clusters[subregion_id] = { + 'representative': subregion_id, + 'subregion_ids': [subregion_id], + 'population': data['population'], + 'big_cities_cnt': sum(1 for c in data['cities'] if self.is_city_big(c)), + 'finished': False, # True if the cluster cannot be merged with another + } + + + def is_city_big(self, city): + return city['population'] >= self.city_population_thr + + def get_smallest_cluster(self): + """Find minimal cluster without big cities.""" + smallest_cluster_id = min( + filter( + lambda cluster_id: ( + not self.clusters[cluster_id]['finished'] and + self.clusters[cluster_id]['big_cities_cnt'] == 0) + , + self.clusters.keys() + ), + default=None, + key=lambda cluster_id: self.clusters[cluster_id]['population'] + ) + return smallest_cluster_id + + def mark_cluster_finished(self, cluster_id): + self.clusters[cluster_id]['finished'] = True + + def find_cluster(self, subregion_id): + if self.representatives[subregion_id] == subregion_id: + return subregion_id + else: + representative = self.find_cluster(self.representatives[subregion_id]) + self.representatives[subregion_id] = representative + return representative + + def get_cluster_population(self, subregion_id): + cluster_id = self.find_cluster(subregion_id) + return self.clusters[cluster_id]['population'] + + def get_cluster_count(self): + return len(self.clusters) + + def union(self, cluster_id1, cluster_id2): + # To make it more deterministic + retained_cluster_id = max(cluster_id1, cluster_id2) + dropped_cluster_id = min(cluster_id1, cluster_id2) + r_cluster = self.clusters[retained_cluster_id] + d_cluster = self.clusters[dropped_cluster_id] + r_cluster['subregion_ids'].extend(d_cluster['subregion_ids']) + r_cluster['population'] += d_cluster['population'] + r_cluster['big_cities_cnt'] += d_cluster['big_cities_cnt'] + del self.clusters[dropped_cluster_id] + 
self.representatives[dropped_cluster_id] = retained_cluster_id + return retained_cluster_id + + def get_cluster_subregion_ids(self, subregion_id): + """Get all elements in a cluster by subregion_id""" + representative = self.find_cluster(subregion_id) + return set(self.clusters[representative]['subregion_ids']) + + def get_all_subregion_ids(self): + subregion_ids = set(itertools.chain.from_iterable( + cl['subregion_ids'] for cl in self.clusters.values() + )) + return subregion_ids + + +def enrich_with_population_and_cities(conn, subregions): + cursor = conn.cursor() + cursor.execute(""" + SELECT b.osm_id, c.name, c.population + FROM osm_borders b, osm_cities c + WHERE b.osm_id IN ({ids}) AND ST_CONTAINS(b.way, c.center) + """.format(ids=','.join(str(x) for x in subregions.keys())) + ) + for rec in cursor: + sub_id = int(rec[0]) + subregions[sub_id]['cities'].append({ + 'name': rec[1], + 'population': int(rec[2]) + }) + subregions[sub_id]['population'] += int(rec[2]) + + +def find_subregions(conn, region_id, next_level): + cursor = conn.cursor() + cursor.execute(""" + SELECT subreg.osm_id, subreg.name + FROM osm_borders reg, osm_borders subreg + WHERE reg.osm_id = %s AND subreg.admin_level = %s AND + ST_Contains(reg.way, subreg.way) + """, + (region_id, next_level) + ) + subregions = { + int(rec[0]): + { + 'osm_id': int(rec[0]), + 'name': rec[1], + 'population': 0, + 'cities': [] + } + for rec in cursor + } + if subregions: + enrich_with_population_and_cities(conn, subregions) + return subregions + + +def get_best_cluster_to_join_with(small_cluster_id, dcu: DisjointClusterUnion, common_border_matrix): + if small_cluster_id not in common_border_matrix: + return None # this may be if a subregion is isolated, like Bezirk Lienz inside Tyrol, Austria + common_borders = defaultdict(lambda: 0.0) # cluster representative => common border length + subregion_ids = dcu.get_cluster_subregion_ids(small_cluster_id) + for subregion_id in subregion_ids: + for other_subregion_id, 
length in common_border_matrix[subregion_id].items(): + other_cluster_id = dcu.find_cluster(other_subregion_id) + if other_cluster_id != small_cluster_id: + common_borders[other_cluster_id] += length + #print(f"common_borders={json.dumps(common_borders)} of len {len(common_borders)}") + #common_borders = {k:v for k,v in common_borders.items() if v > 0.0} + if not common_borders: + return None + total_common_border_length = sum(common_borders.values()) + total_adjacent_population = sum(dcu.get_cluster_population(x) for x in common_borders) + choice_criterion = ( + ( + lambda cluster_id: ( + common_borders[cluster_id]/total_common_border_length + + -dcu.get_cluster_population(cluster_id)/total_adjacent_population + ) + ) if total_adjacent_population else + lambda cluster_id: ( + common_borders[cluster_id]/total_common_border_length + ) + ) + small_cluster_population = dcu.get_cluster_population(small_cluster_id) + best_cluster_id = max( + filter( + lambda cluster_id: ( + small_cluster_population + dcu.get_cluster_population(cluster_id) + <= dcu.cluster_population_thr + ), + common_borders.keys() + ), + default=None, + key=choice_criterion + ) + return best_cluster_id + + +def calculate_common_border_matrix(conn, subregion_ids): + cursor = conn.cursor() + # ST_Intersection returns 0 if its parameter is a geometry other than + # LINESTRING or MULTILINESTRING + cursor.execute(""" + SELECT b1.osm_id AS osm_id1, b2.osm_id AS osm_id2, + ST_Length(geography(ST_Intersection(b1.way, b2.way))) AS intersection + FROM osm_borders b1, osm_borders b2 + WHERE b1.osm_id IN ({subregion_ids_str}) AND + b2.osm_id IN ({subregion_ids_str}) + AND b1.osm_id < b2.osm_id + """.format( + subregion_ids_str=','.join(str(x) for x in subregion_ids), + ) + ) + common_border_matrix = {} # {subregion_id: { subregion_id: float} } where len > 0 + for rec in cursor: + border_len = float(rec[2]) + if border_len == 0.0: + continue + osm_id1 = int(rec[0]) + osm_id2 = int(rec[1]) + 
common_border_matrix.setdefault(osm_id1, {})[osm_id2] = border_len + common_border_matrix.setdefault(osm_id2, {})[osm_id1] = border_len + return common_border_matrix + + +def find_golden_splitting(conn, border_id, next_level, + country_region_name, thresholds): + subregions = find_subregions(conn, border_id, next_level) + if not subregions: + print(f"No subregions for {border_id} {country_region_name}") + return + + dcu = DisjointClusterUnion(border_id, subregions, thresholds) + #save_splitting_to_file(dcu, f'all_{country_region_name}') + all_subregion_ids = dcu.get_all_subregion_ids() + common_border_matrix = calculate_common_border_matrix(conn, all_subregion_ids) + + i = 0 + while True: + with open(f"clusters-{i:02d}.json", 'w') as f: + json.dump(dcu.clusters, f, ensure_ascii=False, indent=2) + if dcu.get_cluster_count() == 1: + return dcu + i += 1 + #print(f"i = {i}") + smallest_cluster_id = dcu.get_smallest_cluster() + if not smallest_cluster_id: + return dcu # TODO: return target splitting + #print(f"smallest cluster = {json.dumps(dcu.clusters[smallest_cluster_id])}") + best_cluster_id = get_best_cluster_to_join_with(smallest_cluster_id, dcu, common_border_matrix) + if not best_cluster_id: # !!! 
def get_union_sql(subregion_ids):
    """Build an SQL expression selecting the union of the ``way`` geometries
    of the given ``osm_borders`` rows.

    For a single id the expression is a plain SELECT; for several ids it is
    a right-nested chain of ST_UNION calls built recursively.

    :param subregion_ids: non-empty sequence of integer osm ids (DB-sourced
                          ints, safe to embed directly into SQL)
    :raises ValueError: on an empty sequence. Previously this was an
                        ``assert``, which is silently stripped under
                        ``python -O``.
    """
    if not subregion_ids:
        raise ValueError("subregion_ids must not be empty")
    if len(subregion_ids) == 1:
        return f"""
            SELECT way FROM osm_borders WHERE osm_id={subregion_ids[0]}
            """
    else:
        # Union of the first geometry with the union of the rest.
        return f"""
            SELECT ST_UNION(
                ({get_union_sql(subregion_ids[0:1])}),
                ({get_union_sql(subregion_ids[1:])})
            )
            """
def save_splitting_to_file(conn, dcu: DisjointClusterUnion, filename_prefix=None):
    """Dump a splitting to ``{FOLDER}/{filename_prefix}.poly`` (all clusters
    in one poly file) plus a JSON dump of the cluster structure.

    With GENERATE_ALL_POLY also writes one poly file per cluster.

    :param conn: open DB connection, used to fetch cluster geometries
    :param dcu: the computed splitting
    :param filename_prefix: base name for output files (must not be None
                            in practice despite the default)
    """
    with open(f"{FOLDER}/{filename_prefix}.poly", 'w') as poly_file:
        poly_file.write(f"{filename_prefix}\n")
        for cluster_id, data in dcu.clusters.items():
            subregion_ids = data['subregion_ids']
            cluster_geometry_sql = get_union_sql(subregion_ids)
            geojson = get_geojson(conn, cluster_geometry_sql)
            geometry = json.loads(geojson)
            # Normalize to a list of polygons regardless of (Multi)Polygon type.
            polygons = ([geometry['coordinates']]
                        if geometry['type'] == 'Polygon'
                        else geometry['coordinates'])
            name_prefix = f"{filename_prefix}_{abs(cluster_id)}"
            write_polygons_to_poly(poly_file, polygons, name_prefix)
            if GENERATE_ALL_POLY:
                with open(f"{FOLDER}/{filename_prefix}{cluster_id}.poly", 'w') as f:
                    # Bug fix: the poly-format header line was written without
                    # a trailing newline, gluing it to the first ring name.
                    f.write(f"{filename_prefix}_{cluster_id}\n")
                    write_polygons_to_poly(f, polygons, name_prefix)
                    f.write('END\n')
        poly_file.write('END\n')
    with open(f"{FOLDER}/{filename_prefix}-splitting.json", 'w') as f:
        json.dump(dcu.clusters, f, ensure_ascii=False, indent=2)
def prepare_bulk_split():
    """Mark large European regions for automatic splitting.

    For each country in the hand-tuned list below, marks its regions of
    ``split_level`` with ``need_split`` and records parent links for the
    ``lower_level`` units that will be merged back into clusters.

    NOTE(review): relies on a module-global ``conn`` connection — confirm it
    is initialized before calling (it is in the ``__main__`` block).
    """
    need_split = [
        # (large region name, admin_level (2 in most cases),
        #  admin_level to split'n'merge, admin_level of subregions to merge)
        ('Germany', 2, 4, 6),  # Half of the country is covered by units of AL=5
        ('Metropolitan France', 3, 4, 6),
        ('Spain', 2, 4, 6),
        ('Portugal', 2, 4, 6),
        ('Belgium', 2, 4, 6),
        ('Italy', 2, 4, 6),
        ('Switzerland', 2, 2, 4),  # has admin_level=5
        ('Austria', 2, 4, 6),
        ('Poland', 2, 4, 6),  # 380(!) of AL=6
        ('Czechia', 2, 6, 7),
        ('Ukraine', 2, 4, 6),  # should merge back to region=4 level clusters
        ('United Kingdom', 2, 5, 6),  # whole country is divided by level 4; level 5 is necessary but not comprehensive
        ('Denmark', 2, 4, 7),
        ('Norway', 2, 4, 7),
        ('Sweden', 2, 4, 7),  # though division by level 4 is currently ideal
        ('Finland', 2, 6, 7),  # though division by level 6 is currently ideal
        ('Estonia', 2, 2, 6),
        ('Latvia', 2, 4, 6),  # the whole country takes 56Mb, all 6-level units should merge into 4-level clusters
        ('Lithuania', 2, 2, 4),  # now Lithuania has 2 mwms of size 60Mb each
        ('Belarus', 2, 2, 4),  # 6 regions + Minsk city. Would it be merged with the region?
        ('Slovakia', 2, 2, 4),  # there are no subregions 5, 6, 7. Must leave all 8 4-level regions
        ('Hungary', 2, 5, 6),
        #('Slovenia', 2, 2, 8), # no levels 3,4,5,6; level 7 incomplete.
        ('Croatia', 2, 2, 6),
        ('Bosnia and Herzegovina', 2, 2, 4),  # other levels - 5, 6, 7 - are incomplete.
        ('Serbia', 2, 4, 6),
        ('Romania', 2, 2, 4),
        ('Bulgaria', 2, 2, 4),
        ('Greece', 2, 4, 5),  # has 7 4-level regions, must merge 5-level to them again
        ('Ireland', 2, 5, 6),  # 5-level don't cover the whole country! Still...
        ('Turkey', 2, 3, 4),
    ]
    cursor = conn.cursor()
    regions_subset = need_split
    for country_name, country_level, split_level, lower_level in regions_subset:
        print(f"start {country_name}")
        cursor.execute(f"""
            SELECT osm_id FROM osm_borders
            WHERE osm_id < 0 AND admin_level={country_level} AND name=%s
            """, (country_name,))
        country_border_id = None
        for rec in cursor:
            assert (not country_border_id), f"more than one country {country_name}"
            country_border_id = int(rec[0])
        # Bug fix: this query has no %s placeholder, so the former extra
        # (country_name,) argument made psycopg2 raise
        # "not all arguments converted during string formatting".
        cursor.execute(f"""
            UPDATE osm_borders b
            SET need_split=true,
                next_admin_level={lower_level},
                parent = {country_border_id}
            WHERE parent IS NULL
              AND osm_id < 0 AND admin_level={split_level} AND ST_Contains(
                (SELECT way FROM osm_borders WHERE osm_id={country_border_id}),
                b.way
              )""")
        cursor.execute(f"""
            UPDATE osm_borders b
            SET parent = (SELECT osm_id FROM osm_borders
                          WHERE osm_id < 0 AND admin_level={split_level} AND ST_Contains(way, b.way)
                            AND osm_id != -72639 -- crunch to exclude double Crimea region
                         )
            WHERE parent IS NULL
              AND osm_id < 0 and admin_level={lower_level} AND ST_Contains(
                (SELECT way FROM osm_borders WHERE admin_level={country_level} AND name=%s),
                b.way
              )""",
            (country_name,))
    conn.commit()
def save_splitting(dcu: DisjointClusterUnion, conn,
                   save_to_files=None, country_region_name=None):
    """Persist a computed splitting to the autosplit table and, optionally,
    dump it to .poly/.json files for inspection.

    :param dcu: the computed splitting
    :param conn: open DB connection
    :param save_to_files: truthy to also write files via save_splitting_to_file
    :param country_region_name: file name base, used only with save_to_files
    """
    save_splitting_to_db(conn, dcu)
    if save_to_files:
        print(f"Saving {country_region_name}")
        # Bug fix: was ``dcu.city_population_thrR`` — a typo that raised
        # AttributeError whenever file output was requested.
        filename_prefix = f"{country_region_name}-{dcu.city_population_thr}"
        save_splitting_to_file(conn, dcu, filename_prefix)
@app.route('/static/')
def send_js(path):
    """Serve static files directly from Flask, but only in DEBUG mode;
    in production a front-end web server is expected to serve them.

    NOTE(review): the route rule has no ``<path:path>`` converter although
    the view takes a ``path`` argument — the converter was probably lost
    when this text was extracted; confirm against the repository.
    """
    if config.DEBUG:
        return send_from_directory('static/', path)
    # Pretend the endpoint does not exist outside of DEBUG.
    abort(404)
def fetch_borders(**kwargs):
    """Fetch borders as a list of GeoJSON Feature dicts.

    Keyword args:
        table: borders table to query (default config.TABLE)
        simplify: tolerance for ST_SimplifyPreserveTopology; 0 disables
        where_clause: SQL filter injected verbatim into the query —
            callers must pass trusted SQL only
        only_leaves: if True, exclude regions that have children

    Returns features ordered by area, largest first.
    """
    table = kwargs.get('table', config.TABLE)
    simplify = kwargs.get('simplify', 0)
    where_clause = kwargs.get('where_clause', '1=1')
    only_leaves = kwargs.get('only_leaves', True)
    osm_table = config.OSM_TABLE
    geom = (f'ST_SimplifyPreserveTopology(geom, {simplify})'
                if simplify > 0 else 'geom')
    # A region is a leaf iff its id never appears as someone's parent_id.
    leaves_filter = (f""" AND id NOT IN (SELECT parent_id FROM {table}
                                         WHERE parent_id IS NOT NULL)"""
                     if only_leaves else '')
    # 'NaN' areas (degenerate geometries) are coerced to 0 so that
    # ORDER BY and the JSON output stay well-defined.
    query = f"""
        SELECT name, geometry, nodes, modified, disabled, count_k, cmnt,
               (CASE WHEN area = 'NaN' THEN 0 ELSE area END) AS area,
               id, admin_level, parent_id, parent_name
        FROM (
            SELECT name,
               ST_AsGeoJSON({geom}, 7) as geometry,
               ST_NPoints(geom) AS nodes,
               modified,
               disabled,
               count_k,
               cmnt,
               round(ST_Area(geography(geom))) AS area,
               id,
               ( SELECT admin_level FROM {osm_table}
                 WHERE osm_id = t.id
               ) AS admin_level,
               parent_id,
               ( SELECT name FROM {table}
                 WHERE id = t.parent_id
               ) AS parent_name
            FROM {table} t
            WHERE ({where_clause}) {leaves_filter}
        ) q
        ORDER BY area DESC
        """
    #print(query)
    cur = g.conn.cursor()
    cur.execute(query)
    borders = []
    for rec in cur:
        region_id = rec[8]
        # NOTE(review): one extra hierarchy walk per row (N+1 queries) —
        # acceptable for small result sets, worth a join if it grows.
        country_id, country_name = get_region_country(region_id)
        props = { 'name': rec[0] or '', 'nodes': rec[2], 'modified': rec[3],
                  'disabled': rec[4], 'count_k': rec[5],
                  'comment': rec[6],
                  'area': rec[7],
                  'id': region_id, 'country_id': country_id,
                  'admin_level': rec[9],
                  'parent_id': rec[10],
                  'parent_name': rec[11] or '',
                  'country_name': country_name
                }
        feature = {'type': 'Feature',
                   'geometry': json.loads(rec[1]),
                   'properties': props
                  }
        borders.append(feature)
    #print([x['properties'] for x in borders])
    return borders
@app.route('/small')
def query_small_in_bbox():
    """List rings smaller than config.SMALL_KM2 inside the given bbox.

    Query params: xmin/ymin/xmax/ymax (bbox) and an optional table alias.
    The table name is whitelisted through config.OTHER_TABLES, falling back
    to config.TABLE — so the .format() below never receives user text.
    Returns name, area (m^2) and centroid lon/lat of each small ring.
    """
    xmin = request.args.get('xmin')
    xmax = request.args.get('xmax')
    ymin = request.args.get('ymin')
    ymax = request.args.get('ymax')
    table = request.args.get('table')
    if table in config.OTHER_TABLES:
        table = config.OTHER_TABLES[table]
    else:
        table = config.TABLE
    cur = g.conn.cursor()
    # ST_Dump splits MultiPolygons so each ring is measured separately.
    cur.execute('''SELECT name, round(ST_Area(geography(ring))) as area, ST_X(ST_Centroid(ring)), ST_Y(ST_Centroid(ring))
        FROM (
            SELECT name, (ST_Dump(geom)).geom as ring
            FROM {table}
            WHERE geom && ST_MakeBox2D(ST_Point(%s, %s), ST_Point(%s, %s))
        ) g
        WHERE ST_Area(geography(ring)) < %s;'''.format(table=table), (xmin, ymin, xmax, ymax, config.SMALL_KM2 * 1000000))
    result = []
    for rec in cur:
        result.append({ 'name': rec[0], 'area': rec[1], 'lon': float(rec[2]), 'lat': float(rec[3]) })
    return jsonify(features=result)
@app.route('/crossing')
def query_crossing():
    """Return unprocessed border-crossing lines in a bbox as GeoJSON features.

    Query params: bbox (xmin/ymin/xmax/ymax), optional region filter,
    points=1 to return centroids instead of lines, rank ceiling (default 4).
    Errors (e.g. missing table) are deliberately swallowed, yielding an
    empty collection — this endpoint is best-effort.
    """
    xmin = request.args.get('xmin')
    xmax = request.args.get('xmax')
    ymin = request.args.get('ymin')
    ymax = request.args.get('ymax')
    region = request.args.get('region', '').encode('utf-8')
    points = request.args.get('points') == '1'
    rank = request.args.get('rank') or '4'
    cur = g.conn.cursor()
    # Only table/column fragments built from constants go through .format();
    # all user-supplied values are passed as bound parameters.
    sql = """SELECT id, ST_AsGeoJSON({line}, 7) as geometry, region, processed FROM {table}
        WHERE line && ST_MakeBox2D(ST_Point(%s, %s), ST_Point(%s, %s)) and processed = 0 {reg} and rank <= %s;
        """.format(table=config.CROSSING_TABLE, reg='and region = %s' if region else '', line='line' if not points else 'ST_Centroid(line)')
    params = [xmin, ymin, xmax, ymax]
    if region:
        params.append(region)
    params.append(rank)
    result = []
    try:
        cur.execute(sql, tuple(params))
        for rec in cur:
            props = { 'id': rec[0], 'region': rec[2], 'processed': rec[3] }
            feature = { 'type': 'Feature', 'geometry': json.loads(rec[1]), 'properties': props }
            result.append(feature)
    except psycopg2.Error as e:
        # Best-effort: an absent crossing table must not break the UI.
        pass
    return jsonify(type='FeatureCollection', features=result)
@app.route('/search')
def search():
    """Find the smallest region whose name matches the query substring
    and return its bounding box, or a not-found status."""
    pattern = f'%{request.args.get("q")}%'
    cursor = g.conn.cursor()
    cursor.execute(f"""
        SELECT ST_XMin(geom), ST_YMin(geom), ST_XMax(geom), ST_YMax(geom)
        FROM {config.TABLE}
        WHERE name ILIKE %s
        ORDER BY (ST_Area(geography(geom)))
        LIMIT 1""", (pattern,)
    )
    row = cursor.fetchone()
    if row is None:
        return jsonify(status='not found')
    return jsonify(bounds=list(row[:4]))
@app.route('/join')
def join_borders():
    """Merge region id2 into region id1.

    The merged region gets a fresh negative id (get_free_id) so it no
    longer collides with an OSM-derived id; geometry becomes the union,
    count_k is reset to -1 to trigger recount. Both statements run in one
    transaction and are rolled back together on any DB error.
    """
    if config.READONLY:
        abort(405)
    region_id1 = int(request.args.get('id1'))
    region_id2 = int(request.args.get('id2'))
    if region_id1 == region_id2:
        return jsonify(status='failed to join region with itself')
    cur = g.conn.cursor()
    try:
        table = config.TABLE
        free_id = get_free_id()
        # Update region 1 in place: new id, union of both geometries.
        cur.execute(f"""
                UPDATE {table}
                SET id = {free_id},
                    geom = ST_Union(geom, b2.g),
                    count_k = -1
                FROM (SELECT geom AS g FROM {table} WHERE id = %s) AS b2
                WHERE id = %s""", (region_id2, region_id1))
        cur.execute(f"DELETE FROM {table} WHERE id = %s", (region_id2,))
    except psycopg2.Error as e:
        g.conn.rollback()
        return jsonify(status=str(e))
    g.conn.commit()
    return jsonify(status='ok')
@app.route('/point')
def find_osm_borders():
    """List OSM borders containing the given point, innermost first.

    Query params: lat, lon. For each containing border returns id, name,
    admin_level and area in km^2 ('NaN' areas coerced to 0). The table
    name comes from config, not from the request, so .format() is safe.
    """
    lat = request.args.get('lat')
    lon = request.args.get('lon')
    cur = g.conn.cursor()
    # Note the (lon, lat) order: ST_Point takes x (longitude) first.
    cur.execute("select osm_id, name, admin_level, (case when ST_Area(geography(way)) = 'NaN' then 0 else ST_Area(geography(way))/1000000 end) as area_km from {table} where ST_Contains(way, ST_SetSRID(ST_Point(%s, %s), 4326)) order by admin_level desc, name asc;".format(table=config.OSM_TABLE), (lon, lat))
    result = []
    for rec in cur:
        b = { 'id': rec[0], 'name': rec[1], 'admin_level': rec[2], 'area': rec[3] }
        result.append(b)
    return jsonify(borders=result)
@app.route('/delete')
def delete_border():
    """Delete a border region by id. Rejected (405) in readonly mode.

    Rewritten with a parameterized f-string query in uppercase SQL for
    consistency with the sibling mutation endpoints (/disable, /enable,
    /rename); behavior is unchanged.
    """
    if config.READONLY:
        abort(405)
    region_id = int(request.args.get('id'))
    cur = g.conn.cursor()
    cur.execute(f"DELETE FROM {config.TABLE} WHERE id = %s", (region_id,))
    g.conn.commit()
    return jsonify(status='ok')
def get_predecessors(region_id):
    """Return the list of (id, name) tuples of all predecessors,
    starting from region_id itself up to the hierarchy root.

    :raises Exception: if region_id is absent from the table, or if a
                       cycle in parent links is detected (corrupt data
                       would otherwise loop forever).
    """
    predecessors = []
    table = config.TABLE
    cursor = g.conn.cursor()
    # Guard against cyclic parent links in corrupt data.
    seen_ids = set()
    while True:
        if region_id in seen_ids:
            raise Exception(f"Cycle in parent links at id = {region_id}")
        seen_ids.add(region_id)
        # Parameterized for consistency with the rest of the module
        # (the other queries bind values with %s rather than f-strings).
        cursor.execute(f"""
            SELECT id, name, parent_id
            FROM {table} WHERE id = %s
            """, (region_id,)
        )
        rec = cursor.fetchone()
        if not rec:
            raise Exception(f"No record in '{table}' table with id = {region_id}")
        predecessors.append(rec[0:2])
        parent_id = rec[2]
        if not parent_id:
            break
        region_id = parent_id
    return predecessors
@app.route('/divpreview')
def divide_preview():
    """Preview how a region would be divided, without modifying data.

    Query params: id, next_level, apply_to_similar, auto_divide and — when
    auto_divide is on — city_population_thr / cluster_population_thr.
    """
    region_id = int(request.args.get('id'))
    if not is_administrative_region(region_id):
        return jsonify(status=NON_ADMINISTRATIVE_REGION_ERROR)
    next_level = int(request.args.get('next_level'))
    apply_to_similar = (request.args.get('apply_to_similar') == 'true')
    region_ids = [region_id]
    if apply_to_similar:
        region_ids = get_similar_regions(region_id, only_leaves=True)
    auto_divide = (request.args.get('auto_divide') == 'true')
    if auto_divide:
        try:
            city_population_thr = int(request.args.get('city_population_thr'))
            cluster_population_thr = int(request.args.get('cluster_population_thr'))
        except (TypeError, ValueError):
            # Bug fix: int(None) raises TypeError when a threshold param is
            # absent; catching only ValueError produced a 500 instead of
            # the intended error response.
            return jsonify(status='Not a number in thresholds')
        return divide_into_clusters_preview(
            region_ids, next_level,
            (city_population_thr, cluster_population_thr))
    else:
        return divide_into_subregions_preview(region_ids, next_level)
def get_clusters_one(region_id, next_level, thresholds):
    """Return auto-split clusters of one region as GeoJSON features.

    Looks the splitting up in the autosplit table keyed by
    (region, city_population_thr, cluster_population_thr); computes it
    on demand via split_region if not cached yet.

    :param thresholds: (city_population_thr, cluster_population_thr) tuple
    """
    autosplit_table = config.AUTOSPLIT_TABLE
    cursor = g.conn.cursor()
    # Shared WHERE fragment: the same key identifies both the existence
    # check and the final fetch.
    where_clause = f"""
        osm_border_id = %s
        AND city_population_thr = %s
        AND cluster_population_thr = %s
        """
    splitting_sql_params = (region_id,) + thresholds
    cursor.execute(f"""
        SELECT id FROM {autosplit_table}
        WHERE {where_clause}
        """, splitting_sql_params)
    if cursor.rowcount == 0:
        # Not cached for these thresholds — compute and store the splitting.
        split_region(g.conn, region_id, next_level, thresholds)
    cursor.execute(f"""
        SELECT id, ST_AsGeoJSON(ST_SimplifyPreserveTopology(geom, 0.01)) as way
        FROM {autosplit_table}
        WHERE {where_clause}
        """, splitting_sql_params)
    clusters = []
    for rec in cursor:
        cluster = { 'type': 'Feature',
                    'geometry': json.loads(rec[1]),
                    'properties': {'osm_id': int(rec[0])}
                  }
        clusters.append(cluster)
    return clusters
@app.route('/divide')
def divide():
    """Actually divide a region into subregions or auto-computed clusters.

    Same parameters as /divpreview; rejected (405) in readonly mode.
    """
    if config.READONLY:
        abort(405)
    region_id = int(request.args.get('id'))
    if not is_administrative_region(region_id):
        return jsonify(status=NON_ADMINISTRATIVE_REGION_ERROR)
    next_level = int(request.args.get('next_level'))
    apply_to_similar = (request.args.get('apply_to_similar') == 'true')
    region_ids = [region_id]
    if apply_to_similar:
        region_ids = get_similar_regions(region_id, only_leaves=True)
    auto_divide = (request.args.get('auto_divide') == 'true')
    if auto_divide:
        try:
            city_population_thr = int(request.args.get('city_population_thr'))
            cluster_population_thr = int(request.args.get('cluster_population_thr'))
        except (TypeError, ValueError):
            # Bug fix: int(None) raises TypeError when a threshold param is
            # absent; catching only ValueError produced a 500 instead of
            # the intended error response.
            return jsonify(status='Not a number in thresholds')
        return divide_into_clusters(
            region_ids, next_level,
            (city_population_thr, cluster_population_thr))
    else:
        return divide_into_subregions(region_ids, next_level)
g.conn.cursor() + for region_id in region_ids: + cursor.execute(f"SELECT name FROM {table} WHERE id = %s", (region_id,)) + base_name = cursor.fetchone()[0] + + where_clause = f""" + osm_border_id = %s + AND city_population_thr = %s + AND cluster_population_thr = %s + """ + splitting_sql_params = (region_id,) + thresholds + cursor.execute(f""" + SELECT id FROM {autosplit_table} + WHERE {where_clause} + """, splitting_sql_params) + if cursor.rowcount == 0: + split_region(g.conn, region_id, next_level, thresholds) + + free_id = get_free_id() + counter = 0 + cursor.execute(f""" + SELECT id + FROM {autosplit_table} WHERE {where_clause} + """, splitting_sql_params) + for rec in cursor: + cluster_id = rec[0] + counter += 1 + name = f"{base_name}_{counter}" + insert_cursor.execute(f""" + INSERT INTO {table} (id, name, parent_id, geom, modified, count_k) + SELECT {free_id}, '{name}', osm_border_id, geom, now(), -1 + FROM {autosplit_table} WHERE id = %s AND {where_clause} + """, (cluster_id,) + splitting_sql_params) + free_id -= 1 + g.conn.commit() + return jsonify(status='ok') + +@app.route('/chop1') +def chop_largest_or_farthest(): + if config.READONLY: + abort(405) + name = request.args.get('name').encode('utf-8') + cur = g.conn.cursor() + cur.execute('select ST_NumGeometries(geom) from {} where name = %s;'.format(config.TABLE), (name,)) + res = cur.fetchone() + if not res or res[0] < 2: + return jsonify(status='border should have more than one outer ring') + cur.execute("""INSERT INTO {table} (name, disabled, modified, geom) + SELECT name, disabled, modified, geom from + ( + (WITH w AS (SELECT name, disabled, (ST_Dump(geom)).geom AS g FROM {table} WHERE name = %s) + (SELECT name||'_main' as name, disabled, now() as modified, g as geom, ST_Area(g) as a FROM w ORDER BY a DESC LIMIT 1) + UNION ALL + SELECT name||'_small' as name, disabled, now() as modified, ST_Collect(g) AS geom, ST_Area(ST_Collect(g)) as a + FROM (SELECT name, disabled, g, ST_Area(g) AS a FROM w ORDER BY 
a DESC OFFSET 1) ww + GROUP BY name, disabled) + ) x;""".format(table=config.TABLE), (name,)) + cur.execute('delete from {} where name = %s;'.format(config.TABLE), (name,)) + g.conn.commit() + return jsonify(status='ok') + +@app.route('/hull') +def draw_hull(): + if config.READONLY: + abort(405) + name = request.args.get('name').encode('utf-8') + cur = g.conn.cursor() + cur.execute('select ST_NumGeometries(geom) from {} where name = %s;'.format(config.TABLE), (name,)) + res = cur.fetchone() + if not res or res[0] < 2: + return jsonify(status='border should have more than one outer ring') + cur.execute('update {} set geom = ST_ConvexHull(geom) where name = %s;'.format(config.TABLE), (name,)) + g.conn.commit() + return jsonify(status='ok') + +@app.route('/fixcrossing') +def fix_crossing(): + if config.READONLY: + abort(405) + preview = request.args.get('preview') == '1' + region = request.args.get('region').encode('utf-8') + if region is None: + return jsonify(status='Please specify a region') + ids = request.args.get('ids') + if ids is None or len(ids) == 0: + return jsonify(status='Please specify a list of line ids') + ids = tuple(ids.split(',')) + cur = g.conn.cursor() + if preview: + cur.execute(""" + WITH lines as (SELECT ST_Buffer(ST_Collect(line), 0.002, 1) as g FROM {cross} WHERE id IN %s) + SELECT ST_AsGeoJSON(ST_Collect(ST_MakePolygon(er.ring))) FROM + ( + SELECT ST_ExteriorRing((ST_Dump(ST_Union(ST_Buffer(geom, 0.0), lines.g))).geom) as ring FROM {table}, lines WHERE name = %s + ) as er + """.format(table=config.TABLE, cross=config.CROSSING_TABLE), (ids, region)) + res = cur.fetchone() + if not res: + return jsonify(status='Failed to extend geometry') + f = { "type": "Feature", "properties": {}, "geometry": json.loads(res[0]) } + #return jsonify(type="FeatureCollection", features=[f]) + return jsonify(type="Feature", properties={}, geometry=json.loads(res[0])) + else: + cur.execute(""" + WITH lines as (SELECT ST_Buffer(ST_Collect(line), 0.002, 1) as g FROM 
{cross} WHERE id IN %s) + UPDATE {table} SET geom = res.g FROM + ( + SELECT ST_Collect(ST_MakePolygon(er.ring)) as g FROM + ( + SELECT ST_ExteriorRing((ST_Dump(ST_Union(ST_Buffer(geom, 0.0), lines.g))).geom) as ring FROM {table}, lines WHERE name = %s + ) as er + ) as res + WHERE name = %s + """.format(table=config.TABLE, cross=config.CROSSING_TABLE), (ids, region, region)) + cur.execute(""" + UPDATE {table} b SET geom = ST_Difference(b.geom, o.geom) + FROM {table} o + WHERE ST_Overlaps(b.geom, o.geom) + AND o.name = %s + """.format(table=config.TABLE), (region,)) + cur.execute("UPDATE {cross} SET processed = 1 WHERE id IN %s".format(cross=config.CROSSING_TABLE), (ids,)) + g.conn.commit() + return jsonify(status='ok') + + +@app.route('/backup') +def backup_do(): + if config.READONLY: + abort(405) + cur = g.conn.cursor() + cur.execute("SELECT to_char(now(), 'IYYY-MM-DD HH24:MI'), max(backup) from {};".format(config.BACKUP)) + (timestamp, tsmax) = cur.fetchone() + if timestamp == tsmax: + return jsonify(status='please try again later') + backup_table = config.BACKUP + table = config.TABLE + cur.execute(f""" + INSERT INTO {backup_table} + (backup, id, name, parent_id, geom, disabled, count_k, modified, cmnt) + SELECT %s, id, name, parent_id, geom, disabled, count_k, modified, cmnt + FROM {table} + """, (timestamp,) + ) + g.conn.commit() + return jsonify(status='ok') + +@app.route('/restore') +def backup_restore(): + if config.READONLY: + abort(405) + ts = request.args.get('timestamp') + cur = g.conn.cursor() + table = config.TABLE + backup_table = config.BACKUP + cur.execute(f"SELECT count(1) from {backup_table} WHERE backup = %s",(ts,)) + (count,) = cur.fetchone() + if count <= 0: + return jsonify(status='no such timestamp') + cur.execute(f'DELETE FROM {table}') + cur.execute(f""" + INSERT INTO {table} + (id, name, parent_id, geom, disabled, count_k, modified, cmnt) + SELECT id, name, parent_id, geom, disabled, count_k, modified, cmnt + FROM {backup_table} + WHERE 
backup = %s + """, (ts,) + ) + g.conn.commit() + return jsonify(status='ok') + +@app.route('/backlist') +def backup_list(): + cur = g.conn.cursor() + cur.execute("SELECT backup, count(1) from {} group by backup order by backup desc;".format(config.BACKUP)) + result = [] + for res in cur: + result.append({ 'timestamp': res[0], 'text': res[0], 'count': res[1] }) + # todo: count number of different objects for the last one + return jsonify(backups=result) + +@app.route('/backdelete') +def backup_delete(): + if config.READONLY: + abort(405) + ts = request.args.get('timestamp') + cur = g.conn.cursor() + cur.execute('SELECT count(1) from {} where backup = %s;'.format(config.BACKUP), (ts,)) + (count,) = cur.fetchone() + if count <= 0: + return jsonify(status='no such timestamp') + cur.execute('DELETE FROM {} WHERE backup = %s;'.format(config.BACKUP), (ts,)) + g.conn.commit() + return jsonify(status='ok') + +@app.route('/josm') +def make_osm(): + xmin = request.args.get('xmin') + xmax = request.args.get('xmax') + ymin = request.args.get('ymin') + ymax = request.args.get('ymax') + table = request.args.get('table') + if table in config.OTHER_TABLES: + table = config.OTHER_TABLES[table] + else: + table = config.TABLE + borders = fetch_borders( + table=table, + where_clause=f'geom && ST_MakeBox2D(ST_Point({xmin}, {ymin}),' + f'ST_Point({xmax}, {ymax}))' + ) + node_pool = { 'id': 1 } # 'lat_lon': id + regions = [] # { id: id, name: name, rings: [['outer', [ids]], ['inner', [ids]], ...] 
} + for border in borders: + geometry = border['geometry'] #json.loads(rec[2]) + rings = [] + if geometry['type'] == 'Polygon': + parse_polygon(node_pool, rings, geometry['coordinates']) + elif geometry['type'] == 'MultiPolygon': + for polygon in geometry['coordinates']: + parse_polygon(node_pool, rings, polygon) + if len(rings) > 0: + regions.append({ + 'id': abs(border['properties']['id']), + 'name': border['properties']['name'], + 'disabled': border['properties']['disabled'], + 'rings': rings + }) + + xml = '' + for latlon, node_id in node_pool.items(): + if latlon != 'id': + (lat, lon) = latlon.split() + xml = xml + ''.format(id=node_id, lat=lat, lon=lon) + + ways = {} # json: id + wrid = 1 + for region in regions: + w1key = ring_hash(region['rings'][0][1]) + if not config.JOSM_FORCE_MULTI and len(region['rings']) == 1 and w1key not in ways: + # simple case: a way + ways[w1key] = region['id'] + xml = xml + ''.format(id=region['id']) + xml = xml + ''.format(quoteattr(region['name'])) + if region['disabled']: + xml = xml + '' + for nd in region['rings'][0][1]: + xml = xml + ''.format(ref=nd) + xml = xml + '' + else: + # multipolygon + rxml = ''.format(id=region['id']) + wrid = wrid + 1 + rxml = rxml + '' + rxml = rxml + ''.format(quoteattr(region['name'])) + if region['disabled']: + rxml = rxml + '' + for ring in region['rings']: + wkey = ring_hash(ring[1]) + if wkey in ways: + # already have that way + rxml = rxml + ''.format(ref=ways[wkey], role=ring[0]) + else: + ways[wkey] = wrid + xml = xml + ''.format(id=wrid) + rxml = rxml + ''.format(ref=wrid, role=ring[0]) + for nd in ring[1]: + xml = xml + ''.format(ref=nd) + xml = xml + '' + wrid = wrid + 1 + xml = xml + rxml + '' + xml = xml + '' + return Response(xml, mimetype='application/x-osm+xml') + +@app.route('/josmbord') +def josm_borders_along(): + name = request.args.get('name') + line = request.args.get('line') + cur = g.conn.cursor() + # select all outer osm borders inside a buffer of the given line + 
cur.execute(""" + with linestr as ( + select ST_Intersection(geom, ST_Buffer(ST_GeomFromText(%s, 4326), 0.2)) as line + from {table} where name = %s + ), osmborders as ( + select (ST_Dump(way)).geom as g from {osm}, linestr where ST_Intersects(line, way) + ) + select ST_AsGeoJSON((ST_Dump(ST_LineMerge(ST_Intersection(ST_Collect(ST_ExteriorRing(g)), line)))).geom) from osmborders, linestr group by line + """.format(table=config.TABLE, osm=config.OSM_TABLE), (line, name)) + + node_pool = { 'id': 1 } # 'lat_lon': id + lines = [] + for rec in cur: + geometry = json.loads(rec[0]) + if geometry['type'] == 'LineString': + nodes = parse_linestring(node_pool, geometry['coordinates']) + elif geometry['type'] == 'MultiLineString': + nodes = [] + for line in geometry['coordinates']: + nodes.extend(parse_linestring(node_pool, line)) + if len(nodes) > 0: + lines.append(nodes) + + xml = '' + for latlon, node_id in node_pool.items(): + if latlon != 'id': + (lat, lon) = latlon.split() + xml = xml + ''.format(id=node_id, lat=lat, lon=lon) + + wrid = 1 + for line in lines: + xml = xml + ''.format(id=wrid) + for nd in line: + xml = xml + ''.format(ref=nd) + xml = xml + '' + wrid = wrid + 1 + xml = xml + '' + return Response(xml, mimetype='application/x-osm+xml') + +def quoteattr(value): + value = value.replace('&', '&').replace('>', '>').replace('<', '<') + value = value.replace('\n', ' ').replace('\r', ' ').replace('\t', ' ') + value = value.replace('"', '"') + return '"{}"'.format(value) + +def ring_hash(refs): + #return json.dumps(refs) + return hash(tuple(sorted(refs))) + +def parse_polygon(node_pool, rings, polygon): + role = 'outer' + for ring in polygon: + rings.append([role, parse_linestring(node_pool, ring)]) + role = 'inner' + +def parse_linestring(node_pool, linestring): + nodes = [] + for lonlat in linestring: + ref = '{} {}'.format(lonlat[1], lonlat[0]) + if ref in node_pool: + node_id = node_pool[ref] + else: + node_id = node_pool['id'] + node_pool[ref] = node_id + 
node_pool['id'] = node_id + 1 + nodes.append(node_id) + return nodes + +def append_way(way, way2): + another = list(way2) # make copy to not modify original list + if way[0] == way[-1] or another[0] == another[-1]: + return None + if way[0] == another[0] or way[-1] == another[-1]: + another.reverse() + if way[-1] == another[0]: + result = list(way) + result.extend(another[1:]) + return result + elif way[0] == another[-1]: + result = another + result.extend(way) + return result + return None + +def way_to_wkt(node_pool, refs): + coords = [] + for nd in refs: + coords.append('{} {}'.format(node_pool[nd]['lon'], node_pool[nd]['lat'])) + return '({})'.format(','.join(coords)) + +def import_error(msg): + if config.IMPORT_ERROR_ALERT: + return ''.format(msg) + else: + return jsonify(status=msg) + +def extend_bbox(bbox, x, y=None): + if y is not None: + x = [x, y, x, y] + bbox[0] = min(bbox[0], x[0]) + bbox[1] = min(bbox[1], x[1]) + bbox[2] = max(bbox[2], x[2]) + bbox[3] = max(bbox[3], x[3]) + +def bbox_contains(outer, inner): + return outer[0] <= inner[0] and outer[1] <= inner[1] and outer[2] >= inner[2] and outer[3] >= inner[3] + +@app.route('/import', methods=['POST']) +def import_osm(): + if config.READONLY: + abort(405) + if not LXML: + return import_error('importing is disabled due to absent lxml library') + f = request.files['file'] + if not f: + return import_error('failed upload') + try: + tree = etree.parse(f) + except: + return import_error('malformed xml document') + if not tree: + return import_error('bad document') + root = tree.getroot() + + # read nodes and ways + nodes = {} # id: { lat, lon, modified } + for node in root.iter('node'): + if node.get('action') == 'delete': + continue + modified = int(node.get('id')) < 0 or node.get('action') == 'modify' + nodes[node.get('id')] = { 'lat': float(node.get('lat')), 'lon': float(node.get('lon')), 'modified': modified } + ways = {} # id: { name, disabled, modified, bbox, nodes, used } + for way in 
root.iter('way'): + if way.get('action') == 'delete': + continue + way_nodes = [] + bbox = [1e4, 1e4, -1e4, -1e4] + modified = int(way.get('id')) < 0 or way.get('action') == 'modify' + for node in way.iter('nd'): + ref = node.get('ref') + if not ref in nodes: + return import_error('missing node {} in way {}'.format(ref, way.get('id'))) + way_nodes.append(ref) + if nodes[ref]['modified']: + modified = True + extend_bbox(bbox, float(nodes[ref]['lon']), float(nodes[ref]['lat'])) + name = None + disabled = False + for tag in way.iter('tag'): + if tag.get('k') == 'name': + name = tag.get('v') + if tag.get('k') == 'disabled' and tag.get('v') == 'yes': + disabled = True + if len(way_nodes) < 2: + return import_error('way with less than 2 nodes: {}'.format(way.get('id'))) + ways[way.get('id')] = { 'name': name, 'disabled': disabled, 'modified': modified, 'bbox': bbox, 'nodes': way_nodes, 'used': False } + + # finally we are constructing regions: first, from multipolygons + regions = {} # /*name*/ id: { modified, disabled, wkt, type: 'r'|'w' } + for rel in root.iter('relation'): + if rel.get('action') == 'delete': + continue + osm_id = int(rel.get('id')) + modified = osm_id < 0 or rel.get('action') == 'modify' + name = None + disabled = False + multi = False + inner = [] + outer = [] + for tag in rel.iter('tag'): + if tag.get('k') == 'name': + name = tag.get('v') + if tag.get('k') == 'disabled' and tag.get('v') == 'yes': + disabled = True + if tag.get('k') == 'type' and tag.get('v') == 'multipolygon': + multi = True + if not multi: + return import_error('found non-multipolygon relation: {}'.format(rel.get('id'))) + #if not name: + # return import_error('relation {} has no name'.format(rel.get('id'))) + #if name in regions: + # return import_error('multiple relations with the same name {}'.format(name)) + for member in rel.iter('member'): + ref = member.get('ref') + if not ref in ways: + return import_error('missing way {} in relation {}'.format(ref, rel.get('id'))) + if 
ways[ref]['modified']: + modified = True + role = member.get('role') + if role == 'outer': + outer.append(ways[ref]) + elif role == 'inner': + inner.append(ways[ref]) + else: + return import_error('unknown role {} in relation {}'.format(role, rel.get('id'))) + ways[ref]['used'] = True + # after parsing ways, so 'used' flag is set + if rel.get('action') == 'delete': + continue + if len(outer) == 0: + return import_error('relation {} has no outer ways'.format(rel.get('id'))) + # reconstruct rings in multipolygon + for multi in (inner, outer): + i = 0 + while i < len(multi): + way = multi[i]['nodes'] + while way[0] != way[-1]: + productive = False + j = i + 1 + while way[0] != way[-1] and j < len(multi): + new_way = append_way(way, multi[j]['nodes']) + if new_way: + multi[i] = dict(multi[i]) + multi[i]['nodes'] = new_way + way = new_way + if multi[j]['modified']: + multi[i]['modified'] = True + extend_bbox(multi[i]['bbox'], multi[j]['bbox']) + del multi[j] + productive = True + else: + j = j + 1 + if not productive: + return import_error('unconnected way in relation {}'.format(rel.get('id'))) + i = i + 1 + # check for 2-node rings + for multi in (outer, inner): + for way in multi: + if len(way['nodes']) < 3: + return import_error('Way in relation {} has only {} nodes'.format(rel.get('id'), len(way['nodes']))) + # sort inner and outer rings + polygons = [] + for way in outer: + rings = [way_to_wkt(nodes, way['nodes'])] + for i in range(len(inner)-1, -1, -1): + if bbox_contains(way['bbox'], inner[i]['bbox']): + rings.append(way_to_wkt(nodes, inner[i]['nodes'])) + del inner[i] + polygons.append('({})'.format(','.join(rings))) + regions[osm_id] = { + 'id': osm_id, + 'type': 'r', + 'name': name, + 'modified': modified, + 'disabled': disabled, + 'wkt': 'MULTIPOLYGON({})'.format(','.join(polygons)) + } + + # make regions from unused named ways + for wid, w in ways.items(): + if w['used']: + continue + if not w['name']: + #continue + return import_error('unused in 
multipolygon way with no name: {}'.format(wid)) + if w['nodes'][0] != w['nodes'][-1]: + return import_error('non-closed unused in multipolygon way: {}'.format(wid)) + if len(w['nodes']) < 3: + return import_error('way {} has {} nodes'.format(wid, len(w['nodes']))) + #if w['name'] in regions: + # return import_error('way {} has the same name as other way/multipolygon'.format(wid)) + regions[wid] = { + 'id': int(wid), + 'type': 'w', + 'name': w['name'], + 'modified': w['modified'], + 'disabled': w['disabled'], + 'wkt': 'POLYGON({})'.format(way_to_wkt(nodes, w['nodes'])) + } + + # submit modifications to the database + cur = g.conn.cursor() + added = 0 + updated = 0 + free_id = None + for r_id, region in regions.items(): + if not region['modified']: + continue + try: + region_id = create_or_update_region(region, free_id) + except psycopg2.Error as e: + exc_type, exc_value, exc_traceback = sys.exc_info() + traceback.print_exception(exc_type, exc_value, exc_traceback) + return import_error('Database error. 
See server log for details') + if region_id < 0: + added += 1 + if free_id is None: + free_id = region_id - 1 + else: + free_id -= 1 + else: + updated += 1 + g.conn.commit() + return jsonify(regions=len(regions), added=added, updated=updated) + +def get_free_id(): + cursor = g.conn.cursor() + table = config.TABLE + cursor.execute(f"SELECT min(id) FROM {table} WHERE id < -1000000000") + min_id = cursor.fetchone()[0] + free_id = min_id - 1 if min_id else -1_000_000_001 + return free_id + +def assign_region_to_lowerst_parent(region_id): + pot_parents = find_potential_parents(region_id) + if pot_parents: + # potential_parents are sorted by area ascending + parent_id = pot_parents[0]['properties']['id'] + cursor = g.conn.cursor() + table = config.TABLE + cursor.execute(f""" + UPDATE {table} + SET parent_id = %s + WHERE id = %s + """, (parent_id, region_id) + ) + return True + return False + +def create_or_update_region(region, free_id): + cursor = g.conn.cursor() + table = config.TABLE + osm_table = config.OSM_TABLE + if region['id'] < 0: + if not free_id: + free_id = get_free_id() + region_id = free_id + + cursor.execute(f""" + INSERT INTO {table} + (id, name, disabled, geom, modified, count_k) + VALUES (%s, %s, %s, ST_GeomFromText(%s, 4326), now(), -1) + """, (region_id, region['name'], region['disabled'], region['wkt']) + ) + assign_region_to_lowerst_parent(region_id) + return region_id + else: + cursor.execute(f"SELECT count(1) FROM {table} WHERE id = %s", + (-region['id'],) + ) + rec = cursor.fetchone() + if rec[0] == 0: + raise Exception("Can't find border ({region['id']}) for update") + cursor.execute(f""" + UPDATE {table} + SET disabled = %s, + name = %s, + modified = now(), + count_k = -1, + geom = ST_GeomFromText(%s, 4326) + WHERE id = %s + """, (region['disabled'], region['name'], + region['wkt'], -region['id']) + ) + return region['id'] + +def find_potential_parents(region_id): + table = config.TABLE + osm_table = config.OSM_TABLE + p_geogr = 
"geography(p.geom)" + c_geogr = "geography(c.geom)" + cursor = g.conn.cursor() + query = f""" + SELECT + p.id, + p.name, + (SELECT admin_level FROM {osm_table} WHERE osm_id = p.id) admin_level, + ST_AsGeoJSON(ST_SimplifyPreserveTopology(p.geom, 0.01)) geometry + FROM {table} p, {table} c + WHERE c.id = %s + AND ST_Intersects(p.geom, c.geom) + AND ST_Area({p_geogr}) > ST_Area({c_geogr}) + AND ST_Area(ST_Intersection({p_geogr}, {c_geogr})) > + 0.5 * ST_Area({c_geogr}) + ORDER BY ST_Area({p_geogr}) + """ + cursor.execute(query, (region_id,)) + parents = [] + for rec in cursor: + props = { + 'id': rec[0], + 'name': rec[1], + 'admin_level': rec[2], + } + feature = {'type': 'Feature', + 'geometry': json.loads(rec[3]), + 'properties': props + } + parents.append(feature) + return parents + +@app.route('/potential_parents') +def potential_parents(): + region_id = int(request.args.get('id')) + parents = find_potential_parents(region_id) + return jsonify( + status='ok', + parents=parents + #geojson={'type':'FeatureCollection', 'features': borders} + ) + + +@app.route('/poly') +def export_poly(): + table = request.args.get('table') + if table in config.OTHER_TABLES: + table = config.OTHER_TABLES[table] + else: + table = config.TABLE + + fetch_borders_args = {'table': table, 'only_leaves': True} + + if 'xmin' in request.args: + xmin = request.args.get('xmin') + xmax = request.args.get('xmax') + ymin = request.args.get('ymin') + ymax = request.args.get('ymax') + fetch_borders_args['where_clause'] = ( + f'geom && ST_MakeBox2D(ST_Point({xmin}, {ymin}),' + f'ST_Point({xmax}, {ymax}))' + ) + borders = fetch_borders(**fetch_borders_args) + + memory_file = io.BytesIO() + with zipfile.ZipFile(memory_file, 'w', zipfile.ZIP_DEFLATED) as zf: + for border in borders: + geometry = border['geometry'] + polygons = ([geometry['coordinates']] + if geometry['type'] == 'Polygon' + else geometry['coordinates']) + # sanitize name, src: http://stackoverflow.com/a/295466/1297601 + name = 
border['properties']['name'] or str(-border['properties']['id']) + fullname = get_region_full_name(border['properties']['id']) + filename = unidecode(fullname) + filename = re.sub('[^\w _-]', '', filename).strip() + filename = filename + '.poly' + + poly = io.BytesIO() + poly.write(name.encode() + b'\n') + pcounter = 1 + for polygon in polygons: + outer = True + for ring in polygon: + poly.write('{inner_mark}{name}\n'.format( + inner_mark=('' if outer else '!'), + name=(pcounter if outer else -pcounter) + ).encode()) + pcounter = pcounter + 1 + for coord in ring: + poly.write('\t{:E}\t{:E}\n'.format(coord[0], coord[1]).encode()) + poly.write(b'END\n') + outer = False + poly.write(b'END\n') + zf.writestr(filename, poly.getvalue()) + poly.close() + memory_file.seek(0) + return send_file(memory_file, attachment_filename='borders.zip', as_attachment=True) + +@app.route('/stat') +def statistics(): + group = request.args.get('group') + table = request.args.get('table') + if table in config.OTHER_TABLES: + table = config.OTHER_TABLES[table] + else: + table = config.TABLE + cur = g.conn.cursor() + if group == 'total': + cur.execute('select count(1) from borders;') + return jsonify(total=cur.fetchone()[0]) + elif group == 'sizes': + cur.execute("select name, count_k, ST_NPoints(geom), ST_AsGeoJSON(ST_Centroid(geom)), (case when ST_Area(geography(geom)) = 'NaN' then 0 else ST_Area(geography(geom)) / 1000000 end) as area, disabled, (case when cmnt is null or cmnt = '' then false else true end) as cmnt from {};".format(table)) + result = [] + for res in cur: + coord = json.loads(res[3])['coordinates'] + result.append({ 'name': res[0], 'lat': coord[1], 'lon': coord[0], 'size': res[1], 'nodes': res[2], 'area': res[4], 'disabled': res[5], 'commented': res[6] }) + return jsonify(regions=result) + elif group == 'topo': + cur.execute("select name, count(1), min(case when ST_Area(geography(g)) = 'NaN' then 0 else ST_Area(geography(g)) end) / 1000000, sum(ST_NumInteriorRings(g)), 
ST_AsGeoJSON(ST_Centroid(ST_Collect(g))) from (select name, (ST_Dump(geom)).geom as g from {}) a group by name;".format(table)) + result = [] + for res in cur: + coord = json.loads(res[4])['coordinates'] + result.append({ 'name': res[0], 'outer': res[1], 'min_area': res[2], 'inner': res[3], 'lon': coord[0], 'lat': coord[1] }) + return jsonify(regions=result) + return jsonify(status='wrong group id') + + +@app.route('/border') +def border(): + region_id = int(request.args.get('id')) + table = config.TABLE + simplify_level = request.args.get('simplify') + simplify = simplify_level_to_postgis_value(simplify_level) + borders = fetch_borders( + table=table, + simplify=simplify, + only_leaves=False, + where_clause=f'id = {region_id}' + ) + if not borders: + return jsonify(status=f'No border with id={region_id} found') + return jsonify(status='ok', geojson=borders[0]) + +@app.route('/start_over') +def start_over(): + try: + create_countries_initial_structure(g.conn) + except CountryStructureException as e: + return jsonify(status=str(e)) + + autosplit_table = config.AUTOSPLIT_TABLE + cursor = g.conn.cursor() + cursor.execute(f"DELETE FROM {autosplit_table}") + g.conn.commit() + return jsonify(status='ok') + + +if __name__ == '__main__': + app.run(threaded=True) diff --git a/server/borders_api.wsgi b/web/app/borders_api.wsgi similarity index 100% rename from server/borders_api.wsgi rename to web/app/borders_api.wsgi diff --git a/web/app/borders_daemon.py b/web/app/borders_daemon.py new file mode 100755 index 0000000..bd991c9 --- /dev/null +++ b/web/app/borders_daemon.py @@ -0,0 +1,137 @@ +#!/usr/bin/python3 +import os, sys +import time +import logging +import psycopg2 +import config +try: + from daemon import runner + HAS_DAEMON = True +except: + HAS_DAEMON = False + + +table = config.TABLE + +CONNECT_WAIT_INTERVAL = 5 +CHECK_BORDERS_INTERVAL = 10 + +# For geometries crossing 180th meridian envelope area calculates to +# very small values. 
Examples area 'United States', 'Chukotka Autonomous Okrug', +# 'Alaska', 'Tuvalu'. For those borders area > env_area. +# Limit on envelope area is imposed due to countries whose bbox covers half the world +# like France or Netherlands with oversea territories for which tile counting +# lasts too long. +no_count_queries = [ + f""" + SELECT id, name + FROM + ( SELECT id, name, + ST_Area(geography(geom))/1000000.0 area, + ST_Area(geography(ST_Envelope(geom)))/1000000.0 env_area + FROM {table} + WHERE {condition} + ) q + WHERE area != 'NaN'::double precision + AND area <= env_area + AND env_area < 5000000 + -- ORDER BY area -- makes the query too much slower (why?) + LIMIT 1 + """ for condition in ('count_k < 0', 'count_k IS NULL') +] + +class App(): + def __init__(self): + self.stdin_path = '/dev/null' + self.stdout_path = '/dev/tty' + self.stderr_path = '/dev/tty' + self.pidfile_path = '/var/log/borders-daemon.pid' + self.pidfile_timeout = 5 + self.conn = None + + def get_connection(self): + while True: + try: + if self.conn is None or self.conn.closed: + self.conn = psycopg2.connect(config.CONNECTION) + self.conn.autocommit = True + + with self.conn.cursor() as cur: + cur.execute(f"SELECT count_k FROM {config.TABLE} LIMIT 1") + + return self.conn + except psycopg2.Error: + try: + self.conn.close() + except: + pass + time.sleep(CONNECT_WAIT_INTERVAL) + + + def process(self, region_id, region_name): + msg = f'Processing {region_name} ({region_id})' + logger.info(msg) + try: + f = open(config.DAEMON_STATUS_PATH, 'w') + f.write(msg) + f.close() + except Exception as e: + logger.error(str(e)) + pass + + with self.get_connection().cursor() as cur: + cur.execute(f""" + UPDATE {table} + SET count_k = n.count + FROM (SELECT coalesce(sum(t.count), 0) AS count + FROM {table} b, tiles t + WHERE b.id = %s AND ST_Intersects(b.geom, t.tile) + ) AS n + WHERE id = %s + """, (region_id, region_id) + ) + try: + f = open(config.DAEMON_STATUS_PATH, 'w') + f.close() + except Exception as 
e: + logger.error(str(e)) + pass + + def find_region(self): + with self.get_connection().cursor() as cur: + cur.execute(no_count_queries[0]) + res = cur.fetchone() + if not res: + cur.execute(no_count_queries[1]) + res = cur.fetchone() + return res if res else (None, None) + + def run(self): + while True: + try: + region_id, region_name = self.find_region() + if region_id: + self.process(region_id, region_name) + else: + time.sleep(CHECK_BORDERS_INTERVAL) + except: + time.sleep(CHECK_BORDERS_INTERVAL) + +def init_logger(): + logger = logging.getLogger("borders-daemon") + logger.setLevel(logging.INFO) + formatter = logging.Formatter("%(asctime)s [%(levelname)s] %(message)s") + handler = logging.FileHandler("/var/log/borders-daemon.log") + #handler = logging.StreamHandler() + handler.setFormatter(formatter) + logger.addHandler(handler) + return logger + +if __name__ == '__main__': + app = App() + logger = init_logger() + if not HAS_DAEMON or (len(sys.argv) > 1 and sys.argv[1] == 'run'): + app.run() + else: + r = runner.DaemonRunner(app) + r.do_action() diff --git a/server/config.py b/web/app/config.py similarity index 59% rename from server/config.py rename to web/app/config.py index 70704fc..13e5cec 100644 --- a/server/config.py +++ b/web/app/config.py @@ -1,18 +1,22 @@ # postgresql connection string -CONNECTION = 'dbname=borders' +CONNECTION = 'dbname=borders user=borders password=borders host=dbhost port=5432' # passed to flask.Debug -DEBUG = False +DEBUG = True # if the main table is read-only -READONLY = False +READONLY = False # main table name TABLE = 'borders' # from where OSM borders are imported OSM_TABLE = 'osm_borders' -# tables with borders for reference -OTHER_TABLES = { 'old': 'old_borders' } +# transit table for autosplitting results +AUTOSPLIT_TABLE = 'splitting' +## tables with borders for reference +OTHER_TABLES = { + #'old': 'old_borders' +} # backup table BACKUP = 'borders_backup' -# table with crossing lines +## table with crossing lines 
CROSSING_TABLE = 'crossing' # area of an island for it to be considered small SMALL_KM2 = 10 @@ -21,4 +25,4 @@ JOSM_FORCE_MULTI = True # alert instead of json on import error IMPORT_ERROR_ALERT = False # file to which daemon writes the name of currently processed region -DAEMON_STATUS_PATH = '/var/www/html/borders-daemon-status.txt' +DAEMON_STATUS_PATH = '/var/tmp/borders-daemon-status.txt' diff --git a/web/app/countries_structure.py b/web/app/countries_structure.py new file mode 100644 index 0000000..faed6cf --- /dev/null +++ b/web/app/countries_structure.py @@ -0,0 +1,417 @@ +import itertools + +import config + + +table = config.TABLE +osm_table = config.OSM_TABLE +autosplit_table = config.AUTOSPLIT_TABLE + + +# admin_level => list of countries which should be initially divided at one admin level +unilevel_countries = { + 2: [ + 'Afghanistan', + 'Albania', + 'Algeria', + 'Andorra', + 'Angola', + 'Antigua and Barbuda', + 'Armenia', + 'Australia', # need to be divided at level 4 but has many small islands of level 4 + 'Azerbaijan', # has 2 non-covering 3-level regions + 'Bahrain', + 'Barbados', + 'Belize', + 'Benin', + 'Bermuda', + 'Bhutan', + 'Botswana', + 'British Sovereign Base Areas', # ! 
include into Cyprus + 'British Virgin Islands', + 'Bulgaria', + 'Burkina Faso', + 'Burundi', + 'Cambodia', + 'Cameroon', + 'Cape Verde', + 'Central African Republic', + 'Chad', + 'Chile', + 'Colombia', + 'Comoros', + 'Congo-Brazzaville', # BUG whith autodivision at level 4 + 'Cook Islands', + 'Costa Rica', + 'Croatia', # next level = 6 + 'Cuba', + 'Cyprus', + "Côte d'Ivoire", + 'Democratic Republic of the Congo', + 'Djibouti', + 'Dominica', + 'Dominican Republic', + 'East Timor', + 'Ecuador', + 'Egypt', + 'El Salvador', + 'Equatorial Guinea', + 'Eritrea', + 'Estonia', + 'Eswatini', + 'Ethiopia', + 'Falkland Islands', + 'Faroe Islands', + 'Federated States of Micronesia', + 'Fiji', + 'Gabon', + 'Georgia', + 'Ghana', + 'Gibraltar', + 'Greenland', + 'Grenada', + 'Guatemala', + 'Guernsey', + 'Guinea', + 'Guinea-Bissau', + 'Guyana', + 'Haiti', + 'Honduras', + 'Iceland', + 'Indonesia', + 'Iran', + 'Iraq', + 'Isle of Man', + 'Israel', # ! don't forget to separate Jerusalem + 'Jamaica', + 'Jersey', + 'Jordan', + 'Kazakhstan', + 'Kenya', # ! level 3 doesn't cover the whole country + 'Kiribati', + 'Kosovo', + 'Kuwait', + 'Kyrgyzstan', + 'Laos', + 'Latvia', + 'Lebanon', + 'Liberia', + 'Libya', + 'Liechtenstein', + 'Lithuania', + 'Luxembourg', + 'Madagascar', + 'Malaysia', + 'Maldives', + 'Mali', + 'Malta', + 'Marshall Islands', + 'Martinique', + 'Mauritania', + 'Mauritius', + 'Mexico', + 'Moldova', + 'Monaco', + 'Mongolia', + 'Montenegro', + 'Montserrat', + 'Mozambique', + 'Myanmar', + 'Namibia', + 'Nauru', + 'Nicaragua', + 'Niger', + 'Nigeria', + 'Niue', + 'North Korea', + 'North Macedonia', + 'Oman', + 'Palau', + # ! 
'Palestina' is not a country in OSM - need make an mwm + 'Panama', + 'Papua New Guinea', + 'Peru', # need split-merge + 'Philippines', # split at level 3 and merge or not merte + 'Qatar', + 'Romania', # need split-merge + 'Rwanda', + 'Saint Helena, Ascension and Tristan da Cunha', + 'Saint Kitts and Nevis', + 'Saint Lucia', + 'Saint Vincent and the Grenadines', + 'San Marino', + 'Samoa', + 'Saudi Arabia', + 'Senegal', + 'Seychelles', + 'Sierra Leone', + 'Singapore', + 'Slovakia', # ! split at level 3 then 4, and add Bratislava region (4) + 'Slovenia', + 'Solomon Islands', + 'Somalia', + 'South Georgia and the South Sandwich Islands', + 'South Korea', + 'South Sudan', + 'South Ossetia', # ! don't forget to divide from Georgia + 'Sri Lanka', + 'Sudan', + 'São Tomé and Príncipe', + 'Suriname', + 'Switzerland', + 'Syria', + 'Taiwan', + 'Tajikistan', + 'Thailand', + 'The Bahamas', + 'The Gambia', + 'Togo', + 'Tokelau', + 'Tonga', + 'Trinidad and Tobago', + 'Tunisia', + 'Turkmenistan', + 'Turks and Caicos Islands', + 'Tuvalu', + 'United Arab Emirate', + 'Uruguay', + 'Uzbekistan', + 'Vanuatu', + 'Venezuela', # level 3 not comprehensive + 'Vietnam', + # ! don't forget 'Wallis and Futuna', belongs to France + 'Yemen', + 'Zambia', + 'Zimbabwe', + ], + 3: [ + 'Malawi', + 'Nepal', # ! one region is lost after division + 'Pakistan', + 'Paraguay', + 'Tanzania', + 'Turkey', + 'Uganda', + ], + 4: [ + 'Austria', + 'Bangladesh', + 'Belarus', # maybe need merge capital region with the province + 'Belgium', # maybe need merge capital region into encompassing province + 'Bolivia', + 'Bosnia and Herzegovina', # other levels - 5, 6, 7 - are incomplete. + 'Canada', + 'China', # ! don't forget about Macau and Hong Kong of level 3 not covered by level 4 + 'Denmark', + 'Greece', # ! has one small 3-level subregion! + 'Hungary', # maybe multilevel division at levels [4, 5] ? + 'India', + 'Italy', + 'Japan', # ? 
About 50 4-level subregions, some of which requires further division + 'Morocco', # ! not all regions appear after substitution with level 4 + 'New Zealand', # ! don't forget islands to the north and south + 'Norway', + 'Poland', # 380(!) subregions of AL=6 + 'Portugal', + 'Russia', + 'Serbia', + 'South Africa', + 'Spain', + 'Ukraine', + 'United States', + ], + 5: [ + 'Ireland', # ! 5-level don't cover the whole country + ], + 6: [ + 'Czechia', + ] +} + +# Country name => list of admin levels to which it should be initially divided. +# 'Germany': [4, 5] implies that the country is divided at level 4 at first, then all +# 4-level subregions are divided into subregions of level 5 (if any) +multilevel_countries = { + 'Brazil': [3, 4], + 'Finland': [3, 6], # [3,5,6] in more fresh data? # division by level 6 seems ideal + 'France': [3, 4], + 'Germany': [4, 5], # not the whole country is covered by units of AL=5 + 'Netherlands': [3, 4], # there are carribean lands of level both 3 and 4 + 'Sweden': [3, 4], # division by level 4 seems ideal + 'United Kingdom': [4, 5], # level 5 is necessary but not comprehensive + +} + +country_initial_levels = dict(itertools.chain( + ((country, ([level] if level > 2 else [])) + for level, countries in unilevel_countries.items() + for country in countries), + multilevel_countries.items() +)) + + +class CountryStructureException(Exception): + pass + + +def _clear_borders(conn): + cursor = conn.cursor() + cursor.execute(f"DELETE FROM {table}") + conn.commit() + + +def _find_subregions(conn, osm_ids, next_level, parents, names): + """Return subregions of level 'next_level' for regions with osm_ids.""" + cursor = conn.cursor() + parent_osm_ids = ','.join(str(x) for x in osm_ids) + cursor.execute(f""" + SELECT b.osm_id, b.name, subb.osm_id, subb.name + FROM {osm_table} b, {osm_table} subb + WHERE subb.admin_level=%s + AND b.osm_id IN ({parent_osm_ids}) + AND ST_Contains(b.way, subb.way) + """, + (next_level,) + ) + + # parent_osm_id => 
[(osm_id, name), (osm_id, name), ...] + subregion_ids = [] + + for rec in cursor: + parent_osm_id = rec[0] + osm_id = rec[2] + parents[osm_id] = parent_osm_id + name = rec[3] + names[osm_id] = name + subregion_ids.append(osm_id) + return subregion_ids + + +def _create_regions(conn, osm_ids, parents, names): + if not osm_ids: + return + osm_ids = list(osm_ids) # to ensure order + cursor = conn.cursor() + sql_values = ','.join( + f'({osm_id},' + '%s,' + f'(SELECT way FROM {osm_table} WHERE osm_id={osm_id}),' + f'{parents[osm_id] or "NULL"},' + 'now())' + for osm_id in osm_ids + ) + #print(f"create regions with osm_ids={osm_ids}") + #print(f"names={tuple(names[osm_id] for osm_id in osm_ids)}") + #print(f"all parents={parents}") + cursor.execute(f""" + INSERT INTO {table} (id, name, geom, parent_id, modified) + VALUES {sql_values} + """, tuple(names[osm_id] for osm_id in osm_ids) + ) + + +def _make_country_structure(conn, country_osm_id): + names = {} # osm_id => osm name + parents = {} # osm_id => parent_osm_id + + country_name = _get_osm_border_name_by_osm_id(conn, country_osm_id) + names[country_osm_id] = country_name + parents[country_osm_id] = None + + _create_regions(conn, [country_osm_id], parents, names) + + if country_initial_levels.get(country_name): + admin_levels = country_initial_levels[country_name] + prev_admin_levels = [2] + admin_levels[:-1] + prev_region_ids = [country_osm_id] + + for admin_level, prev_level in zip(admin_levels, prev_admin_levels): + if not prev_region_ids: + raise CountryStructureException( + f"Empty prev_region_ids at {country_name}, " + f"AL={admin_level}, prev-AL={prev_level}" + ) + subregion_ids = _find_subregions(conn, prev_region_ids, + admin_level, parents, names) + _create_regions(conn, subregion_ids, parents, names) + prev_region_ids = subregion_ids + + +def create_countries_initial_structure(conn): + _clear_borders(conn) + cursor = conn.cursor() + cursor.execute(f""" + SELECT osm_id, name + FROM {osm_table} + WHERE 
admin_level = 2 + """ + # and name in --('Germany', 'Luxembourg', 'Austria') + # ({','.join(f"'{c}'" for c in country_initial_levels.keys())}) + #""" + ) + for rec in cursor: + _make_country_structure(conn, rec[0]) + conn.commit() + +def _get_osm_border_name_by_osm_id(conn, osm_id): + cursor = conn.cursor() + cursor.execute(f""" + SELECT name FROM {osm_table} + WHERE osm_id = %s + """, (osm_id,)) + rec = cursor.fetchone() + if not rec: + raise CountryStructureException(f'Not found region with osm_id="{osm_id}"') + return rec[0] + + +def _get_country_osm_id_by_name(conn, name): + cursor = conn.cursor() + cursor.execute(f""" + SELECT osm_id FROM {osm_table} + WHERE admin_level = 2 AND name = %s + """, (name,)) + row_count = cursor.rowcount + if row_count > 1: + raise CountryStructureException(f'More than one country "{name}"') + rec = cursor.fetchone() + if not rec: + raise CountryStructureException(f'Not found country "{name}"') + return int(rec[0]) + + + +splitting = [ + # large region name, admin_level (2 in most cases), admin_level to split'n'merge, into subregions of what admin_level + ('Germany', 2, 4, 6), # Half of the country is covered by units of AL=5 + ('Metropolitan France', 3, 4, 6), + ('Spain', 2, 4, 6), + ('Portugal', 2, 4, 6), + ('Belgium', 2, 4, 6), + ('Italy', 2, 4, 6), + ('Switzerland', 2, 2, 4), # has admin_level=5 + ('Austria', 2, 4, 6), + ('Poland', 2, 4, 6), # 380(!) 
of AL=6 + ('Czechia', 2, 6, 7), + ('Ukraine', 2, 4, 6), # should merge back to region=4 level clusters + ('United Kingdom', 2, 5, 6), # whole country is divided by level 4; level 5 is necessary but not comprehensive + ('Denmark', 2, 4, 7), + ('Norway', 2, 4, 7), + ('Sweden', 2, 4, 7), # though division by level 4 is currently ideal + ('Finland', 2, 6, 7), # though division by level 6 is currently ideal + ('Estonia', 2, 2, 6), + ('Latvia', 2, 4, 6), # the whole country takes 56Mb, all 6-level units should merge into 4-level clusters + ('Lithuania', 2, 2, 4), # now Lithuania has 2 mwms of size 60Mb each + ('Belarus', 2, 2, 4), # 6 regions + Minsk city. Would it be merged with the region? + ('Slovakia', 2, 2, 4), # there are no subregions 5, 6, 7. Must leave all 8 4-level regions + ('Hungary', 2, 5, 6), + #('Slovenia', 2, 2, 8), # no levels 3,4,5,6; level 7 incomplete. + ('Croatia', 2, 2, 6), + ('Bosnia and Herzegovina', 2, 2, 4), # other levels - 5, 6, 7 - are incomplete. + ('Serbia', 2, 4, 6), + ('Romania', 2, 2, 4), + ('Bulgaria', 2, 2, 4), + ('Greece', 2, 4, 5), # has 7 4-level regions, must merge 5-level to them again + ('Ireland', 2, 5, 6), # 5-level don't cover the whole country! Still... 
+ ('Turkey', 2, 3, 4), + ] diff --git a/www/borders.js b/web/app/static/borders.js similarity index 56% rename from www/borders.js rename to web/app/static/borders.js index e69ea34..c386cb6 100644 --- a/www/borders.js +++ b/web/app/static/borders.js @@ -1,13 +1,13 @@ var STYLE_BORDER = { stroke: true, color: '#03f', weight: 3, fill: true, fillOpacity: 0.1 }; -var STYLE_SELECTED = { stroke: true, color: '#ff3', weight: 3, fill: true, fillOpacity: 0.1 }; +var STYLE_SELECTED = { stroke: true, color: '#ff3', weight: 3, fill: true, fillOpacity: 0.75 }; var FILL_TOO_SMALL = '#0f0'; var FILL_TOO_BIG = '#800'; var FILL_ZERO = 'black'; var OLD_BORDERS_NAME; // filled in checkHasOSM() -var IMPORT_ENABLED = false; +var IMPORT_ENABLED = true; var map, borders = {}, bordersLayer, selectedId, editing = false, readonly = false; -var size_good = 5, size_bad = 50; +var size_good = 50, size_bad = 70; var maxRank = 1; var tooSmallLayer = null; var oldBordersLayer = null; @@ -17,9 +17,13 @@ var crossingLayer = null; function init() { map = L.map('map', { editable: true }).setView([30, 0], 3); var hash = new L.Hash(map); - L.tileLayer('http://tile.openstreetmap.org/{z}/{x}/{y}.png', { attribution: '© OpenStreetMap' }).addTo(map); - L.tileLayer('http://korona.geog.uni-heidelberg.de/tiles/adminb/x={x}&y={y}&z={z}', - { attribution: '© GIScience Heidelberg' }).addTo(map); + L.tileLayer('https://tile.openstreetmap.de/{z}/{x}/{y}.png', { attribution: '© OpenStreetMap' }).addTo(map); + //L.tileLayer('https://b.tile.openstreetmap.fr/osmfr/{z}/{x}/{y}.png', { attribution: '© OpenStreetMap' }).addTo(map); +//L.tileLayer('http://tile.openstreetmap.org/{z}/{x}/{y}.png', { attribution: '© OpenStreetMap' }).addTo(map); +// L.tileLayer('http://korona.geog.uni-heidelberg.de/tiles/adminb/x={x}&y={y}&z={z}', +// { attribution: '© GIScience Heidelberg' }).addTo(map); +// L.tileLayer('https://tile.cyclestreets.net/boundaries/{z}/{x}/{y}.png', +// { attribution: '© CycleStreets.net' }).addTo(map); 
bordersLayer = L.layerGroup(); map.addLayer(bordersLayer); routingGroup = L.layerGroup(); @@ -33,27 +37,37 @@ function init() { $('#b_josm').css('visibility', map.getZoom() >= 7 ? 'visible' : 'hidden'); }); - if( IMPORT_ENABLED ) { - $('#import_link').css('display', 'none'); - $('#filefm').css('display', 'block'); - $('#filefm').attr('action', getServer('import')); - var iframe = ''; - $('#filefm').after(iframe); - } $('#poly_all').attr('href', getPolyDownloadLink()); $('#poly_bbox').on('mousedown', function() { $(this).attr('href', getPolyDownloadLink(true)); }); $('#r_green').val(size_good); $('#r_red').val(size_bad); + $('#hide_import_button').click(function() { + $('#import_div').hide(); + $('#filefm input[type=file]').val(''); + }); + $('#h_iframe').load(function() { + console.log('frame loaded'); + var frame_doc = $('#h_iframe')[0].contentWindow.document; + var frame_body = $('body', frame_doc); + frame_body.css({'font-size': '9pt'}); + updateBorders(); + }); $('#fsearch').keyup(function(e) { if( e.keyCode == 13 ) $('#b_search').click(); }); - $('#b_rename').keyup(function(e) { + $('#b_comment').keyup(function(e) { if( e.keyCode == 13 ) - $('#do_rename').click(); + $('#b_comment_send').click(); }); + $('#auto_divide').change(function() { + if (this.checked) + $('#population_thresholds').show(); + else + $('#population_thresholds').hide(); + }); checkHasOSM(); filterSelect(true); } @@ -70,8 +84,8 @@ function checkHasOSM() { } if( res.crossing ) $('#cross_actions').css('display', 'block'); - if( !res.backup ) { - $('#backups').css('display', 'none'); + if( res.backup ) { + $('#backups').show(); } if( res.readonly ) { $('#action_buttons').css('display', 'none'); @@ -84,7 +98,7 @@ function checkHasOSM() { $('#filefm').css('display', 'block'); $('#filefm').attr('action', getServer('import')); var iframe = ''; - $('#filefm').after(iframe); + // $('#filefm').after(iframe); } } }); @@ -101,12 +115,12 @@ function updateBorders() { 'ymin': b.getSouth(), 'ymax': 
b.getNorth() }, - success: processResult, + success: makeAnswerHandler(processBorders), dataType: 'json', simplified: simplified }); - $.ajax(getServer('routing'), { + /*$.ajax(getServer('routing'), { data: { 'xmin': b.getWest(), 'xmax': b.getEast(), @@ -125,7 +139,7 @@ function updateBorders() { 'ymin': b.getSouth(), 'ymax': b.getNorth(), 'points': (map.getZoom() < 10 ? 1 : 0), - 'rank': maxRank + 'rank': maxRank }, success: processCrossing, dataType: 'json' @@ -148,7 +162,16 @@ function updateBorders() { success: processOldBorders, dataType: 'json' }); - } + } */ +} + +function makeAnswerHandler(on_ok_func) { + return function(answer) { + if (answer.status !== 'ok') + alert(answer.status); + else + on_ok_func(answer); + }; } routingTypes = {1: "Border and feature are intersecting several times.", @@ -163,7 +186,8 @@ function processRouting(data) { } } -function processResult(data) { +function processBorders(data) { + data = data.geojson; for( var id in borders ) { if( id != selectedId || !editing ) { bordersLayer.removeLayer(borders[id].layer); @@ -174,9 +198,8 @@ function processResult(data) { for( var f = 0; f < data.features.length; f++ ) { var layer = L.GeoJSON.geometryToLayer(data.features[f].geometry), props = data.features[f].properties; - props.simplified = this.simplified; - if( 'name' in props && props.name != '' ) - updateBorder(props.name, layer, props); + props.simplified = this.simplified; + updateBorder(props.id, layer, props); } if( selectedId in borders ) { selectLayer({ target: borders[selectedId].layer }); @@ -184,6 +207,14 @@ function processResult(data) { selectLayer(null); } + [subregionsLayer, clustersLayer, + parentLayer, potentialParentLayer].forEach( + function(layer) { + if (layer) + layer.bringToFront(); + } + ); + var b = map.getBounds(); if( tooSmallLayer != null ) { tooSmallLayer.clearLayers(); @@ -259,27 +290,33 @@ function selectLayer(e) { } if( e != null && 'id' in e.target && e.target.id in borders ) { selectedId = e.target.id; 
+ if (selectedIdForParentAssigning && + selectedIdForParentAssigning != selectedId) { + finishChooseParent(); + } e.target.setStyle(STYLE_SELECTED); var props = borders[selectedId]; if( props['disabled'] ) e.target.setStyle({ fillOpacity: 0.01 }); $('#b_name').text(props['name']); - $('#b_size').text(Math.round(props['count_k'] * window.BYTES_FOR_NODE / 1024 / 1024) + ' MB'); + $('#b_al').text(props['admin_level'] ? '('+props['admin_level']+')' : ''); + $('#b_parent_name').text(props['parent_name']); + $('#b_size').text(Math.round(props['count_k'] * BYTES_FOR_NODE / 1024 / 1024) + ' MB'); //$('#b_nodes').text(borders[selectedId].layer.getLatLngs()[0].length); $('#b_nodes').text(props['nodes']); $('#b_date').text(props['modified']); $('#b_area').text(L.Util.formatNum(props['area'] / 1000000, 2)); $('#b_comment').val(props['comment'] || ''); - $('#b_status').text(props['disabled'] ? 'Отключено' : 'В сборке'); + //$('#b_status').text(props['disabled'] ? 'Отключено' : 'В сборке'); $('#b_disable').text(props['disabled'] ? 'Вернуть' : 'Убрать'); } else selectedId = null; $('#actions').css('visibility', selectedId == null ? 'hidden' : 'visible'); - $('#rename').css('display', 'none'); + $('#rename').hide(); } function filterSelect(noRefresh) { - value = $('#f_type').val(); + var value = $('#f_type').val(); $('#f_size').css('display', value == 'size' ? 'block' : 'none'); $('#f_chars').css('display', value == 'chars' ? 'block' : 'none'); $('#f_comments').css('display', value == 'comments' ? 
'block' : 'none'); @@ -295,15 +332,31 @@ function filterSelect(noRefresh) { updateBorders(); } +var colors = ['red', 'orange', 'yellow', 'lime', 'green', 'olive', 'cyan', 'darkcyan', + 'blue', 'navy', 'magenta', 'purple', 'deeppink', 'brown'] //'black'; +var alphabet = 'abcdefghijklmnopqrstuvwxyz'; + +function getCountryColor(props) { + var country_name = props.country_name; + if (!country_name) + return 'black'; + var firstLetter = country_name[0].toLowerCase(); + var index = alphabet.indexOf(firstLetter); + if (index === -1) + return 'black'; + var indexInColors = index % colors.length; + return colors[indexInColors]; +} + function getColor(props) { var color = STYLE_BORDER.color; - fType = $('#f_type').val(); + var fType = $('#f_type').val(); if( fType == 'size' ) { if( props['count_k'] <= 0 ) color = FILL_ZERO; - else if( props['count_k'] * window.BYTES_FOR_NODE < size_good * 1024 * 1024 ) + else if( props['count_k'] * BYTES_FOR_NODE < size_good * 1024 * 1024 ) color = FILL_TOO_SMALL; - else if( props['count_k'] * window.BYTES_FOR_NODE > size_bad * 1024 * 1024 ) + else if( props['count_k'] * BYTES_FOR_NODE > size_bad * 1024 * 1024 ) color = FILL_TOO_BIG; } else if( fType == 'topo' ) { var rings = countRings([0, 0], props.layer); @@ -322,6 +375,9 @@ function getColor(props) { if( props['comment'] && props['comment'] != '' ) color = FILL_TOO_BIG; } + else if (fType == 'country') { + color = getCountryColor(props) + } return color; } @@ -381,11 +437,20 @@ function bOldBorders() { } } -function importInJOSM(method, data ) { +function importInJOSM(method, data) { var url = getServer(method) + '?' + $.param(data); + var params = [ + ['new_layer', 'true'], + ['format', '.osm'], + ['layer_name', 'borders_' + Date.now()], + ['url', url] + ]; + var params_str = params.map(x => (x[0] + '=' + x[1])).join('&'); $.ajax({ - url: 'http://127.0.0.1:8111/import', - data: { url: url, new_layer: 'true', format: '.osm' }, + url: 'http://127.0.0.1:8111/import?' 
+ encodeURI(params_str), + // Don't use ajax 'data' param since the order of + // params in url matters: url= must be the last + // otherwise all the rest params would be a part of that url. complete: function(t) { if( t.status != 200 ) window.alert('Please enable remote_control in JOSM'); @@ -428,23 +493,43 @@ function bJosmZoom() { } function bImport() { - document.getElementById('filefm').submit(); + if ($('#filefm input[type=file]').val()) { + document.querySelector('#filefm').submit(); + var frame_doc = $('#h_iframe')[0].contentWindow.document; + var frame_body = $('body', frame_doc); + frame_body.html('
Идёт загрузка...
'); + $('#import_div').show(); + } } -function bShowRename() { +function finishRename() { + $('#rename_link').html('Название ▼'); + $('#rename').hide(); +} + +function bToggleRename() { if( !selectedId || !(selectedId in borders) || readonly ) return; - $('#b_rename').val(borders[selectedId].name); - $('#rename').css('display', 'block'); + var rename_el = $('#rename'); + if (rename_el.is(':hidden')) { + $('#b_rename').val(borders[selectedId].name); + $('#rename_link').html('Название ▲'); + rename_el.show(); + } + else { + finishRename(); + } } function bRename() { if( !selectedId || !(selectedId in borders) ) return; - $('#rename').css('display', 'none'); $.ajax(getServer('rename'), { - data: { 'name': selectedId, 'newname': $('#b_rename').val() }, - success: updateBorders + data: { 'id': selectedId, 'new_name': $('#b_rename').val() }, + success: makeAnswerHandler(function () { + finishRename(); + updateBorders(); + }) }); } @@ -452,7 +537,7 @@ function bDisable() { if( !selectedId || !(selectedId in borders) ) return; $.ajax(getServer(borders[selectedId].disabled ? 
'enable' : 'disable'), { - data: { 'name': selectedId }, + data: { 'id': selectedId }, success: updateBorders }); } @@ -460,19 +545,130 @@ function bDisable() { function bDelete() { if( !selectedId || !(selectedId in borders) ) return; - if( !window.confirm('Точно удалить регион ' + selectedId + '?') ) + var name = borders[selectedId].name; + if( !window.confirm('Точно удалить регион ' + + name + ' (' + selectedId + ')' + '?') ) return; $.ajax(getServer('delete'), { - data: { 'name': selectedId }, + data: { 'id': selectedId }, success: updateBorders }); } +var selectedIdForParentAssigning = null; +var potentialParentLayer = null; +var potentialParentLayers = {}; +var potentialParents = null; + +function finishChooseParent() { + if (potentialParentLayer) { + map.removeLayer(potentialParentLayer); + potentialParentLayer = null; + } + potentialParentLayers = {}; + potentialParents = {}; + selectedIdForParentAssigning = null; + $('#potential_parents').empty().hide(); + $('#parent_link').html('Родитель ▼:'); +} + +function bTogglePotentialParents() { + var potentialParentsDiv = $('#potential_parents'); + if (potentialParentsDiv.is(':visible')) { + finishChooseParent(); + } + else { + if (!selectedId || !(selectedId in borders)) + return; + selectedIdForParentAssigning = selectedId; + $('#parent_link').html('Родитель ▲:'); + potentialParentsDiv.html('Ожидайте...').show(); + $.ajax(getServer('potential_parents'), { + data: {'id': selectedIdForParentAssigning}, + success: processPotentialParents + }); + } +} +/* +function clearObject(obj) { + for (var k in obj) + if (obj.hasOwnProperty(k)) + delete obj[k]; +} +*/ + +function makeShowParent(parent_id) { + return function(event) { + event.preventDefault(); + if (!(parent_id in potentialParentLayers)) { + potentialParentLayers[parent_id] = L.geoJson( + potentialParents[parent_id], { + style: function(f) { + return { color: 'blue', weight: 2, fill: false }; + } + }); + } + if (potentialParentLayer) { + 
map.removeLayer(potentialParentLayer); + } + potentialParentLayer = potentialParentLayers[parent_id]; + map.addLayer(potentialParentLayer); + potentialParentLayer.bringToFront(); + }; +} + +function makeSetParent(parent_id) { + return function(event) { + event.preventDefault(); + $.ajax(getServer('set_parent'), { + data: { + 'id': selectedIdForParentAssigning, + 'parent_id': parent_id + }, + success: makeAnswerHandler(function() { + updateBorders(); + finishChooseParent(); + }) + }); + }; +} + +function processPotentialParents(answer) { + if (!selectedIdForParentAssigning || !(selectedIdForParentAssigning in borders)) + return; + var parents = answer.parents; + potentialParents = {}; + var potentialParentsDiv = $('#potential_parents'); + if (parents.length == 0) { + potentialParentsDiv.html('Ничего не найдено.'); + return; + } + potentialParentsDiv.html(''); + var selectedParentId = borders[selectedIdForParentAssigning].parent_id; + for (var i = 0; i < parents.length; ++i) { + var parent = parents[i]; + var parent_id = parent.properties.id; + potentialParents[parent_id] = parent; + var div = $('
').appendTo(potentialParentsDiv); + var name = parent.properties.name || '' + parent_id; + $('' + name + '').appendTo(div); + $(' ').appendTo(div); + $('показать') + .click(makeShowParent(parent_id)) + .appendTo(div); + var isCurrentParent = (parent_id === selectedParentId); + $(' ').appendTo(div); + $('' + (isCurrentParent ? 'отвязать': 'назначить') + '') + .click(makeSetParent(isCurrentParent ? null : parent_id)) + .appendTo(div); + } +} + function sendComment( text ) { if( !selectedId || !(selectedId in borders) ) return; $.ajax(getServer('comment'), { - data: { 'name': selectedId, 'comment': text }, + data: { 'id': selectedId, 'comment': text }, type: 'POST', success: updateBorders }); @@ -494,7 +690,8 @@ function bSplit() { if( !selectedId || !(selectedId in borders) ) return; splitSelected = selectedId; - $('#s_sel').text(selectedId); + var name = borders[selectedId].name; + $('#s_sel').text(name + ' (' + selectedId + ')'); $('#actions').css('display', 'none'); $('#split').css('display', 'block'); map.on('editable:drawing:end', bSplitDrawn); @@ -521,15 +718,19 @@ function bSplitAgain() { function bSplitDo() { var wkt = '', lls = splitLayer.getLatLngs(); - for( i = 0; i < lls.length; i++ ) { + for (var i = 0; i < lls.length; i++ ) { if( i > 0 ) wkt += ','; wkt += L.Util.formatNum(lls[i].lng, 6) + ' ' + L.Util.formatNum(lls[i].lat, 6); } $.ajax(getServer('split'), { - data: { 'name': splitSelected, 'line': 'LINESTRING(' + wkt + ')' }, + data: { + 'id': splitSelected, + 'line': 'LINESTRING(' + wkt + ')', + 'save_region': $('#save_split_region').prop('checked') + }, datatype: 'json', - success: function(data) { if( data.status != 'ok' ) alert(data.status); else updateBorders(); } + success: makeAnswerHandler(updateBorders) }); bSplitCancel(); } @@ -551,8 +752,8 @@ function bSplitCancel() { map.editTools.stopDrawing(); if( splitLayer != null ) map.removeLayer(splitLayer); - $('#actions').css('display', 'block'); - $('#split').css('display', 'none'); + 
$('#split').hide(); + $('#actions').show(); } var joinSelected = null, joinAnother = null; @@ -562,7 +763,8 @@ function bJoin() { return; joinSelected = selectedId; joinAnother = null; - $('#j_sel').text(selectedId); + var name = borders[selectedId].name; + $('#j_sel').text(name + '(' + selectedId + ')'); $('#actions').css('display', 'none'); $('#j_do').css('display', 'none'); $('#join').css('display', 'block'); @@ -570,9 +772,10 @@ function bJoin() { // called from selectLayer() when joinSelected is not null function bJoinSelect(layer) { - if( 'id' in layer && layer.id in borders ) { + if( 'id' in layer && layer.id in borders && layer.id != joinSelected ) { joinAnother = layer.id; - $('#j_name2').text(joinAnother); + var name2 = borders[joinAnother].name; + $('#j_name2').text(name2 + '(' + joinAnother + ')'); $('#j_do').css('display', 'block'); } } @@ -580,8 +783,8 @@ function bJoinSelect(layer) { function bJoinDo() { if( joinSelected != null && joinAnother != null ) { $.ajax(getServer('join'), { - data: { 'name': joinSelected, 'name2': joinAnother }, - success: updateBorders + data: { 'id1': joinSelected, 'id2': joinAnother }, + success: makeAnswerHandler(updateBorders) }); } bJoinCancel(); @@ -589,14 +792,76 @@ function bJoinDo() { function bJoinCancel() { joinSelected = null; - $('#actions').css('display', 'block'); - $('#join').css('display', 'none'); + $('#join').hide(); + $('#actions').show(); +} + + +var parentLayer = null; + +function bJoinToParent() { + if( !selectedId || !(selectedId in borders) ) + return; + var props = borders[selectedId]; + if (!props['parent_id']) { + alert('Это регион верхнего уровня'); + return; + } + joinSelected = selectedId; + $('#j_to_parent_sel').text(props['name'] + ' (' + selectedId + ')'); + $('#j_sel_parent').text(props['parent_name'] + ' (' + props['parent_id'] + ')'); + $('#actions').hide(); + $('#join_to_parent').show(); + +} + +function bJoinToParentPreview() { + if (parentLayer != null) { + 
map.removeLayer(parentLayer); + parentLayer = null; + } + var props = borders[selectedId]; + var simplified = map.getZoom() < 7 ? 2 : (map.getZoom() < 11 ? 1 : 0); + $.ajax(getServer('border'), { + data: {'id': props['parent_id'], 'simplify': simplified}, + success: makeAnswerHandler(processJoinToParentPreview) + }); +} + +function processJoinToParentPreview(answer) { + parentLayer = L.geoJson(answer.geojson, { + style: function(f) { + return { color: 'black', weight: 2, fill: false }; + } + }); + map.addLayer(parentLayer); + parentLayer.bringToFront(); +} + +function bJoinToParentDo() { + if( joinSelected != null) { + $.ajax(getServer('join_to_parent'), { + data: { 'id': joinSelected }, + success: makeAnswerHandler(updateBorders) + }); + } + bJoinToParentCancel(); +} + +function bJoinToParentCancel() { + joinSelected = null; + if (parentLayer != null) { + map.removeLayer(parentLayer); + parentLayer = null; + } + $('#actions').show(); + $('#join_to_parent').hide(); } var pMarker = L.marker([0, 0], { draggable: true }); function bPoint() { - $('#p_name').val(selectedId && selectedId in borders ? 
selectedId : ''); + $('#p_name').val('*'); selectLayer(null); $('#actions').css('display', 'none'); $('#point').css('display', 'block'); @@ -633,88 +898,139 @@ function pPointSelect(id, name1) { name = name.replace('*', name1); $.ajax(getServer('from_osm'), { data: { 'name': name, 'id': id }, - success: updateBorders + success: makeAnswerHandler(updateBorders) }); bPointCancel(); } function bPointCancel() { - $('#actions').css('display', 'block'); - $('#point').css('display', 'none'); + $('#point').hide(); + $('#actions').show(); $('#p_list').text(''); map.removeLayer(pMarker); } -var divPreview = null, divSelected = null; +var subregionsLayer = null, + clustersLayer = null, + divSelectedId = null; function bDivide() { if( !selectedId || !(selectedId in borders) ) return; - divSelected = selectedId; + divSelectedId = selectedId; $('#actions').css('display', 'none'); $('#d_do').css('display', 'none'); $('#d_none').css('display', 'none'); $('#divide').css('display', 'block'); // pre-fill 'like' and 'where' fields - $('#d_like').val(borders[selectedId].name); + $('#region_to_divide').text(borders[selectedId].name + ' (' + + selectedId + ')'); $('#d_prefix').val(borders[selectedId].name); - $('#d_where').val('admin_level = 4'); + var next_admin_level = borders[selectedId].admin_level ? 
+ borders[selectedId].admin_level + 1 : null; + $('#next_level').val(next_admin_level); +} + +function clearDivideLayers() { + if (subregionsLayer != null) { + map.removeLayer(subregionsLayer); + subregionsLayer = null; + } + if (clustersLayer != null) { + map.removeLayer(clustersLayer); + clustersLayer = null; + } } function bDividePreview() { - if( divPreview != null ) { - map.removeLayer(divPreview); - divPreview = null; - } - $('#d_do').css('display', 'none'); - $('#d_none').css('display', 'none'); + var auto_divide = $('#auto_divide').prop('checked'); + if (auto_divide && ( + !$('#city_population_thr').val() || + !$('#cluster_population_thr').val()) + ) { + alert('Fill population thresholds'); + return; + } + clearDivideLayers(); + $('#d_do').hide(); + $('#d_none').hide(); + var apply_to_similar= $('#apply_to_similar').prop('checked'); + var params = { + 'id': divSelectedId, + 'next_level': $('#next_level').val(), + 'auto_divide': auto_divide, + 'apply_to_similar': apply_to_similar + }; + if (auto_divide) { + params['city_population_thr'] = $('#city_population_thr').val(); + params['cluster_population_thr'] = $('#cluster_population_thr').val(); + } $.ajax(getServer('divpreview'), { - data: { - 'like': $('#d_like').val(), - 'query': $('#d_where').val() - }, - success: bDivideDrawPreview + data: params, + success: makeAnswerHandler(bDivideDrawPreview) }); } -function bDivideDrawPreview(geojson) { - if( !('features' in geojson) || !geojson.features.length ) { - $('#d_none').css('display', 'block'); +function bDivideDrawPreview(response) { + var subregions = response.subregions; + var clusters = response.clusters; + if( !('features' in subregions) || !subregions.features.length ) { + $('#d_none').show(); return; } - divPreview = L.geoJson(geojson, { + subregionsLayer = L.geoJson(subregions, { style: function(f) { return { color: 'blue', weight: 1, fill: false }; } }); - map.addLayer(divPreview); - $('#d_count').text(geojson.features.length); - 
$('#d_do').css('display', 'block'); + map.addLayer(subregionsLayer); + subregionsLayer.bringToFront(); + if (clusters) { + clustersLayer = L.geoJson(clusters, { + style: function(f) { + return { color: 'black', weight: 2, fill: false }; + } + }); + map.addLayer(clustersLayer); + clustersLayer.bringToFront(); + } + var subregions_count_text = '' + subregions.features.length + ' подрегионов'; + if (clusters) + subregions_count_text += ', ' + clusters.features.length + ' кластеров'; + $('#d_count').text(subregions_count_text); + $('#d_do').show(); } function bDivideDo() { + var auto_divide = $('#auto_divide').prop('checked'); + var apply_to_similar= $('#apply_to_similar').prop('checked'); + var params = { + 'id': divSelectedId, + 'next_level': $('#next_level').val(), + 'auto_divide': auto_divide, + 'apply_to_similar': apply_to_similar + }; + if (auto_divide) { + params['city_population_thr'] = $('#city_population_thr').val(); + params['cluster_population_thr'] = $('#cluster_population_thr').val(); + } $.ajax(getServer('divide'), { - data: { - 'name': divSelected, - 'prefix': $('#d_prefix').val(), - 'like': $('#d_like').val(), - 'query': $('#d_where').val() - }, + data: params, success: updateBorders }); bDivideCancel(); } function bDivideCancel() { - if( divPreview != null ) { - map.removeLayer(divPreview); - divPreview = null; - } - divSelected = null; - $('#actions').css('display', 'block'); - $('#divide').css('display', 'none'); + clearDivideLayers(); + divSelectedId = null; + $('#divide').hide(); + $('#actions').show(); } + + + function bLargest() { if( !selectedId || !(selectedId in borders) ) return; @@ -729,7 +1045,12 @@ function bHull() { return; $.ajax(getServer('hull'), { data: { 'name': selectedId }, - success: updateBorders + success: function(answer) { + if (answer.status !== 'ok') + alert(answer.status); + else + updateBorders(); + } }); } @@ -798,15 +1119,21 @@ function bBackupDelete(timestamp) { bBackupCancel(); } -function getPolyDownloadLink(bbox) { 
- var b = map.getBounds(); - var data = { - 'xmin': b.getWest(), - 'xmax': b.getEast(), - 'ymin': b.getSouth(), - 'ymax': b.getNorth() - }; - return getServer('poly') + (bbox ? '?' + $.param(data) : ''); +function getPolyDownloadLink(use_bbox) { + var downloadLink = getServer('poly'); + + if (use_bbox) { + var b = map.getBounds(); + var data = { + 'xmin': b.getWest(), + 'xmax': b.getEast(), + 'ymin': b.getSouth(), + 'ymax': b.getNorth() + }; + downloadLink += '?' + $.param(data); + } + + return downloadLink; } var crossSelected = null, fcPreview = null; @@ -933,3 +1260,31 @@ function bFixCrossCancel() { $('#actions').css('display', 'block'); $('#fixcross').css('display', 'none'); } + +function startOver() { + if (confirm('Вы уверены, что хотите начать разбиение сначала?')) { + finishChooseParent(); + bSplitCancel(); + bJoinCancel(); + bJoinToParentCancel(); + bPointCancel(); + bDivideCancel(); + bBackupCancel(); + bFixCrossCancel(); + selectLayer(null); + $('#wait_start_over').show(); + $.ajax(getServer('start_over'), { + success: makeAnswerHandler(function() { + for (var id in borders) { + bordersLayer.removeLayer(borders[id].layer); + delete borders[id]; + } + updateBorders(); + }), + complete: function() { + $('#wait_start_over').hide(); + } + + }); + } +} diff --git a/web/app/static/config.js b/web/app/static/config.js new file mode 100644 index 0000000..d788be1 --- /dev/null +++ b/web/app/static/config.js @@ -0,0 +1,15 @@ +const BYTES_FOR_NODE = 8; + +const SELF_URL = document.location.origin; + +// If the web api does not work at the server's root, you may need something like: +// const API_URL = SELF_URL + '/borders-api'; +const API_URL = SELF_URL; + + +function getServer(endpoint, base_url) { + var url = base_url ?
base_url : API_URL; + if (endpoint) + url += '/' + endpoint; + return url; +} diff --git a/www/import.html b/web/app/static/import.html similarity index 100% rename from www/import.html rename to web/app/static/import.html diff --git a/www/lib/Leaflet.Editable.js b/web/app/static/lib/Leaflet.Editable.js similarity index 100% rename from www/lib/Leaflet.Editable.js rename to web/app/static/lib/Leaflet.Editable.js diff --git a/www/lib/images/layers-2x.png b/web/app/static/lib/images/layers-2x.png similarity index 100% rename from www/lib/images/layers-2x.png rename to web/app/static/lib/images/layers-2x.png diff --git a/www/lib/images/layers.png b/web/app/static/lib/images/layers.png similarity index 100% rename from www/lib/images/layers.png rename to web/app/static/lib/images/layers.png diff --git a/www/lib/images/marker-icon-2x.png b/web/app/static/lib/images/marker-icon-2x.png similarity index 100% rename from www/lib/images/marker-icon-2x.png rename to web/app/static/lib/images/marker-icon-2x.png diff --git a/www/lib/images/marker-icon.png b/web/app/static/lib/images/marker-icon.png similarity index 100% rename from www/lib/images/marker-icon.png rename to web/app/static/lib/images/marker-icon.png diff --git a/www/lib/images/marker-shadow.png b/web/app/static/lib/images/marker-shadow.png similarity index 100% rename from www/lib/images/marker-shadow.png rename to web/app/static/lib/images/marker-shadow.png diff --git a/www/lib/images/spritesheet-2x.png b/web/app/static/lib/images/spritesheet-2x.png similarity index 100% rename from www/lib/images/spritesheet-2x.png rename to web/app/static/lib/images/spritesheet-2x.png diff --git a/www/lib/images/spritesheet.png b/web/app/static/lib/images/spritesheet.png similarity index 100% rename from www/lib/images/spritesheet.png rename to web/app/static/lib/images/spritesheet.png diff --git a/www/lib/jquery-1.11.2.min.js b/web/app/static/lib/jquery-1.11.2.min.js similarity index 100% rename from 
www/lib/jquery-1.11.2.min.js rename to web/app/static/lib/jquery-1.11.2.min.js diff --git a/www/lib/leaflet-hash.js b/web/app/static/lib/leaflet-hash.js similarity index 100% rename from www/lib/leaflet-hash.js rename to web/app/static/lib/leaflet-hash.js diff --git a/www/lib/leaflet.css b/web/app/static/lib/leaflet.css similarity index 100% rename from www/lib/leaflet.css rename to web/app/static/lib/leaflet.css diff --git a/www/lib/leaflet.js b/web/app/static/lib/leaflet.js similarity index 100% rename from www/lib/leaflet.js rename to web/app/static/lib/leaflet.js diff --git a/www/stat.js b/web/app/static/stat.js similarity index 100% rename from www/stat.js rename to web/app/static/stat.js diff --git a/www/index.html b/web/app/templates/index.html similarity index 52% rename from www/index.html rename to web/app/templates/index.html index 8b61c1f..37be9b2 100644 --- a/www/index.html +++ b/web/app/templates/index.html @@ -3,29 +3,49 @@ Редактор границ для MAPS.ME - - - - - - - + + + + + + + @@ -33,6 +53,7 @@