[generator_tool] Cleaned up old code

Alex Zolotarev 2011-08-20 15:26:54 +03:00 committed by Alex Zolotarev
parent 8b62b26a77
commit eeabd44449
18 changed files with 117 additions and 526 deletions

View file

@ -16,16 +16,12 @@ namespace borders
class PolygonLoader
{
string m_baseDir;
// @TODO not used
int m_level;
CountryPolygons & m_polygons;
m2::RectD & m_rect;
public:
// @TODO level is not used
PolygonLoader(string const & basePolygonsDir, int level, CountryPolygons & polygons, m2::RectD & rect)
: m_baseDir(basePolygonsDir), m_level(level), m_polygons(polygons), m_rect(rect)
PolygonLoader(string const & basePolygonsDir, CountryPolygons & polygons, m2::RectD & rect)
: m_baseDir(basePolygonsDir), m_polygons(polygons), m_rect(rect)
{
}
@ -47,14 +43,8 @@ namespace borders
}
};
bool LoadCountriesList(string const & baseDir, CountriesContainerT & countries,
int simplifyCountriesLevel)
bool LoadCountriesList(string const & baseDir, CountriesContainerT & countries)
{
if (simplifyCountriesLevel > 0)
{
LOG_SHORT(LINFO, ("Simplificator level for country polygons:", simplifyCountriesLevel));
}
countries.Clear();
ifstream stream((baseDir + POLYGONS_FILE).c_str());
string line;
@ -68,7 +58,7 @@ namespace borders
CountryPolygons country;
m2::RectD rect;
PolygonLoader loader(baseDir, simplifyCountriesLevel, country, rect);
PolygonLoader loader(baseDir, country, rect);
strings::Tokenize(line, "|", loader);
if (!country.m_regions.IsEmpty())
countries.Add(country, rect);

View file

@ -21,7 +21,5 @@ namespace borders
typedef m4::Tree<CountryPolygons> CountriesContainerT;
/// @param[in] simplifyCountriesLevel if positive, used as a level for simplificator
bool LoadCountriesList(string const & baseDir, CountriesContainerT & countries,
int simplifyCountriesLevel = -1);
bool LoadCountriesList(string const & baseDir, CountriesContainerT & countries);
}
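
For context, a minimal call-site sketch, not part of this commit: it shows the simplified two-argument LoadCountriesList in use, mirroring the update_generator.cpp hunk further below. LoadBordersExample is a hypothetical name; everything else comes from the headers above.

#include "borders_loader.hpp"

#include "../base/logging.hpp"
#include "../std/string.hpp"

// Hypothetical helper: load country polygons without a simplification level.
void LoadBordersExample(string const & dataPath)
{
  borders::CountriesContainerT countries;
  if (!borders::LoadCountriesList(dataPath, countries))
    LOG(LERROR, ("Can't load country polygons from", dataPath));
  // countries is an m4::Tree<CountryPolygons> and can be queried spatially,
  // e.g. via ForEachInRect or ForEachWithRect.
}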

View file

@ -1,126 +0,0 @@
#pragma once
#include "world_map_generator.hpp"
#include "../base/base.hpp"
#include "../coding/file_writer.hpp"
#include "../geometry/rect2d.hpp"
#include "../indexer/feature.hpp"
#include "../std/string.hpp"
namespace feature
{
// Groups features in buckets according to their coordinates.
template <class FeatureOutT, class FeatureClipperT, class BoundsT, typename CellIdT>
class CellFeatureBucketer
{
typedef typename FeatureClipperT::feature_builder_t feature_builder_t;
void Init()
{
uint32_t const size = 1 << 2 * m_Level;
m_Buckets.resize(size);
for (uint32_t i = 0; i < m_Buckets.size(); ++i)
{
CellIdT cell = CellIdT::FromBitsAndLevel(i, m_Level);
double minX, minY, maxX, maxY;
CellIdConverter<BoundsT, CellIdT>::GetCellBounds(cell, minX, minY, maxX, maxY);
m_Buckets[i].m_Rect = m2::RectD(minX, minY, maxX, maxY);
}
}
public:
template <class TInfo>
explicit CellFeatureBucketer(TInfo & info)
: m_Level(info.cellBucketingLevel), m_FeatureOutInitData(info.datFilePrefix, info.datFileSuffix),
m_worldMap(info.maxScaleForWorldFeatures, info.mergeCoastlines, m_FeatureOutInitData)
{
Init();
}
/// @note this constructor doesn't support world file generation
CellFeatureBucketer(int level, typename FeatureOutT::InitDataType const & initData)
: m_Level(level), m_FeatureOutInitData(initData), m_worldMap(-1, false, initData)
{
Init();
}
void operator () (feature_builder_t & fb)
{
m_worldMap(fb);
FeatureClipperT clipper(fb);
// TODO: Is feature fully inside GetLimitRect()?
m2::RectD const limitRect = fb.GetLimitRect();
for (uint32_t i = 0; i < m_Buckets.size(); ++i)
{
// First quick and dirty limit rect intersection.
// Clipper may (or may not) do a better intersection.
if (m_Buckets[i].m_Rect.IsIntersect(limitRect))
{
feature_builder_t clippedFb;
if (clipper(m_Buckets[i].m_Rect, clippedFb))
{
if (!m_Buckets[i].m_pOut)
m_Buckets[i].m_pOut = new FeatureOutT(BucketName(i), m_FeatureOutInitData);
(*(m_Buckets[i].m_pOut))(clippedFb);
}
}
}
}
template <typename F> void GetBucketNames(F f) const
{
for (uint32_t i = 0; i < m_Buckets.size(); ++i)
if (m_Buckets[i].m_pOut)
f(BucketName(i));
}
private:
inline string BucketName(uint32_t i) const
{
return CellIdT::FromBitsAndLevel(i, m_Level).ToString();
}
struct Bucket
{
Bucket() : m_pOut(NULL) {}
~Bucket() { delete m_pOut; }
FeatureOutT * m_pOut;
m2::RectD m_Rect;
};
int m_Level;
typename FeatureOutT::InitDataType m_FeatureOutInitData;
vector<Bucket> m_Buckets;
WorldMapGenerator<FeatureOutT> m_worldMap;
};
class SimpleFeatureClipper
{
public:
typedef FeatureBuilder1 feature_builder_t;
private:
feature_builder_t const & m_Feature;
public:
explicit SimpleFeatureClipper(feature_builder_t const & f) : m_Feature(f)
{
}
bool operator () (m2::RectD const & /*rect*/, feature_builder_t & clippedF) const
{
clippedF = m_Feature;
return true;
}
};
}

View file

@ -1,5 +1,4 @@
#include "feature_generator.hpp"
#include "feature_bucketer.hpp"
#include "data_cache_file.hpp"
#include "osm_element.hpp"
#include "polygonizer.hpp"
@ -20,7 +19,6 @@
#include "../std/bind.hpp"
#include "../std/unordered_map.hpp"
namespace feature
{
@ -271,35 +269,19 @@ bool GenerateImpl(GenerateInfo & info)
{
try
{
TNodesHolder nodes(info.tmpDir + NODES_FILE);
TNodesHolder nodes(info.m_tmpDir + NODES_FILE);
typedef FileHolder<TNodesHolder> holder_t;
holder_t holder(nodes, info.tmpDir);
holder_t holder(nodes, info.m_tmpDir);
holder.LoadIndex();
if (info.splitByPolygons)
{
typedef Polygonizer<FeaturesCollector, MercatorBounds, RectId> FeaturePolygonizerType;
// prefix is data dir
FeaturePolygonizerType bucketer(info);
TParser<FeaturePolygonizerType, holder_t> parser(bucketer, holder);
ParseXMLFromStdIn(parser);
bucketer.Finish();
info.bucketNames = bucketer.Names();
}
else
{
CHECK_GREATER_OR_EQUAL(info.cellBucketingLevel, 0, ());
CHECK_LESS(info.cellBucketingLevel, 10, ());
typedef CellFeatureBucketer<FeaturesCollector, SimpleFeatureClipper,
MercatorBounds, RectId> FeatureBucketerType;
FeatureBucketerType bucketer(info);
TParser<FeatureBucketerType, holder_t> parser(bucketer, holder);
ParseXMLFromStdIn(parser);
bucketer.GetBucketNames(MakeBackInsertFunctor(info.bucketNames));
}
typedef Polygonizer<FeaturesCollector> PolygonizerT;
// prefix is data dir
PolygonizerT bucketer(info);
TParser<PolygonizerT, holder_t> parser(bucketer, holder);
ParseXMLFromStdIn(parser);
info.m_bucketNames = bucketer.Names();
}
catch (Reader::Exception const & e)
{
@ -318,14 +300,4 @@ bool GenerateFeatures(GenerateInfo & info, bool lightNodes)
return GenerateImpl<points_in_file, SecondPassParserUsual>(info);
}
/*
bool GenerateCoastlines(GenerateInfo & info, bool lightNodes)
{
if (lightNodes)
return GenerateImpl<points_in_map, SecondPassParserJoin>(info);
else
return GenerateImpl<points_in_file, SecondPassParserJoin>(info);
}
*/
}
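
For readers skimming the diff, a condensed, commented sketch of the single code path that remains in GenerateImpl after this cleanup; the wrapper name GenerateImplSketch is invented, the identifiers and call order are taken from the hunk above, and error handling is elided.

namespace feature
{
  // Hypothetical condensed mirror of GenerateImpl (see the hunk above).
  template <class TNodesHolder, template <class, class> class TParser>
  bool GenerateImplSketch(GenerateInfo & info)
  {
    TNodesHolder nodes(info.m_tmpDir + NODES_FILE);     // node storage produced by pass 1
    typedef FileHolder<TNodesHolder> holder_t;
    holder_t holder(nodes, info.m_tmpDir);
    holder.LoadIndex();                                 // ways/relations index from pass 1

    typedef Polygonizer<FeaturesCollector> PolygonizerT;
    PolygonizerT bucketer(info);                        // routes features into per-country buckets
    TParser<PolygonizerT, holder_t> parser(bucketer, holder);
    ParseXMLFromStdIn(parser);                          // second pass over the planet XML

    info.m_bucketNames = bucketer.Names();              // one name per produced bucket
    return true;
  }
}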

View file

@ -1,5 +1,7 @@
#pragma once
#include "generate_info.hpp"
#include "../indexer/osm_decl.hpp"
#include "../geometry/rect2d.hpp"
@ -13,26 +15,7 @@ class FeatureBuilder1;
namespace feature
{
struct GenerateInfo
{
GenerateInfo()
: maxScaleForWorldFeatures(-1), splitByPolygons(false),
simplifyCountriesLevel(-1), mergeCoastlines(false) {}
string tmpDir, datFilePrefix, datFileSuffix;
/// If not -1, world will be split by buckets with specified level
int cellBucketingLevel;
vector<string> bucketNames;
/// Features with scale level [0..maxScaleForWorldFeatures] will be
/// included into separate world data file
/// @note if -1, world file will not be created
int maxScaleForWorldFeatures;
bool splitByPolygons;
int simplifyCountriesLevel;
bool mergeCoastlines;
};
bool GenerateFeatures(GenerateInfo & info, bool lightNodes);
//bool GenerateCoastlines(GenerateInfo & info, bool lightNodes);
// Writes features to dat file.
class FeaturesCollector

View file

@ -0,0 +1,20 @@
#pragma once
#include "../std/string.hpp"
#include "../std/vector.hpp"
namespace feature
{
struct GenerateInfo
{
GenerateInfo() : m_createWorld(false), m_splitByPolygons(false) {}
string m_tmpDir;
string m_datFilePrefix;
string m_datFileSuffix;
vector<string> m_bucketNames;
bool m_createWorld;
bool m_splitByPolygons;
};
} // namespace feature
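
A hypothetical snippet showing how the slimmed-down struct gets populated; the field assignments mirror the generator_tool changes further below, MakeInfoExample is an invented helper, and DATA_FILE_EXTENSION is assumed to come from the project's common defines header.

#include "generate_info.hpp"

// Hypothetical helper; assignments mirror generator_tool's main().
feature::GenerateInfo MakeInfoExample(string const & outPath, string const & tmpDir,
                                      bool splitByPolygons, bool createWorld)
{
  feature::GenerateInfo info;
  info.m_tmpDir = tmpDir;                      // intermediate data from the first pass
  info.m_datFilePrefix = outPath;              // prefix for the produced data files
  info.m_datFileSuffix = DATA_FILE_EXTENSION;  // assumed project-wide extension define
  info.m_splitByPolygons = splitByPolygons;    // split the planet using country borders
  info.m_createWorld = createWorld;            // also emit a separate World file
  // info.m_bucketNames is filled later by GenerateFeatures() with the output names.
  return info;
}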

View file

@ -30,7 +30,6 @@ SOURCES += \
HEADERS += \
feature_merger.hpp \
xml_element.hpp \
feature_bucketer.hpp \
osm_element.hpp \
data_generator.hpp \
feature_generator.hpp \
@ -49,3 +48,4 @@ HEADERS += \
mwm_rect_updater.hpp \
feature_emitter_iface.hpp \
dumper.hpp \
generate_info.hpp \

View file

@ -1,77 +0,0 @@
#include "../../testing/testing.hpp"
#include "../feature_bucketer.hpp"
#include "../../indexer/feature.hpp"
#include "../../indexer/mercator.hpp"
#include "../../indexer/cell_id.hpp"
#include "../../indexer/classificator_loader.hpp"
#include "../../platform/platform.hpp"
#include "../../indexer/indexer_tests/feature_routine.hpp"
#include "../../base/stl_add.hpp"
namespace
{
class PushBackFeatureDebugStringOutput
{
public:
typedef map<string, vector<string> > * InitDataType;
PushBackFeatureDebugStringOutput(string const & name, InitDataType const & initData)
: m_pContainer(&((*initData)[name]))
{
}
void operator() (FeatureBuilder1 const & fb)
{
FeatureType f;
FeatureBuilder2Feature(
static_cast<FeatureBuilder2 &>(const_cast<FeatureBuilder1 &>(fb)), f);
m_pContainer->push_back(f.DebugString(0));
}
private:
vector<string> * m_pContainer;
};
typedef feature::CellFeatureBucketer<
PushBackFeatureDebugStringOutput,
feature::SimpleFeatureClipper,
MercatorBounds,
RectId
> FeatureBucketer;
}
UNIT_TEST(FeatureBucketerSmokeTest)
{
Platform & pl = GetPlatform();
// classificator is needed because the bucketer uses it in WorldMapGenerator
// @TODO clean up or remove cell bucketer and replace with world countries bucketer
classificator::Read(pl.GetReader("drawing_rules.bin"),
pl.GetReader("classificator.txt"),
pl.GetReader("visibility.txt"),
pl.GetReader("types.txt"));
map<string, vector<string> > out, expectedOut;
FeatureBucketer bucketer(1, &out);
FeatureBuilder2 fb;
fb.AddPoint(m2::PointD(10, 10));
fb.AddPoint(m2::PointD(20, 20));
fb.AddType(0);
fb.SetLinear();
bucketer(fb);
FeatureType f;
FeatureBuilder2Feature(fb, f);
expectedOut["3"].push_back(f.DebugString(0));
TEST_EQUAL(out, expectedOut, ());
vector<string> bucketNames;
bucketer.GetBucketNames(MakeBackInsertFunctor(bucketNames));
TEST_EQUAL(bucketNames, vector<string>(1, "3"), ());
}

View file

@ -23,7 +23,6 @@ HEADERS += \
SOURCES += \
../../testing/testingmain.cpp \
../../indexer/indexer_tests/feature_routine.cpp \
feature_bucketer_test.cpp \
osm_parser_test.cpp \
feature_merger_test.cpp \
osm_type_test.cpp \

View file

@ -2,7 +2,6 @@
#include "../feature_generator.hpp"
#include "../feature_sorter.hpp"
#include "../update_generator.hpp"
#include "../feature_bucketer.hpp"
#include "../statistics.hpp"
#include "../classif_routine.hpp"
#include "../borders_generator.hpp"
@ -45,12 +44,8 @@ DEFINE_bool(use_light_nodes, false,
DEFINE_string(data_path, "", "Working directory, 'path_to_exe/../../data' if empty.");
DEFINE_string(output, "", "Prefix of filenames of outputted .dat and .idx files.");
DEFINE_string(intermediate_data_path, "", "Path to store nodes, ways, relations.");
DEFINE_int32(bucketing_level, -1, "If positive, level of cell ids for bucketing.");
DEFINE_int32(generate_world_scale, -1, "If specified, features for zoomlevels [0..this_value] "
"which are enabled in classificator will be MOVED to the separate world file");
DEFINE_bool(split_by_polygons, false, "Use kml shape files to split planet by regions and countries");
DEFINE_int32(simplify_countries_level, -1, "If positive, simplifies country polygons. Recommended values [10..15]");
DEFINE_bool(merge_coastlines, false, "If defined, tries to merge coastlines when generating the World file");
DEFINE_bool(generate_world, false, "Generate separate world file");
DEFINE_bool(split_by_polygons, false, "Use country borders to split the planet into regions and countries");
DEFINE_string(generate_borders, "",
"Create binary country .borders file for osm xml file given in 'output' parameter,"
"specify tag name and optional value: ISO3166-1 or admin_level=4");
@ -104,7 +99,7 @@ int main(int argc, char ** argv)
}
feature::GenerateInfo genInfo;
genInfo.tmpDir = FLAGS_intermediate_data_path;
genInfo.m_tmpDir = FLAGS_intermediate_data_path;
// load classificator only if necessary
if (FLAGS_generate_features || FLAGS_generate_geometry ||
@ -124,38 +119,35 @@ int main(int argc, char ** argv)
LOG(LINFO, ("Generating final data ..."));
if (FLAGS_output.empty() || FLAGS_split_by_polygons) // do not break data path for polygons
genInfo.datFilePrefix = path;
genInfo.m_datFilePrefix = path;
else
genInfo.datFilePrefix = path + FLAGS_output + (FLAGS_bucketing_level > 0 ? "-" : "");
genInfo.datFileSuffix = DATA_FILE_EXTENSION;
genInfo.m_datFilePrefix = path + FLAGS_output;
genInfo.m_datFileSuffix = DATA_FILE_EXTENSION;
// split data by countries polygons
genInfo.splitByPolygons = FLAGS_split_by_polygons;
genInfo.simplifyCountriesLevel = FLAGS_simplify_countries_level;
genInfo.m_splitByPolygons = FLAGS_split_by_polygons;
genInfo.cellBucketingLevel = FLAGS_bucketing_level;
genInfo.maxScaleForWorldFeatures = FLAGS_generate_world_scale;
genInfo.mergeCoastlines = FLAGS_merge_coastlines;
genInfo.m_createWorld = FLAGS_generate_world;
if (!feature::GenerateFeatures(genInfo, FLAGS_use_light_nodes))
return -1;
for (size_t i = 0; i < genInfo.bucketNames.size(); ++i)
genInfo.bucketNames[i] = genInfo.datFilePrefix + genInfo.bucketNames[i] + genInfo.datFileSuffix;
for (size_t i = 0; i < genInfo.m_bucketNames.size(); ++i)
genInfo.m_bucketNames[i] = genInfo.m_datFilePrefix + genInfo.m_bucketNames[i] + genInfo.m_datFileSuffix;
if (FLAGS_generate_world_scale >= 0)
genInfo.bucketNames.push_back(genInfo.datFilePrefix + WORLD_FILE_NAME + genInfo.datFileSuffix);
if (FLAGS_generate_world)
genInfo.m_bucketNames.push_back(genInfo.m_datFilePrefix + WORLD_FILE_NAME + genInfo.m_datFileSuffix);
}
else
{
genInfo.bucketNames.push_back(path + FLAGS_output + DATA_FILE_EXTENSION);
genInfo.m_bucketNames.push_back(path + FLAGS_output + DATA_FILE_EXTENSION);
}
// Enumerate over all dat files that were created.
size_t const count = genInfo.bucketNames.size();
size_t const count = genInfo.m_bucketNames.size();
for (size_t i = 0; i < count; ++i)
{
string const & datFile = genInfo.bucketNames[i];
string const & datFile = genInfo.m_bucketNames[i];
if (FLAGS_generate_geometry)
{
@ -199,7 +191,7 @@ int main(int argc, char ** argv)
}
//if (FLAGS_split_by_polygons)
// UpdateMWMRectsFromBoundaries(path, FLAGS_simplify_countries_level);
// UpdateMWMRectsFromBoundaries(path);
// Create http update list for countries and corresponding files
if (FLAGS_generate_update)

View file

@ -52,10 +52,10 @@ namespace
};
}
void UpdateMWMRectsFromBoundaries(string const & dataPath, int level)
void UpdateMWMRectsFromBoundaries(string const & dataPath)
{
borders::CountriesContainerT countries;
borders::LoadCountriesList(dataPath, countries, level);
borders::LoadCountriesList(dataPath, countries);
countries.ForEachWithRect(DoUpdateRect(dataPath));
}

View file

@ -2,4 +2,4 @@
#include "../std/string.hpp"
void UpdateMWMRectsFromBoundaries(string const & dataPath, int level);
void UpdateMWMRectsFromBoundaries(string const & dataPath);

View file

@ -1,4 +1,5 @@
#pragma once
#include "borders_loader.hpp"
#include "world_map_generator.hpp"
@ -10,6 +11,7 @@
#include "../base/base.hpp"
#include "../base/buffer_vector.hpp"
#include "../base/macros.hpp"
#include "../std/scoped_ptr.hpp"
#include "../std/string.hpp"
@ -27,13 +29,24 @@
namespace feature
{
// Groups features according to country polygons
template <class FeatureOutT, class BoundsT, typename CellIdT>
template <class FeatureOutT>
class Polygonizer
{
typename FeatureOutT::InitDataType m_filePrefixAndSuffix;
vector<FeatureOutT*> m_Buckets;
vector<string> m_Names;
borders::CountriesContainerT m_countries;
scoped_ptr<WorldMapGenerator<FeatureOutT> > m_worldMap;
#if PARALLEL_POLYGONIZER
QThreadPool m_ThreadPool;
QSemaphore m_ThreadPoolSemaphore;
QMutex m_EmitFeatureMutex;
#endif
public:
template <class TInfo>
Polygonizer(TInfo & info) : m_FeatureOutInitData(info.datFilePrefix, info.datFileSuffix),
m_worldMap(info.maxScaleForWorldFeatures, info.mergeCoastlines, m_FeatureOutInitData)
template <class T>
Polygonizer(T const & info) : m_filePrefixAndSuffix(info.m_datFilePrefix, info.m_datFileSuffix)
#if PARALLEL_POLYGONIZER
, m_ThreadPoolSemaphore(m_ThreadPool.maxThreadCount() * 8)
#endif
@ -42,14 +55,21 @@ namespace feature
LOG(LINFO, ("Polygonizer thread pool threads:", m_ThreadPool.maxThreadCount()));
#endif
CHECK(borders::LoadCountriesList(info.datFilePrefix, m_countries, info.simplifyCountriesLevel),
("Error loading country polygons files"));
if (info.m_createWorld)
m_worldMap.reset(new WorldMapGenerator<FeatureOutT>(info));
//LOG_SHORT(LINFO, ("Loaded polygons count for regions:"));
//for (size_t i = 0; i < m_countries.size(); ++i)
//{
// LOG_SHORT(LINFO, (m_countries[i].m_name, m_countries[i].m_regions.size()));
//}
if (info.m_splitByPolygons)
{
CHECK(borders::LoadCountriesList(info.m_datFilePrefix, m_countries),
("Error loading country polygons files"));
}
else
{ // Insert a fake country polygon covering the whole world to
// create a single output file containing all features
m_countries.Add(borders::CountryPolygons(),
m2::RectD(MercatorBounds::minX, MercatorBounds::minY,
MercatorBounds::maxX, MercatorBounds::maxY));
}
}
~Polygonizer()
{
@ -93,7 +113,8 @@ namespace feature
void operator () (FeatureBuilder1 const & fb)
{
m_worldMap(fb);
if (m_worldMap)
(*m_worldMap)(fb);
buffer_vector<borders::CountryPolygons const *, 32> vec;
m_countries.ForEachInRect(fb.GetLimitRect(), InsertCountriesPtr(vec));
@ -134,7 +155,7 @@ namespace feature
if (country->m_index == -1)
{
m_Names.push_back(country->m_name);
m_Buckets.push_back(new FeatureOutT(country->m_name, m_FeatureOutInitData));
m_Buckets.push_back(new FeatureOutT(country->m_name, m_filePrefixAndSuffix));
country->m_index = m_Buckets.size()-1;
}
@ -147,19 +168,6 @@ namespace feature
}
private:
typename FeatureOutT::InitDataType m_FeatureOutInitData;
vector<FeatureOutT*> m_Buckets;
vector<string> m_Names;
borders::CountriesContainerT m_countries;
WorldMapGenerator<FeatureOutT> m_worldMap;
#if PARALLEL_POLYGONIZER
QThreadPool m_ThreadPool;
QSemaphore m_ThreadPoolSemaphore;
QMutex m_EmitFeatureMutex;
#endif
friend class PolygonizerTask;
class PolygonizerTask
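
To spell out the new fallback in the constructor above: when m_splitByPolygons is false, one empty CountryPolygons with a whole-Mercator rect is registered, so every feature matches it and a single output file is produced. A hypothetical stand-alone sketch of just that step:

#include "borders_loader.hpp"

#include "../geometry/rect2d.hpp"
#include "../indexer/mercator.hpp"

// Hypothetical helper mirroring the else-branch of Polygonizer's constructor.
void AddWholeWorldFallback(borders::CountriesContainerT & countries)
{
  // The fake "country" covers the whole Mercator plane, so ForEachInRect()
  // always returns it and all features land in one bucket.
  countries.Add(borders::CountryPolygons(),
                m2::RectD(MercatorBounds::minX, MercatorBounds::minY,
                          MercatorBounds::maxX, MercatorBounds::maxY));
}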

View file

@ -1,6 +1,7 @@
#pragma once
#include "feature_merger.hpp"
#include "generate_info.hpp"
#include "../indexer/feature_visibility.hpp"
#include "../indexer/scales.hpp"
@ -10,26 +11,23 @@
inline int GetMinFeatureDrawScale(FeatureBuilder1 const & fb)
{
FeatureBase fBase = fb.GetFeatureBase();
FeatureBase const fBase = fb.GetFeatureBase();
int const minScale = feature::MinDrawableScaleForFeature(fBase);
// some features become invisible after merge processing, so -1 is possible
return (minScale == -1 ? 1000 : minScale);
}
template <class FeatureOutT>
class WorldMapGenerator
{
class WorldEmitter : public FeatureEmitterIFace
{
FeatureOutT m_output;
int m_maxWorldScale;
public:
template <class TInit>
WorldEmitter(int maxScale, TInit const & initData)
: m_output(WORLD_FILE_NAME, initData), m_maxWorldScale(maxScale)
WorldEmitter(TInit const & initData) : m_output(WORLD_FILE_NAME, initData)
{
}
@ -41,47 +39,21 @@ class WorldMapGenerator
bool NeedPushToWorld(FeatureBuilder1 const & fb) const
{
return (m_maxWorldScale >= GetMinFeatureDrawScale(fb));
return (scales::GetUpperWorldScale() >= GetMinFeatureDrawScale(fb));
}
void PushSure(FeatureBuilder1 const & fb) { m_output(fb); }
};
/// if NULL, separate world data file is not generated
scoped_ptr<WorldEmitter> m_worldBucket;
/// features visible before or at this scale level will go to World map
bool m_mergeCoastlines;
WorldEmitter m_worldBucket;
FeatureTypesProcessor m_typesCorrector;
FeatureMergeProcessor m_merger;
public:
WorldMapGenerator(int maxWorldScale, bool mergeCoastlines,
typename FeatureOutT::InitDataType const & initData)
: m_mergeCoastlines(mergeCoastlines), m_merger(30)
template <class T>
WorldMapGenerator(T const & info) : m_worldBucket(typename FeatureOutT::InitDataType(
info.m_datFilePrefix, info.m_datFileSuffix)), m_merger(30)
{
if (maxWorldScale >= 0)
m_worldBucket.reset(new WorldEmitter(maxWorldScale, initData));
// fill vector with types that need to be replaced
//char const * arrReplace[][3] = {
// {"highway", "motorway_link", "motorway"},
// {"highway", "motorway_junction", "motorway"},
// {"highway", "primary_link", "primary"},
// {"highway", "trunk_link", "trunk"},
// {"highway", "secondary_link", "secondary"},
// {"highway", "tertiary_link", "tertiary"}
//};
//for (size_t i = 0; i < ARRAY_SIZE(arrReplace); ++i)
//{
// char const * arr1[] = { arrReplace[i][0], arrReplace[i][1] };
// char const * arr2[] = { arrReplace[i][0], arrReplace[i][2] };
// m_typesCorrector.SetMappingTypes(arr1, arr2);
//}
// Do not strip last types for given tags,
// for example, do not cut "-2" in "boundary-administrative-2"
char const * arrDontNormalize[][3] = {
@ -99,18 +71,18 @@ public:
void operator()(FeatureBuilder1 const & fb)
{
if (m_worldBucket && m_worldBucket->NeedPushToWorld(fb))
if (m_worldBucket.NeedPushToWorld(fb))
{
if (m_mergeCoastlines && (fb.GetGeomType() == feature::GEOM_LINE))
// Always try to merge coastlines
if (fb.GetGeomType() == feature::GEOM_LINE)
m_merger(m_typesCorrector(fb));
else
m_worldBucket->PushSure(fb);
m_worldBucket.PushSure(fb);
}
}
void DoMerge()
{
if (m_worldBucket)
m_merger.DoMerge(*m_worldBucket);
m_merger.DoMerge(m_worldBucket);
}
};
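
A hypothetical driver, not part of the commit, showing how the reworked WorldMapGenerator is now used: constructed directly from GenerateInfo, fed every feature, then flushed with DoMerge(); the scale cutoff is no longer a parameter but always scales::GetUpperWorldScale().

#include "world_map_generator.hpp"
#include "generate_info.hpp"

#include "../std/vector.hpp"

// Hypothetical driver; the interface is taken from the class above.
template <class FeatureOutT>
void BuildWorldSketch(feature::GenerateInfo const & info,
                      vector<FeatureBuilder1> const & features)
{
  WorldMapGenerator<FeatureOutT> world(info);  // uses info.m_datFilePrefix / m_datFileSuffix
  for (size_t i = 0; i < features.size(); ++i)
    world(features[i]);                        // GEOM_LINE features go through the merger
  world.DoMerge();                             // flush merged (coastline) geometry to the World file
}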

View file

@ -1,127 +0,0 @@
#!/bin/bash
################################################
# Builds whole planet in /media/ssd/common dir #
################################################
# At least "set -e -u" should always be here, not just for debugging!
# "set -x" is useful to see what is going on.
set -e -u -x
# global params
LIGHT_NODES=false
PROCESSORS=4
# displays usage and exits
function Usage {
echo ''
echo "Usage: $0 [path_to_data_folder_with_classsif_and_planet.osm.bz2] [bucketing_level] [optional_path_to_intermediate_data]"
echo "Planet squares size is (2^bucketing_level x 2^bucketing_level)"
echo "If optional intermediate path is given, only second pass will be executed"
exit 0
}
# for parallel builds
function forky() {
local num_par_procs
if [[ -z $1 ]] ; then
num_par_procs=2
else
num_par_procs=$1
fi
while [[ $(jobs | wc -l) -ge $num_par_procs ]] ; do
sleep 1
done
}
if [ $# -lt 2 ]; then
Usage
fi
DATA_PATH=$1
BUCKETING_LEVEL=$2
# set up necessary Windows MinGW settings
#if [ ${WINDIR+1} ]; then
#fi
# check if we have QT in PATH
if [ ! `which qmake` ]; then
echo 'You should add your qmake binary into the PATH. This can be done in 2 ways:'
echo ' 1. Set it temporarily by executing: export PATH=/c/qt/your_qt_dir/bin:$PATH'
echo ' 2. Set it permanently by adding export... string above to your ~/.bashrc'
echo 'Hint: for second solution you can type from git bash console: notepad ~/.bashrc'
exit 0
fi
# determine script path
MY_PATH=`dirname $0`
# find generator_tool
IT_PATHS_ARRAY=( "$MY_PATH/../../../omim-build-release/out/release/generator_tool" \
"$MY_PATH/../../out/release/generator_tool" )
for i in {0..1}; do
if [ -x ${IT_PATHS_ARRAY[i]} ]; then
GENERATOR_TOOL=${IT_PATHS_ARRAY[i]}
echo TOOL: $GENERATOR_TOOL
break
fi
done
if [[ ! -n $GENERATOR_TOOL ]]; then
echo 'No generator_tool found, please build omim-build-release or omim/out/release'
echo ""
Usage
fi
OSM_BZ2=$DATA_PATH/planet.osm.bz2
if ! [ -f $OSM_BZ2 ]; then
echo "Can't open file $OSM_BZ2, did you forgot to specify dataDir?"
echo ""
Usage
fi
TMPDIR=$DATA_PATH/intermediate_data/
if [ $# -ge 3 ]; then
TMPDIR=$3/
fi
if ! [ -d $TMPDIR ]; then
mkdir -p $TMPDIR
fi
PV="cat"
if [ `which pv` ]
then
PV=pv
fi
# skip 1st pass if intermediate data path was given
if [ $# -lt 3 ]; then
# 1st pass - not paralleled
$PV $OSM_BZ2 | bzip2 -d | $GENERATOR_TOOL --intermediate_data_path=$TMPDIR \
--use_light_nodes=$LIGHT_NODES \
--preprocess_xml
fi
# 2nd pass - not paralleled
$PV $OSM_BZ2 | bzip2 -d | $GENERATOR_TOOL --intermediate_data_path=$TMPDIR \
--use_light_nodes=$LIGHT_NODES --bucketing_level=$BUCKETING_LEVEL \
--generate_features --generate_world_scale=9 \
--data_path=$DATA_PATH
# 3rd pass - do in parallel
for file in $DATA_PATH/*.mwm; do
if [ "$file" != "minsk-pass" ]; then
filename=$(basename "$file")
extension="${filename##*.}"
filename="${filename%.*}"
$GENERATOR_TOOL -output="$filename" -data_path=$DATA_PATH -generate_geometry -sort_features -generate_index -intermediate_data_path=$TMPDIR &
forky $PROCESSORS
fi
done
wait

View file

@ -1,14 +1,10 @@
#!/bin/bash
################################################
# Cool script for building dat and index files #
# Cool script for building mwm files #
################################################
# At least "set -e -u" should always be here, not just for debugging!
# "set -x" is useful to see what is going on.
set -e -u -x
DEFAULT_BUCKETING_LEVEL=0
# displays usage and exits
function Usage {
echo ''
@ -109,5 +105,5 @@ $PV $OSM_BZ2 | bzip2 -d | $GENERATOR_TOOL -intermediate_data_path=$TMPDIR \
$PV $OSM_BZ2 | bzip2 -d | $GENERATOR_TOOL -intermediate_data_path=$TMPDIR \
-use_light_nodes=$LIGHT_NODES \
-generate_features -sort_features -generate_geometry -generate_index \
-output=$1 -bucketing_level=$DEFAULT_BUCKETING_LEVEL -generate_search_index
-generate_features -generate_geometry -generate_index \
-generate_search_index -output=$1

View file

@ -17,4 +17,4 @@ fi
$PV ../../../omim-maps/$2.osm.bz2 | bzip2 -d | $GENERATOR_TOOL --preprocess_xml=true --use_light_nodes=true --intermediate_data_path=$TMPDIR
$PV ../../../omim-maps/$2.osm.bz2 | bzip2 -d | $GENERATOR_TOOL --use_light_nodes=true --generate_features=true --generate_geometry=true --generate_index=true --sort_features=true --intermediate_data_path=$TMPDIR --output=$2 --bucketing_level=0
$PV ../../../omim-maps/$2.osm.bz2 | bzip2 -d | $GENERATOR_TOOL --use_light_nodes=true --generate_features=true --generate_geometry=true --generate_index=true --intermediate_data_path=$TMPDIR --output=$2

View file

@ -8,15 +8,13 @@
set -e -u -x
# global params
LIGHT_NODES=false
PROCESSORS=8
SIMPLIFY=-1
LIGHT_NODES=true
PROCESSORS=2
# displays usage and exits
function Usage {
echo ''
echo "Usage: $0 [path_to_data_folder_with_classsif_and_planet.osm.bz2] [bucketing_level] [optional_path_to_intermediate_data] [world_only]"
echo "Planet squares size is (2^bucketing_level x 2^bucketing_level)"
echo "Usage: $0 [path_to_data_folder_with_classsif_and_planet.osm.bz2] [optional_path_to_intermediate_data]"
echo "If optional intermediate path is given, only second pass will be executed"
exit 0
}
@ -35,12 +33,11 @@ function forky() {
done
}
if [ $# -lt 2 ]; then
if [ $# -lt 1 ]; then
Usage
fi
DATA_PATH=$1
BUCKETING_LEVEL=$2
# set up necessary Windows MinGW settings
#if [ ${WINDIR+1} ]; then
@ -86,8 +83,8 @@ fi
TMPDIR=$DATA_PATH/intermediate_data/
if [ $# -ge 3 ]; then
TMPDIR=$3/
if [ $# -ge 2 ]; then
TMPDIR=$2/
fi
if ! [ -d $TMPDIR ]; then
@ -100,24 +97,18 @@ then
PV=pv
fi
WORLD_ONLY=false
if [ $# -ge 4 ]; then
WORLD_ONLY=true
fi
# skip 1st pass if intermediate data path was given
if [ $# -lt 3 ]; then
if [ $# -lt 2 ]; then
# 1st pass - not paralleled
$PV $OSM_BZ2 | bzip2 -d | $GENERATOR_TOOL -intermediate_data_path=$TMPDIR \
-use_light_nodes=$LIGHT_NODES \
-preprocess_xml
fi
# 2nd pass - not paralleled
# 2nd pass - paralleled in the code
$PV $OSM_BZ2 | bzip2 -d | $GENERATOR_TOOL -intermediate_data_path=$TMPDIR \
-use_light_nodes=$LIGHT_NODES -split_by_polygons -simplify_countries_level=$SIMPLIFY \
-generate_features -generate_world_scale=9 -merge_coastlines=true \
-use_light_nodes=$LIGHT_NODES -split_by_polygons \
-generate_features -generate_world \
-data_path=$DATA_PATH
# 3rd pass - do in parallel
@ -126,7 +117,7 @@ for file in $DATA_PATH/*.mwm; do
filename=$(basename "$file")
extension="${filename##*.}"
filename="${filename%.*}"
$GENERATOR_TOOL -data_path=$DATA_PATH -generate_geometry -sort_features -generate_index -generate_search_index -output="$filename" &
$GENERATOR_TOOL -data_path=$DATA_PATH -generate_geometry -generate_index -generate_search_index -output="$filename" &
forky $PROCESSORS
fi
done