forked from organicmaps/organicmaps

commit 8d9b7c1589 (parent 718fd2960e)

- Remove old code (mapped ways routine).
- Some minor code fixes.

12 changed files with 49 additions and 172 deletions
@@ -194,36 +194,6 @@ namespace cache
   }
   };
 
-  class MappedWay
-  {
-  public:
-
-    enum WayType
-    {
-      coast_direct = 0,
-      empty_direct = 1,
-      coast_opposite = 2,
-      empty_opposite = 3
-    };
-
-    MappedWay() : m_id(0) {}
-    MappedWay(uint64_t id, WayType type) : m_id((id << 2) | type)
-    {
-      CHECK_EQUAL(0, id & 0xC000000000000000ULL, ("Highest 2 bits should be 0.", id));
-    }
-
-    bool operator<(MappedWay const & r) const
-    {
-      return m_id < r.m_id;
-    }
-
-    uint64_t GetId() const { return m_id >> 2; }
-    WayType GetType() const { return static_cast<WayType>(m_id & 3); }
-
-  private:
-    uint64_t m_id;
-  };
-  STATIC_ASSERT(sizeof(MappedWay) == 8);
 
   template <class TNodesHolder, class TData, class TFile>
   class BaseFileHolder
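For context on the deletion above: the removed cache::MappedWay packed a way id and a 2-bit endpoint/type tag into a single uint64_t, which is why its constructor checked that the two highest bits of the id were free. Below is a minimal standalone sketch of that encoding; it is not part of the commit, and the names are chosen for illustration only.

```cpp
#include <cassert>
#include <cstdint>

// Sketch of the 2-bit tag packing used by the removed cache::MappedWay.
// The low 2 bits store the tag, the remaining 62 bits store the way id,
// so ids must fit into 62 bits (hence the check on the top two bits).
enum class Tag : uint64_t { CoastDirect = 0, EmptyDirect = 1, CoastOpposite = 2, EmptyOpposite = 3 };

inline uint64_t Pack(uint64_t id, Tag t)
{
  assert((id & 0xC000000000000000ULL) == 0);  // highest 2 bits must be free
  return (id << 2) | static_cast<uint64_t>(t);
}

inline uint64_t UnpackId(uint64_t v) { return v >> 2; }
inline Tag UnpackTag(uint64_t v) { return static_cast<Tag>(v & 3); }
```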
@@ -238,17 +208,13 @@ namespace cache
     typedef detail::file_map_t<TFile, uint64_t> offset_map_t;
     offset_map_t m_nodes2rel, m_ways2rel;
 
-    typedef detail::file_map_t<TFile, MappedWay> ways_map_t;
-    ways_map_t m_mappedWays;
-
   public:
     BaseFileHolder(TNodesHolder & nodes, string const & dir)
       : m_nodes(nodes),
         m_ways(dir + WAYS_FILE),
         m_relations(dir + RELATIONS_FILE),
         m_nodes2rel(dir + NODES_FILE + ID2REL_EXT),
-        m_ways2rel(dir + WAYS_FILE + ID2REL_EXT),
-        m_mappedWays(dir + MAPPED_WAYS)
+        m_ways2rel(dir + WAYS_FILE + ID2REL_EXT)
     {
     }
   };
@@ -8,6 +8,7 @@

#include "../std/bind.hpp"


namespace data
{

@@ -46,17 +47,6 @@ public:
     add_id2rel_vector(this->m_ways2rel, id, e.ways);
   }
 
-  void AddMappedWay(user_id_t id, WayElement const & e, bool emptyTags)
-  {
-    typedef cache::MappedWay way_t;
-
-    way_t::WayType const directType = (emptyTags ? way_t::empty_direct : way_t::coast_direct);
-    way_t::WayType const oppositeType = (emptyTags ? way_t::empty_opposite : way_t::coast_opposite);
-
-    this->m_mappedWays.write(e.nodes.front(), way_t(id, directType));  // direct
-    this->m_mappedWays.write(e.nodes.back(), way_t(id, oppositeType)); // opposite
-  }
-
   void SaveIndex()
   {
     this->m_ways.SaveOffsets();
@@ -64,7 +54,6 @@ public:
 
     this->m_nodes2rel.flush_to_file();
     this->m_ways2rel.flush_to_file();
-    this->m_mappedWays.flush_to_file();
   }
 };
 
@@ -273,6 +273,14 @@ void FeatureBuilder1::AddOsmId(string const & type, uint64_t osmId)
   m_osmIds.push_back(osm::OsmId(type, osmId));
 }
 
+int FeatureBuilder1::GetMinFeatureDrawScale() const
+{
+  int const minScale = feature::MinDrawableScaleForFeature(GetFeatureBase());
+
+  // some features become invisible after merge processing, so -1 is possible
+  return (minScale == -1 ? 1000 : minScale);
+}
+
 string debug_print(FeatureBuilder1 const & f)
 {
   ostringstream out;
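The new GetMinFeatureDrawScale maps the "not drawable at any scale" result (-1) to a large sentinel (1000), so callers can use a plain "upper scale >= minimal scale" comparison and invisible features are naturally rejected. A hedged sketch of that sentinel logic in isolation; the function and parameter names here are illustrative stand-ins, not the project's API:

```cpp
// Illustrative only: how the 1000 sentinel behaves in a scale comparison.
inline bool NeedPushSketch(int upperWorldScale, int minDrawableScale)
{
  // -1 ("not drawable at any scale") becomes 1000, so it can never pass
  // an "upperWorldScale >= minScale" test for any real zoom level.
  int const minScale = (minDrawableScale == -1 ? 1000 : minDrawableScale);
  return upperWorldScale >= minScale;
}
```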
@@ -8,6 +8,7 @@

#include "../std/bind.hpp"


namespace serial { class CodingParams; }

/// Used for serialization\deserialization of features during --generate_features.
@@ -86,6 +87,8 @@ public:
   /// For OSM debugging, store original OSM id
   void AddOsmId(string const & type, uint64_t osmId);
 
+  int GetMinFeatureDrawScale() const;
+
 protected:
   /// Used for feature debugging
   vector<osm::OsmId> m_osmIds;
@@ -3,6 +3,7 @@
 #include "osm_element.hpp"
 #include "polygonizer.hpp"
 #include "osm_decl.hpp"
+#include "generate_info.hpp"
 
 #include "../defines.hpp"
 
@@ -31,7 +32,6 @@ class FileHolder : public cache::BaseFileHolder<TNodesHolder, cache::DataFileRea
   typedef cache::BaseFileHolder<TNodesHolder, reader_t, FileReader> base_type;
 
   typedef typename base_type::offset_map_t offset_map_t;
-  typedef typename base_type::ways_map_t ways_map_t;
 
   typedef typename base_type::user_id_t user_id_t;
 
@@ -91,23 +91,6 @@ public:
     return this->m_ways.Read(id, e);
   }
 
-  bool GetNextWay(user_id_t & prevWay, user_id_t node, WayElement & e)
-  {
-    typedef typename ways_map_t::iter_t iter_t;
-    pair<iter_t, iter_t> range = this->m_mappedWays.GetRange(node);
-    for (; range.first != range.second; ++range.first)
-    {
-      cache::MappedWay const & w = range.first->second;
-      if (w.GetType() != cache::MappedWay::coast_opposite && w.GetId() != prevWay)
-      {
-        this->m_ways.Read(w.GetId(), e);
-        prevWay = w.GetId();
-        return true;
-      }
-    }
-    return false;
-  }
-
   template <class ToDo> void ForEachRelationByWay(user_id_t id, ToDo & toDo)
   {
     process_relation<ToDo> processor(this->m_relations, toDo);
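The removed GetNextWay looked up every way registered at a given node in the node-to-MappedWay multimap and returned the first one that was neither the opposite end of a coastline nor the way just visited, so a caller could stitch ways end to end by feeding back the last node of the current way. A hypothetical caller of that removed API, assuming `e` already holds the starting way and that user_id_t is a 64-bit OSM id as in the deleted code:

```cpp
// Hypothetical use of the removed GetNextWay: follow a chain of ways by
// repeatedly asking which way continues from the last node of the current one.
template <class THolder>
void FollowChain(THolder & holder, uint64_t firstWayId, WayElement & e)
{
  uint64_t prevWay = firstWayId;  // the way we arrived from
  while (holder.GetNextWay(prevWay, e.nodes.back(), e))
  {
    // e now holds the next way in the chain; GetNextWay updated prevWay,
    // so the same way is not returned twice in a row. Process e here.
  }
}
```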
@@ -133,7 +116,6 @@ public:
 
     this->m_nodes2rel.read_to_memory();
     this->m_ways2rel.read_to_memory();
-    this->m_mappedWays.read_to_memory();
   }
 };
 
@@ -146,12 +128,6 @@ FeaturesCollector::FeaturesCollector(string const & fName)
 {
 }
 
-FeaturesCollector::FeaturesCollector(string const & bucket,
-                                     FeaturesCollector::InitDataType const & prefix)
-  : m_datFile(prefix.first + bucket + prefix.second)
-{
-}
-
 uint32_t FeaturesCollector::GetFileSize(FileWriter const & f)
 {
   // .dat file should be less than 4Gb
@@ -1,19 +1,19 @@
#pragma once

#include "generate_info.hpp"
#include "osm_decl.hpp"

#include "../geometry/rect2d.hpp"

#include "../coding/file_container.hpp"
#include "../coding/file_writer.hpp"

#include "../std/vector.hpp"
#include "../std/string.hpp"


class FeatureBuilder1;

namespace feature
{
  class GenerateInfo;

  bool GenerateFeatures(GenerateInfo & info, bool lightNodes);

  // Writes features to dat file.
@@ -30,11 +30,7 @@ namespace feature
     void WriteFeatureBase(vector<char> const & bytes, FeatureBuilder1 const & fb);
 
   public:
-    // Stores prefix and suffix of a dat file name.
-    typedef pair<string, string> InitDataType;
-
     FeaturesCollector(string const & fName);
-    FeaturesCollector(string const & bucket, InitDataType const & prefix);
 
     void operator() (FeatureBuilder1 const & f);
   };
@@ -7,6 +7,7 @@

#include "../base/string_utils.hpp"


template <class THolder>
class FirstPassParser : public BaseOSMParser
{
@@ -42,10 +43,7 @@ protected:
     else if (p->name == "way")
     {
       // store way
 
       WayElement e(id);
-      bool bUnite = false;
-      bool bEmptyTags = true;
-
       for (size_t i = 0; i < p->childs.size(); ++i)
       {
@@ -55,23 +53,10 @@ protected:
           VERIFY ( strings::to_uint64(p->childs[i].attrs["ref"], ref), ("Bad node ref in way : ", p->childs[i].attrs["ref"]) );
           e.nodes.push_back(ref);
         }
-        else if (!bUnite && (p->childs[i].name == "tag"))
-        {
-          bEmptyTags = false;
-
-          // process way's tags to define - if we need to join ways
-          string const & k = p->childs[i].attrs["k"];
-          string const & v = p->childs[i].attrs["v"];
-          bUnite = feature::NeedUnite(k, v);
-        }
       }
 
       if (e.IsValid())
-      {
         m_holder.AddWay(id, e);
-        if (bUnite || bEmptyTags)
-          m_holder.AddMappedWay(id, e, bEmptyTags);
-      }
     }
     else if (p->name == "relation")
     {
@@ -8,6 +8,7 @@
 #include "../mwm_rect_updater.hpp"
 #include "../statistics.hpp"
 #include "../unpack_mwm.hpp"
+#include "../generate_info.hpp"
 
 #include "../../indexer/classificator_loader.hpp"
 #include "../../indexer/data_header.hpp"
@@ -146,6 +147,8 @@ int main(int argc, char ** argv)
 
   // Enumerate over all dat files that were created.
   size_t const count = genInfo.m_bucketNames.size();
+  string const worldPath = path + WORLD_FILE_NAME + DATA_FILE_EXTENSION;
+
   for (size_t i = 0; i < count; ++i)
   {
     string const & datFile = genInfo.m_bucketNames[i];
@@ -153,8 +156,7 @@ int main(int argc, char ** argv)
     if (FLAGS_generate_geometry)
     {
       LOG(LINFO, ("Generating result features for ", datFile));
-      if (!feature::GenerateFinalFeatures(datFile,
-          datFile == path + WORLD_FILE_NAME + DATA_FILE_EXTENSION))
+      if (!feature::GenerateFinalFeatures(datFile, datFile == worldPath))
       {
         // If error - move to next bucket without index generation
         continue;
@@ -6,44 +6,6 @@
 
 #include "../std/target_os.hpp"
 
-namespace feature
-{
-  char const * arrUnite[1][2] = { { "natural", "coastline" } };
-
-  bool NeedUnite(string const & k, string const & v)
-  {
-    for (size_t i = 0; i < ARRAY_SIZE(arrUnite); ++i)
-      if (k == arrUnite[i][0] && v == arrUnite[i][1])
-        return true;
-
-    return false;
-  }
-
-  bool NeedUnite(uint32_t type)
-  {
-    static uint32_t arrTypes[1] = { 0 };
-
-    if (arrTypes[0] == 0)
-    {
-      // initialize static array
-      for (size_t i = 0; i < ARRAY_SIZE(arrUnite); ++i)
-      {
-        size_t const count = ARRAY_SIZE(arrUnite[i]);
-        vector<string> path(count);
-        for (size_t j = 0; j < count; ++j)
-          path[j] = arrUnite[i][j];
-
-        arrTypes[i] = classif().GetTypeByPath(path);
-      }
-    }
-
-    for (size_t i = 0; i < ARRAY_SIZE(arrTypes); ++i)
-      if (arrTypes[i] == type)
-        return true;
-
-    return false;
-  }
-}
 
 void progress_policy::Begin(string const & /*name*/, size_t factor)
 {
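The removed NeedUnite helpers were a small table lookup: the string overload scanned a fixed key/value table whose only entry was natural=coastline, and the type overload lazily translated that table into classifier type ids via classif().GetTypeByPath. A minimal sketch of the same lookup pattern in isolation; names here are hypothetical and this is not the project's API:

```cpp
#include <cstring>

// Sketch of the pattern behind the removed NeedUnite(k, v): a fixed
// key/value table scanned linearly. Only "natural=coastline" was listed,
// so the function effectively tested for coastline ways.
static char const * kUnite[][2] = { { "natural", "coastline" } };

bool NeedUniteSketch(char const * k, char const * v)
{
  for (auto const & kv : kUnite)
    if (std::strcmp(k, kv[0]) == 0 && std::strcmp(v, kv[1]) == 0)
      return true;
  return false;
}
```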
@@ -36,17 +36,6 @@ STATIC_ASSERT(sizeof(LatLonPos) == 24);
-#define MAPPED_WAYS "mapped_ways.n2w"
 
 
-namespace feature
-{
-  /// @name Need to unite features.
-  //@{
-  /// @param[in] k, v Key and Value from relation tags.
-  bool NeedUnite(string const & k, string const & v);
-  /// @param[in] type Type from feature.
-  bool NeedUnite(uint32_t type);
-  //@}
-}
 
 class progress_policy
 {
   size_t m_count;
@@ -6,8 +6,11 @@
#include "../indexer/feature.hpp"
#include "../indexer/feature_visibility.hpp"
#include "../indexer/cell_id.hpp"

#include "../geometry/rect2d.hpp"

#include "../coding/file_writer.hpp"

#include "../base/base.hpp"
#include "../base/buffer_vector.hpp"
#include "../base/macros.hpp"
@@ -15,6 +18,7 @@
#include "../std/scoped_ptr.hpp"
#include "../std/string.hpp"


#ifndef PARALLEL_POLYGONIZER
#define PARALLEL_POLYGONIZER 1
#endif
|
|||
#include <QMutexLocker>
|
||||
#endif
|
||||
|
||||
|
||||
namespace feature
|
||||
{
|
||||
// Groups features according to country polygons
|
||||
template <class FeatureOutT>
|
||||
class Polygonizer
|
||||
{
|
||||
typename FeatureOutT::InitDataType m_filePrefixAndSuffix;
|
||||
string m_prefix;
|
||||
string m_suffix;
|
||||
|
||||
vector<FeatureOutT*> m_Buckets;
|
||||
vector<string> m_Names;
|
||||
borders::CountriesContainerT m_countries;
|
||||
|
@ -45,8 +52,9 @@ namespace feature
|
|||
#endif
|
||||
|
||||
public:
|
||||
template <class T>
|
||||
Polygonizer(T const & info) : m_filePrefixAndSuffix(info.m_datFilePrefix, info.m_datFileSuffix)
|
||||
template <class TInfo>
|
||||
explicit Polygonizer(TInfo const & info)
|
||||
: m_prefix(info.m_datFilePrefix), m_suffix(info.m_datFileSuffix)
|
||||
#if PARALLEL_POLYGONIZER
|
||||
, m_ThreadPoolSemaphore(m_ThreadPool.maxThreadCount() * 8)
|
||||
#endif
|
||||
|
@@ -64,7 +72,8 @@ namespace feature
                 ("Error loading country polygons files"));
       }
       else
-      { // Insert fake country polygon equal to whole world to
+      {
+        // Insert fake country polygon equal to whole world to
         // create only one output file which contains all features
         m_countries.Add(borders::CountryPolygons(),
                         m2::RectD(MercatorBounds::minX, MercatorBounds::minY,
@@ -155,7 +164,7 @@ namespace feature
       if (country->m_index == -1)
       {
         m_Names.push_back(country->m_name);
-        m_Buckets.push_back(new FeatureOutT(country->m_name, m_filePrefixAndSuffix));
+        m_Buckets.push_back(new FeatureOutT(m_prefix + country->m_name + m_suffix));
         country->m_index = m_Buckets.size()-1;
       }
 
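After this change the Polygonizer no longer hands FeatureOutT a (name, prefix/suffix pair); it composes the full output path itself as prefix + bucket name + suffix, matching the single-string FeaturesCollector constructor that survives. A trivial illustrative sketch of that composition; the example extension is hypothetical, and the real prefix/suffix come from GenerateInfo's m_datFilePrefix and m_datFileSuffix:

```cpp
#include <string>

// Illustrative only: compose an output file name the way the new code does,
// e.g. BucketPath("./data/", "Country", ".dat") -> "./data/Country.dat".
std::string BucketPath(std::string const & prefix, std::string const & name, std::string const & suffix)
{
  return prefix + name + suffix;
}
```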
@@ -1,33 +1,26 @@
 #pragma once
 
 #include "feature_merger.hpp"
 #include "generate_info.hpp"
 
 #include "../indexer/feature_visibility.hpp"
 #include "../indexer/scales.hpp"
 
 #include "../defines.hpp"
 
 
-inline int GetMinFeatureDrawScale(FeatureBuilder1 const & fb)
-{
-  FeatureBase const fBase = fb.GetFeatureBase();
-  int const minScale = feature::MinDrawableScaleForFeature(fBase);
-
-  // some features become invisible after merge processing, so -1 is possible
-  return (minScale == -1 ? 1000 : minScale);
-}
-
 /// Process FeatureBuilder1 for world map. Main functions:
 /// - check for visibility in world map
 /// - merge linear features
 template <class FeatureOutT>
 class WorldMapGenerator
 {
-  class WorldEmitter : public FeatureEmitterIFace
+  class EmitterImpl : public FeatureEmitterIFace
   {
     FeatureOutT m_output;
 
   public:
-    template <class TInit>
-    WorldEmitter(TInit const & initData) : m_output(WORLD_FILE_NAME, initData)
+    template <class TInfo>
+    explicit EmitterImpl(TInfo const & info)
+      : m_output(info.m_datFilePrefix + WORLD_FILE_NAME + info.m_datFileSuffix)
     {
     }
 
@@ -39,20 +32,20 @@ class WorldMapGenerator
 
     bool NeedPushToWorld(FeatureBuilder1 const & fb) const
     {
-      return (scales::GetUpperWorldScale() >= GetMinFeatureDrawScale(fb));
+      return (scales::GetUpperWorldScale() >= fb.GetMinFeatureDrawScale());
     }
 
     void PushSure(FeatureBuilder1 const & fb) { m_output(fb); }
   };
 
-  WorldEmitter m_worldBucket;
+  EmitterImpl m_worldBucket;
   FeatureTypesProcessor m_typesCorrector;
   FeatureMergeProcessor m_merger;
 
 public:
-  template <class T>
-  WorldMapGenerator(T const & info) : m_worldBucket(typename FeatureOutT::InitDataType(
-      info.m_datFilePrefix, info.m_datFileSuffix)), m_merger(30)
+  template <class TInfo>
+  explicit WorldMapGenerator(TInfo const & info)
+    : m_worldBucket(info), m_merger(POINT_COORD_BITS)
   {
     // Do not strip last types for given tags,
     // for example, do not cut "-2" in "boundary-administrative-2"
@@ -73,7 +66,6 @@ public:
   {
     if (m_worldBucket.NeedPushToWorld(fb))
     {
-      // Always try to merge coastlines
       if (fb.GetGeomType() == feature::GEOM_LINE)
         m_merger(m_typesCorrector(fb));
       else