Refactoring of feature::DataHeader.

Added base point and scales array to header.
World and country generation now have different scale ranges.
This commit is contained in:
vng 2011-03-14 10:10:19 +02:00 committed by Alex Zolotarev
parent 25ea1267b4
commit 221d3d99da
28 changed files with 159 additions and 309 deletions

View file

@ -1,47 +1,61 @@
#include "data_header.hpp"
#include "../base/string_utils.hpp"
#include "../platform/platform.hpp"
#include "../indexer/point_to_int64.hpp"
#include "../coding/file_reader.hpp"
#include "../coding/file_writer.hpp"
#include "../indexer/cell_id.hpp"
#include "../coding/write_to_sink.hpp"
#include "../coding/varint.hpp"
#include "../base/start_mem_debug.hpp"
namespace feature
{
/// Constructs a header with all fields zeroed (delegates to Reset()).
DataHeader::DataHeader()
{
Reset();
}
namespace
{
/// Functor for for_each_tuple: resets each supported field type of the
/// header's params tuple to its default state.
struct do_reset
{
void operator() (string & t, int) { t.clear(); }
void operator() (uint64_t & t, int) { t = 0; }
// The boundary pair is deliberately left untouched by Reset().
void operator() (pair<int64_t, int64_t> &, int) {}
};
}
/// Zeroes all tuple-stored fields by applying do_reset to every element.
void DataHeader::Reset()
{
do_reset doReset;
for_each_tuple(m_params, doReset);
}
m2::RectD const DataHeader::Bounds() const
void DataHeader::SetBase(m2::PointD const & p)
{
return Int64ToRect(Get<EBoundary>());
m_base = PointToInt64(p.x, p.y);
}
/// @return bounding rect decoded from the packed int64 cell-id pair.
m2::RectD const DataHeader::GetBounds() const
{
return Int64ToRect(m_bounds);
}
void DataHeader::SetBounds(m2::RectD const & r)
{
Set<EBoundary>(RectToInt64(r));
m_bounds = RectToInt64(r);
}
void DataHeader::SetScales(int * arr)
{
for (int i = 0; i < m_scales.size(); ++i)
m_scales[i] = static_cast<uint8_t>(arr[i]);
}
/// Serialization format (must stay in sync with Load()):
///   - m_base as a fixed-width int64
///   - bounds pair as two varints, stored as deltas from m_base
///   - raw bytes of the scales array
void DataHeader::Save(FileWriter & w) const
{
WriteToSink(w, m_base);
WriteVarInt(w, m_bounds.first - m_base);
WriteVarInt(w, m_bounds.second - m_base);
w.Write(m_scales.data(), m_scales.size());
}
/// Deserialization: exact mirror of Save() — fixed int64 base point,
/// then the bounds varints (deltas are re-added to m_base), then the
/// raw scales bytes.
void DataHeader::Load(FileReader const & r)
{
ReaderSource<FileReader> src(r);
m_base = ReadPrimitiveFromSource<int64_t>(src);
m_bounds.first = ReadVarInt<int64_t>(src) + m_base;
m_bounds.second = ReadVarInt<int64_t>(src) + m_base;
src.Read(m_scales.data(), m_scales.size());
}
}

View file

@ -1,70 +1,48 @@
#pragma once
#include "../base/std_serialization.hpp"
#include "../coding/streams_sink.hpp"
#include "../geometry/rect2d.hpp"
#include "../std/string.hpp"
#include "../std/tuple.hpp"
#include "../std/array.hpp"
#include "../base/start_mem_debug.hpp"
class FileReader;
class FileWriter;
namespace feature
{
/// All file sizes are in bytes
class DataHeader
{
/// @TODO move version somewhere else
static int32_t const MAPS_MAJOR_VERSION_BINARY_FORMAT = 1;
int64_t m_base;
private:
typedef tuple<
pair<int64_t, int64_t> // boundary;
> params_t;
params_t m_params;
pair<int64_t, int64_t> m_bounds;
enum param_t { EBoundary };
template <int N>
typename tuple_element<N, params_t>::type const & Get() const { return m_params.get<N>(); }
template <int N, class T>
void Set(T const & t) { m_params.get<N>() = t; }
array<uint8_t, 4> m_scales;
public:
DataHeader();
/// Zeroes all fields
/// Zero all fields
void Reset();
m2::RectD const Bounds() const;
void SetBase(m2::PointD const & p);
int64_t GetBase() const { return m_base; }
m2::RectD const GetBounds() const;
void SetBounds(m2::RectD const & r);
void SetScales(int * arr);
size_t GetScalesCount() const { return m_scales.size(); }
int GetScale(int i) const { return m_scales[i]; }
/// @name Serialization
//@{
template <class TWriter> void Save(TWriter & writer) const
{
stream::SinkWriterStream<TWriter> w(writer);
w << MAPS_MAJOR_VERSION_BINARY_FORMAT;
serial::save_tuple(w, m_params);
}
/// @return false if header can't be read (invalid or newer version format)
template <class TReader> bool Load(TReader & reader)
{
stream::SinkReaderStream<TReader> r(reader);
uint32_t ver;
r >> ver;
if (ver > MAPS_MAJOR_VERSION_BINARY_FORMAT)
return false;
Reset();
serial::load_tuple(r, m_params);
return true;
}
void Save(FileWriter & w) const;
void Load(FileReader const & r);
//@}
};
}
#include "../base/stop_mem_debug.hpp"

View file

@ -1,59 +0,0 @@
#include "../base/SRC_FIRST.hpp"
#include "data_header_reader.hpp"
#include "data_header.hpp"
#include "../defines.hpp"
#include "../coding/file_container.hpp"
#include "../base/start_mem_debug.hpp"
namespace feature
{
/// @return the total number of bytes occupied by the header: the stored
/// header size (read as uint64 from position 0) plus the size field itself.
uint64_t GetSkipHeaderSize(Reader const & reader)
{
uint64_t const headerSize = ReadPrimitiveFromPos<uint64_t>(reader, 0);
return headerSize + sizeof(uint64_t);
}
/// Convenience overload: opens the dat file container and reads the header
/// from its data tag. @see ReadDataHeader(FileReader const &, DataHeader &).
uint64_t ReadDataHeader(string const & datFileName, feature::DataHeader & outHeader)
{
return ReadDataHeader(FilesContainerR(datFileName).GetReader(DATA_FILE_TAG), outHeader);
}
/// Loads the header that follows the uint64 size prefix.
/// @return total bytes to skip to reach the feature data, or 0 on read error.
uint64_t ReadDataHeader(FileReader const & reader, feature::DataHeader & outHeader)
{
try
{
uint64_t const toSkip = GetSkipHeaderSize(reader);
ReaderSource<FileReader> src(reader);
// Skip the size prefix; the header itself starts right after it.
src.Skip(sizeof(uint64_t));
outHeader.Load(src);
return toSkip;
}
catch (Reader::Exception const & e)
{
// A truncated or corrupt file is reported to the caller as 0.
ASSERT(false, ("Error reading header from dat file", e.what()));
return 0;
}
}
/// Writes the header as a size-prefixed blob: it is first serialized into a
/// memory buffer so the exact byte count can be written before the data.
void WriteDataHeader(Writer & writer, feature::DataHeader const & header)
{
typedef vector<unsigned char> TBuffer;
TBuffer buffer;
MemWriter<TBuffer> w(buffer);
header.Save(w);
uint64_t const sz = buffer.size();
WriteToSink(writer, sz);
if (sz > 0)
writer.Write(&buffer[0], buffer.size());
}
}

View file

@ -1,20 +0,0 @@
#pragma once
#include "../base/base.hpp"
#include "../std/string.hpp"
class Writer;
class Reader;
class FileReader;
namespace feature
{
class DataHeader;
/// @return total header size, which should be skipped for data read, or 0 if error
uint64_t GetSkipHeaderSize(Reader const & reader);
uint64_t ReadDataHeader(string const & datFileName, feature::DataHeader & outHeader);
uint64_t ReadDataHeader(FileReader const & reader, feature::DataHeader & outHeader);
void WriteDataHeader(Writer & writer, feature::DataHeader const & header);
}

View file

@ -578,6 +578,7 @@ FeatureType::FeatureType(read_source_t & src)
void FeatureType::Deserialize(read_source_t & src)
{
m_cont = &src.m_cont;
m_header = &src.m_header;
m_Points.clear();
m_Triangles.clear();
@ -587,7 +588,7 @@ void FeatureType::Deserialize(read_source_t & src)
m_InnerStats.MakeZero();
base_type::Deserialize(src.m_data, src.m_offset, src.m_base);
base_type::Deserialize(src.m_data, src.m_offset, m_header->GetBase());
}
namespace
@ -595,18 +596,18 @@ namespace
uint32_t const kInvalidOffset = uint32_t(-1);
}
int FeatureType::GetScaleIndex(int scale)
int FeatureType::GetScaleIndex(int scale) const
{
int const count = ARRAY_SIZE(feature::g_arrScales);
int const count = m_header->GetScalesCount();
if (scale == -1) return count-1;
for (size_t i = 0; i < count; ++i)
if (scale <= feature::g_arrScales[i])
for (int i = 0; i < count; ++i)
if (scale <= m_header->GetScale(i))
return i;
return -1;
}
int FeatureType::GetScaleIndex(int scale, offsets_t const & offsets)
int FeatureType::GetScaleIndex(int scale, offsets_t const & offsets) const
{
if (scale == -1)
{
@ -620,8 +621,8 @@ int FeatureType::GetScaleIndex(int scale, offsets_t const & offsets)
}
else
{
for (size_t i = 0; i < ARRAY_SIZE(feature::g_arrScales); ++i)
if (scale <= feature::g_arrScales[i])
for (size_t i = 0; i < m_header->GetScalesCount(); ++i)
if (scale <= m_header->GetScale(i))
{
if (offsets[i] != kInvalidOffset)
return i;
@ -847,7 +848,7 @@ uint32_t FeatureType::ParseGeometry(int scale) const
points.reserve(count);
uint32_t const scaleIndex = GetScaleIndex(scale);
ASSERT_LESS ( scaleIndex, ARRAY_SIZE(feature::g_arrScales), () );
ASSERT_LESS ( scaleIndex, m_header->GetScalesCount(), () );
points.push_back(m_Points.front());
for (size_t i = 1; i < count-1; ++i)
@ -898,14 +899,14 @@ uint32_t FeatureType::ParseTriangles(int scale) const
return sz;
}
void FeatureType::ReadOffsets(ArrayByteSource & src, uint8_t mask, offsets_t & offsets)
void FeatureType::ReadOffsets(ArrayByteSource & src, uint8_t mask, offsets_t & offsets) const
{
ASSERT_GREATER ( mask, 0, () );
int index = 0;
while (mask > 0)
{
ASSERT_LESS ( index, ARRAY_SIZE(feature::g_arrScales), () );
ASSERT_LESS ( index, m_header->GetScalesCount(), () );
offsets[index++] = (mask & 0x01) ? ReadVarUint<uint32_t>(src) : kInvalidOffset;
mask = mask >> 1;
}

View file

@ -1,6 +1,7 @@
#pragma once
#include "cell_id.hpp"
#include "data_header.hpp"
#include "../geometry/point2d.hpp"
#include "../geometry/rect2d.hpp"
@ -343,10 +344,10 @@ public:
buffer_t m_data;
uint32_t m_offset;
int64_t m_base;
feature::DataHeader m_header;
read_source_t(FilesContainerR const & cont)
: m_cont(cont), m_offset(0), m_base(0)
: m_cont(cont), m_offset(0)
{
}
@ -469,6 +470,8 @@ private:
FilesContainerR * m_cont;
feature::DataHeader const * m_header;
mutable bool m_bHeader2Parsed, m_bPointsParsed, m_bTrianglesParsed;
mutable inner_geom_stat_t m_InnerStats;
@ -477,10 +480,10 @@ private:
typedef array<uint32_t, 4> offsets_t; // should be synchronized with ARRAY_SIZE(g_arrScales)
static void ReadOffsets(ArrayByteSource & src, uint8_t mask, offsets_t & offsets);
void ReadOffsets(ArrayByteSource & src, uint8_t mask, offsets_t & offsets) const;
static int GetScaleIndex(int scale);
static int GetScaleIndex(int scale, offsets_t const & offset);
int GetScaleIndex(int scale) const;
int GetScaleIndex(int scale, offsets_t const & offset) const;
mutable offsets_t m_ptsOffsets, m_trgOffsets;
};

View file

@ -22,7 +22,8 @@ namespace feature
}
static int g_arrScales[] = { 7, 10, 14, 17 }; // 17 = scales::GetUpperScale()
static int g_arrWorldScales[] = { 1, 3, 4, 6 }; // 6 = upper scale for world.mwm visibility
static int g_arrCountryScales[] = { 7, 10, 14, 17 }; // 17 = scales::GetUpperScale()
inline string GetTagForIndex(char const * prefix, int ind)
{
@ -31,7 +32,8 @@ namespace feature
str = prefix;
static char arrChar[] = { '0', '1', '2', '3' };
STATIC_ASSERT ( ARRAY_SIZE(arrChar) == ARRAY_SIZE(g_arrScales) );
STATIC_ASSERT ( ARRAY_SIZE(arrChar) == ARRAY_SIZE(g_arrWorldScales) );
STATIC_ASSERT ( ARRAY_SIZE(arrChar) == ARRAY_SIZE(g_arrCountryScales) );
ASSERT ( ind >= 0 && ind < ARRAY_SIZE(arrChar), (ind) );
str += arrChar[ind];

View file

@ -11,6 +11,12 @@ public:
/// but only if they have common point
void AppendFeature(FeatureBuilder1Merger const & fb);
void SetAreaSafe()
{
if (!m_bArea)
m_bArea = true;
}
uint32_t KeyType() const
{
ASSERT_EQUAL ( m_Types.size(), 1, () );

View file

@ -34,10 +34,7 @@ namespace feature
FileReader reader(fName);
ReaderSource<FileReader> src(reader);
// skip header
uint64_t currPos = feature::GetSkipHeaderSize(reader);
src.Skip(currPos);
uint64_t currPos = 0;
uint64_t const fSize = reader.Size();
// read features one by one

View file

@ -1,6 +1,5 @@
#pragma once
#include "feature.hpp"
#include "data_header_reader.hpp"
#include "../defines.hpp"
@ -17,8 +16,6 @@ struct FeatureReaders
FeatureReaders(FilesContainerR const & cont)
: m_cont(cont), m_datR(cont.GetReader(DATA_FILE_TAG))
{
uint64_t const offset = feature::GetSkipHeaderSize(m_datR);
m_datR = m_datR.SubReader(offset, m_datR.Size() - offset);
}
};
@ -28,9 +25,7 @@ public:
FeaturesVector(FeatureReaders const & dataR)
: m_RecordReader(dataR.m_datR, 256), m_source(dataR.m_cont)
{
FileReader r = dataR.m_cont.GetReader(HEADER_FILE_TAG);
m_source.m_base = ReadPrimitiveFromPos<int64_t>(r, 0);
// LOG(LINFO, ("OFFSET = ", m_source.m_base));
m_source.m_header.Load(dataR.m_cont.GetReader(HEADER_FILE_TAG));
}
void Get(uint64_t pos, FeatureType & feature) const

View file

@ -2,7 +2,6 @@
#include "cell_id.hpp"
#include "covering.hpp"
#include "data_header.hpp"
#include "data_header_reader.hpp"
#include "features_vector.hpp"
#include "scale_index.hpp"
#include "scales.hpp"
@ -12,7 +11,6 @@
#include "../geometry/rect2d.hpp"
#include "../coding/file_container.hpp"
//#include "../coding/varint.hpp"
#include "../base/base.hpp"
#include "../base/macros.hpp"
@ -220,8 +218,8 @@ private:
{
// TODO: If path is cellid-style-square, make rect from cellid and don't open the file.
feature::DataHeader header;
feature::ReadDataHeader(path, header);
m_Rect = header.Bounds();
header.Load(FilesContainerR(path).GetReader(HEADER_FILE_TAG));
m_Rect = header.GetBounds();
}
// TODO: GetIndex(), Open() and Close() make Index single-threaded!

View file

@ -1,5 +1,4 @@
#include "index_builder.hpp"
#include "data_header_reader.hpp"
#include "features_vector.hpp"
#include "../defines.hpp"
@ -19,7 +18,11 @@ namespace indexer
FilesContainerW writeCont(datFile, FileWriter::OP_APPEND);
FileWriter writer = writeCont.GetWriter(INDEX_FILE_TAG);
BuildIndex(featuresVector, writer, tmpFile);
feature::DataHeader header;
header.Load(readCont.GetReader(HEADER_FILE_TAG));
BuildIndex(header.GetScale(header.GetScalesCount()-1) + 1, featuresVector, writer, tmpFile);
writer.Flush();
writeCont.Finish();

View file

@ -5,7 +5,8 @@
namespace indexer
{
template <class FeaturesVectorT, typename WriterT>
void BuildIndex(FeaturesVectorT const & featuresVector,
void BuildIndex(uint32_t bucketsCount,
FeaturesVectorT const & featuresVector,
WriterT & writer,
string const & tmpFilePrefix)
{
@ -13,7 +14,7 @@ namespace indexer
uint64_t indexSize;
{
SubWriter<WriterT> subWriter(writer);
IndexScales(featuresVector, subWriter, tmpFilePrefix);
IndexScales(bucketsCount, featuresVector, subWriter, tmpFilePrefix);
indexSize = subWriter.Size();
}
LOG(LINFO, ("Built scale index. Size =", indexSize));

View file

@ -27,7 +27,6 @@ SOURCES += \
index_builder.cpp \
feature_visibility.cpp \
data_header.cpp \
data_header_reader.cpp \
geometry_coding.cpp \
geometry_serialization.cpp \
tesselator.cpp \
@ -59,7 +58,6 @@ HEADERS += \
file_writer_stream.hpp \
feature_visibility.hpp \
data_header.hpp \
data_header_reader.hpp \
tree_structure.hpp \
feature_impl.hpp \
geometry_coding.hpp \

View file

@ -1,43 +0,0 @@
#include "../../testing/testing.hpp"
#include "../data_header_reader.hpp"
#include "../data_header.hpp"
#include "../cell_id.hpp"
#include "../../coding/file_reader.hpp"
#include "../../coding/file_writer.hpp"
/// Round-trip test: a header written via WriteDataHeader must read back with
/// identical bounds, and GetSkipHeaderSize must point exactly past the header.
UNIT_TEST(DataHeaderSerialization)
{
char const * fileName = "mfj4340smn54123.tmp";
feature::DataHeader header1;
// normalize rect due to conversion rounding errors
m2::RectD rect(11.5, 12.6, 13.7, 14.8);
std::pair<int64_t, int64_t> cellIds = RectToInt64(rect);
rect = Int64ToRect(cellIds);
header1.SetBounds(rect);
// magic value written right after the header to verify the skip offset
uint64_t const controlNumber = 0x54321643;
{
FileWriter writer(fileName);
feature::WriteDataHeader(writer, header1);
writer.Write(&controlNumber, sizeof(controlNumber));
}
feature::DataHeader header2;
TEST_GREATER(feature::ReadDataHeader(FileReader(fileName), header2), 0, ());
TEST_EQUAL(header1.Bounds(), header2.Bounds(), ());
{
FileReader reader(fileName);
uint64_t const headerSize = feature::GetSkipHeaderSize(reader);
TEST_GREATER(headerSize, 0, ());
// reading at offset headerSize should yield the control number above
uint64_t number = 0;
reader.Read(headerSize, &number, sizeof(number));
TEST_EQUAL(controlNumber, number, ());
}
FileWriter::DeleteFile(fileName);
}

View file

@ -4,7 +4,6 @@
#include "../index_builder.hpp"
#include "../classif_routine.hpp"
#include "../features_vector.hpp"
#include "../data_header_reader.hpp"
#include "../../defines.hpp"
#include "../../platform/platform.hpp"
#include "../../coding/file_container.hpp"
@ -25,7 +24,7 @@ UNIT_TEST(BuildIndexTest)
{
FeaturesVector featuresVector(originalContainer);
MemWriter<vector<char> > serialWriter(serialIndex);
indexer::BuildIndex(featuresVector, serialWriter, "build_index_test");
indexer::BuildIndex(ScaleIndexBase::NUM_BUCKETS, featuresVector, serialWriter, "build_index_test");
}
// Create a new mwm file.

View file

@ -34,8 +34,7 @@ SOURCES += \
mercator_test.cpp \
sort_and_merge_intervals_test.cpp \
feature_test.cpp \
data_header_test.cpp \
feature_bucketer_test.cpp \
feature_routine.cpp \
geometry_coding_test.cpp \
triangles_tree_coding_test.cpp
triangles_tree_coding_test.cpp \

View file

@ -8,7 +8,6 @@
#include "../../indexer/data_header.hpp"
#include "../../indexer/osm_decl.hpp"
#include "../../indexer/data_header_reader.hpp"
#include "../../indexer/mercator.hpp"
#include "../../indexer/cell_id.hpp"
@ -142,23 +141,15 @@ public:
// FeaturesCollector implementation
///////////////////////////////////////////////////////////////////////////////////////////////////
void FeaturesCollector::Init()
{
// write empty stub, will be updated in Finish()
WriteDataHeader(m_datFile, feature::DataHeader());
}
FeaturesCollector::FeaturesCollector(string const & fName)
: m_datFile(fName)
{
Init();
}
FeaturesCollector::FeaturesCollector(string const & bucket,
FeaturesCollector::InitDataType const & prefix)
: m_datFile(prefix.first + bucket + prefix.second)
{
Init();
}
uint32_t FeaturesCollector::GetFileSize(FileWriter const & f)
@ -194,20 +185,6 @@ void FeaturesCollector::operator() (FeatureBuilder1 const & fb)
WriteFeatureBase(bytes, fb);
}
void FeaturesCollector::WriteHeader()
{
// rewrite map information with actual data
m_datFile.Seek(0);
feature::DataHeader header;
header.SetBounds(m_bounds);
WriteDataHeader(m_datFile, header);
}
FeaturesCollector::~FeaturesCollector()
{
WriteHeader();
}
///////////////////////////////////////////////////////////////////////////////////////////////////
// Generate functions implementations.
///////////////////////////////////////////////////////////////////////////////////////////////////

View file

@ -43,12 +43,8 @@ namespace feature
m2::RectD m_bounds;
protected:
void Init();
static uint32_t GetFileSize(FileWriter const & f);
void WriteHeader();
void WriteFeatureBase(vector<char> const & bytes, FeatureBuilder1 const & fb);
public:
@ -57,7 +53,6 @@ namespace feature
FeaturesCollector(string const & fName);
FeaturesCollector(string const & bucket, InitDataType const & prefix);
~FeaturesCollector();
void operator() (FeatureBuilder1 const & f);
};

View file

@ -72,21 +72,21 @@ namespace
namespace feature
{
typedef array<uint8_t, 4> scales_t;
class FeaturesCollector2 : public FeaturesCollector
{
FilesContainerW m_writer;
vector<FileWriter*> m_geoFile, m_trgFile;
int64_t m_base;
static const int m_scales = ARRAY_SIZE(g_arrScales);
feature::DataHeader m_header;
public:
FeaturesCollector2(string const & fName, int64_t base)
: FeaturesCollector(fName + DATA_FILE_TAG), m_writer(fName), m_base(base)
FeaturesCollector2(string const & fName, feature::DataHeader const & header)
: FeaturesCollector(fName + DATA_FILE_TAG), m_writer(fName), m_header(header)
{
for (int i = 0; i < m_scales; ++i)
for (int i = 0; i < m_header.GetScalesCount(); ++i)
{
string const postfix = utils::to_string(i);
m_geoFile.push_back(new FileWriter(fName + GEOMETRY_FILE_TAG + postfix));
@ -96,12 +96,10 @@ namespace feature
~FeaturesCollector2()
{
WriteHeader();
// write own mwm header (now it's a base point only)
LOG(LINFO, ("OFFSET = ", m_base));
m_header.SetBounds(m_bounds);
FileWriter w = m_writer.GetWriter(HEADER_FILE_TAG);
WriteToSink(w, m_base);
m_header.Save(w);
w.Flush();
// assume like we close files
@ -109,7 +107,7 @@ namespace feature
m_writer.Append(m_datFile.GetName(), DATA_FILE_TAG);
for (int i = 0; i < m_scales; ++i)
for (int i = 0; i < m_header.GetScalesCount(); ++i)
{
string const geomFile = m_geoFile[i]->GetName();
string const trgFile = m_trgFile[i]->GetName();
@ -315,18 +313,18 @@ namespace feature
{
(void)GetFileSize(m_datFile);
GeometryHolder holder(*this, fb, m_base);
GeometryHolder holder(*this, fb, m_header.GetBase());
bool const isLine = fb.IsLine();
bool const isArea = fb.IsArea();
for (int i = m_scales-1; i >= 0; --i)
for (int i = m_header.GetScalesCount()-1; i >= 0; --i)
{
if (fb.IsDrawableInRange(i > 0 ? g_arrScales[i-1] + 1 : 0, g_arrScales[i]))
if (fb.IsDrawableInRange(i > 0 ? m_header.GetScale(i-1) + 1 : 0, m_header.GetScale(i)))
{
// simplify and serialize geometry
points_t points;
SimplifyPoints(holder.GetSourcePoints(), points, g_arrScales[i]);
SimplifyPoints(holder.GetSourcePoints(), points, m_header.GetScale(i));
if (isLine)
holder.AddPoints(points, i);
@ -345,7 +343,7 @@ namespace feature
{
simpleHoles.push_back(points_t());
SimplifyPoints(*iH, simpleHoles.back(), g_arrScales[i]);
SimplifyPoints(*iH, simpleHoles.back(), m_header.GetScale(i));
if (simpleHoles.back().size() < 3)
simpleHoles.pop_back();
@ -358,7 +356,7 @@ namespace feature
if (fb.PreSerialize(holder.m_buffer))
{
fb.Serialize(holder.m_buffer, m_base);
fb.Serialize(holder.m_buffer, m_header.GetBase());
WriteFeatureBase(holder.m_buffer.m_buffer, fb);
}
@ -372,7 +370,7 @@ namespace feature
}
bool GenerateFinalFeatures(string const & datFilePath, bool bSort)
bool GenerateFinalFeatures(string const & datFilePath, bool bSort, bool bWorld)
{
// rename input file
Platform & platform = GetPlatform();
@ -398,7 +396,11 @@ namespace feature
{
FileReader reader(tempDatFilePath);
FeaturesCollector2 collector(datFilePath, feature::pts::FromPoint(midPoints.GetCenter()));
feature::DataHeader header;
header.SetBase(midPoints.GetCenter());
header.SetScales(bWorld ? g_arrWorldScales : g_arrCountryScales);
FeaturesCollector2 collector(datFilePath, header);
FeatureBuilder1::buffer_t buffer;
for (size_t i = 0; i < midPoints.m_vec.size(); ++i)

View file

@ -13,7 +13,7 @@ namespace feature
{
/// Final generation of data from input feature-dat-file.
/// @param[in] bSort sorts features in the given file by their mid points
bool GenerateFinalFeatures(string const & datFile, bool bSort);
bool GenerateFinalFeatures(string const & datFile, bool bSort, bool bWorld);
template <class PointT>
inline bool are_points_equal(PointT const & p1, PointT const & p2)

View file

@ -138,6 +138,7 @@ int main(int argc, char ** argv)
for (size_t i = 0; i < genInfo.bucketNames.size(); ++i)
genInfo.bucketNames[i] = genInfo.datFilePrefix + genInfo.bucketNames[i] + genInfo.datFileSuffix;
if (FLAGS_generate_world_scale >= 0)
genInfo.bucketNames.push_back(genInfo.datFilePrefix + WORLD_FILE_NAME + genInfo.datFileSuffix);
}
@ -147,14 +148,16 @@ int main(int argc, char ** argv)
}
// Enumerate over all dat files that were created.
for (size_t i = 0; i < genInfo.bucketNames.size(); ++i)
size_t const count = genInfo.bucketNames.size();
for (size_t i = 0; i < count; ++i)
{
string const & datFile = genInfo.bucketNames[i];
if (FLAGS_generate_geometry)
{
LOG(LINFO, ("Generating result features for ", datFile));
if (!feature::GenerateFinalFeatures(datFile, FLAGS_sort_features))
if (!feature::GenerateFinalFeatures(datFile,
FLAGS_sort_features, datFile == path + WORLD_FILE_NAME + DATA_FILE_EXTENSION))
{
// If error - move to next bucket without index generation
continue;

View file

@ -21,7 +21,7 @@ namespace stats
vector<string> tags;
tags.push_back(DATA_FILE_TAG);
for (int i = 0; i < ARRAY_SIZE(feature::g_arrScales); ++i)
for (int i = 0; i < ARRAY_SIZE(feature::g_arrCountryScales); ++i)
{
tags.push_back(feature::GetTagForIndex(GEOMETRY_FILE_TAG, i));
tags.push_back(feature::GetTagForIndex(TRIANGLE_FILE_TAG, i));

View file

@ -16,6 +16,7 @@
#include "../../std/scoped_ptr.hpp"
#include "../../std/unordered_map.hpp"
namespace m2
{
inline size_t hash_value(m2::PointD const & pt)
@ -41,6 +42,18 @@ class WorldMapGenerator
TypesContainerT m_features;
private:
/// Emits the merged feature into the world bucket when its ring is closed
/// (first point equals last point).
/// @return true if the feature was emitted as an area.
bool EmitAreaFeature(FeatureBuilder1Merger & fbm)
{
// Guard clause: an open polyline is not an area - nothing to emit.
if (fbm.FirstPoint() != fbm.LastPoint())
return false;

fbm.SetAreaSafe();
(*m_worldBucket)(fbm);
++m_areasCounter;
return true;
}
/// scans all features and tries to merge them with each other
/// @return true if one feature was merged
bool ReMergeFeatures(FeaturesContainerT & features)
@ -55,12 +68,8 @@ private:
features.erase(found);
++m_mergedCounter;
if (base->second.FirstPoint() == base->second.LastPoint())
{
(*m_worldBucket)(base->second);
if (EmitAreaFeature(base->second))
features.erase(base);
++m_areasCounter;
}
return true;
}
}
@ -78,12 +87,7 @@ private:
++m_mergedCounter;
}
if (fbm.FirstPoint() == fbm.LastPoint())
{
(*m_worldBucket)(fbm);
++m_areasCounter;
}
else
if (!EmitAreaFeature(fbm))
{
pair<FeaturesContainerT::iterator, bool> result = container.insert(make_pair(fbm.FirstPoint(), fbm));
// if we found feature with the same starting point, emit it directly

View file

@ -138,7 +138,8 @@ private:
};
template <class FeaturesVectorT, class WriterT>
inline void IndexScales(FeaturesVectorT const & featuresVector,
inline void IndexScales(uint32_t bucketsCount,
FeaturesVectorT const & featuresVector,
WriterT & writer,
string const & tmpFilePrefix)
{
@ -147,8 +148,8 @@ inline void IndexScales(FeaturesVectorT const & featuresVector,
//typedef pair<int64_t, uint32_t> CellFeaturePair;
STATIC_ASSERT(sizeof(CellFeaturePair) == 12);
VarSerialVectorWriter<WriterT> recordWriter(writer, ScaleIndexBase::NUM_BUCKETS);
for (uint32_t bucket = 0; bucket < ScaleIndexBase::NUM_BUCKETS; ++bucket)
VarSerialVectorWriter<WriterT> recordWriter(writer, bucketsCount);
for (uint32_t bucket = 0; bucket < bucketsCount; ++bucket)
{
LOG(LINFO, ("Building scale index for bucket:", bucket));
uint32_t numFeatures = 0;

View file

@ -7,7 +7,6 @@
#include "../indexer/drawing_rule_def.hpp"
#include "../indexer/mercator.hpp"
#include "../indexer/data_header_reader.hpp"
#include "../indexer/data_header.hpp"
#include "../indexer/scales.hpp"
#include "../indexer/feature.hpp"
@ -140,19 +139,15 @@ class FrameWork
{
// update rect for Show All button
feature::DataHeader header;
if (feature::ReadDataHeader(datFile, header))
header.Load(FilesContainerR(datFile).GetReader(HEADER_FILE_TAG));
m_model.AddWorldRect(header.GetBounds());
{
m_model.AddWorldRect(header.Bounds());
{
threads::MutexGuard lock(m_modelSyn);
m_model.AddMap(datFile);
}
}
else
{
LOG(LWARNING, ("Trying to activate invalid data file", datFile));
threads::MutexGuard lock(m_modelSyn);
m_model.AddMap(datFile);
}
}
void RemoveMap(string const & datFile)
{
threads::MutexGuard lock(m_modelSyn);

View file

@ -8,7 +8,6 @@
#include "../../map/feature_vec_model.hpp"
#include "../../indexer/data_header_reader.hpp"
#include "../../indexer/data_header.hpp"
#include "../../indexer/scales.hpp"
#include "../../indexer/feature_visibility.hpp"
@ -235,10 +234,10 @@ namespace
src1.AddMap(path);
feature::DataHeader mapInfo;
TEST_GREATER(feature::ReadDataHeader(path, mapInfo), 0, ());
mapInfo.Load(FilesContainerR(path).GetReader(HEADER_FILE_TAG));
vector<m2::RectD> rects;
rects.push_back(mapInfo.Bounds());
rects.push_back(mapInfo.GetBounds());
while (!rects.empty())
{

View file

@ -7,16 +7,17 @@
#include "../coding/streams_sink.hpp"
#include "../coding/file_reader.hpp"
#include "../coding/file_writer.hpp"
#include "../coding/file_container.hpp"
#include "../version/version.hpp"
#include "../platform/platform.hpp"
#include "../indexer/data_header.hpp"
#include "../indexer/data_header_reader.hpp"
#include "../std/fstream.hpp"
namespace storage
{
/// Simple check - compare url size with real file size on disk
@ -34,9 +35,10 @@ namespace storage
CountryBoundsCalculator(m2::RectD & bounds) : m_bounds(bounds) {}
void operator()(TTile const & tile)
{
static feature::DataHeader header;
if (feature::ReadDataHeader(GetPlatform().WritablePathForFile(tile.first), header))
m_bounds.Add(header.Bounds());
feature::DataHeader header;
FilesContainerR reader(GetPlatform().WritablePathForFile(tile.first));
header.Load(reader.GetReader(HEADER_FILE_TAG));
m_bounds.Add(header.GetBounds());
}
};