forked from organicmaps/organicmaps

commit 954f332076 (parent 3532ba29cf)

    Review fixes.

3 changed files with 30 additions and 30 deletions
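The review fixes, as reconstructed from the hunks below: Query::InitParams() now resets its output through a new SearchQueryParams::Clear() method; the single-use DoStoreNumbers functor is replaced by an inline lambda in ProcessAddressTokens(); EraseTokens() deduplicates its index list with the erase/unique idiom; DoAddStreetSynonyms::GetSyms() becomes const and the direction-synonym else-if chain is flattened to independent ifs; and the SearchQueryParams data members move above the methods in the header.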
@@ -1245,13 +1245,15 @@ namespace
 void Query::InitParams(bool localitySearch, SearchQueryParams & params)
 {
+  params.Clear();

   if (!m_prefix.empty())
     params.m_prefixTokens.push_back(m_prefix);

   size_t const tokensCount = m_tokens.size();
-  params.m_tokens.resize(tokensCount);

+  // Add normal tokens.
+  params.m_tokens.resize(tokensCount);
   for (size_t i = 0; i < tokensCount; ++i)
     params.m_tokens[i].push_back(m_tokens[i]);
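The params.Clear() call makes InitParams safe to invoke on a reused SearchQueryParams object, since stale tokens from a previous query can no longer leak into the new one. A minimal sketch of that reset-before-fill pattern; the Params struct and InitParams below are simplified stand-ins, not the real search classes:

#include <cstddef>
#include <string>
#include <vector>

// Simplified stand-in for SearchQueryParams (an assumption, not the real class).
struct Params
{
  std::vector<std::vector<std::string>> m_tokens;
  std::vector<std::string> m_prefixTokens;

  void Clear()
  {
    m_tokens.clear();
    m_prefixTokens.clear();
  }
};

// Toy InitParams: reset first, then fill, mirroring the diff above.
void InitParams(std::vector<std::string> const & tokens, Params & params)
{
  params.Clear();

  // Add normal tokens.
  params.m_tokens.resize(tokens.size());
  for (std::size_t i = 0; i < tokens.size(); ++i)
    params.m_tokens[i].push_back(tokens[i]);
}

int main()
{
  Params params;
  InitParams({"hello", "world"}, params);
  InitParams({"short"}, params);  // without Clear(), "world" would linger
  return params.m_tokens.size() == 1 ? 0 : 1;
}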
@@ -10,22 +10,6 @@ namespace search
 {
 namespace
 {
-class DoStoreNumbers
-{
-public:
-  DoStoreNumbers(vector<size_t> & vec) : m_vec(vec) {}
-
-  void operator()(SearchQueryParams::TString const & s, size_t i)
-  {
-    /// @todo Do smart filtering of house numbers and zipcodes.
-    if (feature::IsNumber(s))
-      m_vec.push_back(i);
-  }
-
-private:
-  vector<size_t> & m_vec;
-};
-
 class DoAddStreetSynonyms
 {
 public:
@@ -40,24 +24,24 @@ public:
     // All synonyms should be lowercase!
     if (ss == "n")
       AddSym(i, "north");
-    else if (ss == "w")
+    if (ss == "w")
       AddSym(i, "west");
-    else if (ss == "s")
+    if (ss == "s")
       AddSym(i, "south");
-    else if (ss == "e")
+    if (ss == "e")
       AddSym(i, "east");
-    else if (ss == "nw")
+    if (ss == "nw")
       AddSym(i, "northwest");
-    else if (ss == "ne")
+    if (ss == "ne")
       AddSym(i, "northeast");
-    else if (ss == "sw")
+    if (ss == "sw")
       AddSym(i, "southwest");
-    else if (ss == "se")
+    if (ss == "se")
       AddSym(i, "southeast");
   }

 private:
-  SearchQueryParams::TSynonymsVector & GetSyms(size_t i)
+  SearchQueryParams::TSynonymsVector & GetSyms(size_t i) const
   {
     size_t const count = m_params.m_tokens.size();
     if (i < count)
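Marking GetSyms const is legal even though it hands back a mutable reference: the functor stores a reference to the params object, and a const member function only promises not to modify the functor's own members. A short sketch of that detail; Params here is a hypothetical stand-in for SearchQueryParams:

#include <cstddef>
#include <vector>

struct Params  // hypothetical stand-in for SearchQueryParams
{
  std::vector<std::vector<int>> m_tokens;
  std::vector<int> m_prefixTokens;
};

class DoAddSyms  // mirrors the shape of DoAddStreetSynonyms
{
public:
  explicit DoAddSyms(Params & params) : m_params(params) {}

  // const constrains only the functor's own members (the reference m_params),
  // so returning a non-const reference into the referenced object compiles.
  std::vector<int> & GetSyms(std::size_t i) const
  {
    std::size_t const count = m_params.m_tokens.size();
    return i < count ? m_params.m_tokens[i] : m_params.m_prefixTokens;
  }

private:
  Params & m_params;
};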
@@ -72,9 +56,17 @@ private:
 };
 }  // namespace

+void SearchQueryParams::Clear()
+{
+  m_tokens.clear();
+  m_prefixTokens.clear();
+  m_langs.clear();
+}
+
 void SearchQueryParams::EraseTokens(vector<size_t> & eraseInds)
 {
+  eraseInds.erase(unique(eraseInds.begin(), eraseInds.end()), eraseInds.end());
   ASSERT(is_sorted(eraseInds.begin(), eraseInds.end()), ());

   // fill temporary vector
   vector<TSynonymsVector> newTokens;
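The added line in EraseTokens is the standard erase/unique idiom: std::unique compacts adjacent duplicates in an already-sorted range (hence the is_sorted assertion) and erase trims the leftover tail. A standalone illustration:

#include <algorithm>
#include <cassert>
#include <cstddef>
#include <vector>

int main()
{
  // A sorted index list that may now contain duplicates.
  std::vector<std::size_t> inds = {0, 2, 2, 5, 5, 5, 7};
  assert(std::is_sorted(inds.begin(), inds.end()));

  // unique() shifts the distinct elements forward and returns the new logical
  // end; erase() removes the indeterminate tail left behind.
  inds.erase(std::unique(inds.begin(), inds.end()), inds.end());

  assert((inds == std::vector<std::size_t>{0, 2, 5, 7}));
  return 0;
}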
@@ -107,7 +99,11 @@ void SearchQueryParams::ProcessAddressTokens()
   // Erases all number tokens.
   // Assumes that USA street name numbers are end with "st, nd, rd, th" suffixes.
   vector<size_t> toErase;
-  ForEachToken(DoStoreNumbers(toErase));
+  ForEachToken([&toErase](SearchQueryParams::TString const & s, size_t i)
+  {
+    if (feature::IsNumber(s))
+      toErase.push_back(i);
+  });
   EraseTokens(toErase);

   // Adds synonyms for N, NE, NW, etc.
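This hunk is the classic functor-to-lambda refactoring: the capture-by-reference [&toErase] plays the role of the vector<size_t> & member that DoStoreNumbers used to hold. A self-contained sketch of the same pattern; ForEachToken and IsNumber below are simplified stand-ins, not the real search/feature implementations:

#include <algorithm>
#include <cctype>
#include <cstddef>
#include <string>
#include <vector>

// Stand-in for feature::IsNumber: true if the token is all digits.
bool IsNumber(std::string const & s)
{
  return !s.empty() && std::all_of(s.begin(), s.end(),
                                   [](unsigned char c) { return std::isdigit(c) != 0; });
}

// Stand-in for ForEachToken: calls fn(token, index) for every token.
template <class TFn>
void ForEachToken(std::vector<std::string> const & tokens, TFn && fn)
{
  for (std::size_t i = 0; i < tokens.size(); ++i)
    fn(tokens[i], i);
}

int main()
{
  std::vector<std::string> const tokens = {"main", "15", "street"};
  std::vector<std::size_t> toErase;

  // The lambda captures toErase by reference, exactly what the removed
  // DoStoreNumbers functor did with its reference member.
  ForEachToken(tokens, [&toErase](std::string const & s, std::size_t i)
  {
    if (IsNumber(s))
      toErase.push_back(i);
  });

  return (toErase.size() == 1 && toErase[0] == 1) ? 0 : 1;
}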
@@ -14,6 +14,12 @@ struct SearchQueryParams
   using TSynonymsVector = vector<TString>;
   using TLangsSet = unordered_set<int8_t>;

+  vector<TSynonymsVector> m_tokens;
+  TSynonymsVector m_prefixTokens;
+  TLangsSet m_langs;
+
+  void Clear();
+
   /// @param[in] eraseInds Sorted vector of token's indexes.
   void EraseTokens(vector<size_t> & eraseInds);

@@ -23,10 +29,6 @@ struct SearchQueryParams
   inline bool CanSuggest() const { return (m_tokens.empty() && !m_prefixTokens.empty()); }
   inline bool IsLangExist(int8_t l) const { return (m_langs.count(l) > 0); }

-  vector<TSynonymsVector> m_tokens;
-  TSynonymsVector m_prefixTokens;
-  TLangsSet m_langs;
-
 private:
   template <class ToDo>
   void ForEachToken(ToDo && toDo);