forked from organicmaps/organicmaps-tmp
Add some tokenize tests.
parent 8a47c2fab6
commit f0122d60b5
2 changed files with 46 additions and 4 deletions
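For orientation, the new StringSplit_Smoke test added below drives the UniString tokenization pipeline: normalize a UTF-8 string, then split it on the search delimiters into a token vector. A minimal sketch of that call pattern, using only names visible in the hunks below (string, vector, UniString and the string functions come from project headers this diff does not show, so the snippet is illustrative rather than standalone):

// Illustrative sketch only. The two includes are the ones this commit adds to
// the test file; the remaining declarations are assumed to come from the
// project's string utilities, which this diff does not show.
#include "../../base/stl_add.hpp"               // MakeBackInsertFunctor
#include "../../indexer/search_delimiters.hpp"  // Delimiters

using namespace search;
using namespace strings;

void TokenizeSlashSeparated(vector<UniString> & tokens)
{
  // Mirrors the new StringSplit_Smoke test: "1/2" normalizes to itself and
  // splits on '/' into the UniString forms of "1" and "2".
  string const s = "1/2";
  UniString const s1 = NormalizeAndSimplifyString(s);
  SplitUniString(s1, MakeBackInsertFunctor(tokens), Delimiters());
}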
@@ -295,6 +295,11 @@ UNIT_TEST(SimpleTokenizer)
               " -\xD9\x87", tokens);
  }

+  {
+    char const * s[] = {"1", "2"};
+    tokens.assign(&s[0], &s[0] + ARRAY_SIZE(s));
+    TestIter("/1/2/", "/", tokens);
+  }
}

UNIT_TEST(LastUniChar)

@@ -1,21 +1,29 @@
#include "../../testing/testing.hpp"
#include "../approximate_string_match.hpp"

#include "match_cost_mock.hpp"

+#include "../../indexer/search_delimiters.hpp"
+
+#include "../../base/stl_add.hpp"
+
#include "../../std/memcpy.hpp"


using namespace search;
+using namespace strings;

namespace
{

uint32_t FullMatchCost(char const * a, char const * b, uint32_t maxCost = 1000)
{
-  return ::search::StringMatchCost(a, strlen(a), b, strlen(b),
-                                   search::MatchCostMock<char>(), maxCost);
+  return StringMatchCost(a, strlen(a), b, strlen(b), MatchCostMock<char>(), maxCost);
}

uint32_t PrefixMatchCost(char const * a, char const * b)
{
-  return ::search::StringMatchCost(a, strlen(a), b, strlen(b),
-                                   search::MatchCostMock<char>(), 1000, true);
+  return StringMatchCost(a, strlen(a), b, strlen(b), MatchCostMock<char>(), 1000, true);
}

}

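A note on the two helpers simplified above: both wrap search::StringMatchCost with a MatchCostMock<char> cost model. FullMatchCost caps the edit cost at maxCost (default 1000), and PrefixMatchCost passes true as the last argument so that only a prefix of the second string has to match. A minimal sketch of the full-match call shape, relying only on the signature shown in the hunk (the inputs and the low cut-off of 4 are illustrative, and the cut-off semantics are an assumption):

#include "../approximate_string_match.hpp"  // StringMatchCost
#include "match_cost_mock.hpp"              // MatchCostMock

#include <stdint.h>  // uint32_t
#include <string.h>  // strlen

using namespace search;

// Edit cost of turning "Helpo" into "Hello", presumably bounded by the
// maxCost cut-off of 4 (assumption about the cut-off's behaviour).
uint32_t ExampleCost()
{
  char const * a = "Helpo";
  char const * b = "Hello";
  return StringMatchCost(a, strlen(a), b, strlen(b), MatchCostMock<char>(), 4 /* maxCost */);
}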
@@ -67,3 +75,32 @@ UNIT_TEST(StringMatchCost_PrefixMatch)
  TEST_EQUAL(PrefixMatchCost("Helpo", "Hello!"), 1, ());
  TEST_EQUAL(PrefixMatchCost("Happo", "Hello!"), 3, ());
}
+
+namespace
+{
+
+void TestEqual(vector<UniString> const v, char const * arr[])
+{
+  for (size_t i = 0; i < v.size(); ++i)
+  {
+    TEST_EQUAL(ToUtf8(v[i]), arr[i], ());
+    TEST_EQUAL(v[i], MakeUniString(arr[i]), ());
+  }
+}
+
+}
+
+UNIT_TEST(StringSplit_Smoke)
+{
+  vector<UniString> tokens;
+
+  {
+    string const s = "1/2";
+    UniString const s1 = NormalizeAndSimplifyString(s);
+    TEST_EQUAL(ToUtf8(s1), s, ());
+
+    char const * arr[] = { "1", "2" };
+    SplitUniString(s1, MakeBackInsertFunctor(tokens), Delimiters());
+    TestEqual(tokens, arr);
+  }
+}