Diffstat (limited to 'test/plugins')
 test/plugins/plain/DynamicTokenizerTest.cpp | 81
 1 file changed, 40 insertions(+), 41 deletions(-)
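
This change decouples DynamicTokenizer from a fixed CharReader: the reader is no longer bound at construction but passed to each read()/peek() call, presumably so one tokenizer (with its registered token set) can be reused across several input streams. A minimal sketch of the new calling convention, as exercised by the updated tests below (names and setup taken from the test file, error handling omitted):

    CharReader reader{"test1:test2"};
    DynamicTokenizer tokenizer{WhitespaceMode::COLLAPSE};
    const TokenTypeId tid = tokenizer.registerToken(":");

    DynamicToken token;
    while (tokenizer.read(reader, token)) {
        // token.type is TextToken or a registered id such as tid;
        // token.content holds the whitespace-processed text and
        // token.location the covered source range.
    }
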
diff --git a/test/plugins/plain/DynamicTokenizerTest.cpp b/test/plugins/plain/DynamicTokenizerTest.cpp
index 63fa466..5183fdd 100644
--- a/test/plugins/plain/DynamicTokenizerTest.cpp
+++ b/test/plugins/plain/DynamicTokenizerTest.cpp
@@ -25,8 +25,7 @@ namespace ousia {
TEST(DynamicTokenizer, tokenRegistration)
{
- CharReader reader{"test"};
- DynamicTokenizer tokenizer{reader};
+ DynamicTokenizer tokenizer;
ASSERT_EQ(EmptyToken, tokenizer.registerToken(""));
@@ -57,10 +56,10 @@ TEST(DynamicTokenizer, textTokenPreserveWhitespace)
CharReader reader{" this \t is only a \n\n test text "};
// 012345 6789012345678 9 0123456789012345
// 0 1 2 3
- DynamicTokenizer tokenizer{reader, WhitespaceMode::PRESERVE};
+ DynamicTokenizer tokenizer{WhitespaceMode::PRESERVE};
DynamicToken token;
- ASSERT_TRUE(tokenizer.read(token));
+ ASSERT_TRUE(tokenizer.read(reader, token));
ASSERT_EQ(TextToken, token.type);
ASSERT_EQ(" this \t is only a \n\n test text ", token.content);
@@ -68,17 +67,17 @@ TEST(DynamicTokenizer, textTokenPreserveWhitespace)
ASSERT_EQ(0U, loc.getStart());
ASSERT_EQ(36U, loc.getEnd());
- ASSERT_FALSE(tokenizer.read(token));
+ ASSERT_FALSE(tokenizer.read(reader, token));
}
{
CharReader reader{"this \t is only a \n\n test text"};
// 01234 5678901234567 8 9012345678901
// 0 1 2 3
- DynamicTokenizer tokenizer{reader, WhitespaceMode::PRESERVE};
+ DynamicTokenizer tokenizer{WhitespaceMode::PRESERVE};
DynamicToken token;
- ASSERT_TRUE(tokenizer.read(token));
+ ASSERT_TRUE(tokenizer.read(reader, token));
ASSERT_EQ(TextToken, token.type);
ASSERT_EQ("this \t is only a \n\n test text", token.content);
@@ -86,7 +85,7 @@ TEST(DynamicTokenizer, textTokenPreserveWhitespace)
ASSERT_EQ(0U, loc.getStart());
ASSERT_EQ(32U, loc.getEnd());
- ASSERT_FALSE(tokenizer.read(token));
+ ASSERT_FALSE(tokenizer.read(reader, token));
}
}
@@ -96,10 +95,10 @@ TEST(DynamicTokenizer, textTokenTrimWhitespace)
CharReader reader{" this \t is only a \n\n test text "};
// 012345 6789012345678 9 0123456789012345
// 0 1 2 3
- DynamicTokenizer tokenizer{reader, WhitespaceMode::TRIM};
+ DynamicTokenizer tokenizer{WhitespaceMode::TRIM};
DynamicToken token;
- ASSERT_TRUE(tokenizer.read(token));
+ ASSERT_TRUE(tokenizer.read(reader, token));
ASSERT_EQ(TextToken, token.type);
ASSERT_EQ("this \t is only a \n\n test text", token.content);
@@ -107,17 +106,17 @@ TEST(DynamicTokenizer, textTokenTrimWhitespace)
ASSERT_EQ(1U, loc.getStart());
ASSERT_EQ(33U, loc.getEnd());
- ASSERT_FALSE(tokenizer.read(token));
+ ASSERT_FALSE(tokenizer.read(reader, token));
}
{
CharReader reader{"this \t is only a \n\n test text"};
// 01234 5678901234567 8 9012345678901
// 0 1 2 3
- DynamicTokenizer tokenizer{reader, WhitespaceMode::TRIM};
+ DynamicTokenizer tokenizer{WhitespaceMode::TRIM};
DynamicToken token;
- ASSERT_TRUE(tokenizer.read(token));
+ ASSERT_TRUE(tokenizer.read(reader, token));
ASSERT_EQ(TextToken, token.type);
ASSERT_EQ("this \t is only a \n\n test text", token.content);
@@ -125,7 +124,7 @@ TEST(DynamicTokenizer, textTokenTrimWhitespace)
ASSERT_EQ(0U, loc.getStart());
ASSERT_EQ(32U, loc.getEnd());
- ASSERT_FALSE(tokenizer.read(token));
+ ASSERT_FALSE(tokenizer.read(reader, token));
}
}
@@ -135,10 +134,10 @@ TEST(DynamicTokenizer, textTokenCollapseWhitespace)
CharReader reader{" this \t is only a \n\n test text "};
// 012345 6789012345678 9 0123456789012345
// 0 1 2 3
- DynamicTokenizer tokenizer{reader, WhitespaceMode::COLLAPSE};
+ DynamicTokenizer tokenizer{WhitespaceMode::COLLAPSE};
DynamicToken token;
- ASSERT_TRUE(tokenizer.read(token));
+ ASSERT_TRUE(tokenizer.read(reader, token));
ASSERT_EQ(TextToken, token.type);
ASSERT_EQ("this is only a test text", token.content);
@@ -146,17 +145,17 @@ TEST(DynamicTokenizer, textTokenCollapseWhitespace)
ASSERT_EQ(1U, loc.getStart());
ASSERT_EQ(33U, loc.getEnd());
- ASSERT_FALSE(tokenizer.read(token));
+ ASSERT_FALSE(tokenizer.read(reader, token));
}
{
CharReader reader{"this \t is only a \n\n test text"};
// 01234 5678901234567 8 9012345678901
// 0 1 2 3
- DynamicTokenizer tokenizer{reader, WhitespaceMode::COLLAPSE};
+ DynamicTokenizer tokenizer{WhitespaceMode::COLLAPSE};
DynamicToken token;
- ASSERT_TRUE(tokenizer.read(token));
+ ASSERT_TRUE(tokenizer.read(reader, token));
ASSERT_EQ(TextToken, token.type);
ASSERT_EQ("this is only a test text", token.content);
@@ -164,21 +163,21 @@ TEST(DynamicTokenizer, textTokenCollapseWhitespace)
ASSERT_EQ(0U, loc.getStart());
ASSERT_EQ(32U, loc.getEnd());
- ASSERT_FALSE(tokenizer.read(token));
+ ASSERT_FALSE(tokenizer.read(reader, token));
}
}
TEST(DynamicTokenizer, simpleReadToken)
{
CharReader reader{"test1:test2"};
- DynamicTokenizer tokenizer{reader};
+ DynamicTokenizer tokenizer;
const TokenTypeId tid = tokenizer.registerToken(":");
ASSERT_EQ(0U, tid);
{
DynamicToken token;
- ASSERT_TRUE(tokenizer.read(token));
+ ASSERT_TRUE(tokenizer.read(reader, token));
ASSERT_EQ(TextToken, token.type);
ASSERT_EQ("test1", token.content);
@@ -194,7 +193,7 @@ TEST(DynamicTokenizer, simpleReadToken)
{
DynamicToken token;
- ASSERT_TRUE(tokenizer.read(token));
+ ASSERT_TRUE(tokenizer.read(reader, token));
ASSERT_EQ(tid, token.type);
ASSERT_EQ(":", token.content);
@@ -210,7 +209,7 @@ TEST(DynamicTokenizer, simpleReadToken)
{
DynamicToken token;
- ASSERT_TRUE(tokenizer.read(token));
+ ASSERT_TRUE(tokenizer.read(reader, token));
ASSERT_EQ(TextToken, token.type);
ASSERT_EQ("test2", token.content);
@@ -227,14 +226,14 @@ TEST(DynamicTokenizer, simpleReadToken)
TEST(DynamicTokenizer, simplePeekToken)
{
CharReader reader{"test1:test2"};
- DynamicTokenizer tokenizer{reader};
+ DynamicTokenizer tokenizer;
const TokenTypeId tid = tokenizer.registerToken(":");
ASSERT_EQ(0U, tid);
{
DynamicToken token;
- ASSERT_TRUE(tokenizer.peek(token));
+ ASSERT_TRUE(tokenizer.peek(reader, token));
ASSERT_EQ(TextToken, token.type);
ASSERT_EQ("test1", token.content);
@@ -248,7 +247,7 @@ TEST(DynamicTokenizer, simplePeekToken)
{
DynamicToken token;
- ASSERT_TRUE(tokenizer.peek(token));
+ ASSERT_TRUE(tokenizer.peek(reader, token));
ASSERT_EQ(tid, token.type);
ASSERT_EQ(":", token.content);
@@ -262,7 +261,7 @@ TEST(DynamicTokenizer, simplePeekToken)
{
DynamicToken token;
- ASSERT_TRUE(tokenizer.peek(token));
+ ASSERT_TRUE(tokenizer.peek(reader, token));
ASSERT_EQ(TextToken, token.type);
ASSERT_EQ("test2", token.content);
@@ -276,7 +275,7 @@ TEST(DynamicTokenizer, simplePeekToken)
{
DynamicToken token;
- ASSERT_TRUE(tokenizer.read(token));
+ ASSERT_TRUE(tokenizer.read(reader, token));
ASSERT_EQ(TextToken, token.type);
ASSERT_EQ("test1", token.content);
@@ -290,7 +289,7 @@ TEST(DynamicTokenizer, simplePeekToken)
{
DynamicToken token;
- ASSERT_TRUE(tokenizer.read(token));
+ ASSERT_TRUE(tokenizer.read(reader, token));
ASSERT_EQ(tid, token.type);
ASSERT_EQ(":", token.content);
@@ -304,7 +303,7 @@ TEST(DynamicTokenizer, simplePeekToken)
{
DynamicToken token;
- ASSERT_TRUE(tokenizer.read(token));
+ ASSERT_TRUE(tokenizer.read(reader, token));
ASSERT_EQ(TextToken, token.type);
ASSERT_EQ("test2", token.content);
@@ -320,7 +319,7 @@ TEST(DynamicTokenizer, simplePeekToken)
TEST(DynamicTokenizer, ambiguousTokens)
{
CharReader reader{"abc"};
- DynamicTokenizer tokenizer(reader);
+ DynamicTokenizer tokenizer;
TokenTypeId t1 = tokenizer.registerToken("abd");
TokenTypeId t2 = tokenizer.registerToken("bc");
@@ -329,7 +328,7 @@ TEST(DynamicTokenizer, ambiguousTokens)
ASSERT_EQ(1U, t2);
DynamicToken token;
- ASSERT_TRUE(tokenizer.read(token));
+ ASSERT_TRUE(tokenizer.read(reader, token));
ASSERT_EQ(TextToken, token.type);
ASSERT_EQ("a", token.content);
@@ -338,7 +337,7 @@ TEST(DynamicTokenizer, ambiguousTokens)
ASSERT_EQ(0U, loc.getStart());
ASSERT_EQ(1U, loc.getEnd());
- ASSERT_TRUE(tokenizer.read(token));
+ ASSERT_TRUE(tokenizer.read(reader, token));
ASSERT_EQ(t2, token.type);
ASSERT_EQ("bc", token.content);
@@ -347,7 +346,7 @@ TEST(DynamicTokenizer, ambiguousTokens)
ASSERT_EQ(1U, loc.getStart());
ASSERT_EQ(3U, loc.getEnd());
- ASSERT_FALSE(tokenizer.read(token));
+ ASSERT_FALSE(tokenizer.read(reader, token));
}
TEST(DynamicTokenizer, commentTestWhitespacePreserve)
@@ -355,7 +354,7 @@ TEST(DynamicTokenizer, commentTestWhitespacePreserve)
CharReader reader{"Test/Test /* Block Comment */", 0};
// 012345678901234567890123456789
// 0 1 2
- DynamicTokenizer tokenizer(reader, WhitespaceMode::PRESERVE);
+ DynamicTokenizer tokenizer(WhitespaceMode::PRESERVE);
const TokenTypeId t1 = tokenizer.registerToken("/");
const TokenTypeId t2 = tokenizer.registerToken("/*");
@@ -371,14 +370,14 @@ TEST(DynamicTokenizer, commentTestWhitespacePreserve)
DynamicToken t;
for (auto &te : expected) {
- EXPECT_TRUE(tokenizer.read(t));
+ EXPECT_TRUE(tokenizer.read(reader, t));
EXPECT_EQ(te.type, t.type);
EXPECT_EQ(te.content, t.content);
EXPECT_EQ(te.location.getSourceId(), t.location.getSourceId());
EXPECT_EQ(te.location.getStart(), t.location.getStart());
EXPECT_EQ(te.location.getEnd(), t.location.getEnd());
}
- ASSERT_FALSE(tokenizer.read(t));
+ ASSERT_FALSE(tokenizer.read(reader, t));
}
TEST(DynamicTokenizer, commentTestWhitespaceCollapse)
@@ -386,7 +385,7 @@ TEST(DynamicTokenizer, commentTestWhitespaceCollapse)
CharReader reader{"Test/Test /* Block Comment */", 0};
// 012345678901234567890123456789
// 0 1 2
- DynamicTokenizer tokenizer(reader, WhitespaceMode::COLLAPSE);
+ DynamicTokenizer tokenizer(WhitespaceMode::COLLAPSE);
const TokenTypeId t1 = tokenizer.registerToken("/");
const TokenTypeId t2 = tokenizer.registerToken("/*");
@@ -402,14 +401,14 @@ TEST(DynamicTokenizer, commentTestWhitespaceCollapse)
DynamicToken t;
for (auto &te : expected) {
- EXPECT_TRUE(tokenizer.read(t));
+ EXPECT_TRUE(tokenizer.read(reader, t));
EXPECT_EQ(te.type, t.type);
EXPECT_EQ(te.content, t.content);
EXPECT_EQ(te.location.getSourceId(), t.location.getSourceId());
EXPECT_EQ(te.location.getStart(), t.location.getStart());
EXPECT_EQ(te.location.getEnd(), t.location.getEnd());
}
- ASSERT_FALSE(tokenizer.read(t));
+ ASSERT_FALSE(tokenizer.read(reader, t));
}
}
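
As a side note, the expectations above pin down the three whitespace modes on the input " this \t is only a \n\n test text " (36 characters):

    PRESERVE -> " this \t is only a \n\n test text "   location [0, 36)
    TRIM     -> "this \t is only a \n\n test text"     location [1, 33)
    COLLAPSE -> "this is only a test text"             location [1, 33)

TRIM strips leading and trailing whitespace (the token location shrinks accordingly), while COLLAPSE additionally folds interior whitespace runs into single spaces; in both cases the location still refers to the corresponding range in the original input. The simplePeekToken test also documents that peek() does not consume input: after peeking through all three tokens, read() yields "test1", ":", "test2" again from the start.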