summary refs log tree commit diff
path: root/test/core/parser/utils/TokenizedDataTestUtils.hpp
diff options
context:
space:
mode:
Diffstat (limited to 'test/core/parser/utils/TokenizedDataTestUtils.hpp')
-rw-r--r--  test/core/parser/utils/TokenizedDataTestUtils.hpp | 41
1 file changed, 26 insertions(+), 15 deletions(-)
diff --git a/test/core/parser/utils/TokenizedDataTestUtils.hpp b/test/core/parser/utils/TokenizedDataTestUtils.hpp
index c384f9d..30f72ae 100644
--- a/test/core/parser/utils/TokenizedDataTestUtils.hpp
+++ b/test/core/parser/utils/TokenizedDataTestUtils.hpp
@@ -21,15 +21,17 @@
namespace ousia {
-static void assertToken(TokenizedDataReader &reader, TokenId id,
- const std::string &text, const TokenSet &tokens = TokenSet{},
- WhitespaceMode mode = WhitespaceMode::TRIM,
- SourceOffset start = InvalidSourceOffset,
- SourceOffset end = InvalidSourceOffset,
- SourceId sourceId = InvalidSourceId)
+inline void assertToken(TokenizedDataReader &reader, TokenId id,
+ const std::string &text,
+ const TokenSet &tokens = TokenSet{},
+ WhitespaceMode mode = WhitespaceMode::TRIM,
+ SourceOffset start = InvalidSourceOffset,
+ SourceOffset end = InvalidSourceOffset,
+ SourceId sourceId = InvalidSourceId,
+ bool endAtWhitespace = false)
{
Token token;
- ASSERT_TRUE(reader.read(token, tokens, mode));
+ ASSERT_TRUE(reader.read(token, tokens, mode, endAtWhitespace));
EXPECT_EQ(id, token.id);
EXPECT_EQ(text, token.content);
if (start != InvalidSourceOffset) {
@@ -41,23 +43,32 @@ static void assertToken(TokenizedDataReader &reader, TokenId id,
EXPECT_EQ(sourceId, token.getLocation().getSourceId());
}
-static void assertText(TokenizedDataReader &reader, const std::string &text,
- const TokenSet &tokens = TokenSet{},
- WhitespaceMode mode = WhitespaceMode::TRIM,
- SourceOffset start = InvalidSourceOffset,
- SourceOffset end = InvalidSourceOffset,
- SourceId id = InvalidSourceId)
+inline void assertText(TokenizedDataReader &reader, const std::string &text,
+ const TokenSet &tokens = TokenSet{},
+ WhitespaceMode mode = WhitespaceMode::TRIM,
+ SourceOffset start = InvalidSourceOffset,
+ SourceOffset end = InvalidSourceOffset,
+ SourceId id = InvalidSourceId)
{
assertToken(reader, Tokens::Data, text, tokens, mode, start, end, id);
}
-static void assertEnd(TokenizedDataReader &reader)
+inline void assertTextEndAtWhitespace(
+ TokenizedDataReader &reader, const std::string &text,
+ const TokenSet &tokens = TokenSet{},
+ WhitespaceMode mode = WhitespaceMode::TRIM,
+ SourceOffset start = InvalidSourceOffset,
+ SourceOffset end = InvalidSourceOffset, SourceId id = InvalidSourceId)
+{
+ assertToken(reader, Tokens::Data, text, tokens, mode, start, end, id, true);
+}
+
+inline void assertEnd(TokenizedDataReader &reader)
{
Token token;
ASSERT_TRUE(reader.atEnd());
ASSERT_FALSE(reader.read(token));
}
-
}
#endif /* _OUSIA_TOKENIZED_DATA_TEST_UTILS_HPP_ */