diff options
author | Andreas Stöckel <astoecke@techfak.uni-bielefeld.de> | 2015-04-12 18:47:29 +0200 |
---|---|---|
committer | Andreas Stöckel <astoecke@techfak.uni-bielefeld.de> | 2016-04-25 22:24:16 +0200 |
commit | 667d9c4a082552fb64c5ffe7b0bd6212c8a8b1b3 (patch) | |
tree | 100e8e3fbd86970dec9ef97c773419ac2bba291b /test/core/parser/utils/TokenizedDataTestUtils.hpp | |
parent | 0884afe16263a110597671f60dcb4ff7df66f456 (diff) |
Implement endAtWhitespace flag which tells TokenizedDataReader to stop reading data after the first whitespace character
Diffstat (limited to 'test/core/parser/utils/TokenizedDataTestUtils.hpp')
-rw-r--r-- | test/core/parser/utils/TokenizedDataTestUtils.hpp | 41 |
1 file changed, 26 insertions, 15 deletions
diff --git a/test/core/parser/utils/TokenizedDataTestUtils.hpp b/test/core/parser/utils/TokenizedDataTestUtils.hpp index c384f9d..30f72ae 100644 --- a/test/core/parser/utils/TokenizedDataTestUtils.hpp +++ b/test/core/parser/utils/TokenizedDataTestUtils.hpp @@ -21,15 +21,17 @@ namespace ousia { -static void assertToken(TokenizedDataReader &reader, TokenId id, - const std::string &text, const TokenSet &tokens = TokenSet{}, - WhitespaceMode mode = WhitespaceMode::TRIM, - SourceOffset start = InvalidSourceOffset, - SourceOffset end = InvalidSourceOffset, - SourceId sourceId = InvalidSourceId) +inline void assertToken(TokenizedDataReader &reader, TokenId id, + const std::string &text, + const TokenSet &tokens = TokenSet{}, + WhitespaceMode mode = WhitespaceMode::TRIM, + SourceOffset start = InvalidSourceOffset, + SourceOffset end = InvalidSourceOffset, + SourceId sourceId = InvalidSourceId, + bool endAtWhitespace = false) { Token token; - ASSERT_TRUE(reader.read(token, tokens, mode)); + ASSERT_TRUE(reader.read(token, tokens, mode, endAtWhitespace)); EXPECT_EQ(id, token.id); EXPECT_EQ(text, token.content); if (start != InvalidSourceOffset) { @@ -41,23 +43,32 @@ static void assertToken(TokenizedDataReader &reader, TokenId id, EXPECT_EQ(sourceId, token.getLocation().getSourceId()); } -static void assertText(TokenizedDataReader &reader, const std::string &text, - const TokenSet &tokens = TokenSet{}, - WhitespaceMode mode = WhitespaceMode::TRIM, - SourceOffset start = InvalidSourceOffset, - SourceOffset end = InvalidSourceOffset, - SourceId id = InvalidSourceId) +inline void assertText(TokenizedDataReader &reader, const std::string &text, + const TokenSet &tokens = TokenSet{}, + WhitespaceMode mode = WhitespaceMode::TRIM, + SourceOffset start = InvalidSourceOffset, + SourceOffset end = InvalidSourceOffset, + SourceId id = InvalidSourceId) { assertToken(reader, Tokens::Data, text, tokens, mode, start, end, id); } -static void assertEnd(TokenizedDataReader &reader) +inline 
void assertTextEndAtWhitespace( + TokenizedDataReader &reader, const std::string &text, + const TokenSet &tokens = TokenSet{}, + WhitespaceMode mode = WhitespaceMode::TRIM, + SourceOffset start = InvalidSourceOffset, + SourceOffset end = InvalidSourceOffset, SourceId id = InvalidSourceId) +{ + assertToken(reader, Tokens::Data, text, tokens, mode, start, end, id, true); +} + +inline void assertEnd(TokenizedDataReader &reader) { Token token; ASSERT_TRUE(reader.atEnd()); ASSERT_FALSE(reader.read(token)); } - } #endif /* _OUSIA_TOKENIZED_DATA_TEST_UTILS_HPP_ */ |