author    | Andreas Stöckel <astoecke@techfak.uni-bielefeld.de> | 2015-02-26 00:22:12 +0100
committer | Andreas Stöckel <astoecke@techfak.uni-bielefeld.de> | 2015-02-26 00:22:12 +0100
commit    | 041a2dd18050e9e26ca1ee00851461dff1e1f90c (patch)
tree      | 875348def51c04178375f58e4bac0d1966fa3aa7 /test/core/parser/utils/TokenizedDataTest.cpp
parent    | b95cf0ddd1aee517ed948155d43da4e2b64cfcdf (diff)
Moved "assert" functions to own header
Diffstat (limited to 'test/core/parser/utils/TokenizedDataTest.cpp')
-rw-r--r-- | test/core/parser/utils/TokenizedDataTest.cpp | 39
1 file changed, 2 insertions, 37 deletions
diff --git a/test/core/parser/utils/TokenizedDataTest.cpp b/test/core/parser/utils/TokenizedDataTest.cpp
index dfe2526..8488459 100644
--- a/test/core/parser/utils/TokenizedDataTest.cpp
+++ b/test/core/parser/utils/TokenizedDataTest.cpp
@@ -20,44 +20,9 @@
 #include <core/parser/utils/TokenizedData.hpp>
 
-namespace ousia {
-
-void assertToken(TokenizedDataReader &reader, TokenId id,
-                 const std::string &text, const TokenSet &tokens = TokenSet{},
-                 WhitespaceMode mode = WhitespaceMode::TRIM,
-                 SourceOffset start = InvalidSourceOffset,
-                 SourceOffset end = InvalidSourceOffset,
-                 SourceId sourceId = InvalidSourceId)
-{
-	Token token;
-	ASSERT_TRUE(reader.read(token, tokens, mode));
-	EXPECT_EQ(id, token.id);
-	EXPECT_EQ(text, token.content);
-	if (start != InvalidSourceOffset) {
-		EXPECT_EQ(start, token.getLocation().getStart());
-	}
-	if (end != InvalidSourceOffset) {
-		EXPECT_EQ(end, token.getLocation().getEnd());
-	}
-	EXPECT_EQ(sourceId, token.getLocation().getSourceId());
-}
-
-void assertText(TokenizedDataReader &reader, const std::string &text,
-                const TokenSet &tokens = TokenSet{},
-                WhitespaceMode mode = WhitespaceMode::TRIM,
-                SourceOffset start = InvalidSourceOffset,
-                SourceOffset end = InvalidSourceOffset,
-                SourceId id = InvalidSourceId)
-{
-	assertToken(reader, Tokens::Data, text, tokens, mode, start, end, id);
-}
+#include "TokenizedDataTestUtils.hpp"
 
-void assertEnd(TokenizedDataReader &reader)
-{
-	Token token;
-	ASSERT_TRUE(reader.atEnd());
-	ASSERT_FALSE(reader.read(token));
-}
+namespace ousia {
 
 TEST(TokenizedData, dataWhitespacePreserve)
 {
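
For reference, below is a minimal sketch of what the new TokenizedDataTestUtils.hpp plausibly contains, assuming the helpers removed above were moved over unchanged. The include guard name, the <gtest/gtest.h> include, and the static linkage are assumptions made here so the header can be shared by several test translation units; they are not confirmed by this diff.

// Sketch only: shared test helpers for reading and checking tokens from a
// TokenizedDataReader. Names of the guard and linkage are assumed, the
// function bodies mirror the code removed in the diff above.
#ifndef _OUSIA_TOKENIZED_DATA_TEST_UTILS_HPP_
#define _OUSIA_TOKENIZED_DATA_TEST_UTILS_HPP_

#include <gtest/gtest.h>

#include <core/parser/utils/TokenizedData.hpp>

namespace ousia {

// Reads the next token and checks its id, content and (optionally) location.
static void assertToken(TokenizedDataReader &reader, TokenId id,
                        const std::string &text,
                        const TokenSet &tokens = TokenSet{},
                        WhitespaceMode mode = WhitespaceMode::TRIM,
                        SourceOffset start = InvalidSourceOffset,
                        SourceOffset end = InvalidSourceOffset,
                        SourceId sourceId = InvalidSourceId)
{
	Token token;
	ASSERT_TRUE(reader.read(token, tokens, mode));
	EXPECT_EQ(id, token.id);
	EXPECT_EQ(text, token.content);
	if (start != InvalidSourceOffset) {
		EXPECT_EQ(start, token.getLocation().getStart());
	}
	if (end != InvalidSourceOffset) {
		EXPECT_EQ(end, token.getLocation().getEnd());
	}
	EXPECT_EQ(sourceId, token.getLocation().getSourceId());
}

// Convenience wrapper: the next token must be a plain data token.
static void assertText(TokenizedDataReader &reader, const std::string &text,
                       const TokenSet &tokens = TokenSet{},
                       WhitespaceMode mode = WhitespaceMode::TRIM,
                       SourceOffset start = InvalidSourceOffset,
                       SourceOffset end = InvalidSourceOffset,
                       SourceId id = InvalidSourceId)
{
	assertToken(reader, Tokens::Data, text, tokens, mode, start, end, id);
}

// Checks that the reader is exhausted and no further token can be read.
static void assertEnd(TokenizedDataReader &reader)
{
	Token token;
	ASSERT_TRUE(reader.atEnd());
	ASSERT_FALSE(reader.read(token));
}

}

#endif /* _OUSIA_TOKENIZED_DATA_TEST_UTILS_HPP_ */

Factoring the helpers out this way lets other TokenizedData-related test files reuse assertToken, assertText and assertEnd instead of redefining them locally, which is what this commit's test file now does via the new include.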