summaryrefslogtreecommitdiff
path: root/test/core/parser/utils/TokenizedDataTestUtils.hpp
blob: 30f72aeb0753731749ba8a7e6c9fddb176d0cc97 (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
/*
    Ousía
    Copyright (C) 2014, 2015  Benjamin Paaßen, Andreas Stöckel

    This program is free software: you can redistribute it and/or modify
    it under the terms of the GNU General Public License as published by
    the Free Software Foundation, either version 3 of the License, or
    (at your option) any later version.

    This program is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    GNU General Public License for more details.

    You should have received a copy of the GNU General Public License
    along with this program.  If not, see <http://www.gnu.org/licenses/>.
*/

#ifndef _OUSIA_TOKENIZED_DATA_TEST_UTILS_HPP_
#define _OUSIA_TOKENIZED_DATA_TEST_UTILS_HPP_

namespace ousia {

/**
 * Reads the next token from the given reader and compares it against the
 * expected token id, text content and (optionally) source location.
 *
 * @param reader is the TokenizedDataReader the token is read from.
 * @param id is the expected token id.
 * @param text is the expected token content.
 * @param tokens is the set of enabled tokens handed to the reader.
 * @param mode is the whitespace handling mode handed to the reader.
 * @param start is the expected start offset; the check is skipped if it is
 * InvalidSourceOffset.
 * @param end is the expected end offset; the check is skipped if it is
 * InvalidSourceOffset.
 * @param sourceId is the expected source id. NOTE(review): unlike start and
 * end this is compared unconditionally, so the default value asserts that the
 * token carries no valid source id — presumably intentional, confirm with the
 * calling tests.
 * @param endAtWhitespace is forwarded to the reader and makes reading stop at
 * whitespace characters.
 */
inline void assertToken(TokenizedDataReader &reader, TokenId id,
                        const std::string &text,
                        const TokenSet &tokens = TokenSet{},
                        WhitespaceMode mode = WhitespaceMode::TRIM,
                        SourceOffset start = InvalidSourceOffset,
                        SourceOffset end = InvalidSourceOffset,
                        SourceId sourceId = InvalidSourceId,
                        bool endAtWhitespace = false)
{
	// Reading must succeed before any field of the token is inspected
	Token actual;
	ASSERT_TRUE(reader.read(actual, tokens, mode, endAtWhitespace));

	EXPECT_EQ(id, actual.id);
	EXPECT_EQ(text, actual.content);

	// Location checks: offsets are only verified when explicitly requested
	const auto &loc = actual.getLocation();
	if (start != InvalidSourceOffset) {
		EXPECT_EQ(start, loc.getStart());
	}
	if (end != InvalidSourceOffset) {
		EXPECT_EQ(end, loc.getEnd());
	}
	EXPECT_EQ(sourceId, loc.getSourceId());
}

/**
 * Convenience wrapper around assertToken which expects the next token read
 * from the reader to be a plain data token (Tokens::Data) with the given
 * text content.
 *
 * @param reader is the TokenizedDataReader the token is read from.
 * @param text is the expected data content.
 * @param tokens is the set of enabled tokens handed to the reader.
 * @param mode is the whitespace handling mode handed to the reader.
 * @param start is the expected start offset (skipped if InvalidSourceOffset).
 * @param end is the expected end offset (skipped if InvalidSourceOffset).
 * @param id is the expected source id of the token location.
 */
inline void assertText(TokenizedDataReader &reader, const std::string &text,
                       const TokenSet &tokens = TokenSet{},
                       WhitespaceMode mode = WhitespaceMode::TRIM,
                       SourceOffset start = InvalidSourceOffset,
                       SourceOffset end = InvalidSourceOffset,
                       SourceId id = InvalidSourceId)
{
	// Explicitly pass endAtWhitespace = false (the assertToken default)
	assertToken(reader, Tokens::Data, text, tokens, mode, start, end, id,
	            false);
}

/**
 * Variant of assertText which instructs the reader to stop reading at the
 * first whitespace character (endAtWhitespace = true), while still expecting
 * a plain data token (Tokens::Data) with the given text content.
 *
 * @param reader is the TokenizedDataReader the token is read from.
 * @param text is the expected data content.
 * @param tokens is the set of enabled tokens handed to the reader.
 * @param mode is the whitespace handling mode handed to the reader.
 * @param start is the expected start offset (skipped if InvalidSourceOffset).
 * @param end is the expected end offset (skipped if InvalidSourceOffset).
 * @param id is the expected source id of the token location.
 */
inline void assertTextEndAtWhitespace(
    TokenizedDataReader &reader, const std::string &text,
    const TokenSet &tokens = TokenSet{},
    WhitespaceMode mode = WhitespaceMode::TRIM,
    SourceOffset start = InvalidSourceOffset,
    SourceOffset end = InvalidSourceOffset, SourceId id = InvalidSourceId)
{
	constexpr bool endAtWhitespace = true;
	assertToken(reader, Tokens::Data, text, tokens, mode, start, end, id,
	            endAtWhitespace);
}

/**
 * Asserts that the given reader has been fully consumed: it must report
 * atEnd() and a subsequent read() must fail.
 *
 * @param reader is the TokenizedDataReader that is expected to be exhausted.
 */
inline void assertEnd(TokenizedDataReader &reader)
{
	ASSERT_TRUE(reader.atEnd());

	// A read attempt on an exhausted reader must not yield another token
	Token tok;
	ASSERT_FALSE(reader.read(tok));
}
}

#endif /* _OUSIA_TOKENIZED_DATA_TEST_UTILS_HPP_ */