/*
    Ousía
    Copyright (C) 2014, 2015 Benjamin Paaßen, Andreas Stöckel

    This program is free software: you can redistribute it and/or modify
    it under the terms of the GNU General Public License as published by
    the Free Software Foundation, either version 3 of the License, or
    (at your option) any later version.

    This program is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
    GNU General Public License for more details.

    You should have received a copy of the GNU General Public License
    along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
#include <gtest/gtest.h>

#include <core/parser/stack/TokenStack.hpp>

namespace ousia {
namespace parser_stack {

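// Shared test fixtures: three dummy nodes and three descriptor lists. Judging
// from the assertions below, the SyntaxDescriptor arguments appear to be the
// "open", "close" and "short form" token ids, the associated descriptor node
// and a depth value; Tokens::Empty marks an unused token slot.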
static Manager mgr;

static Rooted<Node> nd1{new Node(mgr)};
static Rooted<Node> nd2{new Node(mgr)};
static Rooted<Node> nd3{new Node(mgr)};

static const std::vector<SyntaxDescriptor> ListA{
    SyntaxDescriptor(Tokens::Empty, 1, Tokens::Empty, nd1, 0),
    SyntaxDescriptor(2, Tokens::Empty, Tokens::Empty, nd2, 2),
    SyntaxDescriptor(3, Tokens::Empty, Tokens::Empty, nd3, 1)};

static const std::vector<SyntaxDescriptor> ListB{
    SyntaxDescriptor(Tokens::Empty, 1, Tokens::Empty, nd1, -1),
    SyntaxDescriptor(2, Tokens::Empty, 3, nd3, 3),
};

static const std::vector<SyntaxDescriptor> ListC{
    SyntaxDescriptor(Tokens::Empty, Tokens::Empty, 4, nd2, 5),
    SyntaxDescriptor(Tokens::Empty, Tokens::Empty, 3, nd3, 6),
};

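// Checks that tokens() only reflects the most recently pushed descriptor list
// and that popTokens() restores the token set of the list below it.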
TEST(TokenStack, tokens)
{
    TokenStack ts;
    ASSERT_EQ((TokenSet{}), ts.tokens());

    ts.pushTokens(ListA);
    ASSERT_EQ((TokenSet{1, 2, 3}), ts.tokens());

    ts.pushTokens(ListB);
    ASSERT_EQ((TokenSet{1, 2, 3}), ts.tokens());

    ts.pushTokens(ListC);
    ASSERT_EQ((TokenSet{3, 4}), ts.tokens());

    ts.popTokens();
    ASSERT_EQ((TokenSet{1, 2, 3}), ts.tokens());

    ts.popTokens();
    ASSERT_EQ((TokenSet{1, 2, 3}), ts.tokens());

    ts.popTokens();
    ASSERT_EQ((TokenSet{}), ts.tokens());
}

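// Checks that lookup() only consults the topmost descriptor list (ListC here):
// token 3 resolves to a single short-form descriptor and no open or close
// descriptors.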
TEST(TokenStack, lookup)
{
    TokenStack ts;
    ts.pushTokens(ListA);
    ts.pushTokens(ListB);
    ts.pushTokens(ListC);

    TokenDescriptor descr = ts.lookup(3);
    ASSERT_EQ(0U, descr.open.size());
    ASSERT_EQ(0U, descr.close.size());
    ASSERT_EQ(1U, descr.shortForm.size());
    ASSERT_EQ(ListC[1], descr.shortForm[0]);
}

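// Checks that when all descriptors are pushed as one list, lookup(3) groups
// the matches into open and short-form entries; the two short-form matches
// come back apparently ordered by ascending depth (ListB[1] before ListC[1]).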
TEST(TokenStack, sorting)
{
    TokenStack ts;

    std::vector<SyntaxDescriptor> descrs;
    descrs.insert(descrs.end(), ListC.begin(), ListC.end());
    descrs.insert(descrs.end(), ListA.begin(), ListA.end());
    descrs.insert(descrs.end(), ListB.begin(), ListB.end());
    ts.pushTokens(descrs);

    TokenDescriptor descr = ts.lookup(3);
    ASSERT_EQ(1U, descr.open.size());
    ASSERT_EQ(0U, descr.close.size());
    ASSERT_EQ(2U, descr.shortForm.size());
    ASSERT_EQ(ListA[2], descr.open[0]);
    ASSERT_EQ(ListB[1], descr.shortForm[0]);
    ASSERT_EQ(ListC[1], descr.shortForm[1]);
}
}  // namespace parser_stack
}  // namespace ousia