From 08e654dcda7fa6d764558ea184fe1a47bc4001c8 Mon Sep 17 00:00:00 2001
From: Benjamin Paassen
Date: Fri, 21 Nov 2014 11:10:31 +0100
Subject: Finished the last commit's move action by also moving the tests and
 correcting all references and namespaces in the code.

---
 test/core/utils/TokenizerTest.cpp | 125 --------------------------------------
 1 file changed, 125 deletions(-)
 delete mode 100644 test/core/utils/TokenizerTest.cpp

diff --git a/test/core/utils/TokenizerTest.cpp b/test/core/utils/TokenizerTest.cpp
deleted file mode 100644
index 79cc01d..0000000
--- a/test/core/utils/TokenizerTest.cpp
+++ /dev/null
@@ -1,125 +0,0 @@
-/*
-    Ousía
-    Copyright (C) 2014, 2015 Benjamin Paaßen, Andreas Stöckel
-
-    This program is free software: you can redistribute it and/or modify
-    it under the terms of the GNU General Public License as published by
-    the Free Software Foundation, either version 3 of the License, or
-    (at your option) any later version.
-
-    This program is distributed in the hope that it will be useful,
-    but WITHOUT ANY WARRANTY; without even the implied warranty of
-    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-    GNU General Public License for more details.
-
-    You should have received a copy of the GNU General Public License
-    along with this program. If not, see <http://www.gnu.org/licenses/>.
-*/
-
-#include <gtest/gtest.h>
-
-#include <core/utils/BufferedCharReader.hpp>
-
-#include <core/utils/Tokenizer.hpp>
-
-namespace ousia {
-namespace utils {
-TEST(TokenTreeNode, testConstructor)
-{
-	TokenTreeNode root{{{"a", 1}, {"aab", 2}, {"aac", 3}, {"abd", 4}}};
-
-	ASSERT_EQ(-1, root.tokenId);
-	ASSERT_EQ(1, root.children.size());
-	ASSERT_TRUE(root.children.find('a') != root.children.end());
-
-	const TokenTreeNode &a = root.children.at('a');
-	ASSERT_EQ(1, a.tokenId);
-	ASSERT_EQ(2, a.children.size());
-	ASSERT_TRUE(a.children.find('a') != a.children.end());
-	ASSERT_TRUE(a.children.find('b') != a.children.end());
-
-	const TokenTreeNode &aa = a.children.at('a');
-	ASSERT_EQ(-1, aa.tokenId);
-	ASSERT_EQ(2, aa.children.size());
-	ASSERT_TRUE(aa.children.find('b') != aa.children.end());
-	ASSERT_TRUE(aa.children.find('c') != aa.children.end());
-
-	const TokenTreeNode &aab = aa.children.at('b');
-	ASSERT_EQ(2, aab.tokenId);
-	ASSERT_EQ(0, aab.children.size());
-
-	const TokenTreeNode &aac = aa.children.at('c');
-	ASSERT_EQ(3, aac.tokenId);
-	ASSERT_EQ(0, aac.children.size());
-
-	const TokenTreeNode &ab = a.children.at('b');
-	ASSERT_EQ(-1, ab.tokenId);
-	ASSERT_EQ(1, ab.children.size());
-	ASSERT_TRUE(ab.children.find('d') != ab.children.end());
-
-	const TokenTreeNode &abd = ab.children.at('d');
-	ASSERT_EQ(4, abd.tokenId);
-	ASSERT_EQ(0, abd.children.size());
-}
-
-TEST(Tokenizer, testTokenization)
-{
-	TokenTreeNode root{{{"/", 1}, {"/*", 2}, {"*/", 3}}};
-
-	BufferedCharReader reader;
-	reader.feed("Test/Test /* Block Comment */");
-	//           12345678901234567890123456789
-	//           0        1         2
-
-	std::vector<Token> expected = {
-	    {TOKEN_TEXT, "Test", 1, 1, 5, 1},
-	    {1, "/", 5, 1, 6, 1},
-	    {TOKEN_TEXT, "Test ", 6, 1, 11, 1},
-	    {2, "/*", 11, 1, 13, 1},
-	    {TOKEN_TEXT, " Block Comment ", 13, 1, 28, 1},
-	    {3, "*/", 28, 1, 30, 1}};
-
-	Tokenizer tokenizer{reader, root};
-
-	Token t;
-	for (auto &te : expected) {
-		ASSERT_TRUE(tokenizer.next(t));
-		ASSERT_EQ(te.tokenId, t.tokenId);
-		ASSERT_EQ(te.content, t.content);
-		ASSERT_EQ(te.startColumn, t.startColumn);
-		ASSERT_EQ(te.startLine, t.startLine);
-		ASSERT_EQ(te.endColumn, t.endColumn);
-		ASSERT_EQ(te.endLine, t.endLine);
	}
-	ASSERT_FALSE(tokenizer.next(t));
-}
-
-TEST(Tokenizer, testIncompleteTokens)
-{
-	TokenTreeNode root{{{"ab", 1}, {"c", 2}}};
-
-	BufferedCharReader reader;
-	reader.feed("ac");
-	//           1234567890
-	//           0        1
-
-	std::vector<Token> expected = {
-	    {TOKEN_TEXT, "a", 1, 1, 2, 1},
-	    {2, "c", 2, 1, 3, 1}};
-
-	Tokenizer tokenizer{reader, root};
-
-	Token t;
-	for (auto &te : expected) {
-		ASSERT_TRUE(tokenizer.next(t));
-		ASSERT_EQ(te.tokenId, t.tokenId);
-		ASSERT_EQ(te.content, t.content);
-		ASSERT_EQ(te.startColumn, t.startColumn);
-		ASSERT_EQ(te.startLine, t.startLine);
-		ASSERT_EQ(te.endColumn, t.endColumn);
-		ASSERT_EQ(te.endLine, t.endLine);
-	}
-	ASSERT_FALSE(tokenizer.next(t));
-}
-}
-}
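
Aside (not part of the commit): the deleted tests document a trie-based, longest-match tokenizer. The standalone sketch below illustrates that behaviour in isolation; TokenTreeNode here only mirrors the shape visible in the diff (tokenId == -1 marks a node where no token ends), while the match() helper, main() driver, and compilation details are illustrative assumptions rather than the actual Ousía API.

// Minimal sketch of trie-based longest-match token lookup (assumed
// semantics, not the Ousía implementation).
#include <cassert>
#include <cstddef>
#include <initializer_list>
#include <map>
#include <string>
#include <utility>

struct TokenTreeNode {
	std::map<char, TokenTreeNode> children;
	int tokenId = -1;  // -1: no token ends at this node

	TokenTreeNode() = default;

	// Build the trie from (token string, token id) pairs, as in the tests.
	TokenTreeNode(std::initializer_list<std::pair<std::string, int>> tokens)
	{
		for (const auto &token : tokens) {
			TokenTreeNode *node = this;
			for (char c : token.first) {
				node = &node->children[c];
			}
			node->tokenId = token.second;
		}
	}
};

// Hypothetical helper: returns the id and length of the longest token
// starting at input[pos], or {-1, 0} if no complete token starts there.
std::pair<int, std::size_t> match(const TokenTreeNode &root,
                                  const std::string &input, std::size_t pos)
{
	const TokenTreeNode *node = &root;
	std::pair<int, std::size_t> best{-1, 0};
	for (std::size_t i = pos; i < input.size(); i++) {
		auto it = node->children.find(input[i]);
		if (it == node->children.end()) {
			break;  // trie path ends; keep the last complete match
		}
		node = &it->second;
		if (node->tokenId != -1) {
			best = {node->tokenId, i - pos + 1};
		}
	}
	return best;
}

int main()
{
	// Token set from testIncompleteTokens: "ab" -> 1, "c" -> 2.
	TokenTreeNode root{{{"ab", 1}, {"c", 2}}};

	// On "ac" the prefix "a" of "ab" dies at 'c', so no complete token
	// starts at position 0 and 'a' must be emitted as plain text
	// (TOKEN_TEXT in the tests) before "c" matches token 2.
	assert(match(root, "ac", 0).first == -1);
	assert(match(root, "ac", 1).first == 2);
	return 0;
}

This is exactly the fallback the testIncompleteTokens case asserts: a failed partial match degrades to text rather than consuming input, and matching resumes at the next character.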