author     Andreas Stöckel <astoecke@techfak.uni-bielefeld.de>  2015-02-03 21:43:40 +0100
committer  Andreas Stöckel <astoecke@techfak.uni-bielefeld.de>  2015-02-03 21:43:40 +0100
commit     7d8684c9239df6b05f3c9b25b1470d671b90df14 (patch)
tree       646d1a7d1a3c14ca1d131d9327e672f55857b6a7 /test
parent     a9d898ffb53b7da187281fb9e471cbb6e002fa19 (diff)
Fixed GCC 4.9 warnings
Diffstat (limited to 'test')
-rw-r--r--  test/core/TokenizerTest.cpp          | 14
-rw-r--r--  test/core/common/CharReaderTest.cpp  |  8
2 files changed, 11 insertions, 11 deletions
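
The warnings addressed here appear to be GCC's -Wsign-compare diagnostics (typically surfaced with -Wall/-Wextra): ASSERT_EQ ends up comparing a plain int literal against an unsigned value, namely the size() of the children map and, presumably, the offset returned by getOffset(). Appending the U suffix makes the expected value unsigned so both operands of the comparison agree in signedness. A minimal standalone sketch of the pattern (a hypothetical stand-in, not gtest's actual helper and not the project's code):

#include <map>

// Stand-in for the comparison done inside gtest's ASSERT_EQ: expected and
// actual arrive as references of separate types and are compared with ==.
template <typename T1, typename T2>
bool cmpEq(const T1 &expected, const T2 &actual)
{
	// When T1 is 'int' and T2 is an unsigned type such as std::size_t,
	// GCC 4.9 reports "comparison between signed and unsigned integer
	// expressions" [-Wsign-compare] for this line.
	return expected == actual;
}

int main()
{
	std::map<char, int> children{{'a', 1}};

	bool mixed = cmpEq(1, children.size());   // int vs. size_type: warns
	bool clean = cmpEq(1U, children.size());  // unsigned vs. size_type: clean

	return (mixed && clean) ? 0 : 1;
}

Only the first call's instantiation triggers the warning; the 1U form mirrors the ASSERT_EQ(1U, ...) changes in the diff below.
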
diff --git a/test/core/TokenizerTest.cpp b/test/core/TokenizerTest.cpp
index d6e9306..c53f93d 100644
--- a/test/core/TokenizerTest.cpp
+++ b/test/core/TokenizerTest.cpp
@@ -28,37 +28,37 @@ TEST(TokenTreeNode, testConstructor)
TokenTreeNode root{{{"a", 1}, {"aab", 2}, {"aac", 3}, {"abd", 4}}};
ASSERT_EQ(-1, root.tokenId);
- ASSERT_EQ(1, root.children.size());
+ ASSERT_EQ(1U, root.children.size());
ASSERT_TRUE(root.children.find('a') != root.children.end());
const TokenTreeNode &a = root.children.at('a');
ASSERT_EQ(1, a.tokenId);
- ASSERT_EQ(2, a.children.size());
+ ASSERT_EQ(2U, a.children.size());
ASSERT_TRUE(a.children.find('a') != a.children.end());
ASSERT_TRUE(a.children.find('b') != a.children.end());
const TokenTreeNode &aa = a.children.at('a');
ASSERT_EQ(-1, aa.tokenId);
- ASSERT_EQ(2, aa.children.size());
+ ASSERT_EQ(2U, aa.children.size());
ASSERT_TRUE(aa.children.find('b') != aa.children.end());
ASSERT_TRUE(aa.children.find('c') != aa.children.end());
const TokenTreeNode &aab = aa.children.at('b');
ASSERT_EQ(2, aab.tokenId);
- ASSERT_EQ(0, aab.children.size());
+ ASSERT_EQ(0U, aab.children.size());
const TokenTreeNode &aac = aa.children.at('c');
ASSERT_EQ(3, aac.tokenId);
- ASSERT_EQ(0, aac.children.size());
+ ASSERT_EQ(0U, aac.children.size());
const TokenTreeNode &ab = a.children.at('b');
ASSERT_EQ(-1, ab.tokenId);
- ASSERT_EQ(1, ab.children.size());
+ ASSERT_EQ(1U, ab.children.size());
ASSERT_TRUE(ab.children.find('d') != ab.children.end());
const TokenTreeNode &abd = ab.children.at('d');
ASSERT_EQ(4, abd.tokenId);
- ASSERT_EQ(0, abd.children.size());
+ ASSERT_EQ(0U, abd.children.size());
}
TEST(Tokenizer, testTokenization)
diff --git a/test/core/common/CharReaderTest.cpp b/test/core/common/CharReaderTest.cpp
index a1ea18f..bcdf58d 100644
--- a/test/core/common/CharReaderTest.cpp
+++ b/test/core/common/CharReaderTest.cpp
@@ -482,7 +482,7 @@ TEST(CharReader, simplePeek)
ASSERT_EQ(testStr, res);
// We must now be at line 1, column 1 and NOT at the end of the stream
- ASSERT_EQ(0, reader.getOffset());
+ ASSERT_EQ(0U, reader.getOffset());
ASSERT_FALSE(reader.atEnd());
reader.consumePeek();
@@ -546,7 +546,7 @@ TEST(CharReader, fork)
{
CharReaderFork fork = reader.fork();
- ASSERT_EQ(4, fork.getOffset());
+ ASSERT_EQ(4U, fork.getOffset());
fork.peek(c);
ASSERT_EQ('i', c);
@@ -554,7 +554,7 @@ TEST(CharReader, fork)
fork.read(c);
ASSERT_EQ('t', c);
- ASSERT_EQ(5, fork.getOffset());
+ ASSERT_EQ(5U, fork.getOffset());
reader.read(c);
reader.read(c);
@@ -562,7 +562,7 @@ TEST(CharReader, fork)
fork.commit();
}
- ASSERT_EQ(5, reader.getOffset());
+ ASSERT_EQ(5U, reader.getOffset());
}
}