author | Benjamin Paassen <bpaassen@techfak.uni-bielefeld.de> | 2015-01-23 15:47:59 +0100
---|---|---
committer | Benjamin Paassen <bpaassen@techfak.uni-bielefeld.de> | 2015-01-23 15:47:59 +0100
commit | 18d3637ca02ab69f1ee744fa94c43c243de0f571 (patch) |
tree | 42c859f014ab7dbb7d31a747e0ef3839c77c60fa /test |
parent | 85d72823ef18711fe7a29f5b23cc37b318766332 (diff) |
parent | aa817d3bfd90aa39b6fd8a915bc78a8bb210cd3d (diff) |
Merge branch 'master' of somweyr.de:ousia
Diffstat (limited to 'test')
-rw-r--r-- | test/core/CodeTokenizerTest.cpp | 49
-rw-r--r-- | test/core/RegistryTest.cpp | 67
-rw-r--r-- | test/core/TokenizerTest.cpp | 48
-rw-r--r-- | test/core/common/CharReaderTest.cpp | 109
-rw-r--r-- | test/core/common/LoggerTest.cpp | 52
-rw-r--r-- | test/core/common/UtilsTest.cpp | 19
-rw-r--r-- | test/core/parser/ParserStackTest.cpp | 3
-rw-r--r-- | test/core/parser/StandaloneParserContext.hpp | 7
-rw-r--r-- | test/plugins/css/CSSParserTest.cpp | 7
-rw-r--r-- | test/plugins/filesystem/FileLocatorTest.cpp | 20
-rw-r--r-- | test/plugins/xml/XmlParserTest.cpp | 8
11 files changed, 188 insertions, 201 deletions
diff --git a/test/core/CodeTokenizerTest.cpp b/test/core/CodeTokenizerTest.cpp
index 4d11622..2d4d5a7 100644
--- a/test/core/CodeTokenizerTest.cpp
+++ b/test/core/CodeTokenizerTest.cpp
@@ -38,7 +38,7 @@ TEST(CodeTokenizer, testTokenizer)
     " */\n"                                // 3
     "var my_string = 'My \\'String\\'';\n" // 4
     "// and a line comment\n"              // 5
-    "var my_obj = { a = 4;}"};             // 6
+    "var my_obj = { a = 4;}", 0};          // 6
     //   123456789012345678901234567890123456789
     //   0        1         2         3
     TokenTreeNode root{{{"/*", 1},
@@ -60,40 +60,39 @@ TEST(CodeTokenizer, testTokenizer)
     {6, {CodeTokenMode::LINEBREAK, LINEBREAK}}};

     std::vector<Token> expected = {
-        {BLOCK_COMMENT, "*\n * Some Block Comment\n ", 1, 1, 4, 3},
-        {LINEBREAK, "\n", 4, 3, 1, 4},
-        {TOKEN_TEXT, "var", 1, 4, 4, 4},
-        {TOKEN_TEXT, "my_string", 5, 4, 14, 4},
-        {TOKEN_TEXT, "=", 15, 4, 16, 4},
-        {STRING, "My 'String'", 17, 4, 32, 4},
-        {TOKEN_TEXT, ";", 32, 4, 33, 4},
-        {LINEBREAK, "\n", 33, 4, 1, 5},
+        {BLOCK_COMMENT, "*\n * Some Block Comment\n ", SourceLocation{0, 0, 29}},
+        {LINEBREAK, "\n", SourceLocation{0, 29, 30}},
+        {TOKEN_TEXT, "var", SourceLocation{0, 30, 33}},
+        {TOKEN_TEXT, "my_string", SourceLocation{0, 34, 43}},
+        {TOKEN_TEXT, "=", SourceLocation{0, 44, 45}},
+        {STRING, "My 'String'", SourceLocation{0, 46, 61}},
+        {TOKEN_TEXT, ";", SourceLocation{0, 61, 62}},
+        {LINEBREAK, "\n", SourceLocation{0, 62, 63}},
         // this is slightly counter-intuitive but makes sense if you think about
         // it: As a line comment is ended by a line break the line break is
         // technically still a part of the line comment and thus the ending
         // is in the next line.
-        {LINE_COMMENT, " and a line comment", 1, 5, 1, 6},
-        {TOKEN_TEXT, "var", 1, 6, 4, 6},
-        {TOKEN_TEXT, "my_obj", 5, 6, 11, 6},
-        {TOKEN_TEXT, "=", 12, 6, 13, 6},
-        {CURLY_OPEN, "{", 14, 6, 15, 6},
-        {TOKEN_TEXT, "a", 16, 6, 17, 6},
-        {TOKEN_TEXT, "=", 18, 6, 19, 6},
-        {TOKEN_TEXT, "4;", 20, 6, 22, 6},
-        {CURLY_CLOSE, "}", 22, 6, 23, 6},
+        {LINE_COMMENT, " and a line comment", SourceLocation{0, 63, 85}},
+        {TOKEN_TEXT, "var", SourceLocation{0, 85, 88}},
+        {TOKEN_TEXT, "my_obj", SourceLocation{0, 89, 95}},
+        {TOKEN_TEXT, "=", SourceLocation{0, 96, 97}},
+        {CURLY_OPEN, "{", SourceLocation{0, 98, 99}},
+        {TOKEN_TEXT, "a", SourceLocation{0, 100, 101}},
+        {TOKEN_TEXT, "=", SourceLocation{0, 102, 103}},
+        {TOKEN_TEXT, "4;", SourceLocation{0, 104, 106}},
+        {CURLY_CLOSE, "}", SourceLocation{0, 106, 107}},
     };

     CodeTokenizer tokenizer{reader, root, descriptors};

     Token t;
     for (auto &te : expected) {
-        ASSERT_TRUE(tokenizer.next(t));
-        ASSERT_EQ(te.tokenId, t.tokenId);
-        ASSERT_EQ(te.content, t.content);
-        ASSERT_EQ(te.startColumn, t.startColumn);
-        ASSERT_EQ(te.startLine, t.startLine);
-        ASSERT_EQ(te.endColumn, t.endColumn);
-        ASSERT_EQ(te.endLine, t.endLine);
+        EXPECT_TRUE(tokenizer.next(t));
+        EXPECT_EQ(te.tokenId, t.tokenId);
+        EXPECT_EQ(te.content, t.content);
+        EXPECT_EQ(te.location.getSourceId(), t.location.getSourceId());
+        EXPECT_EQ(te.location.getStart(), t.location.getStart());
+        EXPECT_EQ(te.location.getEnd(), t.location.getEnd());
     }
     ASSERT_FALSE(tokenizer.next(t));
 }
diff --git a/test/core/RegistryTest.cpp b/test/core/RegistryTest.cpp
index 45e09d3..21195f2 100644
--- a/test/core/RegistryTest.cpp
+++ b/test/core/RegistryTest.cpp
@@ -20,18 +20,83 @@

 #include <sstream>

+#include <core/common/Exceptions.hpp>
+#include <core/parser/Parser.hpp>
+#include <core/parser/ParserContext.hpp>
 #include <core/resource/ResourceLocator.hpp>
 #include <core/Registry.hpp>

 namespace ousia {

+namespace {
+class TestParser : public Parser {
+protected:
+    Rooted<Node> doParse(CharReader &reader, ParserContext &ctx) override
+    {
+        return new Node{ctx.manager};
+    }
+};
+}
+
+static const Rtti rtti1{"rtti1"};
+static const Rtti rtti2{"rtti2"};
+
+TEST(Registry, parsers)
+{
+    Registry registry;
+
+    TestParser parser1;
+    TestParser parser2;
+
+    registry.registerParser({"text/vnd.ousia.oxm", "text/vnd.ousia.oxd"},
+                            {&rtti1, &rtti2}, &parser1);
+    registry.registerParser({"text/vnd.ousia.opd"}, {&rtti2}, &parser2);
+
+    ASSERT_THROW(
+        registry.registerParser({"text/vnd.ousia.opd"}, {&rtti2}, &parser1),
+        OusiaException);
+
+    {
+        auto res = registry.getParserForMimetype("text/vnd.ousia.oxm");
+        ASSERT_EQ(&parser1, res.first);
+        ASSERT_EQ(RttiSet({&rtti1, &rtti2}), res.second);
+    }
+
+    {
+        auto res = registry.getParserForMimetype("text/vnd.ousia.opd");
+        ASSERT_EQ(&parser2, res.first);
+        ASSERT_EQ(RttiSet({&rtti2}), res.second);
+    }
+
+    {
+        auto res = registry.getParserForMimetype("application/javascript");
+        ASSERT_EQ(nullptr, res.first);
+        ASSERT_EQ(RttiSet({}), res.second);
+    }
+}
+
+TEST(Registry, extensions)
+{
+    Registry registry;
+
+    registry.registerExtension("oxm", "text/vnd.ousia.oxm");
+    registry.registerExtension("oxd", "text/vnd.ousia.oxd");
+    ASSERT_EQ("text/vnd.ousia.oxm", registry.getMimetypeForExtension("oxm"));
+    ASSERT_EQ("text/vnd.ousia.oxm", registry.getMimetypeForExtension("OXM"));
+    ASSERT_EQ("text/vnd.ousia.oxd", registry.getMimetypeForExtension("OxD"));
+    ASSERT_EQ("", registry.getMimetypeForExtension("pdf"));
+
+    ASSERT_THROW(registry.registerExtension("oxm", "text/vnd.ousia.oxm"),
+                 OusiaException);
+}
+
 TEST(Registry, locateResource)
 {
     StaticResourceLocator locator;
     locator.store("path", "test");

     Registry registry;
-    registry.registerResourceLocator(locator);
+    registry.registerResourceLocator(&locator);

     Resource res;
     ASSERT_TRUE(
diff --git a/test/core/TokenizerTest.cpp b/test/core/TokenizerTest.cpp
index 2b80662..d6e9306 100644
--- a/test/core/TokenizerTest.cpp
+++ b/test/core/TokenizerTest.cpp
@@ -65,29 +65,28 @@ TEST(Tokenizer, testTokenization)
 {
     TokenTreeNode root{{{"/", 1}, {"/*", 2}, {"*/", 3}}};
-    CharReader reader{"Test/Test /* Block Comment */"};
-    //                 12345678901234567890123456789
+    CharReader reader{"Test/Test /* Block Comment */", 0};
+    //                 012345678901234567890123456789
     //                 0        1         2

     std::vector<Token> expected = {
-        {TOKEN_TEXT, "Test", 1, 1, 5, 1},
-        {1, "/", 5, 1, 6, 1},
-        {TOKEN_TEXT, "Test ", 6, 1, 11, 1},
-        {2, "/*", 11, 1, 13, 1},
-        {TOKEN_TEXT, " Block Comment ", 13, 1, 28, 1},
-        {3, "*/", 28, 1, 30, 1}};
+        {TOKEN_TEXT, "Test", SourceLocation{0, 0, 4}},
+        {1, "/", SourceLocation{0, 4, 5}},
+        {TOKEN_TEXT, "Test ", SourceLocation{0, 5, 10}},
+        {2, "/*", SourceLocation{0, 10, 12}},
+        {TOKEN_TEXT, " Block Comment ", SourceLocation{0, 12, 27}},
+        {3, "*/", SourceLocation{0, 27, 29}}};

     Tokenizer tokenizer{reader, root};

     Token t;
     for (auto &te : expected) {
-        ASSERT_TRUE(tokenizer.next(t));
-        ASSERT_EQ(te.tokenId, t.tokenId);
-        ASSERT_EQ(te.content, t.content);
-        ASSERT_EQ(te.startColumn, t.startColumn);
-        ASSERT_EQ(te.startLine, t.startLine);
-        ASSERT_EQ(te.endColumn, t.endColumn);
-        ASSERT_EQ(te.endLine, t.endLine);
+        EXPECT_TRUE(tokenizer.next(t));
+        EXPECT_EQ(te.tokenId, t.tokenId);
+        EXPECT_EQ(te.content, t.content);
+        EXPECT_EQ(te.location.getSourceId(), t.location.getSourceId());
+        EXPECT_EQ(te.location.getStart(), t.location.getStart());
+        EXPECT_EQ(te.location.getEnd(), t.location.getEnd());
     }
     ASSERT_FALSE(tokenizer.next(t));
 }
@@ -96,23 +95,22 @@ TEST(Tokenizer, testIncompleteTokens)
 {
     TokenTreeNode root{{{"ab", 1}, {"c", 2}}};
-    CharReader reader{"ac"};
+    CharReader reader{"ac", 0};

     std::vector<Token> expected = {
-        {TOKEN_TEXT, "a", 1, 1, 2, 1},
-        {2, "c", 2, 1, 3, 1}};
+        {TOKEN_TEXT, "a", SourceLocation{0, 0, 1}},
+        {2, "c", SourceLocation{0, 1, 2}}};

     Tokenizer tokenizer{reader, root};

     Token t;
     for (auto &te : expected) {
-        ASSERT_TRUE(tokenizer.next(t));
-        ASSERT_EQ(te.tokenId, t.tokenId);
-        ASSERT_EQ(te.content, t.content);
-        ASSERT_EQ(te.startColumn, t.startColumn);
-        ASSERT_EQ(te.startLine, t.startLine);
-        ASSERT_EQ(te.endColumn, t.endColumn);
-        ASSERT_EQ(te.endLine, t.endLine);
+        EXPECT_TRUE(tokenizer.next(t));
+        EXPECT_EQ(te.tokenId, t.tokenId);
+        EXPECT_EQ(te.content, t.content);
+        EXPECT_EQ(te.location.getSourceId(), t.location.getSourceId());
+        EXPECT_EQ(te.location.getStart(), t.location.getStart());
+        EXPECT_EQ(te.location.getEnd(), t.location.getEnd());
     }
     ASSERT_FALSE(tokenizer.next(t));
 }
diff --git a/test/core/common/CharReaderTest.cpp b/test/core/common/CharReaderTest.cpp
index 702d958..fba60f9 100644
--- a/test/core/common/CharReaderTest.cpp
+++ b/test/core/common/CharReaderTest.cpp
@@ -453,9 +453,8 @@ TEST(CharReader, simpleRead)
     // The two strings must equal
     ASSERT_EQ(testStr, res);

-    // We must now be at line 1, column 15
-    ASSERT_EQ(1, reader.getLine());
-    ASSERT_EQ((int)(testStr.size() + 1), reader.getColumn());
+    // Check the char reader offset
+    ASSERT_EQ(testStr.size(), reader.getOffset());

     // If we call either read or peek, false is returned
     ASSERT_FALSE(reader.read(c));
@@ -483,15 +482,11 @@ TEST(CharReader, simplePeek)
     ASSERT_EQ(testStr, res);

     // We must now be at line 1, column 1 and NOT at the end of the stream
-    ASSERT_EQ(1, reader.getLine());
-    ASSERT_EQ(1, reader.getColumn());
+    ASSERT_EQ(0, reader.getOffset());
     ASSERT_FALSE(reader.atEnd());

-    // If we consume the peek, we must be at line 1, column 15 and we should be
-    // at the end of the stream
     reader.consumePeek();
-    ASSERT_EQ(1, reader.getLine());
-    ASSERT_EQ((int)(testStr.size() + 1), reader.getColumn());
+    ASSERT_EQ(testStr.size(), reader.getOffset());
     ASSERT_TRUE(reader.atEnd());

     // If we call either read or peek, false is returned
@@ -499,64 +494,6 @@ TEST(CharReader, simplePeek)
     ASSERT_FALSE(reader.peek(c));
 }

-TEST(CharReader, rowColumnCounter)
-{
-    // Feed a test string into the reader
-    CharReader reader{"1\n\r2\n3\r\n\n4"};
-
-    // We should currently be in line 1, column 1
-    ASSERT_EQ(1, reader.getLine());
-    ASSERT_EQ(1, reader.getColumn());
-
-    // Read two characters
-    char c;
-    for (int i = 0; i < 2; i++)
-        reader.read(c);
-    ASSERT_EQ(2, reader.getLine());
-    ASSERT_EQ(1, reader.getColumn());
-
-    // Read two characters
-    for (int i = 0; i < 2; i++)
-        reader.read(c);
-    ASSERT_EQ(3, reader.getLine());
-    ASSERT_EQ(1, reader.getColumn());
-
-    // Read three characters
-    for (int i = 0; i < 3; i++)
-        reader.read(c);
-    ASSERT_EQ(5, reader.getLine());
-    ASSERT_EQ(1, reader.getColumn());
-}
-
-TEST(CharReader, rowColumnCounterTest)
-{
-    // Feed a test string into the reader
-    CharReader reader{"1\n\r2\n3\r\n\n4", 4, 10};
-
-    // We should currently be in line 1, column 1
-    ASSERT_EQ(4, reader.getLine());
-    ASSERT_EQ(10, reader.getColumn());
-
-    // Read two characters
-    char c;
-    for (int i = 0; i < 2; i++)
-        reader.read(c);
-    ASSERT_EQ(5, reader.getLine());
-    ASSERT_EQ(1, reader.getColumn());
-
-    // Read two characters
-    for (int i = 0; i < 2; i++)
-        reader.read(c);
-    ASSERT_EQ(6, reader.getLine());
-    ASSERT_EQ(1, reader.getColumn());
-
-    // Read three characters
-    for (int i = 0; i < 3; i++)
-        reader.read(c);
-    ASSERT_EQ(8, reader.getLine());
-    ASSERT_EQ(1, reader.getColumn());
-}
-
 TEST(CharReader, linebreakSubstitution)
 {
     // Feed a test string into the reader and read all characters back
@@ -571,23 +508,6 @@ TEST(CharReader, linebreakSubstitution)
     ASSERT_EQ("this\nis\njust\na test\n\ntest\n", res);
 }

-TEST(CharReader, rowColumnCounterUTF8)
-{
-    // Feed a test string with some umlauts into the reader
-    CharReader reader{"\x61\xc3\x96\xc3\x84\xc3\x9c\xc3\x9f"};
-
-    // Read all bytes
-    char c;
-    while (reader.read(c)) {
-        // Do nothing
-    }
-
-    // The sequence above equals 5 UTF-8 characters (so after reading all the
-    // cursor is at position 6)
-    ASSERT_EQ(1, reader.getLine());
-    ASSERT_EQ(6, reader.getColumn());
-}
-
 TEST(CharReader, stream)
 {
     // Copy the test data to a string stream
@@ -608,8 +528,8 @@ TEST(CharReader, stream)
 TEST(CharReader, fork)
 {
     std::string testStr{"first line\n\n\rsecond line\n\rlast line"};
-    //                   0123456789 0 123456789012 3456789012
-    //                   0          1          2         3
+    //                   0123456789 0 1 234567890123 4 5678901234
+    //                   0            1              2          3
     char c;

     CharReader reader{testStr};
@@ -626,8 +546,7 @@ TEST(CharReader, fork)
     {
         CharReaderFork fork = reader.fork();

-        ASSERT_EQ(1, fork.getLine());
-        ASSERT_EQ(5, fork.getColumn());
+        ASSERT_EQ(4, fork.getOffset());

         fork.peek(c);
         ASSERT_EQ('i', c);
@@ -635,11 +554,7 @@ TEST(CharReader, fork)
         fork.read(c);
         ASSERT_EQ('t', c);

-        ASSERT_EQ(1, fork.getLine());
-        ASSERT_EQ(6, fork.getColumn());
-
-        ASSERT_EQ(1, reader.getLine());
-        ASSERT_EQ(5, reader.getColumn());
+        ASSERT_EQ(5, fork.getOffset());

         reader.read(c);
         reader.read(c);
@@ -647,11 +562,10 @@ TEST(CharReader, fork)
         fork.commit();
     }

-    ASSERT_EQ(1, reader.getLine());
-    ASSERT_EQ(6, reader.getColumn());
+    ASSERT_EQ(5, reader.getOffset());
 }

-TEST(CharReaderTest, context)
+/*TEST(CharReader, context)
 {
     std::string testStr{"first line\n\n\rsecond line\n\rlast line"};
     //                   0123456789 0 123456789012 3456789012
     //                   0          1          2         3
@@ -816,6 +730,7 @@ TEST(CharReaderTest, context)
         ASSERT_TRUE(ctx.truncatedStart);
         ASSERT_FALSE(ctx.truncatedEnd);
     }
-}
+}*/
+
 }
diff --git a/test/core/common/LoggerTest.cpp b/test/core/common/LoggerTest.cpp
index 66e49cd..9b20cc6 100644
--- a/test/core/common/LoggerTest.cpp
+++ b/test/core/common/LoggerTest.cpp
@@ -33,32 +33,30 @@ struct Pos {
     SourceLocation getLocation() { return pos; }
 };

-static SourceContext contextCallback(const SourceLocation &location,
-                                     void *)
+static SourceContext contextCallback(const SourceLocation &location)
 {
-    return SourceContext{"int bla = blub;", 10, true, false};
+    SourceContext ctx;
+    ctx.filename = "testfile.test";
+    ctx.startLine = 10;
+    ctx.endLine = 10;
+    ctx.startColumn = 20;
+    ctx.endColumn = 20;
+    return ctx;
 }

 TEST(TerminalLogger, log)
 {
     // Test for manual visual expection only -- no assertions
     TerminalLogger logger{std::cerr, true};
-    logger.pushFile("test.odp");
+    logger.setSourceContextCallback(contextCallback);

-    logger.debug("This is a test debug message", SourceLocation{10, 20});
-    logger.debug("This is a test debug message with no column",
-                 SourceLocation{10});
-    logger.debug("This is a test debug message with no line");
-    logger.note("This is a test note", SourceLocation{10, 20});
-    logger.warning("This is a test warning", SourceLocation{10, 20});
-    logger.error("This is a test error", SourceLocation{10, 20});
-    logger.fatalError("This is a test fatal error!", SourceLocation{10, 20});
+    logger.debug("This is a test debug message");
+    logger.note("This is a test note");
+    logger.warning("This is a test warning");
+    logger.error("This is a test error");
+    logger.fatalError("This is a test fatal error!");

-    logger.pushFile("test2.odp", SourceLocation{}, contextCallback);
-    logger.error("This is a test error with context", SourceLocation{10, 20});
-    logger.popFile();
-
-    Pos pos(SourceLocation{10, 20});
+    logger.error("This is a test error with context");

     try {
         throw LoggableException{"An exception"};
@@ -66,15 +64,6 @@ TEST(TerminalLogger, log)
     catch (const LoggableException &ex) {
         logger.log(ex);
     }
-
-    try {
-        throw LoggableException{"An exception at position", pos};
-    }
-    catch (const LoggableException &ex) {
-        logger.log(ex);
-    }
-
-    logger.log(Severity::ERROR, "This is a positioned log message", pos);
 }

 TEST(TerminalLogger, fork)
@@ -82,16 +71,11 @@ TEST(TerminalLogger, fork)
     // Test for manual visual expection only -- no assertions
     TerminalLogger logger{std::cerr, true};

+    logger.setSourceContextCallback(contextCallback);
+
     LoggerFork fork = logger.fork();

-    fork.pushFile("test.odp", SourceLocation{}, contextCallback);
-    fork.error("This is a test error with context", SourceLocation{10, 20});
-    fork.pushFile("test2.odp");
-    fork.error("This is a test error without context");
-    fork.popFile();
-    fork.error("Another error");
-    fork.popFile();
-    fork.error("Another error");
+    fork.error("This is a test error with context");

     // Print all error messages
     fork.commit();
diff --git a/test/core/common/UtilsTest.cpp b/test/core/common/UtilsTest.cpp
index 53beb79..c8f6922 100644
--- a/test/core/common/UtilsTest.cpp
+++ b/test/core/common/UtilsTest.cpp
@@ -54,5 +54,24 @@ TEST(Utils, split)
     ASSERT_EQ(std::vector<std::string>({"", "a", "be", "c", ""}),
               Utils::split(".a.be.c.", '.'));
 }
+
+TEST(Utils, toLower)
+{
+    ASSERT_EQ("", Utils::toLower(""));
+    ASSERT_EQ("foo00", Utils::toLower("foo00"));
+    ASSERT_EQ("foo00", Utils::toLower("fOO00"));
+}
+
+TEST(Utils, extractFileExtension)
+{
+    ASSERT_EQ("", Utils::extractFileExtension(""));
+    ASSERT_EQ("", Utils::extractFileExtension("test"));
+    ASSERT_EQ("ext", Utils::extractFileExtension("test.ext"));
+    ASSERT_EQ("", Utils::extractFileExtension("foo.bar/test"));
+    ASSERT_EQ("", Utils::extractFileExtension("foo.bar\\test"));
+    ASSERT_EQ("ext", Utils::extractFileExtension("foo.bar/test.ext"));
+    ASSERT_EQ("ext", Utils::extractFileExtension("foo.bar/test.EXT"));
+}
+
 }
diff --git a/test/core/parser/ParserStackTest.cpp b/test/core/parser/ParserStackTest.cpp
index 69978b0..81160da 100644
--- a/test/core/parser/ParserStackTest.cpp
+++ b/test/core/parser/ParserStackTest.cpp
@@ -24,7 +24,6 @@
 #include <core/parser/StandaloneParserContext.hpp>

 namespace ousia {
-namespace parser {

 static const State STATE_DOCUMENT = 0;
 static const State STATE_BODY = 1;
@@ -168,7 +167,5 @@ TEST(ParserStack, validation)
     ASSERT_FALSE(logger.hasError());
     s.end();
 }
-
-}
 }
diff --git a/test/core/parser/StandaloneParserContext.hpp b/test/core/parser/StandaloneParserContext.hpp
index 347d34f..51cd1e6 100644
--- a/test/core/parser/StandaloneParserContext.hpp
+++ b/test/core/parser/StandaloneParserContext.hpp
@@ -23,16 +23,18 @@

 #include <core/model/Project.hpp>
 #include <core/parser/Parser.hpp>
+#include <core/parser/ParserScope.hpp>
+#include <core/parser/ParserContext.hpp>
+#include <core/Registry.hpp>

 namespace ousia {
-namespace parser {

 struct StandaloneParserContext {
 public:
     Manager manager;
     Logger logger;
-    Scope scope;
     Registry registry;
+    ParserScope scope;
     Rooted<model::Project> project;
     ParserContext context;

@@ -47,7 +49,6 @@ public:
       context(scope, registry, externalLogger, manager, project){};
 };
 }
-}

 #endif /* _OUSIA_STANDALONE_PARSER_CONTEXT_ */
diff --git a/test/plugins/css/CSSParserTest.cpp b/test/plugins/css/CSSParserTest.cpp
index 84522b3..420241e 100644
--- a/test/plugins/css/CSSParserTest.cpp
+++ b/test/plugins/css/CSSParserTest.cpp
@@ -26,8 +26,6 @@
 #include <core/parser/StandaloneParserContext.hpp>

 namespace ousia {
-namespace parser {
-namespace css {

 TEST(CSSParser, testParseSelectors)
 {
     // create a string describing a SelectorTree
@@ -268,8 +266,7 @@ void assertException(std::string css)
     CharReader reader(css);
     TerminalLogger logger(std::cerr, true);
     {
-        ScopedLogger sl(logger, "test.css", SourceLocation{},
-                        CharReader::contextCallback, &reader);
+        ScopedLogger sl(logger);
         StandaloneParserContext ctx(sl);
         CSSParser instance;

@@ -296,5 +293,3 @@ TEST(CSSParser, testParseExceptions)
     assertException("A > ");
 }
 }
-}
-}
diff --git a/test/plugins/filesystem/FileLocatorTest.cpp b/test/plugins/filesystem/FileLocatorTest.cpp
index 17d43dd..beb091d 100644
--- a/test/plugins/filesystem/FileLocatorTest.cpp
+++ b/test/plugins/filesystem/FileLocatorTest.cpp
@@ -142,6 +142,26 @@ TEST(FileLocator, testLocate)
     assert_not_located(locator, "c.txt", "", ResourceType::SCRIPT);
 }

+TEST(FileLocator, testLocateRelative)
+{
+    FileLocator locator;
+    locator.addUnittestSearchPath("filesystem");
+
+    // Add the respective search path
+    locator.addUnittestSearchPath("filesystem/b");
+
+    Resource resA, resC;
+    ASSERT_TRUE(locator.locate(resA, "a.txt"));
+    ASSERT_TRUE(locator.locate(resC, "c.txt"));
+
+    Resource resD;
+    ASSERT_TRUE(locator.locate(resD, "d.txt"));
+    ASSERT_TRUE(locator.locate(resD, "d.txt", ResourceType::UNKNOWN, resA));
+    ASSERT_TRUE(locator.locate(resD, "d.txt", ResourceType::UNKNOWN, resC));
+    ASSERT_FALSE(locator.locate(resD, "./d.txt", ResourceType::UNKNOWN, resA));
+    ASSERT_TRUE(locator.locate(resD, "./d.txt", ResourceType::UNKNOWN, resC));
+}
+
 TEST(FileLocator, testStream)
 {
     FileLocator locator;
diff --git a/test/plugins/xml/XmlParserTest.cpp b/test/plugins/xml/XmlParserTest.cpp
index f1956e0..52b64e5 100644
--- a/test/plugins/xml/XmlParserTest.cpp
+++ b/test/plugins/xml/XmlParserTest.cpp
@@ -20,14 +20,13 @@

 #include <gtest/gtest.h>

+#include <core/common/CharReader.hpp>
 #include <core/common/Logger.hpp>
 #include <core/parser/StandaloneParserContext.hpp>

 #include <plugins/xml/XmlParser.hpp>

 namespace ousia {
-namespace parser {
-namespace xml {

 static TerminalLogger logger(std::cerr, true);

@@ -41,7 +40,6 @@ TEST(XmlParser, mismatchedTagException)
         p.parse("<document>\n</document2>", ctx.context);
     }
     catch (LoggableException ex) {
-        ASSERT_EQ(2, ex.loc.line);
         hadException = true;
     }
     ASSERT_TRUE(hadException);
@@ -82,8 +80,6 @@ TEST(XmlParser, namespaces)
     XmlParser p;
     CharReader reader(TEST_DATA);
     {
-        ScopedLogger sl(logger, "test.oxd", SourceLocation{},
-                        CharReader::contextCallback, &reader);
         try {
             p.parse(TEST_DATA, ctx.context);
         }
@@ -94,6 +90,4 @@ TEST(XmlParser, namespaces)
     }
 }
 }
-}
-}