From 84c9abc3e9762c4486ddc5ca0352a5d697a51987 Mon Sep 17 00:00:00 2001 From: Andreas Stöckel Date: Wed, 25 Feb 2015 23:09:26 +0100 Subject: start of branch, commit log will be rewritten --- src/core/parser/utils/TokenizedData.cpp | 353 ++++++++++++++++++++++++++------ 1 file changed, 287 insertions(+), 66 deletions(-) (limited to 'src/core/parser/utils/TokenizedData.cpp') diff --git a/src/core/parser/utils/TokenizedData.cpp b/src/core/parser/utils/TokenizedData.cpp index fc7bfaf..aeefa26 100644 --- a/src/core/parser/utils/TokenizedData.cpp +++ b/src/core/parser/utils/TokenizedData.cpp @@ -47,6 +47,17 @@ struct TokenMark { */ TokenLength len; + /** + * Specifies whether the token is special or not. + */ + bool special; + + /** + * Maximum token length. + */ + static constexpr TokenLength MaxTokenLength = + std::numeric_limits::max(); + /** * Constructor of the TokenMark structure, initializes all members with the * given values. @@ -55,9 +66,10 @@ struct TokenMark { * @param bufStart is the start position of the TokenMark in the internal * character buffer. * @param len is the length of the token. + * @param special modifies the sort order, special tokens are prefered. */ - TokenMark(TokenId id, size_t bufStart, TokenLength len) - : bufStart(bufStart), id(id), len(len) + TokenMark(TokenId id, size_t bufStart, TokenLength len, bool special) + : bufStart(bufStart), id(id), len(len), special(special) { } @@ -72,7 +84,8 @@ struct TokenMark { TokenMark(size_t bufStart) : bufStart(bufStart), id(Tokens::Empty), - len(std::numeric_limits::max()) + len(MaxTokenLength), + special(true) { } @@ -86,8 +99,22 @@ struct TokenMark { */ friend bool operator<(const TokenMark &m1, const TokenMark &m2) { - return (m1.bufStart < m2.bufStart) || - (m1.bufStart == m2.bufStart && m1.len > m2.len); + // Prefer the mark with the smaller bufStart + if (m1.bufStart < m2.bufStart) { + return true; + } + + // Special handling for marks with the same bufStart + if (m1.bufStart == m2.bufStart) { + // If exactly one of the two marks is special, return true if this + // one is special + if (m1.special != m2.special) { + return m1.special; + } + // Otherwise prefer longer marks + return m1.len > m2.len; + } + return false; } }; } @@ -110,19 +137,44 @@ private: std::vector buf; /** - * Vector containing all token marks. + * Buffset storing the "protected" flag of the character data. */ - std::vector marks; + std::vector protectedChars; /** * Vector storing all the character offsets efficiently. */ SourceOffsetVector offsets; + /** + * Vector containing all token marks. + */ + mutable std::vector marks; + + /** + * Position of the first linebreak in a sequence of linebreaks. + */ + size_t firstLinebreak; + + /** + * Current indentation level. + */ + uint16_t currentIndentation; + + /** + * Last indentation level. + */ + uint16_t lastIndentation; + + /** + * Number of linebreaks without any content between them. + */ + uint16_t numLinebreaks; + /** * Flag indicating whether the internal "marks" vector is sorted. */ - bool sorted; + mutable bool sorted; public: /** @@ -132,7 +184,7 @@ public: * @param sourceId is the source identifier that should be used for * constructing the location when returning tokens. 
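The ordering defined by TokenMark::operator< above is what later lets next() locate the right mark with a single std::lower_bound call: marks are grouped by buffer position, special marks come first within a group, and longer marks beat shorter ones; the TokenMark(size_t) constructor builds exactly the sentinel that sorts to the front of its group. The following standalone sketch uses a simplified stand-in for TokenMark (plain integers instead of TokenId/TokenLength, not part of the patch) to demonstrate that ordering:

#include <algorithm>
#include <cassert>
#include <cstddef>
#include <cstdint>
#include <limits>
#include <vector>

struct Mark {
    std::size_t bufStart;  // position in the character buffer
    std::uint16_t len;     // token length; the sentinel uses the maximum value
    bool special;          // special marks sort before ordinary ones

    // Same comparison as TokenMark::operator< above: smaller bufStart first,
    // then special marks, then longer marks.
    friend bool operator<(const Mark &m1, const Mark &m2)
    {
        if (m1.bufStart < m2.bufStart) {
            return true;
        }
        if (m1.bufStart == m2.bufStart) {
            if (m1.special != m2.special) {
                return m1.special;
            }
            return m1.len > m2.len;
        }
        return false;
    }
};

int main()
{
    std::vector<Mark> marks{{10, 2, false}, {10, 5, false}, {10, 0, true}};
    std::sort(marks.begin(), marks.end());

    // Within one buffer position the special mark comes first, then the
    // longer of the two ordinary marks.
    assert(marks[0].special);
    assert(marks[1].len == 5 && marks[2].len == 2);

    // A sentinel built like TokenMark(bufStart) -- maximum length, special --
    // sorts to the front of its position group, so std::lower_bound yields
    // the first mark at or after a given cursor position.
    Mark sentinel{10, std::numeric_limits<std::uint16_t>::max(), true};
    auto it = std::lower_bound(marks.begin(), marks.end(), sentinel);
    assert(it == marks.begin());
    return 0;
}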
*/ - TokenizedDataImpl(SourceId sourceId) : sourceId(sourceId), sorted(true) {} + TokenizedDataImpl(SourceId sourceId) : sourceId(sourceId) { clear(); } /** * Appends a complete string to the internal character buffer and extends @@ -140,22 +192,22 @@ public: * * @param data is the string that should be appended to the buffer. * @param offsStart is the start offset in bytes in the input file. + * @param protect if set to true, the appended characters will not be + * affected by whitespace handling, they will be returned as is. * @return the current size of the internal byte buffer. The returned value * is intended to be used for the "mark" function. */ - size_t append(const std::string &data, SourceOffset offsStart) - { // Append the data to the internal buffer - buf.insert(buf.end(), data.begin(), data.end()); - - // Extend the text regions, interpolate the source position (this may - // yield incorrect results) - const size_t size = buf.size(); - for (SourceOffset offs = offsStart; offs < offsStart + data.size(); - offs++) { - offsets.storeOffset(offs, offs + 1); + size_t append(const std::string &data, SourceOffset offsStart, bool protect) + { + for (size_t i = 0; i < data.size(); i++) { + if (offsStart != InvalidSourceOffset) { + append(data[i], offsStart + i, offsStart + i + 1, protect); + } else { + append(data[i], InvalidSourceOffset, InvalidSourceOffset, + protect); + } } - - return size; + return size(); } /** @@ -165,16 +217,86 @@ public: * @param c is the character that should be appended to the buffer. * @param offsStart is the start offset in bytes in the input file. * @param offsEnd is the end offset in bytes in the input file. + * @param protect if set to true, the appended character will not be + * affected by whitespace handling, it will be returned as is. * @return the current size of the internal byte buffer. The returned value * is intended to be used for the "mark" function. */ - size_t append(char c, SourceOffset offsStart, SourceOffset offsEnd) + size_t append(char c, SourceOffset offsStart, SourceOffset offsEnd, + bool protect) { // Add the character to the list and store the location of the character // in the source file buf.push_back(c); + protectedChars.push_back(protect); offsets.storeOffset(offsStart, offsEnd); - return buf.size(); + + // Insert special tokens + const size_t size = buf.size(); + const bool isWhitespace = Utils::isWhitespace(c); + const bool isLinebreak = Utils::isLinebreak(c); + + // Handle linebreaks + if (isLinebreak) { + // Mark linebreaks as linebreak + mark(Tokens::Newline, size - 1, 1, false); + + // The linebreak sequence started at the previous character + if (numLinebreaks == 0) { + firstLinebreak = size - 1; + } + + // Reset the indentation + currentIndentation = 0; + + // Increment the number of linebreaks + numLinebreaks++; + + const size_t markStart = firstLinebreak; + const size_t markLength = size - firstLinebreak; + + // Issue two consecutive linebreaks as paragraph token + if (numLinebreaks == 2) { + mark(Tokens::Paragraph, markStart, markLength, false); + } + + // Issue three consecutive linebreaks as paragraph token + if (numLinebreaks >= 3) { + mark(Tokens::Section, markStart, markLength, false); + } + } else if (isWhitespace) { + // Count the whitespace characters at the beginning of the line + if (numLinebreaks > 0) { + // Implement the UNIX/Pyhton rule for tabs: Tabs extend to the + // next multiple of eight. 
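A worked example of that rule as a small standalone sketch (the helper name nextTabStop is illustrative, not part of the patch): adding eight and clearing the three lowest bits rounds the current column up to the next tab stop.

#include <cstdint>

constexpr std::uint16_t nextTabStop(std::uint16_t indentation)
{
    return static_cast<std::uint16_t>((indentation + 8) & ~7);
}

static_assert(nextTabStop(0) == 8, "column 0 advances to column 8");
static_assert(nextTabStop(3) == 8, "a partial indent still lands on column 8");
static_assert(nextTabStop(8) == 16, "a full tab stop advances to the next one");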
+ if (c == '\t') { + currentIndentation = (currentIndentation + 8) & ~7; + } else { + currentIndentation++; + } + } + } + + // Issue indent and unindent tokens + if (!isWhitespace && numLinebreaks > 0) { + // Issue a larger indentation than that in the previous line as + // "Indent" token + if (currentIndentation > lastIndentation) { + mark(Tokens::Indent, size - 1, 0, true); + } + + // Issue a smaller indentation than that in the previous line as + // "Dedent" token + if (currentIndentation < lastIndentation) { + mark(Tokens::Dedent, size - 1, 0, true); + } + + // Reset the internal state machine + lastIndentation = currentIndentation; + numLinebreaks = 0; + } + + return size; } /** @@ -184,11 +306,12 @@ public: * @param bufStart is the start position in the internal buffer. Use the * values returned by append to calculate the start position. * @param len is the length of the token. + * @param special tags the mark as "special", prefering it in the sort order */ - void mark(TokenId id, size_t bufStart, TokenLength len) + void mark(TokenId id, size_t bufStart, TokenLength len, bool special) { // Push the new instance back onto the list - marks.emplace_back(id, bufStart, len); + marks.emplace_back(id, bufStart, len, special); // Update the sorted flag as soon as more than one element is in the // list @@ -212,9 +335,13 @@ public: * @return true if a token was returned, false if no more tokens are * available. */ - bool next(Token &token, WhitespaceMode mode, - const std::unordered_set &tokens, size_t &cursor) + bool next(Token &token, WhitespaceMode mode, const TokenSet &tokens, + TokenizedDataCursor &cursor) const { + // Some variables for convenient access + size_t &bufPos = cursor.bufPos; + size_t &markPos = cursor.markPos; + // Sort the "marks" vector if it has not been sorted yet. if (!sorted) { std::sort(marks.begin(), marks.end()); @@ -222,10 +349,11 @@ public: } // Fetch the next larger TokenMark instance, make sure the token is in - // the "enabled" list - auto it = - std::lower_bound(marks.begin(), marks.end(), TokenMark(cursor)); - while (it != marks.end() && tokens.count(it->id) == 0) { + // the "enabled" list and within the buffer range + auto it = std::lower_bound(marks.begin() + markPos, marks.end(), + TokenMark(bufPos)); + while (it != marks.end() && (tokens.count(it->id) == 0 || + it->bufStart + it->len > buf.size())) { it++; } @@ -236,15 +364,15 @@ public: // Depending on the whitespace mode, fetch all the data between the // cursor position and the calculated end position and return a token // containing that data. 
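The interplay of numLinebreaks, currentIndentation and lastIndentation in append() above boils down to a small state machine: after a run of linebreaks the whitespace prefix of the next line is measured, and the first non-whitespace character triggers an indent or dedent event if the level changed relative to the previous line. A condensed standalone sketch of that state machine, with a simplified whitespace test ('\n', ' ', '\t' only) and an illustrative emit() callback standing in for mark():

#include <cassert>
#include <cstdint>
#include <functional>
#include <string>
#include <vector>

void trackIndentation(const std::string &text,
                      const std::function<void(const char *)> &emit)
{
    std::uint16_t currentIndentation = 0;
    std::uint16_t lastIndentation = 0;
    std::uint16_t numLinebreaks = 1;  // as in clear(): assume a leading linebreak

    for (char c : text) {
        if (c == '\n') {
            // A linebreak resets the indentation measurement
            currentIndentation = 0;
            numLinebreaks++;
        } else if (c == ' ' || c == '\t') {
            // Only whitespace at the beginning of a line counts
            if (numLinebreaks > 0) {
                currentIndentation = (c == '\t')
                                         ? (currentIndentation + 8) & ~7
                                         : currentIndentation + 1;
            }
        } else if (numLinebreaks > 0) {
            // First non-whitespace character after a linebreak: compare the
            // new indentation level against that of the previous line
            if (currentIndentation > lastIndentation) {
                emit("indent");
            } else if (currentIndentation < lastIndentation) {
                emit("dedent");
            }
            lastIndentation = currentIndentation;
            numLinebreaks = 0;
        }
    }
}

int main()
{
    std::vector<std::string> events;
    trackIndentation("a\n  b\nc",
                     [&events](const char *e) { events.emplace_back(e); });
    // One "indent" before 'b', one "dedent" before 'c'
    assert((events == std::vector<std::string>{"indent", "dedent"}));
    return 0;
}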
- if (cursor < end && cursor < buf.size()) { + if (bufPos < end && bufPos < buf.size()) { switch (mode) { case WhitespaceMode::PRESERVE: { token = Token( - Tokens::Data, std::string(&buf[cursor], end - cursor), + Tokens::Data, std::string(&buf[bufPos], end - bufPos), SourceLocation(sourceId, - offsets.loadOffset(cursor).first, + offsets.loadOffset(bufPos).first, offsets.loadOffset(end).first)); - cursor = end; + bufPos = end; return true; } case WhitespaceMode::TRIM: @@ -254,30 +382,35 @@ public: size_t stringStart; size_t stringEnd; std::string content; + const char *cBuf = &buf[bufPos]; + auto filter = [cBuf, this](size_t i) -> bool { + return Utils::isWhitespace(cBuf[i]) && + !protectedChars[i]; + }; if (mode == WhitespaceMode::TRIM) { - content = Utils::trim(&buf[cursor], end - cursor, - stringStart, stringEnd); + content = Utils::trim(cBuf, end - bufPos, stringStart, + stringEnd, filter); } else { - content = Utils::collapse(&buf[cursor], end - cursor, - stringStart, stringEnd); + content = Utils::collapse( + cBuf, end - bufPos, stringStart, stringEnd, filter); } // If the resulting string is empty (only whitespaces), // abort if (content.empty()) { - cursor = end; + bufPos = end; break; } // Calculate the absolute positions and return the token - stringStart += cursor; - stringEnd += cursor; + stringStart += bufPos; + stringEnd += bufPos; token = Token( Tokens::Data, content, SourceLocation(sourceId, offsets.loadOffset(stringStart).first, offsets.loadOffset(stringEnd).first)); - cursor = end; + bufPos = end; return true; } } @@ -286,14 +419,18 @@ public: // If start equals end, we're currently directly at a token // instance. Return this token and advance the cursor to the end of // the token. - if (cursor == end && it != marks.end()) { + if (bufPos == end && it != marks.end()) { const size_t tokenStart = it->bufStart; const size_t tokenEnd = it->bufStart + it->len; token = Token( it->id, std::string(&buf[tokenStart], it->len), SourceLocation(sourceId, offsets.loadOffset(tokenStart).first, offsets.loadOffset(tokenEnd).first)); - cursor = tokenEnd; + + // Update the cursor, consume the token by incrementing the marks + // pos counter + bufPos = tokenEnd; + markPos = it - marks.begin() + 1; return true; } @@ -303,12 +440,63 @@ public: return false; } + /** + * Resets the TokenizedDataImpl instance to the state it had when it was + * constructred. + */ + void clear() + { + buf.clear(); + protectedChars.clear(); + offsets.clear(); + marks.clear(); + currentIndentation = 0; + lastIndentation = 0; + numLinebreaks = 1; // Assume the stream starts with a linebreak + sorted = true; + } + + /** + * Trims the length of the TokenizedDataImpl instance to the given length. + * + * @param length is the number of characters to which the TokenizedData + * instance should be trimmed. + */ + void trim(size_t length) + { + if (length < size()) { + buf.resize(length); + offsets.trim(length); + } + } + /** * Returns the current size of the internal buffer. * * @return the size of the internal character buffer. */ - size_t getSize() { return buf.size(); } + size_t size() const { return buf.size(); } + + /** + * Returns true if no data is in the data buffer. + * + * @return true if the "buf" instance has no data. + */ + bool empty() const { return buf.empty(); } + + /** + * Returns the current location of all data in the buffer. + * + * @return the location of the entire data represented by this instance. 
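For reference, the three whitespace modes handled in next() above behave as follows: PRESERVE returns the raw character slice, TRIM only strips whitespace at both ends, and COLLAPSE additionally folds inner whitespace runs into single spaces, with protected characters excluded from the filter predicate. The sketch below is a simplified standalone collapse, not the real Utils::collapse (whose exact output around protected characters may differ); it only illustrates the idea of driving the whitespace decision through a position-based predicate:

#include <cassert>
#include <cstddef>
#include <functional>
#include <string>

std::string collapse(const std::string &s,
                     const std::function<bool(std::size_t)> &isCollapsible)
{
    std::string out;
    bool pendingSpace = false;
    for (std::size_t i = 0; i < s.size(); i++) {
        if (isCollapsible(i)) {
            pendingSpace = !out.empty();  // leading whitespace is dropped
        } else {
            if (pendingSpace) {
                out.push_back(' ');
                pendingSpace = false;
            }
            out.push_back(s[i]);
        }
    }
    return out;  // trailing whitespace is dropped as well
}

int main()
{
    const std::string s = "  a   b  ";
    auto ws = [&s](std::size_t i) { return s[i] == ' '; };

    // COLLAPSE: surrounding whitespace is removed, inner runs of whitespace
    // become a single space.
    assert(collapse(s, ws) == "a b");

    // Excluding a "protected" position (here: index 3) from the predicate
    // keeps that character verbatim, mirroring the protectedChars check in
    // the filter lambda above.
    auto wsUnprotected = [&s](std::size_t i) { return s[i] == ' ' && i != 3; };
    assert(collapse(s, wsUnprotected) == "a  b");
    return 0;
}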
+ */ + SourceLocation getLocation() const + { + if (empty()) { + return SourceLocation{sourceId}; + } + return SourceLocation{sourceId, offsets.loadOffset(0).first, + offsets.loadOffset(size()).second}; + } }; /* Class TokenizedData */ @@ -316,50 +504,83 @@ public: TokenizedData::TokenizedData() : TokenizedData(InvalidSourceId) {} TokenizedData::TokenizedData(SourceId sourceId) - : impl(std::make_shared(sourceId)), cursor(0) + : impl(std::make_shared(sourceId)) { } TokenizedData::~TokenizedData() {} -size_t TokenizedData::append(const std::string &data, SourceOffset offsStart) +size_t TokenizedData::append(const std::string &data, SourceOffset offsStart, + bool protect) { - return impl->append(data, offsStart); + return impl->append(data, offsStart, protect); } size_t TokenizedData::append(char c, SourceOffset offsStart, - SourceOffset offsEnd) + SourceOffset offsEnd, bool protect) { - return impl->append(c, offsStart, offsEnd); + return impl->append(c, offsStart, offsEnd, protect); } void TokenizedData::mark(TokenId id, TokenLength len) { - impl->mark(id, impl->getSize() - len, len); + impl->mark(id, impl->size() - len, len, false); } void TokenizedData::mark(TokenId id, size_t bufStart, TokenLength len) { - impl->mark(id, bufStart, len); + impl->mark(id, bufStart, len, false); } -bool TokenizedData::next(Token &token, WhitespaceMode mode) +void TokenizedData::clear() { impl->clear(); } + +void TokenizedData::trim(size_t length) { impl->trim(length); } + +size_t TokenizedData::size() const { return impl->size(); } + +bool TokenizedData::empty() const { return impl->empty(); } + +SourceLocation TokenizedData::getLocation() const { - return impl->next(token, mode, tokens, cursor); + return impl->getLocation(); } -bool TokenizedData::text(Token &token, WhitespaceMode mode) +TokenizedDataReader TokenizedData::reader() const { - // Copy the current cursor position to not update the actual cursor position - // if the operation was not successful - size_t cursorCopy = cursor; - if (!impl->next(token, mode, tokens, cursorCopy) || - token.id != Tokens::Data) { - return false; - } + return TokenizedDataReader(impl, TokenizedDataCursor(), + TokenizedDataCursor()); +} + +/* Class TokenizedDataReader */ - // There is indeed a text token, update the internal cursor position - cursor = cursorCopy; - return true; +TokenizedDataReader::TokenizedDataReader( + std::shared_ptr impl, + const TokenizedDataCursor &readCursor, + const TokenizedDataCursor &peekCursor) + : impl(impl), readCursor(readCursor), peekCursor(peekCursor) +{ +} + +TokenizedDataReaderFork TokenizedDataReader::fork() +{ + return TokenizedDataReaderFork(*this, impl, readCursor, peekCursor); +} + +bool TokenizedDataReader::atEnd() const +{ + return readCursor.bufPos >= impl->size(); +} + +bool TokenizedDataReader::read(Token &token, const TokenSet &tokens, + WhitespaceMode mode) +{ + peekCursor = readCursor; + return impl->next(token, mode, tokens, readCursor); +} + +bool TokenizedDataReader::peek(Token &token, const TokenSet &tokens, + WhitespaceMode mode) +{ + return impl->next(token, mode, tokens, peekCursor); } } -- cgit v1.2.3 From b95cf0ddd1aee517ed948155d43da4e2b64cfcdf Mon Sep 17 00:00:00 2001 From: Andreas Stöckel Date: Thu, 26 Feb 2015 00:21:33 +0100 Subject: Fixed non-initialized variable --- src/core/parser/utils/TokenizedData.cpp | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) (limited to 'src/core/parser/utils/TokenizedData.cpp') diff --git a/src/core/parser/utils/TokenizedData.cpp 
b/src/core/parser/utils/TokenizedData.cpp index aeefa26..bcbbe43 100644 --- a/src/core/parser/utils/TokenizedData.cpp +++ b/src/core/parser/utils/TokenizedData.cpp @@ -26,6 +26,12 @@ #include "TokenizedData.hpp" namespace ousia { +/** + * Maximum token length. + */ +constexpr TokenLength MaxTokenLength = + std::numeric_limits::max(); + namespace { /** * Structure used to represent the position of a token in the internal @@ -52,12 +58,6 @@ struct TokenMark { */ bool special; - /** - * Maximum token length. - */ - static constexpr TokenLength MaxTokenLength = - std::numeric_limits::max(); - /** * Constructor of the TokenMark structure, initializes all members with the * given values. @@ -450,6 +450,7 @@ public: protectedChars.clear(); offsets.clear(); marks.clear(); + firstLinebreak = 0; currentIndentation = 0; lastIndentation = 0; numLinebreaks = 1; // Assume the stream starts with a linebreak -- cgit v1.2.3 From 6776f53b60ade0ece65ab895d23476761c5481d5 Mon Sep 17 00:00:00 2001 From: Andreas Stöckel Date: Sat, 28 Feb 2015 15:47:13 +0100 Subject: Trimming forgotten protectedChars buffer --- src/core/parser/utils/TokenizedData.cpp | 1 + 1 file changed, 1 insertion(+) (limited to 'src/core/parser/utils/TokenizedData.cpp') diff --git a/src/core/parser/utils/TokenizedData.cpp b/src/core/parser/utils/TokenizedData.cpp index bcbbe43..c3c4f98 100644 --- a/src/core/parser/utils/TokenizedData.cpp +++ b/src/core/parser/utils/TokenizedData.cpp @@ -467,6 +467,7 @@ public: { if (length < size()) { buf.resize(length); + protectedChars.resize(length); offsets.trim(length); } } -- cgit v1.2.3 From 1c33913ebb5d9202575d3ca99bd17366d30f2261 Mon Sep 17 00:00:00 2001 From: Andreas Stöckel Date: Tue, 3 Mar 2015 00:30:38 +0100 Subject: Started restructuring and adapting Stack class, reenabled unit tests (does not compile right now) --- CMakeLists.txt | 2 +- src/core/parser/stack/Stack.cpp | 447 ++++++++------- src/core/parser/stack/Stack.hpp | 13 +- src/core/parser/utils/TokenizedData.cpp | 10 +- src/core/parser/utils/TokenizedData.hpp | 12 + test/core/parser/stack/StackTest.cpp | 959 ++++++++++++++++---------------- 6 files changed, 744 insertions(+), 699 deletions(-) (limited to 'src/core/parser/utils/TokenizedData.cpp') diff --git a/CMakeLists.txt b/CMakeLists.txt index f99c212..c7ad7a3 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -326,7 +326,7 @@ IF(TEST) test/core/model/StyleTest test/core/model/TypesystemTest test/core/parser/ParserScopeTest -# test/core/parser/stack/StackTest + test/core/parser/stack/StackTest test/core/parser/stack/StateTest test/core/parser/stack/TokenRegistryTest test/core/parser/utils/SourceOffsetVectorTest diff --git a/src/core/parser/stack/Stack.cpp b/src/core/parser/stack/Stack.cpp index e5bd224..89217ea 100644 --- a/src/core/parser/stack/Stack.cpp +++ b/src/core/parser/stack/Stack.cpp @@ -86,6 +86,12 @@ public: */ bool inImplicitDefaultField : 1; + /** + * Set to true if the handler current is in an implicitly started range + * field. + */ + bool inImplicitRangeField: 1; + /** * Set to false if this field is only opened pro-forma and does not accept * any data. Otherwise set to true. @@ -230,6 +236,18 @@ void HandlerInfo::fieldEnd() * Stub instance of HandlerInfo containing no handler information. */ static HandlerInfo EmptyHandlerInfo{true, true, false, true, true, false, true}; + +/** + * Small helper class makeing sure the reference at some variable is reset once + * the scope is left. 
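GuardedTemporaryPointer is a small RAII helper: its constructor publishes the address of a stack-allocated object through a member pointer, and its destructor clears that pointer again, so the member can never outlive the scope that owns the object. A standalone sketch of the same pattern (the Consumer type and its names are illustrative, not part of the patch):

#include <cassert>

template <class T>
struct GuardedTemporaryPointer {
    T **ptr;
    GuardedTemporaryPointer(T *ref, T **ptr) : ptr(ptr) { *ptr = ref; }
    ~GuardedTemporaryPointer() { *ptr = nullptr; }
};

struct Consumer {
    int *current = nullptr;  // plays the role of the "dataReader" member

    void process(int value)
    {
        // Publish a pointer to the local for the duration of this call only
        GuardedTemporaryPointer<int> guard(&value, &current);
        assert(current != nullptr);
    }  // guard's destructor resets "current" to nullptr here
};

int main()
{
    Consumer c;
    c.process(42);
    assert(c.current == nullptr);  // no dangling pointer survives the call
    return 0;
}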
+ */ +template +struct GuardedTemporaryPointer { + T **ptr; + GuardedTemporaryPointer(T *ref, T **ptr) : ptr(ptr) { *ptr = ref; } + + ~GuardedTemporaryPointer() { *ptr = nullptr; } +}; } /* Helper functions */ @@ -352,11 +370,18 @@ private: HandlerInfo &lastInfo(); /** - * Ends all handlers that currently are not inside a field and already had - * a default field. This method is called whenever the data() and command() - * events are reached. + * Returns a set containing the tokens that should currently be processed + * by the TokenizedData instance. + * + * @return a TokenSet instance containing all tokens that should currently + * be processed. */ - void endOverdueHandlers(); + TokenSet currentTokens() const; + + /** + * Returns the whitespace mode defined by the current command. + */ + WhitespaceMode currentWhitespaceMode() const; /** * Ends the current handler and removes the corresponding element from the @@ -365,13 +390,14 @@ private: void endCurrentHandler(); /** - * Tries to start a default field for the current handler, if currently the - * handler is not inside a field and did not have a default field yet. - * - * @return true if the handler is inside a field, false if no field could - * be started. + * Ends all handlers that currently are not inside a field and already had + * a default field. Tries to start a default field for the current handler, + * if currently the handler is not inside a field and did not have a default + * field yet. This method is called whenever the data(), startAnnotation(), + * startToken(), startCommand(), annotationStart() or annotationEnd() events + * are reached. */ - bool ensureHandlerIsInField(); + void prepareCurrentHandler(); /** * Returns true if all handlers on the stack are currently valid, or false @@ -381,6 +407,30 @@ private: */ bool handlersValid(); + /** + * Called whenever there is an actual data pending on the current + * TokenizedDataReader. Tries to feed this data to the current handler. + */ + void handleData(); + + /** + * Called whenever there is a token waiting to be processed. If possible + * tries to end a current handler with this token or to start a new handler + * with the token. + * + * @param token is the token that should be handled. + */ + void handleToken(const Token &token); + + /** + * Called by the rangeEnd() and fieldEnd() methods to end the current ranged + * command. + * + * @param rangeCommand specifies whether this should end the range of a + * command with range. + */ + void handleFieldEnd(bool rangeCommand); + public: StackImpl(ParserCallbacks &parser, ParserContext &ctx, const std::multimap &states); @@ -403,7 +453,6 @@ public: TokenId registerToken(const std::string &token) override; void unregisterToken(TokenId id) override; Variant readData() override; - bool hasData(); void pushTokens(const std::vector &tokens) override; void popTokens() override; }; @@ -492,16 +541,6 @@ std::set StackImpl::expectedCommands() return res; } -const State &StackImpl::currentState() const -{ - return stack.empty() ? States::None : stack.back().state(); -} - -std::string StackImpl::currentCommandName() const -{ - return stack.empty() ? std::string{} : stack.back().name(); -} - const State *StackImpl::findTargetState(const std::string &name) { const State *currentState = &(this->currentState()); @@ -527,6 +566,28 @@ const State *StackImpl::findTargetStateOrWildcard(const std::string &name) return targetState; } +const State &StackImpl::currentState() const +{ + return stack.empty() ? 
States::None : stack.back().state(); +} + +std::string StackImpl::currentCommandName() const +{ + return stack.empty() ? std::string{} : stack.back().name(); +} + +TokenSet StackImpl::currentTokens() const +{ + // TODO: Implement + return Tokens{}; +} + +WhitespaceMode currentWhitespaceMode() const +{ + // TODO: Implement + return WhitespaceMode::COLLAPSE; +} + HandlerInfo &StackImpl::currentInfo() { return stack.empty() ? EmptyHandlerInfo : stack.back(); @@ -536,6 +597,8 @@ HandlerInfo &StackImpl::lastInfo() return stack.size() < 2U ? EmptyHandlerInfo : stack[stack.size() - 2]; } +/* Stack helper functions */ + void StackImpl::endCurrentHandler() { if (!stack.empty()) { @@ -563,44 +626,37 @@ void StackImpl::endCurrentHandler() } } -void StackImpl::endOverdueHandlers() +void StackImpl::prepareCurrentHandler() { - if (!stack.empty()) { - // Fetch the handler info for the current top-level element - HandlerInfo &info = stack.back(); + // Repeat until a valid handler is found on the stack + while (true) { + // Fetch the handler for the current top-level element + HandlerInfo &info = currentInfo(); - // Abort if this handler currently is inside a field - if (info.inField || (!info.hadDefaultField && info.valid)) { + // If the current Handler is in a field, there is nothing to be done, + // abort + if (info.inField) { return; } - // Otherwise end the current handler - endCurrentHandler(); - } -} - -bool StackImpl::ensureHandlerIsInField() -{ - // If the current handler is not in a field (and actually has a handler) - // try to start a default field - HandlerInfo &info = currentInfo(); - if (!info.inField && info.handler != nullptr) { - // Abort if the element already had a default field or the handler is - // not valid + // If the current field already had a default field or is not valid, + // end it and repeat if (info.hadDefaultField || !info.valid) { - return false; + endCurrentHandler(); + continue; } // Try to start a new default field, abort if this did not work bool isDefault = true; if (!info.handler->fieldStart(isDefault, info.fieldIdx)) { - return false; + endCurrentHandler(); + continue; } - // Mark the field as started - info.fieldStart(true, true, true); + // Mark the field as started and return -- the field should be marked + // is implicit if this is not a field with range + info.fieldStart(true, !info.range, true, info.range); } - return true; } bool StackImpl::handlersValid() @@ -613,13 +669,105 @@ bool StackImpl::handlersValid() return true; } +void StackImpl::handleData() +{ + // Repeat until we found some handle willingly consuming the data + while (true) { + // Prepare the stack -- make sure all overdue handlers are ended and + // we currently are in an open field + prepareCurrentHandler(); + + // Fetch the current handler information + HandlerInfo &info = currentInfo(); + + // If this field should not get any data, log an error and do not + // call the "data" handler + if (!info.inValidField) { + if (!info.hadDefaultField) { + logger().error("Did not expect any data here", data); + } + return; + } + + // If we're currently in an invalid subtree, just eat the data and abort + if (!handlersValid()) { + return; + } + + // Fork the logger and set it as temporary logger for the "data" + // method. We only want to keep error messages if this was not a + // try to implicitly open a default field. 
+ LoggerFork loggerFork = logger().fork(); + info.handler->setLogger(loggerFork); + + // Pass the data to the current Handler instance + bool valid = false; + try { + valid = info.handler->data(); + } + catch (LoggableException ex) { + loggerFork.log(ex); + } + + // Reset the logger instance of the handler as soon as possible + info.handler->resetLogger(); + + // If placing the data here failed and we're currently in an + // implicitly opened field, just unroll the stack to the next field + // and try again + if (!valid && info.inImplicitDefaultField) { + endCurrentHandler(); + continue; + } + + // Commit the content of the logger fork. Do not change the valid flag. + loggerFork.commit(); + } +} + +void StackImpl::handleToken(const Token &token) { + // TODO: Implement + // Just eat them for now +} + +void StackImpl::handleFieldEnd(bool rangedCommand) +{ + // Throw away all overdue handlers, start the default field at least once + // if this has not been done yet (this is important for range commands) + prepareStack(); + + // Close all implicit default fields + while (!stack.empty()) { + HandlerInfo &info = currentInfo(); + if (!info.inImplicitDefaultField) { + break; + } + endCurrentHandler(); + } + + // Fetch the information attached to the current handler + HandlerInfo &info = currentInfo(); + if (!info.inField || stack.empty()) { + logger().error("Got field end, but there is no field here to end"); + return; + } + + // Only continue if the current handler stack is in a valid state, do not + // call the fieldEnd function if something went wrong before + if (handlersValid()) { + if (info.range && info.inDefaultField) + info.handler->fieldEnd(); + } + + // This command no longer is in a field + info.fieldEnd(); +} + +/* Class StackImpl public functions */ + void StackImpl::commandStart(const Variant &name, const Variant::mapType &args, bool range) { - // End handlers that already had a default field and are currently not - // active. - endOverdueHandlers(); - // Make sure the given identifier is valid (preventing "*" from being // malicously passed to this function) if (!Utils::isNamespacedIdentifier(name.asString())) { @@ -629,6 +777,10 @@ void StackImpl::commandStart(const Variant &name, const Variant::mapType &args, } while (true) { + // Prepare the stack -- make sure all overdue handlers are ended and + // we currently are in an open field + prepareCurrentHandler(); + // Try to find a target state for the given command, if none can be // found and the current command does not have an open field, then try // to create an empty default field, otherwise this is an exception @@ -644,12 +796,6 @@ void StackImpl::commandStart(const Variant &name, const Variant::mapType &args, } } - // Make sure we're currently inside a field - if (!ensureHandlerIsInField()) { - endCurrentHandler(); - continue; - } - // Fork the logger. 
We do not want any validation errors to skip LoggerFork loggerFork = logger().fork(); @@ -670,17 +816,14 @@ void StackImpl::commandStart(const Variant &name, const Variant::mapType &args, HandlerInfo &parentInfo = lastInfo(); HandlerInfo &info = currentInfo(); - // Call the "start" method of the handler, store the result of the - // start - // method as the validity of the handler -- do not call the start - // method + // Call the "start" method of the handler, store the result of the start + // method as the validity of the handler -- do not call the start method // if the stack is currently invalid (as this may cause further, // unwanted errors) bool validStack = handlersValid(); info.valid = false; if (validStack) { - // Canonicalize the arguments (if this has not already been - // done), + // Canonicalize the arguments (if this has not already been done), // allow additional arguments and numeric indices Variant::mapType canonicalArgs = args; targetState->arguments.validateMap(canonicalArgs, loggerFork, true, @@ -697,10 +840,8 @@ void StackImpl::commandStart(const Variant &name, const Variant::mapType &args, } // We started the command within an implicit default field and it is - // not - // valid -- remove both the new handler and the parent field from - // the - // stack + // not valid -- remove both the new handler and the parent field from + // the stack if (!info.valid && parentInfo.inImplicitDefaultField) { endCurrentHandler(); endCurrentHandler(); @@ -708,9 +849,8 @@ void StackImpl::commandStart(const Variant &name, const Variant::mapType &args, } // If we ended up here, starting the command may or may not have - // worked, - // but after all, we cannot unroll the stack any further. Update the - // "valid" flag, commit any potential error messages and return. + // worked, but after all, we cannot unroll the stack any further. Update + // the "valid" flag, commit any potential error messages and return. info.valid = parentInfo.valid && info.valid; info.range = range; loggerFork.commit(); @@ -732,106 +872,31 @@ void StackImpl::annotationEnd(const Variant &className, void StackImpl::rangeEnd() { - // TODO + handleFieldEnd(true); } void StackImpl::data(const TokenizedData &data) { - // TODO: Rewrite this function for token handling - // TODO: This loop needs to be refactored out - /*while (!data.atEnd()) { - // End handlers that already had a default field and are currently - not - // active. - endOverdueHandlers(); - - const bool hasNonWhitespaceText = data.hasNonWhitespaceText(); - - // Check whether there is any command the data can be sent to -- if - not, - // make sure the data actually is data - if (stack.empty()) { - if (hasNonWhitespaceText) { - throw LoggableException("No command here to receive data.", - data); - } - return; - } - - // Fetch the current command handler information - HandlerInfo &info = currentInfo(); - - // Make sure the current handler has an open field - if (!ensureHandlerIsInField()) { - endCurrentHandler(); - continue; - } - - // If this field should not get any data, log an error and do not - call - // the "data" handler - if (!info.inValidField) { - // If the "hadDefaultField" flag is set, we already issued an - error - // message - if (!info.hadDefaultField) { - if (hasNonWhitespaceText) { - logger().error("Did not expect any data here", data); - } - return; - } - } - - if (handlersValid() && info.inValidField) { - // Fork the logger and set it as temporary logger for the - "start" - // method. 
We only want to keep error messages if this was not a - try - // to implicitly open a default field. - LoggerFork loggerFork = logger().fork(); - info.handler->setLogger(loggerFork); - - // Pass the data to the current Handler instance - bool valid = false; - try { - // Create a fork of the TokenizedData and let the handler - work - // on it - TokenizedData dataFork = data; - valid = info.handler->data(dataFork); - - // If the data was validly handled by the handler, commit - the - // change - if (valid) { - data = dataFork; - } - } - catch (LoggableException ex) { - loggerFork.log(ex); - } - - // Reset the logger instance as soon as possible - info.handler->resetLogger(); - - // If placing the data here failed and we're currently in an - // implicitly opened field, just unroll the stack to the next - field - // and try again - if (!valid && info.inImplicitDefaultField) { - endCurrentHandler(); - continue; - } - - // Commit the content of the logger fork. Do not change the - valid - // flag. - loggerFork.commit(); - } - - // There was no reason to unroll the stack any further, so continue - return; - }*/ + // Fetch a reader for the given tokenized data instance. + TokenizedDataReader reader = data.reader(); + + // Use the GuardedTemporaryPointer to make sure that the member variable + // dataReader is resetted to nullptr once this scope is left. + GuardedTemporaryPointer ptr(&reader, &dataReader); + + // Peek a token from the reader, repeat until all tokens have been read + Token token; + while (reader.peek(token, currentTokens(), currentWhitespaceMode())) { + // Handle the token as text data or as actual token + if (token.id == Tokens::Data) { + handleData(); + } else { + handleToken(token); + } + + // Consume the peeked token + reader.consumePeek(); + } } void StackImpl::fieldStart(bool isDefault) @@ -853,8 +918,7 @@ void StackImpl::fieldStart(bool isDefault) } // If the handler already had a default field we cannot start a new - // field - // (the default field always is the last field) -- mark the command as + // field (the default field always is the last field) -- mark the command as // invalid if (info.hadDefaultField) { logger().error(std::string("Got field start, but command \"") + @@ -862,8 +926,7 @@ void StackImpl::fieldStart(bool isDefault) std::string("\" does not have any more fields")); } - // Copy the isDefault flag to a local variable, the fieldStart method - // will + // Copy the isDefault flag to a local variable, the fieldStart method will // write into this variable bool defaultField = isDefault; @@ -891,40 +954,11 @@ void StackImpl::fieldStart(bool isDefault) void StackImpl::fieldEnd() { - // Unroll the stack until the next explicitly open field - while (!stack.empty()) { - HandlerInfo &info = currentInfo(); - if (info.inField && !info.inImplicitDefaultField) { - break; - } - endCurrentHandler(); - } - - // Fetch the information attached to the current handler - HandlerInfo &info = currentInfo(); - if (!info.inField || info.inImplicitDefaultField || stack.empty()) { - logger().error( - "Got field end, but there is no command for which to end the " - "field."); - return; - } - - // Only continue if the current handler stack is in a valid state, do - // not - // call the fieldEnd function if something went wrong before - if (handlersValid() && !info.hadDefaultField && info.inValidField) { - try { - info.handler->fieldEnd(); - } - catch (LoggableException ex) { - logger().log(ex); - } - } - - // This command no longer is in a field - info.fieldEnd(); + 
handleFieldEnd(false); } +/* Class StackImpl HandlerCallbacks */ + TokenId StackImpl::registerToken(const std::string &token) { return tokenRegistry.registerToken(token); @@ -950,14 +984,7 @@ Variant StackImpl::readData() if (dataReader != nullptr) { TokenizedDataReaderFork dataReaderFork = dataReader->fork(); Token token; - - // TODO: Use correct token set - TokenSet tokens; - - // TODO: Use correct whitespace mode - WhitespaceMode mode = WhitespaceMode::COLLAPSE; - - dataReaderFork.read(token, tokens, mode); + dataReaderFork.read(token, currentTokens(), currentWhitespaceMode()); if (token.id == Tokens::Data) { Variant res = Variant::fromString(token.content); res.setLocation(token.getLocation()); @@ -967,8 +994,6 @@ Variant StackImpl::readData() return Variant{}; } -bool StackImpl::hasData() { return readData() != nullptr; } - /* Class Stack */ Stack::Stack(ParserCallbacks &parser, ParserContext &ctx, @@ -1013,5 +1038,7 @@ void Stack::fieldStart(bool isDefault) { impl->fieldStart(isDefault); } void Stack::fieldEnd() { impl->fieldEnd(); } void Stack::data(const TokenizedData &data) { impl->data(data); } + +void Stack::data(const std::string &str) { data(TokenizedData(str)); } } } diff --git a/src/core/parser/stack/Stack.hpp b/src/core/parser/stack/Stack.hpp index de281d4..1de7cff 100644 --- a/src/core/parser/stack/Stack.hpp +++ b/src/core/parser/stack/Stack.hpp @@ -150,13 +150,24 @@ public: /** * Function that should be called whenever character data is found in the - * input stream. May only be called if the currently is a command on the + * input stream. May only be called if there currently is a command on the * stack. * * @param data is a TokenizedData instance containing the pre-segmented data * that should be read. */ void data(const TokenizedData &data); + + /** + * Function that may be called whenever character data is found in the + * input stream. May only be called if the currently is a command on the + * stack. This method is mainly intended for unit testing. Pass a + * TokenizedData instance to the + * + * @param str is a string containing the data that should be passed to the + * tokenizer. + */ + void data(const std::string &str); }; } } diff --git a/src/core/parser/utils/TokenizedData.cpp b/src/core/parser/utils/TokenizedData.cpp index c3c4f98..d8a8b37 100644 --- a/src/core/parser/utils/TokenizedData.cpp +++ b/src/core/parser/utils/TokenizedData.cpp @@ -29,8 +29,7 @@ namespace ousia { /** * Maximum token length. */ -constexpr TokenLength MaxTokenLength = - std::numeric_limits::max(); +constexpr TokenLength MaxTokenLength = std::numeric_limits::max(); namespace { /** @@ -510,6 +509,13 @@ TokenizedData::TokenizedData(SourceId sourceId) { } +TokenizedData::TokenizedData(const std::string &data, SourceOffset offsStart, + SourceId sourceId) + : TokenizedData(sourceId) +{ + append(data, offsStart); +} + TokenizedData::~TokenizedData() {} size_t TokenizedData::append(const std::string &data, SourceOffset offsStart, diff --git a/src/core/parser/utils/TokenizedData.hpp b/src/core/parser/utils/TokenizedData.hpp index b72ca02..bc937f2 100644 --- a/src/core/parser/utils/TokenizedData.hpp +++ b/src/core/parser/utils/TokenizedData.hpp @@ -95,6 +95,18 @@ public: */ TokenizedData(SourceId sourceId); + /** + * Creates a new instance of TokenizedData, takes a SourceId and an initial + * string buffer. + * + * @param data is the string that should be appended to the buffer. + * @param offsStart is the start offset in bytes in the input file. 
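Taken together with the reader() / read() / peek() methods introduced earlier in this series, the new convenience constructor allows client code roughly like the sketch below. This is only a usage sketch: the include path and the brace-initialization of TokenSet are assumptions, the token id "star" is illustrative (it would normally come from a token registry), and dumpTokens is not part of the patch.

#include "TokenizedData.hpp"

using namespace ousia;

void dumpTokens()
{
    // Build a buffer via the new convenience constructor and mark the last
    // character ('*') as a one-character token.
    TokenizedData data("word*");
    const TokenId star = 42;  // illustrative; normally assigned by a registry
    data.mark(star, 1);

    // A reader keeps its own cursor; the TokenizedData instance itself stays
    // untouched and can be read multiple times.
    TokenizedDataReader reader = data.reader();
    Token token;
    while (!reader.atEnd() &&
           reader.read(token, TokenSet{star}, WhitespaceMode::COLLAPSE)) {
        if (token.id == Tokens::Data) {
            // "word" arrives here as collapsed character data
        } else if (token.id == star) {
            // the marked "*" arrives as a token with id "star"
        }
    }
}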
+ * @param sourceId is the source identifier that should be used for + * constructing the location when returning tokens. + */ + TokenizedData(const std::string &data, SourceOffset offsStart = 0, + SourceId sourceId = InvalidSourceId); + /** * Destructor. Needs to be defined explicitly for freeing a shared pointer * of the incomplete TokenizedDataImpl type. diff --git a/test/core/parser/stack/StackTest.cpp b/test/core/parser/stack/StackTest.cpp index 83966d5..8f6c4df 100644 --- a/test/core/parser/stack/StackTest.cpp +++ b/test/core/parser/stack/StackTest.cpp @@ -21,6 +21,7 @@ #include #include +#include #include #include #include @@ -38,70 +39,69 @@ static StandaloneEnvironment env(logger); namespace { +class Parser : public ParserCallbacks { + TokenId registerToken(const std::string &token) override + { + return Tokens::Empty; + } + + void unregisterToken(TokenId id) override + { + // Do nothing here + } +}; + +static Parser parser; + struct Tracker { - int startCount; + int startCommandCount; + int startAnnotationCount; + int startTokenCount; + int endTokenCount; int endCount; int fieldStartCount; int fieldEndCount; - int annotationStartCount; - int annotationEndCount; int dataCount; - Variant::mapType startArgs; - bool fieldStartIsDefault; - size_t fieldStartIdx; - Variant annotationStartClassName; - Variant::mapType annotationStartArgs; - Variant annotationEndClassName; - Variant annotationEndElementName; - TokenizedData dataData; - - bool startResult; - bool fieldStartSetIsDefault; + bool startCommandResult; + bool startAnnotationResult; + bool startTokenResult; + Handler::EndTokenResult endTokenResult; bool fieldStartResult; - bool annotationStartResult; - bool annotationEndResult; bool dataResult; Tracker() { reset(); } void reset() { - startCount = 0; + startCommandCount = 0; + startAnnotationCount = 0; + startTokenCount = 0; + endTokenCount = 0; endCount = 0; fieldStartCount = 0; fieldEndCount = 0; - annotationStartCount = 0; - annotationEndCount = 0; dataCount = 0; - startArgs = Variant::mapType{}; - fieldStartIsDefault = false; - fieldStartIdx = 0; - annotationStartClassName = Variant::fromString(std::string{}); - annotationStartArgs = Variant::mapType{}; - annotationEndClassName = Variant::fromString(std::string{}); - annotationEndElementName = Variant::fromString(std::string{}); - dataData = TokenizedData(); - - startResult = true; - fieldStartSetIsDefault = false; + startCommandResult = true; + startAnnotationResult = true; + startTokenResult = true; + endTokenResult = Handler::EndTokenResult::ENDED_THIS; fieldStartResult = true; - annotationStartResult = true; - annotationEndResult = true; dataResult = true; } - void expect(int startCount, int endCount, int fieldStartCount, - int fieldEndCount, int annotationStartCount, - int annotationEndCount, int dataCount) + void expect(int startCommandCount, int endCount, int fieldStartCount, + int fieldEndCount, int dataCount, int startAnnotationCount = 0, + int startTokenCount = 0, int endTokenCount = 0) { - EXPECT_EQ(startCount, this->startCount); + EXPECT_EQ(startCommandCount, this->startCommandCount); + EXPECT_EQ(startAnnotationCount, this->startAnnotationCount); + EXPECT_EQ(startTokenCount, this->startTokenCount); + EXPECT_EQ(endTokenCount, this->endTokenCount); EXPECT_EQ(endCount, this->endCount); EXPECT_EQ(fieldStartCount, this->fieldStartCount); EXPECT_EQ(fieldEndCount, this->fieldEndCount); - EXPECT_EQ(annotationStartCount, this->annotationStartCount); - EXPECT_EQ(annotationEndCount, this->annotationEndCount); 
EXPECT_EQ(dataCount, this->dataCount); } }; @@ -113,55 +113,44 @@ private: TestHandler(const HandlerData &handlerData) : Handler(handlerData) {} public: - bool start(Variant::mapType &args) override + bool startCommand(Variant::mapType &args) override { - tracker.startCount++; - tracker.startArgs = args; - if (!tracker.startResult) { - logger().error( - "The TestHandler was told not to allow a field start. So it " - "doesn't. The TestHandler always obeys its master."); - } - return tracker.startResult; + tracker.startCommandCount++; + return tracker.startCommandResult; } - void end() override { tracker.endCount++; } - - bool fieldStart(bool &isDefault, size_t fieldIdx) override + bool startAnnotation(Variant::mapType &args, + AnnotationType annotationType) override { - tracker.fieldStartCount++; - tracker.fieldStartIsDefault = isDefault; - tracker.fieldStartIdx = fieldIdx; - if (tracker.fieldStartSetIsDefault) { - isDefault = true; - } - return tracker.fieldStartResult; + tracker.startAnnotationCount++; + return tracker.startAnnotationResult; } - void fieldEnd() override { tracker.fieldEndCount++; } + bool startToken(Handle node) override + { + tracker.startTokenCount++; + return tracker.startTokenResult; + } - bool annotationStart(const Variant &className, - Variant::mapType &args) override + EndTokenResult endToken(const Token &token, Handle node) override { - tracker.annotationStartCount++; - tracker.annotationStartClassName = className; - tracker.annotationStartArgs = args; - return tracker.annotationStartResult; + tracker.endTokenCount++; + return tracker.endTokenResult; } - bool annotationEnd(const Variant &className, - const Variant &elementName) override + void end() override { tracker.endCount++; } + + bool fieldStart(bool &isDefault, size_t fieldIdx) override { - tracker.annotationEndCount++; - tracker.annotationEndClassName = className; - tracker.annotationEndElementName = elementName; - return tracker.annotationEndResult; + tracker.fieldStartCount++; + return tracker.fieldStartResult; } - bool data(TokenizedData &data) override + void fieldEnd() override { tracker.fieldEndCount++; } + + bool data() override { tracker.dataCount++; - tracker.dataData = data; return tracker.dataResult; } @@ -205,544 +194,544 @@ TEST(Stack, basicTest) tracker.reset(); logger.reset(); { - Stack s{env.context, States::TestHandlers}; + Stack s{parser, env.context, States::TestHandlers}; EXPECT_EQ("", s.currentCommandName()); EXPECT_EQ(&States::None, &s.currentState()); - s.command("document", {}); + s.commandStart("document", {}, true); s.fieldStart(true); s.data("test1"); EXPECT_EQ("document", s.currentCommandName()); EXPECT_EQ(&States::Document, &s.currentState()); - tracker.expect(1, 0, 1, 0, 0, 0, 1); // sc, ec, fsc, fse, asc, aec, dc + tracker.expect(1, 0, 1, 0, 1); // scc, ec, fsc, fse, dc, sac, stc, etc - s.command("body", {}); + s.commandStart("body", {}, true); s.fieldStart(true); s.data("test2"); EXPECT_EQ("body", s.currentCommandName()); EXPECT_EQ(&States::Body, &s.currentState()); - tracker.expect(2, 0, 2, 0, 0, 0, 2); // sc, ec, fsc, fse, asc, aec, dc + tracker.expect(2, 0, 2, 0, 2); // scc, ec, fsc, fse, dc, sac, stc, etc - s.command("inner", {}); + s.commandStart("inner", {}, true); s.fieldStart(true); EXPECT_EQ("inner", s.currentCommandName()); EXPECT_EQ(&States::BodyChildren, &s.currentState()); s.fieldEnd(); - tracker.expect(3, 0, 3, 1, 0, 0, 2); // sc, ec, fsc, fse, asc, aec, dc + tracker.expect(3, 0, 3, 1, 2); // scc, ec, fsc, fse, dc, sac, stc, etc s.fieldEnd(); EXPECT_EQ("body", 
s.currentCommandName()); EXPECT_EQ(&States::Body, &s.currentState()); - tracker.expect(3, 1, 3, 2, 0, 0, 2); // sc, ec, fsc, fse, asc, aec, dc + tracker.expect(3, 1, 3, 2, 2); // scc, ec, fsc, fse, dc, sac, stc, etc - s.command("body", {}); + s.commandStart("body", {}, true); EXPECT_EQ("body", s.currentCommandName()); EXPECT_EQ(&States::Body, &s.currentState()); - tracker.expect(4, 2, 3, 2, 0, 0, 2); // sc, ec, fsc, fse, asc, aec, dc + tracker.expect(4, 2, 3, 2, 2); // scc, ec, fsc, fse, dc, sac, stc, etc s.fieldStart(true); s.data("test3"); EXPECT_EQ("body", s.currentCommandName()); EXPECT_EQ(&States::Body, &s.currentState()); s.fieldEnd(); - tracker.expect(4, 2, 4, 3, 0, 0, 3); // sc, ec, fsc, fse, asc, aec, dc + tracker.expect(4, 2, 4, 3, 3); // scc, ec, fsc, fse, dc, sac, stc, etc EXPECT_EQ("body", s.currentCommandName()); EXPECT_EQ(&States::Body, &s.currentState()); s.fieldEnd(); - tracker.expect(4, 3, 4, 4, 0, 0, 3); // sc, ec, fsc, fse, asc, aec, dc + tracker.expect(4, 3, 4, 4, 3); // scc, ec, fsc, fse, dc, sac, stc, etc EXPECT_EQ("document", s.currentCommandName()); EXPECT_EQ(&States::Document, &s.currentState()); } - tracker.expect(4, 4, 4, 4, 0, 0, 3); // sc, ec, fsc, fse, asc, aec, dc + tracker.expect(4, 4, 4, 4, 3); // scc, ec, fsc, fse, dc, sac, stc, etc ASSERT_FALSE(logger.hasError()); } - +/* TEST(Stack, errorInvalidCommands) { - Stack s{env.context, States::TestHandlers}; - tracker.reset(); - EXPECT_THROW(s.command("body", {}), LoggableException); - s.command("document", {}); - s.fieldStart(true); - EXPECT_THROW(s.command("document", {}), LoggableException); - s.command("empty", {}); - s.fieldStart(true); - EXPECT_THROW(s.command("body", {}), LoggableException); - s.command("special", {}); - s.fieldStart(true); - s.fieldEnd(); - s.fieldEnd(); - s.fieldEnd(); - - logger.reset(); - s.fieldEnd(); - ASSERT_TRUE(logger.hasError()); - - EXPECT_THROW(s.data("test"), LoggableException); - EXPECT_EQ(&States::None, &s.currentState()); + Stack s{env.context, States::TestHandlers}; + tracker.reset(); + EXPECT_THROW(s.command("body", {}), LoggableException); + s.command("document", {}); + s.fieldStart(true); + EXPECT_THROW(s.command("document", {}), LoggableException); + s.command("empty", {}); + s.fieldStart(true); + EXPECT_THROW(s.command("body", {}), LoggableException); + s.command("special", {}); + s.fieldStart(true); + s.fieldEnd(); + s.fieldEnd(); + s.fieldEnd(); + + logger.reset(); + s.fieldEnd(); + ASSERT_TRUE(logger.hasError()); + + EXPECT_THROW(s.data("test"), LoggableException); + EXPECT_EQ(&States::None, &s.currentState()); } TEST(Stack, validation) { - Stack s{env.context, States::TestHandlers}; - tracker.reset(); - logger.reset(); - - s.command("arguments", {}); - EXPECT_TRUE(logger.hasError()); - s.fieldStart(true); - s.fieldEnd(); - - logger.reset(); - s.command("arguments", {{"a", 5}}); - EXPECT_TRUE(logger.hasError()); - s.fieldStart(true); - s.fieldEnd(); - - logger.reset(); - s.command("arguments", {{"a", 5}, {"b", "test"}}); - EXPECT_FALSE(logger.hasError()); - s.fieldStart(true); - s.fieldEnd(); + Stack s{env.context, States::TestHandlers}; + tracker.reset(); + logger.reset(); + + s.command("arguments", {}); + EXPECT_TRUE(logger.hasError()); + s.fieldStart(true); + s.fieldEnd(); + + logger.reset(); + s.command("arguments", {{"a", 5}}); + EXPECT_TRUE(logger.hasError()); + s.fieldStart(true); + s.fieldEnd(); + + logger.reset(); + s.command("arguments", {{"a", 5}, {"b", "test"}}); + EXPECT_FALSE(logger.hasError()); + s.fieldStart(true); + s.fieldEnd(); } TEST(Stack, 
invalidCommandName) { - tracker.reset(); - logger.reset(); - - Stack s{env.context, States::AnyHandlers}; - s.command("a", {}); - tracker.expect(1, 0, 0, 0, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc - s.fieldStart(true); - s.fieldEnd(); - tracker.expect(1, 0, 1, 1, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc - - s.command("a_", {}); - tracker.expect(2, 1, 1, 1, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc - s.fieldStart(true); - s.fieldEnd(); - tracker.expect(2, 1, 2, 2, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc - - s.command("a_:b", {}); - tracker.expect(3, 2, 2, 2, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc - s.fieldStart(true); - s.fieldEnd(); - tracker.expect(3, 2, 3, 3, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc - - ASSERT_THROW(s.command("_a", {}), LoggableException); - tracker.expect(3, 3, 3, 3, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc - - ASSERT_THROW(s.command("a:", {}), LoggableException); - tracker.expect(3, 3, 3, 3, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc - - ASSERT_THROW(s.command("a:_b", {}), LoggableException); - tracker.expect(3, 3, 3, 3, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc + tracker.reset(); + logger.reset(); + + Stack s{env.context, States::AnyHandlers}; + s.command("a", {}); + tracker.expect(1, 0, 0, 0, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc + s.fieldStart(true); + s.fieldEnd(); + tracker.expect(1, 0, 1, 1, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc + + s.command("a_", {}); + tracker.expect(2, 1, 1, 1, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc + s.fieldStart(true); + s.fieldEnd(); + tracker.expect(2, 1, 2, 2, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc + + s.command("a_:b", {}); + tracker.expect(3, 2, 2, 2, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc + s.fieldStart(true); + s.fieldEnd(); + tracker.expect(3, 2, 3, 3, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc + + ASSERT_THROW(s.command("_a", {}), LoggableException); + tracker.expect(3, 3, 3, 3, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc + + ASSERT_THROW(s.command("a:", {}), LoggableException); + tracker.expect(3, 3, 3, 3, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc + + ASSERT_THROW(s.command("a:_b", {}), LoggableException); + tracker.expect(3, 3, 3, 3, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc } TEST(Stack, multipleFields) { - tracker.reset(); - logger.reset(); - { - Stack s{env.context, States::AnyHandlers}; - - s.command("a", {{"a", false}}); - tracker.expect(1, 0, 0, 0, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc - EXPECT_EQ("a", s.currentCommandName()); - EXPECT_EQ(Variant::mapType({{"a", false}}), tracker.startArgs); - - s.fieldStart(false); - tracker.expect(1, 0, 1, 0, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc - EXPECT_FALSE(tracker.fieldStartIsDefault); - EXPECT_EQ(0U, tracker.fieldStartIdx); - - s.data("test"); - tracker.expect(1, 0, 1, 0, 0, 0, 1); // sc, ec, fsc, fse, asc, aec, dc - EXPECT_EQ("test", tracker.dataData.text().asString()); - - s.fieldEnd(); - tracker.expect(1, 0, 1, 1, 0, 0, 1); // sc, ec, fsc, fse, asc, aec, dc - - s.fieldStart(false); - tracker.expect(1, 0, 2, 1, 0, 0, 1); // sc, ec, fsc, fse, asc, aec, dc - EXPECT_FALSE(tracker.fieldStartIsDefault); - EXPECT_EQ(1U, tracker.fieldStartIdx); - - s.data("test2"); - tracker.expect(1, 0, 2, 1, 0, 0, 2); // sc, ec, fsc, fse, asc, aec, dc - EXPECT_EQ("test2", tracker.dataData.text().asString()); - - s.fieldEnd(); - tracker.expect(1, 0, 2, 2, 0, 0, 2); // sc, ec, fsc, fse, asc, aec, dc - - s.fieldStart(true); - tracker.expect(1, 0, 3, 2, 0, 0, 2); // sc, ec, fsc, fse, asc, aec, dc - 
		EXPECT_TRUE(tracker.fieldStartIsDefault);
-		EXPECT_EQ(2U, tracker.fieldStartIdx);
-
-		s.data("test3");
-		tracker.expect(1, 0, 3, 2, 0, 0, 3); // sc, ec, fsc, fse, asc, aec, dc
-		EXPECT_EQ("test3", tracker.dataData.text().asString());
-
-		s.fieldEnd();
-		tracker.expect(1, 0, 3, 3, 0, 0, 3); // sc, ec, fsc, fse, asc, aec, dc
-	}
-	tracker.expect(1, 1, 3, 3, 0, 0, 3); // sc, ec, fsc, fse, asc, aec, dc
-	ASSERT_FALSE(logger.hasError());
+	tracker.reset();
+	logger.reset();
+	{
+		Stack s{env.context, States::AnyHandlers};
+
+		s.command("a", {{"a", false}});
+		tracker.expect(1, 0, 0, 0, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
+		EXPECT_EQ("a", s.currentCommandName());
+		EXPECT_EQ(Variant::mapType({{"a", false}}), tracker.startArgs);
+
+		s.fieldStart(false);
+		tracker.expect(1, 0, 1, 0, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
+		EXPECT_FALSE(tracker.fieldStartIsDefault);
+		EXPECT_EQ(0U, tracker.fieldStartIdx);
+
+		s.data("test");
+		tracker.expect(1, 0, 1, 0, 0, 0, 1); // sc, ec, fsc, fse, asc, aec, dc
+		EXPECT_EQ("test", tracker.dataData.text().asString());
+
+		s.fieldEnd();
+		tracker.expect(1, 0, 1, 1, 0, 0, 1); // sc, ec, fsc, fse, asc, aec, dc
+
+		s.fieldStart(false);
+		tracker.expect(1, 0, 2, 1, 0, 0, 1); // sc, ec, fsc, fse, asc, aec, dc
+		EXPECT_FALSE(tracker.fieldStartIsDefault);
+		EXPECT_EQ(1U, tracker.fieldStartIdx);
+
+		s.data("test2");
+		tracker.expect(1, 0, 2, 1, 0, 0, 2); // sc, ec, fsc, fse, asc, aec, dc
+		EXPECT_EQ("test2", tracker.dataData.text().asString());
+
+		s.fieldEnd();
+		tracker.expect(1, 0, 2, 2, 0, 0, 2); // sc, ec, fsc, fse, asc, aec, dc
+
+		s.fieldStart(true);
+		tracker.expect(1, 0, 3, 2, 0, 0, 2); // sc, ec, fsc, fse, asc, aec, dc
+		EXPECT_TRUE(tracker.fieldStartIsDefault);
+		EXPECT_EQ(2U, tracker.fieldStartIdx);
+
+		s.data("test3");
+		tracker.expect(1, 0, 3, 2, 0, 0, 3); // sc, ec, fsc, fse, asc, aec, dc
+		EXPECT_EQ("test3", tracker.dataData.text().asString());
+
+		s.fieldEnd();
+		tracker.expect(1, 0, 3, 3, 0, 0, 3); // sc, ec, fsc, fse, asc, aec, dc
+	}
+	tracker.expect(1, 1, 3, 3, 0, 0, 3); // sc, ec, fsc, fse, asc, aec, dc
+	ASSERT_FALSE(logger.hasError());
 }
 
 TEST(Stack, implicitDefaultFieldOnNewCommand)
 {
-	tracker.reset();
-	logger.reset();
-	{
-		Stack s{env.context, States::AnyHandlers};
-
-		s.command("a", {});
-		tracker.expect(1, 0, 0, 0, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
-
-		s.command("b", {});
-		tracker.expect(2, 0, 1, 0, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
-	}
-	tracker.expect(2, 2, 1, 1, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
-	ASSERT_FALSE(logger.hasError());
+	tracker.reset();
+	logger.reset();
+	{
+		Stack s{env.context, States::AnyHandlers};
+
+		s.command("a", {});
+		tracker.expect(1, 0, 0, 0, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
+
+		s.command("b", {});
+		tracker.expect(2, 0, 1, 0, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
+	}
+	tracker.expect(2, 2, 1, 1, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
+	ASSERT_FALSE(logger.hasError());
 }
 
 TEST(Stack, implicitDefaultFieldOnNewCommandWithExplicitDefaultField)
 {
-	tracker.reset();
-	logger.reset();
-	{
-		Stack s{env.context, States::AnyHandlers};
-
-		s.command("a", {});
-		tracker.expect(1, 0, 0, 0, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
-		ASSERT_EQ("a", s.currentCommandName());
-
-		s.command("b", {});
-		tracker.expect(2, 0, 1, 0, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
-		ASSERT_EQ("b", s.currentCommandName());
-		s.fieldStart(true);
-		s.fieldEnd();
-		tracker.expect(2, 0, 2, 1, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
-		ASSERT_EQ("b", s.currentCommandName());
-	}
-	tracker.expect(2, 2, 2, 2, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
-	ASSERT_FALSE(logger.hasError());
+	tracker.reset();
+	logger.reset();
+	{
+		Stack s{env.context, States::AnyHandlers};
+
+		s.command("a", {});
+		tracker.expect(1, 0, 0, 0, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
+		ASSERT_EQ("a", s.currentCommandName());
+
+		s.command("b", {});
+		tracker.expect(2, 0, 1, 0, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
+		ASSERT_EQ("b", s.currentCommandName());
+		s.fieldStart(true);
+		s.fieldEnd();
+		tracker.expect(2, 0, 2, 1, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
+		ASSERT_EQ("b", s.currentCommandName());
+	}
+	tracker.expect(2, 2, 2, 2, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
+	ASSERT_FALSE(logger.hasError());
 }
 
 TEST(Stack, noImplicitDefaultFieldOnIncompatibleCommand)
 {
-	tracker.reset();
-	logger.reset();
-	{
-		Stack s{env.context, States::AnyHandlers};
-
-		s.command("a", {});
-		tracker.expect(1, 0, 0, 0, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
-		ASSERT_EQ("a", s.currentCommandName());
-
-		tracker.fieldStartResult = false;
-		s.command("b", {});
-		tracker.expect(2, 1, 1, 0, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
-		ASSERT_EQ("b", s.currentCommandName());
-	}
-	tracker.expect(2, 2, 1, 0, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
-	ASSERT_FALSE(logger.hasError());
+	tracker.reset();
+	logger.reset();
+	{
+		Stack s{env.context, States::AnyHandlers};
+
+		s.command("a", {});
+		tracker.expect(1, 0, 0, 0, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
+		ASSERT_EQ("a", s.currentCommandName());
+
+		tracker.fieldStartResult = false;
+		s.command("b", {});
+		tracker.expect(2, 1, 1, 0, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
+		ASSERT_EQ("b", s.currentCommandName());
+	}
+	tracker.expect(2, 2, 1, 0, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
+	ASSERT_FALSE(logger.hasError());
 }
 
 TEST(Stack, noImplicitDefaultFieldIfDefaultFieldGiven)
 {
-	tracker.reset();
-	logger.reset();
-	{
-		Stack s{env.context, States::AnyHandlers};
-
-		s.command("a", {});
-		tracker.expect(1, 0, 0, 0, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
-		ASSERT_EQ("a", s.currentCommandName());
-		s.fieldStart(true);
-		tracker.expect(1, 0, 1, 0, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
-		ASSERT_EQ("a", s.currentCommandName());
-		s.fieldEnd();
-		tracker.expect(1, 0, 1, 1, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
-		ASSERT_EQ("a", s.currentCommandName());
-
-		s.command("b", {});
-		tracker.expect(2, 1, 1, 1, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
-		ASSERT_EQ("b", s.currentCommandName());
-	}
-	tracker.expect(2, 2, 1, 1, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
-	ASSERT_FALSE(logger.hasError());
+	tracker.reset();
+	logger.reset();
+	{
+		Stack s{env.context, States::AnyHandlers};
+
+		s.command("a", {});
+		tracker.expect(1, 0, 0, 0, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
+		ASSERT_EQ("a", s.currentCommandName());
+		s.fieldStart(true);
+		tracker.expect(1, 0, 1, 0, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
+		ASSERT_EQ("a", s.currentCommandName());
+		s.fieldEnd();
+		tracker.expect(1, 0, 1, 1, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
+		ASSERT_EQ("a", s.currentCommandName());
+
+		s.command("b", {});
+		tracker.expect(2, 1, 1, 1, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
+		ASSERT_EQ("b", s.currentCommandName());
+	}
+	tracker.expect(2, 2, 1, 1, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
+	ASSERT_FALSE(logger.hasError());
 }
 
 TEST(Stack, noEndIfStartFails)
 {
-	tracker.reset();
-	logger.reset();
-	{
-		Stack s{env.context, States::AnyHandlers};
-
-		s.command("a", {});
-		tracker.expect(1, 0, 0, 0, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
-		ASSERT_EQ("a", s.currentCommandName());
-
-		tracker.startResult = false;
-		s.command("b", {});
-		tracker.expect(3, 1, 1, 1, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
-		ASSERT_EQ("b", s.currentCommandName());
-	}
-	tracker.expect(3, 1, 1, 1, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
-	ASSERT_TRUE(logger.hasError());
+	tracker.reset();
+	logger.reset();
+	{
+		Stack s{env.context, States::AnyHandlers};
+
+		s.command("a", {});
+		tracker.expect(1, 0, 0, 0, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
+		ASSERT_EQ("a", s.currentCommandName());
+
+		tracker.startResult = false;
+		s.command("b", {});
+		tracker.expect(3, 1, 1, 1, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
+		ASSERT_EQ("b", s.currentCommandName());
+	}
+	tracker.expect(3, 1, 1, 1, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
+	ASSERT_TRUE(logger.hasError());
 }
 
 TEST(Stack, implicitDefaultFieldOnData)
 {
-	tracker.reset();
-	logger.reset();
-	{
-		Stack s{env.context, States::AnyHandlers};
-
-		s.command("a", {});
-		tracker.expect(1, 0, 0, 0, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
-
-		s.data("test");
-		tracker.expect(1, 0, 1, 0, 0, 0, 1); // sc, ec, fsc, fse, asc, aec, dc
-	}
-	tracker.expect(1, 1, 1, 1, 0, 0, 1); // sc, ec, fsc, fse, asc, aec, dc
-	ASSERT_FALSE(logger.hasError());
+	tracker.reset();
+	logger.reset();
+	{
+		Stack s{env.context, States::AnyHandlers};
+
+		s.command("a", {});
+		tracker.expect(1, 0, 0, 0, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
+
+		s.data("test");
+		tracker.expect(1, 0, 1, 0, 0, 0, 1); // sc, ec, fsc, fse, asc, aec, dc
+	}
+	tracker.expect(1, 1, 1, 1, 0, 0, 1); // sc, ec, fsc, fse, asc, aec, dc
+	ASSERT_FALSE(logger.hasError());
 }
 
 TEST(Stack, autoFieldEnd)
 {
-	tracker.reset();
-	logger.reset();
-
-	{
-		Stack s{env.context, States::AnyHandlers};
-		s.command("a", {});
-		tracker.expect(1, 0, 0, 0, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
-	}
-	tracker.expect(1, 1, 0, 0, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
-	ASSERT_FALSE(logger.hasError());
+	tracker.reset();
+	logger.reset();
+
+	{
+		Stack s{env.context, States::AnyHandlers};
+		s.command("a", {});
+		tracker.expect(1, 0, 0, 0, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
+	}
+	tracker.expect(1, 1, 0, 0, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
+	ASSERT_FALSE(logger.hasError());
 }
 
 TEST(Stack, autoImplicitFieldEnd)
 {
-	tracker.reset();
-	logger.reset();
-
-	{
-		Stack s{env.context, States::AnyHandlers};
-		s.command("a", {});
-		s.command("b", {});
-		s.command("c", {});
-		s.command("d", {});
-		s.command("e", {});
-		s.fieldStart(true);
-		s.fieldEnd();
-		tracker.expect(5, 0, 5, 1, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
-	}
-	tracker.expect(5, 5, 5, 5, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
-	ASSERT_FALSE(logger.hasError());
+	tracker.reset();
+	logger.reset();
+
+	{
+		Stack s{env.context, States::AnyHandlers};
+		s.command("a", {});
+		s.command("b", {});
+		s.command("c", {});
+		s.command("d", {});
+		s.command("e", {});
+		s.fieldStart(true);
+		s.fieldEnd();
+		tracker.expect(5, 0, 5, 1, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
+	}
+	tracker.expect(5, 5, 5, 5, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
+	ASSERT_FALSE(logger.hasError());
 }
 
 TEST(Stack, invalidDefaultField)
 {
-	tracker.reset();
-	logger.reset();
-
-	{
-		Stack s{env.context, States::AnyHandlers};
-		s.command("a", {});
-		tracker.fieldStartResult = false;
-		s.fieldStart(true);
-		s.fieldEnd();
-		tracker.expect(1, 0, 1, 0, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
-	}
-	tracker.expect(1, 1, 1, 0, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
-	ASSERT_FALSE(logger.hasError());
+	tracker.reset();
+	logger.reset();
+
+	{
+		Stack s{env.context, States::AnyHandlers};
+		s.command("a", {});
+		tracker.fieldStartResult = false;
+		s.fieldStart(true);
+		s.fieldEnd();
+		tracker.expect(1, 0, 1, 0, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
+	}
+	tracker.expect(1, 1, 1, 0, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
+	ASSERT_FALSE(logger.hasError());
 }
 
 TEST(Stack, errorInvalidDefaultFieldData)
 {
-	tracker.reset();
-	logger.reset();
-
-	{
-		Stack s{env.context, States::AnyHandlers};
-		s.command("a", {});
-		tracker.fieldStartResult = false;
-		s.fieldStart(true);
-		ASSERT_FALSE(logger.hasError());
-		s.data("test");
-		ASSERT_TRUE(logger.hasError());
-		s.fieldEnd();
-		tracker.expect(1, 0, 1, 0, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
-	}
-	tracker.expect(1, 1, 1, 0, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
+	tracker.reset();
+	logger.reset();
+
+	{
+		Stack s{env.context, States::AnyHandlers};
+		s.command("a", {});
+		tracker.fieldStartResult = false;
+		s.fieldStart(true);
+		ASSERT_FALSE(logger.hasError());
+		s.data("test");
+		ASSERT_TRUE(logger.hasError());
+		s.fieldEnd();
+		tracker.expect(1, 0, 1, 0, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
+	}
+	tracker.expect(1, 1, 1, 0, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
 }
 
 TEST(Stack, errorInvalidFieldData)
 {
-	tracker.reset();
-	logger.reset();
-
-	{
-		Stack s{env.context, States::AnyHandlers};
-		s.command("a", {});
-		tracker.fieldStartResult = false;
-		ASSERT_FALSE(logger.hasError());
-		s.fieldStart(false);
-		ASSERT_TRUE(logger.hasError());
-		s.data("test");
-		s.fieldEnd();
-		tracker.expect(1, 0, 1, 0, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
-	}
-	tracker.expect(1, 1, 1, 0, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
+	tracker.reset();
+	logger.reset();
+
+	{
+		Stack s{env.context, States::AnyHandlers};
+		s.command("a", {});
+		tracker.fieldStartResult = false;
+		ASSERT_FALSE(logger.hasError());
+		s.fieldStart(false);
+		ASSERT_TRUE(logger.hasError());
+		s.data("test");
+		s.fieldEnd();
+		tracker.expect(1, 0, 1, 0, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
+	}
+	tracker.expect(1, 1, 1, 0, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
 }
 
 TEST(Stack, errorFieldStartNoCommand)
 {
-	tracker.reset();
-	logger.reset();
+	tracker.reset();
+	logger.reset();
 
-	Stack s{env.context, States::AnyHandlers};
-	ASSERT_THROW(s.fieldStart(false), LoggableException);
-	ASSERT_THROW(s.fieldStart(true), LoggableException);
-	tracker.expect(0, 0, 0, 0, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
+	Stack s{env.context, States::AnyHandlers};
+	ASSERT_THROW(s.fieldStart(false), LoggableException);
+	ASSERT_THROW(s.fieldStart(true), LoggableException);
+	tracker.expect(0, 0, 0, 0, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
 }
 
 TEST(Stack, errorMultipleFieldStarts)
 {
-	tracker.reset();
-	logger.reset();
-
-	{
-		Stack s{env.context, States::AnyHandlers};
-		s.command("a", {});
-		tracker.expect(1, 0, 0, 0, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
-
-		s.fieldStart(false);
-		ASSERT_FALSE(logger.hasError());
-		s.fieldStart(false);
-		ASSERT_TRUE(logger.hasError());
-		tracker.expect(1, 0, 1, 0, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
-
-		s.fieldEnd();
-		tracker.expect(1, 0, 1, 1, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
-	}
-	tracker.expect(1, 1, 1, 1, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
+	tracker.reset();
+	logger.reset();
+
+	{
+		Stack s{env.context, States::AnyHandlers};
+		s.command("a", {});
+		tracker.expect(1, 0, 0, 0, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
+
+		s.fieldStart(false);
+		ASSERT_FALSE(logger.hasError());
+		s.fieldStart(false);
+		ASSERT_TRUE(logger.hasError());
+		tracker.expect(1, 0, 1, 0, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
+
+		s.fieldEnd();
+		tracker.expect(1, 0, 1, 1, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
+	}
+	tracker.expect(1, 1, 1, 1, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
 }
 
 TEST(Stack, errorMultipleFieldEnds)
 {
-	tracker.reset();
-	logger.reset();
-
-	{
-		Stack s{env.context, States::AnyHandlers};
-		s.command("a", {});
-		tracker.expect(1, 0, 0, 0, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
-
-		s.fieldStart(false);
-		s.fieldEnd();
-		ASSERT_FALSE(logger.hasError());
-		tracker.expect(1, 0, 1, 1, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
-		s.fieldEnd();
-		ASSERT_TRUE(logger.hasError());
-		tracker.expect(1, 1, 1, 1, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
-	}
-	tracker.expect(1, 1, 1, 1, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
+	tracker.reset();
+	logger.reset();
+
+	{
+		Stack s{env.context, States::AnyHandlers};
+		s.command("a", {});
+		tracker.expect(1, 0, 0, 0, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
+
+		s.fieldStart(false);
+		s.fieldEnd();
+		ASSERT_FALSE(logger.hasError());
+		tracker.expect(1, 0, 1, 1, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
+		s.fieldEnd();
+		ASSERT_TRUE(logger.hasError());
+		tracker.expect(1, 1, 1, 1, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
+	}
+	tracker.expect(1, 1, 1, 1, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
 }
 
 TEST(Stack, errorOpenField)
 {
-	tracker.reset();
-	logger.reset();
-
-	{
-		Stack s{env.context, States::AnyHandlers};
-		s.command("a", {});
-		tracker.expect(1, 0, 0, 0, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
-
-		s.fieldStart(false);
-		ASSERT_FALSE(logger.hasError());
-	}
-	ASSERT_TRUE(logger.hasError());
-	tracker.expect(1, 1, 1, 1, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
+	tracker.reset();
+	logger.reset();
+
+	{
+		Stack s{env.context, States::AnyHandlers};
+		s.command("a", {});
+		tracker.expect(1, 0, 0, 0, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
+
+		s.fieldStart(false);
+		ASSERT_FALSE(logger.hasError());
+	}
+	ASSERT_TRUE(logger.hasError());
+	tracker.expect(1, 1, 1, 1, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
 }
 
 TEST(Stack, fieldEndWhenImplicitDefaultFieldOpen)
 {
-	tracker.reset();
-	logger.reset();
-
-	{
-		Stack s{env.context, States::AnyHandlers};
-		s.command("a", {});
-		s.fieldStart(true);
-		s.command("b", {});
-		s.data("test");
-		s.fieldEnd();
-		tracker.expect(2, 1, 2, 2, 0, 0, 1); // sc, ec, fsc, fse, asc, aec, dc
-	}
-	tracker.expect(2, 2, 2, 2, 0, 0, 1); // sc, ec, fsc, fse, asc, aec, dc
-	ASSERT_FALSE(logger.hasError());
+	tracker.reset();
+	logger.reset();
+
+	{
+		Stack s{env.context, States::AnyHandlers};
+		s.command("a", {});
+		s.fieldStart(true);
+		s.command("b", {});
+		s.data("test");
+		s.fieldEnd();
+		tracker.expect(2, 1, 2, 2, 0, 0, 1); // sc, ec, fsc, fse, asc, aec, dc
+	}
+	tracker.expect(2, 2, 2, 2, 0, 0, 1); // sc, ec, fsc, fse, asc, aec, dc
+	ASSERT_FALSE(logger.hasError());
 }
 
 TEST(Stack, fieldAfterDefaultField)
 {
-	tracker.reset();
-	logger.reset();
-
-	{
-		Stack s{env.context, States::AnyHandlers};
-		s.command("a", {});
-		tracker.expect(1, 0, 0, 0, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
-		s.fieldStart(true);
-		tracker.expect(1, 0, 1, 0, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
-
-		s.command("b", {});
-		tracker.expect(2, 0, 1, 0, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
-
-		s.fieldStart(false);
-		tracker.expect(2, 0, 2, 0, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
-		s.data("f1");
-		tracker.expect(2, 0, 2, 0, 0, 0, 1); // sc, ec, fsc, fse, asc, aec, dc
-		s.fieldEnd();
-		tracker.expect(2, 0, 2, 1, 0, 0, 1); // sc, ec, fsc, fse, asc, aec, dc
-		tracker.fieldStartSetIsDefault = true;
-
-		s.fieldStart(false);
-		tracker.fieldStartSetIsDefault = false;
-		tracker.expect(2, 0, 3, 1, 0, 0, 1); // sc, ec, fsc, fse, asc, aec, dc
-		s.data("f2");
-		tracker.expect(2, 0, 3, 1, 0, 0, 2); // sc, ec, fsc, fse, asc, aec, dc
-		s.fieldEnd();
-		tracker.expect(2, 0, 3, 2, 0, 0, 2); // sc, ec, fsc, fse, asc, aec, dc
-
-		ASSERT_FALSE(logger.hasError());
-		s.fieldStart(false);
-		ASSERT_TRUE(logger.hasError());
-		logger.reset();
-		tracker.expect(2, 0, 3, 2, 0, 0, 2); // sc, ec, fsc, fse, asc, aec, dc
-		s.data("f3");
-		tracker.expect(2, 0, 3, 2, 0, 0, 2); // sc, ec, fsc, fse, asc, aec, dc
-		s.fieldEnd();
-		tracker.expect(2, 0, 3, 2, 0, 0, 2); // sc, ec, fsc, fse, asc, aec, dc
-
-		s.fieldEnd();
-		tracker.expect(2, 1, 3, 3, 0, 0, 2); // sc, ec, fsc, fse, asc, aec, dc
-	}
-	tracker.expect(2, 2, 3, 3, 0, 0, 2); // sc, ec, fsc, fse, asc, aec, dc
-	ASSERT_FALSE(logger.hasError());
-}
+	tracker.reset();
+	logger.reset();
+
+	{
+		Stack s{env.context, States::AnyHandlers};
+		s.command("a", {});
+		tracker.expect(1, 0, 0, 0, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
+		s.fieldStart(true);
+		tracker.expect(1, 0, 1, 0, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
+
+		s.command("b", {});
+		tracker.expect(2, 0, 1, 0, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
+
+		s.fieldStart(false);
+		tracker.expect(2, 0, 2, 0, 0, 0, 0); // sc, ec, fsc, fse, asc, aec, dc
+		s.data("f1");
+		tracker.expect(2, 0, 2, 0, 0, 0, 1); // sc, ec, fsc, fse, asc, aec, dc
+		s.fieldEnd();
+		tracker.expect(2, 0, 2, 1, 0, 0, 1); // sc, ec, fsc, fse, asc, aec, dc
+		tracker.fieldStartSetIsDefault = true;
+
+		s.fieldStart(false);
+		tracker.fieldStartSetIsDefault = false;
+		tracker.expect(2, 0, 3, 1, 0, 0, 1); // sc, ec, fsc, fse, asc, aec, dc
+		s.data("f2");
+		tracker.expect(2, 0, 3, 1, 0, 0, 2); // sc, ec, fsc, fse, asc, aec, dc
+		s.fieldEnd();
+		tracker.expect(2, 0, 3, 2, 0, 0, 2); // sc, ec, fsc, fse, asc, aec, dc
+
+		ASSERT_FALSE(logger.hasError());
+		s.fieldStart(false);
+		ASSERT_TRUE(logger.hasError());
+		logger.reset();
+		tracker.expect(2, 0, 3, 2, 0, 0, 2); // sc, ec, fsc, fse, asc, aec, dc
+		s.data("f3");
+		tracker.expect(2, 0, 3, 2, 0, 0, 2); // sc, ec, fsc, fse, asc, aec, dc
+		s.fieldEnd();
+		tracker.expect(2, 0, 3, 2, 0, 0, 2); // sc, ec, fsc, fse, asc, aec, dc
+
+		s.fieldEnd();
+		tracker.expect(2, 1, 3, 3, 0, 0, 2); // sc, ec, fsc, fse, asc, aec, dc
+	}
+	tracker.expect(2, 2, 3, 3, 0, 0, 2); // sc, ec, fsc, fse, asc, aec, dc
+	ASSERT_FALSE(logger.hasError());
+}*/
 }
 }
-- 
cgit v1.2.3