summary refs log tree commit diff
path: root/src/core/parser/stack
diff options
context:
space:
mode:
author Andreas Stöckel <astoecke@techfak.uni-bielefeld.de> 2015-04-12 15:18:12 +0200
committer Andreas Stöckel <astoecke@techfak.uni-bielefeld.de> 2016-04-25 22:24:15 +0200
commit b7f89b4fa3dc15dbe0fa12a27b4d9167f41664f2 (patch)
tree c9ccc6131c37c34a368009a9d3a8a1316333832d /src/core/parser/stack
parent ae1b41524c89c29b47b189fd6741f3aeefeaeb50 (diff)
Add greedy flag to TokenDescriptor and SyntaxDescriptor and set it correctly. Shorten Stack "checkTokensAreUnambiguous" method
Diffstat (limited to 'src/core/parser/stack')
-rw-r--r-- src/core/parser/stack/OntologyHandler.cpp 5
-rw-r--r-- src/core/parser/stack/Stack.cpp 40
2 files changed, 26 insertions, 19 deletions
diff --git a/src/core/parser/stack/OntologyHandler.cpp b/src/core/parser/stack/OntologyHandler.cpp
index c153316..f6bfb9a 100644
--- a/src/core/parser/stack/OntologyHandler.cpp
+++ b/src/core/parser/stack/OntologyHandler.cpp
@@ -502,11 +502,16 @@ bool OntologyOpenCloseShortHandler::data()
void OntologyOpenCloseShortHandler::end()
{
+ // Make sure data was given
if (descr->isEmpty()) {
logger().error(std::string("Expected valid token for ") + name() +
std::string(" syntax descriptor."),
location());
}
+
+ // Update the greedy flag
+ descr->greedy = greedy;
+
scope().pop(logger());
}
diff --git a/src/core/parser/stack/Stack.cpp b/src/core/parser/stack/Stack.cpp
index 3e719e6..696a070 100644
--- a/src/core/parser/stack/Stack.cpp
+++ b/src/core/parser/stack/Stack.cpp
@@ -928,37 +928,39 @@ static void strayTokenError(const Token &token, TokenDescriptor &descr,
return;
}
-static void checkTokensAreUnambigous(const Token &token,
+static void checkTokensAreUnambiguous(const Token &token,
const TokenDescriptor &descr,
Logger &logger)
{
- const ssize_t maxDepth = std::numeric_limits<ssize_t>::max();
- const SyntaxDescriptor none(Tokens::Empty, Tokens::Empty, Tokens::Empty,
- nullptr, maxDepth);
+ // Some helper functions and constants
+ constexpr ssize_t MAX_DEPTH = std::numeric_limits<ssize_t>::max();
+ static const SyntaxDescriptor EMPTY_DESCR(
+ Tokens::Empty, Tokens::Empty, Tokens::Empty, nullptr, MAX_DEPTH, true);
+ static auto get = [](size_t i, const std::vector<SyntaxDescriptor> &descrs)
+ -> const SyntaxDescriptor &
+ {
+ return (i < descrs.size()) ? descrs[i] : EMPTY_DESCR;
+ };
// Check whether there is any ambiguity -- e.g. there are two tokens with
// the same depth (the effort they need to be created). The shortForm and
// open lists are assumed to be sorted by depth.
- ssize_t errorDepth = maxDepth;
+ ssize_t errorDepth = MAX_DEPTH;
size_t i = 0;
size_t j = 0;
- while (errorDepth == maxDepth &&
+ while (errorDepth == MAX_DEPTH &&
(i < descr.open.size() || j < descr.shortForm.size())) {
- const SyntaxDescriptor &di1 =
- i < descr.open.size() ? descr.open[i] : none;
- const SyntaxDescriptor &di2 =
- (i + 1 < descr.open.size()) ? descr.open[i + 1] : none;
- const SyntaxDescriptor &dj1 =
- j < descr.shortForm.size() ? descr.shortForm[j] : none;
- const SyntaxDescriptor &dj2 =
- (j + 1 < descr.shortForm.size()) ? descr.shortForm[j + 1] : none;
-
- if (di1.depth != maxDepth &&
+ const SyntaxDescriptor &di1 = get(i, descr.open);
+ const SyntaxDescriptor &di2 = get(i + 1, descr.open);
+ const SyntaxDescriptor &dj1 = get(j, descr.shortForm);
+ const SyntaxDescriptor &dj2 = get(j + 1, descr.shortForm);
+
+ if (di1.depth != MAX_DEPTH &&
(di1.depth == di2.depth || di1.depth == dj1.depth ||
di1.depth == dj2.depth)) {
errorDepth = di1.depth;
}
- if (dj1.depth != maxDepth &&
+ if (dj1.depth != MAX_DEPTH &&
(dj1.depth == dj2.depth || di2.depth == dj1.depth)) {
errorDepth = dj1.depth;
}
@@ -968,7 +970,7 @@ static void checkTokensAreUnambigous(const Token &token,
}
// Issue an error message if an ambiguity exists
- if (errorDepth != maxDepth) {
+ if (errorDepth != MAX_DEPTH) {
logger.error("Token \"" + token.name() + "\" is ambiguous!");
logger.note(
"The token could be ambiguously used in one of the following "
@@ -1123,7 +1125,7 @@ void StackImpl::handleToken(const Token &token)
}
// Make sure the given open token descriptors are unambiguous
- checkTokensAreUnambigous(token, descr, logger());
+ checkTokensAreUnambiguous(token, descr, logger());
// Now try to handle open or short form tokens. Iterate until the stack can
// no longer be unwound.