Diffstat (limited to 'src/core/parser/utils')
-rw-r--r--  src/core/parser/utils/TokenizedData.cpp | 10
-rw-r--r--  src/core/parser/utils/TokenizedData.hpp | 12
2 files changed, 20 insertions, 2 deletions
diff --git a/src/core/parser/utils/TokenizedData.cpp b/src/core/parser/utils/TokenizedData.cpp
index c3c4f98..d8a8b37 100644
--- a/src/core/parser/utils/TokenizedData.cpp
+++ b/src/core/parser/utils/TokenizedData.cpp
@@ -29,8 +29,7 @@ namespace ousia {
 /**
  * Maximum token length.
  */
-constexpr TokenLength MaxTokenLength =
-    std::numeric_limits<TokenLength>::max();
+constexpr TokenLength MaxTokenLength = std::numeric_limits<TokenLength>::max();
 
 namespace {
 /**
@@ -510,6 +509,13 @@ TokenizedData::TokenizedData(SourceId sourceId)
 {
 }
 
+TokenizedData::TokenizedData(const std::string &data, SourceOffset offsStart,
+                             SourceId sourceId)
+    : TokenizedData(sourceId)
+{
+	append(data, offsStart);
+}
+
 TokenizedData::~TokenizedData() {}
 
 size_t TokenizedData::append(const std::string &data, SourceOffset offsStart,
diff --git a/src/core/parser/utils/TokenizedData.hpp b/src/core/parser/utils/TokenizedData.hpp
index b72ca02..bc937f2 100644
--- a/src/core/parser/utils/TokenizedData.hpp
+++ b/src/core/parser/utils/TokenizedData.hpp
@@ -96,6 +96,18 @@ public:
 	TokenizedData(SourceId sourceId);
 
 	/**
+	 * Creates a new instance of TokenizedData, takes a SourceId and an initial
+	 * string buffer.
+	 *
+	 * @param data is the string that should be appended to the buffer.
+	 * @param offsStart is the start offset in bytes in the input file.
+	 * @param sourceId is the source identifier that should be used for
+	 *        constructing the location when returning tokens.
+	 */
+	TokenizedData(const std::string &data, SourceOffset offsStart = 0,
+	              SourceId sourceId = InvalidSourceId);
+
+	/**
 	 * Destructor. Needs to be defined explicitly for freeing a shared pointer
 	 * of the incomplete TokenizedDataImpl type.
 	 */
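
For context, a minimal usage sketch of the convenience constructor introduced by this commit. It is not part of the diff itself; the include path, the literal string and the sourceId value are illustrative assumptions.

#include "TokenizedData.hpp"  // include path is an assumption

using namespace ousia;

void example(SourceId sourceId)
{
	// Previously the buffer had to be filled in a second step:
	TokenizedData d1(sourceId);
	d1.append("hello world", 0);

	// The new constructor collapses both steps; internally it delegates to
	// TokenizedData(sourceId) and then calls append(data, offsStart).
	TokenizedData d2("hello world", 0, sourceId);
}

Since offsStart and sourceId carry default arguments, TokenizedData d3("hello world"); also compiles and yields a buffer associated with InvalidSourceId.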