diff options
| author | Andreas Stöckel <astoecke@techfak.uni-bielefeld.de> | 2015-03-03 00:30:38 +0100 |
|---|---|---|
| committer | Andreas Stöckel <astoecke@techfak.uni-bielefeld.de> | 2015-03-03 00:30:38 +0100 |
| commit | 1c33913ebb5d9202575d3ca99bd17366d30f2261 (patch) | |
| tree | ef2b3fb8528268e4bae7cdd9e5b78f9b7a89c29d /src/core/parser/utils/TokenizedData.cpp | |
| parent | cdae062d0cbc19ce605df24b2fff5e3808f21ca6 (diff) | |
Started restructuring and adapting Stack class, reenabled unit tests (does not compile right now)
Diffstat (limited to 'src/core/parser/utils/TokenizedData.cpp')
| -rw-r--r-- | src/core/parser/utils/TokenizedData.cpp | 10 |
1 file changed, 8 insertions, 2 deletions
diff --git a/src/core/parser/utils/TokenizedData.cpp b/src/core/parser/utils/TokenizedData.cpp index c3c4f98..d8a8b37 100644 --- a/src/core/parser/utils/TokenizedData.cpp +++ b/src/core/parser/utils/TokenizedData.cpp @@ -29,8 +29,7 @@ namespace ousia { /** * Maximum token length. */ -constexpr TokenLength MaxTokenLength = - std::numeric_limits<TokenLength>::max(); +constexpr TokenLength MaxTokenLength = std::numeric_limits<TokenLength>::max(); namespace { /** @@ -510,6 +509,13 @@ TokenizedData::TokenizedData(SourceId sourceId) { } +TokenizedData::TokenizedData(const std::string &data, SourceOffset offsStart, + SourceId sourceId) + : TokenizedData(sourceId) +{ + append(data, offsStart); +} + TokenizedData::~TokenizedData() {} size_t TokenizedData::append(const std::string &data, SourceOffset offsStart, |
