Correct memory indexing issue. (#165)
* Correct memory reservation.

* Fix the vmImage version for MacOS CI pipeline.

Co-authored-by: Wenbing Li <[email protected]>
joburkho and wenbingl committed Oct 4, 2021
1 parent 11c2d6b commit 94beef0
Showing 2 changed files with 4 additions and 4 deletions.
ci_build/azure-pipelines/mshost.yaml (1 addition, 1 deletion)
@@ -129,7 +129,7 @@ jobs:

 - job: MacOSWheel
   pool:
-    vmImage: 'macOS-latest'
+    vmImage: 'macOS-10.15'
 
   strategy:
     matrix:
operators/tokenizer/blingfire_sentencebreaker.cc (3 additions, 3 deletions)
@@ -7,6 +7,7 @@
 #include <locale>
 #include <codecvt>
 #include <algorithm>
+#include <memory>
 
 KernelBlingFireSentenceBreaker::KernelBlingFireSentenceBreaker(OrtApi api, const OrtKernelInfo* info) : BaseKernel(api, info), max_sentence(-1) {
   model_data_ = ort_.KernelInfoGetAttribute<std::string>(info, "model");
@@ -41,10 +42,9 @@ void KernelBlingFireSentenceBreaker::Compute(OrtKernelContext* context) {

   std::string& input_string = input_data[0];
   int max_length = 2 * input_string.size() + 1;
-  std::string output_str;
-  output_str.reserve(max_length);
+  std::unique_ptr<char[]> output_str = std::make_unique<char[]>(max_length);
 
-  int output_length = TextToSentencesWithOffsetsWithModel(input_string.data(), input_string.size(), output_str.data(), nullptr, nullptr, max_length, model_.get());
+  int output_length = TextToSentencesWithOffsetsWithModel(input_string.data(), input_string.size(), output_str.get(), nullptr, nullptr, max_length, model_.get());
   if (output_length < 0) {
     ORT_CXX_API_THROW(MakeString("splitting input:\"", input_string, "\" failed"), ORT_INVALID_ARGUMENT);
   }
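For context on the blingfire_sentencebreaker.cc change: std::string::reserve only raises the string's capacity; it does not change its size. The old code therefore passed a pointer into a zero-length string and let TextToSentencesWithOffsetsWithModel write up to max_length bytes past its end, which is undefined behavior. The commit replaces that with a std::unique_ptr<char[]> whose full length is writable. A minimal sketch of the two patterns follows; fill_buffer is a hypothetical stand-in for the library call and is not part of the repository:

// sketch.cc - illustrates the buggy and fixed buffer patterns from this commit
#include <cstring>
#include <memory>
#include <string>

// Stand-in for a C API such as TextToSentencesWithOffsetsWithModel,
// which writes up to `cap` bytes into a caller-supplied output buffer.
void fill_buffer(char* out, std::size_t cap) {
  std::memset(out, 'x', cap);
}

int main() {
  std::size_t max_length = 64;

  // Buggy pattern: reserve() only raises capacity; size() stays 0, so writing
  // max_length bytes through data() goes past the end of the string.
  std::string output_str;
  output_str.reserve(max_length);
  // fill_buffer(output_str.data(), max_length);  // out-of-bounds write (undefined behavior)

  // Fixed pattern, as in this commit: allocate a buffer whose full length is writable.
  std::unique_ptr<char[]> buffer = std::make_unique<char[]>(max_length);
  fill_buffer(buffer.get(), max_length);          // OK: all max_length bytes are owned
  return 0;
}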
