
Commit 712971b
added support for all buffersizes
faressc committed Oct 23, 2023
1 parent 80bc092 commit 712971b
Showing 3 changed files with 5 additions and 5 deletions.
source/dsp/inference/InferenceManager.cpp (3 additions & 3 deletions)
@@ -44,7 +44,8 @@ void InferenceManager::processBlock(juce::AudioBuffer<float> &buffer) {
         receiveRingBuffer.pushSample(receiveBuffer.popSample(0), 0);
     }
     auto &sendBuffer = inferenceThread.getModelInputBuffer();
-    if (sendRingBuffer.getAvailableSamples(0) >= MODEL_INPUT_SIZE) {
+    // Add the samples already available in sendBuffer; otherwise, if MODEL_INPUT_SIZE % spec.maximumBlockSize != 0, samples get stuck there
+    if (sendRingBuffer.getAvailableSamples(0) + sendBuffer.getAvailableSamples(0) >= MODEL_INPUT_SIZE) {
         while (sendRingBuffer.getAvailableSamples(0) > 0) {
             sendBuffer.pushSample(sendRingBuffer.popSample(0), 0);
         }
@@ -71,15 +72,14 @@ void InferenceManager::processOutput(juce::AudioBuffer<float> &buffer) {
                 break;
             }
         }
-
         if (receiveRingBuffer.getAvailableSamples(0) >= buffer.getNumSamples()) {
             for (int sample = 0; sample < buffer.getNumSamples(); ++sample) {
                 buffer.setSample(0, sample, receiveRingBuffer.popSample(0));
             }
         }
         else {
             inferenceCounter++;
-            std::cout << "missing samples" << std::endl;
+            std::cout << "##### missing samples" << std::endl;
         }
     }
 }
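
Why the combined check matters is easiest to see with concrete numbers. The standalone sketch below is not part of the repository; MODEL_INPUT_SIZE = 150 and a host block size of 64 are assumed example values chosen so that MODEL_INPUT_SIZE % blockSize != 0, and consumption happens inline rather than on a separate inference thread to keep the simulation simple. The old condition waits until the ring buffer alone holds a full model input, ignoring leftover samples already moved into the model input buffer; the new condition counts both buffers.

// Standalone simulation (a sketch, not project code) of the send-side bookkeeping.
// Assumed example values: MODEL_INPUT_SIZE = 150, blockSize = 64.
#include <iostream>

int main() {
    constexpr int MODEL_INPUT_SIZE = 150;
    constexpr int blockSize = 64;

    // Separate state for the old and the new triggering condition.
    int ringOld = 0, sendOld = 0, oldRuns = 0;
    int ringNew = 0, sendNew = 0, newRuns = 0;

    for (int block = 1; block <= 12; ++block) {
        ringOld += blockSize; // audio thread pushes one host block
        ringNew += blockSize;

        // Old condition: only the ring buffer is counted.
        if (ringOld >= MODEL_INPUT_SIZE) {
            sendOld += ringOld; ringOld = 0; // drain the ring buffer
            while (sendOld >= MODEL_INPUT_SIZE) { sendOld -= MODEL_INPUT_SIZE; ++oldRuns; }
        }
        // New condition (this commit): leftovers in sendBuffer count too.
        if (ringNew + sendNew >= MODEL_INPUT_SIZE) {
            sendNew += ringNew; ringNew = 0;
            while (sendNew >= MODEL_INPUT_SIZE) { sendNew -= MODEL_INPUT_SIZE; ++newRuns; }
        }
        std::cout << "block " << block << ": old runs = " << oldRuns
                  << ", new runs = " << newRuns << '\n';
    }
    return 0;
}

With these example numbers the old condition fires only at blocks 3, 6, 9, and 12 (running twice at block 12 to catch up), while the new one fires at blocks 3, 5, 8, 10, and 12: the same long-run throughput, but inference starts as soon as a full model input is actually available.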
source/dsp/inference/backends/OnnxRuntimeProcessor.cpp (1 addition & 1 deletion)
@@ -42,4 +42,4 @@ void OnnxRuntimeProcessor::processBlock(std::array<float, MODEL_INPUT_SIZE_BACKE
     for (size_t i = 0; i < MODEL_OUTPUT_SIZE_BACKEND; i++) {
         output[i] = outputTensors[0].GetTensorMutableData<float>()[i];
     }
-}
\ No newline at end of file
+}
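
As background for the copy loop above: outputTensors is presumably the std::vector<Ort::Value> returned by Ort::Session::Run, and GetTensorMutableData<float>() gives direct access to a tensor's raw float data. The following minimal, self-contained sketch shows that pattern with the ONNX Runtime C++ API; the model path "model.onnx", the tensor names "input" and "output", and the 1x150 shape are hypothetical placeholders, not the project's actual values.

// Minimal ONNX Runtime C++ example (a sketch, not project code).
// "model.onnx", the tensor names, and the shape are hypothetical.
#include <onnxruntime_cxx_api.h>
#include <array>
#include <cstdint>
#include <iostream>
#include <vector>

int main() {
    Ort::Env env(ORT_LOGGING_LEVEL_WARNING, "sketch");
    Ort::SessionOptions options;
    Ort::Session session(env, "model.onnx", options); // hypothetical model file

    std::array<float, 150> input{};    // assumed input size
    std::array<int64_t, 2> shape{1, 150};
    Ort::MemoryInfo memInfo = Ort::MemoryInfo::CreateCpu(OrtArenaAllocator, OrtMemTypeDefault);
    Ort::Value inputTensor = Ort::Value::CreateTensor<float>(
        memInfo, input.data(), input.size(), shape.data(), shape.size());

    const char* inputNames[] = {"input"};   // assumed tensor names
    const char* outputNames[] = {"output"};
    std::vector<Ort::Value> outputTensors = session.Run(
        Ort::RunOptions{nullptr}, inputNames, &inputTensor, 1, outputNames, 1);

    // Same access pattern as in OnnxRuntimeProcessor::processBlock:
    // read the first output tensor's raw float data.
    const float* data = outputTensors[0].GetTensorMutableData<float>();
    std::cout << "first output sample: " << data[0] << std::endl;
    return 0;
}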
source/dsp/inference/utils/InferenceConfig.h (1 addition & 1 deletion)
@@ -14,4 +14,4 @@ enum InferenceBackend {
 #define MAX_INFERENCE_TIME 128
 #define MODEL_LATENCY 0
 
-#endif //NN_INFERENCE_TEMPLATE_INFERENCECONFIG_H
\ No newline at end of file
+#endif //NN_INFERENCE_TEMPLATE_INFERENCECONFIG_H
