audio is recorded but fast and distorted
Shaji Khan committed Mar 17, 2024
1 parent 8ddf6c9 commit 9581aa0
Showing 3 changed files with 153 additions and 27 deletions.
3 changes: 2 additions & 1 deletion app/src/main/cpp/Meter.cpp
@@ -91,6 +91,7 @@ int Meter::autoincrease_callback(vringbuffer_t *vrb, bool first_call, int readin
}

int Meter::updateMeterOutput (AudioBuffer * buffer) {
// LOGD("HIT");
float * data = buffer->data ;
float * raw = buffer -> raw ;
int samples = buffer -> pos ;
@@ -143,7 +144,7 @@ int Meter::updateMeterOutput (AudioBuffer * buffer) {
} else {
if (tunerEnabled or videoRecording) {
if ((jfloatArray1_index + samples) >= TUNER_ARRAY_SIZE) {
envOutput->CallStaticVoidMethod(mainActivityOutput, setTuner, jfloatArray1, samples, false);
envOutput->CallStaticVoidMethod(mainActivityOutput, setTuner, jfloatArray1, jfloatArray1_index, false);
jfloatArray1_index = 0 ;
} else {
envOutput->SetFloatArrayRegion(jfloatArray1, jfloatArray1_index, samples, raw);
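The Meter.cpp change above swaps the length passed to the Java setTuner callback: instead of samples (the size of the buffer currently being processed), the native code now reports jfloatArray1_index, i.e. how many floats have actually been accumulated in jfloatArray1 since the last flush. A minimal sketch of the contract the receiving side relies on (class name and body are illustrative, not repository code):

// Sketch only: the Java-side consumer must treat size as the number of valid
// entries in the shared array, which is why the accumulated index is passed.
public class TunerSink {
    public static void consume(float[] data, int size) {
        for (int i = 0; i < size; i++) {
            float sample = data[i]; // entries beyond size are stale
        }
    }
}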
87 changes: 72 additions & 15 deletions app/src/main/java/com/shajikhan/ladspa/amprack/Camera2.java
@@ -11,6 +11,7 @@
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.AudioFormat;
import android.media.CamcorderProfile;
import android.media.ImageReader;
import android.media.MediaCodec;
@@ -48,15 +49,29 @@ public class Camera2 {
private static final int IFRAME_INTERVAL = 1; // 10 seconds between I-frames
private int mWidth = -1;
public long presentationTimeUs = 0 ;
long frame = 0 ;
private int mHeight = -1;
// bit rate, in bits per second
private int mBitRate = -1;
public MediaCodec mEncoder, audioEncoder;
public MediaCodec mEncoder, audioEncoder = null;
ByteBuffer[] audioInputBuffers ;

private Surface mInputSurface;
private MediaMuxer mMuxer;
private int mTrackIndex = -1, audioTrackIndex = -1;
class Timestamp {
long start ;

Timestamp () {
start = System.nanoTime() / 1000 ;
}

long get () {
return (System.nanoTime() / 1000) - start ;
}
}

Timestamp timestamp = null ;
public MediaMuxer mMuxer;
public int mTrackIndex = -1, audioTrackIndex = -1;
public int audioIndex ;
public boolean mMuxerStarted;

@@ -269,6 +284,7 @@ private void prepareEncoder() {
outputFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
outputFormat.setInteger(MediaFormat.KEY_BIT_RATE, 160000);
outputFormat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, 16384);
outputFormat.setInteger(MediaFormat.KEY_PCM_ENCODING, AudioFormat.ENCODING_PCM_FLOAT);

// Create a MediaCodec encoder, and configure it with our format. Get a Surface
// we can use for input and wrap it with a class that handles the EGL work.
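The new KEY_PCM_ENCODING line declares that the audio encoder's input buffers carry 32-bit float samples (ENCODING_PCM_FLOAT) rather than the default 16-bit PCM, so the float data pushed from the audio engine is interpreted correctly. A self-contained sketch of an equivalent configuration; the 48 kHz sample rate and mono channel count are assumptions, not values taken from the diff:

import android.media.AudioFormat;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;

import java.io.IOException;

// Sketch only: an AAC encoder configured with the same keys as the commit above.
public class AacEncoderFactory {
    public static MediaCodec create() throws IOException {
        MediaFormat format = MediaFormat.createAudioFormat(MediaFormat.MIMETYPE_AUDIO_AAC, 48000, 1);
        format.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
        format.setInteger(MediaFormat.KEY_BIT_RATE, 160000);
        format.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, 16384);
        // Declare float PCM input so queued FloatBuffer data is not read as 16-bit samples.
        format.setInteger(MediaFormat.KEY_PCM_ENCODING, AudioFormat.ENCODING_PCM_FLOAT);

        MediaCodec encoder = MediaCodec.createEncoderByType(MediaFormat.MIMETYPE_AUDIO_AAC);
        encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        encoder.start();
        return encoder;
    }
}

KEY_PCM_ENCODING only affects how the codec interprets the bytes queued into its input buffers; the encoded AAC output format is unchanged.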
@@ -319,6 +335,8 @@ private void prepareEncoder() {

mTrackIndex = -1;
mMuxerStarted = false;

presentationTimeUs = System.nanoTime()/1000;
}

/**
@@ -328,6 +346,7 @@ private void releaseEncoder() {
Log.d(TAG, "releaseEncoder: stopping encoder");
mMuxerStarted = false;
mEncoder.signalEndOfInputStream();
timestamp = null ;

if (mEncoder != null) {
mEncoder.stop();
Expand All @@ -353,6 +372,8 @@ private void releaseEncoder() {

mTrackIndex = -1 ;
audioTrackIndex = -1 ;
frame = 0 ;
presentationTimeUs = 0 ;
}
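For reference, the usual teardown order for this kind of pipeline is: signal end-of-stream on the video encoder's input surface, drain any remaining output, then stop and release the codecs, and finally the muxer. A minimal sketch under those assumptions (parameter names are illustrative, not the project's fields):

import android.media.MediaCodec;
import android.media.MediaMuxer;

// Sketch only: defensive teardown of one surface-input encoder plus the shared muxer.
public class EncoderTeardown {
    public static void release(MediaCodec encoder, MediaMuxer muxer, boolean muxerStarted) {
        if (encoder != null) {
            encoder.signalEndOfInputStream(); // only valid for surface-input encoders
            // ... drain remaining output buffers here before stopping ...
            encoder.stop();
            encoder.release();
        }
        if (muxer != null) {
            if (muxerStarted) {
                muxer.stop(); // stop() throws if the muxer was never started
            }
            muxer.release();
        }
    }
}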

class EncoderCallback extends MediaCodec.Callback {
@@ -395,7 +416,29 @@ public void onInputBufferAvailable(@NonNull MediaCodec codec, int index) {

@Override
public void onOutputBufferAvailable(@NonNull MediaCodec codec, int index, @NonNull MediaCodec.BufferInfo info) {
mBufferInfo = info;
/*
if (mainActivity.avBuffer.size() > 0) {
MainActivity.AVBuffer data = mainActivity.avBuffer.pop();
int inputBufferId = mainActivity.camera2.audioEncoder.dequeueInputBuffer(5000);
int eos = 0;
if (!mainActivity.videoRecording)
eos = MediaCodec.BUFFER_FLAG_END_OF_STREAM;
if (inputBufferId >= 0) {
ByteBuffer inputBuffer = mainActivity.camera2.audioEncoder.getInputBuffer(inputBufferId);
inputBuffer.asFloatBuffer().put(data.floatBuffer);
presentationTimeUs = mainActivity.camera2.mBufferInfo.presentationTimeUs; // - mainActivity.camera2.presentationTimeUs ;
Log.d(TAG, "setTuner: pushed data of size " + String.format("%d [%d]", data.size, presentationTimeUs));
mainActivity.camera2.audioEncoder.queueInputBuffer(inputBufferId, 0, data.size, presentationTimeUs, eos);
;
mainActivity.camera2.audioIndex = inputBufferId;
}
}
*/

Log.d(TAG, "onOutputBufferAvailable: video " + String.format(" [%d: %d]", info.size, info.presentationTimeUs));
if (! mMuxerStarted) {
MediaFormat newFormat = mEncoder.getOutputFormat();
Log.d(TAG, "encoder output format changed: " + newFormat);
@@ -404,35 +447,49 @@ public void onOutputBufferAvailable(@NonNull MediaCodec codec, int index, @NonNu
if (mTrackIndex == -1)
mTrackIndex = mMuxer.addTrack(newFormat);

int aIndex = audioEncoder.dequeueOutputBuffer(bufferInfo, 5000) ;
if (aIndex >= 0) {
Log.d(TAG, "onInputBufferAvailable: added audio track");
MediaFormat format = audioEncoder.getOutputFormat();
Log.d(TAG, String.format("format: %s", format.toString()));
audioTrackIndex = mMuxer.addTrack(format);
} else {
Log.e(TAG, "onOutputBufferAvailable: dequeue input buffer: " + aIndex, null);
mMuxer.setOrientationHint(cameraCharacteristicsHashMap.get(cameraId).get(CameraCharacteristics.SENSOR_ORIENTATION));

timestamp = new Timestamp() ;
if (audioTrackIndex == -1) {
outPutByteBuffer = codec.getOutputBuffer(index);
codec.releaseOutputBuffer(index, false);
return;
}

mMuxer.setOrientationHint(cameraCharacteristicsHashMap.get(cameraId).get(CameraCharacteristics.SENSOR_ORIENTATION));
Log.d(TAG, "onOutputBufferAvailable: starting muxer");
mMuxer.start();
mMuxerStarted = true;
presentationTimeUs = info.presentationTimeUs;
}

outPutByteBuffer = codec.getOutputBuffer(index);
// info.presentationTimeUs = info.presentationTimeUs - presentationTimeUs;

// info.presentationTimeUs = timestamp.get();
// Log.d(TAG, "onOutputBufferAvailable: writing muxer frame at " + info.presentationTimeUs);
mMuxer.writeSampleData(mTrackIndex, outPutByteBuffer, info);
codec.releaseOutputBuffer(index, false);
// byte[] outDate = new byte[info.size];
// outPutByteBuffer.get(outDate);


/*
int aIndex = audioEncoder.dequeueOutputBuffer(bufferInfo, 5000) ;
if (aIndex > 0) {
int counter = 0 ;
while (aIndex > 0) {
bufferInfo.presentationTimeUs = info.presentationTimeUs + counter;
// bufferInfo.presentationTimeUs = info.presentationTimeUs - presentationTimeUs;
audioBuffer = audioEncoder.getOutputBuffer(aIndex);
Log.d(TAG, "onOutputBufferAvailable: presentaton time: " + bufferInfo.presentationTimeUs + counter );
mMuxer.writeSampleData(audioTrackIndex, audioBuffer, bufferInfo);
audioEncoder.releaseOutputBuffer(aIndex, false);
Log.d(TAG, "onOutputBufferAvailable: popped data of size " + bufferInfo.size + " " + bufferInfo.presentationTimeUs);
aIndex = audioEncoder.dequeueOutputBuffer(bufferInfo, 5000) ;
counter += 1000 ;
}
*/

mBufferInfo = info;
}

@Override
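The callback above delays starting the muxer until both the video and the audio output formats are known: MediaMuxer requires every track to be added before start(), and writeSampleData() is only legal afterwards. A condensed sketch of that ordering (variable names are illustrative, not the project's):

import android.media.MediaCodec;
import android.media.MediaFormat;
import android.media.MediaMuxer;

import java.nio.ByteBuffer;

// Sketch only: the add-tracks -> start -> write ordering MediaMuxer expects.
public class MuxerStartOrder {
    public static void mux(MediaMuxer muxer,
                           MediaFormat videoFormat, MediaFormat audioFormat,
                           int orientationDegrees,
                           ByteBuffer encodedVideo, MediaCodec.BufferInfo videoInfo) {
        int videoTrack = muxer.addTrack(videoFormat);   // from the video encoder's output format
        int audioTrack = muxer.addTrack(audioFormat);   // from the audio encoder's output format
        muxer.setOrientationHint(orientationDegrees);
        muxer.start();                                  // no tracks may be added after this point

        // Only now is it valid to write encoded samples for either track.
        muxer.writeSampleData(videoTrack, encodedVideo, videoInfo);
    }
}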
90 changes: 79 additions & 11 deletions app/src/main/java/com/shajikhan/ladspa/amprack/MainActivity.java
@@ -162,9 +162,9 @@ static class AVBuffer {
FloatBuffer floatBuffer ;
int size ;
}
static LinkedList<AVBuffer> avBuffer = new LinkedList<>();
public static LinkedList<AVBuffer> avBuffer = new LinkedList<>();
static int avEncoderIndex = 0 ;
static long presentationTimeUs = 0;
public static long presentationTimeUs = 0;
int totalBytesRead = 0;

ExtendedFloatingActionButton fab ;
@@ -1378,6 +1378,9 @@ public void toggleEffect(boolean isPlaying) {

if (record.isChecked())
record.setChecked(false);
if (rack.toggleVideo.isChecked())
rack.toggleVideo.setChecked(false);

stopEffect();
notificationManager.cancelAll();
running = false ;
@@ -2715,29 +2718,88 @@ static void setMixerMeter (float inputValue, float outputValue) {
outputMeter.setProgress((int) (outputValue * 100));
}

static int audioIn = 0, audioOut = 0 ;
static void setTuner (float [] data, int size) {
if (mainActivity.videoRecording && mainActivity.camera2.mainActivity.camera2.mMuxerStarted) {
mainActivity.camera2.frame += size ;
if (mainActivity.videoRecording && mainActivity.camera2.timestamp != null) {
int inputBufferId = mainActivity.camera2.audioEncoder.dequeueInputBuffer(5000);
if (inputBufferId >= 0) {
ByteBuffer inputBuffer = mainActivity.camera2.audioEncoder.getInputBuffer (inputBufferId);
inputBuffer.asFloatBuffer().put(data);
presentationTimeUs = presentationTimeUs + size / mainActivity.camera2.sampleRate;
ByteBuffer inputBuffer = null;

if (inputBufferId >= 0)
inputBuffer = mainActivity.camera2.audioEncoder.getInputBuffer (inputBufferId);

int eos = 0 ;
if (! mainActivity.videoRecording)
eos = MediaCodec.BUFFER_FLAG_END_OF_STREAM ;

if (inputBufferId >= 0 && inputBuffer != null) {
inputBuffer.clear();
inputBuffer.rewind();
for (int i = 0 ; i < size ; i ++) {
inputBuffer.putFloat(data [i]);
}

inputBuffer.rewind();
// presentationTimeUs = System.nanoTime() / 1000 ; //computePresentationTimeNsec(mainActivity.camera2.frame, mainActivity.camera2.sampleRate);

mainActivity.camera2.audioEncoder.queueInputBuffer(inputBufferId, 0, size, presentationTimeUs, 0);;
presentationTimeUs = computePresentationTimeNsec(mainActivity.camera2.frame, mainActivity.camera2.sampleRate) ;
// presentationTimeUs = mainActivity.camera2.timestamp.get();
// presentationTimeUs = mainActivity.camera2.mBufferInfo.presentationTimeUs;
Log.d(TAG, "[aac]: pushed data of size " + String.format ("%d [%d]", size, presentationTimeUs));
mainActivity.camera2.audioEncoder.queueInputBuffer(inputBufferId, 0, size, presentationTimeUs, eos);;
mainActivity.camera2.audioIndex = inputBufferId ;
audioIn += size ;
}

MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
int aIndex = mainActivity.camera2.audioEncoder.dequeueOutputBuffer(bufferInfo, 5000) ;
int counter = 0 ;
if (aIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
if (mainActivity.camera2.audioTrackIndex != -1)
Log.e(TAG, "[aac] output format changed! " + mainActivity.camera2.audioEncoder.getOutputFormat());
else {
mainActivity.camera2.audioTrackIndex = mainActivity.camera2.mMuxer.addTrack(
mainActivity.camera2.audioEncoder.getOutputFormat());

Log.d(TAG, "setTuner: audio track index " + mainActivity.camera2.audioTrackIndex);
}

}

while (aIndex >= 0 && mainActivity.camera2.audioTrackIndex != -1) {
audioOut += bufferInfo.size;
// bufferInfo.presentationTimeUs = mainActivity.camera2.mBufferInfo.presentationTimeUs ;
// bufferInfo.presentationTimeUs = info.presentationTimeUs - presentationTimeUs;
// mainActivity.camera2.audioEncoder.getOutputBuffer(aIndex);

// if (mainActivity.camera2.audioTrackIndex == -1) {
// mainActivity.camera2.audioTrackIndex = mainActivity.camera2.mMuxer.addTrack(
// mainActivity.camera2.audioEncoder.getOutputFormat()
// );

if (mainActivity.camera2.mMuxerStarted) {
ByteBuffer buffer = mainActivity.camera2.audioEncoder.getOutputBuffer(aIndex);
if (buffer != null) {
buffer.rewind();
mainActivity.camera2.mMuxer.writeSampleData(mainActivity.camera2.audioTrackIndex, buffer, bufferInfo);
Log.d(TAG, "[aac]: popped data of size " + bufferInfo.size + " " + bufferInfo.presentationTimeUs);
}
}

mainActivity.camera2.audioEncoder.releaseOutputBuffer(aIndex, false);
aIndex = mainActivity.camera2.audioEncoder.dequeueOutputBuffer(bufferInfo, 5000) ;
}

Log.d(TAG, String.format ("audio in [%d]: audio out [%s]", audioIn, audioOut));
/*
AVBuffer buffer = new AVBuffer();
buffer.size = size;
buffer.floatBuffer = FloatBuffer.wrap(data);
avBuffer.addLast(buffer);
mainActivity.camera2.audioEncoder.flush();
*/
} else {
presentationTimeUs = 0 ;
audioIn = audioOut = 0 ;
}
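The new setTuner path queues the float samples straight into the AAC encoder and drains whatever the encoder has produced into the muxer within the same call. Stripped of the project-specific state, the per-callback pattern looks roughly like the sketch below (names and timeouts are illustrative). Note that queueInputBuffer() takes a size in bytes, so a count of float samples has to be multiplied by 4, and that MediaCodec presentation timestamps are in microseconds.

import android.media.MediaCodec;
import android.media.MediaMuxer;

import java.nio.ByteBuffer;

// Sketch only: push one block of float PCM into an encoder and drain its output.
public class AacFeeder {
    public static void feedAndDrain(MediaCodec encoder, MediaMuxer muxer, int audioTrack,
                                    float[] data, int size, long presentationTimeUs) {
        int in = encoder.dequeueInputBuffer(5000);
        if (in >= 0) {
            ByteBuffer buf = encoder.getInputBuffer(in);
            buf.clear();
            buf.asFloatBuffer().put(data, 0, size);
            // The size argument is in bytes: 4 bytes per float sample.
            encoder.queueInputBuffer(in, 0, size * 4, presentationTimeUs, 0);
        }

        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        int out = encoder.dequeueOutputBuffer(info, 5000);
        while (out >= 0) {
            ByteBuffer encoded = encoder.getOutputBuffer(out);
            if (encoded != null && audioTrack != -1) {
                muxer.writeSampleData(audioTrack, encoded, info);
            }
            encoder.releaseOutputBuffer(out, false);
            out = encoder.dequeueOutputBuffer(info, 5000);
        }
    }
}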

if (! mainActivity.tunerEnabled)
@@ -3449,4 +3511,10 @@ int getCameraSensorOrientation(CameraCharacteristics characteristics) {
public static void pushToVideo (float [] data, int nframes) {

}

private static long computePresentationTimeNsec(long frameIndex, int sampleRate) {
final long ONE_BILLION = 1000000000;
return frameIndex * ONE_BILLION / sampleRate;
}

}
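One note on units: the presentationTimeUs values passed to MediaCodec above are microseconds, while computePresentationTimeNsec(), as its name says, maps a sample index to nanoseconds. The microsecond equivalent of that formula would look like this (hypothetical helper, not part of this commit):

// Hypothetical helper: sample index -> microseconds, the unit MediaCodec
// presentation timestamps use.
public class PresentationTime {
    public static long samplesToMicros(long sampleIndex, int sampleRate) {
        final long ONE_MILLION = 1_000_000L;
        return sampleIndex * ONE_MILLION / sampleRate;
    }
}

At 48 kHz, 48,000 samples map to 1,000,000 microseconds, i.e. exactly one second.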
