implementing video recording
Shaji Khan committed Mar 14, 2024
1 parent ebdc6dd commit f47171b
Showing 2 changed files with 200 additions and 2 deletions.
200 changes: 198 additions & 2 deletions app/src/main/java/com/shajikhan/ladspa/amprack/Camera2.java
@@ -12,6 +12,10 @@
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.ImageReader;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.media.MediaMuxer;
import android.os.Handler;
import android.os.HandlerThread;
import android.util.Log;
@@ -23,12 +27,33 @@
import androidx.annotation.NonNull;
import androidx.core.app.ActivityCompat;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;

public class Camera2 {
final String TAG = getClass().getSimpleName();
// parameters for the encoder
private static final String MIME_TYPE = "video/avc"; // H.264 Advanced Video Coding
private static final int FRAME_RATE = 30; // frames per second
private static final int IFRAME_INTERVAL = 3; // seconds between I-frames
private int mWidth = -1;
private int mHeight = -1;
// bit rate, in bits per second
private int mBitRate = -1;
private MediaCodec mEncoder;
private Surface mInputSurface;
private MediaMuxer mMuxer;
private int mTrackIndex;
private boolean mMuxerStarted;

// allocate one of these up front so we don't need to do it every time
private MediaCodec.BufferInfo mBufferInfo;

private TextureView textureView;
MainActivity mainActivity;
private static final SparseIntArray ORIENTATIONS = new SparseIntArray();
@@ -46,6 +71,7 @@ public class Camera2 {
protected CameraCaptureSession cameraCaptureSessions;
protected CaptureRequest captureRequest;
protected CaptureRequest.Builder captureRequestBuilder;
protected CaptureRequest.Builder videoBuilder;
private Size imageDimension;
private ImageReader imageReader;
ArrayList<String> cameras;
@@ -57,12 +83,13 @@ public class Camera2 {
Camera2(MainActivity mainActivity_) {
mainActivity = mainActivity_;
textureView = mainActivity_.rack.videoTexture;
cameras = new ArrayList<>();
cameraCharacteristicsHashMap = new HashMap<>();
}

public void openCamera() {
CameraManager manager = (CameraManager) mainActivity.getSystemService(mainActivity.CAMERA_SERVICE);
cameras = new ArrayList<>();
cameraCharacteristicsHashMap = new HashMap<>();

Log.e(TAG, "is camera open");
try {
for (String s: manager.getCameraIdList()) {
@@ -152,7 +179,12 @@ public void createCameraPreview() {
texture.setDefaultBufferSize(imageDimension.getWidth(), imageDimension.getHeight());
Surface surface = new Surface(texture);
captureRequestBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
videoBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
captureRequestBuilder.addTarget(surface);

prepareEncoder();
// The record request targets both the preview surface and the encoder's input
// surface, so the on-screen preview keeps running while frames are encoded.
videoBuilder.addTarget(surface);
videoBuilder.addTarget(mInputSurface);

// The session must list every surface a request will target, including mInputSurface.
cameraDevice.createCaptureSession(Arrays.asList(surface, mInputSurface), new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) {
@@ -182,6 +214,7 @@ protected void updatePreview() {
captureRequestBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
videoBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
try {
// Only one repeating request can be active per session; a second setRepeatingRequest()
// call replaces the first. Use the record request, which targets both surfaces.
cameraCaptureSessions.setRepeatingRequest(videoBuilder.build(), null, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
@@ -197,4 +230,167 @@ public void closeCamera() {
imageReader = null;
}
}

private void prepareEncoder() {
mBufferInfo = new MediaCodec.BufferInfo();
// TODO: take the width and height from the camera's configured preview size
// instead of hard-coding them (see the sketch after this method).
mWidth = 900;
mHeight = 1600;
mBitRate = 2000000; // was never assigned; configure() rejects -1. 2 Mbps is a placeholder.
MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight);

// Set some properties. Failing to specify some of these can cause the MediaCodec
// configure() call to throw an unhelpful exception.
format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
format.setInteger(MediaFormat.KEY_BIT_RATE, mBitRate);
format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
Log.d(TAG, "format: " + format);

// Create a MediaCodec encoder and configure it with our format, then get a
// Surface we can use for input. The camera delivers frames into this surface
// directly through the capture session, so the EGL / CodecInputSurface wrapper
// described in the sample this comment was adapted from is not used here.
try {
mEncoder = MediaCodec.createEncoderByType(MIME_TYPE);
} catch (IOException e) {
throw new RuntimeException(e);
}
mEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
mInputSurface = mEncoder.createInputSurface();
mEncoder.start();

// Output filename. Ideally this would use Context.getFilesDir() rather than a
// hard-coded output directory.
SimpleDateFormat formatter = new SimpleDateFormat("dd-MM-yyyy_HH.mm.ss");
Date date = new Date();
mainActivity.lastRecordedFileName = formatter.format(date);
mainActivity.lastRecordedFileName = mainActivity.dir.getAbsolutePath() + "/" + mainActivity.lastRecordedFileName + ".mp4";
String outputPath = mainActivity.lastRecordedFileName;

// Create a MediaMuxer. We can't add the video track and start() the muxer here,
// because our MediaFormat doesn't have the Magic Goodies. These can only be
// obtained from the encoder after it has started processing data.
//
// We're not actually interested in multiplexing audio. We just want to convert
// the raw H.264 elementary stream we get from MediaCodec into a .mp4 file.
try {
mMuxer = new MediaMuxer(outputPath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
} catch (IOException ioe) {
throw new RuntimeException("MediaMuxer creation failed", ioe);
}

mTrackIndex = -1;
mMuxerStarted = false;
}
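
// Editor's sketch, not part of this commit: the hard-coded 900x1600 above could be
// replaced by the size already chosen for the preview, with the bit rate derived
// from it. Assumes imageDimension has been set in openCamera(); the 0.1
// bits-per-pixel-per-frame factor is a rough H.264 starting point, not a value
// taken from this repository.
private void configureEncoderSize() {
    mWidth = imageDimension.getWidth();
    mHeight = imageDimension.getHeight();
    mBitRate = (int) (0.1f * mWidth * mHeight * FRAME_RATE);
}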

/**
* Releases encoder resources. May be called after partial / failed initialization.
*/
private void releaseEncoder() {
if (mEncoder != null) {
mEncoder.stop();
mEncoder.release();
mEncoder = null;
}
if (mInputSurface != null) {
mInputSurface.release();
mInputSurface = null;
}
if (mMuxer != null) {
// stop() throws if the muxer was never started (no track was ever added)
if (mMuxerStarted) {
mMuxer.stop();
}
mMuxer.release();
mMuxer = null;
}
}

/**
* Extracts all pending data from the encoder.
* <p>
* If endOfStream is not set, this returns when there is no more data to drain. If it
* is set, we send EOS to the encoder, and then iterate until we see EOS on the output.
* Calling this with endOfStream set should be done once, right before stopping the muxer.
*/
private void drainEncoder(boolean endOfStream) {
final int TIMEOUT_USEC = 10000;
Log.d(TAG, "drainEncoder(" + endOfStream + ")");

if (endOfStream) {
Log.d(TAG, "sending EOS to encoder");
mEncoder.signalEndOfInputStream();
}

ByteBuffer[] encoderOutputBuffers = mEncoder.getOutputBuffers();
while (true) {
int encoderStatus = mEncoder.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
// no output available yet
if (!endOfStream) {
break; // out of while
} else {
Log.d(TAG, "no output available, spinning to await EOS");
}
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
// not expected for an encoder
encoderOutputBuffers = mEncoder.getOutputBuffers();
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
// should happen before receiving buffers, and should only happen once
if (mMuxerStarted) {
throw new RuntimeException("format changed twice");
}
MediaFormat newFormat = mEncoder.getOutputFormat();
Log.d(TAG, "encoder output format changed: " + newFormat);

// now that we have the Magic Goodies, start the muxer
mTrackIndex = mMuxer.addTrack(newFormat);
mMuxer.start();
mMuxerStarted = true;
} else if (encoderStatus < 0) {
Log.w(TAG, "unexpected result from encoder.dequeueOutputBuffer: " +
encoderStatus);
// let's ignore it
} else {
ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
if (encodedData == null) {
throw new RuntimeException("encoderOutputBuffer " + encoderStatus +
" was null");
}

if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
// The codec config data was pulled out and fed to the muxer when we got
// the INFO_OUTPUT_FORMAT_CHANGED status. Ignore it.
Log.d(TAG, "ignoring BUFFER_FLAG_CODEC_CONFIG");
mBufferInfo.size = 0;
}

if (mBufferInfo.size != 0) {
if (!mMuxerStarted) {
throw new RuntimeException("muxer hasn't started");
}

// adjust the ByteBuffer values to match BufferInfo (not needed?)
encodedData.position(mBufferInfo.offset);
encodedData.limit(mBufferInfo.offset + mBufferInfo.size);

mMuxer.writeSampleData(mTrackIndex, encodedData, mBufferInfo);
Log.d(TAG, "sent " + mBufferInfo.size + " bytes to muxer");
}

mEncoder.releaseOutputBuffer(encoderStatus, false);

if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
if (!endOfStream) {
Log.w(TAG, "reached end of stream unexpectedly");
} else {
Log.d(TAG, "end of stream reached");
}
break; // out of while
}
}
}
}

}
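
Note that drainEncoder() and releaseEncoder() have no call sites in this commit, so encoded frames are never copied into the muxer and the .mp4 is never finalized. A minimal sketch of how they are typically driven, assuming hypothetical startRecording()/stopRecording() entry points on this class and reusing its mBackgroundHandler (none of this is code from the repository):

// Illustration only, not part of this commit.
public void startRecording() {
    // createCameraPreview() already calls prepareEncoder() and feeds the encoder's
    // input surface from the camera; what is missing is draining its output.
    mBackgroundHandler.post(drainLoop);
}

private final Runnable drainLoop = new Runnable() {
    @Override
    public void run() {
        drainEncoder(false);                       // copy pending output into the muxer
        mBackgroundHandler.postDelayed(this, 33);  // roughly once per frame at 30 fps
    }
};

public void stopRecording() {
    mBackgroundHandler.removeCallbacks(drainLoop);
    drainEncoder(true);   // send end-of-stream and flush the remaining output
    releaseEncoder();     // stops the encoder and finalizes the .mp4
}
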
2 changes: 2 additions & 0 deletions app/src/main/java/com/shajikhan/ladspa/amprack/MainActivity.java
@@ -149,6 +149,7 @@ public class MainActivity extends AppCompatActivity implements ActivityCompat.On

private static final String CHANNEL_ID = "default";
Surface surface_ = null;
SurfaceTexture surfaceTexture;
public boolean headphoneWarning = true;
static Context context;
static MainActivity mainActivity;
@@ -3261,6 +3262,7 @@ public void onSurfaceTextureAvailable(SurfaceTexture surface,
// surface.setDefaultBufferSize(cameraPreviewSize_.getWidth(),
// cameraPreviewSize_.getHeight());
surface_ = new Surface(surface);
surfaceTexture = surface;
// camera2.createCameraPreview();
// camera2.updatePreview();
