// (1) Open the camera (no id argument -> first back-facing camera)
cam = Camera.open();
// (2) Route preview frames into a SurfaceTexture
//     (assumes texID is a SurfaceTexture instance -- TODO confirm; the
//     Camera.setPreviewTexture API takes a SurfaceTexture, not a GL texture id)
cam.setPreviewTexture(texID);
// (3) Read the current parameters, (optionally modify them,) and apply them back
Camera.Parameters parameters = cam.getParameters();
cam.setParameters(parameters);
// (4) Start delivering preview frames
cam.startPreview();
// (5) Stop capture and release the camera so other apps can use it
cam.stopPreview();
cam.release();
try {
// Camera.getNumberOfCameras()/getCameraInfo need API level 9+; the FROYO (API 8)
// guard skips the enumeration on older devices. Scan all cameras and remember
// the id of the last one whose facing matches the requested direction.
if (Build.VERSION.SDK_INT > Build.VERSION_CODES.FROYO) {
int numberOfCameras = Camera.getNumberOfCameras();
Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
for (int i = 0; i < numberOfCameras; i++) {
Camera.getCameraInfo(i, cameraInfo);
if (cameraInfo.facing == facing) {
mDefaultCameraID = i;
mFacing = facing;
}
}
}
// Tear down any previously opened camera before opening a new one.
stopPreview();
if (mCameraDevice != null)
mCameraDevice.release();
// Open the matched camera id, or fall back to the system default.
if (mDefaultCameraID >= 0) {
mCameraDevice = Camera.open(mDefaultCameraID);
} else {
mCameraDevice = Camera.open();
mFacing = Camera.CameraInfo.CAMERA_FACING_BACK; //default: back facing
}
} catch (Exception e) {
// Camera.open throws RuntimeException when the camera is in use or disabled.
LogUtil.e(TAG, "Open Camera Failed!");
e.printStackTrace();
mCameraDevice = null;
return false;
}
/**
 * Attempts to find a preview size that exactly matches the provided width and
 * height (the dimensions of the encoded video). When no exact match exists it
 * falls back to the camera's preferred preview size for video, if any.
 *
 * TODO: should do a best-fit match, e.g.
 * https://github.com/commonsguy/cwac-camera/blob/master/camera/src/com/commonsware/cwac/camera/CameraUtils.java
 */
public static void choosePreviewSize(Camera.Parameters parms, int width, int height) {
    // The requested MPEG size should be no larger than the preferred size and
    // should share its aspect ratio.
    Camera.Size preferred = parms.getPreferredPreviewSizeForVideo();
    if (preferred != null) {
        Log.d(TAG, "Camera preferred preview size for video is " +
                preferred.width + "x" + preferred.height);
    }
    // An exact match wins immediately.
    for (Camera.Size candidate : parms.getSupportedPreviewSizes()) {
        boolean widthMatches = (candidate.width == width);
        boolean heightMatches = (candidate.height == height);
        if (widthMatches && heightMatches) {
            parms.setPreviewSize(width, height);
            return;
        }
    }
    Log.w(TAG, "Unable to set preview size to " + width + "x" + height);
    // No exact match: use the preferred video size when the camera reports one;
    // otherwise leave whatever default preview size is already configured.
    if (preferred != null) {
        parms.setPreviewSize(preferred.width, preferred.height);
    }
}
设置图像格式 mParams.setPreviewFormat(ImageFormat.YV12);
如果需要对视频帧添加滤镜等渲染操作,那么就必须把图像格式设置为RGB格式:
mParams.setPictureFormat(PixelFormat.JPEG);
其它参数参考: public void initCamera(int previewRate) {
if (mCameraDevice == null) {
LogUtil.e(TAG, "initCamera: Camera is not opened!");
return;
}
mParams = mCameraDevice.getParameters();
List supportedPictureFormats = mParams.getSupportedPictureFormats();
for (int fmt : supportedPictureFormats) {
LogUtil.i(TAG, String.format("Picture Format: %x", fmt));
}
mParams.setPictureFormat(PixelFormat.JPEG);
List.Size> picSizes = mParams.getSupportedPictureSizes();
Camera.Size picSz = null;
Collections.sort(picSizes, comparatorBigger);
for (Camera.Size sz : picSizes) {
LogUtil.i(TAG, String.format("Supported picture size: %d x %d", sz.width, sz.height));
if (picSz == null || (sz.width >= mPictureWidth && sz.height >= mPictureHeight)) {
picSz = sz;
}
}
List.Size> prevSizes = mParams.getSupportedPreviewSizes();
Camera.Size prevSz = null;
Collections.sort(prevSizes, comparatorBigger);
for (Camera.Size sz : prevSizes) {
LogUtil.i(TAG, String.format("Supported preview size: %d x %d", sz.width, sz.height));
if (prevSz == null || (sz.width >= mPreferPreviewWidth && sz.height >=
mPreferPreviewHeight)) {
prevSz = sz;
}
}
List frameRates = mParams.getSupportedPreviewFrameRates();
int fpsMax = 0;
for (Integer n : frameRates) {
LogUtil.i(TAG, "Supported frame rate: " + n);
if (fpsMax < n) {
fpsMax = n;
}
}
mParams.setPreviewSize(prevSz.width, prevSz.height);
mParams.setPictureSize(picSz.width, picSz.height);
List focusModes = mParams.getSupportedFocusModes();
if (focusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
mParams.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
}
previewRate = fpsMax;
mParams.setPreviewFrameRate(previewRate); //设置相机预览帧率
// mParams.setPreviewFpsRange(20, 60);
try {
mCameraDevice.setParameters(mParams);
} catch (Exception e) {
e.printStackTrace();
}
mParams = mCameraDevice.getParameters();
Camera.Size szPic = mParams.getPictureSize();
Camera.Size szPrev = mParams.getPreviewSize();
mPreviewWidth = szPrev.width;
mPreviewHeight = szPrev.height;
mPictureWidth = szPic.width;
mPictureHeight = szPic.height;
LogUtil.i(TAG, String.format("Camera Picture Size: %d x %d", szPic.width, szPic.height));
LogUtil.i(TAG, String.format("Camera Preview Size: %d x %d", szPrev.width, szPrev.height));
}
Codecs operate on three kinds of data: compressed data, raw audio data and raw video data. All three kinds of data can be processed using ByteBuffers, but you should use a Surface for raw video data to improve codec performance. Surface uses native video buffers without mapping or copying them to ByteBuffers; thus, it is much more efficient. You normally cannot access the raw video data when using a Surface, but you can use the ImageReader class to access unsecured decoded (raw) video frames. This may still be more efficient than using ByteBuffers, as some native buffers may be mapped into direct ByteBuffers. When using ByteBuffer mode, you can access raw video frames using the Image class and getInput/OutputImage(int).主要流程如下:
// (1) BufferInfo receives per-output-buffer metadata (offset/size/pts/flags).
mBufferInfo = new MediaCodec.BufferInfo();
MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, config.mWidth, config
.mHeight);
// Set some properties. Failing to specify some of these can cause the MediaCodec
// configure() call to throw an unhelpful exception.
// COLOR_FormatSurface: the encoder's input comes from a Surface, not ByteBuffers.
format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
format.setInteger(MediaFormat.KEY_BIT_RATE, config.mVBitRate);
format.setInteger(MediaFormat.KEY_FRAME_RATE, config.mFPS);
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
if (VERBOSE) LogUtil.d(TAG, "format: " + format);
// Create a MediaCodec encoder, and configure it with our format. Get a Surface
// we can use for input and wrap it with a class that handles the EGL work.
try {
mEncoder = MediaCodec.createEncoderByType(MIME_TYPE);
} catch (Exception e) {
// Creation fails when the device has no codec for MIME_TYPE; report via callback.
e.printStackTrace();
if (mOnPrepare != null) {
mOnPrepare.onPrepare(false);
}
return;
}
// CONFIGURE_FLAG_ENCODE marks this codec instance as an encoder.
mEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
(2) dequeue an encoded output buffer and look it up by its status/index
encoderStatus = mEncoder.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
(3) set up buffer info
mBufferInfo.presentationTimeUs = mTimestamp;
// adjust the ByteBuffer values to match BufferInfo (not needed?)
encodedData.position(mBufferInfo.offset);
encodedData.limit(mBufferInfo.offset + mBufferInfo.size);
(4) write to muxer
mMuxer.writeSampleData(mTrackIndex, encodedData, mBufferInfo);
(5) release buffer
mEncoder.releaseOutputBuffer(encoderStatus, false);
详细代码如下:
/**
 * Drains all pending output from the video encoder and forwards it to the muxer.
 * When {@code endOfStream} is true, signals EOS to the encoder first and then
 * spins until the encoder emits BUFFER_FLAG_END_OF_STREAM.
 *
 * @param endOfStream true to flush the encoder and wait for the EOS buffer
 */
protected void drain(boolean endOfStream) {
    if (mWeakMuxer == null) {
        LogUtil.w(TAG, "muxer is unexpectedly null");
        return;
    }
    IMuxer muxer = mWeakMuxer.get();
    // FIX: a WeakReference's referent may have been garbage-collected even
    // though the reference object itself is non-null; the original
    // dereferenced 'muxer' without this check and could NPE below.
    if (muxer == null) {
        LogUtil.w(TAG, "muxer is unexpectedly null");
        return;
    }
    if (VERBOSE) LogUtil.d(TAG, "drain(" + endOfStream + ")");
    if (endOfStream) {
        if (VERBOSE) LogUtil.d(TAG, "sending EOS to encoder");
        mEncoder.signalEndOfInputStream();
    }
    ByteBuffer[] encoderOutputBuffers = mEncoder.getOutputBuffers();
    while (true) {
        int encoderStatus = mEncoder.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
        if (VERBOSE) LogUtil.d(TAG, "drain: status = " + encoderStatus);
        if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
            // no output available yet
            if (!endOfStream) {
                break; // out of while
            } else {
                if (VERBOSE) LogUtil.d(TAG, "no output available, spinning to await EOS");
            }
        } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            // not expected for an encoder
            encoderOutputBuffers = mEncoder.getOutputBuffers();
        } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            // Happens before any buffers arrive, and should only happen once.
            MediaFormat newFormat = mEncoder.getOutputFormat();
            LogUtil.d(TAG, "encoder output format changed: " + newFormat);
            // Register the track; the muxer starts once all tracks are added.
            mTrackIndex = muxer.addTrack(IMuxer.TRACK_VIDEO_ID, newFormat);
            if (!muxer.start()) {
                // Wait (polling) for the other track(s) to be added elsewhere.
                synchronized (muxer) {
                    while (!muxer.isStarted() && !endOfStream) {
                        try {
                            LogUtil.d(TAG, "drain: wait...");
                            muxer.wait(100);
                        } catch (final InterruptedException e) {
                            // FIX: restore interrupt status instead of
                            // silently swallowing it.
                            Thread.currentThread().interrupt();
                            break;
                        }
                    }
                }
            }
        } else if (encoderStatus < 0) {
            LogUtil.w(TAG, "unexpected result from encoder.dequeueOutputBuffer: " +
                    encoderStatus);
            // let's ignore it
        } else {
            ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
            if (encodedData == null) {
                throw new RuntimeException("encoderOutputBuffer " + encoderStatus +
                        " was null");
            }
            if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                // The codec config data was pulled out and fed to the muxer when we got
                // the INFO_OUTPUT_FORMAT_CHANGED status. Ignore it.
                if (VERBOSE) LogUtil.d(TAG, "ignoring BUFFER_FLAG_CODEC_CONFIG");
                mBufferInfo.size = 0;
            }
            if (mBufferInfo.size != 0) {
                if (!muxer.isStarted()) {
                    // Best-effort: drop output until the muxer is running.
                    return;
                }
                mBufferInfo.presentationTimeUs = mTimestamp;
                // adjust the ByteBuffer values to match BufferInfo (not needed?)
                encodedData.position(mBufferInfo.offset);
                encodedData.limit(mBufferInfo.offset + mBufferInfo.size);
                muxer.writeSampleData(mTrackIndex, encodedData, mBufferInfo);
                if (VERBOSE) {
                    LogUtil.d(TAG, "sent " + mBufferInfo.size + " bytes to muxer, ts=" +
                            mBufferInfo.presentationTimeUs);
                }
            }
            mEncoder.releaseOutputBuffer(encoderStatus, false);
            if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                if (!endOfStream) {
                    LogUtil.w(TAG, "reached end of stream unexpectedly");
                } else {
                    if (VERBOSE) LogUtil.d(TAG, "end of stream reached");
                }
                break; // out of while
            }
        }//if encode status
    }//while
}
mEncoder.signalEndOfInputStream();
/**
 * Feeds one chunk of raw audio data (or an EOS marker when {@code length <= 0})
 * into the MediaCodec encoder's input queue.
 *
 * @param buffer             raw data to enqueue; may be null when only signalling EOS
 * @param length             number of valid bytes; a non-positive value sends EOS
 * @param presentationTimeUs presentation timestamp for this chunk, in microseconds
 */
protected void encode(final ByteBuffer buffer, final int length, final long
        presentationTimeUs) {
    if (!mRunning || mEncoder == null || !mCodecPrepared) {
        LogUtil.w(TAG, "encode: Audio encode thread is not running yet.");
        return;
    }
    final ByteBuffer[] inputBuffers = mEncoder.getInputBuffers(); //illegal state
    // exception
    while (mRunning) {
        final int index = mEncoder.dequeueInputBuffer(TIMEOUT_USEC);
        if (index == MediaCodec.INFO_TRY_AGAIN_LATER) {
            // No input buffer free yet; dequeueInputBuffer already waited up to
            // TIMEOUT_USEC, so just loop and try again while still running.
            LogUtil.d(TAG, "encode: wait for MediaCodec encoder is ready to encode");
            continue;
        }
        if (index < 0) {
            // Other negative status codes: ignore and retry.
            continue;
        }
        final ByteBuffer dst = inputBuffers[index];
        dst.clear();
        if (buffer != null) {
            dst.put(buffer);
        }
        // if (DEBUG) LogUtil.v(TAG, "encode:queueInputBuffer");
        if (length <= 0) {
            // Non-positive length means "no more data": send EOS.
            mIsEOS = true;
            LogUtil.i(TAG, "send BUFFER_FLAG_END_OF_STREAM");
            mEncoder.queueInputBuffer(index, 0, 0,
                    presentationTimeUs, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
        } else {
            mEncoder.queueInputBuffer(index, 0, length,
                    presentationTimeUs, 0);
        }
        break;
    }
}
mMuxer = new MediaMuxer(mOutputPath, format);
(2)添加音频/视频流
/**
 * Registers an encoder's output format with the muxer and records which
 * logical track (audio/video) has been added.
 *
 * @param trackID one of {@code TRACK_VIDEO_ID} / {@code TRACK_AUDIO_ID}
 * @param format  the encoder's negotiated output {@link MediaFormat}
 * @return the track index assigned by the underlying MediaMuxer; a negative
 *         value indicates an error
 * @throws IllegalStateException if the muxer has already been started
 */
public synchronized int addTrack(int trackID, final MediaFormat format) {
    if (mIsStarted) {
        throw new IllegalStateException("muxer already started");
    }
    final int trackIndex = mMuxer.addTrack(format);
    if (trackID == TRACK_AUDIO_ID) {
        LogUtil.d(TAG, "addTrack: add audio track = " + trackIndex);
        mIsAudioAdded = true;
    } else if (trackID == TRACK_VIDEO_ID) {
        LogUtil.d(TAG, "addTrack: add video track = " + trackIndex);
        mIsVideoAdded = true;
    }
    return trackIndex;
}
(3)开始
/**
 * Requests that recording start. The underlying MediaMuxer is only started
 * once every expected track (audio and/or video) has been added.
 *
 * @return true when the muxer is ready to write
 */
@Override
public synchronized boolean start() {
    LogUtil.v(TAG, "readyStart:");
    // Each stream is "ready" when its added-flag matches whether it is expected.
    final boolean audioReady = (mHasAudio == mIsAudioAdded);
    final boolean videoReady = (mHasVideo == mIsVideoAdded);
    if (audioReady && videoReady) {
        mMuxer.start();
        mIsStarted = true;
        if (mOnPrepared != null) {
            mOnPrepared.onPrepare(true);
        }
        LogUtil.v(TAG, "MediaMuxer started:");
    }
    return mIsStarted;
}
(4)写入数据
/**
 * Writes one encoded sample to the muxer. Samples arriving before the muxer
 * has started are silently dropped (best-effort, matching encoder startup).
 *
 * @param trackIndex track index returned by addTrack
 * @param byteBuf    encoded sample data
 * @param bufferInfo sample metadata (offset/size/pts/flags)
 */
@Override
public synchronized void writeSampleData(final int trackIndex, final ByteBuffer byteBuf,
                                         final MediaCodec.BufferInfo bufferInfo) {
    if (!mIsStarted) {
        return;
    }
    mMuxer.writeSampleData(trackIndex, byteBuf, bufferInfo);
}
av_opt_set(c->priv_data, "preset", "superfast", 0);
bitrate,fps等条件相同情况: