I. Preface
For a while I was tied up with a new project and hadn't published anything; now that I finally have some free time, I'm glad to share how to do video encoding for live streaming. Since iOS 8.0 you can use the system's VideoToolbox framework for hardware encoding and decoding, and its CPU usage is much lower than a software solution such as ffmpeg; it's the framework I use in my own live-streaming work. Without further ado, let's walk through the steps and the code.
II. Hardware Video Encoding
1. The encoder class DDHardwareVideoEncoder.h. The class conforms to DDVideoEncoding (the abstract encoder protocol). DDLiveVideoConfiguration is the video configuration object, holding video-related properties such as frame rate, capture orientation, and resolution; a hedged sketch of it follows the header below. The header looks like this:
#import "DDVideoEncoding.h"
@interface DDHardwareVideoEncoder : NSObject <DDVideoEncoding>
#pragma mark - Initializer
///=============================================================================
/// @name Initializer
///=============================================================================
- (nullable instancetype)init UNAVAILABLE_ATTRIBUTE;
+ (nullable instancetype)new UNAVAILABLE_ATTRIBUTE;
@property (nonatomic, strong, nonnull) DDLiveVideoConfiguration *configuration;
// Whether to force-insert a keyframe
@property (assign, nonatomic) BOOL isInsertKeyFrame;
@end
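DDLiveVideoConfiguration itself isn't shown in this post. Based purely on the properties the encoder reads from it, a minimal sketch might look like this; the property names come from their usage in the code below, while the comments and suggested values are my own assumptions:
#import <UIKit/UIKit.h>
/// Minimal sketch of the video configuration, reconstructed from how the encoder uses it.
@interface DDLiveVideoConfiguration : NSObject
@property (nonatomic, assign) CGSize videoSize; // output resolution, e.g. 720x1280
@property (nonatomic, assign) NSUInteger videoFrameRate; // expected fps, e.g. 24
@property (nonatomic, assign) NSUInteger videoBitRate; // average bitrate in bps
@property (nonatomic, assign) NSUInteger videoMaxBitRate; // cap fed to kVTCompressionPropertyKey_DataRateLimits
@property (nonatomic, assign) NSUInteger videoMaxKeyframeInterval; // GOP length in frames, e.g. 2 * videoFrameRate
@end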
2. The abstract encoder protocol, DDVideoEncoding.h, is implemented as follows. DDVideoFrame is the container for each successfully encoded frame; it carries the frame's data (raw H.264), plus properties such as videoFrameRate (frame rate), frameCount (frame number), and timestamp. A hedged sketch of DDVideoFrame follows the protocol below.
#import <Foundation/Foundation.h>
#import <CoreVideo/CoreVideo.h>
#import "DDVideoFrame.h"
#import "DDLiveVideoConfiguration.h"
@protocol DDVideoEncoding;
/// Callback fired after the encoder finishes a frame
@protocol DDVideoEncodingDelegate <NSObject>
@required
- (void)videoEncoder:(nullable id<DDVideoEncoding>)encoder videoFrame:(nullable DDVideoFrame *)frame;
@end
/// Abstract encoder interface
@protocol DDVideoEncoding <NSObject>
@required
- (void)encodeVideoData:(nullable CVImageBufferRef)pixelBuffer timeStamp:(uint64_t)timeStamp;
- (void)stopEncoder;
@optional
@property (nonatomic, assign) NSInteger videoBitRate;
- (nullable instancetype)initWithVideoStreamConfiguration:(nullable DDLiveVideoConfiguration*)configuration;
- (void)setDelegate:(nullable id<DDVideoEncodingDelegate>)delegate;
@end
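Likewise, DDVideoFrame isn't shown here. Judging from the fields the encoder fills in inside its output callback, a minimal sketch could be (again reconstructed from usage, not the author's actual file):
#import <Foundation/Foundation.h>
/// Minimal sketch of the encoded-frame container, reconstructed from usage.
@interface DDVideoFrame : NSObject
@property (nonatomic, strong, nullable) NSData *data; // Annex-B H.264 bytes (start code + NALU)
@property (nonatomic, assign) uint64_t timestamp; // capture timestamp in ms
@property (nonatomic, assign) BOOL isKeyFrame; // YES for IDR frames
@property (nonatomic, assign) NSInteger frameCount; // running frame number
@property (nonatomic, assign) NSUInteger videoFrameRate;
@property (nonatomic, assign) NSInteger videoWidth;
@property (nonatomic, assign) NSInteger videoHeight;
@end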
3. Below is the concrete implementation, DDHardwareVideoEncoder.m:
#import "DDHardwareVideoEncoder.h"
#import <VideoToolbox/VideoToolbox.h>
#import <UIKit/UIKit.h>
@interface DDHardwareVideoEncoder (){
VTCompressionSessionRef compressionSession;
NSInteger frameCount;
NSData *sps;
NSData *pps;
}
@property (nonatomic, weak) id<DDVideoEncodingDelegate> h264Delegate;
@property (nonatomic) BOOL isBackGround;
@property (nonatomic) NSInteger currentVideoBitRate;
@property (assign, nonatomic) uint64_t lastTimestamp;
@end
@implementation DDHardwareVideoEncoder
#pragma mark -- LifeCycle
- (instancetype)initWithVideoStreamConfiguration:(DDLiveVideoConfiguration *)configuration{
if(self = [super init]){
_configuration = configuration;
[self initCompressionSession];
[[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(willEnterBackground:) name:UIApplicationWillResignActiveNotification object:nil];
[[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(willEnterForeground:) name:UIApplicationDidBecomeActiveNotification object:nil];
}
return self;
}
- (void)updateConfiguration {
[self initCompressionSession];
// Reset the cached parameter sets so the new session's SPS/PPS get re-fetched
self->sps = nil;
self->pps = nil;
}
- (void)clearCompressionSession {
if(compressionSession){
VTCompressionSessionCompleteFrames(compressionSession, kCMTimeInvalid);
VTCompressionSessionInvalidate(compressionSession);
CFRelease(compressionSession);
compressionSession = NULL;
}
}
- (void)initCompressionSession{
[self clearCompressionSession];
[self configCompressionSession];
}
- (void)configCompressionSession {
// VideoCompressonOutputCallback is invoked once a video frame has been encoded
OSStatus status = VTCompressionSessionCreate(NULL, (int32_t)_configuration.videoSize.width, (int32_t)_configuration.videoSize.height, kCMVideoCodecType_H264, NULL, NULL, NULL, VideoCompressonOutputCallback, (__bridge void *)self, &compressionSession);
if(status != noErr){
return;
}
_currentVideoBitRate = _configuration.videoBitRate;
// Keyframe (GOP) interval: max frames between keyframes, plus a max duration in seconds
status = VTSessionSetProperty(compressionSession, kVTCompressionPropertyKey_MaxKeyFrameInterval,(__bridge CFTypeRef)@(_configuration.videoMaxKeyframeInterval));
status = VTSessionSetProperty(compressionSession, kVTCompressionPropertyKey_MaxKeyFrameIntervalDuration,(__bridge CFTypeRef)@(_configuration.videoMaxKeyframeInterval));
// Expected frame rate
status = VTSessionSetProperty(compressionSession, kVTCompressionPropertyKey_ExpectedFrameRate, (__bridge CFTypeRef)@(_configuration.videoFrameRate));
// Average bitrate; kVTCompressionPropertyKey_AverageBitRate is in bits per second
status = VTSessionSetProperty(compressionSession, kVTCompressionPropertyKey_AverageBitRate, (__bridge CFTypeRef)@(_configuration.videoBitRate)); // bps
// Hard cap; kVTCompressionPropertyKey_DataRateLimits takes a [bytes, seconds] pair
status = VTSessionSetProperty(compressionSession, kVTCompressionPropertyKey_DataRateLimits, (__bridge CFArrayRef)@[@(_configuration.videoMaxBitRate), @1]); // Bps over a 1s window
// Real-time encode output (keeps latency down)
status = VTSessionSetProperty(compressionSession, kVTCompressionPropertyKey_RealTime, kCFBooleanTrue);
status = VTSessionSetProperty(compressionSession, kVTCompressionPropertyKey_ProfileLevel, kVTProfileLevel_H264_Baseline_AutoLevel);
// status = VTSessionSetProperty(compressionSession, kVTCompressionPropertyKey_AllowFrameReordering, kCFBooleanFalse);
// status = VTSessionSetProperty(compressionSession, kVTCompressionPropertyKey_H264EntropyMode, kVTH264EntropyMode_CAVLC);
VTCompressionSessionPrepareToEncodeFrames(compressionSession);
}
- (void)setVideoBitRate:(NSInteger)videoBitRate{
if(_isBackGround) return;
// Apply the new average bitrate (bps), then refresh the byte-rate cap
VTSessionSetProperty(compressionSession, kVTCompressionPropertyKey_AverageBitRate, (__bridge CFTypeRef)@(videoBitRate));
VTSessionSetProperty(compressionSession, kVTCompressionPropertyKey_DataRateLimits, (__bridge CFArrayRef)@[@(_configuration.videoMaxBitRate), @1]);
_currentVideoBitRate = videoBitRate;
}
-(NSInteger)videoBitRate{
return _currentVideoBitRate;
}
- (void)dealloc{
[self clearCompressionSession];
[[NSNotificationCenter defaultCenter] removeObserver:self];
}
#pragma mark -- DDVideoEncoder
- (void)encodeVideoData:(CVImageBufferRef)pixelBuffer timeStamp:(uint64_t)timeStamp {
if(_isBackGround) return;
// Drop frames that arrive faster than the configured frame rate allows
uint64_t interval = timeStamp - self.lastTimestamp;
if (interval < 1000 / (uint64_t)_configuration.videoFrameRate) {
return;
}
self.lastTimestamp = timeStamp;
frameCount ++;
CMTime presentationTimeStamp = CMTimeMake(frameCount, 1000);
VTEncodeInfoFlags flags;
CMTime duration = CMTimeMake(1, (int32_t)_configuration.videoFrameRate);
NSDictionary *properties = nil;
if(frameCount % (int32_t)_configuration.videoMaxKeyframeInterval == 0 || self.isInsertKeyFrame == YES){
properties = @{(__bridge NSString *)kVTEncodeFrameOptionKey_ForceKeyFrame: @YES};
}
NSNumber *timeNumber = @(timeStamp);
// Encode; the output callback fires once the frame is compressed
OSStatus statusCode = VTCompressionSessionEncodeFrame(compressionSession, pixelBuffer, presentationTimeStamp, duration, (__bridge CFDictionaryRef)properties, (__bridge_retained void *)timeNumber, &flags);
if (frameCount > 262143) { // wrap-around threshold; choose it to match your backend's requirements
frameCount = 0;
}
self.isInsertKeyFrame = NO;
if (statusCode != noErr) {
VTCompressionSessionInvalidate(compressionSession);
CFRelease(compressionSession);
compressionSession = NULL;
return;
}
}
- (void)stopEncoder{
VTCompressionSessionCompleteFrames(compressionSession, kCMTimeIndefinite);
}
- (void)setDelegate:(id)delegate{
_h264Delegate = delegate;
}
#pragma mark -- NSNotification
- (void)willEnterBackground:(NSNotification*)notification{
_isBackGround = YES;
}
- (void)willEnterForeground:(NSNotification*)notification{
[self initCompressionSession];
_isBackGround = NO;
}
#pragma mark -- VideoCallBack
// Output callback: converts the encoded CMSampleBuffer into an Annex-B H.264 stream
static void VideoCompressonOutputCallback(void *VTref, void *VTFrameRef, OSStatus status, VTEncodeInfoFlags infoFlags, CMSampleBufferRef sampleBuffer) {
if(!sampleBuffer) return;
CFArrayRef array = CMSampleBufferGetSampleAttachmentsArray(sampleBuffer, true);
if(!array) return;
CFDictionaryRef dic = (CFDictionaryRef)CFArrayGetValueAtIndex(array, 0);
if(!dic) return;
BOOL keyframe = !CFDictionaryContainsKey(dic, kCMSampleAttachmentKey_NotSync);
uint64_t timeStamp = [((__bridge_transfer NSNumber*)VTFrameRef) longLongValue];
DDHardwareVideoEncoder *videoEncoder = (__bridge DDHardwareVideoEncoder *)VTref;
if(status != noErr){
return;
}
if (keyframe && !videoEncoder->sps)
{
// The format description carries the SPS/PPS parameter sets
CMFormatDescriptionRef format = CMSampleBufferGetFormatDescription(sampleBuffer);
size_t sparameterSetSize, sparameterSetCount;
const uint8_t *sparameterSet;
OSStatus statusCode = CMVideoFormatDescriptionGetH264ParameterSetAtIndex(format, 0, &sparameterSet, &sparameterSetSize, &sparameterSetCount, 0 );
if (statusCode == noErr)
{
size_t pparameterSetSize, pparameterSetCount;
const uint8_t *pparameterSet;
OSStatus statusCode = CMVideoFormatDescriptionGetH264ParameterSetAtIndex(format, 1, &pparameterSet, &pparameterSetSize, &pparameterSetCount, 0);
if (statusCode == noErr)
{
videoEncoder->sps = [NSData dataWithBytes:sparameterSet length:sparameterSetSize];
videoEncoder->pps = [NSData dataWithBytes:pparameterSet length:pparameterSetSize];
}
}
}
// The encoded payload
CMBlockBufferRef dataBuffer = CMSampleBufferGetDataBuffer(sampleBuffer);
size_t length, totalLength;
char *dataPointer;
OSStatus statusCodeRet = CMBlockBufferGetDataPointer(dataBuffer, 0, &length, &totalLength, &dataPointer);
if (statusCodeRet == noErr) {
size_t bufferOffset = 0;
static const int AVCCHeaderLength = 4;
while (bufferOffset < totalLength - AVCCHeaderLength) {
// Read the NAL unit length
uint32_t NALUnitLength = 0;
memcpy(&NALUnitLength, dataPointer + bufferOffset, AVCCHeaderLength);
NALUnitLength = CFSwapInt32BigToHost(NALUnitLength);
DDVideoFrame *videoFrame = [[DDVideoFrame alloc] init];
videoFrame.timestamp = timeStamp;
videoFrame.isKeyFrame = keyframe;
videoFrame.frameCount = videoEncoder->frameCount;
videoFrame.videoFrameRate = videoEncoder.configuration.videoFrameRate;
videoFrame.videoWidth = videoEncoder.configuration.videoSize.width;
videoFrame.videoHeight = videoEncoder.configuration.videoSize.height;
NSData *h264Data = [[NSData alloc] initWithBytes:(dataPointer + bufferOffset + AVCCHeaderLength) length:NALUnitLength];
// Downstream packaging depends on the data format your backend expects
NSMutableData *mData = [NSMutableData data];
// Annex-B start code prefixed to every NALU (and to the SPS/PPS)
static const char startCode[] = "\x00\x00\x00\x01";
NSData *ByteHeader = [NSData dataWithBytes:startCode length:(sizeof startCode) - 1];
if (keyframe) {
[mData appendData:ByteHeader];
[mData appendData:videoEncoder->sps];
[mData appendData:ByteHeader];
[mData appendData:videoEncoder->pps];
}
[mData appendData:ByteHeader];
[mData appendData:h264Data];
videoFrame.data = mData;
if(videoEncoder.h264Delegate && [videoEncoder.h264Delegate respondsToSelector:@selector(videoEncoder:videoFrame:)]){
[videoEncoder.h264Delegate videoEncoder:videoEncoder videoFrame:videoFrame]; // the delegate packages the frame per the backend's data format and sends it out
}
bufferOffset += AVCCHeaderLength + NALUnitLength;
}
}
}
@end
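Finally, here is a hedged usage sketch showing how the pieces hook together end to end. The AVCaptureSession setup is abbreviated, DDEncoderDemo and its configuration values are illustrative assumptions, and only DDHardwareVideoEncoder, DDLiveVideoConfiguration, DDVideoFrame, and the delegate protocol come from the code above:
#import <AVFoundation/AVFoundation.h>
#import <QuartzCore/QuartzCore.h>
#import "DDHardwareVideoEncoder.h"
@interface DDEncoderDemo : NSObject <AVCaptureVideoDataOutputSampleBufferDelegate, DDVideoEncodingDelegate>
@property (nonatomic, strong) DDHardwareVideoEncoder *encoder;
@end
@implementation DDEncoderDemo
- (instancetype)init {
if (self = [super init]) {
DDLiveVideoConfiguration *config = [[DDLiveVideoConfiguration alloc] init];
config.videoSize = CGSizeMake(720, 1280);
config.videoFrameRate = 24;
config.videoBitRate = 800 * 1024; // bps
config.videoMaxBitRate = 1000 * 1024 / 8; // bytes per 1s window for DataRateLimits
config.videoMaxKeyframeInterval = 48; // one keyframe every 2s at 24 fps
_encoder = [[DDHardwareVideoEncoder alloc] initWithVideoStreamConfiguration:config];
[_encoder setDelegate:self];
}
return self;
}
// Feed every camera frame to the encoder (set this object as the
// AVCaptureVideoDataOutput sample-buffer delegate).
- (void)captureOutput:(AVCaptureOutput *)output didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
CVImageBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
uint64_t nowMs = (uint64_t)(CACurrentMediaTime() * 1000); // ms timestamp used for frame-rate throttling
[self.encoder encodeVideoData:pixelBuffer timeStamp:nowMs];
}
// Receives Annex-B H.264 frames; wrap frame.data in whatever your backend
// expects (e.g. FLV/RTMP tags) and send it from here.
- (void)videoEncoder:(id<DDVideoEncoding>)encoder videoFrame:(DDVideoFrame *)frame {
NSLog(@"encoded %lu bytes, keyframe: %d", (unsigned long)frame.data.length, frame.isKeyFrame);
}
@end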