
iOS video and audio capture, with preview

This article shows how to capture video and audio on iOS and how to preview the camera. The preview uses the system-provided AVCaptureVideoPreviewLayer attached to a UIView, video capture uses AVCaptureSession, and audio capture uses AudioQueue, because configuring audio parameters through AVCaptureSession is troublesome. The full code follows.
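For orientation before the listings, here is a minimal sketch (not part of the original code) of how the pieces are wired together, e.g. from a UIViewController. It assumes the get_list_session: and get_AQRecorderState accessors defined in the .m file are exposed in the header, that stream_id 0 is the back camera as assigned in device_init, and that self.view is the preview view. Note that device_init only configures capture; the caller still has to call startRunning on the session and AudioQueueStart on the queue.

//hypothetical call site, e.g. in a UIViewController
Lvs_Ios_Device_Collection *collection = [[Lvs_Ios_Device_Collection alloc] init];
Lvs_Ios_Device_Preview *preview = [[Lvs_Ios_Device_Preview alloc] init];

//enumerate devices and set up capture
Module_Info *moduleInfo = nil;
int streamCount = 0;
[collection device_init:&moduleInfo andCurrentLength:&streamCount];

//preview and start the back camera (stream_id 0)
AVCaptureSession *session = [collection get_list_session:0];
[preview ShowPreview:session andImageView:(void *)self.view];
[session startRunning];

//start audio capture
AQRecorderState *aq = [collection get_AQRecorderState];
AudioQueueStart(aq->mQueue, NULL);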

//
//  Lvs_Ios_Device_Collection.h
//  LvsIos
//
//  Created by mx on 16/9/5.
//  Copyright © 2016 lvs.zwg. All rights reserved.
//

#import <AVFoundation/AVFoundation.h>
#import <Foundation/Foundation.h>
#import <AudioToolbox/AudioToolbox.h>
#import "Lvs_Info.h"

//Audio capture uses AudioQueue

//Adopt the delegate protocols for video and audio data output; the protocol names and their callback methods are fixed by the framework
@interface Lvs_Ios_Device_Collection : NSObject<AVCaptureVideoDataOutputSampleBufferDelegate,AVCaptureAudioDataOutputSampleBufferDelegate>


//Number of audio capture buffers (three lets the system fill one buffer while the callback drains another, with one spare)
#define kNumberBuffers 3

//Audio capture state (AudioQueue)
typedef struct AQRecorderState {
    AudioStreamBasicDescription  mDataFormat;                          //format
    AudioQueueRef                mQueue;                               //audio queue
    AudioQueueBufferRef          mBuffers[kNumberBuffers];             //data buffers
    UInt32                       bufferByteSize;                       //size of each buffer in bytes
    Float64                      audio_seconde_time;                   //seconds of audio buffered per callback (must be long enough for downstream processing, otherwise data is dropped; tentatively 0.5 s)
    Module_StreamInfo *          ModuleStreamInfo_Out_Audio_data;      //output stream info (how the caller fetches the captured data)
} AQRecorderState;


//Delegate method for video data output
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection;

//init
- (int)device_init: (Module_Info **) ModuleInfo_Current andCurrentLength: (int *) CurrentLength;
//uinit
- (void)device_uinit: (Module_Info *) ModuleInfo_Current;

//write
- (int)device_write: (Module_Info *) ModuleInfo_Current andUpperLength: (int) UpperLength
              andModuleInfo_Next_video: (Module_Info *) ModuleInfo_Next_video andNextLength_video: (int) NextLength_video
              andModuleInfo_Next_audio: (Module_Info *) ModuleInfo_Next_audio andNextLength_audio: (int)NextLength_audio;


@end
//
//  Lvs_Ios_Device_Collection.m
//  LvsIos
//
//  Created by mx on 16/9/5.
//  Copyright © 2016 lvs. All rights reserved.
//

#import "Lvs_Ios_Device_Collection.h"


@implementation Lvs_Ios_Device_Collection

int m_stream_num = 3;                                        //only three streams: back camera, front camera, microphone (audio actually uses AudioQueue; the slot is just reserved here)
//video
std::map<int,AVCaptureSession *> m_list_capture_session;     //sessions keyed by stream_id
AVCaptureConnection *videoCaptureConnection_back = nil;      //connection delivering back-camera video data
AVCaptureConnection *videoCaptureConnection_front = nil;     //connection delivering front-camera video data
AVCaptureConnection *audioCaptureConnection = nil;           //connection delivering audio data
long long up_time_video_front = 0;                           //timestamp of the previous front-camera frame
long long up_time_video_back = 0;                            //timestamp of the previous back-camera frame
//audio
static AQRecorderState m_aqData = {0};                       //audio capture state (AudioQueue)
long long up_time_audio = 0;                                 //timestamp of the previous audio buffer

//addresses used to hand captured data to the caller
Module_Info * m_device_module_info_collection;               //0: back camera 1: front camera 2: microphone
int m_device_module_length_collection;

//Look up the session for a given stream_id
-(AVCaptureSession *)get_list_session: (int)stream_id
{
    return m_list_capture_session[stream_id];
}

-(AQRecorderState *)get_AQRecorderState
{
    return &m_aqData;
}

//Get a connection (type 0->video, 1->audio) (backorfront 0->back, 1->front)
-(AVCaptureConnection *)get_connection: (int)type andbackorfront: (int) backorfront
{
    //video
    if (type == 0)
    {
        if (backorfront == 0)
        {
            return videoCaptureConnection_back;
        }
        else if(backorfront == 1)
        {
            return videoCaptureConnection_front;
        }
    }
    //audio
    else if (type == 1)
    {
        return audioCaptureConnection;
    }
    return nil;
}

//Delegate method for video data output
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    //NSLog(@"delegate");
    if (connection == videoCaptureConnection_back) //0
    {
        //NSLog(@"videoCaptureConnection_back");
        
        CVPixelBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
        
        //Lock (also guards against a NULL image buffer)
        if (imageBuffer != NULL && CVPixelBufferLockBaseAddress(imageBuffer, 0) == kCVReturnSuccess)
        {
            //plane base addresses (NV12: plane 0 = Y, plane 1 = interleaved CbCr)
            UInt8 *bufferPtr_y = (UInt8*)CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0);
            UInt8 *bufferPtr_uv = (UInt8*)CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 1);
            //buffer size
            size_t buffSize = CVPixelBufferGetDataSize(imageBuffer);   //this value is unreliable; compute the size yourself
            //width
            size_t width = CVPixelBufferGetWidth(imageBuffer);
            //height
            size_t height = CVPixelBufferGetHeight(imageBuffer);
            //plane count
            size_t PlaneCount = CVPixelBufferGetPlaneCount(imageBuffer);
            //NSLog(@"buffSize %zu",buffSize);
            //NSLog(@"width %zu",width);
            //NSLog(@"height %zu",height);
            //NSLog(@"PlaneCount %zu",PlaneCount);
            
            //milliseconds since 1970
            NSTimeInterval nowtime = [[NSDate date] timeIntervalSince1970]*1000;
            long long theTime = [[NSNumber numberWithDouble:nowtime] longLongValue];
            
            if(theTime > up_time_video_back)
            {
                //copy the data (assumes bytes-per-row equals the width; see the note after this method)
                m_device_module_info_collection[0].ModuleStreamInfo_Out->ActualLen = width * height + width * height / 2;
                memcpy(m_device_module_info_collection[0].ModuleStreamInfo_Out->Buf, bufferPtr_y, width * height);
                memcpy(m_device_module_info_collection[0].ModuleStreamInfo_Out->Buf + width * height, bufferPtr_uv, width * height / 2);
                
                m_device_module_info_collection[0].ModuleStreamInfo_Out->VideoInfo.pts = theTime;
                m_device_module_info_collection[0].ModuleStreamInfo_Out->VideoInfo.dts = theTime;
                up_time_video_back = theTime;
            }
            else
            {
                m_device_module_info_collection[0].ModuleStreamInfo_Out->ActualLen = 0;
            }
            
            //Unlock (only after a successful lock)
            CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
        }
    }
    else if(connection == videoCaptureConnection_front)  //1
    {
        //NSLog(@"videoCaptureConnection_front");
        
        CVPixelBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
        
        //Lock (also guards against a NULL image buffer)
        if (imageBuffer != NULL && CVPixelBufferLockBaseAddress(imageBuffer, 0) == kCVReturnSuccess)
        {
            //plane base addresses (NV12: plane 0 = Y, plane 1 = interleaved CbCr)
            UInt8 *bufferPtr_y = (UInt8*)CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0);
            UInt8 *bufferPtr_uv = (UInt8*)CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 1);
            //buffer size
            size_t buffSize = CVPixelBufferGetDataSize(imageBuffer);  //this value is unreliable; compute the size yourself
            //width
            size_t width = CVPixelBufferGetWidth(imageBuffer);
            //height
            size_t height = CVPixelBufferGetHeight(imageBuffer);
            //plane count
            size_t PlaneCount = CVPixelBufferGetPlaneCount(imageBuffer);
            //NSLog(@"buffSize %zu",buffSize);
            //NSLog(@"width %zu",width);
            //NSLog(@"height %zu",height);
            //NSLog(@"PlaneCount %zu",PlaneCount);
            
            //milliseconds since 1970
            NSTimeInterval nowtime = [[NSDate date] timeIntervalSince1970]*1000;
            long long theTime = [[NSNumber numberWithDouble:nowtime] longLongValue];
            
            if (theTime > up_time_video_front) {
                //copy the data (same bytes-per-row assumption as the back-camera branch)
                m_device_module_info_collection[1].ModuleStreamInfo_Out->ActualLen = width * height + width * height / 2;
                memcpy(m_device_module_info_collection[1].ModuleStreamInfo_Out->Buf, bufferPtr_y, width * height);
                memcpy(m_device_module_info_collection[1].ModuleStreamInfo_Out->Buf + width * height, bufferPtr_uv, width * height / 2);
                
                m_device_module_info_collection[1].ModuleStreamInfo_Out->VideoInfo.pts = theTime;
                m_device_module_info_collection[1].ModuleStreamInfo_Out->VideoInfo.dts = theTime;
                up_time_video_front = theTime;
            }
            else
            {
                m_device_module_info_collection[1].ModuleStreamInfo_Out->ActualLen = 0;
            }
            
            //Unlock (only after a successful lock)
            CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
        }
    }
    else if(connection == audioCaptureConnection)
    {
        NSLog(@"audioCaptureConnection");
    }
    else
    {
        NSLog(@"otherCaptureConnection");
    }
}
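A note on the size arithmetic above: in NV12 (kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange), plane 0 carries width × height bytes of luma and plane 1 carries width × height / 2 bytes of interleaved CbCr, so a full frame is width × height × 3/2 bytes, i.e. 460800 bytes for 640×480. CVPixelBufferGetDataSize can report more than that because each row may be padded for alignment, which is why the code computes the length itself. Strictly speaking the two memcpy calls assume CVPixelBufferGetBytesPerRowOfPlane equals the width; on a device that pads rows, the copy would need to go row by row.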

//Audio capture data callback
static void lvsAudioQueueInputCallback(void                        *aqData,
                               AudioQueueRef                       inAQ,
                               AudioQueueBufferRef                 inBuffer,
                               const AudioTimeStamp                *inStartTime,
                               UInt32                              inNumPackets,
                               const AudioStreamPacketDescription  *inPacketDesc)
{
    AQRecorderState *pAqData = (AQRecorderState *) aqData;
    
    //milliseconds since 1970
    NSTimeInterval nowtime = [[NSDate date] timeIntervalSince1970]*1000;
    long long theTime = [[NSNumber numberWithDouble:nowtime] longLongValue];
    
    if (inNumPackets > 0)
    {
        if (theTime > up_time_audio)
        {
            pAqData->ModuleStreamInfo_Out_Audio_data->ActualLen = inBuffer->mAudioDataByteSize;
            //copy the data
            memcpy(pAqData->ModuleStreamInfo_Out_Audio_data->Buf, (char*)inBuffer->mAudioData, pAqData->ModuleStreamInfo_Out_Audio_data->ActualLen);
            
            pAqData->ModuleStreamInfo_Out_Audio_data->AudioInfo.pts = theTime;
            pAqData->ModuleStreamInfo_Out_Audio_data->AudioInfo.dts = theTime;
            up_time_audio = theTime;   //record the timestamp, mirroring the video branches
        }
        else
        {
            pAqData->ModuleStreamInfo_Out_Audio_data->ActualLen = 0;
        }
    }
    
    //re-enqueue the buffer so the queue can refill it
    AudioQueueEnqueueBuffer(pAqData->mQueue, inBuffer, 0, NULL);
}
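How the buffers cycle: each of the kNumberBuffers buffers is enqueued empty, filled by the system with roughly audio_seconde_time seconds of PCM, handed to this callback, and then re-enqueued by the AudioQueueEnqueueBuffer call above. With three buffers in rotation, the queue can keep filling one buffer while another is being processed, so no data is lost as long as the callback returns quickly enough.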

//Compute the audio buffer size (this helper follows the pattern in Apple's Audio Queue Services documentation)
void DeriveBufferSize (AudioQueueRef audioQueue,AudioStreamBasicDescription *ASBDescription, Float64 seconds,UInt32 *outBufferSize)
{
    static const int maxBufferSize = 0x50000;
    
    int maxPacketSize = (*ASBDescription).mBytesPerPacket;
    if (maxPacketSize == 0)
    {
        UInt32 maxVBRPacketSize = sizeof(maxPacketSize);
        AudioQueueGetProperty(audioQueue,kAudioQueueProperty_MaximumOutputPacketSize,&maxPacketSize,&maxVBRPacketSize);
    }
    
    Float64 numBytesForTime =
    (*ASBDescription).mSampleRate * maxPacketSize * seconds;
    *outBufferSize = numBytesForTime < maxBufferSize ? numBytesForTime : maxBufferSize;
}
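To make the sizing concrete: with the format configured below (44100 Hz, 2 channels, 16 bits per channel, hence mBytesPerPacket = 4) and audio_seconde_time = 0.4, this works out to 44100 × 4 × 0.4 = 70560 bytes per buffer, well under the 0x50000 (327680-byte) cap.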


- (int)device_init: (Module_Info **) ModuleInfo_Current andCurrentLength: (int *) CurrentLength
{
    int device_length = 0; //total number of devices, cameras plus the microphone
    NSArray * devices = [AVCaptureDevice devices];
    NSError *error = nil;
    
    Module_Info * pmodule_info = nil;
    pmodule_info = *ModuleInfo_Current;
    pmodule_info = (Module_Info*)realloc(pmodule_info, m_stream_num * sizeof(Module_Info));
    for (int i = 0; i< m_stream_num;i++)
    {
        pmodule_info[i].ModuleStreamInfo_In = nil;
        pmodule_info[i].ModuleStreamInfo_Out = nil;
    }
    
    for (AVCaptureDevice *device in devices)
    {
        NSLog(@"Device name: %@", [device localizedName]);
        
        //session init
        AVCaptureSession *session = [[AVCaptureSession alloc] init];
        
        
        if ([device hasMediaType:AVMediaTypeVideo])
        {
            if ([device position] == AVCaptureDevicePositionBack)
            {
                NSLog(@"Device position : back");
                
                //Set the resolution
                if ([session canSetSessionPreset:AVCaptureSessionPreset640x480])
                {
                    session.sessionPreset = AVCaptureSessionPreset640x480;
                }
                //Create and Configure the Device and Device Input
                AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
                
                if (!input)
                {
                    // Handle the error appropriately
                }
                if ([session canAddInput:input])
                {
                    [session addInput:input];
                }
                
                //Create and Configure the Data Output
                AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];
                
                
                //AVCaptureVideoOrientation
                
                //Create the dispatch queue for output data
                dispatch_queue_t queue = dispatch_queue_create("video_back_queue", NULL);
                [output setSampleBufferDelegate:self queue:queue]; //setSampleBufferDelegate sets the data-output delegate
                dispatch_release(queue);
                //Configure the output format (kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange = NV12)
                NSDictionary *setting =[[NSDictionary alloc] initWithObjectsAndKeys:
                                        [NSNumber numberWithInt:640], (id)kCVPixelBufferWidthKey,
                                        [NSNumber numberWithInt:480], (id)kCVPixelBufferHeightKey,
                                        [NSNumber numberWithUnsignedInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange],(id)kCVPixelBufferPixelFormatTypeKey,
                                        nil];
                output.videoSettings = setting;
                output.alwaysDiscardsLateVideoFrames = YES;
                output.minFrameDuration = CMTimeMake(1, 25);   //cap the frame rate at 25 fps
                
                if ([session canAddOutput:output])
                {
                    [session addOutput:output];
                }
                
                [setting release];
                
                //get connection
                videoCaptureConnection_back = [output connectionWithMediaType:AVMediaTypeVideo];
                //Set the video orientation
                [videoCaptureConnection_back setVideoOrientation:AVCaptureVideoOrientationPortrait];
                
                //put session
                m_list_capture_session[device_length] = (session);
                
                //put info
                pmodule_info[device_length].Is_ToNext_Module = 1;
                //header
                Module_HeaderInfo_In_Video_Dispose * pheaderinfo_device_video = nil; //the In_Video_Dispose header type is reused here
                pheaderinfo_device_video = new Module_HeaderInfo_In_Video_Dispose();
                pheaderinfo_device_video->stream_id = device_length;
                pmodule_info[device_length].ModuleHeaderInfo.HeaderInfo = pheaderinfo_device_video;
                //streaminfo
                Module_StreamInfo *pmodulestreaminfo = nil;
                pmodulestreaminfo = new Module_StreamInfo();
                pmodulestreaminfo->stream_id = device_length;
                pmodulestreaminfo->CodecType = CodecType_Video;
                pmodulestreaminfo->VideoInfo.PixFormat = LVS_PIX_FMT_NV12;
                pmodulestreaminfo->VideoInfo.Width = 480;  //portrait: width and height are swapped
                pmodulestreaminfo->VideoInfo.Height = 640; //portrait: width and height are swapped
                pmodulestreaminfo->VideoInfo.pts = -1;
                pmodulestreaminfo->VideoInfo.dts = -1;
                pmodulestreaminfo->VideoInfo.timebase_num = 1;
                pmodulestreaminfo->VideoInfo.timebase_den = 1000;
                pmodulestreaminfo->BufLen = 640 * 480 * 3 + 100;
                pmodulestreaminfo->Buf = (char *)calloc(pmodulestreaminfo->BufLen, sizeof(char));
                pmodulestreaminfo->ActualLen = 0;
                pmodule_info[device_length].ModuleStreamInfo_Out =(pmodulestreaminfo);
            }
            else
            {
                NSLog(@"Device position : front");
                
                //Set the resolution
                if ([session canSetSessionPreset:AVCaptureSessionPreset640x480])
                {
                    session.sessionPreset = AVCaptureSessionPreset640x480;
                }
                //Create and Configure the Device and Device Input
                AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
                if (!input)
                {
                    // Handle the error appropriately
                }
                if ([session canAddInput:input])
                {
                    [session addInput:input];
                }
                
                //Create and Configure the Data Output
                AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];
                
                //Create the dispatch queue for output data
                dispatch_queue_t queue = dispatch_queue_create("video_front_queue", NULL);
                [output setSampleBufferDelegate:self queue:queue];    //setSampleBufferDelegate sets the data-output delegate
                dispatch_release(queue);
                
                //Configure the output format (kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange = NV12)
                NSDictionary *setting =[[NSDictionary alloc] initWithObjectsAndKeys:
                                        [NSNumber numberWithInt:640], (id)kCVPixelBufferWidthKey,
                                        [NSNumber numberWithInt:480], (id)kCVPixelBufferHeightKey,
                                        [NSNumber numberWithUnsignedInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange],(id)kCVPixelBufferPixelFormatTypeKey,
                                        nil];
                output.videoSettings = setting;
                output.alwaysDiscardsLateVideoFrames = YES;
                output.minFrameDuration = CMTimeMake(1, 25);   //cap the frame rate at 25 fps
                
                if ([session canAddOutput:output])
                {
                    [session addOutput:output];
                }
                
                [setting release];
                
                //get connection
                videoCaptureConnection_front = [output connectionWithMediaType:AVMediaTypeVideo];
                //Set the video orientation
                [videoCaptureConnection_front setVideoOrientation:AVCaptureVideoOrientationPortrait];
                
                //put session
                m_list_capture_session[device_length] = (session);
                
                //put info
                pmodule_info[device_length].Is_ToNext_Module = 1;
                //header
                Module_HeaderInfo_In_Video_Dispose * pheaderinfo_device_video = nil; //the In_Video_Dispose header type is reused here
                pheaderinfo_device_video = new Module_HeaderInfo_In_Video_Dispose();
                pheaderinfo_device_video->stream_id = device_length;
                pmodule_info[device_length].ModuleHeaderInfo.HeaderInfo = pheaderinfo_device_video;
                //streaminfo
                Module_StreamInfo *pmodulestreaminfo = nil;
                pmodulestreaminfo = new Module_StreamInfo();
                pmodulestreaminfo->stream_id = device_length;
                pmodulestreaminfo->CodecType = CodecType_Video;
                pmodulestreaminfo->VideoInfo.PixFormat = LVS_PIX_FMT_NV12;
                pmodulestreaminfo->VideoInfo.Width = 480;  //portrait: width and height are swapped
                pmodulestreaminfo->VideoInfo.Height = 640; //portrait: width and height are swapped
                pmodulestreaminfo->VideoInfo.pts = -1;
                pmodulestreaminfo->VideoInfo.dts = -1;
                pmodulestreaminfo->VideoInfo.timebase_num = 1;
                pmodulestreaminfo->VideoInfo.timebase_den = 1000;
                pmodulestreaminfo->BufLen = 640 * 480 * 3 + 100;
                pmodulestreaminfo->Buf = (char *)calloc(pmodulestreaminfo->BufLen, sizeof(char));
                pmodulestreaminfo->ActualLen = 0;
                pmodule_info[device_length].ModuleStreamInfo_Out =(pmodulestreaminfo);
            }
        }
        if ([device hasMediaType:AVMediaTypeAudio])
        {
            /*
            //Setting audio parameters with AVCaptureSession is problematic, so AudioQueue is used for audio capture instead
             
            //input
            AVCaptureDeviceInput *audioInput = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
            if (error)
            {
                NSLog(@"Error getting video input device: %@", error.description);
            }
            
            if ([session canAddInput:audioInput])
            {
                [session addInput:audioInput];
            }
            else
            {
                NSLog(@"Error: %@", error);
            }
        
            //output
            AVCaptureAudioDataOutput * audioOutput = [[AVCaptureAudioDataOutput alloc] init];
            
            //Create the dispatch queue for output data
            dispatch_queue_t audioqueue = dispatch_queue_create("audio_queue", DISPATCH_QUEUE_SERIAL);
            [audioOutput setSampleBufferDelegate:self queue:audioqueue];   //setSampleBufferDelegate sets the data-output delegate
            dispatch_release(audioqueue);
            
            //Configure parameters
            NSDictionary *setting = [[NSMutableDictionary alloc] initWithObjectsAndKeys:
                                     [NSNumber numberWithInt: kAudioFormatLinearPCM], (id)AVFormatIDKey,
                                     [NSNumber numberWithFloat:44100], (id)AVSampleRateKey,
                                     [NSNumber numberWithInt:2], (id)AVNumberOfChannelsKey,
                                     [NSNumber numberWithInt:128000], (id)AVEncoderBitRateKey,
                                     [NSNumber numberWithInt:16], (id)AVLinearPCMBitDepthKey,
                                     nil];
            
            //[audioOutput setAudioSettings:setting]; //audioOutput.audioSettings = setting; does not work as expected
            //audioOutput.audioSettings = setting;
            
            
            if ([session canAddOutput:audioOutput])
            {
                [session addOutput:audioOutput];
            }
            else
            {
                NSLog(@"error: %@", error);
            }
            
            [setting release];
            
            //get connection
            audioCaptureConnection= [audioOutput connectionWithMediaType:AVMediaTypeAudio];
            
            //put session
            m_list_capture_session[device_length] = (session);
            */
            
            //Set up the capture format
            m_aqData.mDataFormat.mFormatID         = kAudioFormatLinearPCM;
            m_aqData.mDataFormat.mSampleRate       = 44100.0;
            m_aqData.mDataFormat.mChannelsPerFrame = 2;
            m_aqData.mDataFormat.mBitsPerChannel   = 16;
            m_aqData.mDataFormat.mBytesPerPacket   = m_aqData.mDataFormat.mBytesPerFrame = m_aqData.mDataFormat.mChannelsPerFrame * sizeof (SInt16);
            m_aqData.mDataFormat.mFramesPerPacket  = 1;
            //do not set kLinearPCMFormatFlagIsBigEndian here, otherwise the PCM comes out big-endian and encoding it causes problems
            m_aqData.mDataFormat.mFormatFlags = /*kLinearPCMFormatFlagIsBigEndian | */kLinearPCMFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsPacked;
            
            //Create the input queue and set its callback: lvsAudioQueueInputCallback
            AudioQueueNewInput (&m_aqData.mDataFormat,lvsAudioQueueInputCallback,&m_aqData,NULL,kCFRunLoopCommonModes,0,&m_aqData.mQueue);
            
            UInt32 dataFormatSize = sizeof (m_aqData.mDataFormat);
            AudioQueueGetProperty(m_aqData.mQueue,kAudioQueueProperty_StreamDescription,&m_aqData.mDataFormat,&dataFormatSize);
            
            //Compute the audio buffer size
            m_aqData.audio_seconde_time = 0.4;  //0.4 seconds
            DeriveBufferSize(m_aqData.mQueue,&m_aqData.mDataFormat,m_aqData.audio_seconde_time,&m_aqData.bufferByteSize);
            
            //Prepare the set of audio queue buffers
            for (int i = 0; i < kNumberBuffers; ++i)
            {
                AudioQueueAllocateBuffer(m_aqData.mQueue,m_aqData.bufferByteSize,&m_aqData.mBuffers[i]);
                //enqueue the empty buffer (note: AudioQueueStart is not called here; the caller starts capture)
                AudioQueueEnqueueBuffer (m_aqData.mQueue,m_aqData.mBuffers[i], 0, NULL);
            }
            
            //put info
            pmodule_info[device_length].Is_ToNext_Module = 1;
            //header
            Module_HeaderInfo_In_Audio_Dispose * pheaderinfo_device_audio = nil; //the In_Audio_Dispose header type is reused here
            pheaderinfo_device_audio = new Module_HeaderInfo_In_Audio_Dispose();
            pheaderinfo_device_audio->stream_id = device_length;
            pmodule_info[device_length].ModuleHeaderInfo.HeaderInfo = pheaderinfo_device_audio;
            //streaminfo
            Module_StreamInfo *pmodulestreaminfo = nil;
            pmodulestreaminfo = new Module_StreamInfo();
            pmodulestreaminfo->stream_id = device_length;
            pmodulestreaminfo->CodecType = CodecType_Audio;
            pmodulestreaminfo->AudioInfo.Channel = 2;
            pmodulestreaminfo->AudioInfo.SampleFormat = LVS_SAMPLE_FMT_S16;
            pmodulestreaminfo->AudioInfo.SampleRate = SampleRate_44100;
            pmodulestreaminfo->AudioInfo.pts = -1;
            pmodulestreaminfo->AudioInfo.dts = -1;
            pmodulestreaminfo->AudioInfo.timebase_num = 1;
            pmodulestreaminfo->AudioInfo.timebase_den = 1000;
            pmodulestreaminfo->BufLen = 100 * 1024 * 2 * 2;
            pmodulestreaminfo->Buf = (char *)calloc(pmodulestreaminfo->BufLen, sizeof(char));
            pmodulestreaminfo->ActualLen = 0;
            pmodule_info[device_length].ModuleStreamInfo_Out =(pmodulestreaminfo);
            
            //stream info through which the audio data callback outputs its data
            m_aqData.ModuleStreamInfo_Out_Audio_data = pmodulestreaminfo;
        }
        
        //advance the device/stream id
        device_length++;
    }
    
    *CurrentLength = device_length;
    *ModuleInfo_Current = pmodule_info;
    
    m_device_module_info_collection = pmodule_info;
    m_device_module_length_collection = device_length;
    
    return 1;
}

- (void)device_uinit: (Module_Info *) ModuleInfo_Current
{
    if (ModuleInfo_Current != nil)
    {
        for (int i = 0; i < m_stream_num; i++)
        {
            //header
            if (ModuleInfo_Current[i].ModuleHeaderInfo.HeaderInfo != nil)
            {
                delete (ModuleInfo_Current[i].ModuleHeaderInfo.HeaderInfo);
                ModuleInfo_Current[i].ModuleHeaderInfo.HeaderInfo = nil;
            }
            //streaminfo_in
            if(ModuleInfo_Current[i].ModuleStreamInfo_In != nil)
            {
                if(ModuleInfo_Current[i].ModuleStreamInfo_In->Buf != nil)
                {
                    free(ModuleInfo_Current[i].ModuleStreamInfo_In->Buf);
                    ModuleInfo_Current[i].ModuleStreamInfo_In->Buf = nil;
                    ModuleInfo_Current[i].ModuleStreamInfo_In->ActualLen = 0;
                    ModuleInfo_Current[i].ModuleStreamInfo_In->BufLen = 0;
                }
                delete (ModuleInfo_Current[i].ModuleStreamInfo_In);
                ModuleInfo_Current[i].ModuleStreamInfo_In = nil;
            }
            //streaminfo_out
            if(ModuleInfo_Current[i].ModuleStreamInfo_Out != nil)
            {
                if(ModuleInfo_Current[i].ModuleStreamInfo_Out->Buf != nil)
                {
                    free(ModuleInfo_Current[i].ModuleStreamInfo_Out->Buf);
                    ModuleInfo_Current[i].ModuleStreamInfo_Out->Buf = nil;
                    ModuleInfo_Current[i].ModuleStreamInfo_Out->ActualLen = 0;
                    ModuleInfo_Current[i].ModuleStreamInfo_Out->BufLen = 0;
                }
                delete (ModuleInfo_Current[i].ModuleStreamInfo_Out);
                ModuleInfo_Current[i].ModuleStreamInfo_Out = nil;
            }
        }
        free(ModuleInfo_Current);
        ModuleInfo_Current = nil;
    }
    
    //stop and release the audio capture queue
    if (m_aqData.mQueue != NULL)
    {
        AudioQueueStop(m_aqData.mQueue, true);
        AudioQueueDispose(m_aqData.mQueue, true);
        m_aqData.mQueue = NULL;
    }
    
    if (m_list_capture_session.size() > 0)
    {
        m_list_capture_session.clear();
    }
    
    return;
}

- (int)device_write: (Module_Info *) ModuleInfo_Current andUpperLength: (int) UpperLength
andModuleInfo_Next_video: (Module_Info *) ModuleInfo_Next_video andNextLength_video: (int) NextLength_video
andModuleInfo_Next_audio: (Module_Info *) ModuleInfo_Next_audio andNextLength_audio: (int)NextLength_audio
{
    return 1;
}

@end
//
//  Lvs_Ios_Device_Preview.h
//  LvsIos
//
//  Created by mx on 16/9/5.
//  Copyright © 2016 lvs.zwg. All rights reserved.
//

#import <AVFoundation/AVFoundation.h>
#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>

@interface Lvs_Ios_Device_Preview : NSObject

//Show the video preview
- (void) ShowPreview: (AVCaptureSession *) session andImageView: (void *) imageview;

@end
//
//  Lvs_Ios_Device_Preview.m
//  LvsIos
//
//  Created by mx on 16/9/5.
//  Copyright © 2016 lvs. All rights reserved.
//

#import "Lvs_Ios_Device_Preview.h"

@implementation Lvs_Ios_Device_Preview


- (void) ShowPreview: (AVCaptureSession *) session andImageView: (void *) imageview
{
    //cast the incoming window pointer back to a UIView
    UIView * view = (UIView *)imageview;
    
    //attach the preview layer to the view
    AVCaptureVideoPreviewLayer *previewLayer = nil;
    previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:session];
    previewLayer.frame = [[view layer] bounds];
    previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    [[previewLayer connection] setVideoOrientation:AVCaptureVideoOrientationPortrait];
    [view.layer addSublayer:previewLayer];
}

@end
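One caveat: ShowPreview manipulates UIKit views and layers, so it should run on the main thread. If capture is driven from a background queue, a hypothetical call site (using the collection/preview objects from the sketch at the top) can hop to the main queue first:

dispatch_async(dispatch_get_main_queue(), ^{
    [preview ShowPreview:session andImageView:(void *)self.view];
});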

Corrections are welcome.

QQ group for discussion: 62054820

QQ: 379969650