Decoding MP4 with the native APIs and implementing playback
Basics
The Android system provides three classes, MediaExtractor, MediaFormat, and MediaCodec, to help implement audio and video decoding. Each is introduced below, and a small usage sketch follows the list.
- MediaExtractor: the extractor. It locates the tracks of the requested type inside a media file and supplies their sample data, which is then fed into MediaCodec's input buffers.
- MediaFormat: encapsulates the information that describes the format of media data, whether audio or video. The format is specified as string/value pairs. The keys common to all formats are listed below; all keys not marked optional are required:
| Name | Value type | Description |
| --- | --- | --- |
| KEY_MIME | String | The MIME type of the format. |
| KEY_MAX_INPUT_SIZE | Integer | Optional, maximum size of an input data buffer. |
| KEY_BIT_RATE | Integer | Encoder only, desired bitrate in bits/second. |
- MediaCodec: provides access to the low-level media codecs and performs the actual media encoding and decoding.
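As a quick illustration, here is a minimal sketch that walks the tracks of a file with MediaExtractor and logs each track's MIME type taken from its MediaFormat. The TrackInspector class name is hypothetical and the file path is supplied by the caller:
import android.media.MediaExtractor;
import android.media.MediaFormat;
import android.util.Log;
import java.io.IOException;

public class TrackInspector {
    private static final String TAG = "TrackInspector";

    public static void dumpTracks(String filePath) throws IOException {
        MediaExtractor extractor = new MediaExtractor();
        extractor.setDataSource(filePath);
        for (int i = 0; i < extractor.getTrackCount(); i++) {
            MediaFormat format = extractor.getTrackFormat(i);
            // KEY_MIME is present for every track, e.g. "video/avc" or "audio/mp4a-latm"
            Log.v(TAG, "track " + i + ": " + format.getString(MediaFormat.KEY_MIME));
        }
        extractor.release();
    }
}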
Implementation
The steps to decode and play are:
- Load the source into the extractor
- Find the track the video is on
- Make the extractor select the video track
- Create the MediaCodec that decodes the video, the decoder
- Loop until the end of the source:
- Feed one sample at a time from the extractor into the decoder's input buffer
- The decoder writes the decoded frames into its output buffer
- When the decoder releases the output buffer, its contents are rendered to the surface
The implementation code is shown below.
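Both decoder threads reference a few members of the enclosing player class that are not listed in this section: filePath, surface, isPlaying, callBack, TIMEOUT_US and TAG. A minimal sketch of what that skeleton could look like (the member names are taken from how the threads use them; the concrete values are placeholders):
import android.view.Surface;

public class NativeCodecPlayer {
    private static final String TAG = "NativeCodecPlayer";
    private static final long TIMEOUT_US = 10000;   // dequeueBuffer timeout, in microseconds (assumed value)
    private final String filePath;                  // path of the mp4 to decode
    private final Surface surface;                  // Surface that video frames are rendered to
    private volatile boolean isPlaying = true;      // toggled by the play/pause control
    private final CallBack callBack;                // notified of the video size and duration

    public interface CallBack {
        void videoAspect(int width, int height, float time);
    }

    public NativeCodecPlayer(String filePath, Surface surface, CallBack callBack) {
        this.filePath = filePath;
        this.surface = surface;
        this.callBack = callBack;
    }

    // AudioThread and VideoThread below are inner classes of this player.
}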
The audio decoding code is as follows:
private class AudioThread extends Thread {
private int audioInputBufferSize;
private AudioTrack audioTrack;
@RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN)
@Override
public void run() {
MediaExtractor audioExtractor = new MediaExtractor();
MediaCodec audioCodec = null;
try {
audioExtractor.setDataSource(filePath);
} catch (IOException e) {
e.printStackTrace();
}
for (int i = 0; i < audioExtractor.getTrackCount(); i++) {
MediaFormat mediaFormat = audioExtractor.getTrackFormat(i);
String mime = mediaFormat.getString(MediaFormat.KEY_MIME);
if (mime.startsWith("audio/")) {
audioExtractor.selectTrack(i);
int audioChannels = mediaFormat.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
int audioSampleRate = mediaFormat.getInteger(MediaFormat.KEY_SAMPLE_RATE);
int minBufferSize = AudioTrack.getMinBufferSize(audioSampleRate,
(audioChannels == 1 ? AudioFormat.CHANNEL_OUT_MONO : AudioFormat.CHANNEL_OUT_STEREO),
AudioFormat.ENCODING_PCM_16BIT);
int maxInputSize = mediaFormat.getInteger(MediaFormat.KEY_MAX_INPUT_SIZE);
// use a multiple of the minimum AudioTrack buffer size as headroom (factor of 4 assumed here)
audioInputBufferSize = minBufferSize > 0 ? minBufferSize * 4 : maxInputSize;
// 16-bit PCM: one frame is channel count * 2 bytes
int frameSizeInBytes = audioChannels * 2;
// round the buffer size down to a whole number of frames
audioInputBufferSize = (audioInputBufferSize / frameSizeInBytes) * frameSizeInBytes;
audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC,
audioSampleRate,
(audioChannels == 1 ? AudioFormat.CHANNEL_OUT_MONO : AudioFormat.CHANNEL_OUT_STEREO),
AudioFormat.ENCODING_PCM_16BIT,
audioInputBufferSize,
AudioTrack.MODE_STREAM);
audioTrack.play();
Log.v(TAG, "audio play");
//
try {
audioCodec = MediaCodec.createDecoderByType(mime);
audioCodec.configure(mediaFormat, null, null, 0);
} catch (IOException e) {
e.printStackTrace();
}
break;
}
}
if (audioCodec == null) {
Log.v(TAG, "audio decoder null");
return;
}
audioCodec.start();
//
final ByteBuffer[] buffers = audioCodec.getOutputBuffers();
int sz = buffers[0].capacity();
if (sz <= 0)
sz = audioInputBufferSize;
byte[] mAudioOutTempBuf = new byte[sz];
MediaCodec.BufferInfo audioBufferInfo = new MediaCodec.BufferInfo();
ByteBuffer[] inputBuffers = audioCodec.getInputBuffers();
ByteBuffer[] outputBuffers = audioCodec.getOutputBuffers();
boolean isAudioEOS = false;
long startMs = System.currentTimeMillis();
while (!Thread.interrupted()) {
if (!isPlaying) {
continue;
}
if (!isAudioEOS) {
isAudioEOS = putBufferToCoder(audioExtractor, audioCodec, inputBuffers);
}
//
int outputBufferIndex = audioCodec.dequeueOutputBuffer(audioBufferInfo, TIMEOUT_US);
switch (outputBufferIndex) {
case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
Log.v(TAG, "format changed");
break;
case MediaCodec.INFO_TRY_AGAIN_LATER:
Log.v(TAG, "超时");
break;
case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
outputBuffers = audioCodec.getOutputBuffers();
Log.v(TAG, "output buffers changed");
break;
default:
ByteBuffer outputBuffer = outputBuffers[outputBufferIndex];
//timing control:
//if the buffer's presentation time is ahead of the current playback position, sleep for a bit
sleepRender(audioBufferInfo, startMs);
if (audioBufferInfo.size > 0) {
if (mAudioOutTempBuf.length < audioBufferInfo.size) {
mAudioOutTempBuf = new byte[audioBufferInfo.size];
}
outputBuffer.position(0);
outputBuffer.get(mAudioOutTempBuf, 0, audioBufferInfo.size);
outputBuffer.clear();
if (audioTrack != null)
audioTrack.write(mAudioOutTempBuf, 0, audioBufferInfo.size);
}
//
audioCodec.releaseOutputBuffer(outputBufferIndex, false);
break;
}
if ((audioBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
Log.v(TAG, "buffer stream end");
break;
}
}//end while
audioCodec.stop();
audioCodec.release();
audioExtractor.release();
audioTrack.stop();
audioTrack.release();
}
}
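The loop above relies on a putBufferToCoder helper that is not listed in this section. Judging from how it is called (it returns true once the extractor has reached the end of the stream), a plausible sketch of it as a method of the player class is:
@RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN)
private boolean putBufferToCoder(MediaExtractor extractor, MediaCodec decoder, ByteBuffer[] inputBuffers) {
    boolean isMediaEOS = false;
    int inputBufferIndex = decoder.dequeueInputBuffer(TIMEOUT_US);
    if (inputBufferIndex >= 0) {
        ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
        // copy one sample from the extractor into the codec's input buffer
        int sampleSize = extractor.readSampleData(inputBuffer, 0);
        if (sampleSize < 0) {
            // no more samples: queue an empty buffer flagged end-of-stream
            decoder.queueInputBuffer(inputBufferIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
            isMediaEOS = true;
        } else {
            decoder.queueInputBuffer(inputBufferIndex, 0, sampleSize, extractor.getSampleTime(), 0);
            extractor.advance();
        }
    }
    return isMediaEOS;
}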
The code for the video decoding task is as follows:
private class VideoThread extends Thread {
@RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN)
@Override
public void run() {
MediaExtractor videoExtractor = new MediaExtractor();
MediaCodec videoCodec = null;
try {
videoExtractor.setDataSource(filePath);
} catch (IOException e) {
e.printStackTrace();
}
int videoTrackIndex;
//find the track that contains the video
videoTrackIndex = getMediaTrackIndex(videoExtractor, "video/");
if (videoTrackIndex >= 0) {
MediaFormat mediaFormat = videoExtractor.getTrackFormat(videoTrackIndex);
int width = mediaFormat.getInteger(MediaFormat.KEY_WIDTH);
int height = mediaFormat.getInteger(MediaFormat.KEY_HEIGHT);
float time = mediaFormat.getLong(MediaFormat.KEY_DURATION) / 1000000f; // KEY_DURATION is in microseconds; converting to seconds is assumed here
callBack.videoAspect(width, height, time);
videoExtractor.selectTrack(videoTrackIndex);
try {
videoCodec = MediaCodec.createDecoderByType(mediaFormat.getString(MediaFormat.KEY_MIME));
videoCodec.configure(mediaFormat, surface, null, 0);
} catch (IOException e) {
e.printStackTrace();
}
}
if (videoCodec == null) {
Log.v(TAG, "MediaCodec null");
return;
}
videoCodec.start();
MediaCodec.BufferInfo videoBufferInfo = new MediaCodec.BufferInfo();
ByteBuffer[] inputBuffers = videoCodec.getInputBuffers();
// ByteBuffer[] outputBuffers = videoCodec.getOutputBuffers();
boolean isVideoEOS = false;
long startMs = System.currentTimeMillis();
while (!Thread.interrupted()) {
if (!isPlaying) {
continue;
}
//pass the source data to the decoder
if (!isVideoEOS) {
isVideoEOS = putBufferToCoder(videoExtractor, videoCodec, inputBuffers);
}
int outputBufferIndex = videoCodec.dequeueOutputBuffer(videoBufferInfo, TIMEOUT_US);
switch (outputBufferIndex) {
case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
Log.v(TAG, "format changed");
break;
case MediaCodec.INFO_TRY_AGAIN_LATER:
Log.v(TAG, "超时");
break;
case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
//outputBuffers = videoCodec.getOutputBuffers();
Log.v(TAG, "output buffers changed");
break;
default:
//when rendering directly to the Surface, the outputBuffer itself is not needed
//ByteBuffer outputBuffer = outputBuffers[outputBufferIndex];
//timing control:
//if the buffer's presentation time is ahead of the current playback position, sleep for a bit
sleepRender(videoBufferInfo, startMs);
//render the frame
videoCodec.releaseOutputBuffer(outputBufferIndex, true);
break;
}
if ((videoBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
Log.v(TAG, "buffer stream end");
break;
}
}//end while
videoCodec.stop();
videoCodec.release();
videoExtractor.release();
}
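Both threads also call sleepRender to keep decoding in step with the wall clock, and the video thread calls getMediaTrackIndex to find a track by MIME prefix. Neither helper is listed above; minimal sketches of the usual implementations are:
// Find the first track whose MIME type starts with the given prefix ("video/" or "audio/"),
// or -1 if no such track exists.
private static int getMediaTrackIndex(MediaExtractor extractor, String mimePrefix) {
    for (int i = 0; i < extractor.getTrackCount(); i++) {
        MediaFormat format = extractor.getTrackFormat(i);
        String mime = format.getString(MediaFormat.KEY_MIME);
        if (mime != null && mime.startsWith(mimePrefix)) {
            return i;
        }
    }
    return -1;
}

// If the buffer's presentation time is ahead of the elapsed playback time, sleep until it is due,
// so frames are not rendered faster than real time.
private static void sleepRender(MediaCodec.BufferInfo bufferInfo, long startMs) {
    while (bufferInfo.presentationTimeUs / 1000 > System.currentTimeMillis() - startMs) {
        try {
            Thread.sleep(10);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            break;
        }
    }
}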
}