FFmpeg: Converting MP3 to PCM and Playing It with AudioTrack

Audio processing works much the same as the video processing covered in the previous posts; the main difference is the conversion function used (swr_convert).
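Before walking through the full project, a quick sketch of how swr_convert is meant to be called may help, because its third argument is the capacity of the output buffer in samples per channel, not in bytes. This is only an illustration; the helper name resample_frame and its parameters are my own and are not part of the project below.

#include "libswresample/swresample.h"
#include "libavutil/frame.h"
#include "libavutil/mathematics.h"

// Sketch: resample one decoded AVFrame into a caller-supplied packed buffer.
// swr is assumed to be a SwrContext already configured with swr_alloc_set_opts() + swr_init().
static int resample_frame(SwrContext *swr, AVFrame *frame, int out_rate,
                          uint8_t *out_buf, int out_buf_capacity_samples) {
    // Upper bound on the output: samples still buffered in the resampler plus this frame,
    // rescaled from the input rate to the output rate.
    int64_t max_out = av_rescale_rnd(swr_get_delay(swr, frame->sample_rate) + frame->nb_samples,
                                     out_rate, frame->sample_rate, AV_ROUND_UP);
    if (max_out > out_buf_capacity_samples)
        max_out = out_buf_capacity_samples; // never claim more capacity than the buffer really has
    // Returns the number of samples written per channel, or a negative error code.
    return swr_convert(swr, &out_buf, (int) max_out,
                       (const uint8_t **) frame->data, frame->nb_samples);
}

In the code below the input and output sample rates are the same, so the converted count works out to roughly frame->nb_samples per call.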

I. Code

1. Java part:

import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioTrack;

public class MyPlayer {

    static {
        System.loadLibrary("avcodec-56");
        System.loadLibrary("avdevice-56");
        System.loadLibrary("avfilter-5");
        System.loadLibrary("avformat-56");
        System.loadLibrary("avutil-54");
        System.loadLibrary("postproc-53");
        System.loadLibrary("swresample-1");
        System.loadLibrary("swscale-3");
        System.loadLibrary("native-lib");
    }

    public native void sound(String input, String output);

    private AudioTrack audioTrack;

    // Called from C: create and start the AudioTrack with the given sample rate and channel count.
    public void createAudio(int sampleRateInHz, int nb_channals) {
        int channelConfig;
        if (nb_channals == 1) {
            channelConfig = AudioFormat.CHANNEL_OUT_MONO;
        } else if (nb_channals == 2) {
            channelConfig = AudioFormat.CHANNEL_OUT_STEREO;
        } else {
            channelConfig = AudioFormat.CHANNEL_OUT_MONO;
        }
        int bufferSize = AudioTrack.getMinBufferSize(sampleRateInHz,
                channelConfig, AudioFormat.ENCODING_PCM_16BIT);
        audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC,
                sampleRateInHz, channelConfig,
                AudioFormat.ENCODING_PCM_16BIT, bufferSize, AudioTrack.MODE_STREAM);
        audioTrack.play();
    }

    // Called from C: write one chunk of decoded PCM data to the AudioTrack.
    public synchronized void playTrack(byte[] buffer, int length) {
        if (audioTrack != null && audioTrack.getPlayState() == AudioTrack.PLAYSTATE_PLAYING) {
            audioTrack.write(buffer, 0, length);
        }
    }
}

MainActivity:

import android.os.Bundle;
import android.os.Environment;
import android.support.v7.app.AppCompatActivity; // androidx.appcompat.app.AppCompatActivity on newer projects
import android.view.View;

import java.io.File;

public class MainActivity extends AppCompatActivity {

    String input;
    String output;
    MyPlayer player;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        // Source MP3 and target PCM paths on external storage (storage permission required).
        input = new File(Environment.getExternalStorageDirectory(), "input.mp3").getAbsolutePath();
        output = new File(Environment.getExternalStorageDirectory(), "output.pcm").getAbsolutePath();
        player = new MyPlayer();
    }

    // Typically wired to a button via android:onClick="play" in activity_main.xml.
    public void play(View view) {
        player.sound(input, output);
    }
}

2. C code:

#include <jni.h>
#include <string>
#include <android/log.h>

extern "C" {
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libswresample/swresample.h"
#include "libswscale/swscale.h"
#include "libavutil/channel_layout.h"
#include "libavutil/samplefmt.h"
}

#define LOGE(FORMAT, ...) __android_log_print(ANDROID_LOG_ERROR,"keke",FORMAT,##__VA_ARGS__);

extern "C"
JNIEXPORT void JNICALL
Java_com_example_ffmpegaudio_MyPlayer_sound(JNIEnv *env, jobject instance,
                                            jstring input_, jstring output_) {
    const char *input = env->GetStringUTFChars(input_, 0);
    const char *output = env->GetStringUTFChars(output_, 0);

    // Register all demuxers and codecs (required with this FFmpeg 2.x API).
    av_register_all();

    // Open the input file and read its stream information.
    AVFormatContext *pFormatContext = avformat_alloc_context();
    if (avformat_open_input(&pFormatContext, input, NULL, NULL) != 0) {
        LOGE("Failed to open the input file");
        return;
    }
    if (avformat_find_stream_info(pFormatContext, NULL) < 0) {
        LOGE("Failed to read stream information");
        return;
    }

    // Locate the audio stream.
    int audio_stream_index = -1;
    for (int i = 0; i < pFormatContext->nb_streams; ++i) {
        if (pFormatContext->streams[i]->codec->codec_type == AVMEDIA_TYPE_AUDIO) {
            audio_stream_index = i;
            break;
        }
    }
    if (audio_stream_index == -1) {
        LOGE("No audio stream found");
        return;
    }

    // Open the decoder for that stream.
    AVCodecContext *pCodecCtx = pFormatContext->streams[audio_stream_index]->codec;
    AVCodec *pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
    if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
        LOGE("Failed to open the decoder");
        return;
    }

    // One packet reused for reading, one frame reused for decoding.
    AVPacket *packet = (AVPacket *) av_malloc(sizeof(AVPacket));
    AVFrame *frame = av_frame_alloc();

    // Resampler: convert whatever the decoder produces into packed 16-bit stereo PCM
    // at the source sample rate.
    SwrContext *swrContext = swr_alloc();
    uint64_t out_ch_layout = AV_CH_LAYOUT_STEREO;
    enum AVSampleFormat out_format = AV_SAMPLE_FMT_S16;
    int out_sample_rate = pCodecCtx->sample_rate;
    swr_alloc_set_opts(swrContext, out_ch_layout, out_format, out_sample_rate,
                       pCodecCtx->channel_layout, pCodecCtx->sample_fmt, pCodecCtx->sample_rate,
                       0, NULL);
    swr_init(swrContext);

    // Output buffer: room for one second of 44.1 kHz stereo 16-bit audio
    // (44100 samples * 2 channels * 2 bytes per sample).
    uint8_t *out_buffer = (uint8_t *) av_malloc(44100 * 2 * 2);
    int out_channals_nb = av_get_channel_layout_nb_channels(AV_CH_LAYOUT_STEREO);

    // Look up the Java callbacks and create the AudioTrack with the actual output parameters.
    jclass my_player = env->GetObjectClass(instance);
    jmethodID createAudio = env->GetMethodID(my_player, "createAudio", "(II)V");
    env->CallVoidMethod(instance, createAudio, out_sample_rate, out_channals_nb);
    jmethodID playTrack = env->GetMethodID(my_player, "playTrack", "([BI)V");

    int got_frame;
    while (av_read_frame(pFormatContext, packet) >= 0) {
        if (packet->stream_index == audio_stream_index) {
            avcodec_decode_audio4(pCodecCtx, frame, &got_frame, packet);
            if (got_frame) {
                LOGE("decoded one audio frame");
                // Resample into out_buffer; 44100 is the buffer capacity in samples per channel.
                swr_convert(swrContext, &out_buffer, 44100,
                            (const uint8_t **) frame->data, frame->nb_samples);
                // Byte size of the converted data (output rate equals input rate,
                // so there are still frame->nb_samples samples per channel).
                int size = av_samples_get_buffer_size(NULL, out_channals_nb, frame->nb_samples,
                                                      AV_SAMPLE_FMT_S16, 1);
                // Copy the PCM into a Java byte[] and hand it to MyPlayer.playTrack().
                jbyteArray audio_sample_array = env->NewByteArray(size);
                env->SetByteArrayRegion(audio_sample_array, 0, size, (const jbyte *) out_buffer);
                env->CallVoidMethod(instance, playTrack, audio_sample_array, size);
                env->DeleteLocalRef(audio_sample_array);
            }
        }
        av_free_packet(packet); // release the packet data before the next av_read_frame()
    }

    // Clean up.
    av_frame_free(&frame);
    av_free(packet);
    av_free(out_buffer);
    swr_free(&swrContext);
    avcodec_close(pCodecCtx);
    avformat_close_input(&pFormatContext);
    env->ReleaseStringUTFChars(input_, input);
    env->ReleaseStringUTFChars(output_, output);
}
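One thing to note: the output path is passed into sound() but never used, so the code above only plays the audio and does not actually produce the output.pcm file named in MainActivity. If you also want the file on disk, the converted buffer can be written out inside the decode loop. A minimal sketch under that assumption follows; the pcm_file variable and the stdio calls are additions of mine, not part of the original code.

#include <stdio.h>

// Before the while loop: open the target PCM file.
FILE *pcm_file = fopen(output, "wb");

// Inside the loop, right after av_samples_get_buffer_size():
if (pcm_file != NULL) {
    fwrite(out_buffer, 1, (size_t) size, pcm_file); // raw interleaved 16-bit stereo samples
}

// After the loop, together with the other cleanup:
if (pcm_file != NULL) {
    fclose(pcm_file);
}

The resulting file is headerless PCM, so it can be checked with something like ffplay -f s16le -ar 44100 -ac 2 output.pcm (adjust -ar to the source sample rate).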