在 read_thread() 線程中調用 stream_component_open(ffp, st_index[AVMEDIA_TYPE_VIDEO]); 函數,
此函數 decoder_start(&is->viddec, video_thread, ffp, "ff_video_dec") 啟動了 video_thread 線程,
本篇代碼從android程式空間,看看ijkPlayer 基本輪廓特征。
第 1 部分 video_thread 線程來源
//>源碼路徑: ijkmedia/ijkplayer/ff_ffplay.c 中
/*
 * Entry point of the video decoding thread started by decoder_start().
 * arg is the FFPlayer; when a decode pipenode exists, this blocks inside
 * ffpipenode_run_sync() until decoding ends and returns its result,
 * otherwise it returns 0 immediately.
 */
static int video_thread(void *arg)
{
    FFPlayer *ffp = (FFPlayer *)arg;

    if (!ffp->node_vdec)
        return 0;

    /* Runs node->func_run_sync (func_run_sync / func_run_sync_loop on Android). */
    return ffpipenode_run_sync(ffp->node_vdec);
}
//>源碼路徑: ijkmedia/ijkplayer/ff_ffpipenode.c 中
/*
 * Dispatch to the pipenode's synchronous run implementation.
 * On Android this pointer is installed by
 * ffpipenode_init_decoder_from_android_mediacodec() and points at
 * func_run_sync (or func_run_sync_loop when ffp->mediacodec_sync is set).
 * Blocks for the lifetime of the decode loop; returns its status code.
 */
int ffpipenode_run_sync(IJKFF_Pipenode *node)
{
return node->func_run_sync(node);
}
第 2 部分 此 func_run_sync() 是指針,它究竟指向誰。
需要梳理此指針 node->func_run_sync 指向的函數實作,下面内容可以從 1 ~ 7 這種方式看,會對 ijkPlayer 有更清晰的認識。
//> 7. SDL_AMediaCodecJava_init() 初始化函數内容
/*
 * Wrap an android.media.MediaCodec jobject in an SDL_AMediaCodec and install
 * the Java-backed function table (vtable-style dispatch).
 *
 * env                 : JNI environment of the calling thread.
 * android_media_codec : local reference to the Java MediaCodec object; this
 *                       function takes its own global reference.
 *
 * Returns the wrapper with its reference count increased by one, or NULL on
 * JNI or allocation failure (the global ref is released on that path).
 */
static SDL_AMediaCodec* SDL_AMediaCodecJava_init(JNIEnv *env, jobject android_media_codec)
{
SDLTRACE("%s", __func__);
/* Promote to a global ref so the Java object outlives this JNI frame. */
jobject global_android_media_codec = (*env)->NewGlobalRef(env, android_media_codec);
if (J4A_ExceptionCheck__catchAll(env) || !global_android_media_codec) {
return NULL;
}
SDL_AMediaCodec *acodec = SDL_AMediaCodec_CreateInternal(sizeof(SDL_AMediaCodec_Opaque));
if (!acodec) {
/* Allocation failed: drop the global ref taken above. */
SDL_JNI_DeleteGlobalRefP(env, &global_android_media_codec);
return NULL;
}
SDL_AMediaCodec_Opaque *opaque = acodec->opaque;
opaque->android_media_codec = global_android_media_codec;
/* Function table: every SDL_AMediaCodec_* operation forwards to Java. */
acodec->opaque_class = &g_amediacodec_class;
acodec->func_delete = SDL_AMediaCodecJava_delete;
acodec->func_configure = NULL;
acodec->func_configure_surface = SDL_AMediaCodecJava_configure_surface;
acodec->func_start = SDL_AMediaCodecJava_start; /* implementation shown below */
acodec->func_stop = SDL_AMediaCodecJava_stop;
acodec->func_flush = SDL_AMediaCodecJava_flush;
acodec->func_writeInputData = SDL_AMediaCodecJava_writeInputData;
acodec->func_dequeueInputBuffer = SDL_AMediaCodecJava_dequeueInputBuffer;
acodec->func_queueInputBuffer = SDL_AMediaCodecJava_queueInputBuffer;
acodec->func_dequeueOutputBuffer = SDL_AMediaCodecJava_dequeueOutputBuffer;
acodec->func_getOutputFormat = SDL_AMediaCodecJava_getOutputFormat;
acodec->func_releaseOutputBuffer = SDL_AMediaCodecJava_releaseOutputBuffer;
acodec->func_isInputBuffersValid = SDL_AMediaCodecJava_isInputBuffersValid;
SDL_AMediaCodec_increaseReference(acodec);
return acodec;
}
//> 函數内容在此
/*
 * func_start implementation: forward the start request to the wrapped Java
 * MediaCodec object (MediaCodec.start()) from the calling thread.
 *
 * Returns SDL_AMEDIA_OK on success, SDL_AMEDIA_ERROR_UNKNOWN if the JNI
 * environment cannot be attached or the Java call raises an exception.
 */
static sdl_amedia_status_t SDL_AMediaCodecJava_start(SDL_AMediaCodec* acodec)
{
SDLTRACE("%s", __func__);
JNIEnv *env = NULL;
/* Attach (or look up) the JNIEnv for the current thread. */
if (JNI_OK != SDL_JNI_SetupThreadEnv(&env)) {
ALOGE("%s: SetupThreadEnv failed", __func__);
return SDL_AMEDIA_ERROR_UNKNOWN;
}
SDL_AMediaCodec_Opaque *opaque = (SDL_AMediaCodec_Opaque *)acodec->opaque;
jobject android_media_codec = opaque->android_media_codec;
J4AC_MediaCodec__start(env, android_media_codec); /* calls back into the Android framework */
if (J4A_ExceptionCheck__catchAll(env)) { /* checks for a pending Java exception (catchAll presumably clears it) */
ALOGE("%s: start failed", __func__);
return SDL_AMEDIA_ERROR_UNKNOWN;
}
return SDL_AMEDIA_OK;
}
//> 此函數
/* Process-wide codec-object serial counter; 0 is never handed out. */
static volatile int g_amediacodec_object_serial;

/*
 * Produce the next non-zero object serial.
 * Uses an atomic pre-increment so concurrent callers get distinct values;
 * if the counter wraps to 0, it is bumped once more so 0 stays reserved.
 */
int SDL_AMediaCodec_create_object_serial()
{
    int serial = __sync_add_and_fetch(&g_amediacodec_object_serial, 1);
    return (serial != 0) ? serial
                         : __sync_add_and_fetch(&g_amediacodec_object_serial, 1);
}
//> __sync_add_and_fetch 函數的作用:在多線程中,對簡單的變量運算能保證結果的正確(原子操作)。
//> 6. 源碼路徑:ijksdl_codec_android_mediacodec_java.c 檔案中
/*
 * Create a Java MediaCodec by codec name and wrap it in an SDL_AMediaCodec.
 *
 * env        : JNI environment of the calling thread.
 * codec_name : Android codec name (e.g. "OMX.google.h264.decoder").
 *
 * Returns the wrapper (reference held by the caller) or NULL on failure.
 */
SDL_AMediaCodec* SDL_AMediaCodecJava_createByCodecName(JNIEnv *env, const char *codec_name)
{
    SDLTRACE("%s", __func__);

    jobject android_media_codec = J4AC_MediaCodec__createByCodecName__withCString__catchAll(env, codec_name);
    if (J4A_ExceptionCheck__catchAll(env) || !android_media_codec) {
        return NULL;
    }

    /* Install the decoder control interface; see SDL_AMediaCodecJava_init(). */
    SDL_AMediaCodec* acodec = SDL_AMediaCodecJava_init(env, android_media_codec);
    /* BUGFIX: init can return NULL (JNI/allocation failure); the original
     * dereferenced acodec unconditionally, crashing on that path. */
    if (acodec)
        acodec->object_serial = SDL_AMediaCodec_create_object_serial();

    /* The wrapper (if any) holds its own global ref; drop the local ref in
     * every case so the JNI local-reference table does not leak. */
    SDL_JNI_DeleteLocalRefP(env, &android_media_codec);
    return acodec;
}
//> 5. 源碼路徑:pipeline/ffpipenode_android_mediacodec_vdec.c 中,
/*
 * Build the MediaCodec-based hardware video-decode pipenode.
 *
 * Installs func_run_sync (or func_run_sync_loop when ffp->mediacodec_sync is
 * set) as the function executed by video_thread(), allocates the
 * mutex/condition primitives used by the decode loop, and creates the Java
 * MediaCodec wrapper for ffp->mediacodec_default_name.
 *
 * Returns the node, or NULL on failure (everything allocated so far is
 * released through ffpipenode_free_p -> func_destroy).
 */
IJKFF_Pipenode *ffpipenode_init_decoder_from_android_mediacodec(FFPlayer *ffp, IJKFF_Pipeline *pipeline, SDL_Vout *vout)
{
    /* Hardware MediaCodec requires Android API level 16+ (Jelly Bean). */
    if (SDL_Android_GetApiLevel() < IJK_API_16_JELLY_BEAN)
        return NULL;

    if (!ffp || !ffp->is)
        return NULL;

    IJKFF_Pipenode *node = ffpipenode_alloc(sizeof(IJKFF_Pipenode_Opaque));
    if (!node)
        return node;

    VideoState *is = ffp->is;
    IJKFF_Pipenode_Opaque *opaque = node->opaque;
    JNIEnv *env = NULL;

    node->func_destroy = func_destroy;
    if (ffp->mediacodec_sync) {
        node->func_run_sync = func_run_sync_loop;
    } else {
        node->func_run_sync = func_run_sync; /* the function video_thread() ends up running */
    }
    node->func_flush = func_flush;

    opaque->pipeline = pipeline;
    opaque->ffp      = ffp;
    opaque->decoder  = &is->viddec; /* the player's video decoder state */
    opaque->weak_vout = vout;       /* non-owning: vout lifetime managed elsewhere */

    opaque->acodec_mutex                      = SDL_CreateMutex();
    opaque->acodec_cond                       = SDL_CreateCond();
    opaque->acodec_first_dequeue_output_mutex = SDL_CreateMutex();
    opaque->acodec_first_dequeue_output_cond  = SDL_CreateCond();
    opaque->any_input_mutex                   = SDL_CreateMutex();
    opaque->any_input_cond                    = SDL_CreateCond();
    /* BUGFIX: the original condition tested acodec_cond twice and never
     * checked acodec_mutex or the any_input pair; verify every primitive. */
    if (!opaque->acodec_mutex || !opaque->acodec_cond ||
        !opaque->acodec_first_dequeue_output_mutex || !opaque->acodec_first_dequeue_output_cond ||
        !opaque->any_input_mutex || !opaque->any_input_cond) {
        ALOGE("%s:open_video_decoder: SDL_CreateCond() failed\n", __func__);
        goto fail;
    }

    opaque->codecpar = avcodec_parameters_alloc();
    if (!opaque->codecpar)
        goto fail;

    if (JNI_OK != SDL_JNI_SetupThreadEnv(&env)) {
        ALOGE("%s:create: SetupThreadEnv failed\n", __func__);
        goto fail;
    }

    ALOGI("%s:use default mediacodec name: %s\n", __func__, ffp->mediacodec_default_name);
    /* NOTE(review): unbounded strcpy — assumes mediacodec_default_name always
     * fits mcc.codec_name; confirm the buffer size where mcc is declared. */
    strcpy(opaque->mcc.codec_name, ffp->mediacodec_default_name);

    /* Create the Java-side MediaCodec and its callback plumbing. */
    opaque->acodec = SDL_AMediaCodecJava_createByCodecName(env, ffp->mediacodec_default_name);
    if (!opaque->acodec) {
        goto fail;
    }

    return node;

fail:
    ALOGW("%s: init fail\n", __func__);
    ffpipenode_free_p(&node);
    return NULL;
}
//> 4. 調用 ffpipenode_init_decoder_from_android_mediacodec() 者
/*
 * Pipeline hook: create the MediaCodec decode pipenode when any of the
 * hardware-decode options (all-videos / avc / hevc / mpeg2) is enabled.
 * Returns NULL when none is set, or when pipenode creation fails.
 */
static IJKFF_Pipenode *func_init_video_decoder(IJKFF_Pipeline *pipeline, FFPlayer *ffp)
{
    IJKFF_Pipeline_Opaque *opaque = pipeline->opaque;

    const int want_hw = ffp->mediacodec_all_videos || ffp->mediacodec_avc ||
                        ffp->mediacodec_hevc || ffp->mediacodec_mpeg2;
    if (!want_hw)
        return NULL;

    return ffpipenode_init_decoder_from_android_mediacodec(ffp, pipeline, opaque->weak_vout);
}
//> 3. 調用 func_init_video_decoder() 者
/*
 * Allocate the Android pipeline object and wire up its hooks
 * (video-decoder open/init/config and audio output open).
 * Returns NULL on allocation or mutex-creation failure.
 */
IJKFF_Pipeline *ffpipeline_create_from_android(FFPlayer *ffp)
{
    ALOGD("ffpipeline_create_from_android()\n");

    IJKFF_Pipeline *pipeline = ffpipeline_alloc(&g_pipeline_class, sizeof(IJKFF_Pipeline_Opaque));
    if (!pipeline)
        return NULL;

    IJKFF_Pipeline_Opaque *opaque = pipeline->opaque;
    opaque->ffp           = ffp;
    opaque->surface_mutex = SDL_CreateMutex();
    opaque->left_volume   = 1.0f; /* playback volume, left channel */
    opaque->right_volume  = 1.0f; /* playback volume, right channel */

    if (!opaque->surface_mutex) {
        ALOGE("ffpipeline-android:create SDL_CreateMutex failed\n");
        ffpipeline_free_p(&pipeline);
        return NULL;
    }

    pipeline->func_destroy              = func_destroy;
    pipeline->func_open_video_decoder   = func_open_video_decoder; /* open the video decoder */
    pipeline->func_open_audio_output    = func_open_audio_output;  /* open the audio output */
    pipeline->func_init_video_decoder   = func_init_video_decoder;
    pipeline->func_config_video_decoder = func_config_video_decoder;

    return pipeline;
}
//> 2. 調用 ffpipeline_create_from_android() 者,下函數源碼路徑:ijkplayer_android.c 檔案中
/*
 * Create an IjkMediaPlayer wired for Android: core FFPlayer, Android-surface
 * video output, and the Android decode pipeline.
 *
 * msg_loop : thread function that drains ffp->msg_queue and forwards events
 *            to the Java layer (see message_loop()).
 *
 * Returns the new player, or NULL on failure (partially built state is
 * released through ijkmp_dec_ref_p).
 */
IjkMediaPlayer *ijkmp_android_create(int(*msg_loop)(void*))
{
IjkMediaPlayer *mp = ijkmp_create(msg_loop); /* creates the FFPlayer; ffp->msg_queue carries player<->Android messages */
if (!mp)
goto fail;
mp->ffplayer->vout = SDL_VoutAndroid_CreateForAndroidSurface(); /* sets vout->create_overlay = func_create_overlay */
if (!mp->ffplayer->vout)
goto fail;
mp->ffplayer->pipeline = ffpipeline_create_from_android(mp->ffplayer); /* see ffpipeline_create_from_android() above */
if (!mp->ffplayer->pipeline)
goto fail;
ffpipeline_set_vout(mp->ffplayer->pipeline, mp->ffplayer->vout); /* connect the pipeline to the video output */
return mp;
fail:
ijkmp_dec_ref_p(&mp);
return NULL;
}
//> 1. 調用 ijkmp_android_create() 者,下函數源碼路徑:ijkplayer_jni.c 檔案中
/*
 * JNI entry for IjkMediaPlayer.native_setup(): create the native player and
 * bind it to the Java instance.
 *
 * thiz      : the Java IjkMediaPlayer object.
 * weak_this : Java-side weak reference used for event callbacks; promoted to
 *             a JNI global ref and stored on the native player.
 */
static void IjkMediaPlayer_native_setup(JNIEnv *env, jobject thiz, jobject weak_this)
{
MPTRACE("%s\n", __func__);
IjkMediaPlayer *mp = ijkmp_android_create(message_loop);
JNI_CHECK_GOTO(mp, env, "java/lang/OutOfMemoryError", "mpjni: native_setup: ijkmp_create() failed", LABEL_RETURN);
/* Store the native pointer on the Java object. */
jni_set_media_player(env, thiz, mp);
/* Keep a global ref back to the Java player for posting events. */
ijkmp_set_weak_thiz(mp, (*env)->NewGlobalRef(env, weak_this));
ijkmp_set_inject_opaque(mp, ijkmp_get_weak_thiz(mp));
ijkmp_set_ijkio_inject_opaque(mp, ijkmp_get_weak_thiz(mp));
ijkmp_android_set_mediacodec_select_callback(mp, mediacodec_select_callback, ijkmp_get_weak_thiz(mp));
LABEL_RETURN:
/* NOTE(review): releases this function's reference; presumably
 * jni_set_media_player took its own — confirm its ref semantics. */
ijkmp_dec_ref_p(&mp);
}
至此
我們看到此程式是使用者在 android 程式空間通過 JNI 接口 _native_setup() 建立播放器調用此程式。
我們把此部分相關資料結構梳理一下,這樣就能達到提綱挈領的作用。
/* Top-level player handle shared between the JNI layer and the ffplay core. */
struct IjkMediaPlayer {
volatile int ref_count; /* reference count; released via ijkmp_dec_ref_p() */
pthread_mutex_t mutex; /* guards the state fields below (see ijkmp_get_msg) */
FFPlayer *ffplayer; //> 1. pointer to the underlying FFPlayer
int (*msg_loop)(void*); //> 2. thread function exchanging messages with the player
SDL_Thread *msg_thread; /* handle of the running message-loop thread */
SDL_Thread _msg_thread; /* presumably the storage behind msg_thread — confirm */
int mp_state; /* MP_STATE_* player state-machine value */
char *data_source; //> 3. playback source address set by the user
void *weak_thiz; //> 4. global ref to the associated Android jobject
int restart; /* set on completion; next FFP_REQ_START re-opens playback */
int restart_from_beginning; /* restart from position 0 instead of the seek pos */
int seek_req; /* nonzero while a seek is pending */
long seek_msec; /* pending seek target (cleared on FFP_MSG_SEEK_COMPLETE) */
};
/* Private state attached to the Android IJKFF_Pipeline. */
typedef struct IJKFF_Pipeline_Opaque {
FFPlayer *ffp; //> back-pointer to the player
SDL_mutex *surface_mutex; /* presumably guards jsurface / reconfigure flag — confirm */
jobject jsurface; /* Java Surface the decoder renders into */
volatile bool is_surface_need_reconfigure; /* set when the surface changed and the codec must be reconfigured */
bool (*mediacodec_select_callback)(void *opaque, ijkmp_mediacodecinfo_context *mcc); /* asks Java which codec to use */
void *mediacodec_select_callback_opaque; //> callback context pointer
SDL_Vout *weak_vout; /* non-owning pointer to the video output */
float left_volume; /* playback volume, left channel */
float right_volume; /* playback volume, right channel */
} IJKFF_Pipeline_Opaque;
總結:
- 使用者在 android 程式空間通過 JNI 接口 _native_setup() 建立播放器;
- IjkMediaPlayer 基本特征如結構體定義,有一個FFPlayer、資訊交換能力;
- 通過 Pipeline 增加控制接口;
- 把播放器相關事件,通過 SDL_AMediaCodecJava_init() 函數實作接口回調功能;
-
使用者通過JNI 接口調用 IjkMediaPlayer_prepareAsync() 函數,讓播放器狀态轉換為預備态,
在此過程中調用 stream_open() 函數,建立 read_thread() 和 video_thread() 線程。
至此,結合前面幾篇代碼走讀内容,我們對 ijkPlayer 整體特性能夠建立起輪廓性認知。
第 3 部分 ijkPlayer 資訊如何交換
看看 message_loop() 函數實作如下:
/*
 * Message-loop thread body: attach a JNIEnv for this thread, then run
 * message_loop_n() until the player's message queue is aborted.
 * arg is the IjkMediaPlayer whose reference this thread owns; it is
 * released on exit via ijkmp_dec_ref_p().
 */
static int message_loop(void *arg)
{
MPTRACE("%s\n", __func__);
JNIEnv *env = NULL;
if (JNI_OK != SDL_JNI_SetupThreadEnv(&env)) {
ALOGE("%s: SetupThreadEnv failed\n", __func__);
/* NOTE(review): this early return skips ijkmp_dec_ref_p below — possible
 * reference leak; confirm against the caller's ref handling. */
return -1;
}
IjkMediaPlayer *mp = (IjkMediaPlayer*) arg;
JNI_CHECK_GOTO(mp, env, NULL, "mpjni: native_message_loop: null mp", LABEL_RETURN);
message_loop_n(env, mp);
LABEL_RETURN:
ijkmp_dec_ref_p(&mp);
MPTRACE("message_loop exit");
return 0;
}
//> 循環主體函數
static void message_loop_n(JNIEnv *env, IjkMediaPlayer *mp)
{
jobject weak_thiz = (jobject) ijkmp_get_weak_thiz(mp);
JNI_CHECK_GOTO(weak_thiz, env, NULL, "mpjni: message_loop_n: null weak_thiz", LABEL_RETURN);
while (1) {
AVMessage msg;
int retval = ijkmp_get_msg(mp, &msg, 1); //> 擷取事件内容、第一層 資訊處理循環體
if (retval < 0)
break;
// block-get should never return 0
assert(retval > 0);
switch (msg.what) { //> 第二層 資訊處理循環主體
case FFP_MSG_FLUSH:
MPTRACE("FFP_MSG_FLUSH:\n");
post_event(env, weak_thiz, MEDIA_NOP, 0, 0);
break;
case FFP_MSG_ERROR:
MPTRACE("FFP_MSG_ERROR: %d\n", msg.arg1);
post_event(env, weak_thiz, MEDIA_ERROR, MEDIA_ERROR_IJK_PLAYER, msg.arg1);
break;
case FFP_MSG_PREPARED:
MPTRACE("FFP_MSG_PREPARED:\n");
post_event(env, weak_thiz, MEDIA_PREPARED, 0, 0);
break;
case FFP_MSG_COMPLETED:
MPTRACE("FFP_MSG_COMPLETED:\n");
post_event(env, weak_thiz, MEDIA_PLAYBACK_COMPLETE, 0, 0);
break;
case FFP_MSG_VIDEO_SIZE_CHANGED: //> 分辨率改變
MPTRACE("FFP_MSG_VIDEO_SIZE_CHANGED: %d, %d\n", msg.arg1, msg.arg2);
post_event(env, weak_thiz, MEDIA_SET_VIDEO_SIZE, msg.arg1, msg.arg2);
break;
case FFP_MSG_SAR_CHANGED:
MPTRACE("FFP_MSG_SAR_CHANGED: %d, %d\n", msg.arg1, msg.arg2);
post_event(env, weak_thiz, MEDIA_SET_VIDEO_SAR, msg.arg1, msg.arg2);
break;
case FFP_MSG_VIDEO_RENDERING_START:
MPTRACE("FFP_MSG_VIDEO_RENDERING_START:\n");
post_event(env, weak_thiz, MEDIA_INFO, MEDIA_INFO_VIDEO_RENDERING_START, 0);
break;
case FFP_MSG_AUDIO_RENDERING_START:
MPTRACE("FFP_MSG_AUDIO_RENDERING_START:\n");
post_event(env, weak_thiz, MEDIA_INFO, MEDIA_INFO_AUDIO_RENDERING_START, 0);
break;
case FFP_MSG_VIDEO_ROTATION_CHANGED: //> 橫豎屏變化
MPTRACE("FFP_MSG_VIDEO_ROTATION_CHANGED: %d\n", msg.arg1);
post_event(env, weak_thiz, MEDIA_INFO, MEDIA_INFO_VIDEO_ROTATION_CHANGED, msg.arg1);
break;
case FFP_MSG_AUDIO_DECODED_START:
MPTRACE("FFP_MSG_AUDIO_DECODED_START:\n");
post_event(env, weak_thiz, MEDIA_INFO, MEDIA_INFO_AUDIO_DECODED_START, 0);
break;
case FFP_MSG_VIDEO_DECODED_START:
MPTRACE("FFP_MSG_VIDEO_DECODED_START:\n");
post_event(env, weak_thiz, MEDIA_INFO, MEDIA_INFO_VIDEO_DECODED_START, 0);
break;
case FFP_MSG_OPEN_INPUT:
MPTRACE("FFP_MSG_OPEN_INPUT:\n"); //> 打開輸入資料
post_event(env, weak_thiz, MEDIA_INFO, MEDIA_INFO_OPEN_INPUT, 0);
break;
case FFP_MSG_FIND_STREAM_INFO:
MPTRACE("FFP_MSG_FIND_STREAM_INFO:\n"); //> 解碼器比對成功
post_event(env, weak_thiz, MEDIA_INFO, MEDIA_INFO_FIND_STREAM_INFO, 0);
break;
case FFP_MSG_COMPONENT_OPEN:
MPTRACE("FFP_MSG_COMPONENT_OPEN:\n"); //> 打開流
post_event(env, weak_thiz, MEDIA_INFO, MEDIA_INFO_COMPONENT_OPEN, 0);
break;
case FFP_MSG_BUFFERING_START:
MPTRACE("FFP_MSG_BUFFERING_START:\n");
post_event(env, weak_thiz, MEDIA_INFO, MEDIA_INFO_BUFFERING_START, msg.arg1);
break;
case FFP_MSG_BUFFERING_END:
MPTRACE("FFP_MSG_BUFFERING_END:\n");
post_event(env, weak_thiz, MEDIA_INFO, MEDIA_INFO_BUFFERING_END, msg.arg1);
break;
case FFP_MSG_BUFFERING_UPDATE:
// MPTRACE("FFP_MSG_BUFFERING_UPDATE: %d, %d", msg.arg1, msg.arg2);
post_event(env, weak_thiz, MEDIA_BUFFERING_UPDATE, msg.arg1, msg.arg2);
break;
case FFP_MSG_BUFFERING_BYTES_UPDATE:
break;
case FFP_MSG_BUFFERING_TIME_UPDATE:
break;
case FFP_MSG_SEEK_COMPLETE:
MPTRACE("FFP_MSG_SEEK_COMPLETE:\n");
post_event(env, weak_thiz, MEDIA_SEEK_COMPLETE, 0, 0);
break;
case FFP_MSG_ACCURATE_SEEK_COMPLETE:
MPTRACE("FFP_MSG_ACCURATE_SEEK_COMPLETE:\n");
post_event(env, weak_thiz, MEDIA_INFO, MEDIA_INFO_MEDIA_ACCURATE_SEEK_COMPLETE, msg.arg1);
break;
case FFP_MSG_PLAYBACK_STATE_CHANGED:
break;
case FFP_MSG_TIMED_TEXT:
if (msg.obj) {
jstring text = (*env)->NewStringUTF(env, (char *)msg.obj);
post_event2(env, weak_thiz, MEDIA_TIMED_TEXT, 0, 0, text);
J4A_DeleteLocalRef__p(env, &text);
}
else {
post_event2(env, weak_thiz, MEDIA_TIMED_TEXT, 0, 0, NULL);
}
break;
case FFP_MSG_GET_IMG_STATE:
if (msg.obj) {
jstring file_name = (*env)->NewStringUTF(env, (char *)msg.obj);
post_event2(env, weak_thiz, MEDIA_GET_IMG_STATE, msg.arg1, msg.arg2, file_name);
J4A_DeleteLocalRef__p(env, &file_name);
}
else {
post_event2(env, weak_thiz, MEDIA_GET_IMG_STATE, msg.arg1, msg.arg2, NULL);
}
break;
case FFP_MSG_VIDEO_SEEK_RENDERING_START:
MPTRACE("FFP_MSG_VIDEO_SEEK_RENDERING_START:\n");
post_event(env, weak_thiz, MEDIA_INFO, MEDIA_INFO_VIDEO_SEEK_RENDERING_START, msg.arg1);
break;
case FFP_MSG_AUDIO_SEEK_RENDERING_START:
MPTRACE("FFP_MSG_AUDIO_SEEK_RENDERING_START:\n");
post_event(env, weak_thiz, MEDIA_INFO, MEDIA_INFO_AUDIO_SEEK_RENDERING_START, msg.arg1);
break;
default:
ALOGE("unknown FFP_MSG_xxx(%d)\n", msg.what);
break;
}
msg_free_res(&msg);
}
LABEL_RETURN:
;
}
第一層 資訊循環處理
/* need to call msg_free_res for freeing the resouce obtained in msg */
/* need to call msg_free_res for freeing the resouce obtained in msg */
/*
 * First-layer message pump: pop one message from mp->ffplayer->msg_queue,
 * apply its player-state side effects here (under mp->mutex), and return it
 * to the caller (message_loop_n) for forwarding to Java.
 * FFP_REQ_* request messages are consumed internally: they set
 * continue_wait_next_msg and the loop fetches the next message instead of
 * returning.
 * Returns msg_queue_get()'s result: < 0 aborted, > 0 message delivered.
 */
int ijkmp_get_msg(IjkMediaPlayer *mp, AVMessage *msg, int block)
{
assert(mp);
while (1) {
int continue_wait_next_msg = 0;
int retval = msg_queue_get(&mp->ffplayer->msg_queue, msg, block); /* pop from the player's message queue */
if (retval <= 0)
return retval;
switch (msg->what) { /* drive the player state machine from the message */
case FFP_MSG_PREPARED: /* player reached the prepared state */
MPTRACE("ijkmp_get_msg: FFP_MSG_PREPARED\n");
pthread_mutex_lock(&mp->mutex);
if (mp->mp_state == MP_STATE_ASYNC_PREPARING) {
ijkmp_change_state_l(mp, MP_STATE_PREPARED);
} else {
// FIXME: 1: onError() ?
av_log(mp->ffplayer, AV_LOG_DEBUG, "FFP_MSG_PREPARED: expecting mp_state==MP_STATE_ASYNC_PREPARING\n");
}
/* When autostart is disabled, park the player in PAUSED. */
if (!mp->ffplayer->start_on_prepared) {
ijkmp_change_state_l(mp, MP_STATE_PAUSED);
}
pthread_mutex_unlock(&mp->mutex);
break;
case FFP_MSG_COMPLETED:
MPTRACE("ijkmp_get_msg: FFP_MSG_COMPLETED\n");
pthread_mutex_lock(&mp->mutex);
/* Remember that the next start must restart playback from the top. */
mp->restart = 1;
mp->restart_from_beginning = 1;
ijkmp_change_state_l(mp, MP_STATE_COMPLETED);
pthread_mutex_unlock(&mp->mutex);
break;
case FFP_MSG_SEEK_COMPLETE:
MPTRACE("ijkmp_get_msg: FFP_MSG_SEEK_COMPLETE\n");
pthread_mutex_lock(&mp->mutex);
mp->seek_req = 0;
mp->seek_msec = 0;
pthread_mutex_unlock(&mp->mutex);
break;
case FFP_REQ_START: /* request: start playback (consumed here) */
MPTRACE("ijkmp_get_msg: FFP_REQ_START\n");
continue_wait_next_msg = 1;
pthread_mutex_lock(&mp->mutex);
if (0 == ikjmp_chkst_start_l(mp->mp_state)) {
// FIXME: 8 check seekable
if (mp->restart) {
if (mp->restart_from_beginning) {
av_log(mp->ffplayer, AV_LOG_DEBUG, "ijkmp_get_msg: FFP_REQ_START: restart from beginning\n");
retval = ffp_start_from_l(mp->ffplayer, 0);
if (retval == 0)
ijkmp_change_state_l(mp, MP_STATE_STARTED);
} else {
av_log(mp->ffplayer, AV_LOG_DEBUG, "ijkmp_get_msg: FFP_REQ_START: restart from seek pos\n");
retval = ffp_start_l(mp->ffplayer);
if (retval == 0)
ijkmp_change_state_l(mp, MP_STATE_STARTED);
}
mp->restart = 0;
mp->restart_from_beginning = 0;
} else {
av_log(mp->ffplayer, AV_LOG_DEBUG, "ijkmp_get_msg: FFP_REQ_START: start on fly\n");
retval = ffp_start_l(mp->ffplayer);
if (retval == 0)
ijkmp_change_state_l(mp, MP_STATE_STARTED);
}
}
pthread_mutex_unlock(&mp->mutex);
break;
case FFP_REQ_PAUSE: /* request: pause playback (consumed here) */
MPTRACE("ijkmp_get_msg: FFP_REQ_PAUSE\n");
continue_wait_next_msg = 1;
pthread_mutex_lock(&mp->mutex);
if (0 == ikjmp_chkst_pause_l(mp->mp_state)) {
int pause_ret = ffp_pause_l(mp->ffplayer);
if (pause_ret == 0)
ijkmp_change_state_l(mp, MP_STATE_PAUSED);
}
pthread_mutex_unlock(&mp->mutex);
break;
case FFP_REQ_SEEK: /* request: seek (consumed here) */
MPTRACE("ijkmp_get_msg: FFP_REQ_SEEK\n");
continue_wait_next_msg = 1;
pthread_mutex_lock(&mp->mutex);
if (0 == ikjmp_chkst_seek_l(mp->mp_state)) {
mp->restart_from_beginning = 0;
if (0 == ffp_seek_to_l(mp->ffplayer, msg->arg1)) {
av_log(mp->ffplayer, AV_LOG_DEBUG, "ijkmp_get_msg: FFP_REQ_SEEK: seek to %d\n", (int)msg->arg1);
}
}
pthread_mutex_unlock(&mp->mutex);
break;
}
/* Request messages are not surfaced to the caller: free and refetch. */
if (continue_wait_next_msg) {
msg_free_res(msg);
continue;
}
return retval;
}
return -1;
}
//> 從隊列中擷取事件内容
/* return < 0 if aborted, 0 if no msg and > 0 if msg. */
/* return < 0 if aborted, 0 if no msg and > 0 if msg. */
/*
 * Pop the next message from the queue.
 * In blocking mode waits on q->cond until a message arrives or the queue
 * is aborted. Ownership of msg->obj transfers to the caller, who must
 * release it with msg_free_res().
 */
inline static int msg_queue_get(MessageQueue *q, AVMessage *msg, int block)
{
AVMessage *msg1;
int ret;
SDL_LockMutex(q->mutex);
for (;;) {
if (q->abort_request) {
ret = -1;
break;
}
msg1 = q->first_msg;
if (msg1) {
/* Unlink the head of the singly linked list. */
q->first_msg = msg1->next;
if (!q->first_msg)
q->last_msg = NULL;
q->nb_messages--;
*msg = *msg1;
msg1->obj = NULL; /* obj ownership has moved into *msg */
#ifdef FFP_MERGE
av_free(msg1);
#else
/* Keep the node on the recycle list to avoid reallocation. */
msg1->next = q->recycle_msg;
q->recycle_msg = msg1;
#endif
ret = 1;
break;
} else if (!block) {
ret = 0;
break;
} else {
/* Blocking mode: wait for a producer to signal the condition. */
SDL_CondWait(q->cond, q->mutex);
}
}
SDL_UnlockMutex(q->mutex);
return ret;
}
總結:
- 在建立 ijkPlayer 時,把 msg_loop() 函數與播放器關聯起來;
-
在消息處理采用 二層處理機制,第一層 ffplay 播放器相關事件處理,
第二層是安卓與播放器間的消息交換處理。