1. ijkplayer layering
- ijkplayer Java-layer API: github.com/JeffMony/ij…
- ijkplayer native-layer player interface: github.com/JeffMony/ij…
- ijkplayer native-layer player callbacks: github.com/JeffMony/ij…
- ijkplayer SDL-layer interface: github.com/JeffMony/ij…
- The Java-layer player wrapper class: github.com/JeffMony/ij…
All of the layered calls analyzed below start from this class.
2. The IjkMediaPlayer class
IjkMediaPlayer extends AbstractMediaPlayer, and AbstractMediaPlayer implements the IMediaPlayer interface. IMediaPlayer is the top-level player interface and exposes the full set of player features.
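Because IMediaPlayer sits at the top, application code can be written against the interface and switch implementations freely. A minimal sketch of that idea (the factory helper below is illustrative and not part of the library; AndroidMediaPlayer is ijkplayer's wrapper around the platform MediaPlayer):
import tv.danmaku.ijk.media.player.AndroidMediaPlayer;
import tv.danmaku.ijk.media.player.IMediaPlayer;
import tv.danmaku.ijk.media.player.IjkMediaPlayer;

public final class PlayerFactory {
    // Hypothetical helper: both players are driven through the common IMediaPlayer interface.
    public static IMediaPlayer create(boolean useIjk) {
        return useIjk ? new IjkMediaPlayer()      // the ffmpeg-based player analyzed in this series
                      : new AndroidMediaPlayer(); // thin wrapper over the system MediaPlayer
    }
}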
The OnMediaCodecSelectListener interface is the callback for MediaCodec selection, which makes it easy to collect performance statistics:
public interface OnMediaCodecSelectListener {
    String onMediaCodecSelect(IMediaPlayer mp, String mimeType, int profile, int level);
}
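A rough usage sketch (setOnMediaCodecSelectListener is assumed from the IjkMediaPlayer API, and treating a null return as "keep the default codec choice" is also an assumption):
mPlayer.setOnMediaCodecSelectListener(new IjkMediaPlayer.OnMediaCodecSelectListener() {
    @Override
    public String onMediaCodecSelect(IMediaPlayer mp, String mimeType, int profile, int level) {
        // Record which MediaCodec decoder is being considered, e.g. for performance statistics.
        Log.i("CodecSelect", "mime=" + mimeType + " profile=" + profile + " level=" + level);
        return null; // assumption: null falls back to ijkplayer's default selector
    }
});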
The OnNativeInvokeListener interface is the player's network-event callback interface:
public interface OnNativeInvokeListener {

    int CTRL_WILL_TCP_OPEN = 0x20001;               // NO ARGS
    int CTRL_DID_TCP_OPEN = 0x20002;                // ARG_ERROR, ARG_FAMILIY, ARG_IP, ARG_PORT, ARG_FD
    int CTRL_WILL_HTTP_OPEN = 0x20003;              // ARG_URL, ARG_SEGMENT_INDEX, ARG_RETRY_COUNTER
    int CTRL_WILL_LIVE_OPEN = 0x20005;              // ARG_URL, ARG_RETRY_COUNTER
    int CTRL_WILL_CONCAT_RESOLVE_SEGMENT = 0x20007; // ARG_URL, ARG_SEGMENT_INDEX, ARG_RETRY_COUNTER

    int EVENT_WILL_HTTP_OPEN = 0x1;                 // ARG_URL
    int EVENT_DID_HTTP_OPEN = 0x2;                  // ARG_URL, ARG_ERROR, ARG_HTTP_CODE
    int EVENT_WILL_HTTP_SEEK = 0x3;                 // ARG_URL, ARG_OFFSET
    int EVENT_DID_HTTP_SEEK = 0x4;                  // ARG_URL, ARG_OFFSET, ARG_ERROR, ARG_HTTP_CODE, ARG_FILE_SIZE

    String ARG_URL = "url";
    String ARG_SEGMENT_INDEX = "segment_index";
    String ARG_RETRY_COUNTER = "retry_counter";
    String ARG_ERROR = "error";
    String ARG_FAMILIY = "family";
    String ARG_IP = "ip";
    String ARG_PORT = "port";
    String ARG_FD = "fd";
    String ARG_OFFSET = "offset";
    String ARG_HTTP_CODE = "http_code";
    String ARG_FILE_SIZE = "file_size";

    /*
     * @return true if invoke is handled
     * @throws Exception on any error
     */
    boolean onNativeInvoke(int what, Bundle args);
}
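A sketch of hooking it up for network statistics (setOnNativeInvokeListener is assumed from the IjkMediaPlayer API; per the Javadoc above, returning true means the invoke was handled):
mPlayer.setOnNativeInvokeListener(new IjkMediaPlayer.OnNativeInvokeListener() {
    @Override
    public boolean onNativeInvoke(int what, Bundle args) {
        if (what == EVENT_DID_HTTP_OPEN) {
            // Collect per-request network statistics from the args Bundle.
            String url = args.getString(ARG_URL);
            int httpCode = args.getInt(ARG_HTTP_CODE);
            Log.i("NativeInvoke", "http open " + url + " -> " + httpCode);
        }
        return false; // assumption: false leaves ijkplayer's default handling untouched
    }
});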
3. Player flow
To use the ijkplayer player, the following calls are made in order:
mPlayer = new IjkMediaPlayer();
mPlayer.setSurface(surface);
mPlayer.setDataSource(content, uri, headers);
mPlayer.prepareAsync();
mPlayer.start();
Starting from these calls, we will dig step by step into ijkplayer's overall call flow and how it works.
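Because prepareAsync() is asynchronous, real code usually defers start() until onPrepared fires instead of calling it immediately. A hedged sketch of the same flow (the listener setters are assumed from the IMediaPlayer interface):
IjkMediaPlayer player = new IjkMediaPlayer();
player.setSurface(surface);                          // render target, e.g. from a TextureView/SurfaceView
player.setOnPreparedListener(new IMediaPlayer.OnPreparedListener() {
    @Override
    public void onPrepared(IMediaPlayer mp) {
        mp.start();                                  // start only after the stream is ready
    }
});
player.setOnErrorListener(new IMediaPlayer.OnErrorListener() {
    @Override
    public boolean onError(IMediaPlayer mp, int what, int extra) {
        Log.e("Player", "error what=" + what + " extra=" + extra);
        return true;                                 // true = error consumed, no further callbacks
    }
});
player.setDataSource(content, uri, headers);         // may throw IOException
player.prepareAsync();                               // demuxer/decoder setup runs off the UI thread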
4. Player initialization
One simple line of code initializes the ijkplayer player:
mPlayer = new IjkMediaPlayer();
github.com/JeffMony/ij…
ijkplayer/ijkmedia/ijkplayer/android/ijkplayer_jni.c
ijkplayer/ijkmedia/ijkplayer/android/ijkplayer_android.c
ijkplayer/ijkmedia/ijkplayer/android/pipeline/ffpipeline_android.c
ijkplayer/ijkmedia/ijkplayer/ijkplayer.c
ijkplayer/ijkmedia/ijkplayer/ijkplayer_internal.h
ijkplayer/ijkmedia/ijkplayer/ff_ffplay.c
ijkplayer/ijkmedia/ijkplayer/ff_ffplay_def.h
ijkplayer/ijkmedia/ijkplayer/ijkmeta.c
ijkplayer/ijkmedia/ijksdl/android/ijksdl_vout_android_surface.c
ijkplayer/ijkmedia/ijksdl/android/ijksdl_vout_android_nativewindow.c
ijkplayer/ijkmedia/ijksdl/ijksdl_vout_internal.h
The ijkplayer initialization flow touches quite a few files; we will mainly follow the flow through the code and explain the principles from the code.
4.1 Loading the shared libraries
IjkMediaPlayer.java performs the one-time native library loading, which mainly loads the three .so files below:
public static void loadLibrariesOnce(IjkLibLoader libLoader) {
    synchronized (IjkMediaPlayer.class) {
        if (!mIsLibLoaded) {
            if (libLoader == null)
                libLoader = sLocalLibLoader;

            libLoader.loadLibrary("ijkffmpeg");
            libLoader.loadLibrary("ijksdl");
            libLoader.loadLibrary("ijkplayer");
            mIsLibLoaded = true;
        }
    }
}
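loadLibrariesOnce takes an IjkLibLoader, so an app can substitute its own loading strategy (for example loading the .so files from a downloaded directory). A minimal sketch, assuming only the IjkLibLoader interface from the ijkplayer sources:
IjkMediaPlayer.loadLibrariesOnce(new IjkLibLoader() {
    @Override
    public void loadLibrary(String libName) throws UnsatisfiedLinkError, SecurityException {
        // Default behavior is System.loadLibrary; hook here to load from a custom path instead.
        System.loadLibrary(libName); // called with "ijkffmpeg", "ijksdl", "ijkplayer" in turn
    }
});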
The Java layer reaches the native layer through the JNI method mapping table in ijkplayer/ijkmedia/ijkplayer/android/ijkplayer_jni.c:
static JNINativeMethod g_methods[] = {
{
"_setDataSource",
"(Ljava/lang/String;[Ljava/lang/String;[Ljava/lang/String;)V",
(void *) IjkMediaPlayer_setDataSourceAndHeaders
},
{ "_setDataSourceFd", "(I)V", (void *) IjkMediaPlayer_setDataSourceFd },
{ "_setDataSource", "(Ltv/danmaku/ijk/media/player/misc/IMediaDataSource;)V", (void *)IjkMediaPlayer_setDataSourceCallback },
{ "_setAndroidIOCallback", "(Ltv/danmaku/ijk/media/player/misc/IAndroidIO;)V", (void *)IjkMediaPlayer_setAndroidIOCallback },
{ "_setVideoSurface", "(Landroid/view/Surface;)V", (void *) IjkMediaPlayer_setVideoSurface },
{ "_prepareAsync", "()V", (void *) IjkMediaPlayer_prepareAsync },
{ "_start", "()V", (void *) IjkMediaPlayer_start },
{ "_stop", "()V", (void *) IjkMediaPlayer_stop },
{ "seekTo", "(J)V", (void *) IjkMediaPlayer_seekTo },
{ "_pause", "()V", (void *) IjkMediaPlayer_pause },
{ "isPlaying", "()Z", (void *) IjkMediaPlayer_isPlaying },
{ "getCurrentPosition", "()J", (void *) IjkMediaPlayer_getCurrentPosition },
{ "getDuration", "()J", (void *) IjkMediaPlayer_getDuration },
{ "_release", "()V", (void *) IjkMediaPlayer_release },
{ "_reset", "()V", (void *) IjkMediaPlayer_reset },
{ "setVolume", "(FF)V", (void *) IjkMediaPlayer_setVolume },
{ "getAudioSessionId", "()I", (void *) IjkMediaPlayer_getAudioSessionId },
{ "native_init", "()V", (void *) IjkMediaPlayer_native_init },
{ "native_setup", "(Ljava/lang/Object;)V", (void *) IjkMediaPlayer_native_setup },
{ "native_finalize", "()V", (void *) IjkMediaPlayer_native_finalize },
{ "_setOption", "(ILjava/lang/String;Ljava/lang/String;)V", (void *) IjkMediaPlayer_setOption },
{ "_setOption", "(ILjava/lang/String;J)V", (void *) IjkMediaPlayer_setOptionLong },
{ "_getColorFormatName", "(I)Ljava/lang/String;", (void *) IjkMediaPlayer_getColorFormatName },
{ "_getVideoCodecInfo", "()Ljava/lang/String;", (void *) IjkMediaPlayer_getVideoCodecInfo },
{ "_getAudioCodecInfo", "()Ljava/lang/String;", (void *) IjkMediaPlayer_getAudioCodecInfo },
{ "_getMediaMeta", "()Landroid/os/Bundle;", (void *) IjkMediaPlayer_getMediaMeta },
{ "_setLoopCount", "(I)V", (void *) IjkMediaPlayer_setLoopCount },
{ "_getLoopCount", "()I", (void *) IjkMediaPlayer_getLoopCount },
{ "_getPropertyFloat", "(IF)F", (void *) ijkMediaPlayer_getPropertyFloat },
{ "_setPropertyFloat", "(IF)V", (void *) ijkMediaPlayer_setPropertyFloat },
{ "_getPropertyLong", "(IJ)J", (void *) ijkMediaPlayer_getPropertyLong },
{ "_setPropertyLong", "(IJ)V", (void *) ijkMediaPlayer_setPropertyLong },
{ "_setStreamSelected", "(IZ)V", (void *) ijkMediaPlayer_setStreamSelected },
{ "native_profileBegin", "(Ljava/lang/String;)V", (void *) IjkMediaPlayer_native_profileBegin },
{ "native_profileEnd", "()V", (void *) IjkMediaPlayer_native_profileEnd },
{ "native_setLogLevel", "(I)V", (void *) IjkMediaPlayer_native_setLogLevel },
{ "_setFrameAtTime", "(Ljava/lang/String;JJII)V", (void *) IjkMediaPlayer_setFrameAtTime },
};
4.2 Initializing the player message mechanism
EventHandler is a Handler subclass dedicated to the player's callbacks. The player has many callbacks, such as onPrepared, onInfo and onVideoSizeChanged; after they come up from the native layer they are dispatched through this Handler and then delivered to the application.
The native layer calls up through the postEventFromNative function; note the final mp.mEventHandler.sendMessage(m), which hands everything to EventHandler for unified processing:
@CalledByNative
private static void postEventFromNative(Object weakThiz, int what,
        int arg1, int arg2, Object obj) {
    if (weakThiz == null)
        return;

    @SuppressWarnings("rawtypes")
    IjkMediaPlayer mp = (IjkMediaPlayer) ((WeakReference) weakThiz).get();
    if (mp == null) {
        return;
    }

    if (what == MEDIA_INFO && arg1 == MEDIA_INFO_STARTED_AS_NEXT) {
        // this acquires the wakelock if needed, and sets the client side
        // state
        mp.start();
    }
    if (mp.mEventHandler != null) {
        Message m = mp.mEventHandler.obtainMessage(what, arg1, arg2, obj);
        mp.mEventHandler.sendMessage(m);
    }
}
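EventHandler itself is just a Handler bound to a Looper: postEventFromNative hands it a Message, and handleMessage turns the MEDIA_* codes into listener callbacks. An illustrative (not verbatim) sketch of that dispatch pattern:
private class EventHandler extends Handler {
    private final WeakReference<IjkMediaPlayer> mWeakPlayer;

    EventHandler(IjkMediaPlayer mp, Looper looper) {
        super(looper);
        mWeakPlayer = new WeakReference<IjkMediaPlayer>(mp);
    }

    @Override
    public void handleMessage(Message msg) {
        IjkMediaPlayer player = mWeakPlayer.get();
        if (player == null)
            return;
        switch (msg.what) {
            case MEDIA_PREPARED:           // forwarded from native FFP_MSG_PREPARED
                player.notifyOnPrepared();
                break;
            case MEDIA_SET_VIDEO_SIZE:     // forwarded from native FFP_MSG_VIDEO_SIZE_CHANGED
                player.notifyOnVideoSizeChanged(msg.arg1, msg.arg2, 0, 0);
                break;
            // ... the remaining MEDIA_* codes map to the other listeners
        }
    }
}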
Next, the IjkMediaPlayer_native_setup method in ijkplayer_jni.c is invoked:
static void
IjkMediaPlayer_native_setup(JNIEnv *env, jobject thiz, jobject weak_this)
{
    MPTRACE("%s\n", __func__);
    IjkMediaPlayer *mp = ijkmp_android_create(message_loop);
    JNI_CHECK_GOTO(mp, env, "java/lang/OutOfMemoryError", "mpjni: native_setup: ijkmp_create() failed", LABEL_RETURN);

    jni_set_media_player(env, thiz, mp);
    ijkmp_set_weak_thiz(mp, (*env)->NewGlobalRef(env, weak_this));
    ijkmp_set_inject_opaque(mp, ijkmp_get_weak_thiz(mp));
    ijkmp_set_ijkio_inject_opaque(mp, ijkmp_get_weak_thiz(mp));
    ijkmp_android_set_mediacodec_select_callback(mp, mediacodec_select_callback, ijkmp_get_weak_thiz(mp));

LABEL_RETURN:
    ijkmp_dec_ref_p(&mp);
}
ijkmp_android_create(message_loop) is executed with the message_loop function pointer passed in; message_loop in turn calls message_loop_n:
static void message_loop_n(JNIEnv *env, IjkMediaPlayer *mp)
{
jobject weak_thiz = (jobject) ijkmp_get_weak_thiz(mp);
JNI_CHECK_GOTO(weak_thiz, env, NULL, "mpjni: message_loop_n: null weak_thiz", LABEL_RETURN);
while (1) {
AVMessage msg;
int retval = ijkmp_get_msg(mp, &msg, 1);
if (retval < 0)
break;
// block-get should never return 0
assert(retval > 0);
switch (msg.what) {
case FFP_MSG_FLUSH:
MPTRACE("FFP_MSG_FLUSH:\n");
post_event(env, weak_thiz, MEDIA_NOP, 0, 0);
break;
case FFP_MSG_ERROR:
MPTRACE("FFP_MSG_ERROR: %d\n", msg.arg1);
post_event(env, weak_thiz, MEDIA_ERROR, MEDIA_ERROR_IJK_PLAYER, msg.arg1);
break;
case FFP_MSG_PREPARED:
MPTRACE("FFP_MSG_PREPARED:\n");
post_event(env, weak_thiz, MEDIA_PREPARED, 0, 0);
break;
case FFP_MSG_COMPLETED:
MPTRACE("FFP_MSG_COMPLETED:\n");
post_event(env, weak_thiz, MEDIA_PLAYBACK_COMPLETE, 0, 0);
break;
case FFP_MSG_VIDEO_SIZE_CHANGED:
MPTRACE("FFP_MSG_VIDEO_SIZE_CHANGED: %d, %d\n", msg.arg1, msg.arg2);
post_event(env, weak_thiz, MEDIA_SET_VIDEO_SIZE, msg.arg1, msg.arg2);
break;
case FFP_MSG_SAR_CHANGED:
MPTRACE("FFP_MSG_SAR_CHANGED: %d, %d\n", msg.arg1, msg.arg2);
post_event(env, weak_thiz, MEDIA_SET_VIDEO_SAR, msg.arg1, msg.arg2);
break;
case FFP_MSG_VIDEO_RENDERING_START:
MPTRACE("FFP_MSG_VIDEO_RENDERING_START:\n");
post_event(env, weak_thiz, MEDIA_INFO, MEDIA_INFO_VIDEO_RENDERING_START, 0);
break;
case FFP_MSG_AUDIO_RENDERING_START:
MPTRACE("FFP_MSG_AUDIO_RENDERING_START:\n");
post_event(env, weak_thiz, MEDIA_INFO, MEDIA_INFO_AUDIO_RENDERING_START, 0);
break;
case FFP_MSG_VIDEO_ROTATION_CHANGED:
MPTRACE("FFP_MSG_VIDEO_ROTATION_CHANGED: %d\n", msg.arg1);
post_event(env, weak_thiz, MEDIA_INFO, MEDIA_INFO_VIDEO_ROTATION_CHANGED, msg.arg1);
break;
case FFP_MSG_AUDIO_DECODED_START:
MPTRACE("FFP_MSG_AUDIO_DECODED_START:\n");
post_event(env, weak_thiz, MEDIA_INFO, MEDIA_INFO_AUDIO_DECODED_START, 0);
break;
case FFP_MSG_VIDEO_DECODED_START:
MPTRACE("FFP_MSG_VIDEO_DECODED_START:\n");
post_event(env, weak_thiz, MEDIA_INFO, MEDIA_INFO_VIDEO_DECODED_START, 0);
break;
case FFP_MSG_OPEN_INPUT:
MPTRACE("FFP_MSG_OPEN_INPUT:\n");
post_event(env, weak_thiz, MEDIA_INFO, MEDIA_INFO_OPEN_INPUT, 0);
break;
case FFP_MSG_FIND_STREAM_INFO:
MPTRACE("FFP_MSG_FIND_STREAM_INFO:\n");
post_event(env, weak_thiz, MEDIA_INFO, MEDIA_INFO_FIND_STREAM_INFO, 0);
break;
case FFP_MSG_COMPONENT_OPEN:
MPTRACE("FFP_MSG_COMPONENT_OPEN:\n");
post_event(env, weak_thiz, MEDIA_INFO, MEDIA_INFO_COMPONENT_OPEN, 0);
break;
case FFP_MSG_BUFFERING_START:
MPTRACE("FFP_MSG_BUFFERING_START:\n");
post_event(env, weak_thiz, MEDIA_INFO, MEDIA_INFO_BUFFERING_START, msg.arg1);
break;
case FFP_MSG_BUFFERING_END:
MPTRACE("FFP_MSG_BUFFERING_END:\n");
post_event(env, weak_thiz, MEDIA_INFO, MEDIA_INFO_BUFFERING_END, msg.arg1);
break;
case FFP_MSG_BUFFERING_UPDATE:
// MPTRACE("FFP_MSG_BUFFERING_UPDATE: %d, %d", msg.arg1, msg.arg2);
post_event(env, weak_thiz, MEDIA_BUFFERING_UPDATE, msg.arg1, msg.arg2);
break;
case FFP_MSG_BUFFERING_BYTES_UPDATE:
break;
case FFP_MSG_BUFFERING_TIME_UPDATE:
break;
case FFP_MSG_SEEK_COMPLETE:
MPTRACE("FFP_MSG_SEEK_COMPLETE:\n");
post_event(env, weak_thiz, MEDIA_SEEK_COMPLETE, 0, 0);
break;
case FFP_MSG_ACCURATE_SEEK_COMPLETE:
MPTRACE("FFP_MSG_ACCURATE_SEEK_COMPLETE:\n");
post_event(env, weak_thiz, MEDIA_INFO, MEDIA_INFO_MEDIA_ACCURATE_SEEK_COMPLETE, msg.arg1);
break;
case FFP_MSG_PLAYBACK_STATE_CHANGED:
break;
case FFP_MSG_TIMED_TEXT:
if (msg.obj) {
jstring text = (*env)->NewStringUTF(env, (char *)msg.obj);
post_event2(env, weak_thiz, MEDIA_TIMED_TEXT, 0, 0, text);
J4A_DeleteLocalRef__p(env, &text);
}
else {
post_event2(env, weak_thiz, MEDIA_TIMED_TEXT, 0, 0, NULL);
}
break;
case FFP_MSG_GET_IMG_STATE:
if (msg.obj) {
jstring file_name = (*env)->NewStringUTF(env, (char *)msg.obj);
post_event2(env, weak_thiz, MEDIA_GET_IMG_STATE, msg.arg1, msg.arg2, file_name);
J4A_DeleteLocalRef__p(env, &file_name);
}
else {
post_event2(env, weak_thiz, MEDIA_GET_IMG_STATE, msg.arg1, msg.arg2, NULL);
}
break;
case FFP_MSG_VIDEO_SEEK_RENDERING_START:
MPTRACE("FFP_MSG_VIDEO_SEEK_RENDERING_START:\n");
post_event(env, weak_thiz, MEDIA_INFO, MEDIA_INFO_VIDEO_SEEK_RENDERING_START, msg.arg1);
break;
case FFP_MSG_AUDIO_SEEK_RENDERING_START:
MPTRACE("FFP_MSG_AUDIO_SEEK_RENDERING_START:\n");
post_event(env, weak_thiz, MEDIA_INFO, MEDIA_INFO_AUDIO_SEEK_RENDERING_START, msg.arg1);
break;
default:
ALOGE("unknown FFP_MSG_xxx(%d)\n", msg.what);
break;
}
msg_free_res(&msg);
}
LABEL_RETURN:
;
}
The message_loop_n function contains an infinite loop that keeps fetching the player's current state messages via ijkmp_get_msg and then calls post_event, which invokes the Java-layer postEventFromNative function; this ties the whole flow together.
int retval = ijkmp_get_msg(mp, &msg, 1);
Inside the while loop, ijkmp_get_msg is called to fetch the player's current state message; it is implemented in ijkplayer/ijkmedia/ijkplayer/ijkplayer.c:
int ijkmp_get_msg(IjkMediaPlayer *mp, AVMessage *msg, int block)
{
assert(mp);
while (1) {
int continue_wait_next_msg = 0;
int retval = msg_queue_get(&mp->ffplayer->msg_queue, msg, block);
if (retval <= 0)
return retval;
switch (msg->what) {
case FFP_MSG_PREPARED:
MPTRACE("ijkmp_get_msg: FFP_MSG_PREPARED\n");
pthread_mutex_lock(&mp->mutex);
if (mp->mp_state == MP_STATE_ASYNC_PREPARING) {
ijkmp_change_state_l(mp, MP_STATE_PREPARED);
} else {
// FIXME: 1: onError() ?
av_log(mp->ffplayer, AV_LOG_DEBUG, "FFP_MSG_PREPARED: expecting mp_state==MP_STATE_ASYNC_PREPARING\n");
}
if (!mp->ffplayer->start_on_prepared) {
ijkmp_change_state_l(mp, MP_STATE_PAUSED);
}
pthread_mutex_unlock(&mp->mutex);
break;
case FFP_MSG_COMPLETED:
MPTRACE("ijkmp_get_msg: FFP_MSG_COMPLETED\n");
pthread_mutex_lock(&mp->mutex);
mp->restart = 1;
mp->restart_from_beginning = 1;
ijkmp_change_state_l(mp, MP_STATE_COMPLETED);
pthread_mutex_unlock(&mp->mutex);
break;
case FFP_MSG_SEEK_COMPLETE:
MPTRACE("ijkmp_get_msg: FFP_MSG_SEEK_COMPLETE\n");
pthread_mutex_lock(&mp->mutex);
mp->seek_req = 0;
mp->seek_msec = 0;
pthread_mutex_unlock(&mp->mutex);
break;
case FFP_REQ_START:
MPTRACE("ijkmp_get_msg: FFP_REQ_START\n");
continue_wait_next_msg = 1;
pthread_mutex_lock(&mp->mutex);
if (0 == ikjmp_chkst_start_l(mp->mp_state)) {
// FIXME: 8 check seekable
if (mp->restart) {
if (mp->restart_from_beginning) {
av_log(mp->ffplayer, AV_LOG_DEBUG, "ijkmp_get_msg: FFP_REQ_START: restart from beginning\n");
retval = ffp_start_from_l(mp->ffplayer, 0);
if (retval == 0)
ijkmp_change_state_l(mp, MP_STATE_STARTED);
} else {
av_log(mp->ffplayer, AV_LOG_DEBUG, "ijkmp_get_msg: FFP_REQ_START: restart from seek pos\n");
retval = ffp_start_l(mp->ffplayer);
if (retval == 0)
ijkmp_change_state_l(mp, MP_STATE_STARTED);
}
mp->restart = 0;
mp->restart_from_beginning = 0;
} else {
av_log(mp->ffplayer, AV_LOG_DEBUG, "ijkmp_get_msg: FFP_REQ_START: start on fly\n");
retval = ffp_start_l(mp->ffplayer);
if (retval == 0)
ijkmp_change_state_l(mp, MP_STATE_STARTED);
}
}
pthread_mutex_unlock(&mp->mutex);
break;
case FFP_REQ_PAUSE:
MPTRACE("ijkmp_get_msg: FFP_REQ_PAUSE\n");
continue_wait_next_msg = 1;
pthread_mutex_lock(&mp->mutex);
if (0 == ikjmp_chkst_pause_l(mp->mp_state)) {
int pause_ret = ffp_pause_l(mp->ffplayer);
if (pause_ret == 0)
ijkmp_change_state_l(mp, MP_STATE_PAUSED);
}
pthread_mutex_unlock(&mp->mutex);
break;
case FFP_REQ_SEEK:
MPTRACE("ijkmp_get_msg: FFP_REQ_SEEK\n");
continue_wait_next_msg = 1;
pthread_mutex_lock(&mp->mutex);
if (0 == ikjmp_chkst_seek_l(mp->mp_state)) {
mp->restart_from_beginning = 0;
if (0 == ffp_seek_to_l(mp->ffplayer, msg->arg1)) {
av_log(mp->ffplayer, AV_LOG_DEBUG, "ijkmp_get_msg: FFP_REQ_SEEK: seek to %d\n", (int)msg->arg1);
}
}
pthread_mutex_unlock(&mp->mutex);
break;
}
if (continue_wait_next_msg) {
msg_free_res(msg);
continue;
}
return retval;
}
return -1;
}
This function also contains a while(1) loop that keeps taking the player's state messages out of mp->ffplayer->msg_queue and filling them into the msg object:
int retval = msg_queue_get(&mp->ffplayer->msg_queue, msg, block);
The above is only the core code path of ijkplayer's state callbacks; the topic is worth a chapter of its own so that the state callback mechanism can be understood clearly.
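If the native msg_queue feels abstract, the pattern is an ordinary blocking producer/consumer queue: ff_ffplay.c posts AVMessage items and message_loop_n blocks in ijkmp_get_msg until one arrives. A plain-Java analogy (not ijkplayer code) of that mechanism:
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;

final class MsgQueueAnalogy {
    static final class AvMessage {
        final int what, arg1, arg2;
        AvMessage(int what, int arg1, int arg2) { this.what = what; this.arg1 = arg1; this.arg2 = arg2; }
    }

    private final BlockingQueue<AvMessage> queue = new LinkedBlockingQueue<>();

    // Producer side: decoder/reader threads push state changes (the ffp_notify_msg* calls).
    void notifyMsg(int what, int arg1, int arg2) {
        queue.offer(new AvMessage(what, arg1, arg2));
    }

    // Consumer side: blocks until a message arrives, like ijkmp_get_msg with block = 1.
    void messageLoop() throws InterruptedException {
        while (true) {
            AvMessage msg = queue.take();  // mirrors the while (1) in message_loop_n
            // post_event(...) equivalent: hand the message to the Java EventHandler here.
        }
    }
}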
5. setDataSource and setSurface
ijkplayer/ijkmedia/ijkplayer/ijkplayer.c
ijkplayer/ijkmedia/ijkplayer/ijkplayer_internal.h
ijkplayer/ijkmedia/ijkplayer/android/ijkplayer_android.c
ijkplayer/ijkmedia/ijksdl/android/ijksdl_vout_android_surface.c
ijkplayer/ijkmedia/ijksdl/android/ijksdl_vout_android_nativewindow.c
ijkplayer/ijkmedia/ijkplayer/android/pipeline/ffpipeline_android.c
After creating the IjkMediaPlayer instance, we call setSurface and setDataSource to pass in the URL to play and the Surface to render on, getting ready for playback.
Once setDataSource has stored the URL, the player state is switched to MP_STATE_INITIALIZED; the transitions between ijkplayer's states will be covered in a later chapter.
setDataSource eventually reaches the ijkmp_set_data_source_l function in ijkplayer/ijkmedia/ijkplayer/ijkplayer.c:
static int ijkmp_set_data_source_l(IjkMediaPlayer *mp, const char *url)
{
    assert(mp);
    assert(url);

    // MPST_RET_IF_EQ(mp->mp_state, MP_STATE_IDLE);
    MPST_RET_IF_EQ(mp->mp_state, MP_STATE_INITIALIZED);
    MPST_RET_IF_EQ(mp->mp_state, MP_STATE_ASYNC_PREPARING);
    MPST_RET_IF_EQ(mp->mp_state, MP_STATE_PREPARED);
    MPST_RET_IF_EQ(mp->mp_state, MP_STATE_STARTED);
    MPST_RET_IF_EQ(mp->mp_state, MP_STATE_PAUSED);
    MPST_RET_IF_EQ(mp->mp_state, MP_STATE_COMPLETED);
    MPST_RET_IF_EQ(mp->mp_state, MP_STATE_STOPPED);
    MPST_RET_IF_EQ(mp->mp_state, MP_STATE_ERROR);
    MPST_RET_IF_EQ(mp->mp_state, MP_STATE_END);

    freep((void**)&mp->data_source);
    mp->data_source = strdup(url);
    if (!mp->data_source)
        return EIJK_OUT_OF_MEMORY;

    ijkmp_change_state_l(mp, MP_STATE_INITIALIZED);
    return 0;
}
The video URL is stored in the data_source field of the IjkMediaPlayer struct defined in ijkplayer/ijkmedia/ijkplayer/ijkplayer_internal.h; it will be used later when the URL is actually requested:
struct IjkMediaPlayer {
    volatile int ref_count;
    pthread_mutex_t mutex;
    FFPlayer *ffplayer;

    int (*msg_loop)(void*);
    SDL_Thread *msg_thread;
    SDL_Thread _msg_thread;

    int mp_state;
    char *data_source;
    void *weak_thiz;

    int restart;
    int restart_from_beginning;
    int seek_req;
    long seek_msec;
};
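On the Java side this corresponds to the setDataSource call from step 3; a short hedged example of passing custom HTTP headers along with the URL (videoUrl is a placeholder, and the Map-based overload is assumed from the IjkMediaPlayer API):
Map<String, String> headers = new HashMap<>();
headers.put("User-Agent", "my-player/1.0");       // example header, forwarded to the http layer
headers.put("Referer", "https://example.com/");   // example header

try {
    mPlayer.setDataSource(context, Uri.parse(videoUrl), headers); // ends up in ijkmp_set_data_source_l
} catch (IOException e) {
    // surface the failure to the caller; playback cannot proceed without a data source
}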
An upper-layer setSurface call lands in ijkplayer/ijkmedia/ijkplayer/android/ijkplayer_android.c:
void ijkmp_android_set_surface_l(JNIEnv *env, IjkMediaPlayer *mp, jobject android_surface)
{
    if (!mp || !mp->ffplayer || !mp->ffplayer->vout)
        return;

    SDL_VoutAndroid_SetAndroidSurface(env, mp->ffplayer->vout, android_surface);
    ffpipeline_set_surface(env, mp->ffplayer->pipeline, android_surface);
}
SDL_VoutAndroid_SetAndroidSurface takes android_surface and converts it into an ANativeWindow via ANativeWindow_fromSurface; ANativeWindow is the drawing window that Android's low-level OpenGL rendering draws into, and local display is done with OpenGL.
ffpipeline_set_surface in ijkplayer/ijkmedia/ijkplayer/android/pipeline/ffpipeline_android.c passes the mp->ffplayer->pipeline created at initialization time, together with android_surface, down to the lower layers:
int ffpipeline_set_surface(JNIEnv *env, IJKFF_Pipeline* pipeline, jobject surface)
{
    ALOGD("%s()\n", __func__);
    if (!check_ffpipeline(pipeline, __func__))
        return -1;

    IJKFF_Pipeline_Opaque *opaque = pipeline->opaque;
    if (!opaque->surface_mutex)
        return -1;

    ffpipeline_lock_surface(pipeline);
    {
        jobject prev_surface = opaque->jsurface;

        if ((surface == prev_surface) ||
            (surface && prev_surface && (*env)->IsSameObject(env, surface, prev_surface))) {
            // same object, no need to reconfigure
        } else {
            SDL_VoutAndroid_setAMediaCodec(opaque->weak_vout, NULL);
            if (surface) {
                opaque->jsurface = (*env)->NewGlobalRef(env, surface);
            } else {
                opaque->jsurface = NULL;
            }
            opaque->is_surface_need_reconfigure = true;
            if (prev_surface != NULL) {
                SDL_JNI_DeleteGlobalRefP(env, &prev_surface);
            }
        }
    }
    ffpipeline_unlock_surface(pipeline);

    return 0;
}
ffpipeline_android.c is the pipeline-management module for audio and video decoding; the underlying decoder modules are managed through this file.
if ((surface == prev_surface) ||
    (surface && prev_surface && (*env)->IsSameObject(env, surface, prev_surface))) {
    // same object, no need to reconfigure
}
If the surface being passed in is the same object as the previous one, nothing needs to be done; if it is different, the decoder is reset via SDL_VoutAndroid_setAMediaCodec and the opaque->jsurface field is updated, and decoded frames will later be rendered onto this surface.
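On the Java side the Surface typically comes from a SurfaceView (or a TextureView's SurfaceTexture); a hedged sketch of wiring it up through a SurfaceHolder callback (setDisplay is assumed from the IMediaPlayer interface):
surfaceView.getHolder().addCallback(new SurfaceHolder.Callback() {
    @Override
    public void surfaceCreated(SurfaceHolder holder) {
        mPlayer.setDisplay(holder);   // eventually reaches ijkmp_android_set_surface_l
    }

    @Override
    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
        // nothing to do here: ffpipeline_set_surface only reconfigures when the Surface object changes
    }

    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {
        mPlayer.setDisplay(null);     // detach so the native side stops rendering to a dead Surface
    }
});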
6. prepareAsync: requesting the data
ijkplayer/ijkmedia/ijkplayer/android/ijkplayer_jni.c
ijkplayer/ijkmedia/ijkplayer/ijkplayer.c
ijkplayer/ijkmedia/ijkplayer/ff_ffplay.c
Judging from the call sequence above, the real parsing work happens in the stream_open function in ff_ffplay.c. This function mainly does four things:
- 1. Initialize the queues for the video, subtitle and audio streams, ready to receive the parsed data of these three kinds of streams;
- 2. Prepare a worker thread for processing video frames;
- 3. Start a worker thread that parses the media URL;
- 4. Initialize the decoders.
Item 3, parsing the media URL on a worker thread, is the core of the prepareAsync flow. It happens in read_thread, a huge function of nearly 600 lines, so the code is not pasted here.
First, a quick introduction to the AVFormatContext struct in ijkplayer/extra/ffmpeg/libavformat/avformat.h. This struct carries the raw results of demuxing the media: a video file, for example, contains a video stream, subtitle streams and audio streams (possibly more than one audio stream); after the relevant parsing functions run, all of them are stored in this struct:
/**
* A list of all streams in the file. New streams are created with
* avformat_new_stream().
*
* - demuxing: streams are created by libavformat in avformat_open_input().
* If AVFMTCTX_NOHEADER is set in ctx_flags, then new streams may also
* appear in av_read_frame().
* - muxing: streams are created by the user before avformat_write_header().
*
* Freed by libavformat in avformat_free_context().
*/
AVStream **streams;
All of the track streams in the media are stored in this AVStream pointer array.
First, ic = avformat_alloc_context(); is called to allocate the context, and then avformat_open_input is called:
err = avformat_open_input(&ic, is->filename, is->iformat, &ffp->format_opts);
The avformat_open_input function is responsible for opening and parsing the media URL, whether it is a network URL or a local one. This function is also quite complex; when it finishes, the result is stored at the ic address, and this ic is the AVFormatContext struct.
At this point the streams field of ic already holds the information of every track stream; av_find_best_stream is then called to pick out the video, audio and subtitle streams:
st_index[AVMEDIA_TYPE_VIDEO] =
    av_find_best_stream(ic, AVMEDIA_TYPE_VIDEO,
                        st_index[AVMEDIA_TYPE_VIDEO], -1, NULL, 0);
st_index[AVMEDIA_TYPE_AUDIO] =
    av_find_best_stream(ic, AVMEDIA_TYPE_AUDIO,
                        st_index[AVMEDIA_TYPE_AUDIO],
                        st_index[AVMEDIA_TYPE_VIDEO],
                        NULL, 0);
st_index[AVMEDIA_TYPE_SUBTITLE] =
    av_find_best_stream(ic, AVMEDIA_TYPE_SUBTITLE,
                        st_index[AVMEDIA_TYPE_SUBTITLE],
                        (st_index[AVMEDIA_TYPE_AUDIO] >= 0 ?
                         st_index[AVMEDIA_TYPE_AUDIO] :
                         st_index[AVMEDIA_TYPE_VIDEO]),
                        NULL, 0);
Each selected track stream is then opened and its content is read; the function that does this is stream_component_open. For now a rough understanding of the flow is enough; each of these parsing steps will be pulled out and explained separately:
/* open the streams */
if (st_index[AVMEDIA_TYPE_AUDIO] >= 0) {
    stream_component_open(ffp, st_index[AVMEDIA_TYPE_AUDIO]);
} else {
    ffp->av_sync_type = AV_SYNC_VIDEO_MASTER;
    is->av_sync_type  = ffp->av_sync_type;
}

ret = -1;
if (st_index[AVMEDIA_TYPE_VIDEO] >= 0) {
    ret = stream_component_open(ffp, st_index[AVMEDIA_TYPE_VIDEO]);
}
if (is->show_mode == SHOW_MODE_NONE)
    is->show_mode = ret >= 0 ? SHOW_MODE_VIDEO : SHOW_MODE_RDFT;

if (st_index[AVMEDIA_TYPE_SUBTITLE] >= 0) {
    stream_component_open(ffp, st_index[AVMEDIA_TYPE_SUBTITLE]);
}
ffp_notify_msg1(ffp, FFP_MSG_COMPONENT_OPEN);
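The track indices picked by av_find_best_stream are also reachable from Java once the player is prepared; a hedged sketch of enumerating tracks and switching the audio track (getTrackInfo/selectTrack are assumed from the IjkMediaPlayer API):
IjkTrackInfo[] tracks = mPlayer.getTrackInfo();
for (int i = 0; i < tracks.length; i++) {
    if (tracks[i].getTrackType() == ITrackInfo.MEDIA_TRACK_TYPE_AUDIO) {
        Log.i("Tracks", "audio track #" + i);
        mPlayer.selectTrack(i);  // maps to _setStreamSelected(index, true) in ijkplayer_jni.c
        break;
    }
}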
Only when all of this has completed can onPrepared be delivered; and if the video width and height are available, onVideoSizeChanged(...) is delivered as well:
if (is->video_st && is->video_st->codecpar) {
    AVCodecParameters *codecpar = is->video_st->codecpar;
    ffp_notify_msg3(ffp, FFP_MSG_VIDEO_SIZE_CHANGED, codecpar->width, codecpar->height);
    ffp_notify_msg3(ffp, FFP_MSG_SAR_CHANGED, codecpar->sample_aspect_ratio.num, codecpar->sample_aspect_ratio.den);
}
ffp->prepared = true;
ffp_notify_msg1(ffp, FFP_MSG_PREPARED);
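These two notifications surface in Java as onVideoSizeChanged and onPrepared; a hedged sketch of consuming them (listener setters assumed from the IMediaPlayer interface; setAspectRatio is a hypothetical view helper):
mPlayer.setOnVideoSizeChangedListener(new IMediaPlayer.OnVideoSizeChangedListener() {
    @Override
    public void onVideoSizeChanged(IMediaPlayer mp, int width, int height, int sarNum, int sarDen) {
        // Driven by FFP_MSG_VIDEO_SIZE_CHANGED / FFP_MSG_SAR_CHANGED: resize the render view.
        videoLayout.setAspectRatio(width, height, sarNum, sarDen);
    }
});
mPlayer.setOnPreparedListener(new IMediaPlayer.OnPreparedListener() {
    @Override
    public void onPrepared(IMediaPlayer mp) {
        mp.start(); // FFP_MSG_PREPARED arrived: stream and codec setup is complete
    }
});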
7. Playing the video
The start call is relatively simple: it goes straight to the ijkmp_start_l function in ijkplayer/ijkmedia/ijkplayer/ijkplayer.c:
static int ijkmp_start_l(IjkMediaPlayer *mp)
{
    assert(mp);

    MP_RET_IF_FAILED(ikjmp_chkst_start_l(mp->mp_state));

    ffp_remove_msg(mp->ffplayer, FFP_REQ_START);
    ffp_remove_msg(mp->ffplayer, FFP_REQ_PAUSE);
    ffp_notify_msg1(mp->ffplayer, FFP_REQ_START);

    return 0;
}
It simply removes two pending messages and then notifies FFP_REQ_START. As mentioned earlier, when the player is initialized, ijkplayer starts a message loop to receive messages from all sides; calling ffp_notify_msg1 here posts the start request, so let's go to the message handler and see how it is processed when it arrives:
case FFP_REQ_START:
MPTRACE("ijkmp_get_msg: FFP_REQ_START\n");
continue_wait_next_msg = 1;
pthread_mutex_lock(&mp->mutex);
if (0 == ikjmp_chkst_start_l(mp->mp_state)) {
// FIXME: 8 check seekable
if (mp->restart) {
if (mp->restart_from_beginning) {
av_log(mp->ffplayer, AV_LOG_DEBUG, "ijkmp_get_msg: FFP_REQ_START: restart from beginning\n");
retval = ffp_start_from_l(mp->ffplayer, 0);
if (retval == 0)
ijkmp_change_state_l(mp, MP_STATE_STARTED);
} else {
av_log(mp->ffplayer, AV_LOG_DEBUG, "ijkmp_get_msg: FFP_REQ_START: restart from seek pos\n");
retval = ffp_start_l(mp->ffplayer);
if (retval == 0)
ijkmp_change_state_l(mp, MP_STATE_STARTED);
}
mp->restart = 0;
mp->restart_from_beginning = 0;
} else {
av_log(mp->ffplayer, AV_LOG_DEBUG, "ijkmp_get_msg: FFP_REQ_START: start on fly\n");
retval = ffp_start_l(mp->ffplayer);
if (retval == 0)
ijkmp_change_state_l(mp, MP_STATE_STARTED);
}
}
pthread_mutex_unlock(&mp->mutex);
break;
As you can see, this is where the playback functions (ffp_start_from_l / ffp_start_l) are finally invoked and the video starts playing.
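Once playback has started, the same request/notify pattern drives pause, seek and stop. A brief hedged sketch of the usual Java-side controls (all part of the IMediaPlayer interface):
if (mPlayer.isPlaying()) {
    mPlayer.pause();                  // posts FFP_REQ_PAUSE, handled in ijkmp_get_msg
} else {
    mPlayer.start();                  // posts FFP_REQ_START as shown above
}

mPlayer.seekTo(30_000);               // milliseconds; completion arrives as FFP_MSG_SEEK_COMPLETE
long position = mPlayer.getCurrentPosition();
long duration = mPlayer.getDuration();
Log.d("Player", "position=" + position + " / " + duration);

// When the player is no longer needed:
mPlayer.stop();
mPlayer.release();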
8. Summary
- This chapter was only a brief walkthrough of the player's playback flow and of the main files and functions along that flow;
- The principles of the individual modules will be covered in follow-up articles.