AudioTrack is the PCM audio playback facility provided by the Android SDK. As with MediaCodec, IJKPLAYER drives it from the native layer by reflecting into the Java-layer playback API.
For details, refer to the official AudioTrack documentation.
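Concretely, "reflecting from the native layer into the Java layer" means looking up android.media.AudioTrack via JNI and invoking its constructor and methods. The sketch below shows that bare pattern and is illustrative only: the helper name is made up, and ijkplayer's real SDL_Android_AudioTrack wrapper caches class and method IDs and adds full error handling.

```c
#include <jni.h>

// Constant values from the Android SDK: AudioManager.STREAM_MUSIC,
// AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_16BIT, AudioTrack.MODE_STREAM
#define STREAM_MUSIC        3
#define CHANNEL_OUT_STEREO  0x0000000C
#define ENCODING_PCM_16BIT  2
#define MODE_STREAM         1

// Illustrative helper (not ijkplayer's): build an android.media.AudioTrack for 16-bit stereo PCM.
// Returns a global reference so the object outlives this JNI frame, or NULL on failure.
static jobject audiotrack_new_stereo_s16(JNIEnv *env, int sample_rate, int buffer_size)
{
    jclass clazz = (*env)->FindClass(env, "android/media/AudioTrack");
    if (!clazz)
        return NULL;

    // AudioTrack(int streamType, int sampleRateInHz, int channelConfig,
    //            int audioFormat, int bufferSizeInBytes, int mode)
    jmethodID ctor = (*env)->GetMethodID(env, clazz, "<init>", "(IIIIII)V");
    if (!ctor)
        return NULL;

    jobject track = (*env)->NewObject(env, clazz, ctor,
                                      STREAM_MUSIC, sample_rate,
                                      CHANNEL_OUT_STEREO, ENCODING_PCM_16BIT,
                                      buffer_size, MODE_STREAM);
    return track ? (*env)->NewGlobalRef(env, track) : NULL;
}
```

Everything covered below (write(), pause(), flush(), getMinBufferSize(), setVolume()) is reached through this same FindClass / GetMethodID / Call* machinery.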
- The IJKFF_Pipeline struct abstracts video decoding (hardware and software) and audio output for the Android and iOS platforms; both platforms follow a common interface that covers each platform's hardware decoder as well as ffmpeg software decoding;
Definition of the IJKFF_Pipeline struct:
```c
typedef struct IJKFF_Pipeline_Opaque IJKFF_Pipeline_Opaque;
typedef struct IJKFF_Pipeline IJKFF_Pipeline;

struct IJKFF_Pipeline {
    SDL_Class             *opaque_class;
    IJKFF_Pipeline_Opaque *opaque;

    void            (*func_destroy)             (IJKFF_Pipeline *pipeline);
    IJKFF_Pipenode *(*func_open_video_decoder)  (IJKFF_Pipeline *pipeline, FFPlayer *ffp);
    SDL_Aout       *(*func_open_audio_output)   (IJKFF_Pipeline *pipeline, FFPlayer *ffp);
    IJKFF_Pipenode *(*func_init_video_decoder)  (IJKFF_Pipeline *pipeline, FFPlayer *ffp);
    int             (*func_config_video_decoder)(IJKFF_Pipeline *pipeline, FFPlayer *ffp);
};
```
- The IJKFF_Pipeline_Opaque struct wraps the data used by IJKFF_Pipeline;
Expanding the IJKFF_Pipeline_Opaque struct:
```c
typedef struct IJKFF_Pipeline_Opaque {
    FFPlayer      *ffp;
    SDL_mutex     *surface_mutex;
    jobject        jsurface;
    volatile bool  is_surface_need_reconfigure;

    bool         (*mediacodec_select_callback)(void *opaque, ijkmp_mediacodecinfo_context *mcc);
    void          *mediacodec_select_callback_opaque;

    SDL_Vout      *weak_vout;

    float          left_volume;
    float          right_volume;
} IJKFF_Pipeline_Opaque;
```
Call chain for creating the pipeline object:
Java native_setup() => IjkMediaPlayer_native_setup() => ijkmp_android_create() => ffpipeline_create_from_android()
Expanding the ijkmp_android_create method:
```c
IjkMediaPlayer *ijkmp_android_create(int(*msg_loop)(void*))
{
    IjkMediaPlayer *mp = ijkmp_create(msg_loop);
    if (!mp)
        goto fail;

    mp->ffplayer->vout = SDL_VoutAndroid_CreateForAndroidSurface();
    if (!mp->ffplayer->vout)
        goto fail;

    mp->ffplayer->pipeline = ffpipeline_create_from_android(mp->ffplayer);
    if (!mp->ffplayer->pipeline)
        goto fail;

    ffpipeline_set_vout(mp->ffplayer->pipeline, mp->ffplayer->vout);

    return mp;

fail:
    ijkmp_dec_ref_p(&mp);
    return NULL;
}
```
Next, expanding the ffpipeline_create_from_android method:
```c
IJKFF_Pipeline *ffpipeline_create_from_android(FFPlayer *ffp)
{
    ALOGD("ffpipeline_create_from_android()\n");
    IJKFF_Pipeline *pipeline = ffpipeline_alloc(&g_pipeline_class, sizeof(IJKFF_Pipeline_Opaque));
    if (!pipeline)
        return pipeline;

    IJKFF_Pipeline_Opaque *opaque = pipeline->opaque;
    opaque->ffp           = ffp;
    opaque->surface_mutex = SDL_CreateMutex();
    opaque->left_volume   = 1.0f;
    opaque->right_volume  = 1.0f;
    if (!opaque->surface_mutex) {
        ALOGE("ffpipeline-android:create SDL_CreateMutex failed\n");
        goto fail;
    }

    pipeline->func_destroy              = func_destroy;
    pipeline->func_open_video_decoder   = func_open_video_decoder;
    pipeline->func_open_audio_output    = func_open_audio_output;
    pipeline->func_init_video_decoder   = func_init_video_decoder;
    pipeline->func_config_video_decoder = func_config_video_decoder;

    return pipeline;
fail:
    ffpipeline_free_p(&pipeline);
    return NULL;
}
```
- The SDL_Aout struct abstracts all operations on the audio hardware for both Android and iOS; the two platforms follow a nearly identical interface;
```c
struct SDL_Aout {
    SDL_mutex       *mutex;
    double           minimal_latency_seconds;

    SDL_Class       *opaque_class;
    SDL_Aout_Opaque *opaque;
    void   (*free_l)(SDL_Aout *vout);
    int    (*open_audio)(SDL_Aout *aout, const SDL_AudioSpec *desired, SDL_AudioSpec *obtained);
    void   (*pause_audio)(SDL_Aout *aout, int pause_on);
    void   (*flush_audio)(SDL_Aout *aout);
    void   (*set_volume)(SDL_Aout *aout, float left, float right);
    void   (*close_audio)(SDL_Aout *aout);

    double (*func_get_latency_seconds)(SDL_Aout *aout);
    void   (*func_set_default_latency_seconds)(SDL_Aout *aout, double latency);

    // optional
    void   (*func_set_playback_rate)(SDL_Aout *aout, float playbackRate);
    void   (*func_set_playback_volume)(SDL_Aout *aout, float playbackVolume);
    int    (*func_get_audio_persecond_callbacks)(SDL_Aout *aout);

    // Android only
    int    (*func_get_audio_session_id)(SDL_Aout *aout);
};
```
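The player core does not call these function pointers directly; it goes through thin dispatch wrappers such as SDL_AoutOpenAudio() and SDL_AoutFlushAudio(), which appear in the call chains below. A simplified sketch of what such a wrapper does (the exact bodies and NULL checks shown here are an assumption of this sketch, not a quote of ijksdl_aout.c):

```c
// Dispatch-wrapper sketch: forward to whichever backend (AudioTrack here,
// OpenSL ES / AudioUnit elsewhere) populated the SDL_Aout function pointers.
int SDL_AoutOpenAudio(SDL_Aout *aout, const SDL_AudioSpec *desired, SDL_AudioSpec *obtained)
{
    if (aout && desired && aout->open_audio)
        return aout->open_audio(aout, desired, obtained);
    return -1;
}

void SDL_AoutFlushAudio(SDL_Aout *aout)
{
    if (aout && aout->flush_audio)
        aout->flush_audio(aout);
}
```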
- SDL_Aout_Opaque is the internal data member of the SDL_Aout struct; it bundles the parameters and state used for audio operations together with the mutex, cond, and so on;
```c
typedef struct SDL_Aout_Opaque {
    SDL_cond   *wakeup_cond;
    SDL_mutex  *wakeup_mutex;

    SDL_AudioSpec spec;
    SDL_Android_AudioTrack *atrack;
    uint8_t    *buffer;
    int         buffer_size;

    volatile bool  need_flush;
    volatile bool  pause_on;
    volatile bool  abort_request;

    volatile bool  need_set_volume;
    volatile float left_volume;
    volatile float right_volume;

    SDL_Thread *audio_tid;
    SDL_Thread  _audio_tid;

    int audio_session_id;

    volatile float speed;
    volatile bool  speed_changed;
} SDL_Aout_Opaque;
```
The SDL_Aout object is ultimately created at the end of the following call chain:
ffp_prepare_async_l() => ffpipeline_open_audio_output() => func_open_audio_output() => SDL_AoutAndroid_CreateForAudioTrack()
```c
SDL_Aout *SDL_AoutAndroid_CreateForAudioTrack()
{
    SDL_Aout *aout = SDL_Aout_CreateInternal(sizeof(SDL_Aout_Opaque));
    if (!aout)
        return NULL;

    SDL_Aout_Opaque *opaque = aout->opaque;
    opaque->wakeup_cond  = SDL_CreateCond();
    opaque->wakeup_mutex = SDL_CreateMutex();
    opaque->speed        = 1.0f;

    aout->opaque_class = &g_audiotrack_class;
    aout->free_l       = aout_free_l;
    aout->open_audio   = aout_open_audio;
    aout->pause_audio  = aout_pause_audio;
    aout->flush_audio  = aout_flush_audio;
    aout->set_volume   = aout_set_volume;
    aout->close_audio  = aout_close_audio;
    aout->func_get_audio_session_id = aout_get_audio_session_id;
    aout->func_set_playback_rate    = func_set_playback_rate;

    return aout;
}
```
Opening the audio output then mainly does the following:
- Opening the audio really means allocating an AudioTrack object (a jobject) and handing the audio source's parameters to it; all subsequent AudioTrack operations go through this object;
- Note that IJKPLAYER restricts the audio source's channel layout, PCM format, and sample rate: only mono or stereo is played, the PCM format must be 16-bit or 8-bit, and the sample rate must lie between 4000 and 48000 Hz (see the sketch after this block);
- The audio parameters that AudioTrack can actually play are saved in is->audio_tgt, for resampling later if the audio parameters change;
- The audio thread is started to handle AudioTrack operations asynchronously;
- AudioTrack's default latency is set, which is used to correct the audio clock during audio/video synchronization;
```c
// Set the default latency: if the func_set_default_latency_seconds callback exists,
// update it through the callback; otherwise set the minimal_latency_seconds field.
SDL_AoutSetDefaultLatencySeconds(ffp->aout, ((double)(2 * spec.size)) / audio_hw_params->bytes_per_sec);
```
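As noted in the list above, the requested spec is constrained before it reaches AudioTrack. The following is only a minimal sketch of that kind of clamping, assuming SDL-style spec fields (freq / format / channels); the helper name is invented, and the real checks live inside ijksdl's AudioTrack wrapper:

```c
// Illustrative sketch only: clamp a requested spec to what the AudioTrack path accepts
// (mono/stereo, 8- or 16-bit PCM, 4000-48000 Hz). The helper name is hypothetical.
static void clamp_audio_spec(SDL_AudioSpec *spec)
{
    if (spec->channels > 2)
        spec->channels = 2;                  // only mono or stereo is played

    if (spec->format != AUDIO_S16SYS && spec->format != AUDIO_U8)
        spec->format = AUDIO_S16SYS;         // fall back to 16-bit PCM

    if (spec->freq < 4000)
        spec->freq = 4000;                   // sample rate must stay within [4000, 48000]
    else if (spec->freq > 48000)
        spec->freq = 48000;
}
```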
The full call chain:
read_thread() => stream_component_open() => audio_open() => SDL_AoutOpenAudio() => aout_open_audio() => aout_open_audio_n()
Let's focus on the aout_open_audio_n function:
- Passes the audio source's sampling parameters to AudioTrack and allocates an AudioTrack object (a jobject); all subsequent AudioTrack operations are issued through this object;
- Allocates a buffer for staging PCM data, sized getMinBufferSize() * 2, which is always larger than 256 bytes;
- Starts an audio thread that executes AudioTrack operations asynchronously, such as setVolume() / pause() / flush() / close_audio() / setPlaybackRate();
- Saves the PCM playback parameters of the audio hardware in the global is->audio_tgt variable, for resampling when the parameters change later;
- Sets the initial playback volume;
```c
static int aout_open_audio_n(JNIEnv *env, SDL_Aout *aout, const SDL_AudioSpec *desired, SDL_AudioSpec *obtained)
{
    assert(desired);
    SDL_Aout_Opaque *opaque = aout->opaque;

    opaque->spec = *desired;
    // Hand the PCM sampling parameters to AudioTrack
    opaque->atrack = SDL_Android_AudioTrack_new_from_sdl_spec(env, desired);
    if (!opaque->atrack) {
        ALOGE("aout_open_audio_n: failed to new AudioTrcak()");
        return -1;
    }

    opaque->buffer_size = SDL_Android_AudioTrack_get_min_buffer_size(opaque->atrack);
    if (opaque->buffer_size <= 0) {
        ALOGE("aout_open_audio_n: failed to getMinBufferSize()");
        SDL_Android_AudioTrack_free(env, opaque->atrack);
        opaque->atrack = NULL;
        return -1;
    }

    opaque->buffer = malloc(opaque->buffer_size);
    if (!opaque->buffer) {
        ALOGE("aout_open_audio_n: failed to allocate buffer");
        SDL_Android_AudioTrack_free(env, opaque->atrack);
        opaque->atrack = NULL;
        return -1;
    }

    if (obtained) {
        SDL_Android_AudioTrack_get_target_spec(opaque->atrack, obtained);
        SDLTRACE("audio target format fmt:0x%x, channel:0x%x", (int)obtained->format, (int)obtained->channels);
    }

    opaque->audio_session_id = SDL_Android_AudioTrack_getAudioSessionId(env, opaque->atrack);
    ALOGI("audio_session_id = %d\n", opaque->audio_session_id);

    opaque->pause_on = 1;
    opaque->abort_request = 0;
    opaque->audio_tid = SDL_CreateThreadEx(&opaque->_audio_tid, aout_thread, aout, "ff_aout_android");
    if (!opaque->audio_tid) {
        ALOGE("aout_open_audio_n: failed to create audio thread");
        SDL_Android_AudioTrack_free(env, opaque->atrack);
        opaque->atrack = NULL;
        return -1;
    }

    return 0;
}
```
In addition, a note on the getMinBufferSize function:
- getMinBufferSize takes the hardware into account (whether the sample rate is supported, the hardware's own latency, and so on) and returns a minimum buffer size. The buffer we allocate is usually an integer multiple of this value.
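Since getMinBufferSize() is a static method of android.media.AudioTrack, native code reaches it through JNI. Below is a raw, uncached sketch of that call; ijkplayer's actual wrapper differs (it caches method IDs and handles exceptions), so treat this as illustration only:

```c
#include <jni.h>

// Android SDK constant values: AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_16BIT
#define CHANNEL_OUT_STEREO  0x0000000C
#define ENCODING_PCM_16BIT  2

// Query AudioTrack.getMinBufferSize(sampleRate, channelConfig, audioFormat) via JNI.
// Returns the minimum buffer size in bytes, or a negative value on failure.
static int audiotrack_min_buffer_size(JNIEnv *env, int sample_rate)
{
    jclass clazz = (*env)->FindClass(env, "android/media/AudioTrack");
    if (!clazz)
        return -1;

    jmethodID mid = (*env)->GetStaticMethodID(env, clazz, "getMinBufferSize", "(III)I");
    if (!mid)
        return -1;

    return (*env)->CallStaticIntMethod(env, clazz, mid,
                                       sample_rate, CHANNEL_OUT_STEREO, ENCODING_PCM_16BIT);
}
```

Per the description above, the PCM staging buffer is then allocated at a multiple of this value (getMinBufferSize() * 2).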
The audio thread (aout_thread) mainly does two things:
- It executes operations on the Android SDK's AudioTrack asynchronously, such as pause() / play() / setVolume() / flush() / setSpeed();
- It copies a fixed 256 bytes of PCM data via the sdl_audio_callback callback and feeds it to AudioTrack for playback;
All operations on AudioTrack are executed asynchronously inside this thread:
- Note that while the player is paused, the thread waits on a condition variable until opaque->pause_on becomes false (i.e. playback may resume) or the player aborts, so it simply idles;
```c
static int aout_thread_n(JNIEnv *env, SDL_Aout *aout)
{
    SDL_Aout_Opaque *opaque = aout->opaque;
    SDL_Android_AudioTrack *atrack = opaque->atrack;
    SDL_AudioCallback audio_cblk = opaque->spec.callback;
    void *userdata = opaque->spec.userdata;
    uint8_t *buffer = opaque->buffer;
    // Bytes of PCM fed to AudioTrack per write: neither too little nor too much,
    // each write should cover a short stretch of playback
    int copy_size = 256;

    assert(atrack);
    assert(buffer);

    SDL_SetThreadPriority(SDL_THREAD_PRIORITY_HIGH);

    if (!opaque->abort_request && !opaque->pause_on)
        SDL_Android_AudioTrack_play(env, atrack);

    while (!opaque->abort_request) {
        SDL_LockMutex(opaque->wakeup_mutex);
        if (!opaque->abort_request && opaque->pause_on) {
            SDL_Android_AudioTrack_pause(env, atrack);
            // While paused, the thread waits here on the condition variable until playback resumes
            while (!opaque->abort_request && opaque->pause_on) {
                SDL_CondWaitTimeout(opaque->wakeup_cond, opaque->wakeup_mutex, 1000);
            }
            if (!opaque->abort_request && !opaque->pause_on) {
                if (opaque->need_flush) {
                    opaque->need_flush = 0;
                    SDL_Android_AudioTrack_flush(env, atrack);
                }
                SDL_Android_AudioTrack_play(env, atrack);
            }
        }
        if (opaque->need_flush) {
            opaque->need_flush = 0;
            SDL_Android_AudioTrack_flush(env, atrack);
        }
        if (opaque->need_set_volume) {
            opaque->need_set_volume = 0;
            SDL_Android_AudioTrack_set_volume(env, atrack, opaque->left_volume, opaque->right_volume);
        }
        if (opaque->speed_changed) {
            opaque->speed_changed = 0;
            SDL_Android_AudioTrack_setSpeed(env, atrack, opaque->speed);
        }
        SDL_UnlockMutex(opaque->wakeup_mutex);

        // Copy decoded PCM data, a fixed 256 bytes each time
        audio_cblk(userdata, buffer, copy_size);
        if (opaque->need_flush) {
            SDL_Android_AudioTrack_flush(env, atrack);
            opaque->need_flush = false;
        }

        if (opaque->need_flush) {
            opaque->need_flush = 0;
            SDL_Android_AudioTrack_flush(env, atrack);
        } else {
            // Feed the PCM data to AudioTrack for playback
            int written = SDL_Android_AudioTrack_write(env, atrack, buffer, copy_size);
            if (written != copy_size) {
                ALOGW("AudioTrack: not all data copied %d/%d", (int)written, (int)copy_size);
            }
        }

        // TODO: 1 if callback return -1 or 0
    }

    SDL_Android_AudioTrack_free(env, atrack);
    return 0;
}
```
- The audio thread pulls a fixed 256 bytes of PCM data from the decoded-audio FrameQueue through this callback;
```c
/* prepare a new audio buffer */
static void sdl_audio_callback(void *opaque, Uint8 *stream, int len)
{
    FFPlayer *ffp = opaque;
    VideoState *is = ffp->is;
    int audio_size, len1;
    if (!ffp || !is) {
        memset(stream, 0, len);
        return;
    }

    ffp->audio_callback_time = av_gettime_relative();

    if (ffp->pf_playback_rate_changed) {
        ffp->pf_playback_rate_changed = 0;
#if defined(__ANDROID__)
        if (!ffp->soundtouch_enable) {
            SDL_AoutSetPlaybackRate(ffp->aout, ffp->pf_playback_rate);
        }
#else
        SDL_AoutSetPlaybackRate(ffp->aout, ffp->pf_playback_rate);
#endif
    }
    if (ffp->pf_playback_volume_changed) {
        ffp->pf_playback_volume_changed = 0;
        SDL_AoutSetPlaybackVolume(ffp->aout, ffp->pf_playback_volume);
    }

    // Loop to make sure exactly len bytes of PCM are copied out
    while (len > 0) {
        if (is->audio_buf_index >= is->audio_buf_size) {
            audio_size = audio_decode_frame(ffp);
            if (audio_size < 0) {
                /* if error, just output silence */
                is->audio_buf = NULL;
                is->audio_buf_size = SDL_AUDIO_MIN_BUFFER_SIZE / is->audio_tgt.frame_size * is->audio_tgt.frame_size;
            } else {
                if (is->show_mode != SHOW_MODE_VIDEO)
                    update_sample_display(is, (int16_t *)is->audio_buf, audio_size);
                is->audio_buf_size = audio_size;
            }
            is->audio_buf_index = 0;
        }
        if (is->auddec.pkt_serial != is->audioq.serial) {
            is->audio_buf_index = is->audio_buf_size;
            // Play silence
            memset(stream, 0, len);
            // Flush the PCM data left over around a seek
            SDL_AoutFlushAudio(ffp->aout);
            break;
        }
        len1 = is->audio_buf_size - is->audio_buf_index;
        if (len1 > len)
            len1 = len;
        if (!is->muted && is->audio_buf && is->audio_volume == SDL_MIX_MAXVOLUME)
            // Copy the PCM data out here
            memcpy(stream, (uint8_t *)is->audio_buf + is->audio_buf_index, len1);
        else {
            memset(stream, 0, len1);
            if (!is->muted && is->audio_buf)
                SDL_MixAudio(stream, (uint8_t *)is->audio_buf + is->audio_buf_index, len1, is->audio_volume);
        }
        len -= len1;
        stream += len1;
        is->audio_buf_index += len1;
    }
    is->audio_write_buf_size = is->audio_buf_size - is->audio_buf_index;
    /* Let's assume the audio driver that is used by SDL has two periods. */
    if (!isnan(is->audio_clock)) {
        // When computing the audio reference clock, account for the PCM samples still buffered
        // in the output (OpenSL ES and AudioUnit) as well as is->audio_write_buf_size
        set_clock_at(&is->audclk, is->audio_clock - (double)(is->audio_write_buf_size) / is->audio_tgt.bytes_per_sec - SDL_AoutGetLatencySeconds(ffp->aout), is->audio_clock_serial, ffp->audio_callback_time / 1000000.0);
        sync_clock_to_slave(&is->extclk, &is->audclk);
    }
    if (!ffp->first_audio_frame_rendered) {
        ffp->first_audio_frame_rendered = 1;
        ffp_notify_msg1(ffp, FFP_MSG_AUDIO_RENDERING_START);
    }
    if (is->latest_audio_seek_load_serial == is->audio_clock_serial) {
        int latest_audio_seek_load_serial = __atomic_exchange_n(&(is->latest_audio_seek_load_serial), -1, memory_order_seq_cst);
        if (latest_audio_seek_load_serial == is->audio_clock_serial) {
            if (ffp->av_sync_type == AV_SYNC_AUDIO_MASTER) {
                ffp_notify_msg2(ffp, FFP_MSG_AUDIO_SEEK_RENDERING_START, 1);
            } else {
                ffp_notify_msg2(ffp, FFP_MSG_AUDIO_SEEK_RENDERING_START, 0);
            }
        }
    }
    if (ffp->render_wait_start && !ffp->start_on_prepared && is->pause_req) {
        while (is->pause_req && !is->abort_request) {
            SDL_Delay(20);
        }
    }
}
```
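To make the clock correction at the end of sdl_audio_callback concrete, here is a worked example with assumed numbers (purely illustrative, not taken from a real run):

```c
/* Purely illustrative numbers:
 *   44100 Hz, stereo, s16 PCM        -> bytes_per_sec = 44100 * 2 * 2 = 176400
 *   is->audio_clock                  = 10.000 s  (pts at the END of the decoded buffer)
 *   is->audio_write_buf_size         = 8820 B    (decoded but not yet handed to the output)
 *                                      => 8820 / 176400 = 0.050 s
 *   SDL_AoutGetLatencySeconds(aout)  = 0.046 s   (default: 2 * spec.size / bytes_per_sec)
 *
 * The audio clock is therefore set to 10.000 - 0.050 - 0.046 = 9.904 s,
 * roughly the pts of the sample the hardware is playing at this instant.
 */
```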
Back in aout_thread_n(), the data pulled by the callback is then fed to AudioTrack:

```c
    ......

    // Pull 256 bytes of PCM data from the FrameQueue
    audio_cblk(userdata, buffer, copy_size);
    if (opaque->need_flush) {
        SDL_Android_AudioTrack_flush(env, atrack);
        opaque->need_flush = false;
    }

    if (opaque->need_flush) {
        opaque->need_flush = 0;
        SDL_Android_AudioTrack_flush(env, atrack);
    } else {
        // Feed the PCM data copied from the FrameQueue to AudioTrack for playback
        int written = SDL_Android_AudioTrack_write(env, atrack, buffer, copy_size);
        if (written != copy_size) {
            ALOGW("AudioTrack: not all data copied %d/%d", (int)written, (int)copy_size);
        }
    }

    ......
```