Following the build steps from the previous article, we now have all the files needed to use FFmpeg. Let's use them to implement the simplest possible video playback.
Put the include folder under the cpp directory. Then create a jniLibs directory under main and copy all the .so files from the lib folder into it.
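The resulting layout should look roughly like this (a sketch; the module name app is an assumption, and only the arm64-v8a ABI is shown since that is all we build for):

app/src/main/
├── cpp/
│   ├── include/          (FFmpeg headers: libavcodec, libavformat, ...)
│   ├── ffplayer/         (our player sources, added below)
│   ├── CMakeLists.txt
│   └── native-lib.cpp
└── jniLibs/
    └── arm64-v8a/        (libavcodec.so, libavformat.so, libavutil.so,
                           libswscale.so, libswresample.so, libavfilter.so)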
defaultConfig {
    ...
    ndk {
        // Build/package only the arm64 ABI, matching the FFmpeg .so files we copied in
        abiFilters "arm64-v8a"
    }
}
...
externalNativeBuild {
    cmake {
        // Path as generated by the Android Studio C++ template; adjust if yours differs
        path "src/main/cpp/CMakeLists.txt"
    }
}
sourceSets {
    main {
        // This override points packaging at a module-level `libs` directory.
        // If the .so files live in the default src/main/jniLibs as described
        // above, this block is unnecessary and can be dropped.
        jniLibs.srcDirs = ['libs']
    }
}
cmake_minimum_required(VERSION 3.22.1)

project("androidvideoplayer")

# Directory variables
set(ffmpeg_lib_dir ${CMAKE_SOURCE_DIR}/../jniLibs/${ANDROID_ABI})
set(ffmpeg_head_dir ${CMAKE_SOURCE_DIR})

# FFmpeg headers
include_directories(${ffmpeg_head_dir}/include)

# Import the prebuilt FFmpeg shared libraries
add_library(avutil SHARED IMPORTED)
set_target_properties(avutil PROPERTIES IMPORTED_LOCATION ${ffmpeg_lib_dir}/libavutil.so)

add_library(swresample SHARED IMPORTED)
set_target_properties(swresample PROPERTIES IMPORTED_LOCATION ${ffmpeg_lib_dir}/libswresample.so)

add_library(avcodec SHARED IMPORTED)
set_target_properties(avcodec PROPERTIES IMPORTED_LOCATION ${ffmpeg_lib_dir}/libavcodec.so)

add_library(avfilter SHARED IMPORTED)
set_target_properties(avfilter PROPERTIES IMPORTED_LOCATION ${ffmpeg_lib_dir}/libavfilter.so)

add_library(swscale SHARED IMPORTED)
set_target_properties(swscale PROPERTIES IMPORTED_LOCATION ${ffmpeg_lib_dir}/libswscale.so)

add_library(avformat SHARED IMPORTED)
set_target_properties(avformat PROPERTIES IMPORTED_LOCATION ${ffmpeg_lib_dir}/libavformat.so)

add_library( # name of the generated .so
        androidvideoplayer
        SHARED
        # Our own sources. Remember to list every newly created file here,
        # otherwise it cannot link against the other libraries.
        ffplayer/logger.h
        ffplayer/ffplayer.cpp
        ffplayer/ffplayer.h
        native-lib.cpp)

find_library(log-lib log)

target_link_libraries(
        androidvideoplayer
        -landroid
        # Link the FFmpeg libraries
        avutil
        swscale
        avcodec
        avfilter
        swresample
        avformat
        ${log-lib})
companion object {
    // Load the native library
    init {
        System.loadLibrary("androidvideoplayer")
    }
}

// Declare the native entry point
external fun play(videoPath: String, surface: Surface): Int
The layout uses a SurfaceView:
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
    xmlns:tools="http://schemas.android.com/tools"
    android:layout_width="match_parent"
    android:layout_height="match_parent"
    android:orientation="vertical">

    <SurfaceView
        android:id="@+id/surface_view"
        android:layout_width="match_parent"
        android:layout_height="200dp" />

</LinearLayout>
Set up the SurfaceView. Since native_play decodes and renders in a blocking loop, play() is launched on Dispatchers.IO rather than on the main thread:
binding.surfaceView.holder.addCallback(object : SurfaceHolder.Callback {
    override fun surfaceCreated(holder: SurfaceHolder) {
        lifecycleScope.launch(Dispatchers.IO) {
            play("${filesDir}/private.mp4", binding.surfaceView.holder.surface)
        }
    }

    override fun surfaceChanged(holder: SurfaceHolder, format: Int, width: Int, height: Int) {
    }

    override fun surfaceDestroyed(holder: SurfaceHolder) {
    }
})
Here I simply placed an MP4 file named private.mp4 under /data/data/{packageName}/files/.
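During development, one way to get the file there (an assumption: a debuggable build, and the package name com.hsw.androidvideoplayer used by the JNI symbols below) is adb push private.mp4 /data/local/tmp/private.mp4 followed by adb shell run-as com.hsw.androidvideoplayer cp /data/local/tmp/private.mp4 files/private.mp4.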
#ifndef ANDROIDVIDEOPLAYER_LOGGER_H
#define ANDROIDVIDEOPLAYER_LOGGER_H

#define LOG_TAG "AndroidVideoPlayer"

#ifdef ANDROID
#include <android/log.h>
#define LOGE(format, ...) __android_log_print(ANDROID_LOG_ERROR, LOG_TAG, format, ##__VA_ARGS__)
#define LOGD(format, ...) __android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, format, ##__VA_ARGS__)
#define LOGI(format, ...) __android_log_print(ANDROID_LOG_INFO, LOG_TAG, format, ##__VA_ARGS__)
#else
// Fallback for non-Android builds: plain printf, with LOG_TAG prepended via literal concatenation
#include <cstdio>
#define LOGE(format, ...) printf(LOG_TAG format "\n", ##__VA_ARGS__)
#define LOGD(format, ...) printf(LOG_TAG format "\n", ##__VA_ARGS__)
#define LOGI(format, ...) printf(LOG_TAG format "\n", ##__VA_ARGS__)
#endif

#endif //ANDROIDVIDEOPLAYER_LOGGER_H
#ifndef ANDROIDVIDEOPLAYER_FFPLAYER_H
#define ANDROIDVIDEOPLAYER_FFPLAYER_H

#ifdef __cplusplus
extern "C" {
#endif
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libavfilter/avfilter.h"
#include "libavutil/log.h"
#include <libavutil/imgutils.h>
#include <libswscale/swscale.h>
#include "libswresample/swresample.h"
#include "libavutil/time.h"
#include <unistd.h>
#include <android/native_window.h>
#include <android/native_window_jni.h>
#ifdef __cplusplus
}
#endif

class FFplayer {
private:
    int width = 0;
    int height = 0;
    int bufferSize = 0;
    int videoIndex = -1;
    AVFormatContext *avFormatContext = NULL;
    AVCodec *avCodec = NULL;
    AVCodecContext *avCodecContext = NULL;
    AVPacket *avPacket = NULL;
    AVFrame *avFrame = NULL;
    AVFrame *avFrameRGB = NULL;
    SwsContext *swsContext = NULL;
    uint8_t *out_buffer = NULL;
    ANativeWindow_Buffer windowBuffer;

public:
    /**
     * Native playback
     * @param video_path   path of the video file to play
     * @param nativeWindow window to render the decoded frames into
     * @return 0 on success
     */
    int native_play(const char *video_path, ANativeWindow *nativeWindow);
};

#endif //ANDROIDVIDEOPLAYER_FFPLAYER_H
#include <jni.h>
#include <cstdio>
#include <cstring> // for memcpy
#include "logger.h"
#include "ffplayer.h"

int FFplayer::native_play(const char *video_path, ANativeWindow *nativeWindow) {
    int ret = 0;
    int64_t stime = av_gettime_relative();
    LOGD("StartTime: %ld", stime);
    // Allocate the AVFormatContext
    avFormatContext = avformat_alloc_context();
    // Open the input file
    if (avformat_open_input(&avFormatContext, video_path, nullptr, nullptr) != 0) {
        LOGE("Could not open input stream");
        goto finish;
    }
    // Probe the stream information
    if (avformat_find_stream_info(avFormatContext, nullptr) < 0) {
        LOGE("Could not find stream information");
        goto finish;
    }
    // Find the video track
    for (int index = 0; index < avFormatContext->nb_streams; index++) {
        if (avFormatContext->streams[index]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
            videoIndex = index;
            break;
        }
    }
    if (videoIndex == -1) {
        LOGE("Could not find a video stream");
        goto finish;
    }
    // Find the decoder
    avCodec = const_cast<AVCodec *>(avcodec_find_decoder(
            avFormatContext->streams[videoIndex]->codecpar->codec_id));
    if (avCodec == nullptr) {
        LOGE("could not find codec");
        goto finish;
    }
    // Allocate the decoder context
    avCodecContext = avcodec_alloc_context3(avCodec);
    // Copy the stream parameters into the decoder context
    avcodec_parameters_to_context(avCodecContext, avFormatContext->streams[videoIndex]->codecpar);
    // Open the decoder
    ret = avcodec_open2(avCodecContext, avCodec, nullptr);
    if (ret < 0) {
        LOGE("Could not open codec");
        goto finish;
    }
    // Video dimensions
    width = avCodecContext->width;
    height = avCodecContext->height;
    LOGD("ScreenInfo: width-> %d, height-> %d", width, height);
    // Allocate the frames and the packet
    // (av_packet_alloc() zero-initializes the packet, safer than raw av_malloc)
    avFrame = av_frame_alloc();
    avPacket = av_packet_alloc();
    avFrameRGB = av_frame_alloc();
    // Bind the output buffer to avFrameRGB
    bufferSize = av_image_get_buffer_size(AV_PIX_FMT_RGBA, width, height, 1);
    out_buffer = (uint8_t *) av_malloc(bufferSize * sizeof(uint8_t));
    av_image_fill_arrays(avFrameRGB->data, avFrameRGB->linesize, out_buffer,
                         AV_PIX_FMT_RGBA, width, height, 1);
    // Pixel-format conversion context (YUV -> RGBA)
    swsContext = sws_getContext(width, height, avCodecContext->pix_fmt,
                                width, height, AV_PIX_FMT_RGBA,
                                SWS_BICUBIC, nullptr, nullptr, nullptr);
    // Resize the window buffer and set its pixel format
    if (ANativeWindow_setBuffersGeometry(nativeWindow, width, height, WINDOW_FORMAT_RGBA_8888) < 0) {
        LOGE("Could not set buffers geometry");
        ANativeWindow_release(nativeWindow);
        goto finish;
    }
    // Read packets in a loop
    while (av_read_frame(avFormatContext, avPacket) >= 0) {
        if (avPacket->stream_index == videoIndex) {
            // Feed the packet into the decoder
            if (avcodec_send_packet(avCodecContext, avPacket) != 0) {
                LOGE("Error sending packet to decoder");
                goto finish;
            }
            // Pull decoded frames out of the decoder
            while (avcodec_receive_frame(avCodecContext, avFrame) == 0) {
                // Convert avFrame into avFrameRGB
                sws_scale(swsContext, (const uint8_t *const *) avFrame->data,
                          avFrame->linesize, 0, avCodecContext->height,
                          avFrameRGB->data, avFrameRGB->linesize);
                // Lock the window's next drawing surface
                if (ANativeWindow_lock(nativeWindow, &windowBuffer, nullptr) < 0) {
                    LOGE("cannot lock window");
                } else {
                    // Copy avFrameRGB into the window buffer row by row,
                    // since the window stride may differ from the frame stride
                    auto *bufferBits = (uint8_t *) windowBuffer.bits;
                    for (int h = 0; h < height; h++) {
                        memcpy(bufferBits + h * windowBuffer.stride * 4,
                               out_buffer + h * avFrameRGB->linesize[0],
                               avFrameRGB->linesize[0]);
                    }
                    // Unlock the surface and post it for display
                    ANativeWindow_unlockAndPost(nativeWindow);
                }
                // Decoding runs faster than real time; sleep so frames are
                // presented at their intended rate
                int64_t pts = avFrame->pts;
                av_frame_unref(avFrame);
                AVRational time_base = avFormatContext->streams[videoIndex]->time_base;
                double timestamp = (double) pts * av_q2d(time_base);
                int64_t master_clock = av_gettime_relative() - stime;
                int64_t diff = (int64_t) (timestamp * 1000 * 1000) - master_clock; // microseconds
                if (diff > 0) {
                    usleep(diff);
                }
            }
        }
        av_packet_unref(avPacket);
    }
    // Release resources (all of these are NULL-safe, so the error paths above can land here too)
finish:
    sws_freeContext(swsContext);
    av_packet_free(&avPacket);
    av_frame_free(&avFrame);
    av_frame_free(&avFrameRGB);
    av_free(out_buffer);
    avcodec_free_context(&avCodecContext); // also closes the codec
    avformat_close_input(&avFormatContext);
    return 0;
}
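To make the pacing step concrete with a worked example (the numbers are illustrative): if the stream's time_base is 1/90000 and a frame carries pts = 180000, its intended display time is 180000 × (1/90000) = 2.0 s. If only 1.7 s of wall-clock time has elapsed since stime, then diff = 2,000,000 − 1,700,000 = 300,000 µs, so the thread sleeps 0.3 s before reading the next packet. Note this only throttles decoding that runs ahead of the clock; it cannot catch up if decoding falls behind.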
A freshly created project already contains a stringFromJNI function, so we just declare a new method next to it. When you type the function name, Android Studio auto-generates the whole JNI stub, which is extremely convenient. Alternatively you can register the function dynamically, which makes unified management easier (a sketch of that follows the code below).
#include <jni.h>
#include <string>
#include "ffplayer/ffplayer.h"
#include "ffplayer/logger.h"

extern "C"
JNIEXPORT jint JNICALL
Java_com_hsw_androidvideoplayer_FFPlayerActivity_play(JNIEnv *env, jobject thiz,
                                                      jstring videoPath, jobject surface) {
    const char *video_path = env->GetStringUTFChars(videoPath, nullptr);
    // Get an ANativeWindow from the Java Surface
    ANativeWindow *window = ANativeWindow_fromSurface(env, surface);
    if (window == nullptr) {
        LOGE("Could not get native window from surface");
        env->ReleaseStringUTFChars(videoPath, video_path);
        return -1;
    }
    // Wrapper class that does the actual playback
    FFplayer nativePlayer;
    nativePlayer.native_play(video_path, window);
    env->ReleaseStringUTFChars(videoPath, video_path);
    return 0;
}
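For reference, here is a minimal sketch of the dynamic-registration alternative mentioned above. It assumes the same Kotlin class (com/hsw/androidvideoplayer/FFPlayerActivity); the function name nativePlay is arbitrary, since the method table does the mapping:

#include <jni.h>
#include <android/native_window_jni.h>
#include "ffplayer/ffplayer.h"

// Implementation that the Kotlin `external fun play(...)` gets bound to.
static jint nativePlay(JNIEnv *env, jobject thiz, jstring videoPath, jobject surface) {
    const char *video_path = env->GetStringUTFChars(videoPath, nullptr);
    ANativeWindow *window = ANativeWindow_fromSurface(env, surface);
    jint ret = -1;
    if (window != nullptr) {
        FFplayer nativePlayer;
        ret = nativePlayer.native_play(video_path, window);
    }
    env->ReleaseStringUTFChars(videoPath, video_path);
    return ret;
}

// Method table: Kotlin method name, JNI signature, function pointer.
static const JNINativeMethod gMethods[] = {
        {"play", "(Ljava/lang/String;Landroid/view/Surface;)I", (void *) nativePlay},
};

// Runs automatically when System.loadLibrary("androidvideoplayer") loads the library.
extern "C" JNIEXPORT jint JNI_OnLoad(JavaVM *vm, void * /*reserved*/) {
    JNIEnv *env = nullptr;
    if (vm->GetEnv(reinterpret_cast<void **>(&env), JNI_VERSION_1_6) != JNI_OK) {
        return JNI_ERR;
    }
    jclass clazz = env->FindClass("com/hsw/androidvideoplayer/FFPlayerActivity");
    if (clazz == nullptr ||
        env->RegisterNatives(clazz, gMethods,
                             sizeof(gMethods) / sizeof(gMethods[0])) != JNI_OK) {
        return JNI_ERR;
    }
    return JNI_VERSION_1_6;
}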
With that, we have implemented a most basic video player.
The goal here was simply to get familiar with the FFmpeg API: which calls are needed to play a video, and what each of them does.
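To recap the flow used above: avformat_open_input -> avformat_find_stream_info -> avcodec_find_decoder -> avcodec_alloc_context3 -> avcodec_parameters_to_context -> avcodec_open2 to set everything up; then a loop of av_read_frame -> avcodec_send_packet -> avcodec_receive_frame -> sws_scale -> ANativeWindow_lock / ANativeWindow_unlockAndPost to decode and render; and finally the matching free/close calls to tear down.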
Playback is still far from complete and has some bugs; more functionality will be added in follow-up posts:
TODO: