1. Basic workflow for Android FFmpeg development
(1) Build x264/FFmpeg with the NDK
(2) ffmpeg.so + encode/decode C code
(3) Build with Android.mk
(4) JNI
(5) Call from Java code
The basic workflow consists of the steps above.
This article covers building FFmpeg and x264.
2. NDK configuration
First, set up the NDK development environment (details omitted here).
(1) Set the $NDK environment variable
# Detect NDK
if [[ -z "$NDK" ]]; then
echo "The NDK dir is empty, If the shell can not run normally, you should set the NDK variable to your local ndk.dir"
exit 1
fi
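For example (the path below is only a placeholder for wherever your NDK is installed):
# Example: point NDK at a local installation before running the build scripts
export NDK=$HOME/Android/android-ndk    # placeholder path, adjust to your machine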
(2) Detect the host OS
# Detect OS
OS=`uname`
HOST_ARCH=`uname -m`
export CCACHE=; type ccache >/dev/null 2>&1 && export CCACHE=ccache
if [ "$OS" = 'Linux' ]; then
export HOST_SYSTEM=linux-$HOST_ARCH
elif [ "$OS" = 'Darwin' ]; then
export HOST_SYSTEM=darwin-$HOST_ARCH
fi
(3) Configure SYSROOT and CROSS_PREFIX
SYSROOT=$NDK/platforms/android-16/arch-arm
CROSS_PREFIX=$NDK/toolchains/arm-linux-androideabi-4.9/prebuilt/$HOST_SYSTEM/bin/arm-linux-androideabi-
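Before running any configure script, it can be worth sanity-checking these paths; the sketch below assumes the old-style NDK layout used here (a standalone platforms/ directory and the gcc 4.9 toolchain):
# Verify that the sysroot and cross compiler actually exist before building
ls -d "$SYSROOT/usr/include" || echo "sysroot headers not found: $SYSROOT"
test -x "${CROSS_PREFIX}gcc" || echo "cross compiler not found: ${CROSS_PREFIX}gcc"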
3. Compiling x264
3.1 Download the source code
git clone http://git.videolan.org/git/x264.git
3.2 Build script
#!/bin/bash
echo "###### 开始编译 x264 ######"
SOURCE=$TARGET_X264_DIR
cd $SOURCE
# PREFIX is the install (output) path; if not set, the defaults are /usr/local/lib and /usr/local/include
#PREFIX=../build
EXTRA_CFLAGS="-march=armv7-a -mfloat-abi=softfp -mfpu=neon -D__ARM_ARCH_7__ -D__ARM_ARCH_7A__"
EXTRA_LDFLAGS="-nostdlib"
./configure \
--prefix=$PREFIX \
--cross-prefix=$CROSS_PREFIX \
--extra-cflags="$EXTRA_CFLAGS" \
--extra-ldflags="$EXTRA_LDFLAGS" \
--enable-static \
--enable-pic \
--enable-strip \
--disable-cli \
--host=arm-linux \
--sysroot=$SYSROOT
make clean
make && make install
Notes:
(1) PREFIX is the install (output) path; if not set, the defaults are /usr/local/lib and /usr/local/include.
Here both ffmpeg and x264 are installed into a build folder in the project root (see the sketch after these notes).
(2) Do not build with multiple make jobs, otherwise the assembly optimization step fails, for example:
make -j4
make: [common/arm/deblock-a.o] Error 127 (ignored)
/Users/guohe/Android/android-ndk/toolchains/arm-linux-androideabi-4.9/prebuilt/darwin-x86_64/bin/arm-linux-androideabi-gcc -I. -I. -c -DSTACK_ALIGNMENT=4 -DPIC -DHIGH_BIT_DEPTH=0 -DBIT_DEPTH=8 -o common/arm/predict-a.o common/arm/predict-a.S
/bin/sh: j4: command not found
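As mentioned in note (1), pointing both builds at a shared output folder can be as simple as the following sketch (PROJECT_ROOT is a placeholder for your own project directory):
# Install x264 (and later ffmpeg) into <project root>/build instead of /usr/local
PREFIX=$PROJECT_ROOT/build    # PROJECT_ROOT is a placeholder, set it to your project path
mkdir -p "$PREFIX"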
3.3 Conclusion
Building x264 on Linux:
sudo ./configure --enable-shared --prefix=/usr/local
sudo make
sudo make install
Comparing the two, the essential additions when building x264 for Android are the SYSROOT and CROSS_PREFIX settings.
4. Compiling FFmpeg
4.1 Download the source code
git clone git://source.ffmpeg.org/ffmpeg.git $FFMPEG_SOURCE_DIR
4.2 Build script
ADD_H264_FEATURE="--enable-encoder=aac \
--enable-decoder=aac \
--enable-encoder=libx264 \
--enable-libx264 \
--extra-cflags=-I$PREFIX/include \
--extra-ldflags=-L$PREFIX/lib "
./configure \
--prefix=$PREFIX \
--enable-pthreads \
--enable-gpl \
--enable-version3 \
--enable-nonfree \
--enable-static \
--enable-small \
--enable-asm \
--enable-neon \
--cross-prefix=$CROSS_PREFIX \
--target-os=linux \
--arch=arm \
--enable-cross-compile \
--sysroot=$SYSROOT \
$ADD_H264_FEATURE
make clean
make -j4
make install
Notes:
(1) Cross-compilation options:
--sysroot=
--target-os=linux
(2) x264 support (see the check below):
--enable-encoder=libx264 \
--enable-libx264 \
--extra-cflags=-I
(3) Assembly/NEON optimization:
--enable-asm \
--enable-neon \
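A quick way to confirm that x264 support really made it into the build is to look at the config.h that configure generates in the FFmpeg build directory; if the option took effect, CONFIG_LIBX264 should be defined to 1 (this sketch assumes you run it from that directory):
# Should print something like: #define CONFIG_LIBX264 1
grep "CONFIG_LIBX264" config.h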
4.3 Build results
At this point, both ffmpeg and x264 have been built. The results are as follows:
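A sketch of the expected install layout under $PREFIX (not verbatim output; the exact contents depend on the configure options):
build/
  include/    # libavcodec/, libavformat/, libavutil/, libswscale/, libswresample/,
              # libavfilter/, libavdevice/, libpostproc/, x264.h, x264_config.h
  lib/        # libavcodec.a, libavformat.a, libavutil.a, libswscale.a, libswresample.a,
              # libavfilter.a, libavdevice.a, libpostproc.a, libx264.a, pkgconfig/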
5. NDK + FFmpeg development
With ffmpeg and x264 built, let's look at how to develop against the ffmpeg libraries.
Below is an FFmpeg-based decoding program that decodes a video and saves its first 5 frames.
5.1 Copy the compiled FFmpeg and x264 libraries and headers
cp $PREFIX/lib/*.a ./jni/lib
cp -r $PREFIX/include/* ./jni/
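After copying, the jni directory should look roughly like this (a sketch; decoder.c and encoder.c are the sources referenced by the Android.mk below):
jni/
  Android.mk
  decoder.c  encoder.c
  lib/                                                    # the *.a files copied above
  libavcodec/  libavformat/  libavutil/  libswscale/ ...  # headers copied above
  x264.h  x264_config.h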
5.2 Write Android.mk
Android.mk mainly consists of the following parts:
(1) Import the prebuilt static libraries:
libavcodec.a libavdevice.a libavfilter.a libavformat.a libavutil.a libpostproc.a libswresample.a libswscale.a libx264.a
#static version of libavutil
include $(CLEAR_VARS)
LOCAL_MODULE:= libavutil_static
LOCAL_SRC_FILES:= lib/libavutil.a
LOCAL_EXPORT_C_INCLUDES := $(LOCAL_PATH)
include $(PREBUILT_STATIC_LIBRARY)
(2) Compile the source files
The following variables need to be set:
LOCAL_MODULE // name of the module (library)
LOCAL_SRC_FILES // source files
LOCAL_LDLIBS // additional system libraries to link
LOCAL_CFLAGS // compiler flags
(3) The type of output to build
include $(BUILD_SHARED_LIBRARY)
(4) The full script:
LOCAL_PATH := $(call my-dir)
#include $(call all-subdir-makefiles)
#static version of libavcodec
include $(CLEAR_VARS)
LOCAL_MODULE:= libavcodec_static
LOCAL_SRC_FILES:= lib/libavcodec.a
LOCAL_EXPORT_C_INCLUDES := $(LOCAL_PATH)
include $(PREBUILT_STATIC_LIBRARY)
#static version of libavformat
include $(CLEAR_VARS)
LOCAL_MODULE:= libavformat_static
LOCAL_SRC_FILES:= lib/libavformat.a
LOCAL_EXPORT_C_INCLUDES := $(LOCAL_PATH)
include $(PREBUILT_STATIC_LIBRARY)
#static version of libswscale
include $(CLEAR_VARS)
LOCAL_MODULE:= libswscale_static
LOCAL_SRC_FILES:= lib/libswscale.a
LOCAL_EXPORT_C_INCLUDES := $(LOCAL_PATH)
include $(PREBUILT_STATIC_LIBRARY)
#static version of libavutil
include $(CLEAR_VARS)
LOCAL_MODULE:= libavutil_static
LOCAL_SRC_FILES:= lib/libavutil.a
LOCAL_EXPORT_C_INCLUDES := $(LOCAL_PATH)
include $(PREBUILT_STATIC_LIBRARY)
#static version of libavdevice
include $(CLEAR_VARS)
LOCAL_MODULE:= libavdevice_static
LOCAL_SRC_FILES:= lib/libavdevice.a
LOCAL_EXPORT_C_INCLUDES := $(LOCAL_PATH)
include $(PREBUILT_STATIC_LIBRARY)
#static version of libavfilter
include $(CLEAR_VARS)
LOCAL_MODULE:= libavfilter_static
LOCAL_SRC_FILES:= lib/libavfilter.a
LOCAL_EXPORT_C_INCLUDES := $(LOCAL_PATH)
include $(PREBUILT_STATIC_LIBRARY)
#static version of libswresample
include $(CLEAR_VARS)
LOCAL_MODULE:= libswresample_static
LOCAL_SRC_FILES:= lib/libswresample.a
LOCAL_EXPORT_C_INCLUDES := $(LOCAL_PATH)
include $(PREBUILT_STATIC_LIBRARY)
#static version of libpostproc
include $(CLEAR_VARS)
LOCAL_MODULE:= libpostproc_static
LOCAL_SRC_FILES:= lib/libpostproc.a
LOCAL_EXPORT_C_INCLUDES := $(LOCAL_PATH)
include $(PREBUILT_STATIC_LIBRARY)
#static version of libx264
include $(CLEAR_VARS)
LOCAL_MODULE:= libx264_static
LOCAL_SRC_FILES:= lib/libx264.a
LOCAL_CFLAGS := -march=armv7-a -mfloat-abi=softfp -mfpu=neon -O3 -ffast-math -funroll-loops
LOCAL_EXPORT_C_INCLUDES := $(LOCAL_PATH)
include $(PREBUILT_STATIC_LIBRARY)
include $(CLEAR_VARS)
LOCAL_MODULE := ffmpeg
LOCAL_SRC_FILES := decoder.c encoder.c
LOCAL_LDLIBS := -llog -lz
LOCAL_CFLAGS := -march=armv7-a -mfloat-abi=softfp -mfpu=neon -O3 -ffast-math -funroll-loops
LOCAL_WHOLE_STATIC_LIBRARIES := libavformat_static \
libavcodec_static \
libavutil_static \
libpostproc_static \
libswscale_static \
libswresample_static \
libx264_static \
libavfilter_static \
libavdevice_static
include $(BUILD_SHARED_LIBRARY)
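Since the flags above target armv7 and the prebuilt .a files were built for armeabi-v7a, it may also help to add a jni/Application.mk; a minimal sketch (the exact values depend on your project) could be:
# jni/Application.mk
APP_ABI := armeabi-v7a        # matches the -march=armv7-a flags used above
APP_PLATFORM := android-16    # matches the android-16 sysroot used to build ffmpeg/x264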
5.3 The decoder program
A native program generally consists of two parts:
(1) The JNI interface (the header can be generated with javah; see the sketch after this list)
/*
* Class: com_example_jnidemo_MainActivity
* Method: info
* Signature: (Ljava/lang/String;)V
*/
JNIEXPORT void JNICALL Java_com_example_jnidemo_MainActivity_info
(JNIEnv *, jobject, jstring);
(2) The functional module
(omitted here; see the full code below)
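The JNI header com_example_jnidemo_MainActivity.h included below can be generated with javah; a sketch, assuming an Eclipse/ADT-style project where compiled classes land in bin/classes:
# Run from the project root; -d jni writes the header next to the C sources
javah -classpath bin/classes -d jni com.example.jnidemo.MainActivity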
The full code is as follows:
#include <jni.h>
#include <android/log.h>
#include <stdio.h>
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libavformat/avio.h>
#include <libavutil/fifo.h>
#include <libavutil/avutil.h>
#include <libavutil/mem.h>
#include <libswscale/swscale.h>
#include "com_example_jnidemo_MainActivity.h"
#define LOG_TAG "FFMPEG INFO"
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO,LOG_TAG,__VA_ARGS__)
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR,LOG_TAG,__VA_ARGS__)
void SaveFrame(AVFrame *pFrame, int width, int height, int iFrame) {
FILE* pFile;
char szFileName[64];
int y;
snprintf(szFileName, sizeof(szFileName), "/mnt/sdcard/test/frame%d.ppm", iFrame);
LOGI("filename : %s",szFileName);
pFile = fopen(szFileName,"wb");
if(pFile == NULL){
LOGI("can not open file %s",szFileName);
return ;
}
// Write the PPM header, then one RGB24 row per image row (loop over height, not width)
fprintf(pFile,"P6\n%d %d\n255\n",width,height);
LOGI("Write file AVFrame");
for(y=0;y<height;++y){
fwrite(pFrame->data[0]+y*pFrame->linesize[0],1,width*3,pFile);
}
fclose(pFile);
LOGI("close file %s",szFileName);
}
JNIEXPORT void JNICALL Java_com_example_jnidemo_MainActivity_info(JNIEnv *env, jobject obj, jstring jpath){
const char* path = (*env)->GetStringUTFChars(env,jpath,NULL);
AVFormatContext *pFormatCtx = NULL;
int i, videoStream;
AVCodecContext *pCodecCtxOrig = NULL;
AVCodecContext *pCodecCtx = NULL;
AVCodec *pCodec = NULL;
AVFrame *pFrame = NULL;
AVFrame *pFrameRGB = NULL;
AVPacket packet;
int frameFinished;
int numBytes;
uint8_t *buffer = NULL;
struct SwsContext *sws_ctx = NULL;
av_register_all();
int err_code;
if((err_code = avformat_open_input(&pFormatCtx, path, NULL, NULL)) != 0)
{
char buf[256];
av_strerror(err_code, buf, sizeof(buf));
LOGE("Couldn't open file %s: %d(%s)", path, err_code, buf);
return;
}
if(avformat_find_stream_info(pFormatCtx, NULL)<0)
return ;
av_dump_format(pFormatCtx, 0, path, 0);
videoStream=-1;
for(i=0; i<pFormatCtx->nb_streams; i++)
if(pFormatCtx->streams[i]->codec->codec_type==AVMEDIA_TYPE_VIDEO)
{
videoStream=i;
break;
}
if(videoStream==-1)
return ;
pCodecCtxOrig=pFormatCtx->streams[videoStream]->codec;
pCodec=avcodec_find_decoder(pCodecCtxOrig->codec_id);
if(pCodec==NULL)
{
LOGE("Unsupported codec!\n");
return ;
}
pCodecCtx = avcodec_alloc_context3(pCodec);
if(avcodec_copy_context(pCodecCtx, pCodecCtxOrig) != 0)
{
LOGE("Couldn't copy codec context");
return ;
}
if(avcodec_open2(pCodecCtx, pCodec, NULL)<0)
return ;
pFrame=av_frame_alloc();
pFrameRGB=av_frame_alloc();
if(pFrameRGB==NULL)
return ;
numBytes=avpicture_get_size(AV_PIX_FMT_RGB24, pCodecCtx->width,pCodecCtx->height);
buffer=(uint8_t *)av_malloc(numBytes*sizeof(uint8_t));
avpicture_fill((AVPicture *)pFrameRGB, buffer, AV_PIX_FMT_RGB24,pCodecCtx->width, pCodecCtx->height);
sws_ctx = sws_getContext(pCodecCtx->width,
pCodecCtx->height,
pCodecCtx->pix_fmt,
pCodecCtx->width,
pCodecCtx->height,
AV_PIX_FMT_RGB24,
SWS_BILINEAR,
NULL,
NULL,
NULL
);
i=0;
while(av_read_frame(pFormatCtx, &packet)>=0) {
if(packet.stream_index==videoStream) {
avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);
if(frameFinished) {
sws_scale(sws_ctx, (uint8_t const * const *)pFrame->data,
pFrame->linesize, 0, pCodecCtx->height,
pFrameRGB->data, pFrameRGB->linesize);
if(++i<=5)
SaveFrame(pFrameRGB, pCodecCtx->width, pCodecCtx->height, i);
}
}
av_free_packet(&packet);
}
av_free(buffer);
av_frame_free(&pFrameRGB);
av_frame_free(&pFrame);
avcodec_close(pCodecCtx);
avcodec_close(pCodecCtxOrig);
avformat_close_input(&pFormatCtx);
(*env)->ReleaseStringUTFChars(env,jpath,path);
}
5.4 Build the native program
Go into the jni directory and run:
$NDK_PATH/ndk-build
Build output:
After the build completes, the generated .so files appear under the libs/<abi> directory. Since the module name set in Android.mk is ffmpeg, the generated file is libffmpeg.so.
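The output layout is roughly (a sketch, assuming APP_ABI := armeabi-v7a as in the Application.mk above):
libs/armeabi-v7a/libffmpeg.so        # stripped library packaged into the APK
obj/local/armeabi-v7a/libffmpeg.so   # unstripped library with debug symbols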
5.5 Calling JNI from Java
(1) Load the native library
static {
System.loadLibrary("ffmpeg");
}
(2) JNI interface
All JNI methods are declared with the native keyword:
private native void info(String path);
(3) Full code
package com.example.jnidemo;
import android.app.Activity;
import android.os.Bundle;
public class MainActivity extends Activity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
String path = "/sdcard/test/test.mp4";
info(path);
}
private native void info(String path);
static {
System.loadLibrary("ffmpeg");
}
}
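Note that the native code reads /sdcard/test/test.mp4 and SaveFrame writes PPM files to /mnt/sdcard/test, so the app needs the WRITE_EXTERNAL_STORAGE permission in its manifest and the test video must exist on the device. For example, pushing a local file with adb (the file name is just an example):
adb shell mkdir -p /sdcard/test
adb push test.mp4 /sdcard/test/test.mp4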