Using the ffmpeg library on the Android platform

zxg2006 2012-04-20 11:30:45
After a lot of effort I managed to build libffmpeg.so (following http://www.cnblogs.com/scottwong/archive/2010/12/17/1909455.html, tweaking as I compiled), and then, using this expert's method (http://tq09931.iteye.com/blog/1011895), I got my first ffmpeg function call to run. But as soon as I try to use any other function I'm stuck: they all come up undefined!
The C file:

/*
* Copyright (C) 2009 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
#include <string.h>
#include <jni.h>
#include <stdio.h>
#include <stdlib.h>
#ifdef __cplusplus
extern "C" {
#endif

#include <ffmpeg/libavcodec/avcodec.h>
//#include <ffmpeg/libavformat/avformat.h>
//#include <ffmpeg/libavutil/avutil.h>


#define PI 3.14159265358979323846

#ifdef HAVE_AV_CONFIG_H
#undef HAVE_AV_CONFIG_H
#endif


//#include "avcodec.h"
//#include "define.h"

//#define INBUF_SIZE 4096

struct AVCodec *codec=NULL; // Codec
struct AVCodecContext *c=NULL; // Codec Context
struct AVFrame *picture=NULL; // Frame
AVPacket avpkt;

int iWidth=0;
int iHeight=0;

int *colortab=NULL;
int *u_b_tab=NULL;
int *u_g_tab=NULL;
int *v_g_tab=NULL;
int *v_r_tab=NULL;

//short *tmp_pic=NULL;

unsigned int *rgb_2_pix=NULL;
unsigned int *r_2_pix=NULL;
unsigned int *g_2_pix=NULL;
unsigned int *b_2_pix=NULL;

void DeleteYUVTab()
{
// av_free(tmp_pic);

av_free(colortab);
av_free(rgb_2_pix);
}

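/*
 * Build the lookup tables for YUV420 -> RGB565 conversion: colortab holds
 * the scaled U/V contribution terms, and rgb_2_pix holds pre-shifted R/G/B
 * components with saturation (the +256 offset applied below makes
 * out-of-range values clamp to 0 or to the channel maximum).
 */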
void CreateYUVTab_16()
{
int i;
int u, v;

// tmp_pic = (short*)av_malloc(iWidth*iHeight*2); // buffer of iWidth * iHeight * 16 bits

colortab = (int *)av_malloc(4*256*sizeof(int));
u_b_tab = &colortab[0*256];
u_g_tab = &colortab[1*256];
v_g_tab = &colortab[2*256];
v_r_tab = &colortab[3*256];

for (i=0; i<256; i++)
{
u = v = (i-128);

u_b_tab[i] = (int) ( 1.772 * u);
u_g_tab[i] = (int) ( 0.34414 * u);
v_g_tab[i] = (int) ( 0.71414 * v);
v_r_tab[i] = (int) ( 1.402 * v);
}

rgb_2_pix = (unsigned int *)av_malloc(3*768*sizeof(unsigned int));

r_2_pix = &rgb_2_pix[0*768];
g_2_pix = &rgb_2_pix[1*768];
b_2_pix = &rgb_2_pix[2*768];

for(i=0; i<256; i++)
{
r_2_pix[i] = 0;
g_2_pix[i] = 0;
b_2_pix[i] = 0;
}

for(i=0; i<256; i++)
{
r_2_pix[i+256] = (i & 0xF8) << 8;
g_2_pix[i+256] = (i & 0xFC) << 3;
b_2_pix[i+256] = (i ) >> 3;
}

for(i=0; i<256; i++)
{
r_2_pix[i+512] = 0xF8 << 8;
g_2_pix[i+512] = 0xFC << 3;
b_2_pix[i+512] = 0x1F;
}

r_2_pix += 256;
g_2_pix += 256;
b_2_pix += 256;
}

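/*
 * Convert one YUV420 frame to RGB565: each pass handles a 2x2 block of luma
 * samples sharing one U and one V sample, and packs two 16-bit pixels into
 * every 32-bit word written to pdst1.
 */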
void DisplayYUV_16(unsigned int *pdst1, unsigned char *y, unsigned char *u, unsigned char *v, int width, int height, int src_ystride, int src_uvstride, int dst_ystride)
{
int i, j;
int r, g, b, rgb;

int yy, ub, ug, vg, vr;

unsigned char* yoff;
unsigned char* uoff;
unsigned char* voff;

unsigned int* pdst=pdst1;

int width2 = width/2;
int height2 = height/2;

if(width2>iWidth/2)
{
width2=iWidth/2;

y+=(width-iWidth)/4*2;
u+=(width-iWidth)/4;
v+=(width-iWidth)/4;
}
if(height2>iHeight)
height2=iHeight;

//LOGD("height2....%d",height2);
for(j=0; j<height2; j++) // each pass handles a 2x2 block of four pixels
{
yoff = y + j * 2 * src_ystride;
uoff = u + j * src_uvstride;
voff = v + j * src_uvstride;

for(i=0; i<width2; i++)
{
yy = *(yoff+(i<<1));
ub = u_b_tab[*(uoff+i)];
ug = u_g_tab[*(uoff+i)];
vg = v_g_tab[*(voff+i)];
vr = v_r_tab[*(voff+i)];

b = yy + ub;
g = yy - ug - vg;
r = yy + vr;

rgb = r_2_pix[r] + g_2_pix[g] + b_2_pix[b];

yy = *(yoff+(i<<1)+1);
b = yy + ub;
g = yy - ug - vg;
r = yy + vr;

pdst[(j*dst_ystride+i)] = (rgb)+((r_2_pix[r] + g_2_pix[g] + b_2_pix[b])<<16);

yy = *(yoff+(i<<1)+src_ystride);
b = yy + ub;
g = yy - ug - vg;
r = yy + vr;

rgb = r_2_pix[r] + g_2_pix[g] + b_2_pix[b];

yy = *(yoff+(i<<1)+src_ystride+1);
b = yy + ub;
g = yy - ug - vg;
r = yy + vr;

pdst [((2*j+1)*dst_ystride+i*2)>>1] = (rgb)+((r_2_pix[r] + g_2_pix[g] + b_2_pix[b])<<16);
}
}
}

//====================================================
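/* JNI entry points for the Java class com.watch.VView:
   InitDecoder() / UninitDecoder() / DecoderNal(). */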

jint Java_com_watch_VView_InitDecoder(JNIEnv* env, jobject thiz, jint width, jint height)
{
iWidth = width;
iHeight = height;


av_init_packet(&avpkt);

/* set end of buffer to 0 (this ensures that no overreading happens for damaged mpeg streams) */
//memset(inbuf + INBUF_SIZE, 0, FF_INPUT_BUFFER_PADDING_SIZE);

printf("Video decoding\n");

/* find the H.264 video decoder */
codec = avcodec_find_decoder(CODEC_ID_H264);
if (!codec) {
fprintf(stderr, "codec not found\n");
return 0;//exit(1);
}

c= avcodec_alloc_context();
picture= avcodec_alloc_frame();

if(codec->capabilities&CODEC_CAP_TRUNCATED)
c->flags|= CODEC_FLAG_TRUNCATED; /* we do not send complete frames */
/* For some codecs, such as msmpeg4 and mpeg4, width and height
MUST be initialized there because this information is not
available in the bitstream. */

/* open it */
if (avcodec_open(c, codec) < 0) {
fprintf(stderr, "could not open codec\n");
//exit(1);
return 0;
}
return 1;
}

/*
* Class: com_watch_VView
* Method: UninitDecoder
* Signature: ()I
*/
jint Java_com_watch_VView_UninitDecoder(JNIEnv* env, jobject thiz)
{
//LOGD("UninitDecoder");
if(c)
{
decode_end(c);
free(c->priv_data);

free(c);
c = NULL;
}

if(picture)
{
free(picture);
picture = NULL;
}
avcodec_close(c);
av_free(c);
av_free(picture);
DeleteYUVTab();

return 1;
}

/*
* Class: com_watch_VView
* Method: DecoderNal
* Signature: ([BI[B)I
*/
jint Java_com_watch_VView_DecoderNal(JNIEnv* env, jobject thiz, jbyteArray in, jint nalLen, jbyteArray out)
{
int i;
int imod;
int got_picture;

jbyte * Buf = (jbyte*)(*env)->GetByteArrayElements(env, in, 0);
jbyte * Pixel= (jbyte*)(*env)->GetByteArrayElements(env, out, 0);
//avpkt.size = nalLen;
//avpkt.data = Buf;
//int consumed_bytes = avcodec_decode_video2(c, picture, &got_picture, &avpkt);// nalLen);
int consumed_bytes = avcodec_decode_video(c, picture, &got_picture, Buf, nalLen);

//LOGD("decode_frame");
if(consumed_bytes > 0)
{
DisplayYUV_16((int*)Pixel, picture->data[0], picture->data[1], picture->data[2], c->width, c->height, picture->linesize[0], picture->linesize[1], iWidth);
/*
for(i=0; i<c->height; i++)
fwrite(picture->data[0] + i * picture->linesize[0], 1, c->width, outf);

for(i=0; i<c->height/2; i++)
fwrite(picture->data[1] + i * picture->linesize[1], 1, c->width/2, outf);

for(i=0; i<c->height/2; i++)
fwrite(picture->data[2] + i * picture->linesize[2], 1, c->width/2, outf);
// */
}
//LOGD("DisplayYUV_16");
(*env)->ReleaseByteArrayElements(env, in, Buf, 0);
(*env)->ReleaseByteArrayElements(env, out, Pixel, 0);

return consumed_bytes;
}
#ifdef __cplusplus
}
#endif
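
One detail worth checking in the C file above: avcodec_find_decoder() only returns codecs that have been registered. If registration is not done anywhere else, InitDecoder may need something like the following before the find call (a sketch, assuming ffmpeg 0.6.x):

/* Sketch only: register the codec table so avcodec_find_decoder() can find H.264. */
avcodec_init();          /* legacy init call used in ffmpeg 0.6.x examples */
avcodec_register_all();  /* without this, avcodec_find_decoder() may return NULL */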
5 replies
jj100432177 2012-09-24
Hi, I have also packaged ffmpeg into a .so,
but I don't know which functions it contains.
I want to save a YUV420 data stream as a video file. What steps does that involve, and is there any reference material?

If you know, please tell me. I have been at this for days with no clue. Thanks.
zxg2006 2012-04-20
[Quote=Reply #2:]
First, the C file you posted is the JNI part; build it with ffmpeg 0.6.3 + NDK r5.
Then the Java side loads the resulting .so with loadLibrary, and it also has to load ffmpeg's .so.

As long as the JNI part builds into a .so and the Java side loads the required .so files with loadLibrary, the problem you describe will not appear.

Most likely you are using it wrong.
[/Quote]
Here is how I use it:
1. First build the .so with ffmpeg 0.6.3 + NDK r5 on Ubuntu.
2. Following the method found online, put that .so into the NDK's libs folder.
3. Write the JNI file in my own project.
4. Also put the ffmpeg source under the jni folder.
5. Modify the Android.mk file, adding the library path for libffmpeg.so.
6. In the JNI file, include the .h files that declare the functions I want to call.
7. Call the functions...
Is that the right way to use it?
Calling avcodec_version() this way works fine.
Could it be a problem with the source version?
Is avcodec_decode_video2 the newer replacement for avcodec_decode_video?
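
(avcodec_decode_video2 is indeed the packet-based replacement for avcodec_decode_video in ffmpeg 0.6.x, and newer versions remove the old call entirely. A minimal sketch of the newer usage, assuming the same c / picture / avpkt globals and the Buf / nalLen variables from the C file above:)

/* Sketch only: decode one NAL unit with the packet-based API (ffmpeg 0.6.x). */
av_init_packet(&avpkt);
avpkt.data = (uint8_t *)Buf; /* input buffer taken from the Java byte array */
avpkt.size = nalLen;
int got_picture = 0;
int consumed_bytes = avcodec_decode_video2(c, picture, &got_picture, &avpkt);
if (consumed_bytes > 0 && got_picture) {
/* picture->data[] / picture->linesize[] now hold the decoded YUV planes */
}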

Android.mk
LOCAL_PATH := $(call my-dir)

include $(CLEAR_VARS)
PATH_TO_FFMPEG_SOURCE:=$(LOCAL_PATH)/ffmpeg
LOCAL_C_INCLUDES += $(PATH_TO_FFMPEG_SOURCE)
LOCAL_LDLIBS := -lffmpeg
LOCAL_MODULE := H264Android
LOCAL_SRC_FILES := H264Android.c

LOCAL_LDLIBS:=-L$(SYSROOT)/usr/lib -llog
include $(BUILD_SHARED_LIBRARY)
rightorwrong 2012-04-20
First, the C file you posted is the JNI part; build it with ffmpeg 0.6.3 + NDK r5.
Then the Java side loads the resulting .so with loadLibrary, and it also has to load ffmpeg's .so.

As long as the JNI part builds into a .so and the Java side loads the required .so files with loadLibrary, the problem you describe will not appear.

Most likely you are using it wrong.
zxg2006 2012-04-20

E:/zxg/workstation/androidApplication/WacthLogin/obj/local/armeabi/objs/H264Android/H264Android.o: In function `Java_com_watch_VView_DecoderNal':
E:/zxg/workstation/androidApplication/WacthLogin/jni/H264Android.c:285: undefined reference to `avcodec_decode_video'
E:/zxg/workstation/androidApplication/WacthLogin/obj/local/armeabi/objs/H264Android/H264Android.o: In function `DeleteYUVTab':
E:/zxg/workstation/androidApplication/WacthLogin/jni/H264Android.c:68: undefined reference to `av_free'
E:/zxg/workstation/androidApplication/WacthLogin/jni/H264Android.c:69: undefined reference to `av_free'
E:/zxg/workstation/androidApplication/WacthLogin/obj/local/armeabi/objs/H264Android/H264Android.o: In function `Java_com_watch_VView_UninitDecoder':
E:/zxg/workstation/androidApplication/WacthLogin/jni/H264Android.c:249: undefined reference to `decode_end'
E:/zxg/workstation/androidApplication/WacthLogin/jni/H264Android.c:261: undefined reference to `avcodec_close'
E:/zxg/workstation/androidApplication/WacthLogin/jni/H264Android.c:262: undefined reference to `av_free'
E:/zxg/workstation/androidApplication/WacthLogin/jni/H264Android.c:263: undefined reference to `av_free'
E:/zxg/workstation/androidApplication/WacthLogin/obj/local/armeabi/objs/H264Android/H264Android.o: In function `Java_com_watch_VView_InitDecoder':
E:/zxg/workstation/androidApplication/WacthLogin/jni/H264Android.c:207: undefined reference to `av_init_packet'
E:/zxg/workstation/androidApplication/WacthLogin/jni/H264Android.c:215: undefined reference to `avcodec_find_decoder'
E:/zxg/workstation/androidApplication/WacthLogin/jni/H264Android.c:221: undefined reference to `avcodec_alloc_context'
E:/zxg/workstation/androidApplication/WacthLogin/jni/H264Android.c:222: undefined reference to `avcodec_alloc_frame'
E:/zxg/workstation/androidApplication/WacthLogin/jni/H264Android.c:231: undefined reference to `avcodec_open'
E:/zxg/workstation/androidApplication/WacthLogin/obj/local/armeabi/objs/H264Android/H264Android.o: In function `CreateYUVTab_16':
E:/zxg/workstation/androidApplication/WacthLogin/jni/H264Android.c:79: undefined reference to `av_malloc'
E:/zxg/workstation/androidApplication/WacthLogin/jni/H264Android.c:95: undefined reference to `av_malloc'
collect2: ld returned 1 exit status
/cygdrive/d/android-sdk-windows/ndk/android-ndk-r5/build/core/build-binary.mk:266: recipe for target `/cygdrive/e/zxg/workstation/androidApplication/WacthLogin/obj/local/armeabi/libH264Android.so' failed
make: *** [/cygdrive/e/zxg/workstation/androidApplication/WacthLogin/obj/local/armeabi/libH264Android.so] Error 1
E:/zxg/workstation/androidApplication/WacthLogin/jni/ffmpeg/libavcodec/avcodec.h:1826: warning: 'struct AVCodecContext' declared inside parameter list
E:/zxg/workstation/androidApplication/WacthLogin/jni/ffmpeg/libavcodec/avcodec.h:2091: warning: 'struct AVCodecContext' declared inside parameter list
E:/zxg/workstation/androidApplication/WacthLogin/jni/ffmpeg/libavcodec/avcodec.h:3341: warning: 'struct AVCodecContext' declared inside parameter list

Some people online suggested a version mismatch; I have tried several versions and it still fails. My .so was built from ffmpeg 0.6.3 + NDK r5 on Ubuntu, so the source I put under jni is also ffmpeg 0.6.3. Can anyone tell me what is going on here and how to deal with it?
zxg2006 2012-04-20
Found it! LOCAL_LDLIBS:=-L$(SYSROOT)/usr/lib -llog was overwriting LOCAL_LDLIBS := -lffmpeg!!!
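
In other words, the second assignment replaced the first, so -lffmpeg was never passed to the linker. One way to write the fix (a sketch, assuming libffmpeg.so is in a directory the NDK linker already searches) is to append instead of reassigning:

LOCAL_LDLIBS := -L$(SYSROOT)/usr/lib -llog
LOCAL_LDLIBS += -lffmpeg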
