For how to decode video frames with FFmpeg on Android, see the earlier article "How to Decode Video Frames with FFmpeg on Android" (如何在Android用FFmpeg解码图像); for how to display images with SDL2.0 on Android, see "Learning SDL from Scratch: Displaying a BMP Image with SDL2.0 on Android" (零基础学习SDL开发之在Android使用SDL2.0显示BMP图). With those two articles as a foundation, we can now use FFmpeg on Android to decode video and SDL2.0 to display the decoded frames.

My development environment: Ubuntu 14.04 64-bit, Eclipse + CDT + ADT + NDK.

Before we start, this article assumes you already know how to build FFmpeg with the NDK and how to port SDL2.0 to Android; if anything is unclear, refer to the two articles mentioned at the beginning.

Directory structure of the project: the ffmpeg folder contains the FFmpeg headers and an Android makefile; the SDL folder contains the SDL2.0 headers, the SDL source code, and the Android makefile that builds it; the player folder contains the decoding and display code together with its Android makefile. A sketch of this layout is shown below.
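For orientation, here is a sketch of the jni/ layout that the steps below assume (the include/ folder holds the small logger.h helper that player.c includes; the folder names other than SDL, ffmpeg, and player are illustrative):

jni/
├── Android.mk        top-level makefile that pulls in the sub-makefiles
├── include/          logger.h used by player.c
├── SDL/              SDL2.0 headers, sources, and Android.mk
├── ffmpeg/           FFmpeg headers (include/) and the Android.mk for the prebuilt libffmpeg.so
└── player/           player.c and its Android.mk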

1. Following "Learning SDL from Scratch: Displaying a BMP Image with SDL2.0 on Android", create a project, create the jni directory, create an SDL folder under jni, and copy the SDL headers and sources into it. Alternatively, as in "How to Decode Video Frames with FFmpeg on Android", you can load an already-compiled shared library via PREBUILT_SHARED_LIBRARY. The Android.mk under the SDL folder:

LOCAL_PATH := $(call my-dir)

###########################
#
# SDL shared library
#
###########################

include $(CLEAR_VARS)

LOCAL_MODULE := SDL2

LOCAL_C_INCLUDES := $(LOCAL_PATH)/include

LOCAL_EXPORT_C_INCLUDES := $(LOCAL_C_INCLUDES)

LOCAL_SRC_FILES := \
    $(subst $(LOCAL_PATH)/,, \
    $(wildcard $(LOCAL_PATH)/src/*.c) \
    $(wildcard $(LOCAL_PATH)/src/audio/*.c) \
    $(wildcard $(LOCAL_PATH)/src/audio/android/*.c) \
    $(wildcard $(LOCAL_PATH)/src/audio/dummy/*.c) \
    $(LOCAL_PATH)/src/atomic/SDL_atomic.c \
    $(LOCAL_PATH)/src/atomic/SDL_spinlock.c.arm \
    $(wildcard $(LOCAL_PATH)/src/core/android/*.c) \
    $(wildcard $(LOCAL_PATH)/src/cpuinfo/*.c) \
    $(wildcard $(LOCAL_PATH)/src/dynapi/*.c) \
    $(wildcard $(LOCAL_PATH)/src/events/*.c) \
    $(wildcard $(LOCAL_PATH)/src/file/*.c) \
    $(wildcard $(LOCAL_PATH)/src/haptic/*.c) \
    $(wildcard $(LOCAL_PATH)/src/haptic/dummy/*.c) \
    $(wildcard $(LOCAL_PATH)/src/joystick/*.c) \
    $(wildcard $(LOCAL_PATH)/src/joystick/android/*.c) \
    $(wildcard $(LOCAL_PATH)/src/loadso/dlopen/*.c) \
    $(wildcard $(LOCAL_PATH)/src/power/*.c) \
    $(wildcard $(LOCAL_PATH)/src/power/android/*.c) \
    $(wildcard $(LOCAL_PATH)/src/filesystem/dummy/*.c) \
    $(wildcard $(LOCAL_PATH)/src/render/*.c) \
    $(wildcard $(LOCAL_PATH)/src/render/*/*.c) \
    $(wildcard $(LOCAL_PATH)/src/stdlib/*.c) \
    $(wildcard $(LOCAL_PATH)/src/thread/*.c) \
    $(wildcard $(LOCAL_PATH)/src/thread/pthread/*.c) \
    $(wildcard $(LOCAL_PATH)/src/timer/*.c) \
    $(wildcard $(LOCAL_PATH)/src/timer/unix/*.c) \
    $(wildcard $(LOCAL_PATH)/src/video/*.c) \
    $(wildcard $(LOCAL_PATH)/src/video/android/*.c) \
    $(wildcard $(LOCAL_PATH)/src/test/*.c))

LOCAL_CFLAGS += -DGL_GLEXT_PROTOTYPES

LOCAL_LDLIBS := -ldl -lGLESv1_CM -lGLESv2 -llog -landroid

include $(BUILD_SHARED_LIBRARY)

###########################
#
# SDL static library
#
###########################

#LOCAL_MODULE := SDL2_static
#LOCAL_MODULE_FILENAME := libSDL2
#LOCAL_SRC_FILES += $(LOCAL_PATH)/src/main/android/SDL_android_main.c
#LOCAL_LDLIBS :=
#LOCAL_EXPORT_LDLIBS := -Wl,--undefined=Java_org_libsdl_app_SDLActivity_nativeInit -ldl -lGLESv1_CM -lGLESv2 -llog -landroid
#include $(BUILD_STATIC_LIBRARY)
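If you started from the SDL android-project template, there should already be a top-level jni/Android.mk that simply pulls in every sub-directory makefile. If yours is missing, a minimal sketch (standard ndk-build idiom) looks like this:

# jni/Android.mk: build every module defined in the sub-directories
# (SDL/, ffmpeg/, player/).
include $(call all-subdir-makefiles)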

2. Following "How to Decode Video Frames with FFmpeg on Android", create an ffmpeg folder in the project and copy the FFmpeg headers into it. The Android.mk under the ffmpeg folder:

LOCAL_PATH := $(call my-dir)

include $(CLEAR_VARS)
LOCAL_MODULE := ffmpeg
LOCAL_SRC_FILES := /path/to/build/ffmpeg/libffmpeg.so
include $(PREBUILT_SHARED_LIBRARY)
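Replace /path/to/build/ffmpeg/libffmpeg.so with the actual path of the libffmpeg.so you built earlier. As an optional variation (a sketch, assuming the FFmpeg headers were copied to ffmpeg/include), the prebuilt module can also export its header directory, so any module that lists ffmpeg in LOCAL_SHARED_LIBRARIES inherits the include path automatically:

LOCAL_PATH := $(call my-dir)

include $(CLEAR_VARS)
LOCAL_MODULE := ffmpeg
LOCAL_SRC_FILES := /path/to/build/ffmpeg/libffmpeg.so
# Export the bundled headers to every module that depends on "ffmpeg".
LOCAL_EXPORT_C_INCLUDES := $(LOCAL_PATH)/include
include $(PREBUILT_SHARED_LIBRARY)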

3. Create a player folder for the decoding and display code. The content of player.c:

/*
 * SDL_Lesson.c
 *
 *  Created on: Aug 12, 2014
 *      Author: clarck
 */
#include <jni.h>
#include <android/native_window_jni.h>
#include "SDL.h"
#include "SDL_thread.h"
#include "SDL_events.h"
#include "../include/logger.h"
#include "../ffmpeg/include/libavcodec/avcodec.h"
#include "../ffmpeg/include/libavformat/avformat.h"
#include "../ffmpeg/include/libavutil/pixfmt.h"
#include "../ffmpeg/include/libswscale/swscale.h"

int main(int argc, char *argv[]) {
    char *file_path = argv[1];
    LOGI("file_path:%s", file_path);

    AVFormatContext *pFormatCtx;
    AVCodecContext *pCodecCtx;
    AVCodec *pCodec;
    AVFrame *pFrame, *pFrameYUV;
    AVPacket *packet;
    uint8_t *out_buffer;

    SDL_Texture *bmp = NULL;
    SDL_Window *screen = NULL;
    SDL_Rect rect;
    SDL_Event event;

    static struct SwsContext *img_convert_ctx;

    int videoStream, i, numBytes;
    int ret, got_picture;

    av_register_all();
    pFormatCtx = avformat_alloc_context();

    if (SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER)) {
        LOGE("Could not initialize SDL - %s. \n", SDL_GetError());
        exit(1);
    }

    if (avformat_open_input(&pFormatCtx, file_path, NULL, NULL) != 0) {
        LOGE("can't open the file. \n");
        return -1;
    }

    if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
        LOGE("Couldn't find stream information.\n");
        return -1;
    }

    // Find the first video stream. Initialize to -1 so the "not found"
    // check below actually works.
    videoStream = -1;
    for (i = 0; i < pFormatCtx->nb_streams; i++) {
        if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
            videoStream = i;
            break;
        }
    }
    LOGI("videoStream:%d", videoStream);
    if (videoStream == -1) {
        LOGE("Didn't find a video stream.\n");
        return -1;
    }

    pCodecCtx = pFormatCtx->streams[videoStream]->codec;
    pCodec = avcodec_find_decoder(pCodecCtx->codec_id);

    if (pCodec == NULL) {
        LOGE("Codec not found.\n");
        return -1;
    }

    if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
        LOGE("Could not open codec.\n");
        return -1;
    }

    pFrame = av_frame_alloc();
    pFrameYUV = av_frame_alloc();

    //---------------------------init sdl---------------------------//
    screen = SDL_CreateWindow("My Player Window", SDL_WINDOWPOS_UNDEFINED,
            SDL_WINDOWPOS_UNDEFINED, pCodecCtx->width, pCodecCtx->height,
            SDL_WINDOW_FULLSCREEN | SDL_WINDOW_OPENGL);

    SDL_Renderer *renderer = SDL_CreateRenderer(screen, -1, 0);

    bmp = SDL_CreateTexture(renderer, SDL_PIXELFORMAT_YV12,
            SDL_TEXTUREACCESS_STREAMING, pCodecCtx->width, pCodecCtx->height);
    //-------------------------------------------------------------//

    img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height,
            pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height,
            AV_PIX_FMT_YUV420P, SWS_BICUBIC, NULL, NULL, NULL);

    numBytes = avpicture_get_size(AV_PIX_FMT_YUV420P, pCodecCtx->width,
            pCodecCtx->height);

    out_buffer = (uint8_t *) av_malloc(numBytes * sizeof(uint8_t));
    avpicture_fill((AVPicture *) pFrameYUV, out_buffer, AV_PIX_FMT_YUV420P,
            pCodecCtx->width, pCodecCtx->height);

    rect.x = 0;
    rect.y = 0;
    rect.w = pCodecCtx->width;
    rect.h = pCodecCtx->height;

    int y_size = pCodecCtx->width * pCodecCtx->height;

    packet = (AVPacket *) malloc(sizeof(AVPacket));
    av_new_packet(packet, y_size);

    av_dump_format(pFormatCtx, 0, file_path, 0);

    while (av_read_frame(pFormatCtx, packet) >= 0) {
        if (packet->stream_index == videoStream) {
            ret = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture,
                    packet);
            if (ret < 0) {
                LOGE("decode error.\n");
                return -1;
            }
            LOGI("got_picture:%d", got_picture);
            if (got_picture) {
                sws_scale(img_convert_ctx,
                        (uint8_t const * const *) pFrame->data,
                        pFrame->linesize, 0, pCodecCtx->height, pFrameYUV->data,
                        pFrameYUV->linesize);

                // The pitch arguments are the number of bytes in one row of
                // each YUV plane.
                //SDL_UpdateTexture(bmp, &rect, pFrameYUV->data[0], pFrameYUV->linesize[0]);
                SDL_UpdateYUVTexture(bmp, &rect,
                        pFrameYUV->data[0], pFrameYUV->linesize[0],
                        pFrameYUV->data[1], pFrameYUV->linesize[1],
                        pFrameYUV->data[2], pFrameYUV->linesize[2]);

                SDL_RenderClear(renderer);
                SDL_RenderCopy(renderer, bmp, &rect, &rect);
                SDL_RenderPresent(renderer);
            }
            SDL_Delay(50);
        }
        av_free_packet(packet);

        SDL_PollEvent(&event);
        switch (event.type) {
        case SDL_QUIT:
            SDL_Quit();
            exit(0);
            break;
        default:
            break;
        }
    }

    SDL_DestroyTexture(bmp);

    av_free(out_buffer);
    av_free(pFrameYUV);
    avcodec_close(pCodecCtx);
    avformat_close_input(&pFormatCtx);

    return 0;
}
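A note for readers building against newer FFmpeg releases: avcodec_decode_video2(), avpicture_get_size()/avpicture_fill(), and AVStream->codec used above are deprecated from roughly FFmpeg 3.1 onward. The decode step can be swapped for the send/receive API; a minimal sketch of just that part, reusing the pCodecCtx, pFrame, and packet variables from player.c:

            /* Sketch only: send/receive decode API (FFmpeg >= 3.1), replacing
             * the avcodec_decode_video2() call above. */
            ret = avcodec_send_packet(pCodecCtx, packet);
            if (ret < 0) {
                LOGE("error submitting packet for decoding.\n");
                return -1;
            }
            while (ret >= 0) {
                ret = avcodec_receive_frame(pCodecCtx, pFrame);
                if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF)
                    break;              /* need more input / end of stream */
                if (ret < 0) {
                    LOGE("decode error.\n");
                    return -1;
                }
                /* pFrame now holds a decoded picture: run sws_scale() and the
                 * SDL_UpdateYUVTexture()/SDL_RenderPresent() calls here,
                 * exactly as in the loop above. */
            }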

4. Write the Android makefile under the player folder; its content is as follows:

LOCAL_PATH := $(call my-dir)

include $(CLEAR_VARS)

LOCAL_MODULE := player

SDL_PATH := ../SDL
FFMPEG_PATH := ../ffmpeg

LOCAL_C_INCLUDES := $(LOCAL_PATH)/include
LOCAL_C_INCLUDES += $(LOCAL_PATH)/$(SDL_PATH)/include
LOCAL_C_INCLUDES += $(LOCAL_PATH)/$(FFMPEG_PATH)/include

# Add your application source files here...
LOCAL_SRC_FILES := $(SDL_PATH)/src/main/android/SDL_android_main.c
LOCAL_SRC_FILES += player.c

LOCAL_SHARED_LIBRARIES := SDL2
LOCAL_SHARED_LIBRARIES += ffmpeg

LOCAL_LDLIBS := -lGLESv1_CM -lGLESv2 -llog

include $(BUILD_SHARED_LIBRARY)
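ndk-build also reads jni/Application.mk; the SDL template normally provides one, but if you have to write it yourself, a sketch with illustrative values (match APP_ABI to the ABI your libffmpeg.so was built for, and APP_PLATFORM to your minimum SDK) might look like this:

# jni/Application.mk -- illustrative values, adjust to your own build
APP_ABI := armeabi-v7a
APP_PLATFORM := android-10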

5. Modify SDLActivity.java so that it loads libffmpeg.so and libplayer.so, and change the path of the file to be decoded.

    // Load the .so
    static {
        System.loadLibrary("ffmpeg");
        System.loadLibrary("SDL2");
        //System.loadLibrary("SDL2_image");
        //System.loadLibrary("SDL2_mixer");
        //System.loadLibrary("SDL2_net");
        //System.loadLibrary("SDL2_ttf");
        System.loadLibrary("player");
    }
/**
    Simple nativeInit() runnable
*/
class SDLMain implements Runnable {
    @Override
    public void run() {
        // Runs SDL_main()
        String sdcard = Environment.getExternalStorageDirectory().getAbsolutePath();
        SDLActivity.nativeInit("/sdcard/a.mp4");

        //Log.v("SDL", "SDL thread terminated");
    }
}
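The sdcard variable above is assigned but never used. If you prefer not to hard-code /sdcard, a small sketch that builds the path from it instead (this assumes a.mp4 sits at the root of external storage and that the app has permission to read external storage):

        // Sketch: derive the path from the external-storage root instead of
        // hard-coding "/sdcard", which is not guaranteed on every device.
        String sdcard = Environment.getExternalStorageDirectory().getAbsolutePath();
        SDLActivity.nativeInit(sdcard + "/a.mp4");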

Project layout screenshot: (image not reproduced here)

Screenshot of the running result: (image not reproduced here)