如何在Android用FFmpeg解码图像
2个回答
展开全部
fetch code
用git把ffmpeg(我用的github上FFmpeg-Android)和x264(vlc的官方git)分别都clone下来。
build x264
在x264目录里面写一个myconfig.sh(其实直接把这些命令打在终端也行,问题是有的时候需要改来改去,不如写个文件改起来方便)
export NDK=/opt/android-ndk
export PREBUILT=$NDK/toolchains/arm-linux-androideabi-4.6/prebuilt
export PLATFORM=$NDK/platforms/android-14/arch-arm
export PREFIX=/home/mingkai/softwares/x264
./configure \
--enable-pic \
--enable-static \
--enable-shared \
--disable-asm \
--disable-cli \
--host=arm-linux \
--cross-prefix="/opt/android-ndk/toolchains/arm-linux-androideabi-4.6/prebuilt/linux-x86_64/bin/arm-linux-androideabi-"
\
--sysroot=$PLATFORM \
--prefix=$PREFIX
其中cross-prefix貌似直接用"arm-linux-androideabi-"也可以(前提是工具链的bin目录已加入PATH)。
然后可以make和make install了。(记得改PREFIX等环境变量)
build FFmpeg
这个是从github上FFmpeg-Android里面的FFmpeg-Android.sh里面改了改一些参数。
最主要的是FFMPEG_FLAGS,里面都是一些关于FFmpeg的参数设定,尤其是是否启用encoder/decoder之类的。
还有一点就是在下面EXTRA_CFLAGS里面加上
“-I/path/to/x264/include”
EXTRA_LDFLAGS里面加上
“-L/path/to/x264/lib -lx264”。
用git把ffmpeg(我用的github上FFmpeg-Android)和x264(vlc的官方git)分别都clone下来。
build x264
在x264目录里面写一个myconfig.sh(其实直接把这些命令打在终端也行,问题是有的时候需要改来改去,不如写个文件改起来方便)
export NDK=/opt/android-ndk
export PREBUILT=$NDK/toolchains/arm-linux-androideabi-4.6/prebuilt
export PLATFORM=$NDK/platforms/android-14/arch-arm
export PREFIX=/home/mingkai/softwares/x264
./configure \
--enable-pic \
--enable-static \
--enable-shared \
--disable-asm \
--disable-cli \
--host=arm-linux \
--cross-prefix="/opt/android-ndk/toolchains/arm-linux-androideabi-4.6/prebuilt/linux-x86_64/bin/arm-linux-androideabi-"
\
--sysroot=$PLATFORM \
--prefix=$PREFIX
其中cross-prefix貌似直接用"arm-linux-androideabi-"也可以(前提是工具链的bin目录已加入PATH)。
然后可以make和make install了。(记得改PREFIX等环境变量)
build FFmpeg
这个是从github上FFmpeg-Android里面的FFmpeg-Android.sh里面改了改一些参数。
最主要的是FFMPEG_FLAGS,里面都是一些关于FFmpeg的参数设定,尤其是是否启用encoder/decoder之类的。
还有一点就是在下面EXTRA_CFLAGS里面加上
“-I/path/to/x264/include”
EXTRA_LDFLAGS里面加上
“-L/path/to/x264/lib -lx264”。
展开全部
解决方案:
(以下代码在网页抓取时被打乱,这里按原文件结构整理还原,个别头文件路径请对照自己的工程确认)

/*
 * SDL_Lesson.c
 *
 * Created on: Aug 12, 2014
 * Author: clarck
 */
#include <jni.h>
#include <android/native_window_jni.h>
#include "SDL.h"
#include "SDL_thread.h"
#include "SDL_events.h"
#include "../include/logger.h"
#include "../ffmpeg/include/libavcodec/avcodec.h"
#include "../ffmpeg/include/libavformat/avformat.h"
#include "../ffmpeg/include/libavutil/pixfmt.h"
#include "../ffmpeg/include/libswscale/swscale.h"
#include "../ffmpeg/include/libswresample/swresample.h"

#define SDL_AUDIO_BUFFER_SIZE 1024
#define MAX_AUDIO_SIZE (5 * 16 * 1024)
#define MAX_VIDEO_SIZE (5 * 256 * 1024)
#define FF_ALLOC_EVENT (SDL_USEREVENT)
#define FF_REFRESH_EVENT (SDL_USEREVENT + 1)
#define FF_QUIT_EVENT (SDL_USEREVENT + 2)
#define VIDEO_PICTURE_QUEUE_SIZE 1
#define AVCODEC_MAX_AUDIO_FRAME_SIZE 192000 /* 1 second of 48khz 32bit audio */

typedef struct PacketQueue {
    AVPacketList *first_pkt, *last_pkt;
    int nb_packets;
    int size;
    SDL_mutex *mutex;
    SDL_cond *cond;
} PacketQueue;

/* 创建一个VideoPicture结构体用来保存解码出来的图像 */
typedef struct VideoPicture {
    SDL_Window *screen;
    SDL_Renderer *renderer;
    SDL_Texture *bmp;
    AVFrame *rawdata;
    int width, height; /* source height & width */
    int allocated;
} VideoPicture;

typedef struct VideoState {
    char filename[1024];
    AVFormatContext *ic;
    AVIOContext *io_ctx;
    int videoStream, audioStream;
    AVStream *audio_st;
    AVFrame *audio_frame;
    PacketQueue audioq;
    unsigned int audio_buf_size;
    unsigned int audio_buf_index;
    AVPacket audio_pkt;
    uint8_t *audio_pkt_data;
    int audio_pkt_size;
    uint8_t *audio_buf;
    DECLARE_ALIGNED(16, uint8_t, audio_buf2)[AVCODEC_MAX_AUDIO_FRAME_SIZE * 4];
    enum AVSampleFormat audio_src_fmt;
    enum AVSampleFormat audio_tgt_fmt;
    int audio_src_channels;
    int audio_tgt_channels;
    int64_t audio_src_channel_layout;
    int64_t audio_tgt_channel_layout;
    int audio_src_freq;
    int audio_tgt_freq;
    struct SwrContext *swr_ctx;

    SDL_Thread *parse_tid;
    SDL_Thread *audio_tid;
    SDL_Thread *video_tid;

    AVStream *video_st;
    PacketQueue videoq;
    VideoPicture pictq[VIDEO_PICTURE_QUEUE_SIZE];
    int pictq_size, pictq_rindex, pictq_windex;
    SDL_mutex *pictq_mutex;
    SDL_cond *pictq_cond;
    struct SwsContext *sws_ctx;

    int quit;
} VideoState;

VideoState *global_video_state;
#include "
(以下代码在网页抓取时被打乱,这里按原文件结构整理还原,个别头文件路径请对照自己的工程确认)

/*
 * SDL_Lesson.c
 *
 * Created on: Aug 12, 2014
 * Author: clarck
 */
#include <jni.h>
#include <android/native_window_jni.h>
#include "SDL.h"
#include "SDL_thread.h"
#include "SDL_events.h"
#include "../include/logger.h"
#include "../ffmpeg/include/libavcodec/avcodec.h"
#include "../ffmpeg/include/libavformat/avformat.h"
#include "../ffmpeg/include/libavutil/pixfmt.h"
#include "../ffmpeg/include/libswscale/swscale.h"
#include "../ffmpeg/include/libswresample/swresample.h"

#define SDL_AUDIO_BUFFER_SIZE 1024
#define MAX_AUDIO_SIZE (5 * 16 * 1024)
#define MAX_VIDEO_SIZE (5 * 256 * 1024)
#define FF_ALLOC_EVENT (SDL_USEREVENT)
#define FF_REFRESH_EVENT (SDL_USEREVENT + 1)
#define FF_QUIT_EVENT (SDL_USEREVENT + 2)
#define VIDEO_PICTURE_QUEUE_SIZE 1
#define AVCODEC_MAX_AUDIO_FRAME_SIZE 192000 /* 1 second of 48khz 32bit audio */

typedef struct PacketQueue {
    AVPacketList *first_pkt, *last_pkt;
    int nb_packets;
    int size;
    SDL_mutex *mutex;
    SDL_cond *cond;
} PacketQueue;

/* 创建一个VideoPicture结构体用来保存解码出来的图像 */
typedef struct VideoPicture {
    SDL_Window *screen;
    SDL_Renderer *renderer;
    SDL_Texture *bmp;
    AVFrame *rawdata;
    int width, height; /* source height & width */
    int allocated;
} VideoPicture;

typedef struct VideoState {
    char filename[1024];
    AVFormatContext *ic;
    AVIOContext *io_ctx;
    int videoStream, audioStream;
    AVStream *audio_st;
    AVFrame *audio_frame;
    PacketQueue audioq;
    unsigned int audio_buf_size;
    unsigned int audio_buf_index;
    AVPacket audio_pkt;
    uint8_t *audio_pkt_data;
    int audio_pkt_size;
    uint8_t *audio_buf;
    DECLARE_ALIGNED(16, uint8_t, audio_buf2)[AVCODEC_MAX_AUDIO_FRAME_SIZE * 4];
    enum AVSampleFormat audio_src_fmt;
    enum AVSampleFormat audio_tgt_fmt;
    int audio_src_channels;
    int audio_tgt_channels;
    int64_t audio_src_channel_layout;
    int64_t audio_tgt_channel_layout;
    int audio_src_freq;
    int audio_tgt_freq;
    struct SwrContext *swr_ctx;

    SDL_Thread *parse_tid;
    SDL_Thread *audio_tid;
    SDL_Thread *video_tid;

    AVStream *video_st;
    PacketQueue videoq;
    VideoPicture pictq[VIDEO_PICTURE_QUEUE_SIZE];
    int pictq_size, pictq_rindex, pictq_windex;
    SDL_mutex *pictq_mutex;
    SDL_cond *pictq_cond;
    struct SwsContext *sws_ctx;

    int quit;
} VideoState;

VideoState *global_video_state;
本回答被提问者和网友采纳
已赞过
已踩过<
评论
收起
你对这个回答的评价是?
推荐律师服务:
若未解决您的问题,请您详细描述您的问题,通过百度律临进行免费专业咨询