separate initialization of audio and video decoders
havlenapetr committed Jul 16, 2010
1 parent 4c82385 commit 480af32
Showing 7 changed files with 254 additions and 129 deletions.
202 changes: 112 additions & 90 deletions jni/ffmpeg/android/com_media_ffmpeg_android_FFMpegPlayerAndroid.cpp
@@ -23,12 +23,14 @@ struct ffmpeg_fields_t {
} ffmpeg_fields;

struct ffmpeg_video_t {
bool initzialized;
int stream;
AVCodecContext *codec_ctx;
AVCodec *codec;
} ffmpeg_video;

struct ffmpeg_audio_t {
bool initzialized;
int stream;
AVCodecContext *codec_ctx;
AVCodec *codec;
@@ -102,12 +104,26 @@ static void FFMpegPlayerAndroid_handleErrors(void* ptr, int level, const char* f
}
}

static int FFMpegPlayerAndroid_initAudio(JNIEnv *env) {
static void FFMpegPlayerAndroid_enableErrorCallback(JNIEnv *env, jobject obj) {
av_log_set_callback(FFMpegPlayerAndroid_handleErrors);
}

static jobject FFMpegPlayerAndroid_initAudio(JNIEnv *env, jobject obj, jobject pAVFormatContext) {
ffmpeg_audio.stream = -1;
for (int i = 0; i < ffmpeg_fields.pFormatCtx->nb_streams; i++) {
if (ffmpeg_fields.pFormatCtx->streams[i]->codec->codec_type == CODEC_TYPE_AUDIO) {
ffmpeg_audio.stream = i;
}
if(ffmpeg_audio.stream != -1) {
break;
}
}

if (ffmpeg_audio.stream == -1) {
jniThrowException(env,
"java/io/IOException",
"Didn't find a audio stream");
return -1;
return NULL;
}
// Get a pointer to the codec context for the video stream
ffmpeg_audio.codec_ctx = ffmpeg_fields.pFormatCtx->streams[ffmpeg_audio.stream]->codec;
@@ -116,87 +132,68 @@ static int FFMpegPlayerAndroid_initAudio(JNIEnv *env) {
jniThrowException(env,
"java/io/IOException",
"Couldn't find audio codec!");
return -1; // Codec not found
return NULL; // Codec not found
}

// Open codec
if (avcodec_open(ffmpeg_audio.codec_ctx, ffmpeg_audio.codec) < 0) {
jniThrowException(env,
"java/io/IOException",
"Could not open audio codec");
return -1; // Could not open codec
return NULL; // Could not open codec
}
return 0;
ffmpeg_audio.initzialized = true;
return AVCodecContext_create(env, ffmpeg_audio.codec_ctx);
}

static int FFMpegPlayerAndroid_initVideo(JNIEnv *env) {
static jobject FFMpegPlayerAndroid_initVideo(JNIEnv *env, jobject obj, jobject pAVFormatContext) {
// Find the first video stream
ffmpeg_video.stream = -1;
for (int i = 0; i < ffmpeg_fields.pFormatCtx->nb_streams; i++) {
if (ffmpeg_fields.pFormatCtx->streams[i]->codec->codec_type == CODEC_TYPE_VIDEO) {
ffmpeg_video.stream = i;
}
if(ffmpeg_video.stream != -1) {
break;
}
}

if (ffmpeg_video.stream == -1) {
jniThrowException(env,
"java/io/IOException",
"Didn't find a video stream");
return -1;
return NULL;
}

// Get a pointer to the codec context for the video stream
ffmpeg_video.codec_ctx = ffmpeg_fields.pFormatCtx->streams[ffmpeg_video.stream]->codec;
ffmpeg_video.codec = avcodec_find_decoder(ffmpeg_video.codec_ctx->codec_id);
if (ffmpeg_video.codec == NULL) {
jniThrowException(env,
"java/io/IOException",
"Couldn't find video codec!");
return -1; // Codec not found
return NULL; // Codec not found
}

// Open codec
if (avcodec_open(ffmpeg_video.codec_ctx, ffmpeg_video.codec) < 0) {
jniThrowException(env,
"java/io/IOException",
"Could not open video codec");
return -1; // Could not open codec
return NULL; // Could not open codec
}
return 0;
}

static jobject FFMpegPlayerAndroid_init(JNIEnv *env, jobject obj, jobject pAVFormatContext) {
av_log_set_callback(FFMpegPlayerAndroid_handleErrors);

ffmpeg_fields.pFormatCtx = (AVFormatContext *) env->GetIntField(pAVFormatContext, jni_fields.avformatcontext);

// Find the first video stream
ffmpeg_video.stream = -1;
ffmpeg_audio.stream = -1;
for (int i = 0; i < ffmpeg_fields.pFormatCtx->nb_streams; i++) {
if (ffmpeg_fields.pFormatCtx->streams[i]->codec->codec_type == CODEC_TYPE_VIDEO) {
ffmpeg_video.stream = i;
}
if (ffmpeg_fields.pFormatCtx->streams[i]->codec->codec_type == CODEC_TYPE_AUDIO) {
ffmpeg_audio.stream = i;
}
if(ffmpeg_audio.stream != -1 && ffmpeg_video.stream != -1) {
break;
}
}

//__android_log_print(ANDROID_LOG_INFO, TAG, "audio: %i, video: %i", ffmpeg_video.stream, ffmpeg_audio.stream);

if(FFMpegPlayerAndroid_initAudio(env) != 0) {
return NULL;
}

if(FFMpegPlayerAndroid_initVideo(env) != 0) {
return NULL;
}

// Allocate video frame
ffmpeg_fields.pFrame = avcodec_alloc_frame();

int w = ffmpeg_video.codec_ctx->width;
int h = ffmpeg_video.codec_ctx->height;
ffmpeg_fields.img_convert_ctx = sws_getContext(w, h, ffmpeg_video.codec_ctx->pix_fmt, w, h,
PIX_FMT_RGB565, SWS_POINT, NULL, NULL, NULL);

PIX_FMT_RGB565, SWS_POINT, NULL, NULL, NULL);

ffmpeg_video.initzialized = true;
return AVCodecContext_create(env, ffmpeg_video.codec_ctx);
}

static AVFrame *FFMpegPlayerAndroid_createFrame(JNIEnv *env, jobject bitmap) {
void* pixels;
AVFrame* pFrame;
@@ -227,11 +224,48 @@ static AVFrame *FFMpegPlayerAndroid_createFrame(JNIEnv *env, jobject bitmap) {
return pFrame;
}

static int FFMpegPlayerAndroid_processAudio(JNIEnv *env, AVPacket *packet, int16_t *samples, int samples_size) {
int size = FFMAX(packet->size * sizeof(*samples), samples_size);
if(samples_size < size) {
__android_log_print(ANDROID_LOG_INFO, TAG, "resizing audio buffer from %i to %i", samples_size, size);
av_free(samples);
samples_size = size;
samples = (int16_t *) av_malloc(samples_size);
}

int len = avcodec_decode_audio3(ffmpeg_audio.codec_ctx, samples, &samples_size, packet);
if(AndroidAudioTrack_write(samples, samples_size) <= 0) {
jniThrowException(env,
"java/io/IOException",
"Couldn't write bytes to audio track");
return -1;
}
return 0;
}

static int FFMpegPlayerAndroid_processVideo(JNIEnv *env, jobject obj, AVPacket *packet, AVFrame *pFrameRGB) {
int frameFinished;

// Decode video frame
avcodec_decode_video(ffmpeg_video.codec_ctx, ffmpeg_fields.pFrame, &frameFinished,
packet->data, packet->size);

// Did we get a video frame?
if (frameFinished) {
// Convert the image from its native format to RGB
sws_scale(ffmpeg_fields.img_convert_ctx, ffmpeg_fields.pFrame->data, ffmpeg_fields.pFrame->linesize, 0,
ffmpeg_video.codec_ctx->height, pFrameRGB->data, pFrameRGB->linesize);
env->CallVoidMethod(obj, jni_fields.clb_onVideoFrame);
return 0;
}
return -1;
}

static void FFMpegPlayerAndroid_play(JNIEnv *env, jobject obj, jobject bitmap, jobject audioTrack) {
AVPacket packet;
int result = -1;
int frameFinished;
int audio_sample_size = AVCODEC_MAX_AUDIO_FRAME_SIZE;
int samples_size = AVCODEC_MAX_AUDIO_FRAME_SIZE;
int16_t* samples;

// Allocate an AVFrame structure
AVFrame *pFrameRGB = FFMpegPlayerAndroid_createFrame(env, bitmap);
@@ -242,12 +276,14 @@ static void FFMpegPlayerAndroid_play(JNIEnv *env, jobject obj, jobject bitmap, j
return;
}

int16_t *samples = (int16_t *) av_malloc(audio_sample_size);
if(AndroidAudioTrack_register(env, audioTrack) != ANDROID_AUDIOTRACK_RESULT_SUCCESS) {
jniThrowException(env,
"java/io/IOException",
"Couldn't register audio track");
return;
if(ffmpeg_audio.initzialized) {
samples = (int16_t *) av_malloc(samples_size);
if(AndroidAudioTrack_register(env, audioTrack) != ANDROID_AUDIOTRACK_RESULT_SUCCESS) {
jniThrowException(env,
"java/io/IOException",
"Couldn't register audio track");
return;
}
}

status = STATE_PLAYING;
@@ -264,46 +300,28 @@ static void FFMpegPlayerAndroid_play(JNIEnv *env, jobject obj, jobject bitmap, j
}

// Is this a packet from the video stream?
if (packet.stream_index == ffmpeg_video.stream) {
// Decode video frame
avcodec_decode_video(ffmpeg_video.codec_ctx, ffmpeg_fields.pFrame, &frameFinished,
packet.data, packet.size);

// Did we get a video frame?
if (frameFinished) {
// Convert the image from its native format to RGB
sws_scale(ffmpeg_fields.img_convert_ctx, ffmpeg_fields.pFrame->data, ffmpeg_fields.pFrame->linesize, 0,
ffmpeg_video.codec_ctx->height, pFrameRGB->data, pFrameRGB->linesize);
env->CallVoidMethod(obj, jni_fields.clb_onVideoFrame);
if (packet.stream_index == ffmpeg_video.stream &&
ffmpeg_video.initzialized) {
if(FFMpegPlayerAndroid_processVideo(env, obj, &packet, pFrameRGB) < 0) {
__android_log_print(ANDROID_LOG_ERROR, TAG, "Frame wasn't finished by video decoder");
}
} else if (packet.stream_index == ffmpeg_audio.stream) {
/*
int sample_size = FFMAX(packet.size * sizeof(*samples), audio_sample_size);
if(audio_sample_size < sample_size) {
__android_log_print(ANDROID_LOG_INFO, TAG, "resizing audio buffer from %i to %i", audio_sample_size, sample_size);
av_free(samples);
audio_sample_size = sample_size;
samples = (int16_t *) av_malloc(sample_size);
}
*/
int out_size = audio_sample_size;
int len = avcodec_decode_audio3(ffmpeg_audio.codec_ctx, samples, &out_size, &packet);
if((result = AndroidAudioTrack_write(samples, out_size)) <= 0) {
jniThrowException(env,
"java/io/IOException",
"Couldn't write bytes to audio track");
return;
} else if (packet.stream_index == ffmpeg_audio.stream &&
ffmpeg_audio.initzialized) {
if(FFMpegPlayerAndroid_processAudio(env, &packet, samples, samples_size) < 0 ) {
return; // exception occured so return to java
}
}

// Free the packet that was allocated by av_read_frame
av_free_packet(&packet);
}

if(AndroidAudioTrack_unregister() != ANDROID_AUDIOTRACK_RESULT_SUCCESS) {
jniThrowException(env,
"java/io/IOException",
"Couldn't unregister audio track");
if(ffmpeg_audio.initzialized) {
if(AndroidAudioTrack_unregister() != ANDROID_AUDIOTRACK_RESULT_SUCCESS) {
jniThrowException(env,
"java/io/IOException",
"Couldn't unregister audio track");
}
}

av_free( samples );
@@ -342,21 +360,20 @@ static void FFMpegPlayerAndroid_stop(JNIEnv *env, jobject object) {
}

static jobject FFMpegPlayerAndroid_setInputFile(JNIEnv *env, jobject obj, jstring filePath) {
AVFormatContext *pFormatCtx;
const char *_filePath = env->GetStringUTFChars(filePath, NULL);
// Open video file
if(av_open_input_file(&pFormatCtx, _filePath, NULL, 0, NULL) != 0) {
if(av_open_input_file(&ffmpeg_fields.pFormatCtx, _filePath, NULL, 0, NULL) != 0) {
jniThrowException(env,
"java/io/IOException",
"Can't create input file");
}
// Retrieve stream information
if(av_find_stream_info(pFormatCtx)<0) {
if(av_find_stream_info(ffmpeg_fields.pFormatCtx) < 0) {
jniThrowException(env,
"java/io/IOException",
"Couldn't find stream information");
}
return AVFormatContext_create(env, pFormatCtx);
return AVFormatContext_create(env, ffmpeg_fields.pFormatCtx);
}

static void FFMpegPlayerAndroid_setSurface(JNIEnv *env, jobject obj, jobject surface) {
@@ -385,7 +402,9 @@ static void FFMpegPlayerAndroid_release(JNIEnv *env, jobject obj) {
* JNI registration.
*/
static JNINativeMethod methods[] = {
{ "nativeInit", "(Lcom/media/ffmpeg/FFMpegAVFormatContext;)Lcom/media/ffmpeg/FFMpegAVCodecContext;", (void*) FFMpegPlayerAndroid_init},
{ "nativeInitAudio", "(Lcom/media/ffmpeg/FFMpegAVFormatContext;)Lcom/media/ffmpeg/FFMpegAVCodecContext;", (void*) FFMpegPlayerAndroid_initAudio},
{ "nativeInitVideo", "(Lcom/media/ffmpeg/FFMpegAVFormatContext;)Lcom/media/ffmpeg/FFMpegAVCodecContext;", (void*) FFMpegPlayerAndroid_initVideo},
{ "nativeEnableErrorCallback", "()V", (void*) FFMpegPlayerAndroid_enableErrorCallback},
{ "nativeSetInputFile", "(Ljava/lang/String;)Lcom/media/ffmpeg/FFMpegAVFormatContext;", (void*) FFMpegPlayerAndroid_setInputFile },
{ "nativePause", "(Z)Z", (void*) FFMpegPlayerAndroid_pause},
{ "nativePlay", "(Landroid/graphics/Bitmap;Landroid/media/AudioTrack;)V", (void*) FFMpegPlayerAndroid_play },
@@ -395,6 +414,9 @@ static JNINativeMethod methods[] = {
};

int register_android_media_FFMpegPlayerAndroid(JNIEnv *env) {
ffmpeg_audio.initzialized = false;
ffmpeg_video.initzialized = false;

jclass clazz = env->FindClass("android/view/Surface");
if(clazz == NULL) {
__android_log_print(ANDROID_LOG_ERROR, TAG, "can't load native surface");
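For orientation, here is a minimal Java-side sketch of how the split native entry points registered above might be declared and driven. It is not part of this commit: the method declarations are inferred from the JNI signatures in the methods[] table, while the wrapper class name, the prepareAndPlay() helper, and its call order are assumptions.

    import java.io.IOException;

    import android.graphics.Bitmap;
    import android.media.AudioTrack;

    import com.media.ffmpeg.FFMpegAVCodecContext;
    import com.media.ffmpeg.FFMpegAVFormatContext;

    // Hypothetical wrapper; the real FFMpegPlayerAndroid class is not shown in this hunk.
    public class FFMpegPlayerAndroidSketch {

        // Declarations mirror the JNINativeMethod signatures registered above.
        private native FFMpegAVFormatContext nativeSetInputFile(String filePath) throws IOException;
        private native FFMpegAVCodecContext nativeInitAudio(FFMpegAVFormatContext ctx) throws IOException;
        private native FFMpegAVCodecContext nativeInitVideo(FFMpegAVFormatContext ctx) throws IOException;
        private native void nativeEnableErrorCallback();
        private native void nativePlay(Bitmap bitmap, AudioTrack audioTrack);

        void prepareAndPlay(String path, Bitmap frameBitmap, AudioTrack track) throws IOException {
            FFMpegAVFormatContext format = nativeSetInputFile(path);
            nativeEnableErrorCallback();
            try {
                // Audio setup is now separate; if it fails, the native side leaves
                // ffmpeg_audio.initzialized == false and playback can proceed video-only.
                nativeInitAudio(format);
            } catch (IOException e) {
                // no audio stream or codec in this file
            }
            nativeInitVideo(format);
            nativePlay(frameBitmap, track);
        }
    }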
36 changes: 30 additions & 6 deletions src/com/media/ffmpeg/FFMpeg.java
@@ -5,15 +5,25 @@
import java.io.IOException;

import android.content.Context;
import android.util.Log;

import com.media.ffmpeg.android.FFMpegPlayerAndroid;
import com.media.ffmpeg.config.FFMpegConfig;


public class FFMpeg {

public static final String[] LIBS = new String[] {"jniaudio", "ffmpeg_jni"};
public static final String[] EXTENSIONS = new String[] {".mp4", ".flv", ".avi", ".wmv"};
public static final String[] LIBS = new String[] {
"jniaudio", // used for access to android native AudioTrack class
"ffmpeg_jni" // ffmpeg libs compiled to jni lib
};

public static final String[] EXTENSIONS = new String[] {
".mp4",
".flv",
".avi",
".wmv"
};

private Thread mThread;
private IFFMpegListener mListener;
@@ -22,17 +32,31 @@ public class FFMpeg {
private FFMpegFile mOutputFile;
private boolean mConverting;

public FFMpeg() {
loadLibs();
public FFMpeg() throws FFMpegException {
if(!loadLibs()) {
throw new FFMpegException(FFMpegException.LEVEL_FATAL, "Couldn't load native libs");
}
native_avcodec_register_all();
native_av_register_all();
mConverting = false;
}

private void loadLibs() {
/**
* loads all native libraries
* @return true if all libraries was loaded, otherwise return false
*/
private boolean loadLibs() {
boolean err = false;
for(int i=0;i<LIBS.length;i++) {
System.loadLibrary(LIBS[i]);
try {
System.loadLibrary(LIBS[i]);
} catch(UnsatisfiedLinkError e) {
// fatal error, we can't load some our libs
Log.d("FFMpeg", "Couldn't load lib: " + LIBS[i] + " - " + e.getMessage());
err = true;
}
}
return !err;
}

public FFMpegUtils getUtils() {
[Diffs for the remaining 5 changed files are not shown.]
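Likewise, a short caller-side sketch of the new constructor contract in FFMpeg.java: new FFMpeg() now fails loudly with FFMpegException when a native library cannot be loaded, instead of letting a later native call crash. The helper class, the log tag, and the assumption that FFMpegException lives in com.media.ffmpeg are illustrative, not taken from the diff.

    import android.util.Log;

    import com.media.ffmpeg.FFMpeg;
    import com.media.ffmpeg.FFMpegException; // assumed to be in the same package as FFMpeg

    public class PlayerBootstrap {

        // Returns a ready FFMpeg instance, or null if the native libs are unavailable.
        static FFMpeg createFFMpeg() {
            try {
                // Constructor loads "jniaudio" and "ffmpeg_jni", then registers codecs and formats.
                return new FFMpeg();
            } catch (FFMpegException e) {
                // LEVEL_FATAL: System.loadLibrary failed for this device/ABI.
                Log.e("PlayerBootstrap", "Couldn't initialize FFMpeg: " + e.getMessage());
                return null;
            }
        }
    }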
