
Commit f2d0ff2

started audio support

havlenapetr committed Jul 14, 2010
1 parent 4955bdf commit f2d0ff2
Showing 3 changed files with 60 additions and 52 deletions.
jni/ffmpeg/android/com_media_ffmpeg_android_FFMpegPlayerAndroid.cpp (57 changes: 26 additions & 31 deletions)

@@ -15,6 +15,7 @@ extern "C" {

struct ffmpeg_fields_t {
int videoStream;
int audioStream;
AVCodecContext *pCodecCtx;
AVFrame *pFrame;
AVCodec *pCodec;
@@ -45,40 +46,23 @@ const char *FFMpegPlayerAndroid_getSignature() {
return "Lcom/media/ffmpeg/android/FFMpegPlayerAndroid;";
}

extern "C" {

static void FFMpegPlayerAndroid_saveFrame(AVFrame *pFrame, int width, int height, int iFrame) {
FILE *pFile;
char szFilename[100];
int y;

// Open file
sprintf(szFilename, "/sdcard/frame%d.ppm", iFrame);
pFile=fopen(szFilename, "wb");
if(pFile==NULL)
return;

// Write header
fprintf(pFile, "P6\n%d %d\n255\n", width, height);

// Write pixel data
for(y=0; y<height; y++)
fwrite(pFrame->data[0]+y*pFrame->linesize[0], 1, width*3, pFile);

// Close file
fclose(pFile);
}

static jintArray FFMpegPlayerAndroid_init(JNIEnv *env, jobject obj, jobject pAVFormatContext) {
ffmpeg_fields.pFormatCtx = (AVFormatContext *) env->GetIntField(pAVFormatContext, fields.avformatcontext);

// Find the first video stream
ffmpeg_fields.videoStream = -1;
for (int i = 0; i < ffmpeg_fields.pFormatCtx->nb_streams; i++)
ffmpeg_fields.audioStream = -1;
for (int i = 0; i < ffmpeg_fields.pFormatCtx->nb_streams; i++) {
if (ffmpeg_fields.pFormatCtx->streams[i]->codec->codec_type == CODEC_TYPE_VIDEO) {
ffmpeg_fields.videoStream = i;
}
if (ffmpeg_fields.pFormatCtx->streams[i]->codec->codec_type == CODEC_TYPE_AUDIO) {
ffmpeg_fields.audioStream = i;
}
if(ffmpeg_fields.audioStream != -1 && ffmpeg_fields.videoStream != -1) {
break;
}
}

if (ffmpeg_fields.videoStream == -1) {
jniThrowException(env,
@@ -128,12 +112,10 @@ static AVFrame *FFMpegPlayerAndroid_createFrame(JNIEnv *env, jobject bitmap) {
int result = -1;

if ((result = AndroidBitmap_getInfo(env, bitmap, &info)) < 0) {
__android_log_print(ANDROID_LOG_ERROR, TAG, "AndroidBitmap_getInfo() failed ! error=%d", result);
return NULL;
}

if ((result = AndroidBitmap_lockPixels(env, bitmap, &pixels)) < 0) {
__android_log_print(ANDROID_LOG_ERROR, TAG, "AndroidBitmap_lockPixels() failed ! error=%d", result);
return NULL;
}

@@ -153,6 +135,14 @@ static AVFrame *FFMpegPlayerAndroid_createFrame(JNIEnv *env, jobject bitmap) {
return pFrame;
}

// Decode the audio carried by this packet into the pre-allocated samples buffer
static void FFMpegPlayerAndroid_processAudio(AVPacket *packet, int16_t *samples) {
	// avcodec_decode_audio3 reads the output buffer size from *frame_size_ptr,
	// so out_size must be set to the size of the samples buffer before the call;
	// on return it holds the number of bytes actually decoded
	int out_size = AVCODEC_MAX_AUDIO_FRAME_SIZE;
	int len = avcodec_decode_audio3(ffmpeg_fields.pCodecCtx, samples,
			&out_size, packet);
	// a negative len would indicate a decoding error; the result is unused so far
}
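
The decoded samples never leave the native side in this commit; the Java class below already gets an onAudioBuffer(byte[]) callback, so a natural follow-up is a small JNI bridge like this sketch. The helper name and the cached method id fields.clb_onAudioBuffer are assumptions, not part of the commit, and decoding presumably also needs a dedicated audio codec context rather than the shared ffmpeg_fields.pCodecCtx.

static void FFMpegPlayerAndroid_forwardAudio(JNIEnv *env, jobject obj,
		int16_t *samples, int out_size) {
	if (out_size <= 0) {
		return; // nothing was decoded for this packet
	}
	// copy the decoded PCM bytes into a Java byte[] and hand it to
	// FFMpegPlayerAndroid.onAudioBuffer(byte[]) via a cached method id (assumed)
	jbyteArray buffer = env->NewByteArray(out_size);
	if (buffer == NULL) {
		return; // allocation failed
	}
	env->SetByteArrayRegion(buffer, 0, out_size, (jbyte *) samples);
	env->CallVoidMethod(obj, fields.clb_onAudioBuffer, buffer);
	env->DeleteLocalRef(buffer);
}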

static void FFMpegPlayerAndroid_play(JNIEnv *env, jobject obj, jobject bitmap) {
AVPacket packet;
int result = -1;
@@ -163,10 +153,13 @@ static void FFMpegPlayerAndroid_play(JNIEnv *env, jobject obj, jobject bitmap) {
if (pFrameRGB == NULL) {
jniThrowException(env,
"java/io/IOException",
"Could allocate an AVFrame structure");
"Couldn't allocate an AVFrame structure");
return;
}

int16_t *samples = (int16_t *) av_malloc(AVCODEC_MAX_AUDIO_FRAME_SIZE);
memset(samples, 0, AVCODEC_MAX_AUDIO_FRAME_SIZE);

status = STATE_PLAYING;
while ((result = av_read_frame(ffmpeg_fields.pFormatCtx, &packet)) >= 0 &&
status == STATE_PLAYING) {
@@ -175,20 +168,24 @@ static void FFMpegPlayerAndroid_play(JNIEnv *env, jobject obj, jobject bitmap) {
// Decode video frame
avcodec_decode_video(ffmpeg_fields.pCodecCtx, ffmpeg_fields.pFrame, &frameFinished,
packet.data, packet.size);

// Did we get a video frame?
if (frameFinished) {
// Convert the image from its native format to RGB
sws_scale(ffmpeg_fields.img_convert_ctx, ffmpeg_fields.pFrame->data, ffmpeg_fields.pFrame->linesize, 0,
ffmpeg_fields.pCodecCtx->height, pFrameRGB->data, pFrameRGB->linesize);
env->CallVoidMethod(obj, fields.clb_onVideoFrame);
}
} else if (packet.stream_index == ffmpeg_fields.audioStream) {
FFMpegPlayerAndroid_processAudio(&packet, samples);
}

// Free the packet that was allocated by av_read_frame
av_free_packet(&packet);
}

av_free( samples );

// Free the RGB image
av_free(pFrameRGB);

@@ -205,8 +202,6 @@ static void FFMpegPlayerAndroid_play(JNIEnv *env, jobject obj, jobject bitmap) {
__android_log_print(ANDROID_LOG_INFO, TAG, "end of playing");
}

} // end of extern C

static void FFMpegPlayerAndroid_stop(JNIEnv *env, jobject object) {
if(status != STATE_PLAYING) {
return;
src/com/media/ffmpeg/android/FFMpegPlayerAndroid.java (10 changes: 10 additions & 0 deletions)

@@ -8,6 +8,9 @@
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioTrack;
import android.util.AttributeSet;
import android.util.Log;
import android.view.MotionEvent;
@@ -28,6 +31,7 @@ public class FFMpegPlayerAndroid extends SurfaceView {
private int mSurfaceHeight;
private Context mContext;
private SurfaceHolder mSurfaceHolder;
private AudioTrack mAudioTrack;
private MediaController mMediaController;
private Thread mRenderThread;
private IFFMpegPlayer mListener;
@@ -56,6 +60,8 @@ private void initVideoView(Context context) {
mFitToScreen = true;
mVideoWidth = 0;
mVideoHeight = 0;
mAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, 44100, AudioFormat.CHANNEL_CONFIGURATION_STEREO, AudioFormat.ENCODING_PCM_16BIT,
192000, AudioTrack.MODE_STREAM);
getHolder().addCallback(mSHCallback);
}
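
A note on the AudioTrack above: the 192000-byte streaming buffer is hard-coded. A sketch like the following (an assumption, not part of this commit) would instead let the platform report the minimum buffer it needs for 44.1 kHz stereo 16-bit PCM:

int minBufferSize = AudioTrack.getMinBufferSize(44100,
		AudioFormat.CHANNEL_CONFIGURATION_STEREO,
		AudioFormat.ENCODING_PCM_16BIT);
mAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, 44100,
		AudioFormat.CHANNEL_CONFIGURATION_STEREO,
		AudioFormat.ENCODING_PCM_16BIT,
		minBufferSize, AudioTrack.MODE_STREAM);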

@@ -219,6 +225,10 @@ private void onVideoFrame() {
}
}

private void onAudioBuffer(byte[] buffer) {
mAudioTrack.write(buffer, 0, buffer.length);
}
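
In MODE_STREAM the written data only becomes audible once play() has been called on the track, which this commit never does; a guard along these lines (an assumption, not in the commit) could precede the write:

if (mAudioTrack.getPlayState() != AudioTrack.PLAYSTATE_PLAYING) {
	mAudioTrack.play();
}
mAudioTrack.write(buffer, 0, buffer.length);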

private void doDraw(Canvas c) {
if(mFitToScreen) {
float scale_x = (float) mSurfaceWidth/ (float) mVideoWidth;
src/cz/havlena/ffmpeg/ui/FFMpegActivity.java (45 changes: 24 additions & 21 deletions)

@@ -42,6 +42,7 @@
public class FFMpegActivity extends Activity {

private static final String TAG = "FFMpegActivity";
private static final boolean D = false;

private static final int FILE_SELECT = 0;
public static final String FILE_INPUT = "FFMpeg_file";
@@ -81,28 +82,30 @@ public void onCreate(Bundle savedInstanceState) {
PowerManager pm = (PowerManager) getSystemService(Context.POWER_SERVICE);
mWakeLock = pm.newWakeLock(PowerManager.SCREEN_DIM_WAKE_LOCK, TAG);

//startPlayer();
Intent i = getIntent();
if(i.getAction() == null || !i.getAction().equals(Intent.ACTION_INPUT_METHOD_CHANGED)) {
startFileExplorer();
if(D) {
startPlayer("/sdcard/Videos/pixar.flv");
} else {
startPlayer(i.getStringExtra(FILE_INPUT));
/*
mFFMpegController = new FFMpeg();
mFFMpegController.setListener(new FFMpegHandler(this));
String filePath = i.getStringExtra(FILE_INPUT);
mTextViewInputVideo.setText(filePath);
try {
initFFMpeg(filePath);
FFMpegFile input = mFFMpegController.getInputFile();
FFMpegAVFormatContext.Duration duration = input.getContext().getDuration();
mTextViewInputVideoLength.setText(getString(R.string.input_file_info) + " " +
duration.hours + "h " + duration.mins + "min " + duration.secs + "sec");
}
catch (Exception e) {
showError(this, e);
}
*/
Intent i = getIntent();
if(i.getAction() == null || !i.getAction().equals(Intent.ACTION_INPUT_METHOD_CHANGED)) {
startFileExplorer();
} else {
startPlayer(i.getStringExtra(FILE_INPUT));
/*
mFFMpegController = new FFMpeg();
mFFMpegController.setListener(new FFMpegHandler(this));
String filePath = i.getStringExtra(FILE_INPUT);
mTextViewInputVideo.setText(filePath);
try {
initFFMpeg(filePath);
FFMpegFile input = mFFMpegController.getInputFile();
FFMpegAVFormatContext.Duration duration = input.getContext().getDuration();
mTextViewInputVideoLength.setText(getString(R.string.input_file_info) + " " +
duration.hours + "h " + duration.mins + "min " + duration.secs + "sec");
}
catch (Exception e) {
showError(this, e);
}*/
}
}
}
