Android 在3G网络中,FFmpegFrameRecorder视频广播音频比视频帧速度更快
我正在使用Android 在3G网络中,FFmpegFrameRecorder视频广播音频比视频帧速度更快,android,javacv,video-recording,Android,Javacv,Video Recording,我正在使用FFmpegFrameRecorder进行视频广播。问题是音频比视频帧来得快。我正在使用以下代码,但无法生成完整的视频。音频视频时间戳中存在问题 Java代码: import static com.googlecode.javacv.cpp.opencv_core.IPL_DEPTH_8U; import java.io.IOException; import java.nio.ShortBuffer; import android.app.Activity; import and
FFmpegFrameRecorder
进行视频广播。问题是音频比视频帧来得快。我正在使用以下代码,但无法生成完整的视频。音频视频时间戳中存在问题
Java代码:
import static com.googlecode.javacv.cpp.opencv_core.IPL_DEPTH_8U;
import java.io.IOException;
import java.nio.ShortBuffer;
import android.app.Activity;
import android.content.Context;
import android.content.pm.ActivityInfo;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.hardware.Camera;
import android.hardware.Camera.PreviewCallback;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.os.Bundle;
import android.os.PowerManager;
import android.util.Log;
import android.view.KeyEvent;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup.LayoutParams;
import android.widget.Button;
import android.widget.LinearLayout;
import com.googlecode.javacv.FFmpegFrameRecorder;
import com.googlecode.javacv.cpp.opencv_core.IplImage;
public class MainActivity extends Activity implements OnClickListener {
private final static String LOG_TAG = "MainActivity";
private PowerManager.WakeLock mWakeLock;
private String ffmpeg_link = "";
private volatile FFmpegFrameRecorder recorder;
boolean recording = false;
long startTime = 0;
private int sampleAudioRateInHz = 16000;
private int imageWidth = 320;
private int imageHeight = 240;
private int frameRate = 24;
private Thread audioThread;
volatile boolean runAudioThread = true;
private AudioRecord audioRecord;
private AudioRecordRunnable audioRecordRunnable;
private CameraView cameraView;
private IplImage yuvIplimage = null;
private Button recordButton;
private LinearLayout mainLayout;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
setContentView(R.layout.activity_main);
initLayout();
initRecorder();
}
@Override
protected void onResume() {
super.onResume();
if (mWakeLock == null) {
PowerManager pm = (PowerManager) getSystemService(Context.POWER_SERVICE);
mWakeLock = pm.newWakeLock(PowerManager.SCREEN_BRIGHT_WAKE_LOCK,
LOG_TAG);
mWakeLock.acquire();
}
}
@Override
protected void onPause() {
super.onPause();
if (mWakeLock != null) {
mWakeLock.release();
mWakeLock = null;
}
}
@Override
protected void onDestroy() {
super.onDestroy();
recording = false;
}
private void initLayout() {
mainLayout = (LinearLayout) this.findViewById(R.id.record_layout);
recordButton = (Button) findViewById(R.id.recorder_control);
recordButton.setText("Start");
recordButton.setOnClickListener(this);
cameraView = new CameraView(this);
LinearLayout.LayoutParams layoutParam = new LinearLayout.LayoutParams(
LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT);
mainLayout.addView(cameraView, layoutParam);
Log.v(LOG_TAG, "added cameraView to mainLayout");
}
private void initRecorder() {
Log.w(LOG_TAG, "initRecorder");
if (yuvIplimage == null) {
// Recreated after frame size is set in surface change method
yuvIplimage = IplImage.create(imageWidth, imageHeight,
IPL_DEPTH_8U, 2);
// yuvIplimage = IplImage.create(imageWidth, imageHeight,
// IPL_DEPTH_32S, 2);
Log.v(LOG_TAG, "IplImage.create");
}
recorder = new FFmpegFrameRecorder(ffmpeg_link, imageWidth,
imageHeight, 1);
Log.v(LOG_TAG, "FFmpegFrameRecorder: " + ffmpeg_link + " imageWidth: "
+ imageWidth + " imageHeight " + imageHeight);
recorder.setFormat("flv");
Log.v(LOG_TAG, "recorder.setFormat(\"flv\")");
recorder.setSampleRate(sampleAudioRateInHz);
Log.v(LOG_TAG, "recorder.setSampleRate(sampleAudioRateInHz)");
// re-set in the surface changed method as well
recorder.setFrameRate(frameRate);
Log.v(LOG_TAG, "recorder.setFrameRate(frameRate)");
// Create audio recording thread
audioRecordRunnable = new AudioRecordRunnable();
audioThread = new Thread(audioRecordRunnable);
}
// Start the capture
public void startRecording() {
try {
recorder.start();
startTime = System.currentTimeMillis();
recording = true;
audioThread.start();
} catch (FFmpegFrameRecorder.Exception e) {
e.printStackTrace();
}
}
public void stopRecording() {
// This should stop the audio thread from running
runAudioThread = false;
if (recorder != null && recording) {
recording = false;
Log.v(LOG_TAG,
"Finishing recording, calling stop and release on recorder");
try {
recorder.stop();
recorder.release();
} catch (FFmpegFrameRecorder.Exception e) {
e.printStackTrace();
}
recorder = null;
}
}
@Override
public boolean onKeyDown(int keyCode, KeyEvent event) {
// Quit when back button is pushed
if (keyCode == KeyEvent.KEYCODE_BACK) {
if (recording) {
stopRecording();
}
finish();
return true;
}
return super.onKeyDown(keyCode, event);
}
@Override
public void onClick(View v) {
if (!recording) {
startRecording();
Log.w(LOG_TAG, "Start Button Pushed");
recordButton.setText("Stop");
} else {
stopRecording();
Log.w(LOG_TAG, "Stop Button Pushed");
recordButton.setText("Start");
}
}
// ---------------------------------------------
// audio thread, gets and encodes audio data
// ---------------------------------------------
class AudioRecordRunnable implements Runnable {
@Override
public void run() {
// Set the thread priority
android.os.Process
.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);
// Audio
int bufferSize;
short[] audioData;
int bufferReadResult;
bufferSize = AudioRecord.getMinBufferSize(sampleAudioRateInHz,
AudioFormat.CHANNEL_CONFIGURATION_MONO,
AudioFormat.ENCODING_PCM_16BIT);
audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC,
sampleAudioRateInHz,
AudioFormat.CHANNEL_CONFIGURATION_MONO,
AudioFormat.ENCODING_PCM_16BIT, bufferSize);
audioData = new short[bufferSize];
Log.d(LOG_TAG, "audioRecord.startRecording()");
audioRecord.startRecording();
// Audio Capture/Encoding Loop
while (runAudioThread) {
// Read from audioRecord
bufferReadResult = audioRecord.read(audioData, 0,
audioData.length);
if (bufferReadResult > 0) {
// Log.v(LOG_TAG,"audioRecord bufferReadResult: " +
// bufferReadResult);
// Changes in this variable may not be picked up despite it
// being "volatile"
if (recording) {
try {
// Write to FFmpegFrameRecorder
recorder.record(ShortBuffer.wrap(audioData, 0,
bufferReadResult));
} catch (FFmpegFrameRecorder.Exception e) {
Log.v(LOG_TAG, e.getMessage());
e.printStackTrace();
}
}
}
}
Log.v(LOG_TAG, "AudioThread Finished");
/* Capture/Encoding finished, release recorder */
if (audioRecord != null) {
audioRecord.stop();
audioRecord.release();
audioRecord = null;
Log.v(LOG_TAG, "audioRecord released");
}
}
}
class CameraView extends SurfaceView implements SurfaceHolder.Callback,
PreviewCallback {
private boolean previewRunning = false;
private SurfaceHolder holder;
private Camera camera;
private byte[] previewBuffer;
long videoTimestamp = 0;
Bitmap bitmap;
Canvas canvas;
public CameraView(Context _context) {
super(_context);
holder = this.getHolder();
holder.addCallback(this);
holder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
camera = Camera.open();
try {
camera.setPreviewDisplay(holder);
camera.setPreviewCallback(this);
Camera.Parameters currentParams = camera.getParameters();
Log.v(LOG_TAG,
"Preview Framerate: "
+ currentParams.getPreviewFrameRate());
Log.v(LOG_TAG,
"Preview imageWidth: "
+ currentParams.getPreviewSize().width
+ " imageHeight: "
+ currentParams.getPreviewSize().height);
// Use these values
imageWidth = currentParams.getPreviewSize().width;
imageHeight = currentParams.getPreviewSize().height;
frameRate = currentParams.getPreviewFrameRate();
bitmap = Bitmap.createBitmap(imageWidth, imageHeight,
Bitmap.Config.ALPHA_8);
/*
* Log.v(LOG_TAG,"Creating previewBuffer size: " + imageWidth *
* imageHeight *
* ImageFormat.getBitsPerPixel(currentParams.getPreviewFormat
* ())/8); previewBuffer = new byte[imageWidth * imageHeight *
* ImageFormat
* .getBitsPerPixel(currentParams.getPreviewFormat())/8];
* camera.addCallbackBuffer(previewBuffer);
* camera.setPreviewCallbackWithBuffer(this);
*/
camera.startPreview();
previewRunning = true;
} catch (IOException e) {
Log.v(LOG_TAG, e.getMessage());
e.printStackTrace();
}
}
public void surfaceChanged(SurfaceHolder holder, int format, int width,
int height) {
Log.v(LOG_TAG, "Surface Changed: width " + width + " height: "
+ height);
// We would do this if we want to reset the camera parameters
/*
* if (!recording) { if (previewRunning){ camera.stopPreview(); }
*
* try { //Camera.Parameters cameraParameters =
* camera.getParameters(); //p.setPreviewSize(imageWidth,
* imageHeight); //p.setPreviewFrameRate(frameRate);
* //camera.setParameters(cameraParameters);
*
* camera.setPreviewDisplay(holder); camera.startPreview();
* previewRunning = true; } catch (IOException e) {
* Log.e(LOG_TAG,e.getMessage()); e.printStackTrace(); } }
*/
// Get the current parameters
Camera.Parameters currentParams = camera.getParameters();
Log.v(LOG_TAG,
"Preview Framerate: " + currentParams.getPreviewFrameRate());
Log.v(LOG_TAG,
"Preview imageWidth: "
+ currentParams.getPreviewSize().width
+ " imageHeight: "
+ currentParams.getPreviewSize().height);
// Use these values
imageWidth = currentParams.getPreviewSize().width;
imageHeight = currentParams.getPreviewSize().height;
frameRate = currentParams.getPreviewFrameRate();
// Create the yuvIplimage if needed
yuvIplimage = IplImage.create(imageWidth, imageHeight,
IPL_DEPTH_8U, 1);
// yuvIplimage = IplImage.create(imageWidth, imageHeight,
// IPL_DEPTH_32S, 2);
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
try {
camera.setPreviewCallback(null);
previewRunning = false;
camera.release();
} catch (RuntimeException e) {
Log.v(LOG_TAG, e.getMessage());
e.printStackTrace();
}
}
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
if (yuvIplimage != null && recording) {
videoTimestamp = 1000 * (System.currentTimeMillis() - startTime);
// Put the camera preview frame right into the yuvIplimage
// object
System.out.println("value of data=============" + data);
yuvIplimage.getByteBuffer().put(data);
// FAQ about IplImage:
// - For custom raw processing of data, getByteBuffer() returns
// an NIO direct
// buffer wrapped around the memory pointed by imageData, and
// under Android we can
// also use that Buffer with Bitmap.copyPixelsFromBuffer() and
// copyPixelsToBuffer().
// - To get a BufferedImage from an IplImage, we may call
// getBufferedImage().
// - The createFrom() factory method can construct an IplImage
// from a BufferedImage.
// - There are also a few copy*() methods for
// BufferedImage<->IplImage data transfers.
// Let's try it..
// This works but only on transparency
// Need to find the right Bitmap and IplImage matching types
/*
* bitmap.copyPixelsFromBuffer(yuvIplimage.getByteBuffer());
* //bitmap.setPixel(10,10,Color.MAGENTA);
*
* canvas = new Canvas(bitmap); Paint paint = new Paint();
* paint.setColor(Color.GREEN); float leftx = 20; float topy =
* 20; float rightx = 50; float bottomy = 100; RectF rectangle =
* new RectF(leftx,topy,rightx,bottomy);
* canvas.drawRect(rectangle, paint);
*
* bitmap.copyPixelsToBuffer(yuvIplimage.getByteBuffer());
*/
// Log.v(LOG_TAG,"Writing Frame");
try {
// Get the correct time
recorder.setTimestamp(videoTimestamp);
// Record the image into FFmpegFrameRecorder
recorder.record(yuvIplimage);
} catch (FFmpegFrameRecorder.Exception e) {
Log.v(LOG_TAG, e.getMessage());
e.printStackTrace();
}
}
}
}
}
import static com.googlecode.javacv.cpp.opencv_core.IPL_DEPTH_8U;
导入java.io.IOException;
导入java.nio.ShortBuffer;
导入android.app.Activity;
导入android.content.Context;
导入android.content.pm.ActivityInfo;
导入android.graphics.Bitmap;
导入android.graphics.Canvas;
导入android.hardware.Camera;
导入android.hardware.Camera.PreviewCallback;
导入android.media.AudioFormat;
导入android.media.AudioRecord;
导入android.media.MediaRecorder;
导入android.os.Bundle;
导入android.os.PowerManager;
导入android.util.Log;
导入android.view.KeyEvent;
导入android.view.SurfaceHolder;
导入android.view.SurfaceView;
导入android.view.view;
导入android.view.view.OnClickListener;
导入android.view.ViewGroup.LayoutParams;
导入android.widget.Button;
导入android.widget.LinearLayout;
导入com.googlecode.javacv.FFmpegFrameRecorder;
导入com.googlecode.javacv.cpp.opencv_core.IplImage;
公共类MainActivity扩展活动实现OnClickListener{
私有最终静态字符串日志\u TAG=“MainActivity”;
private PowerManager.WakeLock mWakeLock;
私有字符串ffmpeg_link=“”;
专用易失性FFmpegFrameRecorder;
布尔记录=假;
长起始时间=0;
私有int样本听力率inHZ=16000;
私有int imageWidth=320;
私有int imageHeight=240;
私有整数帧率=24;
私有线程;
volatile boolean runAudioThread=true;
私人录音;
私有AudioRecordRunnable AudioRecordRunnable;
私人摄影师;
私有IplImage YuvipImage=null;
私人按钮记录按钮;
私人线路布局主要布局;
@凌驾
创建时的公共void(Bundle savedInstanceState){
super.onCreate(savedInstanceState);
setRequestedOrientation(ActivityInfo.SCREEN\u ORIENTATION\u横向);
setContentView(R.layout.activity_main);
initLayout();
initRecorder();
}
@凌驾
受保护的void onResume(){
super.onResume();
if(mWakeLock==null){
PowerManager pm=(PowerManager)getSystemService(Context.POWER\u服务);
mWakeLock=pm.newWakeLock(PowerManager.SCREEN\u BRIGHT\u WAKE\u LOCK,
日志(U标签);
mWakeLock.acquire();
}
}
@凌驾
受保护的void onPause(){
super.onPause();
if(mWakeLock!=null){
mWakeLock.release();
mWakeLock=null;
}
}
@凌驾
受保护的空onDestroy(){
super.ondestory();
记录=假;
}
私有void initLayout(){
mainLayout=(LinearLayout)this.findViewById(R.id.record\u布局);
recordButton=(按钮)findViewById(R.id.recorder\U控件);
recordButton.setText(“开始”);
recordButton.setOnClickListener(此);
cameraView=新cameraView(本);
LinearLayout.LayoutParams layoutParam=新的LinearLayout.LayoutParams(
LayoutParams.MATCH_父级,LayoutParams.MATCH_父级);
mainLayout.addView(cameraView、layoutParam);
Log.v(Log_标签,“将cameraView添加到主布局”);
}
私有void initRecorder(){
Log.w(Log_标签,“initRecorder”);
if(yuvIplimage==null){
//在“曲面更改方法”中设置帧大小后重新创建
yuvIplimage=IplImage.create(imageWidth、imageHeight、,
IPL_深度_8U,2);
//yuvIplimage=IplImage.create(imageWidth、imageHeight、,
//IPL_深度_32S,2);
Log.v(Log_标签,“IplImage.create”);
}
记录器=新的FFmpegFrameRecorder(ffmpeg_链接,图像宽度,
图像高度,1);
Log.v(Log_标签,“FFmpegFrameRecorder:+ffmpeg_链接+”图像宽度:
+图像宽度+图像高度+图像高度);
记录器。设置格式(“flv”);
Log.v(Log_标签,“recorder.setFormat(\“flv\”));
记录器。设置采样率(采样率InHz);
Log.v(Log_标签,“记录器设置采样率(sampleAudioRateInHz)”;
//也可以在“曲面更改”方法中重新设置
记录器。设置帧速率(帧速率);
Log.v(Log_标签,“recorder.setFrameRate(frameRate)”);
//创建音频录制线程
audioRecordRunnable=新的audioRecordRunnable();
audioThread=新线程(audioRecordRunnable);
}
//开始捕获
公共无效开始记录(){
试一试{
recorder.start();
startTime=System.currentTimeMillis();
记录=真;
audioThread.start();
}catch(FFmpegFrameRecorder.e异常){
e、 printStackTrace();
}
}
公开作废停止录制(){
//这将停止音频线程的运行
runAudioThread=false;
if(记录器!=null&&recording){
记录=假;
Log.v(Log_标签,
“在录音机上完成录音、呼叫停止和释放”);
试一试{
录音机。停止();
记录器。释放();
}catch(FFmpegFrameRecorder.e异常){
e、 printStackTrace();
}
记录器=空;
}
}
@凌驾
公共布尔onKeyDown(int-keyCode,KeyEvent事件){
//按下后退按钮时退出
if(keyCode==KeyEvent.keyCode\u BACK){
如果(录音){
停止录制();
}
完成();
返回true;
}
返回super.onKeyDown(keyCode,event);
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.example.javacv.stream.test2"
android:versionCode="1"
android:versionName="1.0" >
<uses-sdk
android:minSdkVersion="8"
android:targetSdkVersion="15" />
<uses-permission android:name="android.permission.INTERNET" />
<uses-permission android:name="android.permission.CAMERA" />
<uses-permission android:name="android.permission.WAKE_LOCK" />
<uses-permission android:name="android.permission.RECORD_AUDIO" />
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
<application
android:icon="@drawable/ic_launcher"
android:label="@string/app_name"
android:theme="@style/AppTheme" >
<activity
android:name="com.example.javacv.stream.test2.MainActivity"
android:label="@string/title_activity_main" >
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
</application>
</manifest>