Android 使用 Mp4Parser:附加的视频越多,音频流越不同步
正如标题所说,随着我附加的视频越来越多,音频就越来越不同步。我该如何解决这个问题?以下是我用来附加视频的代码:
// AsyncTask that concatenates the recorded MP4 clips listed in video_urls into a
// single file using mp4parser's AppendTrack.
// NOTE(review): this is the code under question — appending tracks without
// equalising each clip's audio/video durations is what makes the audio drift
// further out of sync with every appended clip (see the Kotlin fix below).
public class ConcatenateVideos extends ExecutorAsyncTask<String, Void, Boolean> {
// Paths of the clips to join; the file at index 0 is also reused as the output.
private ArrayList<String> video_urls = null;
private final String TAG = ConcatenateVideos.class.getSimpleName();
// Stores the clip paths and immediately starts the background task.
public void setUris(ArrayList<String> videos) {
LogService.log(TAG, "set uris");
if (videos != null) {
video_urls = videos;
this.execute();
}
}
@Override
protected Boolean doInBackground(String... params) {
boolean success = false;
// Open one stream per clip path.
// NOTE(review): these streams are never closed — they leak, and leak doubly
// if an open or the concatenation throws part-way through.
FileInputStream[] videos = new FileInputStream[video_urls.size()];
try {
for (int i = 0; i < video_urls.size(); i++) {
videos[i] = new FileInputStream(video_urls.get(i));
}
success = concatenateVideos(videos);
} catch (Exception e) {
success = false;
LogService.err(TAG, e.getMessage(), e);
}
return success;
}
// Parses every clip, appends all audio ("soun") tracks into one track and all
// video ("vide") tracks into another, then overwrites the FIRST clip file with
// the combined movie and deletes the remaining clip files.
private boolean concatenateVideos(InputStream[] video_streams) {
boolean success = false;
Movie[] inMovies = new Movie[video_streams.length];
FileChannel fc = null;
Movie result = new Movie();
IsoFile out = null;
try {
for (int i = 0; i < inMovies.length; i++) {
if (video_streams[i] != null) {
inMovies[i] = MovieCreator.build(Channels.newChannel(video_streams[i]));
}
}
List<Track> videoTracks = new LinkedList<Track>();
List<Track> audioTracks = new LinkedList<Track>();
for (Movie m : inMovies) {
for (Track t : m.getTracks()) {
// The MP4 handler string distinguishes audio ("soun") from video ("vide").
if (t.getHandler().equals("soun")) {
audioTracks.add(t);
}
if (t.getHandler().equals("vide")) {
videoTracks.add(t);
}
}
}
// AppendTrack concatenates the tracks back-to-back. Because each clip's
// audio and video tracks differ slightly in length, the per-clip offset
// accumulates — this is the root cause of the reported drift.
if (audioTracks.size() > 0) {
result.addTrack(new AppendTrack(audioTracks.toArray(new Track[audioTracks.size()])));
}
if (videoTracks.size() > 0) {
result.addTrack(new AppendTrack(videoTracks.toArray(new Track[videoTracks.size()])));
}
out = new DefaultMp4Builder().build(result);
// Output is written over the first input file (opened "rw"); the other
// source clips are deleted here, BEFORE the result has been written out.
fc = new RandomAccessFile(video_urls.get(0), "rw").getChannel();
for (int i = 1; i < video_urls.size(); i++) {
File f = new File(video_urls.get(i));
LogService.log(TAG, "delete file : " + f.delete());
}
success = true;
} catch (Exception e) {
LogService.err(TAG, e.getMessage(), e);
success = false;
} finally {
try {
LogService.log(TAG, "==========finally");
// NOTE(review): the actual serialisation happens in finally — the built
// movie is only written into the first file when fc was opened above.
if (fc != null) {
fc.position(0);
out.getBox(fc);
fc.close();
}
} catch (Exception e) {
LogService.err(TAG, e.getMessage(), e);
}
}
return success;
}
}
(此处原为上面 ConcatenateVideos 类的机器翻译副本,文字已严重损坏且与上文英文代码重复;完整代码请参见上文原版。)
这就是我用来调用ConcatenateVideos函数的服务:
private final String TAG = ConcatenateVideosService.class.getSimpleName();
final Messenger myMessenger = new Messenger(new IncomingHandler());
// Handler backing the service Messenger: receives command messages from the bound
// client and, for CONCATE_CMD_SERVICE, runs the concatenation and sends the
// boolean result back to the client.
class IncomingHandler extends Handler {
// Messenger of the first client that contacted us; replies are sent here.
private Messenger client = null;
@Override
public void handleMessage(Message msg) {
// init messenger
// Capture the reply channel once, from the first incoming message.
if (client == null) {
client = msg.replyTo;
}
// get the message
Bundle data = msg.getData();
// Command code is transported as a single byte under the "message" key.
byte dataString = data.getByte("message");
switch (dataString) {
case Constants.INIT_CMD_SERVICE:
LogService.log(TAG, "INIT_CMD_SERVICE:");
break;
case Constants.CONCATE_CMD_SERVICE:
LogService.log(TAG, "CONCATE_CMD_SERVICE:");
// Clip paths to concatenate, supplied by the client.
ArrayList<String> videos = data.getStringArrayList(Constants.SERVICE_VIDEO_URLS);
// Anonymous subclass so we can push the result back over the Messenger
// once the background task finishes.
ConcatenateVideos concatenateVideos = new ConcatenateVideos() {
@Override
protected void onPostExecute(Boolean result) {
LogService.log(TAG, "onPostExecute() , result : " + result);
super.onPostExecute(result);
// setup the answer
Message answer = Message.obtain();
Bundle bundle = new Bundle();
bundle.putBoolean("result", result);
answer.setData(bundle);
// send the answer
try {
client.send(answer);
} catch (RemoteException e) {
LogService.err(TAG, e.getMessage(), e);
}
}
};
// setUris() also starts the task (calls execute()).
concatenateVideos.setUris(videos);
break;
}
}
}
@Override
public boolean onUnbind(Intent intent) {
// Stop the service as soon as the last client unbinds; nothing to keep alive.
stopSelf();
return super.onUnbind(intent);
}
@Override
public IBinder onBind(Intent intent) {
// Expose the Messenger's binder so clients can send command messages.
return myMessenger.getBinder();
}
@Override
public void onDestroy() {
// No resources to release; override kept only as an extension point.
super.onDestroy();
}
(此处原为上面服务类代码的机器翻译副本,文字已严重损坏且与上文英文代码重复;完整代码请参见上文原版。)
我的视频以以下参数录制:视频码率 800000、音频码率 64000、音频采样率 44100,MPEG-4 容器、H.264 编码,音频为 AAC,帧率为每秒 30 帧。
我做了一个测试:如果录制 4 个视频,每个视频的 videoTimeScale 是 90000,audioTimeScale 是 44100。但是在拼接之后,输出文件的 audioTimeScale 仍然是 44100,而 videoTimeScale 却变成了 900。为什么 videoTimeScale 会改变,而 audioTimeScale 不会?
(回答)在许多情况下,同一段录制中音频轨和视频轨的长度并不完全相同。假设音频总是 10.0 秒,而视频总是 10.1 秒。如果只播放一个这样的文件,音频可能会比视频先结束——结尾会被自动静音,你察觉不到问题。
但如果把两个这样的视频拼接起来,第一段音频从 0 秒开始,第二段音频从 10.0 秒开始,而第二段视频要到 10.1 秒才开始——每拼接一段,音画偏差就累积约 0.1 秒,视频越多越不同步。解决办法是在拼接前裁剪较长的一方,使每段的音频和视频时长一致,例如下面的 Kotlin 代码:
/**
 * Concatenates the MP4 files in [videoPathList] into a single file at [targetFilePath].
 *
 * For every input movie the per-track durations are measured and the longer of the
 * two track groups (audio vs. video) is cropped via [adjustDurations], so the
 * audio/video offset does not accumulate across appended clips.
 *
 * @param videoPathList paths of the source MP4 files, in playback order.
 * @param targetFilePath path of the combined output file (overwritten if present).
 * @throws Exception propagated from mp4parser parsing/writing or file I/O.
 */
@Throws(Exception::class)
fun appendVideos(videoPathList: List<String>, targetFilePath: String) {
    // One Movie per input file (plain map — no need for flatMap over singleton lists).
    val movies = videoPathList.map { file -> MovieCreator.build(file) }
    val finalMovie = Movie()
    val videoTracksTotal = mutableListOf<Track>()
    val audioTracksTotal = mutableListOf<Track>()
    // Running totals in seconds, carried across clips.
    var audioDuration = 0.0
    var videoDuration = 0.0
    movies.forEach { movie ->
        val videoTracks = mutableListOf<Track>()
        val audioTracks = mutableListOf<Track>()
        movie.tracks.forEach { track ->
            // Track length in seconds = sum of sample durations / track timescale.
            val trackDuration = track.sampleDurations.toList()
                .sumOf { t -> t.toDouble() / track.trackMetaData.timescale }
            if (track.handler == "vide") {
                videoDuration += trackDuration
                videoTracks.add(track)
            } else if (track.handler == "soun") {
                audioDuration += trackDuration
                audioTracks.add(track)
            }
        }
        // Crop the longer side of this clip so both running totals stay aligned.
        adjustDurations(videoTracks, audioTracks, videoDuration, audioDuration).let {
            audioDuration = it.audioDuration
            videoDuration = it.videoDuration
        }
        videoTracksTotal.addAll(videoTracks)
        audioTracksTotal.addAll(audioTracks)
    }
    if (videoTracksTotal.isNotEmpty() && audioTracksTotal.isNotEmpty()) {
        finalMovie.addTrack(AppendTrack(*videoTracksTotal.toTypedArray()))
        finalMovie.addTrack(AppendTrack(*audioTracksTotal.toTypedArray()))
    }
    val container = DefaultMp4Builder().build(finalMovie)
    // use {} guarantees the stream is closed even if writeContainer throws
    // (the original leaked the FileOutputStream on failure).
    FileOutputStream(targetFilePath).use { fos ->
        container.writeContainer(Channels.newChannel(fos))
    }
}
class Durations(val audioDuration: Double, val videoDuration: Double)
/**
 * Equalises the running audio and video durations by cropping trailing samples
 * off the LAST track of whichever side is longer.
 *
 * The cropped track replaces the original in the corresponding mutable list, so
 * callers see the adjustment in place.
 *
 * @param videoTracks video tracks of the current clip (last entry may be replaced).
 * @param audioTracks audio tracks of the current clip (last entry may be replaced).
 * @param videoDuration running video total in seconds, including this clip.
 * @param audioDuration running audio total in seconds, including this clip.
 * @return the updated running totals after cropping.
 */
private fun adjustDurations(
    videoTracks: MutableList<Track>,
    audioTracks: MutableList<Track>,
    videoDuration: Double,
    audioDuration: Double
): Durations {
    var diff = audioDuration - videoDuration
    val tracks: MutableList<Track>
    var durationOperator: Double
    val isAudioProblem: Boolean
    when {
        // audio and video match, no operations to perform
        diff == 0.0 -> {
            return Durations(audioDuration, videoDuration)
        }
        // audio tracks are longer than video
        diff > 0 -> {
            tracks = audioTracks
            durationOperator = audioDuration
            isAudioProblem = true
        }
        // video tracks are longer than audio
        else -> {
            tracks = videoTracks
            durationOperator = videoDuration
            diff *= -1.0
            isAudioProblem = false
        }
    }
    // Operate on the last track: trailing samples are the ones to drop.
    var track: Track = tracks.last()
    var counter: Long = 0
    // Walk the sample durations from the end, dropping samples while they fit
    // inside the remaining difference.
    // BUGFIX: the original used forEach + return@forEach, which CONTINUES instead
    // of breaking — samples larger than the remaining diff were skipped but later
    // (smaller) ones kept being counted, so the trailing crop below removed a
    // different set of samples than the ones actually subtracted from diff.
    for (sampleDuration in track.sampleDurations.toList().asReversed()) {
        val sampleSeconds = sampleDuration.toDouble() / track.trackMetaData.timescale
        if (sampleSeconds > diff) {
            break
        }
        diff -= sampleSeconds
        durationOperator -= sampleSeconds
        counter++
    }
    if (counter != 0L) {
        // Cropping track: keep samples [0, size - counter), i.e. drop the counted tail.
        track = CroppedTrack(track, 0, track.samples.size - counter)
        // Update the original reference so the caller's list holds the cropped track.
        tracks.removeAt(tracks.lastIndex)
        tracks.add(track)
    }
    // Return the adjusted totals for whichever side was cropped.
    return if (isAudioProblem) {
        Durations(durationOperator, videoDuration)
    } else {
        Durations(audioDuration, durationOperator)
    }
}