This chapter shows how to use Java code to merge two videos side by side into one picture, or to concatenate them back to back. The implementation is based on the open-source JavaCV library. The code has been revised repeatedly and is the best and fastest implementation I can think of; if you have a better or faster one, feel free to leave a comment!
Let's look at the results first!
Two videos concatenated back to back
Two videos merged side by side
JavaCV is a wrapper library built on JavaCPP (a layer on top of JNI) that bundles a number of open-source computer vision libraries. It wraps commonly used libraries and utility classes in the computer vision field, including FFmpeg, OpenCV, TensorFlow, Caffe, Tesseract, libdc1394, OpenKinect, videoInput, and ARToolKitPlus.
JavaCV is dual-licensed under the Apache License Version 2.0 and GPLv2, and these interfaces can be called from Java on platforms including Windows, Linux, macOS, Android, and iOS.
JavaCV is a Java-based computer vision library that provides interfaces and wrappers for multiple computer vision libraries, making it easy for Java developers to use computer vision technology. Computer vision is an important branch of artificial intelligence, covering image processing, video analysis, pattern recognition, machine learning, and more. It powers application scenarios such as face recognition, object detection, object tracking, and gesture recognition.
<!-- https://mvnrepository.com/artifact/org.bytedeco/javacv-platform -->
<dependency>
    <groupId>org.bytedeco</groupId>
    <artifactId>javacv-platform</artifactId>
    <version>1.5.5</version>
</dependency>
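Once the dependency is added, a quick sanity check is to open a video with FFmpegFrameGrabber and print its basic metadata; this also confirms that the bundled native FFmpeg libraries load correctly. The snippet below is just a minimal sketch of my own (the class name VideoProbe is mine, and the path reuses the sample path used later in this article):

import org.bytedeco.javacv.FFmpegFrameGrabber;

public class VideoProbe {
    public static void main(String[] args) throws Exception {
        // Open a local video file (placeholder path; point it at any mp4 on disk)
        FFmpegFrameGrabber grabber = new FFmpegFrameGrabber("E:\\test.mp4");
        grabber.start();
        // Print the basic properties that the merge/join code below relies on
        System.out.println("width=" + grabber.getImageWidth()
                + " height=" + grabber.getImageHeight()
                + " frameRate=" + grabber.getFrameRate()
                + " audioChannels=" + grabber.getAudioChannels()
                + " videoFrames=" + grabber.getLengthInVideoFrames());
        grabber.stop();
    }
}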
The core code is a single-class implementation. Create a VideoMergeJoin class in IDEA (or another editor), paste in the code below, then right-click and run the main method.
import org.bytedeco.javacv.*;
import org.bytedeco.javacv.Frame;
import java.awt.*;
import java.awt.image.BufferedImage;
/**
 * @author tarzan
 */
public class VideoMergeJoin {
    final static String VIDEO_PATH1 = "E:\\test.mp4";
    final static String VIDEO_PATH2 = "E:\\aivideo\\20231217-190828_with_snd.mp4";
    final static String MERGE_PATH = "E:\\test_merge.mp4";
    final static String JOIN_PATH = "E:\\test_join.mp4";

    public static void main(String[] args) throws Exception {
        long start = System.currentTimeMillis();
        mergeVideo(VIDEO_PATH1, VIDEO_PATH2, MERGE_PATH);
        joinVideo(VIDEO_PATH1, VIDEO_PATH2, JOIN_PATH);
        System.out.println("Elapsed time: " + (System.currentTimeMillis() - start) + " ms");
    }

    public static void joinVideo(String videoPath1, String videoPath2, String outPath) throws Exception {
        // Initialize the source videos
        FFmpegFrameGrabber grabber1 = new FFmpegFrameGrabber(videoPath1);
        FFmpegFrameGrabber grabber2 = new FFmpegFrameGrabber(videoPath2);
        grabber1.start();
        grabber2.start();
        // Initialize the target video with the first video's size and audio channel count
        FFmpegFrameRecorder recorder = new FFmpegFrameRecorder(outPath, grabber1.getImageWidth(), grabber1.getImageHeight(), grabber1.getAudioChannels());
        // Match the first video's frame rate and audio sample rate to keep audio and video in sync
        recorder.setFrameRate(grabber1.getFrameRate());
        recorder.setSampleRate(grabber1.getSampleRate());
        // Record the two videos one after the other
        recorder.start();
        while (true) {
            Frame frame = grabber1.grab();
            if (frame == null) {
                break;
            }
            recorder.record(frame);
        }
        while (true) {
            Frame frame = grabber2.grab();
            if (frame == null) {
                break;
            }
            recorder.record(frame);
        }
        // Release resources
        grabber1.stop();
        grabber2.stop();
        recorder.stop();
    }

    public static void mergeVideo(String videoPath1, String videoPath2, String outPath) throws Exception {
        // Initialize the source videos
        FFmpegFrameGrabber grabber1 = new FFmpegFrameGrabber(videoPath1);
        FFmpegFrameGrabber grabber2 = new FFmpegFrameGrabber(videoPath2);
        grabber1.start();
        grabber2.start();
        // Make sure both videos have the same frame rate
        if (grabber1.getFrameRate() != grabber2.getFrameRate()) {
            throw new Exception("Video frame rates are not the same!");
        }
        // Initialize the target video: the width is the sum of both widths, the height comes from the first video
        FFmpegFrameRecorder recorder = new FFmpegFrameRecorder(outPath, grabber1.getImageWidth() + grabber2.getImageWidth(), grabber1.getImageHeight(), grabber1.getAudioChannels());
        // Match the first video's frame rate and audio sample rate to keep audio and video in sync
        recorder.setFrameRate(grabber1.getFrameRate());
        recorder.setSampleRate(grabber1.getSampleRate());
        // Record the merged video
        recorder.start();
        int i = 1;
        int videoSize = grabber1.getLengthInVideoFrames();
        while (true) {
            Frame frame1 = grabber1.grab();
            if (frame1 == null) {
                break;
            }
            if (frame1.image != null) {
                // Combine the two frames into a single picture
                Frame frame2 = grabber2.grabImage();
                System.out.println("The video has " + videoSize + " frames; processing frame " + i);
                // Create a new BufferedImage for the combined picture
                BufferedImage combinedImage = new BufferedImage(grabber1.getImageWidth() + grabber2.getImageWidth(), grabber1.getImageHeight(), BufferedImage.TYPE_3BYTE_BGR);
                Graphics2D g2d = combinedImage.createGraphics();
                // Draw both video frames side by side onto the combined picture
                g2d.drawImage(Java2DFrameUtils.toBufferedImage(frame1), 0, 0, null);
                if (frame2 != null) {
                    // If the second video is shorter, its half simply stays black once it runs out of frames
                    g2d.drawImage(Java2DFrameUtils.toBufferedImage(frame2), grabber1.getImageWidth(), 0, null);
                }
                g2d.dispose();
                // ImageIO.write(combinedImage, "png", new File("E:\\images1\\combinedImage" + i + ".png"));
                // Convert the combined BufferedImage back to a frame and record it into the target video
                recorder.record(Java2DFrameUtils.toFrame(combinedImage));
                i++;
            }
            if (frame1.samples != null) {
                recorder.recordSamples(frame1.sampleRate, frame1.audioChannels, frame1.samples);
            }
        }
        // Release resources
        grabber1.stop();
        grabber2.stop();
        recorder.stop();
    }
}
Console output from the run:
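A closing note: mergeVideo above assumes that both inputs have the same height (it already checks that the frame rates match). If the second video has a different size, one option is to scale its decoded frames to the first video's height before drawing them onto the combined image. The sketch below shows that idea with plain java.awt scaling; the class and the helper name scaleToHeight are my own and not part of JavaCV:

import java.awt.Graphics2D;
import java.awt.Image;
import java.awt.image.BufferedImage;

public class FrameScaler {

    // Hypothetical helper: scale an image so its height matches targetHeight,
    // preserving the aspect ratio. getScaledInstance is simple but not the fastest option.
    public static BufferedImage scaleToHeight(BufferedImage src, int targetHeight) {
        int targetWidth = (int) Math.round(src.getWidth() * (targetHeight / (double) src.getHeight()));
        BufferedImage scaled = new BufferedImage(targetWidth, targetHeight, BufferedImage.TYPE_3BYTE_BGR);
        Graphics2D g2d = scaled.createGraphics();
        g2d.drawImage(src.getScaledInstance(targetWidth, targetHeight, Image.SCALE_SMOOTH), 0, 0, null);
        g2d.dispose();
        return scaled;
    }
}

Inside mergeVideo you would then draw scaleToHeight(Java2DFrameUtils.toBufferedImage(frame2), grabber1.getImageHeight()) at x offset grabber1.getImageWidth(), and size both combinedImage and the recorder using the scaled width instead of grabber2.getImageWidth().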