
Code Examples for Implementing Real-Time Video Relay in Java

Author: Ha_Ha_Wu

This article walks through how to implement real-time video relay in Java. The code examples are explained in detail and should be a useful reference; interested readers are encouraged to try them out themselves.

Overview of the feature

The original goal was an online beauty-filter feature, like the effects applied while recording a video on Douyin (TikTok). As a backend programmer, I instinctively assumed that all image/video processing should happen on the server and be pushed to the frontend over some protocol for display, and the project is built on that premise. Main libraries used: spring-boot, JavaCV, Libjitsi, Webcam.

Step 1: Start the camera on the backend and capture frames

while (isOpened) {
    synchronized (lock) {
        System.out.println("Feeding a frame to the FFmpeg recorder");
        BufferedImage image = webcam.getImage();
        Frame frame = converter.getFrame(image);
        recorder.record(frame);
        if (!isFileStreamOn) {
            isFileStreamOn = true;
        }
    }
    Thread.yield();
}

This step uses the Webcam and JavaCV packages.

Webcam can grab BufferedImage frames from the computer's camera (JavaCV has similar utilities). JavaCV is a Java wrapper around audio/video processing libraries; its core idea is to convert image/video/audio data into Frame objects and operate on those. In the code above, each captured image is turned into a Frame by the converter (the converter's source is well worth studying), and FFmpegFrameRecorder turns the incoming frames into a video (file/stream).
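As a quick illustration of that Frame abstraction, here is a minimal self-contained sketch of the BufferedImage <-> Frame round trip with Java2DFrameConverter (the blank image below is synthetic, standing in for a real webcam capture):

import org.bytedeco.javacv.Frame;
import org.bytedeco.javacv.Java2DFrameConverter;
import java.awt.image.BufferedImage;

public class FrameRoundTrip {
    public static void main(String[] args) {
        Java2DFrameConverter converter = new Java2DFrameConverter();
        // A blank 640x480 image stands in for a real webcam capture.
        BufferedImage image = new BufferedImage(640, 480, BufferedImage.TYPE_3BYTE_BGR);
        Frame frame = converter.getFrame(image);                 // AWT image -> JavaCV Frame
        BufferedImage back = converter.getBufferedImage(frame);  // JavaCV Frame -> AWT image
        System.out.println(frame.imageWidth + "x" + frame.imageHeight
                + " -> " + back.getWidth() + "x" + back.getHeight());
    }
}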

The webcam, recorder, and converter are defined as beans in the config:

@Bean
public Webcam getCam() {
    Webcam webcam = Webcam.getDefault();
    webcam.setViewSize(new Dimension(640, 480));
    return webcam;
}

@Bean
public FFmpegFrameRecorder getRecorder(Webcam webcam) {
    FFmpegFrameRecorder recorder = new FFmpegFrameRecorder(
            "src/main/resources/static/output.avi",
            webcam.getViewSize().width, webcam.getViewSize().height);
    recorder.setVideoCodecName("libx264"); // was "lib264", a typo
    recorder.setVideoCodec(avcodec.AV_CODEC_ID_MPEG4);
    recorder.setFormat("avi");
    recorder.setFrameRate(24);
    return recorder;
}

@Bean
public Java2DFrameConverter getConverter() {
    return new Java2DFrameConverter();
}

Step 2: Send the captured video to the frontend as RTP packets

while (isFileStreamOn) {
    InputStream stream = new FileInputStream(file);
    synchronized (lock) {
        while (stream.available() > 0) {
            byte[] bytes = new byte[1024];
            int length = stream.read(bytes);
            RawPacket rawPacket = new RawPacket(bytes, 0, length);
            DatagramPacket udpPacket = new DatagramPacket(rawPacket.getBuffer(), rawPacket.getLength(), targetHost, targetPort);
            udpSocket.send(udpPacket);
            System.out.println("Sent packet " + Arrays.toString(udpPacket.getData()));
        }
//      file = new File(file.getAbsoluteFile());
        try (FileWriter writer = new FileWriter(file)) {
            writer.write(""); // write an empty string to truncate the file
        } catch (IOException e) {
            e.printStackTrace();
        }
        stream = new FileInputStream(file);
    }
}
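One caveat: the RawPacket above just wraps raw file bytes, with none of the RTP header fields filled in, so what actually goes over the wire is effectively plain UDP rather than well-formed RTP. For reference, here is a minimal hand-rolled sketch of the 12-byte RTP header defined by RFC 3550; the payload type 96 is an arbitrary value from the dynamic range, and the whole class is illustrative rather than part of the project:

import java.nio.ByteBuffer;

public class RtpHeaderSketch {
    // Prepends a fixed 12-byte RTP header (RFC 3550) to a payload.
    static byte[] wrap(byte[] payload, int sequence, long timestamp, int ssrc) {
        ByteBuffer buf = ByteBuffer.allocate(12 + payload.length);
        buf.put((byte) 0x80);            // version 2, no padding/extension, no CSRCs
        buf.put((byte) 96);              // marker 0, payload type 96 (dynamic range)
        buf.putShort((short) sequence);  // incremented per packet; lets the receiver reorder
        buf.putInt((int) timestamp);     // media clock, e.g. 90 kHz for video
        buf.putInt(ssrc);                // synchronization source identifier
        buf.put(payload);
        return buf.array();
    }
}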

Full code:

import com.example.meitu2.utils.bfiOps;
import com.github.sarxos.webcam.Webcam;
import org.bytedeco.ffmpeg.global.avcodec;
import org.bytedeco.javacv.FFmpegFrameRecorder;
import org.bytedeco.javacv.Frame;
import org.bytedeco.javacv.Java2DFrameConverter;
import org.jitsi.service.neomedia.RawPacket;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RestController;
import java.awt.image.BufferedImage;
import java.io.*;
import java.net.*;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

@RestController
public class video3Controller {
    @Autowired
    Webcam webcam;
    @Autowired
    List<Webcam> webcams;
    @Autowired
    com.example.meitu2.pojos.websocket websocket;
    @Autowired
    Java2DFrameConverter converter;
    boolean isOpened = false;
    boolean isFileStreamOn = false;
    boolean needsLight = false;
    int lightIndex = 0;
    boolean needDuibidu = false;
    int duibiduIndex = 0;

    @GetMapping("RTPThread")
    public void RTPThread() throws SocketException, UnknownHostException, FFmpegFrameRecorder.Exception, InterruptedException {
        if (!webcam.isOpen()) {
            webcam.open();
        }
        isOpened = true;
        DatagramSocket udpSocket = new DatagramSocket();
        InetAddress targetHost = InetAddress.getByName("localhost");
        int targetPort = 2244;
//        ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
        File file = new File("Demo.mp4");
        FFmpegFrameRecorder recorder = new FFmpegFrameRecorder(file.getAbsoluteFile(), webcam.getViewSize().width, webcam.getViewSize().height);
        recorder.setVideoCodecName("libx264"); // was "lib264", a typo
        recorder.setVideoCodec(avcodec.AV_CODEC_ID_H264);
        recorder.setFormat("mp4");
        recorder.setFrameRate(24);
        System.out.println("recorder: " + recorder);
        recorder.start();
        Object lock = new Object();
        // Sender thread: drains the file into 1024-byte UDP packets, then truncates it.
        new Thread(() -> {
            try {
                Thread.sleep(100); // give the recorder a head start
            } catch (InterruptedException e) {
                throw new RuntimeException(e);
            }
            try {
                while (isFileStreamOn) {
                    InputStream stream = new FileInputStream(file);
                    synchronized (lock) {
                        while (stream.available() > 0) {
                            byte[] bytes = new byte[1024];
                            int length = stream.read(bytes);
                            RawPacket rawPacket = new RawPacket(bytes, 0, length);
                            DatagramPacket udpPacket = new DatagramPacket(rawPacket.getBuffer(), rawPacket.getLength(), targetHost, targetPort);
                            udpSocket.send(udpPacket);
                            System.out.println("Sent packet " + Arrays.toString(udpPacket.getData()));
                        }
                        System.out.println("File fully read; resetting it");
//                        file = new File(file.getAbsoluteFile());
                        try (FileWriter writer = new FileWriter(file)) {
                            writer.write(""); // write an empty string to truncate the file
                        } catch (IOException e) {
                            e.printStackTrace();
                        }
                        stream = new FileInputStream(file);
                    }
                }
                System.out.println("Outer sender loop finished");
            } catch (Exception e) {
                throw new RuntimeException(e);
            }
        }).start();
        // Capture loop: grabs frames and feeds them to the recorder.
        while (isOpened) {
            synchronized (lock) {
                System.out.println("Feeding a frame to the FFmpeg recorder");
                BufferedImage image = webcam.getImage();
                Frame frame = converter.getFrame(image);
                recorder.record(frame);
                if (!isFileStreamOn) {
                    isFileStreamOn = true;
                }
            }
            Thread.yield();
        }
        recorder.stop();
        recorder.release();
    }
}

Step 3: How to receive the RTP packets

Since I am not very fluent in frontend code, I decided to write a small Java backend that receives these packets, for verification.

public static void main(String[] args) throws IOException {
    DatagramSocket ds = new DatagramSocket(2244);
    Object lock = new Object();  // the lock does not really seem necessary here
    Queue<DatagramPacket> dps = new ArrayDeque<>();  // note: ArrayDeque is not thread-safe; see the BlockingQueue sketch below
    // Consumer thread: drains the queue and appends each payload to the file.
    new Thread() {
        @Override
        public void run() {
            try (FileOutputStream outputStream = new FileOutputStream("Demo.mp4")) {
                while (true) {
                    DatagramPacket packet = dps.poll();
                    if (packet == null) {
                        continue;  // queue momentarily empty; keep polling
                    }
                    outputStream.write(packet.getData(), 0, packet.getLength());
                }
            } catch (Exception e) {
                throw new RuntimeException(e);
            }
        }
    }.start();
    // Receiver loop: allocate a fresh buffer per packet so queued packets do not
    // all share one array, and receive before enqueueing (the original enqueued first).
    while (true) {
        byte[] bytes = new byte[1024];
        DatagramPacket dp = new DatagramPacket(bytes, bytes.length);
        ds.receive(dp);
        System.out.println("Received packet " + Arrays.toString(dp.getData()));
        dps.add(dp);
    }
}

Here again two threads work asynchronously: one receives, the other consumes.

Worth noting: because the data now arrives in discrete packets, a queue naturally preserves ordering. Earlier, when the data was transmitted as a continuous stream, operating on one long-lived stream first required sensible chunking, which is why the threads involved had to synchronize on a lock to avoid conflicts.
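Since the verification code above busy-waits and shares a non-thread-safe ArrayDeque between two threads, here is a sketch of the same receive/consume split built on a BlockingQueue, which handles both the ordering and the waiting without manual locks (the file name and port are taken from the example above):

import java.io.FileOutputStream;
import java.net.DatagramPacket;
import java.net.DatagramSocket;
import java.util.Arrays;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;

public class RtpReceiverSketch {
    public static void main(String[] args) throws Exception {
        BlockingQueue<byte[]> queue = new LinkedBlockingQueue<>();
        // Consumer: blocks on take() until a payload is available, then appends it.
        new Thread(() -> {
            try (FileOutputStream out = new FileOutputStream("Demo.mp4")) {
                while (true) {
                    out.write(queue.take());
                }
            } catch (Exception e) {
                throw new RuntimeException(e);
            }
        }).start();
        // Producer: receives datagrams and copies exactly the bytes that arrived,
        // because the receive buffer is reused on every iteration.
        try (DatagramSocket ds = new DatagramSocket(2244)) {
            byte[] buf = new byte[1024];
            while (true) {
                DatagramPacket dp = new DatagramPacket(buf, buf.length);
                ds.receive(dp);
                queue.put(Arrays.copyOf(dp.getData(), dp.getLength()));
            }
        }
    }
}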

Notes on earlier versions:

1. The backend saves an mp4 file directly into static resources; the frontend plays it as soon as it can fetch it

Backend code:

 @GetMapping("/setVideo")
    public synchronized Result setVideo() throws FFmpegFrameRecorder.Exception {  //默认获取三十秒的视频
        //加锁是为了防止webcam被多个线程调用
        if(!webcam.isOpen()){
            webcam.open();
        }
        num++;
        List<BufferedImage> list = new ArrayList<>();
        long start = System.currentTimeMillis();
        while (System.currentTimeMillis()-start<=10000){
            BufferedImage bfi = webcam.getImage();
            list.add(bfi);
        }
        System.out.println("鹿丸!,开存!");
        webcam.close();
        FFmpegFrameRecorder recorder = new FFmpegFrameRecorder("src/main/resources/static/output.mp4",webcam.getViewSize().width,webcam.getViewSize().height);
        recorder.setVideoCodecName("lib264");
        recorder.setVideoCodec(avcodec.AV_CODEC_ID_H264);
        recorder.setFormat("mp4");
        recorder.setFrameRate(24);
        System.out.println("recorder: "+recorder);
        recorder.start();
        for (int i = 0; i < list.size(); i++) {
            Frame frame = converter.getFrame(list.get(i));
            recorder.record(frame);
        }
        recorder.stop();
        recorder.release();
        System.out.println("存完");
        return Result.ok("down");
    }

Frontend code: the frontend is messy, with everything crammed into one Vue page, so it is hard to split out cleanly. The core code is below:

const getVideo = function () {
  axios.get("http://localhost:8080/output.mp4?t=" + (new Date()).getTime(), { responseType: 'blob' }).then((result) => {
    console.log("getVideo result: ", result);
    const blob = new Blob([result.data], { type: 'video/mp4' });
    const videoURL = URL.createObjectURL(blob);
    console.log("videoValue", videoURL);
    video.value = videoURL;
  })
}
const setVideo = function () {
  console.log('Calling setVideo; the backend starts recording');
  axios.get("http://localhost:8080/setVideo").then((result) => {  // axios is asynchronous, so videoURL has not been filled in yet at this point
    console.log("setVideo result", result);
  })
  setTimeout(function () {
    console.log("Calling getVideo to fetch the clip the backend just recorded");
    getVideo();
    setVideo();
  }, 12000);
}

The main idea is:

The backend records fixed-length clips in a loop (say 10 s each); as soon as a clip is finished it is handed to the frontend for playback, effectively using the mp4 file on the backend as a cache.

There were a few extra wrinkles. For example, whether the browser plays or downloads a fetched video depends on the Content-Disposition response header, which can be forced via setHeader: inline means play in the page, attachment means download. The video also had to be transcoded with external tools into a codec the browser supports......
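As a sketch of that header fix, here is a minimal Spring handler that forces inline playback. The class and mapping names are made up for illustration, and depending on the Spring Boot version the servlet import may be javax.servlet rather than jakarta.servlet:

import jakarta.servlet.http.HttpServletResponse;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RestController;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;

@RestController
public class InlineVideoSketch {
    @GetMapping("/playableVideo")
    public void playableVideo(HttpServletResponse response) throws IOException {
        response.setContentType("video/mp4");
        // inline -> play in the page; attachment -> prompt a download
        response.setHeader("Content-Disposition", "inline; filename=\"output.mp4\"");
        Files.copy(Path.of("src/main/resources/static/output.mp4"), response.getOutputStream());
    }
}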

2. Optimizing with WebSocket

WebSocket establishes a bidirectional channel between frontend and backend in which both sides are equals; crucially, the backend can push data to the frontend on its own initiative. This removes the timing mismatch caused by the earlier fixed-interval polling.

One more thing about WebSocket: its design provides hooks around the lifecycle of the connection, an idea that feels uncommon in Java but is everywhere in Vue, with its strong emphasis on component lifecycles. You can attach logic to connection open, connection close, message received, and message sent, which lets the backend cooperate with the frontend as an equal partner, as in the sketch below.
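For context, here is a minimal sketch of what the server-side endpoint behind that websocket bean might look like, using the standard javax.websocket annotations as the hooks. The article's actual com.example.meitu2.pojos.websocket class is not shown, so the sendOneMessage below is an assumed reconstruction, not the original:

import javax.websocket.OnClose;
import javax.websocket.OnMessage;
import javax.websocket.OnOpen;
import javax.websocket.Session;
import javax.websocket.server.PathParam;
import javax.websocket.server.ServerEndpoint;
import java.io.IOException;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

@ServerEndpoint("/websocket/{userId}")
public class WebSocketSketch {
    private static final Map<String, Session> SESSIONS = new ConcurrentHashMap<>();

    @OnOpen   // hook: a client connected
    public void onOpen(Session session, @PathParam("userId") String userId) {
        SESSIONS.put(userId, session);
    }

    @OnClose  // hook: the connection closed
    public void onClose(@PathParam("userId") String userId) {
        SESSIONS.remove(userId);
    }

    @OnMessage // hook: the client sent a message
    public void onMessage(String message) {
        System.out.println("received: " + message);
    }

    // counterpart of the sendOneMessage("0", "down") call used in the article
    public static void sendOneMessage(String userId, String message) throws IOException {
        Session session = SESSIONS.get(userId);
        if (session != null) {
            session.getBasicRemote().sendText(message);
        }
    }
}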

Backend code:

@GetMapping("openCam")
    public void openCam() throws IOException {
        if (!webcam.isOpen()) {
            webcam.open();
        }
        isOpened = true;
        DatagramSocket udpSocket = new DatagramSocket();
        InetAddress targetHost = InetAddress.getByName("localhost");
        int targetPort = 80;
        while (isOpened) {
            List<BufferedImage> list = new ArrayList<>();
            long start = System.currentTimeMillis();
            while (System.currentTimeMillis() - start <= 10000) {
                BufferedImage bfi = webcam.getImage();
                if (needsLight) {
                    bfi = bfiOps.light(bfi, lightIndex);
                }
                if (needDuibidu) {
                    bfi = bfiOps.duibidu(bfi, duibiduIndex);
                }
                list.add(bfi);
            }
            System.out.println("鹿丸!,开存!小节视频长度:" + (System.currentTimeMillis() - start));
            ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
            byte[] byteArray = outputStream.toByteArray();
            FFmpegFrameRecorder recorder = new FFmpegFrameRecorder(outputStream, webcam.getViewSize().width, webcam.getViewSize().height);
            recorder.setVideoCodecName("lib264");
            recorder.setVideoCodec(avcodec.AV_CODEC_ID_H264);
            recorder.setFormat("mp4");
            recorder.setFrameRate(24);
            System.out.println("recorder: " + recorder);
            recorder.start();
            for (int i = 0; i < list.size(); i++) {
                Frame frame = converter.getFrame(list.get(i));
                recorder.record(frame);
            }
            recorder.stop();
            recorder.release();
            System.out.println("后台保存完毕");
            RawPacket rtpPacket = new RawPacket(byteArray, 0, byteArray.length);//构造中需要传入起始终结,可能表明不是让一遍传完
            DatagramPacket udpPacket = new DatagramPacket(rtpPacket.getBuffer(), rtpPacket.getLength(), targetHost, targetPort);
            udpSocket.send(udpPacket);
            udpSocket.close();
            websocket.sendOneMessage("0", "down");
        }
        webcam.close();
    }

Frontend code:

const openCam = function() {
  socket = new WebSocket("ws://localhost:8080/websocket/" + userId);
  socket.onopen = function() {
    console.log("WebSocket opened");
    axios.get("http://localhost:8080/openCam").then((result) => {
      console.log(result);
    })
  }
  socket.onmessage = function(msg) {
    console.log("Message received", msg.data);
    if (msg.data === "down") {
      getVideo();
    }
  }
}

That concludes the code examples for implementing real-time video relay in Java. For more material on real-time video relay in Java, see the other related articles on jb51 (脚本之家)!
