Commit fc529b07 authored by 云逸之

Skip frames that exceed the delay timeout

Parent 442ffa14
......@@ -41,6 +41,6 @@ build_flags = -DOVER_LOAD_CAM_CONF
;-DCORE_DEBUG_LEVEL=ARDUHAL_LOG_LEVEL_VERBOSE
; Specify the frame size
;-DFRAMESIZE=FRAMESIZE_HVGA
; -DFRAMESIZE=FRAMESIZE_VGA
-DFRAMESIZE=FRAMESIZE_SVGA
-DFRAMESIZE=FRAMESIZE_VGA
; -DFRAMESIZE=FRAMESIZE_SVGA
;-DFRAMESIZE=FRAMESIZE_HD
\ No newline at end of file
......@@ -5,7 +5,7 @@
#ifndef OVER_LOAD_CAM_CONF
#define CAMERA_MODEL_AI_THINKER
#define FRAMESIZE FRAMESIZE_SVGA
#define FRAMESIZE FRAMESIZE_VGA
//#define HE_ZHOU_S3
#endif
......
......@@ -3,6 +3,7 @@ http.port=8003
http.clients.limit=10
stream.port=8004
stream.bind.port=8004
frame.delay.skip.timeout=3000
udp.video.buffer.pool.size=500
udp.video.dispatcher.thread.size=8
udp.video.channel.size=128
\ No newline at end of file
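The new frame.delay.skip.timeout key is the per-frame delay budget in milliseconds: a frame that waits longer than this between being received and being forwarded is discarded (3000 ms here; the code below falls back to 2000 ms when the key is absent). A minimal sketch of reading this file, assuming it is loaded through java.util.Properties; the file name and loading code are assumptions, not shown in this commit:

    import java.io.InputStream;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.util.Properties;

    public class ConfigLoadSketch {
        public static void main(String[] args) throws Exception {
            Properties props = new Properties();
            // "server.properties" is a placeholder name for the file shown above.
            try (InputStream in = Files.newInputStream(Path.of("server.properties"))) {
                props.load(in);
            }
            long frameDelaySkipTimeout =
                    Long.parseLong(props.getProperty("frame.delay.skip.timeout", "2000"));
            System.out.println("frame.delay.skip.timeout = " + frameDelaySkipTimeout + " ms");
        }
    }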
......@@ -38,7 +38,7 @@ public class UDP2Main {
}
public static void main(String[] args) {
System.out.println("version:" + version);
System.out.println("----- version:" + version + " -----\r\n[gitcode page] => https://gitcode.net/qq_26700087/simpleVideoServer \r\n");
AsyncTaskExecutor asyncTaskExecutor = new AsyncTaskExecutor();
asyncTaskExecutor.start();
......@@ -56,6 +56,7 @@ public class UDP2Main {
deviceChannel.setBufferPool(bufferPool);
deviceChannel.setStreamPort(getIntProp("stream.port", 8004));
deviceChannel.setDispatcherPoolSize(getIntProp("udp.video.dispatcher.thread.size", 8));
deviceChannel.setFrameDelayTimeout(getIntProp("frame.delay.skip.timeout" , 2000));
deviceChannel.setVideoChannelCount(videoChannelCount);
udpDeviceSnChannelBinder.start();
deviceChannel.start();
......
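UDP2Main reads every setting through getIntProp, whose implementation is outside this diff. A minimal sketch of the shape such a helper could take (an assumption; the real method most likely reads a shared Properties field rather than taking one as a parameter):

    import java.util.Properties;

    public class GetIntPropSketch {
        // Assumed helper: return the int value of a key, or the default when missing or malformed.
        static int getIntProp(Properties props, String key, int defaultValue) {
            String value = props.getProperty(key);
            if (value == null || value.isBlank()) {
                return defaultValue;
            }
            try {
                return Integer.parseInt(value.trim());
            } catch (NumberFormatException e) {
                return defaultValue;
            }
        }

        public static void main(String[] args) {
            Properties props = new Properties();
            props.setProperty("frame.delay.skip.timeout", "3000");
            System.out.println(getIntProp(props, "frame.delay.skip.timeout", 2000)); // 3000
            System.out.println(getIntProp(props, "stream.port", 8004));              // 8004 (default used)
        }
    }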
......@@ -8,12 +8,12 @@ public interface HttpConstant {
byte[] uri = "GET /video".getBytes(StandardCharsets.UTF_8);
int URI_LEN = uri.length;
byte[] NOT_FOUND = ("HTTP/1.1 404 \r\n" +
"Content-Type: text/html; charset=utf-8\r\n" +
"Content-Length: 12" +
"\r\n" +
"\r\n" +
"<h3>404</h3>").getBytes(StandardCharsets.UTF_8);
byte[] NOT_FOUND = ("""
HTTP/1.1 404 \r
Content-Type: text/html; charset=utf-8\r
Content-Length: 12\r
\r
<h3>404</h3>""").getBytes(StandardCharsets.UTF_8);
String PART_BOUNDARY = "123456789000000000000987654321";
String STREAM_RESP_HEAD = "HTTP/1.1 200 OK\r\n" +
"Content-Type: multipart/x-mixed-replace;boundary=" + PART_BOUNDARY + "\r\n" +
......
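Rewriting NOT_FOUND as a Java text block keeps the same bytes on the wire: text blocks normalize each line terminator to \n, so the explicit \r escapes restore the CRLF pairs HTTP expects, and the Content-Length of 12 still matches the 12-byte body. A quick check, as an illustration only (not part of the commit):

    import java.nio.charset.StandardCharsets;

    public class NotFoundLengthCheck {
        public static void main(String[] args) {
            // "<h3>404</h3>" is 12 ASCII characters, i.e. 12 bytes in UTF-8.
            System.out.println("<h3>404</h3>".getBytes(StandardCharsets.UTF_8).length); // prints 12
        }
    }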
......@@ -24,6 +24,10 @@ public class BufferPool {
this.bufferPoolSize = bufferPoolSize;
}
public int getBufferPoolSize() {
return bufferPoolSize;
}
private void init() {
for (int i = 0; i < bufferPoolSize; i++) {
frameBufferPool.add(new FrameBuffer(new byte[RECEIVE_BUFFER_SIZE]));
......
......@@ -2,11 +2,13 @@ package org.btik.server.video.device.udp2;
public class FrameBuffer {
// 2 + 4 + 2 bytes: 2 zero bytes, a 4-byte IP, and a 2-byte port
int channelIndex = -1;
volatile int channelIndex = -1;
byte[] data;
int size;
volatile int size;
volatile long time;
public FrameBuffer(byte[] data) {
this.data = data;
......
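channelIndex and size become volatile and a volatile time field is added because a FrameBuffer is filled on the UDP receive thread, handed to a dispatcher queue, and later read (and recycled through the pool) on another thread; time records when the datagram arrived so the dispatcher can drop frames that sat too long. A self-contained sketch of that timestamp-and-skip pattern, with simplified, illustrative class and field names:

    import java.util.concurrent.BlockingQueue;
    import java.util.concurrent.LinkedBlockingQueue;

    public class FrameSkipSketch {
        static class Frame {
            volatile long receivedAt; // analogous to FrameBuffer.time
            volatile int size;
        }

        public static void main(String[] args) throws InterruptedException {
            long frameDelayTimeout = 3000; // frame.delay.skip.timeout
            BlockingQueue<Frame> queue = new LinkedBlockingQueue<>();

            Frame frame = new Frame();     // filled on the receive thread
            frame.size = 1024;
            frame.receivedAt = System.currentTimeMillis();
            queue.add(frame);

            Frame next = queue.take();     // consumed on a dispatcher thread
            boolean stale = System.currentTimeMillis() > next.receivedAt + frameDelayTimeout;
            System.out.println(stale ? "frame dropped" : "frame forwarded, size " + next.size);
        }
    }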
......@@ -38,6 +38,11 @@ public class NewUDPDeviceChannel extends Thread implements DevChannel {
private int videoChannelCount = 128;
/**
* Prevents frame backlog when images arrive faster than the browser can consume them: a frame is dropped once the time from receiving it to forwarding it exceeds the delay below.
*/
private long frameDelayTimeout = 2000;
public void setStreamPort(int streamPort) {
this.streamPort = streamPort;
}
......@@ -54,6 +59,10 @@ public class NewUDPDeviceChannel extends Thread implements DevChannel {
this.videoChannelCount = videoChannelCount;
}
public void setFrameDelayTimeout(long frameDelayTimeout) {
this.frameDelayTimeout = frameDelayTimeout;
}
/**
* Valid values are 1, 2, 4, 8, 16, 32, 64, 128, or 256; choose one based on the number of CPU cores and devices.
* Other inputs are also mapped onto one of those values. A single camera device needs only one thread; too many threads with too few CPU cores
......@@ -83,6 +92,13 @@ public class NewUDPDeviceChannel extends Thread implements DevChannel {
frameDispatchers[i] = msgDispatcher;
executorService.submit(msgDispatcher);
}
System.out.printf("""
dispatcherPoolSize: %d \r
frameDelayTimeout: %d\r
streamPort:[UDP] %d\r
bufferPoolSize:%d\r
""",
dispatcherPoolSize, frameDelayTimeout, streamPort, bufferPool.getBufferPoolSize());
System.out.println("udp channel loaded");
super.start();
......@@ -99,7 +115,8 @@ public class NewUDPDeviceChannel extends Thread implements DevChannel {
// The last byte is the channel index, so the image data length is the packet length - 1
frameBuffer.size = datagramPacket.getLength() - 1;
frameBuffer.channelIndex = frameBuffer.data[frameBuffer.size] & 0xff;
frameDispatchers[frameBuffer.channelIndex & dispatcherPoolSize - 1 ].messages.add(frameBuffer);
frameBuffer.time = System.currentTimeMillis();
frameDispatchers[frameBuffer.channelIndex & dispatcherPoolSize - 1].messages.add(frameBuffer);
// Swap in a fresh buffer
frameBuffer = bufferPool.getFrameBuffer();
datagramPacket.setData(frameBuffer.data);
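The dispatcher is selected with channelIndex & dispatcherPoolSize - 1, a bit mask that only covers every dispatcher when dispatcherPoolSize is a power of two; that is why the setter comment above limits the value to 1, 2, 4, ..., 256 and says other inputs are mapped onto those values. The commit does not show how that mapping is done; one possible helper, purely as an illustration:

    public class DispatcherPoolSizeSketch {
        // Hypothetical mapping: clamp to [1, 256] and round down to a power of two so that
        // (channelIndex & (poolSize - 1)) always indexes inside the dispatcher array.
        static int toDispatcherPoolSize(int requested) {
            int clamped = Math.max(1, Math.min(256, requested));
            return Integer.highestOneBit(clamped);
        }

        public static void main(String[] args) {
            System.out.println(toDispatcherPoolSize(6));   // 4
            System.out.println(toDispatcherPoolSize(100)); // 64
            System.out.println(toDispatcherPoolSize(256)); // 256
        }
    }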
......@@ -138,10 +155,20 @@ public class NewUDPDeviceChannel extends Thread implements DevChannel {
try {
int channelIndex = segment.channelIndex;
if (channelIndex < 0 || channelIndex > videoChannelCount) {
System.out.print("\rThe channel index is illegal ");
System.out.print(channelIndex);
continue;
}
VideoChannel videoChannel = videoChannelTable[channelIndex];
if (videoChannel == null) {
System.out.print("\rvideoChannel index is not registered and may need to be restarted the ESP ,frame size: ");
System.out.print(segment.size);
continue;
}
// Skip frames whose delay exceeds the timeout
if (System.currentTimeMillis() > segment.time + frameDelayTimeout) {
System.out.print("\rframe time out continue, size : ");
System.out.print(segment.size);
continue;
}
videoChannel.sendFrame(segment.data, segment.size);
......
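As a worked example of the new check (numbers are illustrative): with frame.delay.skip.timeout = 3000 ms, a frame stamped with time = 10000 that reaches the dispatcher at 13500 is dropped, since 13500 > 10000 + 3000, while one dequeued at 12900 is still forwarded via sendFrame.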
......@@ -48,7 +48,7 @@ public class DeviceSnChannelBinder extends Thread {
channelIndexQueue.add(i);
}
this.port = port;
System.out.println("sn channel binder started");
System.out.printf("snChannelBinderPort:[TCP] %d \r\nbinder stared\r\n", port);
}
......
......@@ -20,7 +20,6 @@ import java.util.concurrent.Executors;
/***
* VideoServer implementation that composes the video stream as HTTP MJPEG
*
* */
public class BioHttpVideoServer extends Thread implements HttpConstant, VideoServer {
private boolean runFlag = true;
......@@ -50,6 +49,7 @@ public class BioHttpVideoServer extends Thread implements HttpConstant, VideoSer
@Override
public synchronized void start() {
super.start();
System.out.printf("httpPort:[TCP] %d\n" , httpPort);
System.out.println("bio video server started");
}
......@@ -79,7 +79,7 @@ public class BioHttpVideoServer extends Thread implements HttpConstant, VideoSer
continue;
}
String channelStr = new String(channel);
System.out.println("pre open" + new Date());
System.out.println("pre open " + new Date());
executorService.submit(() -> doStreamOpen(client, channelStr));
} catch (IOException e) {
disConnect(client, e);
......@@ -143,7 +143,7 @@ public class BioHttpVideoServer extends Thread implements HttpConstant, VideoSer
public VideoChannel createChannel(byte[] channelId) {
channelId[0] = HTTP_PATH_SEPARATOR;
String channelIdPath = new String(channelId);
System.out.println("new channel:");
System.out.println("\r\nChannel " +channelIdPath + " is online");
printHttpAddress(channelIdPath);
return videoChannelMap.computeIfAbsent(channelIdPath,
channelIdStr -> new MJPEGVideoChannel(channelIdStr, asyncTaskExecutor));
......
......@@ -7,12 +7,13 @@ import org.btik.server.video.device.iface.VideoChannel;
import java.io.IOException;
import java.io.OutputStream;
import java.net.Socket;
import java.net.*;
import java.util.Collections;
import java.util.Date;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
/**
* Video channel
* Each camera that is online has its own channel
......@@ -24,7 +25,7 @@ public class MJPEGVideoChannel implements VideoChannel, HttpConstant {
/**
* Not used yet; when debugging it helps tell which device a channel belongs to
*/
private String channelId;
private final String channelId;
private final Set<Socket> clients = Collections.newSetFromMap(new ConcurrentHashMap<>());
......