Compare commits: 98cdaad09e...master
13 Commits:
e3af31f1e0
ed8c594a30
ea952e1981
c85581749b
a134513b9c
7c780e0017
54246257c9
d4aa4eabb4
08a09e3b15
00a5bceffb
8ba524087d
1ad3ded60d
d1e7834b8c

1  .gitignore  (vendored)
@@ -1,2 +1,3 @@
.venv
.idea
build

41  README.md
@@ -3,38 +3,37 @@
This project is a demo for streaming video output from multiple "client" devices to one "server". This is a basic demo
of what sauron-cv seeks to accomplish.

## Installation
## Building

It is strongly recommended that you use a virtual environment. These instructions will assume you are using venv, you
can substitute this with your preferred environment management. You will need to make sure that the `virtualenv` package
is installed globally, either via pip or the `python3-virtualenv` package in your system package manager.
This project uses Meson for build management.

When first cloning this repo, run the following:
### Install Dependencies

Install the following packages from your distribution package manager:
- `meson`
- `opencv`
- `boost`

A better procedure for this (hopefully involving Meson) will be added / documented at a later date.

### Setup Meson

```shell
python -m venv .venv
source .venv/bin/activate
pip install -r requirements.txt
meson setup build
```

This will create a virtual environment, enter that virtual environment, and install the required packages.
*NOTE FOR JETBRAINS / CLION USERS: PLEASE SET YOUR MESON BUILD DIRECTORY TO `build` IN THE IDE SETTINGS UNDER "Build /
Execution / Deployment" -> "Meson"*

If you start a new shell, you will need to re-enter the virtual environment:
### Compiling

```shell
source .venv/bin/activate
meson build client # for client only
meson build server # for server only
```

## Running

### Client

```shell
python -m client
```

### Server

```shell
python -m server
./build/client # for client application
./build/server # for server application
```

43  client.cpp  (new file)
@@ -0,0 +1,43 @@
#include <opencv2/videoio.hpp>
#include <cstring>
#include <iostream>
#include <netinet/in.h>
#include <sys/socket.h>
#include <unistd.h>
#include "transfer.h"

using namespace std;

int main() {
    // create video capture
    cv::VideoCapture cap = cv::VideoCapture(0);

    // create socket
    int clientSocket = socket(AF_INET, SOCK_STREAM, 0);

    // specifying address
    sockaddr_in serverAddress;
    serverAddress.sin_family = AF_INET;
    serverAddress.sin_port = htons(8080);
    serverAddress.sin_addr.s_addr = INADDR_ANY;

    cv::Mat image = cv::Mat::zeros(cv::Size(640, 480), CV_8UC3);

    info("Ready to connect.");
    // sending connection request
    connect(clientSocket, reinterpret_cast<sockaddr *>(&serverAddress),
            sizeof(serverAddress));

    info("Connected.");
    // create buffer for serialization
    vector<uchar> imgbuf;

    while (true) {
        cap.read(image);
        trace("Sending image");
        sendImage(clientSocket, image, imgbuf);
    }

    close(clientSocket);
    return 0;
}

76  client.py  (deleted)
@@ -1,76 +0,0 @@
import cv2
import socket
import numpy as np
import uuid

uuid = uuid.uuid4()

from common import StdPacket, InterlacedPacket, DoublyInterlacedPacket

UDP_IP = "127.0.0.1"
UDP_PORT = 5005

# Create a VideoCapture object
cap = cv2.VideoCapture(0)  # 0 represents the default camera

# Check if camera opened successfully
if not cap.isOpened():
    print("Error opening video stream or file")

def send_packet(sock, packet):
    sock.sendto(packet, (UDP_IP, UDP_PORT))

sock = socket.socket(socket.AF_INET, # Internet
                     socket.SOCK_DGRAM) # UDP

def breakdown_image_norm(frame):
    (cols, rows, colors) = frame.shape
    # break the array down into 16x16 chunks, then transmit them as UDP packets
    for i in range(0, cols, 16):
        for j in range(0, rows, 16):
            # print("Sending frame segment (%d, %d)", i, j)
            pkt = StdPacket(uuid, j, i, frame[i:i + 16, j:j + 16])
            send_packet(sock, pkt.to_bytestr())

def breakdown_image_interlaced(frame):
    (cols, rows, colors) = frame.shape
    # break the array into 16x32 chunks. we'll split those further into odd and even rows
    # and send each as UDP packets. this should make packet loss less obvious
    for i in range(0, cols, 32):
        for j in range(0, rows, 16):
            # print("Sending frame segment (%d, %d)", i, j)
            pkt = InterlacedPacket(uuid, j, i, False, frame[i:i + 32:2, j:j + 16])
            send_packet(sock, pkt.to_bytestr())

    for i in range(0, cols, 32):
        for j in range(0, rows, 16):
            # print("Sending frame segment (%d, %d)", i, j)
            pkt = InterlacedPacket(uuid, j, i, True, frame[i + 1:i + 32:2, j:j + 16])
            send_packet(sock, pkt.to_bytestr())

def breakdown_image_dint(frame):
    (cols, rows, colors) = frame.shape
    # break the array into 16x32 chunks. we'll split those further into odd and even rows
    # and send each as UDP packets. this should make packet loss less obvious
    for l in range(0, 4):
        for i in range(0, cols, 32):
            for j in range(0, rows, 32):
                # print("Sending frame segment (%d, %d)", i, j)
                i_even = l % 2 == 0
                j_even = l >= 2
                pkt = DoublyInterlacedPacket(uuid, j, i, j_even, i_even, frame[i + i_even:i + 32:2, j + j_even:j + 32:2])
                send_packet(sock, pkt.to_bytestr())

while True:
    # Capture frame-by-frame
    ret, frame = cap.read()

    # If frame is read correctly, ret is True
    if not ret:
        print("Can't receive frame (stream end?). Exiting ...")
        break

    breakdown_image_dint(frame)

# Release the capture and close all windows
cap.release()

58  legacy/README.md  (new file)
@@ -0,0 +1,58 @@
# Video Streaming Proof of Concept

This project is a demo for streaming video output from multiple "client" devices to one "server". This is a basic demo
of what sauron-cv seeks to accomplish.

## Installation

It is strongly recommended that you use a virtual environment. These instructions will assume you are using venv, you
can substitute this with your preferred environment management. You will need to make sure that the `virtualenv` package
is installed globally, either via pip or the `python3-virtualenv` package in your system package manager.

When first cloning this repo, run the following:

```shell
python -m venv .venv
source .venv/bin/activate
pip install -r requirements.txt
```

This will create a virtual environment, enter that virtual environment, and install the required packages.

If you start a new shell, you will need to re-enter the virtual environment:

```shell
source .venv/bin/activate
```

## Running

### Client

To run the client with a localhost target:

```shell
python -m client
```

To target an external server, provide its IP address using the `-s` flag:

```shell
python -m client -s [server IP address]
```

### Server

```shell
python -m server
```

### Common Flags

Make sure that these match between your client and server!!

| Short | Long | Description | Default |
|-------|---|---|---|
| `-p` | `--port` | The port for the client / server to communicate on. | `5005` |
| `-W` | `--width` | Image width in pixels. | `640` |
| `-H` | `--height` | Image height in pixels. | `480` |

129  legacy/client.py  (new file)
@@ -0,0 +1,129 @@
import argparse

import cv2
import socket
import numpy as np
import uuid

from common import StdPacket, InterlacedPacket, DoublyInterlacedPacket, TiledImagePacket


def send_packet(sock, packet):
    sock.sendto(packet, (UDP_IP, UDP_PORT))

def breakdown_image_norm(frame, last_frame):
    (cols, rows, colors) = frame.shape
    # break the array down into 16x16 chunks, then transmit them as UDP packets
    for i in range(0, cols, 16):
        for j in range(0, rows, 16):
            # print("Sending frame segment (%d, %d)", i, j)
            arr = frame[i:i + 16, j:j + 16]
            last_arr = last_frame[i:i + 16, j:j + 16]
            # only update if image segments are different
            if not np.allclose(arr, last_arr):
                pkt = StdPacket(uuid, j, i, arr)
                send_packet(sock, pkt.to_bytestr())

def breakdown_image_interlaced(frame, last_frame):
    (cols, rows, colors) = frame.shape
    # break the array into 16x32 chunks. we'll split those further into odd and even rows
    # and send each as UDP packets. this should make packet loss less obvious
    for i in range(0, cols, 32):
        for j in range(0, rows, 16):
            # print("Sending frame segment (%d, %d)", i, j)
            arr = frame[i:i + 32:2, j:j + 16]
            last_arr = last_frame[i:i + 32:2, j:j + 16]
            if not np.allclose(arr, last_arr):
                pkt = InterlacedPacket(uuid, j, i, False, arr)
                send_packet(sock, pkt.to_bytestr())

    for i in range(0, cols, 32):
        for j in range(0, rows, 16):
            # print("Sending frame segment (%d, %d)", i, j)
            arr = frame[i + 1:i + 32:2, j:j + 16]
            last_arr = last_frame[i + 1:i + 32:2, j:j + 16]
            # only update if image segments are different
            if not np.allclose(arr, last_arr):
                pkt = InterlacedPacket(uuid, j, i, True, arr)
                send_packet(sock, pkt.to_bytestr())

def breakdown_image_dint(frame, last_frame):
    (cols, rows, colors) = frame.shape
    # break the array into 16x32 chunks. we'll split those further into odd and even rows
    # and send each as UDP packets. this should make packet loss less obvious
    for l in range(0, 4):
        for i in range(0, cols, 32):
            for j in range(0, rows, 32):
                # print("Sending frame segment (%d, %d)", i, j)
                i_even = l % 2 == 0
                j_even = l >= 2

                # breakdown image
                arr = frame[i + i_even:i + 32:2, j + j_even:j + 32:2]
                last_arr = last_frame[i + i_even:i + 32:2, j + j_even:j + 32:2]
                # only update if image segments are different
                if not np.allclose(arr, last_arr):
                    pkt = DoublyInterlacedPacket(uuid, j, i, j_even, i_even, arr)
                    send_packet(sock, pkt.to_bytestr())

def breakdown_image_tiled(frame):
    (cols, rows, colors) = frame.shape
    # break the array into 16x32 chunks. we'll split those further into odd and even rows
    # and send each as UDP packets. this should make packet loss less obvious
    xslice = cols // 16
    yslice = rows // 16
    for i in range(0, xslice):
        for j in range(0, yslice):
            # print("Sending frame segment (%d, %d)", i, j)
            pkt = TiledImagePacket(uuid, j, i, rows, cols, frame[i:cols:xslice, j:rows:yslice])
            send_packet(sock, pkt.to_bytestr())

if __name__ == '__main__':
    # argument parser
    parser = argparse.ArgumentParser(description="Proof-of-concept client for sauron-cv")
    parser.add_argument("-p", "--port", type=int, default=5005)
    parser.add_argument("-s", "--server", type=str, default="127.0.0.1")
    parser.add_argument("-W", "--width", type=int, default=640)
    parser.add_argument("-H", "--height", type=int, default=480)
    parser.add_argument("-d", "--device", type=int, default=0)
    args = parser.parse_args()

    # give this client its very own UUID
    uuid = uuid.uuid4()

    # give target IP address
    UDP_IP = args.server
    UDP_PORT = args.port
    WIDTH = args.width
    HEIGHT = args.height
    DEVICE = args.device

    # Create a VideoCapture object
    cap = cv2.VideoCapture(DEVICE)  # 0 represents the default camera
    cap.set(cv2.CAP_PROP_FRAME_WIDTH, WIDTH)
    cap.set(cv2.CAP_PROP_FRAME_HEIGHT, HEIGHT)

    # Check if camera opened successfully
    if not cap.isOpened():
        print("Error opening video stream or file")

    # create the socket
    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)

    frame = np.zeros((HEIGHT, WIDTH, 3), dtype=np.uint8)
    last_frame = np.zeros((HEIGHT, WIDTH, 3), dtype=np.uint8)

    while True:
        last_frame = frame.copy()
        # Capture frame-by-frame
        ret, frame = cap.read()

        # If frame is read correctly, ret is True
        if not ret:
            print("Can't receive frame (stream end?). Exiting ...")
            break

        breakdown_image_dint(frame, last_frame)

    # Release the capture and close all windows
    cap.release()
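
The four-pass scheme in `breakdown_image_dint` above splits every 32x32 block into its four row/column parity sub-lattices, so a dropped UDP packet removes only every other pixel in both directions rather than a solid square. A minimal illustrative sketch of that indexing (not part of the diff; `block`, `i_off`, and `j_off` are stand-ins for one 32x32 slice of the frame and the boolean offsets used above):

```python
# Illustrative only -- `block` is a stand-in for one 32x32 slice of the frame.
import numpy as np

block = np.arange(32 * 32).reshape(32, 32)

passes = []
for l in range(4):
    i_off = 1 if l % 2 == 0 else 0   # mirrors `i_even = l % 2 == 0` (True acts as a +1 offset)
    j_off = 1 if l >= 2 else 0       # mirrors `j_even = l >= 2`
    passes.append(block[i_off:32:2, j_off:32:2])

# each pass carries one 16x16 parity sub-lattice, and the four passes cover the block exactly once
assert all(p.shape == (16, 16) for p in passes)
assert np.array_equal(np.sort(np.concatenate([p.ravel() for p in passes])),
                      np.sort(block.ravel()))
```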

@@ -3,7 +3,18 @@ from abc import ABC, abstractmethod
import numpy as np
from uuid import UUID

# The basic structure of the packet is as follows:
# FFFF FFFF FFFF FFFF FFFF FFFF FFFF FFFF FFFF FFFF FFFF FFFF FFFF ... FFFF
# |       uuid       |    |    |
# |                  |    |    image data: contains the 16x16 image slice bitpacked from a NumPy array
# |                  |    y: the y position of this packet in the original image
# |                  x: the x position of this packet in the original image
# | uuid: matches the packet to the requesting client
# Other packet types may change this structure. Need to standardize this somehow.

class Packet(ABC):
    """Generic structure for a video streaming packet. Contains a slice of the full image, which will be reconstructed
    by the server."""
    size: int
    def __init__(self, uuid: UUID, x: int, y: int, array: np.ndarray):
        self.uuid = uuid

@@ -13,13 +24,16 @@ class Packet(ABC):

    @abstractmethod
    def to_bytestr(self) -> bytes:
        """Convert a packet object into a bytestring."""
        pass

    @abstractmethod
    def apply(self, image: np.ndarray) -> np.ndarray:
        """Apply this packet to an image."""
        pass

class StdPacket(Packet):
    """A standard packet with no interlacing. Sends a 16x16 chunk of an image."""
    size = 16 + 4 + 4 + 768
    def __init__(self, uuid: UUID, x: int, y: int, array: np.ndarray):
        super().__init__(uuid, x, y, array)

@@ -40,6 +54,7 @@ class StdPacket(Packet):
        return image

def from_bytes_std(b: bytes) -> StdPacket:
    """Convert a byte string obtained via UDP into a packet object."""
    uuid = UUID(bytes = b[0:16])
    x = int.from_bytes(b[16:20], signed = False)
    y = int.from_bytes(b[20:24], signed = False)

@@ -49,6 +64,7 @@ def from_bytes_std(b: bytes) -> StdPacket:


class InterlacedPacket(Packet):
    """A packet with horizontal interlacing. Sends half of a 16x32 chunk of an image, every other row"""
    size = 16 + 4 + 4 + 4 + 768
    def __init__(self, uuid: UUID, x: int, y: int, even: bool, array: np.ndarray):
        super().__init__(uuid, x, y, array)

@@ -72,6 +88,7 @@ class InterlacedPacket(Packet):


def from_bytes_int(b: bytes) -> InterlacedPacket:
    """Convert a byte string obtained via UDP into a packet object."""
    uuid = UUID(bytes=b[0:16])
    x = int.from_bytes(b[16:20], signed=False)
    y = int.from_bytes(b[20:24], signed=False)

@@ -81,6 +98,8 @@ def from_bytes_int(b: bytes) -> InterlacedPacket:
    return InterlacedPacket(uuid, x, y, even, array)

class DoublyInterlacedPacket(Packet):
    """A packet with horizontal interlacing. Sends one quarter of a 32x32 chunk of an image. This will alternate rows
    and columns based on the value of even_x and even_y."""
    size = 16 + 4 + 4 + 4 + 768
    def __init__(self, uuid: UUID, x: int, y: int, even_x: bool, even_y: bool, array: np.ndarray):
        super().__init__(uuid, x, y, array)

@@ -106,6 +125,7 @@ class DoublyInterlacedPacket(Packet):


def from_bytes_dint(b: bytes) -> DoublyInterlacedPacket:
    """Convert a byte string obtained via UDP into a packet object."""
    uuid = UUID(bytes=b[0:16])
    x = int.from_bytes(b[16:20], signed=False)
    y = int.from_bytes(b[20:24], signed=False)

@@ -114,3 +134,38 @@ def from_bytes_dint(b: bytes) -> DoublyInterlacedPacket:
    array = np.frombuffer(b[28:], np.uint8).reshape(16, 16, 3)

    return DoublyInterlacedPacket(uuid, x, y, even_x, even_y, array)

class TiledImagePacket(Packet):
    """Distributed selection from image."""
    size = 16 + 4 + 4 + 768

    def __init__(self, uuid: UUID, x: int, y: int, width: int, height: int, array: np.ndarray):
        super().__init__(uuid, x, y, array)
        self.width = width
        self.height = height
        self.xslice = width // 16
        self.yslice = height // 16

    def to_bytestr(self) -> bytes:
        bytestr = b""
        bytestr += self.uuid.bytes
        bytestr += self.x.to_bytes(length=4, signed=False)
        bytestr += self.y.to_bytes(length=4, signed=False)
        bytestr += self.array.tobytes()
        return bytestr

    def apply(self, image: np.ndarray) -> np.ndarray:
        x = self.x
        y = self.y
        arr = self.array
        image[y:self.height:self.yslice, x:self.width:self.xslice] = arr
        return image

def from_bytes_tiled(b: bytes) -> TiledImagePacket:
    """Convert a byte string obtained via UDP into a packet object."""
    uuid = UUID(bytes = b[0:16])
    x = int.from_bytes(b[16:20], signed = False)
    y = int.from_bytes(b[20:24], signed = False)
    array = np.frombuffer(b[24:], np.uint8).reshape(16, 16, 3)

    return TiledImagePacket(uuid, x, y, 640, 480, array)
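
The comment at the top of the first hunk above describes the wire format that `StdPacket.to_bytestr` and `from_bytes_std` implement: a 16-byte UUID, two 4-byte unsigned integers for x and y, then 768 bytes of 16x16x3 uint8 pixel data, for a total of 792 bytes (`size = 16 + 4 + 4 + 768`). The following standalone sketch of that layout is not part of the diff; it assumes the big-endian byte order that `int.to_bytes` defaults to:

```python
# Illustrative only -- mirrors the layout documented above, not imported from the packets module.
import uuid
import numpy as np

def pack_std(u: uuid.UUID, x: int, y: int, tile: np.ndarray) -> bytes:
    # 16-byte UUID | 4-byte x | 4-byte y | 768 bytes of 16x16x3 uint8 pixels
    assert tile.shape == (16, 16, 3) and tile.dtype == np.uint8
    return u.bytes + x.to_bytes(4, "big") + y.to_bytes(4, "big") + tile.tobytes()

def unpack_std(b: bytes):
    u = uuid.UUID(bytes=b[0:16])
    x = int.from_bytes(b[16:20], "big")
    y = int.from_bytes(b[20:24], "big")
    tile = np.frombuffer(b[24:], np.uint8).reshape(16, 16, 3)
    return u, x, y, tile

payload = pack_std(uuid.uuid4(), 32, 48, np.zeros((16, 16, 3), dtype=np.uint8))
assert len(payload) == 16 + 4 + 4 + 768   # matches StdPacket.size
assert unpack_std(payload)[1:3] == (32, 48)
```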

4  legacy/requirements.txt  (new file)
@@ -0,0 +1,4 @@
opencv-python
numpy
rich
asyncudp

112  legacy/server.py  (new file)
@@ -0,0 +1,112 @@
from typing import Dict

import cv2
import socket
import numpy as np
from datetime import datetime
import asyncio
import asyncudp
import argparse
from rich.console import Console

from common import Packet, DoublyInterlacedPacket, from_bytes_dint

class Client:
    """Class for tracking client state, including current frame data and time since last update."""
    def __init__(self):
        self.last_updated = datetime.now()
        self.frame = np.ndarray((HEIGHT, WIDTH, 3), dtype=np.uint8)

    def update(self, pkt: Packet):
        """Apply a packet to the client frame. Update last client update to current time."""
        self.frame = pkt.apply(self.frame)

        self.last_updated = datetime.now()

    def latency(self) -> float:
        """Return the time since the last client update."""
        return (datetime.now() - self.last_updated).total_seconds()

    def read(self) -> np.ndarray:
        """Return the current frame."""
        return self.frame


# Dictionary of client states stored by UUID
frames: Dict[str, Client] = {}

async def show_frames():
    """Asynchronous coroutine to display frames in OpenCV debug windows."""
    while True:
        # drop clients that have not sent packets for > 5 seconds
        for id in list(frames.keys()):
            if frames[id].latency() >= 5:
                console.log(f"Client likely lost connection, dropping [bold red]{id}[/bold red]")
                cv2.destroyWindow(id)
                frames.pop(id)
            else:
                # show the latest available frame
                cv2.imshow(id, frames[id].read())

        cv2.waitKey(1)
        # this is necessary to allow asyncio to swap between reading packets and rendering frames
        await asyncio.sleep(0.05)

async def listen(ip: str, port: int):
    """Asynchronous coroutine to listen for / read client connections."""
    sock = await asyncudp.create_socket(local_addr=(ip, port))

    console.log("Ready to accept connections.", style="bold green")

    while True:
        # receive packets
        data, addr = await sock.recvfrom()

        if data:
            # convert the byte string into a packet object
            pkt = from_bytes_dint(data)

            uuid = str(pkt.uuid)

            # if this is a new client, give it a new image
            if uuid not in frames.keys():
                console.log(f"New client acquired, naming [bold cyan]{uuid}[bold cyan]")
                frames[uuid] = Client()

            frames[uuid].update(pkt)


if __name__ == "__main__":
    # argument parser
    parser = argparse.ArgumentParser(description="Proof-of-concept server for sauron-cv")
    parser.add_argument("-p", "--port", type=int, default=5005)
    parser.add_argument("-l", "--listen", type=str, default="0.0.0.0")
    parser.add_argument("-W", "--width", type=int, default=640)
    parser.add_argument("-H", "--height", type=int, default=480)
    args = parser.parse_args()

    # console
    console = Console()

    # assign constants based on argument parser
    UDP_IP = args.listen
    UDP_PORT = args.port

    HEIGHT = args.height
    WIDTH = args.width

    # create the async event loop
    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)

    # create async tasks for reading network packets, displaying windows
    loop.create_task(listen(UDP_IP, UDP_PORT))
    loop.create_task(show_frames())
    try:
        loop.run_forever()
    finally:
        loop.run_until_complete(loop.shutdown_asyncgens())
        loop.close()

    # Release the capture and close all windows
    cv2.destroyAllWindows()

178  logging.h  (new file)
@@ -0,0 +1,178 @@
#ifndef LOGGING_H
#define LOGGING_H

#include <iostream>
#include <iomanip>
#include <chrono>
#include <ctime>

// Logging levels
#define LOG_ERROR 0
#define LOG_WARN 1
#define LOG_INFO 2
#define LOG_DEBUG 3
#define LOG_TRACE 4

// Set logging arguments
// TODO: set these by compiler arguments or env vars or something
#define LOGGING LOG_DEBUG // logging level
#define LOGGING_COLOR true // enable color
#define LOGGING_TIMESTAMP true // enable timestamp
#define LOGGING_TIMESTAMP_FMT "%Y-%m-%dT%H:%M:%S%z" // timestamp format (local time)
#define LOGGING_POSITION true // display position (only works on C++20 or newer)

// Color codes
#define ANSI_RESET "\033[0m"
#define ANSI_BLACK "\033[30m" /* Black */
#define ANSI_RED "\033[31m" /* Red */
#define ANSI_GREEN "\033[32m" /* Green */
#define ANSI_YELLOW "\033[33m" /* Yellow */
#define ANSI_BLUE "\033[34m" /* Blue */
#define ANSI_MAGENTA "\033[35m" /* Magenta */
#define ANSI_CYAN "\033[36m" /* Cyan */
#define ANSI_WHITE "\033[37m" /* White */
#define ANSI_BOLD "\033[1m" /* Bold */

template <typename T>
void print(T t) {
    std::cout << t << std::endl;
}

template <typename T, typename... Args>
void print(T t, Args... args) {
    std::cout << t;
    print(args...);
}

inline void printTimestamp() {
#if LOGGING_TIMESTAMP
    auto now = std::chrono::system_clock::now();
    auto time_c = std::chrono::system_clock::to_time_t(now);
    std::tm time_tm;
    localtime_r(&time_c, &time_tm);
    std::cout << std::put_time(&time_tm, LOGGING_TIMESTAMP_FMT) << ": ";
#endif
}

// if we're on C++20 or later, then use the source_location header and add source location to logs
#if __cplusplus >= 202002L
#include <source_location>

inline void printPosition(std::source_location& location) {
#if LOGGING_POSITION
    std::cout << location.file_name() << ":" << location.function_name << ":" << ANSI_CYAN << location.line() << ANSI_RESET << ": ";
#endif
}

inline void printHeader(std::string name, std::string color, std::source_location& location) {
#if LOGGING_COLOR
    std::cout << ANSI_BOLD << color << "[" << name << "] " << ANSI_RESET;
    printTimestamp();
    printPosition(location);
#else
    printHeader(name);
#endif
}

inline void printHeader(std::string name, std::source_location& location) {
    std::cout << "[" << name << "] ";
    printTimestamp();
    printPosition(location);
}

template <typename... Args, typename Sl = std::source_location>
void trace(Args... args, Sl location = std::source_location::current()) {
#if LOGGING >= LOG_TRACE
    printHeader("TRACE", ANSI_CYAN, location);
    print(args...);
#endif
}

template <typename... Args, typename Sl = std::source_location>
void debug(Args... args, Sl location = std::source_location::current()) {
#if LOGGING >= LOG_DEBUG
    printHeader("DEBUG", ANSI_MAGENTA, location);
    print(args...);
#endif
}

template <typename... Args, typename Sl = std::source_location>
void info(Args... args, Sl location = std::source_location::current()) {
#if LOGGING >= LOG_INFO
    printHeader("INFO", ANSI_GREEN, location);
    print(args...);
#endif
}

template <typename... Args, typename Sl = std::source_location>
void warn(Args... args, Sl location = std::source_location::current()) {
#if LOGGING >= LOG_WARN
    printHeader("WARN", ANSI_YELLOW, location);
    print(args...);
#endif
}

template <typename... Args, typename Sl = std::source_location>
void error(Args... args, Sl location = std::source_location::current()) {
#if LOGGING >= LOG_ERROR
    printHeader("ERROR", ANSI_RED, location);
    print(args...);
#endif
}
#else
inline void printHeader(std::string name, std::string color) {
#if LOGGING_COLOR
    std::cout << ANSI_BOLD << color << "[" << name << "] " << ANSI_RESET;
    printTimestamp();
#else
    printHeader(name);
#endif
}

inline void printHeader(std::string name) {
    std::cout << "[" << name << "] ";
    printTimestamp();
}

template <typename... Args>
void trace(Args... args) {
#if LOGGING >= LOG_TRACE
    printHeader("TRACE", ANSI_CYAN);
    print(args...);
#endif
}

template <typename... Args>
void debug(Args... args) {
#if LOGGING >= LOG_DEBUG
    printHeader("DEBUG", ANSI_MAGENTA);
    print(args...);
#endif
}

template <typename... Args>
void info(Args... args) {
#if LOGGING >= LOG_INFO
    printHeader("INFO", ANSI_GREEN);
    print(args...);
#endif
}

template <typename... Args>
void warn(Args... args) {
#if LOGGING >= LOG_WARN
    printHeader("WARN", ANSI_YELLOW);
    print(args...);
#endif
}

template <typename... Args>
void error(Args... args) {
#if LOGGING >= LOG_ERROR
    printHeader("ERROR", ANSI_RED);
    print(args...);
#endif
}
#endif

#endif //LOGGING_H

33  meson.build  (new file)
@@ -0,0 +1,33 @@
#=======================================================================================================================
# PROJECT SETTINGS
#=======================================================================================================================
project('video-streaming-poc', 'cpp', version : '0.0.1-SNAPSHOT',
        default_options : ['c_std=c17', 'cpp_std=c++20'])

#=======================================================================================================================
# DEPENDENCIES
#=======================================================================================================================
# opencv dependency
opencv = dependency('opencv4', version : '>=4.0.0')
# boost dependency
boost = dependency('boost')

#=======================================================================================================================
# SOURCE FILES
#=======================================================================================================================
# common files between client / server
common = ['transfer.h', 'logging.h']
# client-only files
client = common + ['client.cpp']
# server-only files
server = common + ['server.cpp']

#=======================================================================================================================
# BUILD TARGETS
#=======================================================================================================================
# client executable
client_exe = executable('client', client,
    dependencies : [opencv, boost])
# server executable
server_exe = executable('server', server,
    dependencies : [opencv, boost])

@@ -1,2 +0,0 @@
opencv-python
numpy

55  server.cpp  (new file)
@@ -0,0 +1,55 @@
#include <opencv2/highgui.hpp>
#include <opencv2/core/mat.hpp>
#include <cstring>
#include <iostream>
#include <netinet/in.h>
#include <sys/socket.h>
#include <unistd.h>
#include "transfer.h"

using namespace std;

int main() {
    // TODO: read image data from socket instead of VideoCapture
    // creating socket
    int serverSocket = socket(AF_INET, SOCK_STREAM, 0);

    // specifying the address
    sockaddr_in serverAddress;
    serverAddress.sin_family = AF_INET;
    serverAddress.sin_port = htons(8080);
    serverAddress.sin_addr.s_addr = INADDR_ANY;

    // binding socket.
    bind(serverSocket, reinterpret_cast<sockaddr *>(&serverAddress),
         sizeof(serverAddress));

    info("Ready to accept connections.");
    // listening to the assigned socket
    listen(serverSocket, 5);

    // accepting connection request
    int clientSocket = accept(serverSocket, nullptr, nullptr);
    info("Client connected.");

    // TODO: handle multiple images
    cv::Mat image = cv::Mat::zeros(cv::Size(640, 480), CV_8UC3);

    bool running = true;
    // TODO: make this asynchronous. probably do that in tandem with setting up networking
    while (running) {
        // receive data
        vector<uchar> buffer;
        trace("Receiving image");
        recvImage(clientSocket, buffer);

        trace("Applying new data to image");
        applyImage(image, &buffer);

        trace("Displaying image");
        imshow("image", image);
        running = cv::waitKey(30) != 27;
    }
    close(serverSocket);
    return 0;
}

91  server.py  (deleted)
@@ -1,91 +0,0 @@
from typing import Dict

import cv2
import socket
import numpy as np
from datetime import datetime
import asyncio

from common import DoublyInterlacedPacket, from_bytes_dint

# bind any IP address
UDP_IP = ""
UDP_PORT = 5005

HEIGHT = 480
WIDTH = 640

class Client:
    def __init__(self):
        self.last_updated = datetime.now()
        self.frame = np.ndarray((HEIGHT, WIDTH, 3), dtype=np.uint8)

    def update(self, pkt: DoublyInterlacedPacket):
        self.frame = pkt.apply(self.frame)

        self.last_updated = datetime.now()

    def latency(self) -> float:
        return (datetime.now() - self.last_updated).total_seconds()

    def read(self) -> np.ndarray:
        return self.frame


frames: Dict[str, Client] = {}

async def read_packet():
    while True:
        for i in range(0, 1200):
            # break the array down into 16-bit chunks, then transmit them as UDP packets
            try:
                data, addr = sock.recvfrom(DoublyInterlacedPacket.size) # buffer size is 768 bytes
                # print("received packet from", addr)
                if data:
                    pkt = from_bytes_dint(data)

                    uuid = str(pkt.uuid)

                    if uuid not in frames.keys():
                        print("New client acquired, naming %s", uuid)
                        frames[uuid] = Client()

                    frames[uuid].update(pkt)

            except BlockingIOError:
                pass

        await asyncio.sleep(0.001)

async def show_frames():
    while True:
        # Display the resulting frame
        for id in list(frames.keys()):
            if frames[id].latency() >= 5:
                print("Client likely lost connection, dropping %s", id)
                cv2.destroyWindow(id)
                frames.pop(id)
            else:
                cv2.imshow(id, frames[id].read())

        cv2.waitKey(1)
        await asyncio.sleep(0.01)

if __name__ == "__main__":
    sock = socket.socket(socket.AF_INET, # Internet
                         socket.SOCK_DGRAM) # UDP
    sock.setblocking(False)
    sock.bind((UDP_IP, UDP_PORT))

    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)
    loop.create_task(read_packet())
    loop.create_task(show_frames())
    try:
        loop.run_forever()
    finally:
        loop.run_until_complete(loop.shutdown_asyncgens())
        loop.close()

    # Release the capture and close all windows
    cv2.destroyAllWindows()

101  transfer.h  (new file)
@@ -0,0 +1,101 @@
#ifndef TRANSFER_H
#define TRANSFER_H

#include <chrono>
#include <opencv2/core/mat.hpp>
#include <opencv2/imgcodecs.hpp>
#include <sys/socket.h>
#include <logging.h>
#include <vector>

using namespace std;

struct imageHeader {
    size_t size;
    chrono::milliseconds timestamp;
};

chrono::milliseconds getMillis() {
    // Get current time
    const auto now = chrono::system_clock::now();

    // Get time since epoch in milliseconds
    const chrono::duration ms = chrono::duration_cast<chrono::milliseconds>(now.time_since_epoch());

    return chrono::milliseconds(ms.count());
}

inline void serializeImage(const cv::Mat& image, std::vector<uchar>& buffer) {
    cv::imencode(".jpg", image, buffer);
}

int sendImage(int socket, const cv::Mat& image, std::vector<uchar>& buffer) {
    serializeImage(image, buffer);
    size_t totalSent = 0;

    // first send the size of the serialized image
    const size_t size = buffer.size();
    const chrono::milliseconds timestamp = getMillis();
    imageHeader header;
    header.size = size;
    header.timestamp = timestamp;
    trace("Buffer size: ", size);
    if (const ssize_t sent = send(socket, &header, sizeof(header), 0); sent == -1) {
        error("Error sending data header");
        return -1;
    }

    // then start sending the serialized image
    while (totalSent < size) {
        const ssize_t sent = send(socket, buffer.data() + totalSent, size - totalSent, 0);
        if (sent == -1) {
            error("Error sending data");
            return -1;
        }
        totalSent += sent;
        debug("Packet sent (", sent, " bytes, total ", totalSent, " bytes)");
    }
    return 0;
}

int recvImage(int socket, std::vector<uchar>& buffer) {
    // first receive the size of the image
    imageHeader header;
    if (ssize_t recvd = recv(socket, &header, sizeof(imageHeader), 0); recvd <= 0) {
        error("Error receiving data header");
        return -1;
    }

    size_t dataSize = header.size;
    chrono::milliseconds sentTime = header.timestamp;

    trace("Buffer size: ", dataSize);

    // resize the buffer to fit the whole image
    buffer.resize(dataSize);

    // start receiving the image until the whole thing arrives
    size_t totalReceived = 0;
    while (totalReceived < dataSize) {
        ssize_t bytesReceived = recv(socket, buffer.data() + totalReceived, dataSize, 0);
        if (bytesReceived <= 0) {
            error("Error receiving data");
            buffer.clear();
            return -1;
        }
        totalReceived += bytesReceived;
        debug("Packet received (", bytesReceived, " bytes, total ", totalReceived, " bytes)");
    }
    chrono::milliseconds currentTime = getMillis();

    chrono::milliseconds diff = currentTime - sentTime;
    debug("Packet latency: ", diff.count(), "ms");
    return 0;
}

bool applyImage(cv::Mat& image, std::vector<uchar> *src) {
    // decode the image into an OpenCV Mat
    cv::imdecode(*src, cv::IMREAD_UNCHANGED, &image);
    return true;
}
#endif //TRANSFER_H
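
For reference, `sendImage`/`recvImage` above frame each image as a fixed-size `imageHeader` (payload size plus a millisecond timestamp) followed by the JPEG bytes, looping until the full payload has been sent or received. The sketch below mirrors that framing outside the diff; the `"<Qq"` layout is an assumption (a 64-bit little-endian build where `size_t` and the `chrono::milliseconds` representation are both 8 bytes with no padding), not something the header guarantees:

```python
# Illustrative framing only -- field sizes and byte order are assumptions, see note above.
import struct
import time

HEADER = struct.Struct("<Qq")  # assumed: 8-byte payload size, 8-byte millisecond timestamp

def frame_image(jpeg_bytes: bytes) -> bytes:
    # header first (payload size + send time in ms), then the JPEG payload
    return HEADER.pack(len(jpeg_bytes), int(time.time() * 1000)) + jpeg_bytes

def parse_image(message: bytes):
    # read the fixed-size header, then slice out exactly `size` payload bytes
    size, sent_ms = HEADER.unpack(message[:HEADER.size])
    payload = message[HEADER.size:HEADER.size + size]
    latency_ms = int(time.time() * 1000) - sent_ms
    return payload, latency_ms

msg = frame_image(b"\xff\xd8\xff\xe0 fake jpeg bytes")
payload, latency_ms = parse_image(msg)
assert payload.startswith(b"\xff\xd8")
```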