Compare commits

...

11 Commits

12 changed files with 616 additions and 109 deletions

.gitignore (vendored)

@@ -1,2 +1,3 @@
 .venv
 .idea
+build

README.md

@@ -3,56 +3,37 @@
 This project is a demo for streaming video output from multiple "client" devices to one "server". This is a basic demo
 of what sauron-cv seeks to accomplish.
-## Installation
-It is strongly recommended that you use a virtual environment. These instructions will assume you are using venv, you
-can substitute this with your preferred environment management. You will need to make sure that the `virtualenv` package
-is installed globally, either via pip or the `python3-virtualenv` package in your system package manager.
-When first cloning this repo, run the following:
+## Building
+This project uses Meson for build management.
+### Install Dependencies
+Install the following packages from your distribution package manager:
+- `meson`
+- `opencv`
+- `boost`
+A better procedure for this (hopefully involving Meson) will be added / documented at a later date.
+### Setup Meson
 ```shell
-python -m venv .venv
-source .venv/bin/activate
-pip install -r requirements.txt
+meson setup build
 ```
-This will create a virtual environment, enter that virtual environment, and install the required packages.
-If you start a new shell, you will need to re-enter the virtual environment:
+*NOTE FOR JETBRAINS / CLION USERS: PLEASE SET YOUR MESON BUILD DIRECTORY TO `build` IN THE IDE SETTINGS UNDER "Build /
+Execution / Deployment" -> "Meson"*
+### Compiling
 ```shell
-source .venv/bin/activate
+meson build client # for client only
+meson build server # for server only
 ```
 ## Running
-### Client
-To run the client with a localhost target:
 ```shell
-python -m client
+./build/client # for client application
+./build/server # for server application
 ```
-To target an external server, provide its IP address using the `-s` flag:
-```shell
-python -m client -s [server IP address]
-```
-### Server
-```shell
-python -m server
-```
-### Common Flags
-Make sure that these match between your client and server!!
-| Short | Long | Description | Default |
-|-------|---|---|---|
-| `-p` | `--port` | The port for the client / server to communicate on. | `5005` |
-| `-W` | `--width` | Image width in pixels. | `640` |
-| `-H` | `--height` | Image height in pixels. | `480` |
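For anyone following the updated instructions end to end, a minimal sketch of the full workflow (assuming a recent Meson release; `meson compile -C build` is the standard compile invocation and is shown here as an alternative to the `meson build <target>` shorthand above):

```shell
# configure the build directory once
meson setup build
# compile everything, or name a single target, e.g. `meson compile -C build client`
meson compile -C build
# run the server first, then point clients at it
./build/server
./build/client
```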

client.cpp (new file, 43 lines)

@@ -0,0 +1,43 @@
#include <opencv2/videoio.hpp>
#include <cstring>
#include <iostream>
#include <netinet/in.h>
#include <sys/socket.h>
#include <unistd.h>

#include "transfer.h"

using namespace std;

int main() {
    // create video capture
    cv::VideoCapture cap = cv::VideoCapture(0);

    // create socket
    int clientSocket = socket(AF_INET, SOCK_STREAM, 0);

    // specifying address
    sockaddr_in serverAddress;
    serverAddress.sin_family = AF_INET;
    serverAddress.sin_port = htons(8080);
    serverAddress.sin_addr.s_addr = INADDR_ANY;

    cv::Mat image = cv::Mat::zeros(cv::Size(640, 480), CV_8UC3);

    info("Ready to connect.");

    // sending connection request
    connect(clientSocket, reinterpret_cast<sockaddr *>(&serverAddress),
            sizeof(serverAddress));

    info("Connected.");

    // create buffer for serialization
    vector<uchar> imgbuf;

    while (true) {
        cap.read(image);
        trace("Sending image");
        sendImage(clientSocket, image, imgbuf);
    }

    close(clientSocket);

    return 0;
}

legacy/README.md (new file, 58 lines)

@@ -0,0 +1,58 @@
# Video Streaming Proof of Concept
This project is a demo for streaming video output from multiple "client" devices to one "server". This is a basic demo
of what sauron-cv seeks to accomplish.
## Installation
It is strongly recommended that you use a virtual environment. These instructions will assume you are using venv, you
can substitute this with your preferred environment management. You will need to make sure that the `virtualenv` package
is installed globally, either via pip or the `python3-virtualenv` package in your system package manager.
When first cloning this repo, run the following:
```shell
python -m venv .venv
source .venv/bin/activate
pip install -r requirements.txt
```
This will create a virtual environment, enter that virtual environment, and install the required packages.
If you start a new shell, you will need to re-enter the virtual environment:
```shell
source .venv/bin/activate
```
## Running
### Client
To run the client with a localhost target:
```shell
python -m client
```
To target an external server, provide its IP address using the `-s` flag:
```shell
python -m client -s [server IP address]
```
### Server
```shell
python -m server
```
### Common Flags
Make sure that these match between your client and server!!
| Short | Long | Description | Default |
|-------|---|---|---|
| `-p` | `--port` | The port for the client / server to communicate on. | `5005` |
| `-W` | `--width` | Image width in pixels. | `640` |
| `-H` | `--height` | Image height in pixels. | `480` |
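Since these flags must agree on both ends, a quick sketch of a matching pair of invocations for the legacy Python demo (the address and port below are placeholder values):

```shell
# on the server machine
python -m server -p 5600 -W 640 -H 480
# on each client, pointing at the server's address
python -m client -s 192.168.1.20 -p 5600 -W 640 -H 480
```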

client.py

@@ -5,37 +5,49 @@ import socket
 import numpy as np
 import uuid
-from common import StdPacket, InterlacedPacket, DoublyInterlacedPacket
+from common import StdPacket, InterlacedPacket, DoublyInterlacedPacket, TiledImagePacket
 
 def send_packet(sock, packet):
     sock.sendto(packet, (UDP_IP, UDP_PORT))
 
-def breakdown_image_norm(frame):
+def breakdown_image_norm(frame, last_frame):
     (cols, rows, colors) = frame.shape
     # break the array down into 16x16 chunks, then transmit them as UDP packets
     for i in range(0, cols, 16):
         for j in range(0, rows, 16):
             # print("Sending frame segment (%d, %d)", i, j)
-            pkt = StdPacket(uuid, j, i, frame[i:i + 16, j:j + 16])
-            send_packet(sock, pkt.to_bytestr())
+            arr = frame[i:i + 16, j:j + 16]
+            last_arr = last_frame[i:i + 16, j:j + 16]
+            # only update if image segments are different
+            if not np.allclose(arr, last_arr):
+                pkt = StdPacket(uuid, j, i, arr)
+                send_packet(sock, pkt.to_bytestr())
 
-def breakdown_image_interlaced(frame):
+def breakdown_image_interlaced(frame, last_frame):
     (cols, rows, colors) = frame.shape
     # break the array into 16x32 chunks. we'll split those further into odd and even rows
     # and send each as UDP packets. this should make packet loss less obvious
     for i in range(0, cols, 32):
         for j in range(0, rows, 16):
             # print("Sending frame segment (%d, %d)", i, j)
-            pkt = InterlacedPacket(uuid, j, i, False, frame[i:i + 32:2, j:j + 16])
-            send_packet(sock, pkt.to_bytestr())
+            arr = frame[i:i + 32:2, j:j + 16]
+            last_arr = last_frame[i:i + 32:2, j:j + 16]
+            if not np.allclose(arr, last_arr):
+                pkt = InterlacedPacket(uuid, j, i, False, arr)
+                send_packet(sock, pkt.to_bytestr())
     for i in range(0, cols, 32):
         for j in range(0, rows, 16):
             # print("Sending frame segment (%d, %d)", i, j)
-            pkt = InterlacedPacket(uuid, j, i, True, frame[i + 1:i + 32:2, j:j + 16])
-            send_packet(sock, pkt.to_bytestr())
+            arr = frame[i + 1:i + 32:2, j:j + 16]
+            last_arr = last_frame[i + 1:i + 32:2, j:j + 16]
+            # only update if image segments are different
+            if not np.allclose(arr, last_arr):
+                pkt = InterlacedPacket(uuid, j, i, True, arr)
+                send_packet(sock, pkt.to_bytestr())
 
-def breakdown_image_dint(frame):
+def breakdown_image_dint(frame, last_frame):
     (cols, rows, colors) = frame.shape
     # break the array into 16x32 chunks. we'll split those further into odd and even rows
     # and send each as UDP packets. this should make packet loss less obvious
@@ -45,7 +57,25 @@ def breakdown_image_dint(frame):
             # print("Sending frame segment (%d, %d)", i, j)
             i_even = l % 2 == 0
             j_even = l >= 2
-            pkt = DoublyInterlacedPacket(uuid, j, i, j_even, i_even, frame[i + i_even:i + 32:2, j + j_even:j + 32:2])
-            send_packet(sock, pkt.to_bytestr())
+            # breakdown image
+            arr = frame[i + i_even:i + 32:2, j + j_even:j + 32:2]
+            last_arr = last_frame[i + i_even:i + 32:2, j + j_even:j + 32:2]
+            # only update if image segments are different
+            if not np.allclose(arr, last_arr):
+                pkt = DoublyInterlacedPacket(uuid, j, i, j_even, i_even, arr)
+                send_packet(sock, pkt.to_bytestr())
+
+def breakdown_image_tiled(frame):
+    (cols, rows, colors) = frame.shape
+    # break the array into 16x32 chunks. we'll split those further into odd and even rows
+    # and send each as UDP packets. this should make packet loss less obvious
+    xslice = cols // 16
+    yslice = rows // 16
+    for i in range(0, xslice):
+        for j in range(0, yslice):
+            # print("Sending frame segment (%d, %d)", i, j)
+            pkt = TiledImagePacket(uuid, j, i, rows, cols, frame[i:cols:xslice, j:rows:yslice])
+            send_packet(sock, pkt.to_bytestr())
 
 if __name__ == '__main__':
@@ -80,7 +110,11 @@ if __name__ == '__main__':
     # create the socket
     sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
+    frame = np.zeros((HEIGHT, WIDTH, 3), dtype=np.uint8)
+    last_frame = np.zeros((HEIGHT, WIDTH, 3), dtype=np.uint8)
+
     while True:
+        last_frame = frame.copy()
         # Capture frame-by-frame
         ret, frame = cap.read()
@@ -89,7 +123,7 @@ if __name__ == '__main__':
             print("Can't receive frame (stream end?). Exiting ...")
             break
 
-        breakdown_image_dint(frame)
+        breakdown_image_dint(frame, last_frame)
 
     # Release the capture and close all windows
     cap.release()

common.py

@@ -134,3 +134,38 @@ def from_bytes_dint(b: bytes) -> DoublyInterlacedPacket:
     array = np.frombuffer(b[28:], np.uint8).reshape(16, 16, 3)
     return DoublyInterlacedPacket(uuid, x, y, even_x, even_y, array)
+
+class TiledImagePacket(Packet):
+    """Distributed selection from image."""
+    size = 16 + 4 + 4 + 768
+
+    def __init__(self, uuid: UUID, x: int, y: int, width: int, height: int, array: np.ndarray):
+        super().__init__(uuid, x, y, array)
+        self.width = width
+        self.height = height
+        self.xslice = width // 16
+        self.yslice = height // 16
+
+    def to_bytestr(self) -> bytes:
+        bytestr = b""
+        bytestr += self.uuid.bytes
+        bytestr += self.x.to_bytes(length=4, signed=False)
+        bytestr += self.y.to_bytes(length=4, signed=False)
+        bytestr += self.array.tobytes()
+        return bytestr
+
+    def apply(self, image: np.ndarray) -> np.ndarray:
+        x = self.x
+        y = self.y
+        arr = self.array
+        image[y:self.height:self.yslice, x:self.width:self.xslice] = arr
+        return image
+
+def from_bytes_tiled(b: bytes) -> TiledImagePacket:
+    """Convert a byte string obtained via UDP into a packet object."""
+    uuid = UUID(bytes = b[0:16])
+    x = int.from_bytes(b[16:20], signed = False)
+    y = int.from_bytes(b[20:24], signed = False)
+    array = np.frombuffer(b[24:], np.uint8).reshape(16, 16, 3)
+    return TiledImagePacket(uuid, x, y, 640, 480, array)

requirements.txt

@@ -1,3 +1,4 @@
 opencv-python
 numpy
 rich
+asyncudp

server.py

@@ -5,6 +5,7 @@ import socket
 import numpy as np
 from datetime import datetime
 import asyncio
+import asyncudp
 import argparse
 
 from rich.console import Console
@@ -34,33 +35,6 @@ class Client:
 
 # Dictionary of client states stored by UUID
 frames: Dict[str, Client] = {}
 
-async def read_packet():
-    """Asynchronous coroutine to read UDP packets from the client(s)."""
-    while True:
-        # we repeat this a ton of times all at once to hopefully capture all of the image data
-        for i in range(0, 1600):
-            try:
-                data, addr = sock.recvfrom(DoublyInterlacedPacket.size) # packet buffer size based on the packet size
-                # print("received packet from", addr)
-                if data:
-                    pkt = from_bytes_dint(data)
-                    uuid = str(pkt.uuid)
-                    # if this is a new client, give it a new image
-                    if uuid not in frames.keys():
-                        console.log(f"New client acquired, naming [bold cyan]{uuid}[bold cyan]")
-                        frames[uuid] = Client()
-                        stat.update(f"[bold yellow]{len(frames.keys())}[/bold yellow] clients connected.")
-                    frames[uuid].update(pkt)
-            except BlockingIOError:
-                pass
-        # this is necessary to allow asyncio to swap between reading packets and rendering frames
-        await asyncio.sleep(0.001)
-
 async def show_frames():
     """Asynchronous coroutine to display frames in OpenCV debug windows."""
     while True:
@@ -70,20 +44,43 @@ async def show_frames():
                 console.log(f"Client likely lost connection, dropping [bold red]{id}[/bold red]")
                 cv2.destroyWindow(id)
                 frames.pop(id)
-                stat.update(f"[bold yellow]{len(frames.keys())}[/bold yellow] clients connected.")
             else:
                 # show the latest available frame
                 cv2.imshow(id, frames[id].read())
         cv2.waitKey(1)
         # this is necessary to allow asyncio to swap between reading packets and rendering frames
-        await asyncio.sleep(0.01)
+        await asyncio.sleep(0.05)
+
+async def listen(ip: str, port: int):
+    """Asynchronous coroutine to listen for / read client connections."""
+    sock = await asyncudp.create_socket(local_addr=(ip, port))
+    console.log("Ready to accept connections.", style="bold green")
+    while True:
+        # receive packets
+        data, addr = await sock.recvfrom()
+        if data:
+            # convert the byte string into a packet object
+            pkt = from_bytes_dint(data)
+            uuid = str(pkt.uuid)
+            # if this is a new client, give it a new image
+            if uuid not in frames.keys():
+                console.log(f"New client acquired, naming [bold cyan]{uuid}[bold cyan]")
+                frames[uuid] = Client()
+            frames[uuid].update(pkt)
+
 if __name__ == "__main__":
     # argument parser
     parser = argparse.ArgumentParser(description="Proof-of-concept server for sauron-cv")
     parser.add_argument("-p", "--port", type=int, default=5005)
-    parser.add_argument("-l", "--listen", type=str, default="")
+    parser.add_argument("-l", "--listen", type=str, default="0.0.0.0")
     parser.add_argument("-W", "--width", type=int, default=640)
     parser.add_argument("-H", "--height", type=int, default=480)
     args = parser.parse_args()
@@ -98,22 +95,12 @@ if __name__ == "__main__":
     HEIGHT = args.height
     WIDTH = args.width
 
-    # create the UDP socket
-    sock = socket.socket(socket.AF_INET, # Internet
-                         socket.SOCK_DGRAM) # UDP
-    sock.setblocking(False)
-    sock.bind((UDP_IP, UDP_PORT))
-
-    console.log("Ready to accept connections.", style="bold green")
-
-    with console.status("[bold yellow]0[/bold yellow] clients connected.", spinner="pong") as stat:
-        # create the async event loop
-        loop = asyncio.new_event_loop()
-        asyncio.set_event_loop(loop)
-
-        # create async tasks for reading network packets, displaying windows
-        loop.create_task(read_packet())
-        loop.create_task(show_frames())
-
-        try:
-            loop.run_forever()
+    # create the async event loop
+    loop = asyncio.new_event_loop()
+    asyncio.set_event_loop(loop)
+
+    # create async tasks for reading network packets, displaying windows
+    loop.create_task(listen(UDP_IP, UDP_PORT))
+    loop.create_task(show_frames())
+
+    try:
+        loop.run_forever()

logging.h (new file, 178 lines)

@@ -0,0 +1,178 @@
#ifndef LOGGING_H
#define LOGGING_H

#include <iostream>
#include <iomanip>
#include <chrono>
#include <ctime>

// Logging levels
#define LOG_ERROR 0
#define LOG_WARN 1
#define LOG_INFO 2
#define LOG_DEBUG 3
#define LOG_TRACE 4

// Set logging arguments
// TODO: set these by compiler arguments or env vars or something
#define LOGGING LOG_DEBUG // logging level
#define LOGGING_COLOR true // enable color
#define LOGGING_TIMESTAMP true // enable timestamp
#define LOGGING_TIMESTAMP_FMT "%Y-%m-%dT%H:%M:%S%z" // timestamp format (local time)
#define LOGGING_POSITION true // display position (only works on C++20 or newer)

// Color codes
#define ANSI_RESET "\033[0m"
#define ANSI_BLACK "\033[30m" /* Black */
#define ANSI_RED "\033[31m" /* Red */
#define ANSI_GREEN "\033[32m" /* Green */
#define ANSI_YELLOW "\033[33m" /* Yellow */
#define ANSI_BLUE "\033[34m" /* Blue */
#define ANSI_MAGENTA "\033[35m" /* Magenta */
#define ANSI_CYAN "\033[36m" /* Cyan */
#define ANSI_WHITE "\033[37m" /* White */
#define ANSI_BOLD "\033[1m" /* Bold */

template <typename T>
void print(T t) {
    std::cout << t << std::endl;
}

template <typename T, typename... Args>
void print(T t, Args... args) {
    std::cout << t;
    print(args...);
}

inline void printTimestamp() {
#if LOGGING_TIMESTAMP
    auto now = std::chrono::system_clock::now();
    auto time_c = std::chrono::system_clock::to_time_t(now);
    std::tm time_tm;
    localtime_r(&time_c, &time_tm);
    std::cout << std::put_time(&time_tm, LOGGING_TIMESTAMP_FMT) << ": ";
#endif
}

// if we're on C++20 or later, then use the source_location header and add source location to logs
#if __cplusplus >= 202002L
#include <source_location>

inline void printPosition(std::source_location& location) {
#if LOGGING_POSITION
    std::cout << location.file_name() << ":" << location.function_name << ":" << ANSI_CYAN << location.line() << ANSI_RESET << ": ";
#endif
}

inline void printHeader(std::string name, std::string color, std::source_location& location) {
#if LOGGING_COLOR
    std::cout << ANSI_BOLD << color << "[" << name << "] " << ANSI_RESET;
    printTimestamp();
    printPosition(location);
#else
    printHeader(name);
#endif
}

inline void printHeader(std::string name, std::source_location& location) {
    std::cout << "[" << name << "] ";
    printTimestamp();
    printPosition(location);
}

template <typename... Args, typename Sl = std::source_location>
void trace(Args... args, Sl location = std::source_location::current()) {
#if LOGGING >= LOG_TRACE
    printHeader("TRACE", ANSI_CYAN, location);
    print(args...);
#endif
}

template <typename... Args, typename Sl = std::source_location>
void debug(Args... args, Sl location = std::source_location::current()) {
#if LOGGING >= LOG_DEBUG
    printHeader("DEBUG", ANSI_MAGENTA, location);
    print(args...);
#endif
}

template <typename... Args, typename Sl = std::source_location>
void info(Args... args, Sl location = std::source_location::current()) {
#if LOGGING >= LOG_INFO
    printHeader("INFO", ANSI_GREEN, location);
    print(args...);
#endif
}

template <typename... Args, typename Sl = std::source_location>
void warn(Args... args, Sl location = std::source_location::current()) {
#if LOGGING >= LOG_WARN
    printHeader("WARN", ANSI_YELLOW, location);
    print(args...);
#endif
}

template <typename... Args, typename Sl = std::source_location>
void error(Args... args, Sl location = std::source_location::current()) {
#if LOGGING >= LOG_ERROR
    printHeader("ERROR", ANSI_RED, location);
    print(args...);
#endif
}

#else

inline void printHeader(std::string name, std::string color) {
#if LOGGING_COLOR
    std::cout << ANSI_BOLD << color << "[" << name << "] " << ANSI_RESET;
    printTimestamp();
#else
    printHeader(name);
#endif
}

inline void printHeader(std::string name) {
    std::cout << "[" << name << "] ";
    printTimestamp();
}

template <typename... Args>
void trace(Args... args) {
#if LOGGING >= LOG_TRACE
    printHeader("TRACE", ANSI_CYAN);
    print(args...);
#endif
}

template <typename... Args>
void debug(Args... args) {
#if LOGGING >= LOG_DEBUG
    printHeader("DEBUG", ANSI_MAGENTA);
    print(args...);
#endif
}

template <typename... Args>
void info(Args... args) {
#if LOGGING >= LOG_INFO
    printHeader("INFO", ANSI_GREEN);
    print(args...);
#endif
}

template <typename... Args>
void warn(Args... args) {
#if LOGGING >= LOG_WARN
    printHeader("WARN", ANSI_YELLOW);
    print(args...);
#endif
}

template <typename... Args>
void error(Args... args) {
#if LOGGING >= LOG_ERROR
    printHeader("ERROR", ANSI_RED);
    print(args...);
#endif
}

#endif

#endif //LOGGING_H

meson.build (new file, 33 lines)

@@ -0,0 +1,33 @@
#=======================================================================================================================
# PROJECT SETTINGS
#=======================================================================================================================
project('video-streaming-poc', 'cpp', version : '0.0.1-SNAPSHOT',
        default_options : ['c_std=c17', 'cpp_std=c++20'])
#=======================================================================================================================
# DEPENDENCIES
#=======================================================================================================================
# opencv dependency
opencv = dependency('opencv4', version : '>=4.0.0')
# boost dependency
boost = dependency('boost')
#=======================================================================================================================
# SOURCE FILES
#=======================================================================================================================
# common files between client / server
common = ['transfer.h', 'logging.h']
# client-only files
client = common + ['client.cpp']
# server-only files
server = common + ['server.cpp']
#=======================================================================================================================
# BUILD TARGETS
#=======================================================================================================================
# client executable
client_exe = executable('client', client,
                        dependencies : [opencv, boost])
# server executable
server_exe = executable('server', server,
                        dependencies : [opencv, boost])

server.cpp (new file, 55 lines)

@@ -0,0 +1,55 @@
#include <opencv2/highgui.hpp>
#include <opencv2/core/mat.hpp>
#include <cstring>
#include <iostream>
#include <netinet/in.h>
#include <sys/socket.h>
#include <unistd.h>

#include "transfer.h"

using namespace std;

int main() {
    // TODO: read image data from socket instead of VideoCapture

    // creating socket
    int serverSocket = socket(AF_INET, SOCK_STREAM, 0);

    // specifying the address
    sockaddr_in serverAddress;
    serverAddress.sin_family = AF_INET;
    serverAddress.sin_port = htons(8080);
    serverAddress.sin_addr.s_addr = INADDR_ANY;

    // binding socket.
    bind(serverSocket, reinterpret_cast<sockaddr *>(&serverAddress),
         sizeof(serverAddress));

    info("Ready to accept connections.");

    // listening to the assigned socket
    listen(serverSocket, 5);

    // accepting connection request
    int clientSocket = accept(serverSocket, nullptr, nullptr);

    info("Client connected.");

    // TODO: handle multiple images
    cv::Mat image = cv::Mat::zeros(cv::Size(640, 480), CV_8UC3);

    bool running = true;

    // TODO: make this asynchronous. probably do that in tandem with setting up networking
    while (running) {
        // receive data
        vector<uchar> buffer;
        trace("Receiving image");
        recvImage(clientSocket, buffer);
        trace("Applying new data to image");
        applyImage(image, &buffer);
        trace("Displaying image");
        imshow("image", image);
        running = cv::waitKey(30) != 27;
    }

    close(serverSocket);

    return 0;
}

transfer.h (new file, 101 lines)

@@ -0,0 +1,101 @@
#ifndef TRANSFER_H
#define TRANSFER_H

#include <chrono>
#include <opencv2/core/mat.hpp>
#include <opencv2/imgcodecs.hpp>
#include <sys/socket.h>
#include <logging.h>
#include <vector>

using namespace std;

struct imageHeader {
    size_t size;
    chrono::milliseconds timestamp;
};

chrono::milliseconds getMillis() {
    // Get current time
    const auto now = chrono::system_clock::now();

    // Get time since epoch in milliseconds
    const chrono::duration ms = chrono::duration_cast<chrono::milliseconds>(now.time_since_epoch());

    return chrono::milliseconds(ms.count());
}

inline void serializeImage(const cv::Mat& image, std::vector<uchar>& buffer) {
    cv::imencode(".jpg", image, buffer);
}

int sendImage(int socket, const cv::Mat& image, std::vector<uchar>& buffer) {
    serializeImage(image, buffer);
    size_t totalSent = 0;

    // first send the size of the serialized image
    const size_t size = buffer.size();
    const chrono::milliseconds timestamp = getMillis();
    imageHeader header;
    header.size = size;
    header.timestamp = timestamp;
    trace("Buffer size: ", size);
    if (const ssize_t sent = send(socket, &header, sizeof(header), 0); sent == -1) {
        error("Error sending data header");
        return -1;
    }

    // then start sending the serialized image
    while (totalSent < size) {
        const ssize_t sent = send(socket, buffer.data() + totalSent, size - totalSent, 0);
        if (sent == -1) {
            error("Error sending data");
            return -1;
        }
        totalSent += sent;
        debug("Packet sent (", sent, " bytes, total ", totalSent, " bytes)");
    }

    return 0;
}

int recvImage(int socket, std::vector<uchar>& buffer) {
    // first receive the size of the image
    imageHeader header;
    if (ssize_t recvd = recv(socket, &header, sizeof(imageHeader), 0); recvd <= 0) {
        error("Error receiving data header");
        return -1;
    }
    size_t dataSize = header.size;
    chrono::milliseconds sentTime = header.timestamp;
    trace("Buffer size: ", dataSize);

    // resize the buffer to fit the whole image
    buffer.resize(dataSize);

    // start receiving the image until the whole thing arrives
    size_t totalReceived = 0;
    while (totalReceived < dataSize) {
        ssize_t bytesReceived = recv(socket, buffer.data() + totalReceived, dataSize, 0);
        if (bytesReceived <= 0) {
            error("Error receiving data");
            buffer.clear();
            return -1;
        }
        totalReceived += bytesReceived;
        debug("Packet received (", bytesReceived, " bytes, total ", totalReceived, " bytes)");
    }

    chrono::milliseconds currentTime = getMillis();
    chrono::milliseconds diff = currentTime - sentTime;
    debug("Packet latency: ", diff.count(), "ms");

    return 0;
}

bool applyImage(cv::Mat& image, std::vector<uchar> *src) {
    // decode the image into an OpenCV Mat
    cv::imdecode(*src, cv::IMREAD_UNCHANGED, &image);
    return true;
}

#endif //TRANSFER_H