Compare commits

...

7 Commits

Author SHA1 Message Date
ee8604b234 use last camera device instead of first
this should prio new devices (like virtual cameras)
2024-10-05 11:59:12 +02:00
3475f0751f prep for toxav multithreading 2024-10-05 11:17:44 +02:00
09c8bbfcc6 video src now picks the best <=1080p mode
2024-10-03 16:42:07 +02:00
14a726ad75 move DVT conversion to connection
2024-10-03 16:05:21 +02:00
7cb4f67f96 use sdl yuv to yuv conversion, and better fallbacks
display intended main screen intervals
actually report min interval in debug view and more
2024-10-03 12:33:52 +02:00
a290bec8f1 add experimental NV12 to IYUV conversion routine
2024-10-02 23:35:33 +02:00
2554229211 sdl camera input source
2024-10-02 18:51:35 +02:00
13 changed files with 545 additions and 258 deletions

View File

@@ -116,7 +116,8 @@ target_sources(tomato PUBLIC
 	./frame_streams/sdl/sdl_audio2_frame_stream2.cpp
 	./frame_streams/sdl/video.hpp
 	./frame_streams/sdl/video_push_converter.hpp
-	./frame_streams/sdl/video_push_converter.cpp
+	./frame_streams/sdl/sdl_video_frame_stream2.hpp
+	./frame_streams/sdl/sdl_video_frame_stream2.cpp
 	./stream_manager_ui.hpp
 	./stream_manager_ui.cpp

View File

@@ -297,7 +297,10 @@ float ChatGui4::render(float time_delta) {
 				acceptable_sessions.push_back(o);
 			}
-			static Components::VoIP::DefaultConfig g_default_connections{};
+			static Components::VoIP::DefaultConfig g_default_connections{
+				true, true,
+				true, false
+			};
 			if (ImGui::BeginMenu("default connections")) {
 				ImGui::MenuItem("incoming audio", nullptr, &g_default_connections.incoming_audio);

View File

@@ -10,6 +10,8 @@
 #include "./frame_streams/sdl/video.hpp"
 #include "./frame_streams/frame_stream2.hpp"
+#include "./frame_streams/locked_frame_stream.hpp"
+#include "./frame_streams/sdl/video_push_converter.hpp"
 #include <string>
 #include <memory>
@@ -27,40 +29,6 @@ namespace Message {
 	uint64_t getTimeMS(void);
 }
-// threadsafe queue frame stream
-// protected by a simple mutex lock
-template<typename FrameType>
-struct LockedFrameStream2 : public FrameStream2I<FrameType> {
-	std::mutex _lock;
-	std::deque<FrameType> _frames;
-	~LockedFrameStream2(void) {}
-	int32_t size(void) { return -1; }
-	std::optional<FrameType> pop(void) {
-		std::lock_guard lg{_lock};
-		if (_frames.empty()) {
-			return std::nullopt;
-		}
-		FrameType new_frame = std::move(_frames.front());
-		_frames.pop_front();
-		return std::move(new_frame);
-	}
-	bool push(const FrameType& value) {
-		std::lock_guard lg{_lock};
-		_frames.push_back(value);
-		return true;
-	}
-};
 struct DebugVideoTapSink : public FrameStream2SinkI<SDLVideoFrame> {
 	TextureUploaderI& _tu;
@@ -80,7 +48,7 @@ struct DebugVideoTapSink : public FrameStream2SinkI<SDLVideoFrame> {
 			float _v_interval_avg {0.f}; // s
 		} view;
-		std::shared_ptr<LockedFrameStream2<SDLVideoFrame>> stream;
+		std::shared_ptr<PushConversionVideoStream<LockedFrameStream2<SDLVideoFrame>>> stream;
 	};
 	std::vector<Writer> _writers;
@@ -91,7 +59,7 @@ struct DebugVideoTapSink : public FrameStream2SinkI<SDLVideoFrame> {
 	std::shared_ptr<FrameStream2I<SDLVideoFrame>> subscribe(void) override {
 		_writers.emplace_back(Writer{
 			Writer::View{_id_counter++},
-			std::make_shared<LockedFrameStream2<SDLVideoFrame>>()
+			std::make_shared<PushConversionVideoStream<LockedFrameStream2<SDLVideoFrame>>>(SDL_PIXELFORMAT_RGBA32)
 		});
 		return _writers.back().stream;
@@ -127,7 +95,7 @@ struct DebugVideoTestSource : public FrameStream2SourceI<SDLVideoFrame> {
 		_thread = std::thread([this](void) {
 			while (!_stop) {
 				if (!_readers.empty()) {
-					auto* surf = SDL_CreateSurface(960, 720, SDL_PIXELFORMAT_ARGB32);
+					auto* surf = SDL_CreateSurface(960, 720, SDL_PIXELFORMAT_RGBA32);
 					// color
 					static auto start_time = Message::getTimeMS();
@@ -220,7 +188,7 @@ float DebugVideoTap::render(void) {
 	for (auto& [view, stream] : dvtsw) {
 		std::string window_title {"DebugVideoTap #"};
 		window_title += std::to_string(view._id);
-		ImGui::SetNextWindowSize({250, 250}, ImGuiCond_Appearing);
+		ImGui::SetNextWindowSize({400, 420}, ImGuiCond_Appearing);
 		if (ImGui::Begin(window_title.c_str())) {
 			while (auto new_frame_opt = stream->pop()) {
 				// timing
@@ -233,7 +201,7 @@ float DebugVideoTap::render(void) {
 				if (view._v_interval_avg == 0) {
 					view._v_interval_avg = delta/1'000'000.f;
 				} else {
-					const float r = 0.2f;
+					const float r = 0.05f;
 					view._v_interval_avg = view._v_interval_avg * (1.f-r) + (delta/1'000'000.f) * r;
 				}
 			}
@@ -277,7 +245,7 @@ float DebugVideoTap::render(void) {
 			// img here
 			if (view._tex != 0) {
 				ImGui::SameLine();
-				ImGui::Text("moving avg interval: %f", view._v_interval_avg);
+				ImGui::Text("%dx%d ~avg interval: %.0fms (%.2ffps)", view._tex_w, view._tex_h, view._v_interval_avg*1000.f, 1.f/view._v_interval_avg);
 				const float img_w = ImGui::GetContentRegionAvail().x;
 				ImGui::Image(
 					reinterpret_cast<ImTextureID>(view._tex),
@@ -287,6 +255,9 @@ float DebugVideoTap::render(void) {
 				);
 			}
+			if (view._v_interval_avg > 0) {
+				min_interval = std::min(min_interval, view._v_interval_avg);
+			}
 		}
 		ImGui::End();
 	}
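Editor's note: the debug view above smooths the inter-frame time with an exponential moving average,

	interval_avg = (1 - r) * interval_avg + r * delta_seconds

so dropping r from 0.2 to 0.05 widens the effective averaging window from roughly 1/0.2 = 5 frames to 1/0.05 = 20 frames, which makes the new "~avg interval / fps" readout per window considerably less jittery.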

View File

@ -0,0 +1,187 @@
#include "./sdl_video_frame_stream2.hpp"
#include <chrono>
#include <iostream>
SDLVideo2InputDevice::SDLVideo2InputDevice(void) {
int devcount {0};
SDL_CameraID *devices = SDL_GetCameras(&devcount);
std::cout << "SDLVID: SDL Camera Driver: " << SDL_GetCurrentCameraDriver() << "\n";
if (devices == nullptr || devcount < 1) {
throw int(2); // TODO: proper error code
}
std::cout << "SDLVID: found cameras:\n";
for (int i = 0; i < devcount; i++) {
const SDL_CameraID device = devices[i];
const char *name = SDL_GetCameraName(device);
std::cout << " - Camera #" << i << ": " << name << "\n";
int speccount {0};
SDL_CameraSpec** specs = SDL_GetCameraSupportedFormats(device, &speccount);
if (specs == nullptr) {
std::cout << " - no supported spec\n";
} else {
for (int spec_i = 0; spec_i < speccount; spec_i++) {
std::cout << " - " << specs[spec_i]->width << "x" << specs[spec_i]->height << "@" << float(specs[spec_i]->framerate_numerator)/specs[spec_i]->framerate_denominator << "fps " << SDL_GetPixelFormatName(specs[spec_i]->format) << "\n";
}
SDL_free(specs);
}
}
SDL_free(devices);
}
SDLVideo2InputDevice::~SDLVideo2InputDevice(void) {
}
std::shared_ptr<FrameStream2I<SDLVideoFrame>> SDLVideo2InputDevice::subscribe(void) {
const int prev_ref = _ref++;
if (prev_ref == 0) {
// there was previously no stream, we assume no thread
// open device here? or on the thread?
int devcount {0};
SDL_CameraID *devices = SDL_GetCameras(&devcount);
if (devices == nullptr || devcount < 1) {
_ref--;
// error/no devices, should we do this in the constructor?
SDL_free(devices);
return nullptr;
}
//auto device = devices[0];
auto device = devices[devcount-1];
SDL_CameraSpec spec {
// FORCE a different pixel format
SDL_PIXELFORMAT_UNKNOWN,
//SDL_PIXELFORMAT_YUY2,
SDL_COLORSPACE_UNKNOWN,
//SDL_COLORSPACE_YUV_DEFAULT,
1280, 720,
60, 1
};
// choose a good spec, large res but <= 1080p
int speccount {0};
SDL_CameraSpec** specs = SDL_GetCameraSupportedFormats(device, &speccount);
if (specs != nullptr) {
spec = *specs[0];
for (int spec_i = 1; spec_i < speccount; spec_i++) {
if (specs[spec_i]->height > 1080) {
continue;
}
if (spec.height > specs[spec_i]->height) {
continue;
}
if (
float(spec.framerate_numerator)/float(spec.framerate_denominator)
>
float(specs[spec_i]->framerate_numerator)/float(specs[spec_i]->framerate_denominator)
) {
continue;
}
if (spec.format == SDL_PIXELFORMAT_NV12 && specs[spec_i]->format == SDL_PIXELFORMAT_YUY2) {
// HACK: prefer nv12 over yuy2
continue;
}
// seems to be better
spec = *specs[spec_i];
}
SDL_free(specs);
}
std::unique_ptr<SDL_Camera, decltype(&SDL_CloseCamera)> camera {nullptr, &SDL_CloseCamera};
camera = {
//SDL_OpenCamera(device, nullptr),
SDL_OpenCamera(device, &spec),
&SDL_CloseCamera
};
SDL_free(devices);
if (!camera) {
std::cerr << "SDLVID error: failed opening camera device\n";
_ref--;
return nullptr;
}
// seems like we need this before get format() ?
// TODO: best would be waiting in thread, but that obv does not work well
// TODO: sometimes if the device is/was in use it will stay 0 for ever
while (SDL_GetCameraPermissionState(camera.get()) == 0) {
//std::cerr << "permission for camera not granted\n";
std::this_thread::sleep_for(std::chrono::milliseconds(10));
}
if (SDL_GetCameraPermissionState(camera.get()) <= 0) {
std::cerr << "SDLVID error: user denied camera permission\n";
_ref--;
return nullptr;
}
float fps {1.f};
if (!SDL_GetCameraFormat(camera.get(), &spec)) {
// meh
_ref--;
return nullptr;
} else {
fps = float(spec.framerate_numerator)/float(spec.framerate_denominator);
std::cout << "SDLVID: camera fps: " << fps << "fps (" << spec.framerate_numerator << "/" << spec.framerate_denominator << ")\n";
auto* format_name = SDL_GetPixelFormatName(spec.format);
std::cout << "SDLVID: camera format: " << format_name << "\n";
}
_thread = std::thread([this, camera = std::move(camera), fps](void) {
while (_ref > 0) {
Uint64 timestampNS = 0;
// aquire frame
SDL_Surface* sdl_frame_next = SDL_AcquireCameraFrame(camera.get(), &timestampNS);
if (sdl_frame_next != nullptr) {
SDLVideoFrame new_frame_non_owning {
timestampNS/1000,
sdl_frame_next
};
// creates surface copies
push(new_frame_non_owning);
SDL_ReleaseCameraFrame(camera.get(), sdl_frame_next);
}
// sleep for interval
// TODO: do we really need half?
std::this_thread::sleep_for(std::chrono::milliseconds(int64_t((1000/fps)*0.5)));
}
// camera destructor closes device here
});
std::cout << "SDLVID: started new cam thread\n";
}
return FrameStream2MultiSource<SDLVideoFrame>::subscribe();
}
bool SDLVideo2InputDevice::unsubscribe(const std::shared_ptr<FrameStream2I<SDLVideoFrame>>& sub) {
if (FrameStream2MultiSource<SDLVideoFrame>::unsubscribe(sub)) {
if (--_ref == 0) {
// was last stream, close device and thread
_thread.join(); // TODO: defer to destructor or new thread?
// this might take a moment and lock up the main thread
std::cout << "SDLVID: ended cam thread\n";
}
return true;
}
return false;
}
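Editor's note: the capture thread above sleeps for half of the negotiated frame interval between acquisition attempts. As a worked number, assuming the camera settles on 30 fps: 1000 / 30 ≈ 33.3 ms per frame, halved to ≈ 16 ms of sleep, so SDL_AcquireCameraFrame() is polled about twice per frame period and a fresh frame is picked up at most roughly 17 ms after the driver produces it.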

View File

@ -0,0 +1,29 @@
#pragma once
#include "./video.hpp"
#include "../frame_stream2.hpp"
#include "../multi_source.hpp"
#include <atomic>
#include <thread>
// tips: you can force a SDL vido driver by setting an env:
// SDL_CAMERA_DRIVER=v4l2
// SDL_CAMERA_DRIVER=pipewire
// etc.
// while a stream is subscribed, have the camera device open
// and aquire and push frames from a thread
struct SDLVideo2InputDevice : public FrameStream2MultiSource<SDLVideoFrame> {
std::atomic_uint _ref {0};
std::thread _thread;
// TODO: device id
SDLVideo2InputDevice(void);
virtual ~SDLVideo2InputDevice(void);
// we hook int multi source
std::shared_ptr<FrameStream2I<SDLVideoFrame>> subscribe(void) override;
bool unsubscribe(const std::shared_ptr<FrameStream2I<SDLVideoFrame>>& sub) override;
};
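Editor's note: a minimal usage sketch of the device declared above, tying the header comments together. The SDL_SetHint() call mirroring the SDL_CAMERA_DRIVER environment variable and the fixed poll interval are illustrative assumptions, not part of this change:

#include "./frame_streams/sdl/sdl_video_frame_stream2.hpp"
#include <SDL3/SDL.h>
#include <chrono>
#include <memory>
#include <thread>

void camera_tap_example(void) {
	SDL_SetHint("SDL_CAMERA_DRIVER", "pipewire"); // optional; assumed hint name mirroring the env tip above
	if (!SDL_InitSubSystem(SDL_INIT_CAMERA)) {
		return; // no camera backend available
	}

	auto src = std::make_shared<SDLVideo2InputDevice>(); // note: the constructor throws if no camera is found
	auto stream = src->subscribe(); // first subscriber opens the device and starts the capture thread
	if (!stream) {
		SDL_QuitSubSystem(SDL_INIT_CAMERA);
		return;
	}

	for (int i = 0; i < 300; i++) { // tap roughly 5 seconds of frames
		while (auto frame_opt = stream->pop()) {
			// frame_opt->surface is an SDL_Surface in whatever format the camera negotiated
		}
		std::this_thread::sleep_for(std::chrono::milliseconds(16));
	}

	src->unsubscribe(stream); // last unsubscribe joins the capture thread and closes the device
	SDL_QuitSubSystem(SDL_INIT_CAMERA);
}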

View File

@ -1,58 +0,0 @@
#include "./video_push_converter.hpp"
SDL_Surface* convertYUY2_IYUV(SDL_Surface* surf) {
if (surf->format != SDL_PIXELFORMAT_YUY2) {
return nullptr;
}
if ((surf->w % 2) != 0) {
SDL_SetError("YUY2->IYUV does not support odd widths");
// hmmm, we dont handle odd widths
return nullptr;
}
SDL_LockSurface(surf);
SDL_Surface* conv_surf = SDL_CreateSurface(surf->w, surf->h, SDL_PIXELFORMAT_IYUV);
SDL_LockSurface(conv_surf);
// YUY2 is 4:2:2 packed
// Y is simple, we just copy it over
// U V are double the resolution (vertically), so we avg both
// Packed mode: Y0+U0+Y1+V0 (1 plane)
uint8_t* y_plane = static_cast<uint8_t*>(conv_surf->pixels);
uint8_t* u_plane = static_cast<uint8_t*>(conv_surf->pixels) + conv_surf->w*conv_surf->h;
uint8_t* v_plane = static_cast<uint8_t*>(conv_surf->pixels) + conv_surf->w*conv_surf->h + (conv_surf->w/2)*(conv_surf->h/2);
const uint8_t* yuy2_data = static_cast<const uint8_t*>(surf->pixels);
for (int y = 0; y < surf->h; y++) {
for (int x = 0; x < surf->w; x += 2) {
// every pixel uses 2 bytes
const uint8_t* yuy2_curser = yuy2_data + y*surf->w*2 + x*2;
uint8_t src_y0 = yuy2_curser[0];
uint8_t src_u = yuy2_curser[1];
uint8_t src_y1 = yuy2_curser[2];
uint8_t src_v = yuy2_curser[3];
y_plane[y*conv_surf->w + x] = src_y0;
y_plane[y*conv_surf->w + x+1] = src_y1;
size_t uv_index = (y/2) * (conv_surf->w/2) + x/2;
if (y % 2 == 0) {
// first write
u_plane[uv_index] = src_u;
v_plane[uv_index] = src_v;
} else {
// second write, mix with existing value
u_plane[uv_index] = (int(u_plane[uv_index]) + int(src_u)) / 2;
v_plane[uv_index] = (int(v_plane[uv_index]) + int(src_v)) / 2;
}
}
}
SDL_UnlockSurface(conv_surf);
SDL_UnlockSurface(surf);
return conv_surf;
}
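Editor's note: commit a290bec8f1 mentions an experimental NV12 to IYUV routine that does not itself appear in this compare. For orientation, a hypothetical sketch in the same style as the deleted YUY2 converter above; tightly packed planes (pitch == width), even dimensions and error handling are simplifying assumptions:

#include <SDL3/SDL.h>
#include <cstdint>
#include <cstring>

SDL_Surface* convertNV12_IYUV_sketch(SDL_Surface* surf) {
	if (surf->format != SDL_PIXELFORMAT_NV12) {
		return nullptr;
	}
	SDL_LockSurface(surf);
	SDL_Surface* conv_surf = SDL_CreateSurface(surf->w, surf->h, SDL_PIXELFORMAT_IYUV);
	SDL_LockSurface(conv_surf);
	// NV12 is 4:2:0 semi-planar: a full-size Y plane followed by one
	// half-size plane of interleaved U+V byte pairs.
	// IYUV is fully planar: Y, then U, then V.
	const uint8_t* src_y = static_cast<const uint8_t*>(surf->pixels);
	const uint8_t* src_uv = src_y + surf->w * surf->h;
	uint8_t* dst_y = static_cast<uint8_t*>(conv_surf->pixels);
	uint8_t* dst_u = dst_y + conv_surf->w * conv_surf->h;
	uint8_t* dst_v = dst_u + (conv_surf->w/2) * (conv_surf->h/2);
	// Y copies over unchanged
	std::memcpy(dst_y, src_y, size_t(surf->w) * surf->h);
	// de-interleave the UV plane
	const size_t uv_pairs = size_t(surf->w/2) * (surf->h/2);
	for (size_t i = 0; i < uv_pairs; i++) {
		dst_u[i] = src_uv[i*2 + 0];
		dst_v[i] = src_uv[i*2 + 1];
	}
	SDL_UnlockSurface(conv_surf);
	SDL_UnlockSurface(surf);
	return conv_surf;
}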

View File

@@ -7,24 +7,10 @@
 #include <iostream> // meh
-static bool isFormatYUV(SDL_PixelFormat f) {
-	return
-		f == SDL_PIXELFORMAT_YV12 ||
-		f == SDL_PIXELFORMAT_IYUV ||
-		f == SDL_PIXELFORMAT_YUY2 ||
-		f == SDL_PIXELFORMAT_UYVY ||
-		f == SDL_PIXELFORMAT_YVYU ||
-		f == SDL_PIXELFORMAT_NV12 ||
-		f == SDL_PIXELFORMAT_NV21 ||
-		f == SDL_PIXELFORMAT_P010
-	;
-}
-SDL_Surface* convertYUY2_IYUV(SDL_Surface* surf);
 template<typename RealStream>
 struct PushConversionVideoStream : public RealStream {
 	SDL_PixelFormat _forced_format {SDL_PIXELFORMAT_IYUV};
+	// TODO: force colorspace?
 	template<typename... Args>
 	PushConversionVideoStream(SDL_PixelFormat forced_format, Args&&... args) : RealStream(std::forward<Args>(args)...), _forced_format(forced_format) {}
@@ -33,41 +19,33 @@ struct PushConversionVideoStream : public RealStream {
 	bool push(const SDLVideoFrame& value) override {
 		SDL_Surface* surf = value.surface.get();
 		if (surf->format != _forced_format) {
-			//std::cerr << "DTC: need to convert from " << SDL_GetPixelFormatName(converted_surf->format) << " to SDL_PIXELFORMAT_IYUV\n";
-			if (surf->format == SDL_PIXELFORMAT_YUY2 && _forced_format == SDL_PIXELFORMAT_IYUV) {
-				// optimized custom impl
-				//auto start = Message::getTimeMS();
-				surf = convertYUY2_IYUV(surf);
-				//auto end = Message::getTimeMS();
-				// 3ms
-				//std::cerr << "DTC: timing " << SDL_GetPixelFormatName(converted_surf->format) << "->SDL_PIXELFORMAT_IYUV: " << end-start << "ms\n";
-			} else if (isFormatYUV(surf->format)) {
-				// TODO: fix sdl rgb->yuv conversion resulting in too dark (colorspace) issues
-				// https://github.com/libsdl-org/SDL/issues/10877
-				// meh, need to convert to rgb as a stopgap
-				//auto start = Message::getTimeMS();
-				SDL_Surface* tmp_conv_surf = SDL_ConvertSurfaceAndColorspace(surf, SDL_PIXELFORMAT_RGB24, nullptr, SDL_COLORSPACE_RGB_DEFAULT, 0);
-				//auto end = Message::getTimeMS();
-				// 1ms
-				//std::cerr << "DTC: timing " << SDL_GetPixelFormatName(converted_surf->format) << "->SDL_PIXELFORMAT_RGB24: " << end-start << "ms\n";
-				//start = Message::getTimeMS();
-				surf = SDL_ConvertSurfaceAndColorspace(tmp_conv_surf, _forced_format, nullptr, SDL_COLORSPACE_YUV_DEFAULT, 0);
-				//end = Message::getTimeMS();
-				// 60ms
-				//std::cerr << "DTC: timing SDL_PIXELFORMAT_RGB24->" << SDL_GetPixelFormatName(_forced_format) << ": " << end-start << "ms\n";
-				SDL_DestroySurface(tmp_conv_surf);
-			} else {
-				surf = SDL_ConvertSurface(surf, _forced_format);
+			//std::cerr << "PCVS: need to convert from " << SDL_GetPixelFormatName(surf->format) << " to " << SDL_GetPixelFormatName(_forced_format) << "\n";
+			if ((surf = SDL_ConvertSurface(surf, _forced_format)) == nullptr) {
+				surf = value.surface.get(); // reset ptr
+				//std::cerr << "PCVS warning: default conversion failed: " << SDL_GetError() << "\n";
+				// sdl hardcodes BT709_LIMITED
+				if ((surf = SDL_ConvertSurfaceAndColorspace(surf, _forced_format, nullptr, SDL_GetSurfaceColorspace(surf), 0)) == nullptr) {
+				//if ((surf = SDL_ConvertSurfaceAndColorspace(surf, _forced_format, nullptr, SDL_COLORSPACE_BT709_LIMITED, 0)) == nullptr) {
+					surf = value.surface.get(); // reset ptr
+					//std::cerr << "PCVS warning: default conversion with same colorspace failed: " << SDL_GetError() << "\n";
+					// simple convert failed, fall back to ->rgb->yuv
+					//SDL_Surface* tmp_conv_surf = SDL_ConvertSurfaceAndColorspace(surf, SDL_PIXELFORMAT_RGB24, nullptr, SDL_COLORSPACE_RGB_DEFAULT, 0);
+					SDL_Surface* tmp_conv_surf = SDL_ConvertSurface(surf, SDL_PIXELFORMAT_RGB24);
+					if (tmp_conv_surf == nullptr) {
+						std::cerr << "PCVS error: conversion to RGB failed: " << SDL_GetError() << "\n";
+					} else {
+						//surf = SDL_ConvertSurfaceAndColorspace(tmp_conv_surf, _forced_format, nullptr, SDL_COLORSPACE_YUV_DEFAULT, 0);
+						surf = SDL_ConvertSurface(tmp_conv_surf, _forced_format);
+						//surf = SDL_ConvertSurfaceAndColorspace(tmp_conv_surf, _forced_format, nullptr, SDL_COLORSPACE_BT601_LIMITED, 0);
+						SDL_DestroySurface(tmp_conv_surf);
+					}
+				}
 			}
 			if (surf == nullptr) {
 				// oh god
-				std::cerr << "DTC error: failed to convert surface to IYUV: " << SDL_GetError() << "\n";
+				std::cerr << "PCVS error: failed to convert surface to IYUV: " << SDL_GetError() << "\n";
 				return false;
 			}
 		}

View File

@@ -194,7 +194,7 @@ bool StreamManager::connect(Object src, Object sink, bool threaded) {
 			std::move(our_data),
 			[](Connection& con) -> void {
 				// there might be more stored
-				for (size_t i = 0; i < 10; i++) {
+				for (size_t i = 0; i < 64; i++) {
 					auto new_frame_opt = static_cast<inlineData*>(con.data.get())->reader->pop();
 					// TODO: frame interval estimates
 					if (new_frame_opt.has_value()) {

View File

@@ -6,6 +6,7 @@
 #include <solanaceae/contact/components.hpp>
 #include "./frame_streams/sdl/sdl_audio2_frame_stream2.hpp"
+#include "./frame_streams/sdl/sdl_video_frame_stream2.hpp"
 #include <imgui/imgui.h>
@@ -178,6 +179,27 @@ MainScreen::MainScreen(SimpleConfigModel&& conf_, SDL_Renderer* renderer_, Theme
 	} else {
 		std::cerr << "MS warning: no sdl audio: " << SDL_GetError() << "\n";
 	}
+	if (SDL_InitSubSystem(SDL_INIT_CAMERA)) {
+		{ // video in
+			ObjectHandle vsrc {os.registry(), os.registry().create()};
+			try {
+				vsrc.emplace<Components::FrameStream2Source<SDLVideoFrame>>(
+					std::make_unique<SDLVideo2InputDevice>()
+				);
+				vsrc.emplace<Components::StreamSource>(Components::StreamSource::create<SDLVideoFrame>("SDL Video Default Recording Device"));
+				vsrc.emplace<Components::TagDefaultTarget>();
+				os.throwEventConstruct(vsrc);
+			} catch (...) {
+				std::cerr << "MS error: failed constructing default video input source\n";
+				os.registry().destroy(vsrc);
+			}
+		}
+	} else {
+		std::cerr << "MS warning: no sdl camera: " << SDL_GetError() << "\n";
+	}
 }
@@ -324,6 +346,11 @@ Screen* MainScreen::render(float time_delta, bool&) {
 				ImGui::SetItemTooltip("Limiting compute can slow down things like filetransfers!");
 			}
+			ImGui::Separator();
+			ImGui::Text("render interval: %.0fms (%.2ffps)", _render_interval*1000.f, 1.f/_render_interval);
+			ImGui::Text("tick interval: %.0fms (%.2ftps)", _min_tick_interval*1000.f, 1.f/_min_tick_interval);
 			ImGui::EndMenu();
 		}
 		if (ImGui::BeginMenu("Settings")) {
@@ -482,12 +509,12 @@ Screen* MainScreen::render(float time_delta, bool&) {
 	// min over non animations in all cases
 	_render_interval = std::min<float>(pm_interval, cg_interval);
 	_render_interval = std::min<float>(_render_interval, tc_unfinished_queue_interval);
+	_render_interval = std::min<float>(_render_interval, dvt_interval);
 	// low delay time window
 	if (!_window_hidden && _time_since_event < curr_profile.low_delay_window) {
 		_render_interval = std::min<float>(_render_interval, ctc_interval);
 		_render_interval = std::min<float>(_render_interval, msgtc_interval);
-		_render_interval = std::min<float>(_render_interval, dvt_interval);
 		_render_interval = std::clamp(
 			_render_interval,
@@ -498,7 +525,6 @@ Screen* MainScreen::render(float time_delta, bool&) {
 	} else if (!_window_hidden && _time_since_event < curr_profile.mid_delay_window) {
 		_render_interval = std::min<float>(_render_interval, ctc_interval);
 		_render_interval = std::min<float>(_render_interval, msgtc_interval);
-		_render_interval = std::min<float>(_render_interval, dvt_interval);
 		_render_interval = std::clamp(
 			_render_interval,

View File

@@ -80,6 +80,17 @@ uint32_t ToxAVI::toxavIterationInterval(void) const {
 void ToxAVI::toxavIterate(void) {
 	toxav_iterate(_tox_av);
+	dispatch(
+		ToxAV_Event::iterate_audio,
+		Events::IterateAudio{
+		}
+	);
+	dispatch(
+		ToxAV_Event::iterate_video,
+		Events::IterateVideo{
+		}
+	);
 }
 uint32_t ToxAVI::toxavAudioIterationInterval(void) const {
@@ -88,6 +99,12 @@ uint32_t ToxAVI::toxavAudioIterationInterval(void) const {
 void ToxAVI::toxavAudioIterate(void) {
 	toxav_audio_iterate(_tox_av);
+	dispatch(
+		ToxAV_Event::iterate_audio,
+		Events::IterateAudio{
+		}
+	);
 }
 uint32_t ToxAVI::toxavVideoIterationInterval(void) const {
@@ -96,6 +113,12 @@ uint32_t ToxAVI::toxavVideoIterationInterval(void) const {
 void ToxAVI::toxavVideoIterate(void) {
 	toxav_video_iterate(_tox_av);
+	dispatch(
+		ToxAV_Event::iterate_video,
+		Events::IterateVideo{
+		}
+	);
 }
 Toxav_Err_Call ToxAVI::toxavCall(uint32_t friend_number, uint32_t audio_bit_rate, uint32_t video_bit_rate) {

View File

@@ -55,6 +55,16 @@ namespace /*toxav*/ Events {
 		int32_t vstride;
 	};
+	// event fired on a/av thread every iterate
+	struct IterateAudio {
+		//float time_delta;
+	};
+	// event fired on v/av thread every iterate
+	struct IterateVideo {
+		//float time_delta;
+	};
 } // Event
 enum class ToxAV_Event : uint32_t {
@@ -65,6 +75,9 @@ enum class ToxAV_Event : uint32_t {
 	friend_audio_frame,
 	friend_video_frame,
+	iterate_audio,
+	iterate_video,
 	MAX
 };
@@ -79,11 +92,15 @@ struct ToxAVEventI {
 	virtual bool onEvent(const Events::FriendVideoBitrate&) { return false; }
 	virtual bool onEvent(const Events::FriendAudioFrame&) { return false; }
 	virtual bool onEvent(const Events::FriendVideoFrame&) { return false; }
+	virtual bool onEvent(const Events::IterateAudio&) { return false; }
+	virtual bool onEvent(const Events::IterateVideo&) { return false; }
 };
 using ToxAVEventProviderI = EventProviderI<ToxAVEventI>;
 // TODO: seperate out implementation from interface
 struct ToxAVI : public ToxAVEventProviderI {
+	// tox and toxav internally are mutex protected
+	// BUT only if "experimental_thread_safety" is enabled
 	Tox* _tox = nullptr;
 	ToxAV* _tox_av = nullptr;
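Editor's note: the new comment points out that the internal locks only exist when toxcore's experimental thread safety is enabled. A minimal sketch of turning it on at startup, assuming the stock c-toxcore options API; without it, calling toxav_audio_iterate()/toxav_video_iterate() from separate threads (as the new Iterate events imply) would not be safe:

#include <tox/tox.h>

Tox* create_tox_for_threaded_av(void) {
	Tox_Options* opts = tox_options_new(nullptr);
	// without this, Tox/ToxAV calls are not internally mutex protected
	tox_options_set_experimental_thread_safety(opts, true);
	Tox* tox = tox_new(opts, nullptr);
	tox_options_free(opts);
	return tox;
}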

View File

@@ -185,7 +185,8 @@ void ToxAVVoIPModel::addAudioSink(ObjectHandle session, uint32_t friend_number)
 	ObjectHandle outgoing_audio {_os.registry(), _os.registry().create()};
 	auto new_asink = std::make_unique<ToxAVCallAudioSink>(_av, friend_number);
-	outgoing_audio.emplace<ToxAVCallAudioSink*>(new_asink.get());
+	auto* new_asink_ptr = new_asink.get();
+	outgoing_audio.emplace<ToxAVCallAudioSink*>(new_asink_ptr);
 	outgoing_audio.emplace<Components::FrameStream2Sink<AudioFrame2>>(std::move(new_asink));
 	outgoing_audio.emplace<Components::StreamSink>(Components::StreamSink::create<AudioFrame2>("ToxAV Friend Call Outgoing Audio"));
@@ -201,6 +202,9 @@ void ToxAVVoIPModel::addAudioSink(ObjectHandle session, uint32_t friend_number)
 	// TODO: tie session to stream
 	_os.throwEventConstruct(outgoing_audio);
+	std::lock_guard lg{_audio_sinks_mutex};
+	_audio_sinks.push_back(new_asink_ptr);
 }
 void ToxAVVoIPModel::addVideoSource(ObjectHandle session, uint32_t friend_number) {
@@ -234,7 +238,8 @@ void ToxAVVoIPModel::addVideoSink(ObjectHandle session, uint32_t friend_number)
 	ObjectHandle outgoing_video {_os.registry(), _os.registry().create()};
 	auto new_vsink = std::make_unique<ToxAVCallVideoSink>(_av, friend_number);
-	outgoing_video.emplace<ToxAVCallVideoSink*>(new_vsink.get());
+	auto* new_vsink_ptr = new_vsink.get();
+	outgoing_video.emplace<ToxAVCallVideoSink*>(new_vsink_ptr);
 	outgoing_video.emplace<Components::FrameStream2Sink<SDLVideoFrame>>(std::move(new_vsink));
 	outgoing_video.emplace<Components::StreamSink>(Components::StreamSink::create<SDLVideoFrame>("ToxAV Friend Call Outgoing Video"));
@@ -250,6 +255,9 @@ void ToxAVVoIPModel::addVideoSink(ObjectHandle session, uint32_t friend_number)
 	// TODO: tie session to stream
 	_os.throwEventConstruct(outgoing_video);
+	std::lock_guard lg{_video_sinks_mutex};
+	_video_sinks.push_back(new_vsink_ptr);
 }
 void ToxAVVoIPModel::destroySession(ObjectHandle session) {
@@ -282,6 +290,20 @@ void ToxAVVoIPModel::destroySession(ObjectHandle session) {
 			_video_sources.erase(it_vsrc);
 		}
 	}
+	if (session.all_of<ToxAVCallAudioSink*>()) {
+		std::lock_guard lg{_audio_sinks_mutex};
+		auto it = std::find(_audio_sinks.cbegin(), _audio_sinks.cend(), session.get<ToxAVCallAudioSink*>());
+		if (it != _audio_sinks.cend()) {
+			_audio_sinks.erase(it);
+		}
+	}
+	if (session.all_of<ToxAVCallVideoSink*>()) {
+		std::lock_guard lg{_video_sinks_mutex};
+		auto it = std::find(_video_sinks.cbegin(), _video_sinks.cend(), session.get<ToxAVCallVideoSink*>());
+		if (it != _video_sinks.cend()) {
+			_video_sinks.erase(it);
+		}
+	}
 	// destory sources
 	if (auto* ss = session.try_get<Components::VoIP::StreamSources>(); ss != nullptr) {
@@ -306,34 +328,10 @@ void ToxAVVoIPModel::destroySession(ObjectHandle session) {
 	_os.registry().destroy(session);
 }
-ToxAVVoIPModel::ToxAVVoIPModel(ObjectStore2& os, ToxAVI& av, Contact3Registry& cr, ToxContactModel2& tcm) :
-	_os(os), _av(av), _cr(cr), _tcm(tcm)
-{
-	_av.subscribe(this, ToxAV_Event::friend_call);
-	_av.subscribe(this, ToxAV_Event::friend_call_state);
-	_av.subscribe(this, ToxAV_Event::friend_audio_bitrate);
-	_av.subscribe(this, ToxAV_Event::friend_video_bitrate);
-	_av.subscribe(this, ToxAV_Event::friend_audio_frame);
-	_av.subscribe(this, ToxAV_Event::friend_video_frame);
-	// attach to all tox friend contacts
-	for (const auto& [cv, _] : _cr.view<Contact::Components::ToxFriendPersistent>().each()) {
-		_cr.emplace<VoIPModelI*>(cv, this);
-	}
-	// TODO: events
-}
-ToxAVVoIPModel::~ToxAVVoIPModel(void) {
-	for (const auto& [ov, voipmodel] : _os.registry().view<VoIPModelI*>().each()) {
-		if (voipmodel == this) {
-			destroySession(_os.objectHandle(ov));
-		}
-	}
-}
-void ToxAVVoIPModel::tick(void) {
-	for (const auto& [oc, asink] : _os.registry().view<ToxAVCallAudioSink*>().each()) {
+void ToxAVVoIPModel::audio_thread_tick(void) {
+	//for (const auto& [oc, asink] : _os.registry().view<ToxAVCallAudioSink*>().each()) {
+	std::lock_guard lg{_audio_sinks_mutex};
+	for (const auto& asink : _audio_sinks) {
 		if (!asink->_writer) {
 			continue;
 		}
@@ -366,8 +364,12 @@
 			}
 		}
 	}
-	for (const auto& [oc, vsink] : _os.registry().view<ToxAVCallVideoSink*>().each()) {
+}
+void ToxAVVoIPModel::video_thread_tick(void) {
+	//for (const auto& [oc, vsink] : _os.registry().view<ToxAVCallVideoSink*>().each()) {
+	std::lock_guard lg{_video_sinks_mutex};
+	for (const auto& vsink : _video_sinks) {
 		if (!vsink->_writer) {
 			continue;
 		}
@@ -401,6 +403,134 @@
 	}
 }
+void ToxAVVoIPModel::handleEvent(const Events::FriendCall& e) {
+	// new incoming call, create voip session, ready to be accepted
+	// (or rejected...)
+	const auto session_contact = _tcm.getContactFriend(e.friend_number);
+	if (!_cr.valid(session_contact)) {
+		return;
+	}
+	ObjectHandle new_session {_os.registry(), _os.registry().create()};
+	new_session.emplace<VoIPModelI*>(this);
+	new_session.emplace<Components::VoIP::TagVoIPSession>(); // ??
+	new_session.emplace<Components::VoIP::Incoming>(session_contact); // in 1on1 its always the same contact, might leave blank
+	new_session.emplace<Components::VoIP::SessionContact>(session_contact);
+	new_session.emplace<Components::VoIP::SessionState>().state = Components::VoIP::SessionState::State::RINGING;
+	new_session.emplace<Components::ToxAVIncomingAV>(e.audio_enabled, e.video_enabled);
+	_os.throwEventConstruct(new_session);
+}
+void ToxAVVoIPModel::handleEvent(const Events::FriendCallState& e) {
+	const auto session_contact = _tcm.getContactFriend(e.friend_number);
+	if (!_cr.valid(session_contact)) {
+		return;
+	}
+	ToxAVFriendCallState s{e.state};
+	// find session(s?)
+	// TODO: keep lookup table
+	for (const auto& [ov, voipmodel] : _os.registry().view<VoIPModelI*>().each()) {
+		if (voipmodel == this) {
+			auto o = _os.objectHandle(ov);
+			if (!o.all_of<Components::VoIP::SessionContact>()) {
+				continue;
+			}
+			if (session_contact != o.get<Components::VoIP::SessionContact>().c) {
+				continue;
+			}
+			if (s.is_error() || s.is_finished()) {
+				// destroy call
+				destroySession(o);
+			} else {
+				// remote accepted our call, or av send/recv conditions changed?
+				o.get<Components::VoIP::SessionState>().state = Components::VoIP::SessionState::State::CONNECTED; // set to in call ??
+				if (s.is_accepting_a() && !o.all_of<Components::ToxAVAudioSink>()) {
+					addAudioSink(o, e.friend_number);
+				} else if (!s.is_accepting_a() && o.all_of<Components::ToxAVAudioSink>()) {
+					// remove asink?
+				}
+				// video
+				if (s.is_accepting_v() && !o.all_of<Components::ToxAVVideoSink>()) {
+					addVideoSink(o, e.friend_number);
+				} else if (!s.is_accepting_v() && o.all_of<Components::ToxAVVideoSink>()) {
+					// remove vsink?
+				}
+				// add/update sources
+				// audio
+				if (s.is_sending_a() && !o.all_of<Components::ToxAVAudioSource>()) {
+					addAudioSource(o, e.friend_number);
+				} else if (!s.is_sending_a() && o.all_of<Components::ToxAVAudioSource>()) {
+					// remove asrc?
+				}
+				// video
+				if (s.is_sending_v() && !o.all_of<Components::ToxAVVideoSource>()) {
+					addVideoSource(o, e.friend_number);
+				} else if (!s.is_sending_v() && o.all_of<Components::ToxAVVideoSource>()) {
+					// remove vsrc?
+				}
+			}
+		}
+	}
+}
+ToxAVVoIPModel::ToxAVVoIPModel(ObjectStore2& os, ToxAVI& av, Contact3Registry& cr, ToxContactModel2& tcm) :
+	_os(os), _av(av), _cr(cr), _tcm(tcm)
+{
+	_av.subscribe(this, ToxAV_Event::friend_call);
+	_av.subscribe(this, ToxAV_Event::friend_call_state);
+	_av.subscribe(this, ToxAV_Event::friend_audio_bitrate);
+	_av.subscribe(this, ToxAV_Event::friend_video_bitrate);
+	_av.subscribe(this, ToxAV_Event::friend_audio_frame);
+	_av.subscribe(this, ToxAV_Event::friend_video_frame);
+	_av.subscribe(this, ToxAV_Event::iterate_audio);
+	_av.subscribe(this, ToxAV_Event::iterate_video);
+	// attach to all tox friend contacts
+	for (const auto& [cv, _] : _cr.view<Contact::Components::ToxFriendPersistent>().each()) {
+		_cr.emplace<VoIPModelI*>(cv, this);
+	}
+	// TODO: events
+}
+ToxAVVoIPModel::~ToxAVVoIPModel(void) {
+	for (const auto& [ov, voipmodel] : _os.registry().view<VoIPModelI*>().each()) {
+		if (voipmodel == this) {
+			destroySession(_os.objectHandle(ov));
+		}
+	}
+}
+void ToxAVVoIPModel::tick(void) {
+	std::lock_guard lg{_e_queue_mutex};
+	while (!_e_queue.empty()) {
+		const auto& e_var = _e_queue.front();
+		if (std::holds_alternative<Events::FriendCall>(e_var)) {
+			const auto& e = std::get<Events::FriendCall>(e_var);
+			handleEvent(e);
+		} else if (std::holds_alternative<Events::FriendCallState>(e_var)) {
+			const auto& e = std::get<Events::FriendCallState>(e_var);
+			handleEvent(e);
+		} else {
+			assert(false && "unk event");
+		}
+		_e_queue.pop_front();
+	}
+}
 ObjectHandle ToxAVVoIPModel::enter(const Contact3 c, const Components::VoIP::DefaultConfig& defaults) {
 	if (!_cr.all_of<Contact::Components::ToxFriendEphemeral>(c)) {
 		return {};
@@ -529,94 +659,24 @@ bool ToxAVVoIPModel::leave(ObjectHandle session) {
 }
 bool ToxAVVoIPModel::onEvent(const Events::FriendCall& e) {
-	// new incoming call, create voip session, ready to be accepted
-	// (or rejected...)
-	const auto session_contact = _tcm.getContactFriend(e.friend_number);
-	if (!_cr.valid(session_contact)) {
-		return false;
-	}
-	ObjectHandle new_session {_os.registry(), _os.registry().create()};
-	new_session.emplace<VoIPModelI*>(this);
-	new_session.emplace<Components::VoIP::TagVoIPSession>(); // ??
-	new_session.emplace<Components::VoIP::Incoming>(session_contact); // in 1on1 its always the same contact, might leave blank
-	new_session.emplace<Components::VoIP::SessionContact>(session_contact);
-	new_session.emplace<Components::VoIP::SessionState>().state = Components::VoIP::SessionState::State::RINGING;
-	new_session.emplace<Components::ToxAVIncomingAV>(e.audio_enabled, e.video_enabled);
-	_os.throwEventConstruct(new_session);
-	return true;
+	std::lock_guard lg{_e_queue_mutex};
+	_e_queue.push_back(e);
+	return true; // false?
 }
 bool ToxAVVoIPModel::onEvent(const Events::FriendCallState& e) {
-	const auto session_contact = _tcm.getContactFriend(e.friend_number);
-	if (!_cr.valid(session_contact)) {
-		return false;
-	}
-	ToxAVFriendCallState s{e.state};
-	// find session(s?)
-	// TODO: keep lookup table
-	for (const auto& [ov, voipmodel] : _os.registry().view<VoIPModelI*>().each()) {
-		if (voipmodel == this) {
-			auto o = _os.objectHandle(ov);
-			if (!o.all_of<Components::VoIP::SessionContact>()) {
-				continue;
-			}
-			if (session_contact != o.get<Components::VoIP::SessionContact>().c) {
-				continue;
-			}
-			if (s.is_error() || s.is_finished()) {
-				// destroy call
-				destroySession(o);
-			} else {
-				// remote accepted our call, or av send/recv conditions changed?
-				o.get<Components::VoIP::SessionState>().state = Components::VoIP::SessionState::State::CONNECTED; // set to in call ??
-				if (s.is_accepting_a() && !o.all_of<Components::ToxAVAudioSink>()) {
-					addAudioSink(o, e.friend_number);
-				} else if (!s.is_accepting_a() && o.all_of<Components::ToxAVAudioSink>()) {
-					// remove asink?
-				}
-				// video
-				if (s.is_accepting_v() && !o.all_of<Components::ToxAVVideoSink>()) {
-					addVideoSink(o, e.friend_number);
-				} else if (!s.is_accepting_v() && o.all_of<Components::ToxAVVideoSink>()) {
-					// remove vsink?
-				}
-				// add/update sources
-				// audio
-				if (s.is_sending_a() && !o.all_of<Components::ToxAVAudioSource>()) {
-					addAudioSource(o, e.friend_number);
-				} else if (!s.is_sending_a() && o.all_of<Components::ToxAVAudioSource>()) {
-					// remove asrc?
-				}
-				// video
-				if (s.is_sending_v() && !o.all_of<Components::ToxAVVideoSource>()) {
-					addVideoSource(o, e.friend_number);
-				} else if (!s.is_sending_v() && o.all_of<Components::ToxAVVideoSource>()) {
-					// remove vsrc?
-				}
-			}
-		}
-	}
-	return true;
+	std::lock_guard lg{_e_queue_mutex};
+	_e_queue.push_back(e);
+	return true; // false?
 }
 bool ToxAVVoIPModel::onEvent(const Events::FriendAudioBitrate&) {
+	// TODO: use this info
 	return false;
 }
 bool ToxAVVoIPModel::onEvent(const Events::FriendVideoBitrate&) {
+	// TODO: use this info
 	return false;
 }
@@ -701,11 +761,23 @@ bool ToxAVVoIPModel::onEvent(const Events::FriendVideoFrame& e) {
 	vsrc.get<FrameStream2MultiSource<SDLVideoFrame>*>()->push({
 		// ms -> us
-		Message::getTimeMS() * 1000, // TODO: make more precise
+		// would be nice if we had been giving this from toxcore
+		// TODO: make more precise
+		Message::getTimeMS() * 1000,
 		new_surf
 	});
 	SDL_DestroySurface(new_surf);
-	return false;
+	return true;
+}
+bool ToxAVVoIPModel::onEvent(const Events::IterateAudio&) {
+	audio_thread_tick();
+	return false;
+}
+bool ToxAVVoIPModel::onEvent(const Events::IterateVideo&) {
+	video_thread_tick();
+	return false;
 }

View File

@@ -7,6 +7,13 @@
 #include "./tox_av.hpp"
 #include <unordered_map>
+#include <variant>
+#include <deque>
+#include <mutex>
+// fwd
+struct ToxAVCallAudioSink;
+struct ToxAVCallVideoSink;
 class ToxAVVoIPModel : protected ToxAVEventI, public VoIPModelI {
 	ObjectStore2& _os;
@@ -14,6 +21,26 @@ class ToxAVVoIPModel : protected ToxAVEventI, public VoIPModelI {
 	Contact3Registry& _cr;
 	ToxContactModel2& _tcm;
+	uint64_t _pad0;
+	// these events need to be worked on the main thread instead
+	// TODO: replac ewith lockless queue
+	std::deque<
+		std::variant<
+			Events::FriendCall,
+			Events::FriendCallState
+			// bitrates
+		>> _e_queue;
+	std::mutex _e_queue_mutex;
+	uint64_t _pad1;
+	std::vector<ToxAVCallAudioSink*> _audio_sinks;
+	std::mutex _audio_sinks_mutex;
+	uint64_t _pad2;
+	std::vector<ToxAVCallVideoSink*> _video_sinks;
+	std::mutex _video_sinks_mutex;
+	uint64_t _pad3;
 	// for faster lookup
 	std::unordered_map<uint32_t, ObjectHandle> _audio_sources;
 	std::unordered_map<uint32_t, ObjectHandle> _video_sources;
@@ -26,10 +53,19 @@ class ToxAVVoIPModel : protected ToxAVEventI, public VoIPModelI {
 	void destroySession(ObjectHandle session);
+	// TODO: this needs to move to the toxav thread
+	// we could use "events" as pre/post audio/video iterate...
+	void audio_thread_tick(void);
+	void video_thread_tick(void);
+	void handleEvent(const Events::FriendCall&);
+	void handleEvent(const Events::FriendCallState&);
 	public:
 		ToxAVVoIPModel(ObjectStore2& os, ToxAVI& av, Contact3Registry& cr, ToxContactModel2& tcm);
 		~ToxAVVoIPModel(void);
+		// handle events coming from toxav thread(s)
 		void tick(void);
 	public: // voip model
@@ -44,5 +80,7 @@ class ToxAVVoIPModel : protected ToxAVEventI, public VoIPModelI {
 		bool onEvent(const Events::FriendVideoBitrate&) override;
 		bool onEvent(const Events::FriendAudioFrame&) override;
 		bool onEvent(const Events::FriendVideoFrame&) override;
+		bool onEvent(const Events::IterateAudio&) override;
+		bool onEvent(const Events::IterateVideo&) override;
 };