big stream progress

- stream manager (with bare bones ui)
- debug video tap
- toxav progress
- toxav debug ui
- some default devices
This commit is contained in:
Green Sky 2024-09-15 11:39:23 +02:00
parent a100eaae82
commit 964f6de656
No known key found for this signature in database
19 changed files with 989 additions and 69 deletions

View File

@ -103,6 +103,13 @@ target_sources(tomato PUBLIC
./chat_gui4.hpp ./chat_gui4.hpp
./chat_gui4.cpp ./chat_gui4.cpp
./stream_manager.hpp
./stream_manager_ui.hpp
./stream_manager_ui.cpp
./debug_video_tap.hpp
./debug_video_tap.cpp
./content/content.hpp ./content/content.hpp
./content/frame_stream2.hpp ./content/frame_stream2.hpp
./content/sdl_video_frame_stream2.hpp ./content/sdl_video_frame_stream2.hpp
@ -116,6 +123,9 @@ if (TOMATO_TOX_AV)
target_sources(tomato PUBLIC target_sources(tomato PUBLIC
./tox_av.hpp ./tox_av.hpp
./tox_av.cpp ./tox_av.cpp
./debug_tox_call.hpp
./debug_tox_call.cpp
) )
target_compile_definitions(tomato PUBLIC TOMATO_TOX_AV) target_compile_definitions(tomato PUBLIC TOMATO_TOX_AV)

View File

@ -33,6 +33,20 @@ struct FrameStream2I {
virtual bool push(const FrameType& value) = 0; virtual bool push(const FrameType& value) = 0;
}; };
template<typename FrameType>
struct FrameStream2SourceI {
virtual ~FrameStream2SourceI(void) {}
[[nodiscard]] virtual std::shared_ptr<FrameStream2I<FrameType>> subscribe(void) = 0;
virtual bool unsubscribe(const std::shared_ptr<FrameStream2I<FrameType>>& sub) = 0;
};
template<typename FrameType>
struct FrameStream2SinkI {
virtual ~FrameStream2SinkI(void) {}
[[nodiscard]] virtual std::shared_ptr<FrameStream2I<FrameType>> subscribe(void) = 0;
virtual bool unsubscribe(const std::shared_ptr<FrameStream2I<FrameType>>& sub) = 0;
};
// needs count frames queue size // needs count frames queue size
// having ~1-2sec buffer size is often sufficent // having ~1-2sec buffer size is often sufficent
template<typename FrameType> template<typename FrameType>
@ -76,33 +90,52 @@ struct QueuedFrameStream2 : public FrameStream2I<FrameType> {
} }
}; };
// implements a stream that pops or pushes to all sub streams // implements a stream that pushes to all sub streams
// you need to mind the direction you intend it to use
// release all streams before destructing! // TODO: improve lifetime here, maybe some shared semaphore? // release all streams before destructing! // TODO: improve lifetime here, maybe some shared semaphore?
template<typename FrameType, typename SubStreamType = QueuedFrameStream2<FrameType>> template<typename FrameType, typename SubStreamType = QueuedFrameStream2<FrameType>>
struct FrameStream2MultiStream : public FrameStream2I<FrameType> { struct FrameStream2MultiSource : public FrameStream2SourceI<FrameType>, public FrameStream2I<FrameType> {
using sub_stream_type_t = SubStreamType; using sub_stream_type_t = SubStreamType;
// pointer stability // pointer stability
std::vector<std::unique_ptr<SubStreamType>> _sub_streams; std::vector<std::shared_ptr<SubStreamType>> _sub_streams;
std::mutex _sub_stream_lock; // accessing the _sub_streams array needs to be exclusive std::mutex _sub_stream_lock; // accessing the _sub_streams array needs to be exclusive
// a simple lock here is ok, since this tends to be a rare operation, // a simple lock here is ok, since this tends to be a rare operation,
// except for the push, which is always on the same thread // except for the push, which is always on the same thread
// TODO: forward args instead virtual ~FrameStream2MultiSource(void) {}
SubStreamType* aquireSubStream(size_t queue_size = 10, bool lossy = true) {
//// TODO: forward args instead
//SubStreamType* aquireSubStream(size_t queue_size = 10, bool lossy = true) {
// std::lock_guard lg{_sub_stream_lock};
// return _sub_streams.emplace_back(std::make_unique<SubStreamType>(queue_size, lossy)).get();
//}
std::shared_ptr<FrameStream2I<FrameType>> subscribe(void) override {
// TODO: args???
size_t queue_size = 10;
bool lossy = true;
std::lock_guard lg{_sub_stream_lock}; std::lock_guard lg{_sub_stream_lock};
return _sub_streams.emplace_back(std::make_unique<SubStreamType>(queue_size, lossy)).get(); return _sub_streams.emplace_back(std::make_unique<SubStreamType>(queue_size, lossy));
} }
void releaseSubStream(SubStreamType* sub) { //void releaseSubStream(SubStreamType* sub) {
// std::lock_guard lg{_sub_stream_lock};
// for (auto it = _sub_streams.begin(); it != _sub_streams.end(); it++) {
// if (it->get() == sub) {
// _sub_streams.erase(it);
// break;
// }
// }
//}
bool unsubscribe(const std::shared_ptr<FrameStream2I<FrameType>>& sub) override {
std::lock_guard lg{_sub_stream_lock}; std::lock_guard lg{_sub_stream_lock};
for (auto it = _sub_streams.begin(); it != _sub_streams.end(); it++) { for (auto it = _sub_streams.begin(); it != _sub_streams.end(); it++) {
if (it->get() == sub) { if (*it == sub) {
_sub_streams.erase(it); _sub_streams.erase(it);
break; return true;
} }
} }
return false; // ?
} }
// stream interface // stream interface
@ -113,6 +146,7 @@ struct FrameStream2MultiStream : public FrameStream2I<FrameType> {
} }
std::optional<FrameType> pop(void) override { std::optional<FrameType> pop(void) override {
// nope
assert(false && "this logic is very frame type specific, provide an impl"); assert(false && "this logic is very frame type specific, provide an impl");
return std::nullopt; return std::nullopt;
} }

View File

@ -124,7 +124,7 @@ bool SDLAudioOutputDeviceDefaultInstance::push(const AudioFrame& value) {
std::cerr << "empty audio frame??\n"; std::cerr << "empty audio frame??\n";
} }
if (SDL_PutAudioStreamData(_stream.get(), data.ptr, data.size * sizeof(int16_t)) < 0) { if (!SDL_PutAudioStreamData(_stream.get(), data.ptr, data.size * sizeof(int16_t))) {
std::cerr << "put data error\n"; std::cerr << "put data error\n";
return false; // return true? return false; // return true?
} }
@ -145,26 +145,40 @@ SDLAudioOutputDeviceDefaultInstance::SDLAudioOutputDeviceDefaultInstance(SDLAudi
SDLAudioOutputDeviceDefaultInstance::~SDLAudioOutputDeviceDefaultInstance(void) { SDLAudioOutputDeviceDefaultInstance::~SDLAudioOutputDeviceDefaultInstance(void) {
} }
SDLAudioOutputDeviceDefaultInstance SDLAudioOutputDeviceDefaultFactory::create(void) {
SDLAudioOutputDeviceDefaultInstance new_instance; SDLAudioOutputDeviceDefaultSink::~SDLAudioOutputDeviceDefaultSink(void) {
// TODO: pause and close device?
}
std::shared_ptr<FrameStream2I<AudioFrame>> SDLAudioOutputDeviceDefaultSink::subscribe(void) {
auto new_instance = std::make_shared<SDLAudioOutputDeviceDefaultInstance>();
constexpr SDL_AudioSpec spec = { SDL_AUDIO_S16, 1, 48000 }; constexpr SDL_AudioSpec spec = { SDL_AUDIO_S16, 1, 48000 };
new_instance._stream = { new_instance->_stream = {
SDL_OpenAudioDeviceStream(SDL_AUDIO_DEVICE_DEFAULT_PLAYBACK, &spec, nullptr, nullptr), SDL_OpenAudioDeviceStream(SDL_AUDIO_DEVICE_DEFAULT_PLAYBACK, &spec, nullptr, nullptr),
&SDL_DestroyAudioStream &SDL_DestroyAudioStream
}; };
new_instance._last_sample_rate = spec.freq; new_instance->_last_sample_rate = spec.freq;
new_instance._last_channels = spec.channels; new_instance->_last_channels = spec.channels;
new_instance._last_format = spec.format; new_instance->_last_format = spec.format;
if (!static_cast<bool>(new_instance._stream)) { if (!static_cast<bool>(new_instance->_stream)) {
std::cerr << "SDL open audio device failed!\n"; std::cerr << "SDL open audio device failed!\n";
return nullptr;
} }
const auto audio_device_id = SDL_GetAudioStreamDevice(new_instance._stream.get()); const auto audio_device_id = SDL_GetAudioStreamDevice(new_instance->_stream.get());
SDL_ResumeAudioDevice(audio_device_id); SDL_ResumeAudioDevice(audio_device_id);
return new_instance; return new_instance;
} }
bool SDLAudioOutputDeviceDefaultSink::unsubscribe(const std::shared_ptr<FrameStream2I<AudioFrame>>& sub) {
if (!sub) {
return false;
}
return true;
}

View File

@ -11,13 +11,13 @@
#include <thread> #include <thread>
// we dont have to multicast ourself, because sdl streams and virtual devices already do this, but we do it anyway // we dont have to multicast ourself, because sdl streams and virtual devices already do this, but we do it anyway
using AudioFrameStream2MultiStream = FrameStream2MultiStream<AudioFrame>; using AudioFrameStream2MultiSource = FrameStream2MultiSource<AudioFrame>;
using AudioFrameStream2 = AudioFrameStream2MultiStream::sub_stream_type_t; // just use the default for now using AudioFrameStream2 = AudioFrameStream2MultiSource::sub_stream_type_t; // just use the default for now
// object components? // object components?
// source // source
struct SDLAudioInputDeviceDefault : protected AudioFrameStream2MultiStream { struct SDLAudioInputDeviceDefault : protected AudioFrameStream2MultiSource {
std::unique_ptr<SDL_AudioStream, decltype(&SDL_DestroyAudioStream)> _stream; std::unique_ptr<SDL_AudioStream, decltype(&SDL_DestroyAudioStream)> _stream;
std::atomic<bool> _thread_should_quit {false}; std::atomic<bool> _thread_should_quit {false};
@ -30,12 +30,12 @@ struct SDLAudioInputDeviceDefault : protected AudioFrameStream2MultiStream {
// stops the thread and closes the device? // stops the thread and closes the device?
~SDLAudioInputDeviceDefault(void); ~SDLAudioInputDeviceDefault(void);
using AudioFrameStream2MultiStream::aquireSubStream; using AudioFrameStream2MultiSource::subscribe;
using AudioFrameStream2MultiStream::releaseSubStream; using AudioFrameStream2MultiSource::unsubscribe;
}; };
// sink // sink
struct SDLAudioOutputDeviceDefaultInstance : protected AudioFrameStream2I { struct SDLAudioOutputDeviceDefaultInstance : public AudioFrameStream2I {
std::unique_ptr<SDL_AudioStream, decltype(&SDL_DestroyAudioStream)> _stream; std::unique_ptr<SDL_AudioStream, decltype(&SDL_DestroyAudioStream)> _stream;
uint32_t _last_sample_rate {48'000}; uint32_t _last_sample_rate {48'000};
@ -53,9 +53,12 @@ struct SDLAudioOutputDeviceDefaultInstance : protected AudioFrameStream2I {
}; };
// constructs entirely new streams, since sdl handles sync and mixing for us (or should) // constructs entirely new streams, since sdl handles sync and mixing for us (or should)
struct SDLAudioOutputDeviceDefaultFactory { struct SDLAudioOutputDeviceDefaultSink : public FrameStream2SinkI<AudioFrame> {
// TODO: pause device? // TODO: pause device?
SDLAudioOutputDeviceDefaultInstance create(void); ~SDLAudioOutputDeviceDefaultSink(void);
std::shared_ptr<FrameStream2I<AudioFrame>> subscribe(void) override;
bool unsubscribe(const std::shared_ptr<FrameStream2I<AudioFrame>>& sub) override;
}; };

View File

@ -1,4 +1,6 @@
#include "./sdl_video_frame_stream2.hpp" #include "./sdl_video_frame_stream2.hpp"
#include "SDL3/SDL_camera.h"
#include "SDL3/SDL_pixels.h"
#include <chrono> #include <chrono>
#include <cstdint> #include <cstdint>
@ -6,6 +8,7 @@
#include <memory> #include <memory>
#include <thread> #include <thread>
// TODO: move out and create lazy cam for each device
SDLVideoCameraContent::SDLVideoCameraContent(void) { SDLVideoCameraContent::SDLVideoCameraContent(void) {
int devcount {0}; int devcount {0};
//SDL_CameraDeviceID *devices = SDL_GetCameraDevices(&devcount); //SDL_CameraDeviceID *devices = SDL_GetCameraDevices(&devcount);
@ -25,7 +28,6 @@ SDLVideoCameraContent::SDLVideoCameraContent(void) {
int speccount {0}; int speccount {0};
SDL_CameraSpec** specs = SDL_GetCameraSupportedFormats(device, &speccount); SDL_CameraSpec** specs = SDL_GetCameraSupportedFormats(device, &speccount);
//SDL_CameraSpec* specs = SDL_GetCameraSupportedFormats(device, &speccount);
if (specs == nullptr) { if (specs == nullptr) {
std::cout << " - no supported spec\n"; std::cout << " - no supported spec\n";
} else { } else {
@ -42,22 +44,29 @@ SDLVideoCameraContent::SDLVideoCameraContent(void) {
// FORCE a diffrent pixel format // FORCE a diffrent pixel format
//SDL_PIXELFORMAT_RGBA8888, //SDL_PIXELFORMAT_RGBA8888,
//SDL_PIXELFORMAT_UNKNOWN, //SDL_PIXELFORMAT_UNKNOWN,
SDL_PIXELFORMAT_IYUV, //SDL_PIXELFORMAT_IYUV,
SDL_PIXELFORMAT_YUY2,
SDL_COLORSPACE_SRGB,
//SDL_COLORSPACE_UNKNOWN, //SDL_COLORSPACE_UNKNOWN,
//SDL_COLORSPACE_SRGB,
//SDL_COLORSPACE_SRGB_LINEAR,
SDL_COLORSPACE_YUV_DEFAULT,
//1280, 720, //1280, 720,
//640, 360, //640, 360,
640, 480, //640, 480,
696, 392,
//1, 30 //1, 30
30, 1 30, 1
}; };
_camera = { _camera = {
SDL_OpenCamera(devices[0], &spec), //SDL_OpenCamera(devices[devcount-1], &spec),
SDL_OpenCamera(devices[0], nullptr),
//SDL_OpenCamera(devices[0], &spec),
&SDL_CloseCamera &SDL_CloseCamera
}; };
SDL_GetCameraFormat(_camera.get(), &spec);
} }
SDL_free(devices); SDL_free(devices);
if (!static_cast<bool>(_camera)) { if (!static_cast<bool>(_camera)) {
@ -76,8 +85,9 @@ SDLVideoCameraContent::SDLVideoCameraContent(void) {
SDL_CameraSpec spec; SDL_CameraSpec spec;
float fps {1.f}; float fps {1.f};
if (SDL_GetCameraFormat(_camera.get(), &spec) != 0) { if (!SDL_GetCameraFormat(_camera.get(), &spec)) {
// meh // meh
throw int(5);
} else { } else {
fps = float(spec.framerate_numerator)/float(spec.framerate_denominator); fps = float(spec.framerate_numerator)/float(spec.framerate_denominator);
std::cout << "camera fps: " << fps << "fps (" << spec.framerate_numerator << "/" << spec.framerate_denominator << ")\n"; std::cout << "camera fps: " << fps << "fps (" << spec.framerate_numerator << "/" << spec.framerate_denominator << ")\n";

View File

@ -28,25 +28,28 @@ struct SDLVideoFrame {
SDLVideoFrame(const SDLVideoFrame& other) { SDLVideoFrame(const SDLVideoFrame& other) {
timestampNS = other.timestampNS; timestampNS = other.timestampNS;
if (static_cast<bool>(other.surface)) { if (static_cast<bool>(other.surface)) {
// TODO: use SDL_DuplicateSurface() //surface = {
// SDL_CreateSurface(
// other.surface->w,
// other.surface->h,
// other.surface->format
// ),
// &SDL_DestroySurface
//};
//SDL_BlitSurface(other.surface.get(), nullptr, surface.get(), nullptr);
surface = { surface = {
SDL_CreateSurface( SDL_DuplicateSurface(other.surface.get()),
other.surface->w,
other.surface->h,
other.surface->format
),
&SDL_DestroySurface &SDL_DestroySurface
}; };
SDL_BlitSurface(other.surface.get(), nullptr, surface.get(), nullptr);
} }
} }
SDLVideoFrame& operator=(const SDLVideoFrame& other) = delete; SDLVideoFrame& operator=(const SDLVideoFrame& other) = delete;
}; };
using SDLVideoFrameStream2MultiStream = FrameStream2MultiStream<SDLVideoFrame>; using SDLVideoFrameStream2MultiSource = FrameStream2MultiSource<SDLVideoFrame>;
using SDLVideoFrameStream2 = SDLVideoFrameStream2MultiStream::sub_stream_type_t; // just use the default for now using SDLVideoFrameStream2 = SDLVideoFrameStream2MultiSource::sub_stream_type_t; // just use the default for now
struct SDLVideoCameraContent : protected SDLVideoFrameStream2MultiStream { struct SDLVideoCameraContent : public SDLVideoFrameStream2MultiSource {
// meh, empty default // meh, empty default
std::unique_ptr<SDL_Camera, decltype(&SDL_CloseCamera)> _camera {nullptr, &SDL_CloseCamera}; std::unique_ptr<SDL_Camera, decltype(&SDL_CloseCamera)> _camera {nullptr, &SDL_CloseCamera};
std::atomic<bool> _thread_should_quit {false}; std::atomic<bool> _thread_should_quit {false};
@ -60,7 +63,7 @@ struct SDLVideoCameraContent : protected SDLVideoFrameStream2MultiStream {
~SDLVideoCameraContent(void); ~SDLVideoCameraContent(void);
// make only some of writer public // make only some of writer public
using SDLVideoFrameStream2MultiStream::aquireSubStream; using SDLVideoFrameStream2MultiSource::subscribe;
using SDLVideoFrameStream2MultiStream::releaseSubStream; using SDLVideoFrameStream2MultiSource::unsubscribe;
}; };

226
src/debug_tox_call.cpp Normal file
View File

@ -0,0 +1,226 @@
#include "./debug_tox_call.hpp"
#include <SDL3/SDL.h>
#include <cstdint>
#include <imgui/imgui.h>
#include <cstring>
#include <iostream>
// fwd
namespace Message {
uint64_t getTimeMS(void);
}
static constexpr float lerp(float a, float b, float t) {
return a + t * (b - a);
}
namespace Components {
struct ToxAVFriendAudioSource {
};
struct ToxAVFriendAudioSink {
};
struct ToxAVFriendVideoSource {
};
struct ToxAVFriendVideoSink {
};
}
DebugToxCall::DebugToxCall(ObjectStore2& os, ToxAV& toxav, TextureUploaderI& tu) : _os(os), _toxav(toxav), _tu(tu) {
_toxav.subscribe(this, ToxAV_Event::friend_call);
_toxav.subscribe(this, ToxAV_Event::friend_call_state);
_toxav.subscribe(this, ToxAV_Event::friend_audio_bitrate);
_toxav.subscribe(this, ToxAV_Event::friend_video_bitrate);
_toxav.subscribe(this, ToxAV_Event::friend_audio_frame);
_toxav.subscribe(this, ToxAV_Event::friend_video_frame);
}
void DebugToxCall::tick(float time_delta) {
}
float DebugToxCall::render(void) {
float next_frame {2.f};
if (ImGui::Begin("toxav debug")) {
ImGui::Text("Calls:");
ImGui::Indent();
for (auto& [fid, call] : _calls) {
ImGui::PushID(fid);
ImGui::Text("fid:%d state:%d", fid, call.state);
if (call.incoming) {
ImGui::SameLine();
if (ImGui::SmallButton("answer")) {
const auto ret = _toxav.toxavAnswer(fid, 0, 0);
if (ret == TOXAV_ERR_ANSWER_OK) {
call.incoming = false;
}
}
} else if (call.state != TOXAV_FRIEND_CALL_STATE_FINISHED) {
next_frame = std::min(next_frame, 0.1f);
ImGui::SameLine();
if (ImGui::SmallButton("hang up")) {
const auto ret = _toxav.toxavCallControl(fid, TOXAV_CALL_CONTROL_CANCEL);
if (ret == TOXAV_ERR_CALL_CONTROL_OK) {
// we hung up
// not sure if its possible for toxcore to tell this us too when the other side does this at the same time?
call.state = TOXAV_FRIEND_CALL_STATE_FINISHED;
}
}
//if (ImGui::BeginCombo("audio src", "---")) {
// ImGui::EndCombo();
//}
//if (ImGui::BeginCombo("video src", "---")) {
// ImGui::EndCombo();
//}
}
//if (call.last_v_frame_tex != 0 && ImGui::BeginItemTooltip()) {
if (call.last_v_frame_tex != 0) {
next_frame = std::min(next_frame, call.v_frame_interval_avg);
ImGui::Text("vframe interval avg: %f", call.v_frame_interval_avg);
ImGui::Image(
reinterpret_cast<ImTextureID>(call.last_v_frame_tex),
//ImVec2{float(call.last_v_frame_width), float(call.last_v_frame_height)}
ImVec2{100.f, 100.f * float(call.last_v_frame_height)/call.last_v_frame_width}
);
//ImGui::EndTooltip();
}
ImGui::PopID();
}
ImGui::Unindent();
}
ImGui::End();
return next_frame;
}
bool DebugToxCall::onEvent(const Events::FriendCall& e) {
auto& call = _calls[e.friend_number];
call.incoming = true;
call.incoming_a = e.audio_enabled;
call.incoming_v = e.video_enabled;
//call.state = TOXAV_FRIEND_CALL_STATE_NONE;
return true;
}
bool DebugToxCall::onEvent(const Events::FriendCallState& e) {
auto& call = _calls[e.friend_number];
call.state = e.state;
return true;
}
bool DebugToxCall::onEvent(const Events::FriendAudioBitrate&) {
return false;
}
bool DebugToxCall::onEvent(const Events::FriendVideoBitrate&) {
return false;
}
bool DebugToxCall::onEvent(const Events::FriendAudioFrame& e) {
auto& call = _calls[e.friend_number];
call.num_a_frames++;
return false;
}
bool DebugToxCall::onEvent(const Events::FriendVideoFrame& e) {
auto& call = _calls[e.friend_number];
call.num_v_frames++;
if (call.last_v_frame_timepoint == 0) {
call.last_v_frame_timepoint = Message::getTimeMS();
} else {
const auto new_time_point = Message::getTimeMS();
auto time_delta_ms = new_time_point - call.last_v_frame_timepoint;
call.last_v_frame_timepoint = new_time_point;
time_delta_ms = std::min<uint64_t>(time_delta_ms, 10*1000); // cap at 10sec
if (call.v_frame_interval_avg == 0) {
call.v_frame_interval_avg = time_delta_ms/1000.f;
} else {
std::cerr << "lerp(" << call.v_frame_interval_avg << ", " << time_delta_ms/1000.f << ", 0.2f) = ";
call.v_frame_interval_avg = lerp(call.v_frame_interval_avg, time_delta_ms/1000.f, 0.2f);
std::cerr << call.v_frame_interval_avg << "\n";
}
}
auto* new_surf = SDL_CreateSurface(e.width, e.height, SDL_PIXELFORMAT_IYUV);
assert(new_surf);
if (SDL_LockSurface(new_surf)) {
// copy the data
// we know how the implementation works, its y u v consecutivlely
// y
for (size_t y = 0; y < e.height; y++) {
std::memcpy(
//static_cast<uint8_t*>(new_surf->pixels) + new_surf->pitch*y,
static_cast<uint8_t*>(new_surf->pixels) + e.width*y,
e.y.ptr + e.ystride*y,
e.width
);
}
// u
for (size_t y = 0; y < e.height/2; y++) {
std::memcpy(
static_cast<uint8_t*>(new_surf->pixels) + (e.width*e.height) + (e.width/2)*y,
e.u.ptr + e.ustride*y,
e.width/2
);
}
// v
for (size_t y = 0; y < e.height/2; y++) {
std::memcpy(
static_cast<uint8_t*>(new_surf->pixels) + (e.width*e.height) + ((e.width/2)*(e.height/2)) + (e.width/2)*y,
e.v.ptr + e.vstride*y,
e.width/2
);
}
SDL_UnlockSurface(new_surf);
}
auto* converted_surf = SDL_ConvertSurfaceAndColorspace(new_surf, SDL_PIXELFORMAT_RGBA32, nullptr, SDL_COLORSPACE_YUV_DEFAULT, 0);
SDL_DestroySurface(new_surf);
if (converted_surf == nullptr) {
assert(false);
return true;
}
SDL_LockSurface(converted_surf);
if (call.last_v_frame_tex == 0 || call.last_v_frame_width != e.width || call.last_v_frame_height != e.height) {
_tu.destroy(call.last_v_frame_tex);
call.last_v_frame_tex = _tu.uploadRGBA(
static_cast<const uint8_t*>(converted_surf->pixels),
converted_surf->w,
converted_surf->h,
TextureUploaderI::LINEAR,
TextureUploaderI::STREAMING
);
call.last_v_frame_width = e.width;
call.last_v_frame_height = e.height;
} else {
_tu.updateRGBA(call.last_v_frame_tex, static_cast<const uint8_t*>(converted_surf->pixels), converted_surf->w * converted_surf->h * 4);
}
SDL_UnlockSurface(converted_surf);
SDL_DestroySurface(converted_surf);
// TODO: use this instead
//SDL_UpdateYUVTexture(tex, nullptr, e.y.ptr, e.ystride,...
std::cout << "DTC: updated video texture " << call.last_v_frame_tex << "\n";
return false;
}

53
src/debug_tox_call.hpp Normal file
View File

@ -0,0 +1,53 @@
#pragma once
#include <solanaceae/object_store/fwd.hpp>
#include "./tox_av.hpp"
#include "./texture_uploader.hpp"
#include <map>
#include <cstdint>
class DebugToxCall : public ToxAVEventI {
ObjectStore2& _os;
ToxAV& _toxav;
TextureUploaderI& _tu;
struct Call {
bool incoming {false};
bool incoming_a {false};
bool incoming_v {false};
uint32_t state {0}; // ? just last state ?
uint32_t abr {0};
uint32_t vbr {0};
size_t num_a_frames {0};
size_t num_v_frames {0};
// fps moving interval
uint64_t last_v_frame_timepoint {0};
float v_frame_interval_avg {0.f};
uint64_t last_v_frame_tex {0};
uint64_t last_v_frame_width {0};
uint64_t last_v_frame_height {0};
};
// tox friend id -> call
std::map<uint32_t, Call> _calls;
public:
DebugToxCall(ObjectStore2& os, ToxAV& toxav, TextureUploaderI& tu);
~DebugToxCall(void) {}
void tick(float time_delta);
float render(void);
protected: // toxav events
bool onEvent(const Events::FriendCall&) override;
bool onEvent(const Events::FriendCallState&) override;
bool onEvent(const Events::FriendAudioBitrate&) override;
bool onEvent(const Events::FriendVideoBitrate&) override;
bool onEvent(const Events::FriendAudioFrame&) override;
bool onEvent(const Events::FriendVideoFrame&) override;
};

204
src/debug_video_tap.cpp Normal file
View File

@ -0,0 +1,204 @@
#include "./debug_video_tap.hpp"
#include <solanaceae/object_store/object_store.hpp>
#include <entt/entity/entity.hpp>
#include <SDL3/SDL.h>
#include <imgui/imgui.h>
#include "./content/sdl_video_frame_stream2.hpp"
#include <string>
#include <memory>
#include <iostream>
struct DebugVideoTapSink : public FrameStream2SinkI<SDLVideoFrame> {
std::shared_ptr<QueuedFrameStream2<SDLVideoFrame>> _writer;
DebugVideoTapSink(void) {}
~DebugVideoTapSink(void) {}
// sink
std::shared_ptr<FrameStream2I<SDLVideoFrame>> subscribe(void) override {
if (_writer) {
// max 1 (exclusive)
return nullptr;
}
_writer = std::make_shared<QueuedFrameStream2<SDLVideoFrame>>(1, true);
return _writer;
}
bool unsubscribe(const std::shared_ptr<FrameStream2I<SDLVideoFrame>>& sub) override {
if (!sub || !_writer) {
// nah
return false;
}
if (sub == _writer) {
_writer = nullptr;
return true;
}
// what
return false;
}
};
DebugVideoTap::DebugVideoTap(ObjectStore2& os, StreamManager& sm, TextureUploaderI& tu) : _os(os), _sm(sm), _tu(tu) {
// post self as video sink
_tap = {_os.registry(), _os.registry().create()};
try {
auto dvts = std::make_unique<DebugVideoTapSink>();
_tap.emplace<DebugVideoTapSink*>(dvts.get()); // to get our data back
_tap.emplace<Components::FrameStream2Sink<SDLVideoFrame>>(
std::move(dvts)
);
_tap.emplace<Components::StreamSink>("DebugVideoTap", std::string{entt::type_name<SDLVideoFrame>::value()});
} catch (...) {
_os.registry().destroy(_tap);
}
}
DebugVideoTap::~DebugVideoTap(void) {
if (static_cast<bool>(_tap)) {
_os.registry().destroy(_tap);
}
}
float DebugVideoTap::render(void) {
if (ImGui::Begin("DebugVideoTap")) {
// list sources dropdown to connect too
std::string preview_label {"none"};
if (static_cast<bool>(_selected_src)) {
preview_label = std::to_string(entt::to_integral(_selected_src.entity())) + " (" + _selected_src.get<Components::StreamSource>().name + ")";
}
if (ImGui::BeginCombo("selected source", preview_label.c_str())) {
if (ImGui::Selectable("none")) {
switchTo({});
}
for (const auto& [oc, ss] : _os.registry().view<Components::StreamSource>().each()) {
if (ss.frame_type_name != entt::type_name<SDLVideoFrame>::value()) {
continue;
}
std::string label = std::to_string(entt::to_integral(oc)) + " (" + ss.name + ")";
if (ImGui::Selectable(label.c_str())) {
switchTo({_os.registry(), oc});
}
}
ImGui::EndCombo();
}
{ // first pull the latest img from sink and update the texture
assert(static_cast<bool>(_tap));
auto& dvtsw = _tap.get<DebugVideoTapSink*>()->_writer;
if (dvtsw) {
while (true) {
auto new_frame_opt = dvtsw->pop();
if (new_frame_opt.has_value()) {
// timing
if (_v_last_ts == 0) {
_v_last_ts = new_frame_opt.value().timestampNS;
} else {
auto delta = int64_t(new_frame_opt.value().timestampNS) - int64_t(_v_last_ts);
_v_last_ts = new_frame_opt.value().timestampNS;
//delta = std::min<int64_t>(delta, 10*1000*1000);
if (_v_interval_avg == 0) {
_v_interval_avg = delta/1'000'000'000.f;
} else {
const float r = 0.2f;
_v_interval_avg = _v_interval_avg * (1-r) + (delta/1'000'000'000.f) * r;
}
}
SDL_Surface* new_frame_surf = new_frame_opt.value().surface.get();
SDL_Surface* converted_surf = new_frame_surf;
if (new_frame_surf->format != SDL_PIXELFORMAT_RGBA32) {
// we need to convert
//std::cerr << "DVT: need to convert\n";
converted_surf = SDL_ConvertSurfaceAndColorspace(new_frame_surf, SDL_PIXELFORMAT_RGBA32, nullptr, SDL_COLORSPACE_RGB_DEFAULT, 0);
assert(converted_surf->format == SDL_PIXELFORMAT_RGBA32);
}
SDL_LockSurface(converted_surf);
if (_tex == 0 || (int)_tex_w != converted_surf->w || (int)_tex_h != converted_surf->h) {
_tu.destroy(_tex);
_tex = _tu.uploadRGBA(
static_cast<const uint8_t*>(converted_surf->pixels),
converted_surf->w,
converted_surf->h,
TextureUploaderI::LINEAR,
TextureUploaderI::STREAMING
);
_tex_w = converted_surf->w;
_tex_h = converted_surf->h;
} else {
_tu.updateRGBA(_tex, static_cast<const uint8_t*>(converted_surf->pixels), converted_surf->w * converted_surf->h * 4);
}
SDL_UnlockSurface(converted_surf);
if (new_frame_surf != converted_surf) {
// clean up temp
SDL_DestroySurface(converted_surf);
}
} else {
break;
}
}
}
}
// img here
if (_tex != 0) {
ImGui::Text("moving avg interval: %f", _v_interval_avg);
const float img_w = ImGui::GetContentRegionAvail().x;
ImGui::Image(
reinterpret_cast<ImTextureID>(_tex),
ImVec2{img_w, img_w * float(_tex_h)/_tex_w}
);
}
}
ImGui::End();
if (_v_interval_avg != 0) {
return _v_interval_avg;
} else {
return 2.f;
}
}
void DebugVideoTap::switchTo(ObjectHandle o) {
if (o == _selected_src) {
std::cerr << "DVT: switch to same ...\n";
return;
}
_tu.destroy(_tex);
_tex = 0;
_v_last_ts = 0;
_v_interval_avg = 0;
if (static_cast<bool>(_selected_src)) {
_sm.disconnect<SDLVideoFrame>(_selected_src, _tap);
}
if (static_cast<bool>(o) && _sm.connect<SDLVideoFrame>(o, _tap)) {
_selected_src = o;
} else {
std::cerr << "DVT: cleared video source\n";
_selected_src = {};
}
}

32
src/debug_video_tap.hpp Normal file
View File

@ -0,0 +1,32 @@
#pragma once
#include <cstdint>
#include <solanaceae/object_store/fwd.hpp>
#include "./stream_manager.hpp"
#include "./texture_uploader.hpp"
// provides a sink and a small window displaying a SDLVideoFrame
class DebugVideoTap {
ObjectStore2& _os;
StreamManager& _sm;
TextureUploaderI& _tu;
ObjectHandle _selected_src;
ObjectHandle _tap;
uint64_t _tex {0};
uint32_t _tex_w {0};
uint32_t _tex_h {0};
uint64_t _v_last_ts {0}; // ns
float _v_interval_avg {0.f}; // s
public:
DebugVideoTap(ObjectStore2& os, StreamManager& sm, TextureUploaderI& tu);
~DebugVideoTap(void);
float render(void);
void switchTo(ObjectHandle o);
};

View File

@ -38,7 +38,7 @@ int main(int argc, char** argv) {
// setup hints // setup hints
#ifndef __ANDROID__ #ifndef __ANDROID__
if (SDL_SetHint(SDL_HINT_VIDEO_ALLOW_SCREENSAVER, "1") != SDL_TRUE) { if (!SDL_SetHint(SDL_HINT_VIDEO_ALLOW_SCREENSAVER, "1")) {
std::cerr << "Failed to set '" << SDL_HINT_VIDEO_ALLOW_SCREENSAVER << "' to 1\n"; std::cerr << "Failed to set '" << SDL_HINT_VIDEO_ALLOW_SCREENSAVER << "' to 1\n";
} }
#endif #endif
@ -76,34 +76,34 @@ int main(int argc, char** argv) {
std::cout << "SDL Renderer: " << SDL_GetRendererName(renderer.get()) << "\n"; std::cout << "SDL Renderer: " << SDL_GetRendererName(renderer.get()) << "\n";
// optionally init audio and camera // optionally init audio and camera
if (SDL_Init(SDL_INIT_AUDIO) < 0) { if (!SDL_Init(SDL_INIT_AUDIO)) {
std::cerr << "SDL_Init AUDIO failed (" << SDL_GetError() << ")\n"; std::cerr << "SDL_Init AUDIO failed (" << SDL_GetError() << ")\n";
} else if (false) { } else if (false) {
SDLAudioInputDeviceDefault aidd; SDLAudioInputDeviceDefault aidd;
auto* reader = aidd.aquireSubStream(); auto reader = aidd.subscribe();
auto writer = SDLAudioOutputDeviceDefaultFactory{}.create(); auto writer = SDLAudioOutputDeviceDefaultSink{}.subscribe();
for (size_t i = 0; i < 100; i++) { for (size_t i = 0; i < 200; i++) {
std::this_thread::sleep_for(std::chrono::milliseconds(10)); std::this_thread::sleep_for(std::chrono::milliseconds(10));
auto new_frame_opt = reader->pop(); auto new_frame_opt = reader->pop();
if (new_frame_opt.has_value()) { if (new_frame_opt.has_value()) {
std::cout << "audio frame was seq:" << new_frame_opt.value().seq << " sr:" << new_frame_opt.value().sample_rate << " " << (new_frame_opt.value().isS16()?"S16":"F32") << " l:" << (new_frame_opt.value().isS16()?new_frame_opt.value().getSpan<int16_t>().size:new_frame_opt.value().getSpan<float>().size) << "\n"; std::cout << "audio frame was seq:" << new_frame_opt.value().seq << " sr:" << new_frame_opt.value().sample_rate << " " << (new_frame_opt.value().isS16()?"S16":"F32") << " l:" << (new_frame_opt.value().isS16()?new_frame_opt.value().getSpan<int16_t>().size:new_frame_opt.value().getSpan<float>().size) << "\n";
writer.push(new_frame_opt.value()); writer->push(new_frame_opt.value());
} else { } else {
std::cout << "no audio frame\n"; std::cout << "no audio frame\n";
} }
} }
aidd.releaseSubStream(reader); aidd.unsubscribe(reader);
} }
if (SDL_Init(SDL_INIT_CAMERA) < 0) { if (!SDL_Init(SDL_INIT_CAMERA)) {
std::cerr << "SDL_Init CAMERA failed (" << SDL_GetError() << ")\n"; std::cerr << "SDL_Init CAMERA failed (" << SDL_GetError() << ")\n";
} else if (false) { // HACK } else if (false) { // HACK
std::cerr << "CAMERA initialized\n"; std::cerr << "CAMERA initialized\n";
SDLVideoCameraContent vcc; SDLVideoCameraContent vcc;
auto* reader = vcc.aquireSubStream(); auto reader = vcc.subscribe();
for (size_t i = 0; i < 20; i++) { for (size_t i = 0; i < 20; i++) {
std::this_thread::sleep_for(std::chrono::milliseconds(50)); std::this_thread::sleep_for(std::chrono::milliseconds(50));
auto new_frame_opt = reader->pop(); auto new_frame_opt = reader->pop();
@ -111,7 +111,7 @@ int main(int argc, char** argv) {
std::cout << "video frame was " << new_frame_opt.value().surface->w << "x" << new_frame_opt.value().surface->h << " " << new_frame_opt.value().timestampNS << "ns " << new_frame_opt.value().surface->format << "sf\n"; std::cout << "video frame was " << new_frame_opt.value().surface->w << "x" << new_frame_opt.value().surface->h << " " << new_frame_opt.value().timestampNS << "ns " << new_frame_opt.value().surface->format << "sf\n";
} }
} }
vcc.releaseSubStream(reader); vcc.unsubscribe(reader);
} }
std::cout << "after sdl video stuffery\n"; std::cout << "after sdl video stuffery\n";

View File

@ -9,6 +9,11 @@
#include <SDL3/SDL.h> #include <SDL3/SDL.h>
#include "./content/sdl_video_frame_stream2.hpp"
#include "content/audio_stream.hpp"
#include "content/sdl_audio_frame_stream2.hpp"
#include "stream_manager.hpp"
#include <memory> #include <memory>
#include <cmath> #include <cmath>
#include <string_view> #include <string_view>
@ -19,11 +24,13 @@ MainScreen::MainScreen(SimpleConfigModel&& conf_, SDL_Renderer* renderer_, Theme
rmm(cr), rmm(cr),
msnj{cr, {}, {}}, msnj{cr, {}, {}},
mts(rmm), mts(rmm),
sm(os),
tc(save_path, save_password), tc(save_path, save_password),
tpi(tc.getTox()), tpi(tc.getTox()),
ad(tc), ad(tc),
#if TOMATO_TOX_AV #if TOMATO_TOX_AV
tav(tc.getTox()), tav(tc.getTox()),
dtc(os, tav, sdlrtu),
#endif #endif
tcm(cr, tc, tc), tcm(cr, tc, tc),
tmm(rmm, cr, tcm, tc, tc), tmm(rmm, cr, tcm, tc, tc),
@ -40,6 +47,8 @@ MainScreen::MainScreen(SimpleConfigModel&& conf_, SDL_Renderer* renderer_, Theme
cg(conf, os, rmm, cr, sdlrtu, contact_tc, msg_tc, theme), cg(conf, os, rmm, cr, sdlrtu, contact_tc, msg_tc, theme),
sw(conf), sw(conf),
osui(os), osui(os),
smui(os, sm),
dvt(os, sm, sdlrtu),
tuiu(tc, conf), tuiu(tc, conf),
tdch(tpi) tdch(tpi)
{ {
@ -136,6 +145,43 @@ MainScreen::MainScreen(SimpleConfigModel&& conf_, SDL_Renderer* renderer_, Theme
} }
conf.dump(); conf.dump();
{ // add system av devices
{
ObjectHandle vsrc {os.registry(), os.registry().create()};
try {
vsrc.emplace<Components::FrameStream2Source<SDLVideoFrame>>(
std::make_unique<SDLVideoCameraContent>()
);
vsrc.emplace<Components::StreamSource>("WebCam", std::string{entt::type_name<SDLVideoFrame>::value()});
} catch (...) {
os.registry().destroy(vsrc);
}
}
{ // audio in
ObjectHandle asrc {os.registry(), os.registry().create()};
try {
throw int(2);
} catch (...) {
os.registry().destroy(asrc);
}
}
{ // audio out
ObjectHandle asink {os.registry(), os.registry().create()};
try {
asink.emplace<Components::FrameStream2Sink<AudioFrame>>(
std::make_unique<SDLAudioOutputDeviceDefaultSink>()
);
asink.emplace<Components::StreamSink>("LoudSpeaker", std::string{entt::type_name<AudioFrame>::value()});
} catch (...) {
os.registry().destroy(asink);
}
}
}
} }
MainScreen::~MainScreen(void) { MainScreen::~MainScreen(void) {
@ -252,14 +298,19 @@ Screen* MainScreen::render(float time_delta, bool&) {
} }
// ACTUALLY NOT IF RENDERED, MOVED LOGIC TO ABOVE // ACTUALLY NOT IF RENDERED, MOVED LOGIC TO ABOVE
// it might unload textures, so it needs to be done before rendering // it might unload textures, so it needs to be done before rendering
const float ctc_interval = contact_tc.update(); float animation_interval = contact_tc.update();
const float msgtc_interval = msg_tc.update(); animation_interval = std::min<float>(animation_interval, msg_tc.update());
const float cg_interval = cg.render(time_delta); // render const float cg_interval = cg.render(time_delta); // render
sw.render(); // render sw.render(); // render
osui.render(); osui.render();
smui.render();
animation_interval = std::min<float>(animation_interval, dvt.render());
tuiu.render(); // render tuiu.render(); // render
tdch.render(); // render tdch.render(); // render
#if TOMATO_TOX_AV
animation_interval = std::min<float>(animation_interval, dtc.render());
#endif
{ // main window menubar injection { // main window menubar injection
if (ImGui::Begin("tomato")) { if (ImGui::Begin("tomato")) {
@ -440,8 +491,7 @@ Screen* MainScreen::render(float time_delta, bool&) {
// low delay time window // low delay time window
if (!_window_hidden && _time_since_event < curr_profile.low_delay_window) { if (!_window_hidden && _time_since_event < curr_profile.low_delay_window) {
_render_interval = std::min<float>(_render_interval, ctc_interval); _render_interval = std::min<float>(_render_interval, animation_interval);
_render_interval = std::min<float>(_render_interval, msgtc_interval);
_render_interval = std::clamp( _render_interval = std::clamp(
_render_interval, _render_interval,
@ -450,8 +500,7 @@ Screen* MainScreen::render(float time_delta, bool&) {
); );
// mid delay time window // mid delay time window
} else if (!_window_hidden && _time_since_event < curr_profile.mid_delay_window) { } else if (!_window_hidden && _time_since_event < curr_profile.mid_delay_window) {
_render_interval = std::min<float>(_render_interval, ctc_interval); _render_interval = std::min<float>(_render_interval, animation_interval);
_render_interval = std::min<float>(_render_interval, msgtc_interval);
_render_interval = std::clamp( _render_interval = std::clamp(
_render_interval, _render_interval,
@ -479,10 +528,13 @@ Screen* MainScreen::tick(float time_delta, bool& quit) {
#if TOMATO_TOX_AV #if TOMATO_TOX_AV
tav.toxavIterate(); tav.toxavIterate();
const float av_interval = tav.toxavIterationInterval()/1000.f; const float av_interval = tav.toxavIterationInterval()/1000.f;
dtc.tick(time_delta);
#endif #endif
tcm.iterate(time_delta); // compute tcm.iterate(time_delta); // compute
const float sm_interval = sm.tick(time_delta);
const float fo_interval = tffom.tick(time_delta); const float fo_interval = tffom.tick(time_delta);
tam.iterate(); // compute tam.iterate(); // compute
@ -522,6 +574,11 @@ Screen* MainScreen::tick(float time_delta, bool& quit) {
); );
#endif #endif
_min_tick_interval = std::min<float>(
_min_tick_interval,
sm_interval
);
//std::cout << "MS: min tick interval: " << _min_tick_interval << "\n"; //std::cout << "MS: min tick interval: " << _min_tick_interval << "\n";
switch (_compute_perf_mode) { switch (_compute_perf_mode) {

View File

@ -16,6 +16,8 @@
#include <solanaceae/tox_messages/tox_message_manager.hpp> #include <solanaceae/tox_messages/tox_message_manager.hpp>
#include <solanaceae/tox_messages/tox_transfer_manager.hpp> #include <solanaceae/tox_messages/tox_transfer_manager.hpp>
#include "./stream_manager.hpp"
#include "./tox_client.hpp" #include "./tox_client.hpp"
#include "./auto_dirty.hpp" #include "./auto_dirty.hpp"
@ -30,12 +32,15 @@
#include "./chat_gui4.hpp" #include "./chat_gui4.hpp"
#include "./chat_gui/settings_window.hpp" #include "./chat_gui/settings_window.hpp"
#include "./object_store_ui.hpp" #include "./object_store_ui.hpp"
#include "./stream_manager_ui.hpp"
#include "./debug_video_tap.hpp"
#include "./tox_ui_utils.hpp" #include "./tox_ui_utils.hpp"
#include "./tox_dht_cap_histo.hpp" #include "./tox_dht_cap_histo.hpp"
#include "./tox_friend_faux_offline_messaging.hpp" #include "./tox_friend_faux_offline_messaging.hpp"
#if TOMATO_TOX_AV #if TOMATO_TOX_AV
#include "./tox_av.hpp" #include "./tox_av.hpp"
#include "./debug_tox_call.hpp"
#endif #endif
#include <string> #include <string>
@ -58,12 +63,15 @@ struct MainScreen final : public Screen {
MessageSerializerNJ msnj; MessageSerializerNJ msnj;
MessageTimeSort mts; MessageTimeSort mts;
StreamManager sm;
ToxEventLogger tel{std::cout}; ToxEventLogger tel{std::cout};
ToxClient tc; ToxClient tc;
ToxPrivateImpl tpi; ToxPrivateImpl tpi;
AutoDirty ad; AutoDirty ad;
#if TOMATO_TOX_AV #if TOMATO_TOX_AV
ToxAV tav; ToxAV tav;
DebugToxCall dtc;
#endif #endif
ToxContactModel2 tcm; ToxContactModel2 tcm;
ToxMessageManager tmm; ToxMessageManager tmm;
@ -86,6 +94,8 @@ struct MainScreen final : public Screen {
ChatGui4 cg; ChatGui4 cg;
SettingsWindow sw; SettingsWindow sw;
ObjectStoreUI osui; ObjectStoreUI osui;
StreamManagerUI smui;
DebugVideoTap dvt;
ToxUIUtils tuiu; ToxUIUtils tuiu;
ToxDHTCapHisto tdch; ToxDHTCapHisto tdch;

View File

@ -32,7 +32,7 @@ uint64_t SDLRendererTextureUploader::uploadRGBA(const uint8_t* data, uint32_t wi
SDL_UpdateTexture(tex, nullptr, surf->pixels, surf->pitch); SDL_UpdateTexture(tex, nullptr, surf->pixels, surf->pitch);
SDL_BlendMode surf_blend_mode = SDL_BLENDMODE_NONE; SDL_BlendMode surf_blend_mode = SDL_BLENDMODE_NONE;
if (SDL_GetSurfaceBlendMode(surf, &surf_blend_mode) == 0) { if (SDL_GetSurfaceBlendMode(surf, &surf_blend_mode)) {
SDL_SetTextureBlendMode(tex, surf_blend_mode); SDL_SetTextureBlendMode(tex, surf_blend_mode);
} }

210
src/stream_manager.hpp Normal file
View File

@ -0,0 +1,210 @@
#pragma once
#include <solanaceae/object_store/fwd.hpp>
#include <solanaceae/object_store/object_store.hpp>

#include <entt/core/type_info.hpp>

#include "./content/frame_stream2.hpp"

#include <algorithm>
#include <atomic>
#include <functional>
#include <memory>
#include <string>
#include <vector>
namespace Components {

	// Display/discovery metadata for an object that produces frames.
	// Attached alongside a FrameStream2Source<FrameType> component.
	struct StreamSource {
		std::string name; // human readable name, e.g. device name
		std::string frame_type_name; // entt::type_name of the FrameType

		// TODO: connect fn
	};

	// Display/discovery metadata for an object that consumes frames.
	// Attached alongside a FrameStream2Sink<FrameType> component.
	struct StreamSink {
		std::string name; // human readable name, e.g. device name
		std::string frame_type_name; // entt::type_name of the FrameType

		// TODO: connect fn
	};

	// Owning component holding the actual source stream implementation.
	template<typename FrameType>
	using FrameStream2Source = std::unique_ptr<FrameStream2SourceI<FrameType>>;

	// Owning component holding the actual sink stream implementation.
	template<typename FrameType>
	using FrameStream2Sink = std::unique_ptr<FrameStream2SinkI<FrameType>>;

} // Components
// Owns and pumps the connections between stream-source objects and
// stream-sink objects. Sources/sinks live in the ObjectStore as
// Components::FrameStream2Source/Sink; each Connection moves frames
// from its source subscription to its sink subscription every tick().
class StreamManager {
	friend class StreamManagerUI; // reads _connections for display

	ObjectStore2& _os;

	// One live src->sink hookup.
	// pump_fn drains queued frames and performs teardown once stop is set.
	struct Connection {
		ObjectHandle src;
		ObjectHandle sink;

		std::function<void(Connection&)> pump_fn;

		bool on_main_thread {true};
		std::atomic_bool stop {false}; // disconnect requested
		std::atomic_bool finished {false}; // teardown done, safe to erase

		// pump thread
		// frame interval counters and estimates

		Connection(void) = default;
		Connection(
			ObjectHandle src_,
			ObjectHandle sink_,
			std::function<void(Connection&)>&& pump_fn_,
			bool on_main_thread_ = true
		) :
			src(src_),
			sink(sink_),
			pump_fn(std::move(pump_fn_)),
			on_main_thread(on_main_thread_)
		{}
	};
	std::vector<std::unique_ptr<Connection>> _connections;

	public:
		StreamManager(ObjectStore2& os) : _os(os) {}
		virtual ~StreamManager(void) {}

		// TODO: default typed sources and sinks

		// stream type is FrameStream2I<FrameType>
		// TODO: improve this design
		// src and sink need to be a FrameStream2MultiStream<FrameType>

		// Connect the FrameType stream of object src to object sink.
		// Returns false if the pair is already connected, either object
		// does not exist, either lacks the matching stream component,
		// or a subscription fails.
		template<typename FrameType>
		bool connect(Object src, Object sink) {
			auto res = std::find_if(
				_connections.cbegin(), _connections.cend(),
				[&](const auto& a) { return a->src == src && a->sink == sink; }
			);
			if (res != _connections.cend()) {
				// already exists
				return false;
			}

			auto h_src = _os.objectHandle(src);
			auto h_sink = _os.objectHandle(sink);
			if (!static_cast<bool>(h_src) || !static_cast<bool>(h_sink)) {
				// an object does not exist
				return false;
			}

			if (!h_src.all_of<Components::FrameStream2Source<FrameType>>()) {
				// src not stream source
				return false;
			}

			if (!h_sink.all_of<Components::FrameStream2Sink<FrameType>>()) {
				// sink not stream sink
				return false;
			}

			// HACK: backfill display metadata so UIs can list the objects
			if (!h_src.all_of<Components::StreamSource>()) {
				h_src.emplace<Components::StreamSource>("", std::string{entt::type_name<FrameType>::value()});
			}
			if (!h_sink.all_of<Components::StreamSink>()) {
				h_sink.emplace<Components::StreamSink>("", std::string{entt::type_name<FrameType>::value()});
			}

			auto& src_stream = h_src.get<Components::FrameStream2Source<FrameType>>();
			auto& sink_stream = h_sink.get<Components::FrameStream2Sink<FrameType>>();

			auto reader = src_stream->subscribe();
			if (!reader) {
				return false;
			}

			auto writer = sink_stream->subscribe();
			if (!writer) {
				// BUGFIX: release the reader subscription acquired above,
				// it would otherwise leak on this error path
				src_stream->unsubscribe(reader);
				return false;
			}

			_connections.push_back(std::make_unique<Connection>(
				h_src,
				h_sink,
				// refactor extract, we just need the type info here
				[reader = std::move(reader), writer = std::move(writer)](Connection& con) -> void {
					// drain a bounded number of frames per pump,
					// there might be more stored
					for (size_t i = 0; i < 10; i++) {
						auto new_frame_opt = reader->pop();
						// TODO: frame interval estimates
						if (new_frame_opt.has_value()) {
							writer->push(new_frame_opt.value());
						} else {
							break;
						}
					}

					if (con.stop) {
						// tear down both subscriptions, then signal erase
						auto* src_stream_ptr = con.src.try_get<Components::FrameStream2Source<FrameType>>();
						if (src_stream_ptr != nullptr) {
							(*src_stream_ptr)->unsubscribe(reader);
						}
						auto* sink_stream_ptr = con.sink.try_get<Components::FrameStream2Sink<FrameType>>();
						if (sink_stream_ptr != nullptr) {
							(*sink_stream_ptr)->unsubscribe(writer);
						}
						con.finished = true;
					}
				},
				true // TODO: threaded
			));

			return true;
		}

		// Request disconnect of the src->sink pair; actual teardown and
		// removal happen on a later tick(). StreamType is unused here,
		// kept for interface symmetry with connect().
		template<typename StreamType>
		bool disconnect(Object src, Object sink) {
			auto res = std::find_if(
				_connections.cbegin(), _connections.cend(),
				[&](const auto& a) { return a->src == src && a->sink == sink; }
			);
			if (res == _connections.cend()) {
				// not found
				return false;
			}

			// do disconnect
			(*res)->stop = true;

			return true;
		}

		// Request disconnect of every connection object o participates in,
		// as source or as sink. Returns true if at least one matched.
		template<typename StreamType>
		bool disconnectAll(Object o) {
			bool succ {false};
			for (const auto& con : _connections) {
				if (con->src == o || con->sink == o) {
					con->stop = true;
					succ = true;
				}
			}
			return succ;
		}

		// Pump all main-thread connections and erase the ones whose
		// teardown has completed. Returns the suggested interval until
		// the next tick, in seconds.
		// do we need the time delta?
		float tick(float) {
			// pump all mainthread connections
			for (auto it = _connections.begin(); it != _connections.end();) {
				auto& con = **it;

				if (con.on_main_thread) {
					con.pump_fn(con);
				}

				if (con.stop && con.finished) {
					it = _connections.erase(it);
				} else {
					it++;
				}
			}

			// TODO: return min over frame interval estimates instead
			return 0.01f;
		}
};

39
src/stream_manager_ui.cpp Normal file
View File

@ -0,0 +1,39 @@
#include "./stream_manager_ui.hpp"
#include <solanaceae/object_store/object_store.hpp>
#include <imgui/imgui.h>
// Stores references only; no registration or setup happens at construction.
StreamManagerUI::StreamManagerUI(ObjectStore2& os, StreamManager& sm) : _os(os), _sm(sm) {
}
// Draws the "StreamManagerUI" window: flat lists of all stream sources,
// sinks (from the ObjectStore registry) and active connections (from the
// StreamManager, via friend access).
void StreamManagerUI::render(void) {
	if (ImGui::Begin("StreamManagerUI")) {
		// TODO: node canvas?

		// by frametype ??

		ImGui::SeparatorText("Sources");

		// list sources
		// BUGFIX: entt::to_integral() yields an unsigned value, so use %u
		// (previously %d, a printf format/argument mismatch)
		for (const auto& [oc, ss] : _os.registry().view<Components::StreamSource>().each()) {
			ImGui::Text("src %u (%s)[%s]", static_cast<unsigned>(entt::to_integral(oc)), ss.name.c_str(), ss.frame_type_name.c_str());
		}

		ImGui::SeparatorText("Sinks");

		// list sinks
		for (const auto& [oc, ss] : _os.registry().view<Components::StreamSink>().each()) {
			ImGui::Text("sink %u (%s)[%s]", static_cast<unsigned>(entt::to_integral(oc)), ss.name.c_str(), ss.frame_type_name.c_str());
		}

		ImGui::SeparatorText("Connections");

		// list connections
		for (const auto& con : _sm._connections) {
			ImGui::Text("con %u->%u", static_cast<unsigned>(entt::to_integral(con->src.entity())), static_cast<unsigned>(entt::to_integral(con->sink.entity())));
		}
	}
	ImGui::End(); // must be called even when Begin() returned false
}

15
src/stream_manager_ui.hpp Normal file
View File

@ -0,0 +1,15 @@
#pragma once
#include <solanaceae/object_store/fwd.hpp>
#include "./stream_manager.hpp"
// Debug window listing the stream sources, sinks and active connections
// known to the ObjectStore / StreamManager.
class StreamManagerUI {
	ObjectStore2& _os; // registry holding StreamSource/StreamSink components
	StreamManager& _sm; // connections are read via friend access

	public:
		StreamManagerUI(ObjectStore2& os, StreamManager& sm);

		// Draws the window; call once per frame from the UI loop.
		void render(void);
};

View File

@ -153,11 +153,11 @@ Toxav_Err_Bit_Rate_Set ToxAV::toxavVideoSetBitRate(uint32_t friend_number, uint3
void ToxAV::cb_call(uint32_t friend_number, bool audio_enabled, bool video_enabled) { void ToxAV::cb_call(uint32_t friend_number, bool audio_enabled, bool video_enabled) {
std::cerr << "TOXAV: receiving call f:" << friend_number << " a:" << audio_enabled << " v:" << video_enabled << "\n"; std::cerr << "TOXAV: receiving call f:" << friend_number << " a:" << audio_enabled << " v:" << video_enabled << "\n";
Toxav_Err_Answer err_answer { TOXAV_ERR_ANSWER_OK }; //Toxav_Err_Answer err_answer { TOXAV_ERR_ANSWER_OK };
toxav_answer(_tox_av, friend_number, 0, 0, &err_answer); //toxav_answer(_tox_av, friend_number, 0, 0, &err_answer);
if (err_answer != TOXAV_ERR_ANSWER_OK) { //if (err_answer != TOXAV_ERR_ANSWER_OK) {
std::cerr << "!!!!!!!! answer failed " << err_answer << "\n"; // std::cerr << "!!!!!!!! answer failed " << err_answer << "\n";
} //}
dispatch( dispatch(
ToxAV_Event::friend_call, ToxAV_Event::friend_call,