Compare commits

...

34 Commits

Author SHA1 Message Date
87f7290301
wip but working audio reframer 2024-09-27 12:47:22 +02:00
7ebbad2b94
make input device only open with >= 1 connection 2024-09-27 09:51:36 +02:00
5b3e0e2a0b
rewrite audio input, always open? but thin stream wrapper 2024-09-25 14:36:51 +02:00
5eca1a99e0
more stream stuff, audio input device is unusable 2024-09-25 11:25:22 +02:00
d0eeef2b94
improve streams also throw os events 2024-09-25 10:46:13 +02:00
4d5d708d6d
improve stream manager ui 2024-09-24 16:16:19 +02:00
9b7ba5875c
stream manager ui connections disconnect works 2024-09-24 16:16:18 +02:00
9b5cb2cfab
imgui ids 2024-09-24 16:16:18 +02:00
9f62e01ab8
wip stream manager ui rework 2024-09-24 16:16:18 +02:00
8cdf2a2ca3
add mirror option to debug video tap 2024-09-24 16:16:18 +02:00
697611ff55
fix video leak, toav audio 2024-09-24 16:16:18 +02:00
42c7ab8571
toxav incoming audio src and fix sdl audio output sink 2024-09-24 16:16:17 +02:00
36e75c0fab
dont drop frames too much 2024-09-24 16:16:17 +02:00
a934273714
update flake 2024-09-24 16:16:17 +02:00
a618435f17
a little cleanup 2024-09-24 16:16:17 +02:00
a622b6aa3f
custom YUY2 -> IYUV conversion code
improves from 1+60ms to 3ms
2024-09-24 16:16:17 +02:00
b4373e0d9a
more stream progress, threaded connections toxav video sending 2024-09-24 16:16:16 +02:00
964f6de656
big stream progress
- stream manager (with bare bones ui)
- debug video tap
- toxav progress
- toxav debug ui
- some default devices
2024-09-24 16:16:16 +02:00
a100eaae82
toxav events 2024-09-24 16:16:16 +02:00
106c8e8403
tweaking 2024-09-24 16:16:16 +02:00
ca4ab01f77
fix cam when not having perm 2024-09-24 16:16:16 +02:00
4ff9386398
move queue code to external 2024-09-24 16:16:15 +02:00
93d65ead89
update sdl video frame stream for sdl changes 2024-09-24 16:16:15 +02:00
edd949879b
update audio frame stream for sdl changes 2024-09-24 16:16:15 +02:00
ef78c49e29
disabled tests and print cam driver 2024-09-24 16:16:15 +02:00
e149873673
fix audio sleep -> superb perf 2024-09-24 16:16:15 +02:00
3a98e10007
more refactoring, audio loopback kinda working 2024-09-24 16:16:14 +02:00
b3e5e4c950
frame refactoring, add audio frame and sdl default device input 2024-09-24 16:16:14 +02:00
165e80c456
frame stream refactor, i like it now 2024-09-24 16:16:14 +02:00
495ec41234
content stuff 2024-09-24 16:16:14 +02:00
ddadc9bdbc
settle on content store for now 2024-09-24 16:16:14 +02:00
b657802e8d
get sdl camera working 2024-09-24 16:16:13 +02:00
bedf0b02bc
framestream2 (with rigtorp/SPSCQueue) 2024-09-24 16:16:13 +02:00
1d0a4cafe2
start sketching 2024-09-24 16:16:13 +02:00
31 changed files with 3295 additions and 10 deletions

View File

@ -27,11 +27,19 @@ message("II TOMATO_TOX_AV: ${TOMATO_TOX_AV}")
if (TOMATO_ASAN)
if (${CMAKE_CXX_COMPILER_ID} STREQUAL "GNU" OR ${CMAKE_CXX_COMPILER_ID} STREQUAL "Clang")
if (NOT WIN32) # exclude mingw
#link_libraries(-fsanitize=address)
add_compile_options(-fsanitize=address,undefined)
link_libraries(-fsanitize=address,undefined)
#link_libraries(-fsanitize=undefined)
link_libraries(-static-libasan) # make it "work" on nix
#add_compile_options(-fsanitize=thread)
#link_libraries(-fsanitize=thread)
message("II enabled ASAN")
if (OFF) # TODO: switch for minimal runtime in deployed scenarios
add_compile_options(-fsanitize-minimal-runtime)
link_libraries(-fsanitize-minimal-runtime)
endif()
else()
message("!! can not enable ASAN on this platform (gcc/clang + win)")
endif()

View File

@ -24,3 +24,4 @@ add_subdirectory(./libwebp)
add_subdirectory(./qoi)
add_subdirectory(./sdl_image)
add_subdirectory(./spscqueue)

9
external/spscqueue/CMakeLists.txt vendored Normal file
View File

@ -0,0 +1,9 @@
cmake_minimum_required(VERSION 3.9 FATAL_ERROR)

# Header-only interface target for the vendored rigtorp/SPSCQueue.
# NOTE: listing source files directly on an INTERFACE library
# (add_library(<name> INTERFACE <src>)) requires CMake >= 3.19, which
# contradicted the 3.9 minimum declared above. The header does not need
# to be compiled, so it is simply not listed.
add_library(SPSCQueue INTERFACE)

target_compile_features(SPSCQueue INTERFACE cxx_std_17)
target_include_directories(SPSCQueue INTERFACE "${CMAKE_CURRENT_SOURCE_DIR}")

237
external/spscqueue/SPSCQueue.h vendored Normal file
View File

@ -0,0 +1,237 @@
/*
Copyright (c) 2020 Erik Rigtorp <erik@rigtorp.se>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/
#pragma once
#include <atomic>
#include <cassert>
#include <cstddef>
#include <memory> // std::allocator
#include <new> // std::hardware_destructive_interference_size
#include <stdexcept>
#include <type_traits> // std::enable_if, std::is_*_constructible
#ifdef __has_cpp_attribute
#if __has_cpp_attribute(nodiscard)
#define RIGTORP_NODISCARD [[nodiscard]]
#endif
#endif
#ifndef RIGTORP_NODISCARD
#define RIGTORP_NODISCARD
#endif
namespace rigtorp {
// Vendored third-party code: rigtorp/SPSCQueue (MIT, see license above).
// Single-producer single-consumer lock-free FIFO. Keep modifications
// upstream-compatible; the memory orderings below are load-bearing.
template <typename T, typename Allocator = std::allocator<T>> class SPSCQueue {
#if defined(__cpp_if_constexpr) && defined(__cpp_lib_void_t)
// detects C++23-style allocate_at_least() support on the allocator
template <typename Alloc2, typename = void>
struct has_allocate_at_least : std::false_type {};
template <typename Alloc2>
struct has_allocate_at_least<
Alloc2, std::void_t<typename Alloc2::value_type,
decltype(std::declval<Alloc2 &>().allocate_at_least(
size_t{}))>> : std::true_type {};
#endif
public:
// capacity: number of usable slots (clamped to >= 1); one extra slack
// slot plus cache-line padding on both sides is allocated internally.
explicit SPSCQueue(const size_t capacity,
const Allocator &allocator = Allocator())
: capacity_(capacity), allocator_(allocator) {
// The queue needs at least one element
if (capacity_ < 1) {
capacity_ = 1;
}
capacity_++; // Needs one slack element
// Prevent overflowing size_t
if (capacity_ > SIZE_MAX - 2 * kPadding) {
capacity_ = SIZE_MAX - 2 * kPadding;
}
#if defined(__cpp_if_constexpr) && defined(__cpp_lib_void_t)
if constexpr (has_allocate_at_least<Allocator>::value) {
auto res = allocator_.allocate_at_least(capacity_ + 2 * kPadding);
slots_ = res.ptr;
capacity_ = res.count - 2 * kPadding;
} else {
slots_ = std::allocator_traits<Allocator>::allocate(
allocator_, capacity_ + 2 * kPadding);
}
#else
slots_ = std::allocator_traits<Allocator>::allocate(
allocator_, capacity_ + 2 * kPadding);
#endif
static_assert(alignof(SPSCQueue<T>) == kCacheLineSize, "");
static_assert(sizeof(SPSCQueue<T>) >= 3 * kCacheLineSize, "");
assert(reinterpret_cast<char *>(&readIdx_) -
reinterpret_cast<char *>(&writeIdx_) >=
static_cast<std::ptrdiff_t>(kCacheLineSize));
}
// Destroys any elements still queued, then releases the slot storage.
~SPSCQueue() {
while (front()) {
pop();
}
std::allocator_traits<Allocator>::deallocate(allocator_, slots_,
capacity_ + 2 * kPadding);
}
// non-copyable and non-movable
SPSCQueue(const SPSCQueue &) = delete;
SPSCQueue &operator=(const SPSCQueue &) = delete;
// Producer side only. Blocks (spins) while the queue is full.
template <typename... Args>
void emplace(Args &&...args) noexcept(
std::is_nothrow_constructible<T, Args &&...>::value) {
static_assert(std::is_constructible<T, Args &&...>::value,
"T must be constructible with Args&&...");
auto const writeIdx = writeIdx_.load(std::memory_order_relaxed);
auto nextWriteIdx = writeIdx + 1;
if (nextWriteIdx == capacity_) {
nextWriteIdx = 0;
}
// spin until the consumer frees a slot (refresh the cached read index)
while (nextWriteIdx == readIdxCache_) {
readIdxCache_ = readIdx_.load(std::memory_order_acquire);
}
new (&slots_[writeIdx + kPadding]) T(std::forward<Args>(args)...);
writeIdx_.store(nextWriteIdx, std::memory_order_release);
}
// Producer side only. Returns false (drops the element) if the queue is full.
template <typename... Args>
RIGTORP_NODISCARD bool try_emplace(Args &&...args) noexcept(
std::is_nothrow_constructible<T, Args &&...>::value) {
static_assert(std::is_constructible<T, Args &&...>::value,
"T must be constructible with Args&&...");
auto const writeIdx = writeIdx_.load(std::memory_order_relaxed);
auto nextWriteIdx = writeIdx + 1;
if (nextWriteIdx == capacity_) {
nextWriteIdx = 0;
}
if (nextWriteIdx == readIdxCache_) {
readIdxCache_ = readIdx_.load(std::memory_order_acquire);
if (nextWriteIdx == readIdxCache_) {
return false;
}
}
new (&slots_[writeIdx + kPadding]) T(std::forward<Args>(args)...);
writeIdx_.store(nextWriteIdx, std::memory_order_release);
return true;
}
// Blocking copy-push (producer side).
void push(const T &v) noexcept(std::is_nothrow_copy_constructible<T>::value) {
static_assert(std::is_copy_constructible<T>::value,
"T must be copy constructible");
emplace(v);
}
// Blocking converting/move push (producer side).
template <typename P, typename = typename std::enable_if<
std::is_constructible<T, P &&>::value>::type>
void push(P &&v) noexcept(std::is_nothrow_constructible<T, P &&>::value) {
emplace(std::forward<P>(v));
}
// Non-blocking copy-push; false if full (producer side).
RIGTORP_NODISCARD bool
try_push(const T &v) noexcept(std::is_nothrow_copy_constructible<T>::value) {
static_assert(std::is_copy_constructible<T>::value,
"T must be copy constructible");
return try_emplace(v);
}
// Non-blocking converting/move push; false if full (producer side).
template <typename P, typename = typename std::enable_if<
std::is_constructible<T, P &&>::value>::type>
RIGTORP_NODISCARD bool
try_push(P &&v) noexcept(std::is_nothrow_constructible<T, P &&>::value) {
return try_emplace(std::forward<P>(v));
}
// Consumer side only. Pointer to the oldest element, or nullptr when empty.
// The pointer is valid until the next pop().
RIGTORP_NODISCARD T *front() noexcept {
auto const readIdx = readIdx_.load(std::memory_order_relaxed);
if (readIdx == writeIdxCache_) {
writeIdxCache_ = writeIdx_.load(std::memory_order_acquire);
if (writeIdxCache_ == readIdx) {
return nullptr;
}
}
return &slots_[readIdx + kPadding];
}
// Consumer side only. Precondition: front() returned non-null.
void pop() noexcept {
static_assert(std::is_nothrow_destructible<T>::value,
"T must be nothrow destructible");
auto const readIdx = readIdx_.load(std::memory_order_relaxed);
assert(writeIdx_.load(std::memory_order_acquire) != readIdx &&
"Can only call pop() after front() has returned a non-nullptr");
slots_[readIdx + kPadding].~T();
auto nextReadIdx = readIdx + 1;
if (nextReadIdx == capacity_) {
nextReadIdx = 0;
}
readIdx_.store(nextReadIdx, std::memory_order_release);
}
// Approximate number of queued elements (racy by nature, callable from
// either thread).
RIGTORP_NODISCARD size_t size() const noexcept {
std::ptrdiff_t diff = writeIdx_.load(std::memory_order_acquire) -
readIdx_.load(std::memory_order_acquire);
if (diff < 0) {
diff += capacity_;
}
return static_cast<size_t>(diff);
}
RIGTORP_NODISCARD bool empty() const noexcept {
return writeIdx_.load(std::memory_order_acquire) ==
readIdx_.load(std::memory_order_acquire);
}
// Usable capacity as requested by the caller (slack slot excluded).
RIGTORP_NODISCARD size_t capacity() const noexcept { return capacity_ - 1; }
private:
#ifdef __cpp_lib_hardware_interference_size
static constexpr size_t kCacheLineSize =
std::hardware_destructive_interference_size;
#else
static constexpr size_t kCacheLineSize = 64;
#endif
// Padding to avoid false sharing between slots_ and adjacent allocations
static constexpr size_t kPadding = (kCacheLineSize - 1) / sizeof(T) + 1;
private:
size_t capacity_;
T *slots_;
#if defined(__has_cpp_attribute) && __has_cpp_attribute(no_unique_address)
Allocator allocator_ [[no_unique_address]];
#else
Allocator allocator_;
#endif
// Align to cache line size in order to avoid false sharing
// readIdxCache_ and writeIdxCache_ is used to reduce the amount of cache
// coherency traffic
alignas(kCacheLineSize) std::atomic<size_t> writeIdx_ = {0};
alignas(kCacheLineSize) size_t readIdxCache_ = 0;
alignas(kCacheLineSize) std::atomic<size_t> readIdx_ = {0};
alignas(kCacheLineSize) size_t writeIdxCache_ = 0;
};
} // namespace rigtorp

View File

@ -80,6 +80,8 @@
] ++ self.packages.${system}.default.dlopenBuildInputs;
cmakeFlags = [
"-DTOMATO_TOX_AV=ON"
"-DTOMATO_ASAN=OFF"
"-DCMAKE_BUILD_TYPE=RelWithDebInfo"
#"-DCMAKE_BUILD_TYPE=Debug"

View File

@ -102,12 +102,30 @@ target_sources(tomato PUBLIC
./chat_gui4.hpp
./chat_gui4.cpp
./stream_manager.hpp
./stream_manager_ui.hpp
./stream_manager_ui.cpp
./debug_video_tap.hpp
./debug_video_tap.cpp
./content/content.hpp
./content/frame_stream2.hpp
./content/sdl_video_frame_stream2.hpp
./content/sdl_video_frame_stream2.cpp
./content/audio_stream.hpp
./content/sdl_audio_frame_stream2.hpp
./content/sdl_audio_frame_stream2.cpp
)
if (TOMATO_TOX_AV)
target_sources(tomato PUBLIC
./tox_av.hpp
./tox_av.cpp
./debug_tox_call.hpp
./debug_tox_call.cpp
)
target_compile_definitions(tomato PUBLIC TOMATO_TOX_AV)
@ -140,6 +158,8 @@ target_link_libraries(tomato PUBLIC
libwebpmux # the f why (needed for anim encode)
qoi
SDL3_image::SDL3_image
SPSCQueue
)
# probably not enough

View File

@ -0,0 +1,72 @@
#pragma once
#include "./frame_stream2.hpp"
#include <solanaceae/util/span.hpp>
#include <cstdint>
#include <variant>
#include <vector>
// Raw audio frame.
// Samples are interleaved across channels; planar layouts are not supported.
struct AudioFrame {
	// sequence number, used by consumers to detect dropped frames
	uint32_t seq {0};

	// TODO: maybe use ts instead to discard old?
	// since buffer size is variable, some timestamp would be needed to estimate the lost time

	// samples per second
	uint32_t sample_rate {48'000};

	// number of interleaved channels
	size_t channels {0};

	// sample storage: owning (vector) or non-owning (Span), S16 or F32
	std::variant<
		std::vector<int16_t>, // S16, platform endianess
		Span<int16_t>, // non owning variant, for direct consumption
		std::vector<float>, // f32
		Span<float> // non owning variant, for direct consumption
	> buffer;

	// helpers

	// true if the buffer holds signed 16bit samples (owning or not)
	bool isS16(void) const {
		return
			std::get_if<std::vector<int16_t>>(&buffer) != nullptr ||
			std::get_if<Span<int16_t>>(&buffer) != nullptr
		;
	}

	// true if the buffer holds 32bit float samples (owning or not)
	bool isF32(void) const {
		return
			std::get_if<std::vector<float>>(&buffer) != nullptr ||
			std::get_if<Span<float>>(&buffer) != nullptr
		;
	}

	// Non-owning view on the sample data; T must match the stored sample
	// type (checked with assert via isS16()/isF32()).
	template<typename T>
	Span<T> getSpan(void) const {
		static_assert(std::is_same_v<int16_t, T> || std::is_same_v<float, T>);
		if constexpr (std::is_same_v<int16_t, T>) {
			assert(isS16());
			if (const auto* vec = std::get_if<std::vector<int16_t>>(&buffer); vec != nullptr) {
				return Span<int16_t>{*vec};
			}
			return std::get<Span<int16_t>>(buffer);
		} else if constexpr (std::is_same_v<float, T>) {
			assert(isF32());
			if (const auto* vec = std::get_if<std::vector<float>>(&buffer); vec != nullptr) {
				return Span<float>{*vec};
			}
			return std::get<Span<float>>(buffer);
		}
		return {};
	}
};

using AudioFrameStream2I = FrameStream2I<AudioFrame>;
using AudioFrameStream2MultiSource = FrameStream2MultiSource<AudioFrame>;
using AudioFrameStream2 = AudioFrameStream2MultiSource::sub_stream_type_t; // just use the default for now

36
src/content/content.hpp Normal file
View File

@ -0,0 +1,36 @@
#pragma once
#include <entt/container/dense_set.hpp>
#include <solanaceae/object_store/object_store.hpp>
#include <solanaceae/contact/contact_model3.hpp>
#include <solanaceae/message3/registry_message_model.hpp>
#include <solanaceae/file/file2.hpp>
// Components attached to content objects in the object store.
namespace Content1::Components {

	// TODO: design it as a tree?
	// or something

	// empty tag components marking what kind of content an object is
	struct TagFile {};
	struct TagAudioStream {};
	struct TagVideoStream {};

	// objects whose timing this object is tied to (e.g. for sync)
	struct TimingTiedTo {
		entt::dense_set<ObjectHandle> ties;
	};

	// the associated messages, if any
	// useful if you want to update progress on the message
	struct Messages {
		std::vector<Message3Handle> messages;
	};

	// ?
	// contacts suspected to be participating in this content
	struct SuspectedParticipants {
		entt::dense_set<Contact3> participants;
	};

} // namespace Content1::Components

View File

@ -0,0 +1,153 @@
#pragma once
#include <cassert>
#include <cstdint>
#include <memory>
#include <mutex>
#include <optional>
#include <vector>

#include <SPSCQueue.h>
// Frames often consist of:
// - seq id // incremental sequential id, gaps in ids can be used to detect loss
// - data   // the frame data
// eg:
//struct ExampleFrame {
//	int64_t seq_id {0};
//	std::vector<uint8_t> data;
//};

// Generic bidirectional frame stream interface.
// Concrete implementations usually support only one direction and stub
// the other (see QueuedFrameStream2 / the sdl device streams).
template<typename FrameType>
struct FrameStream2I {
	virtual ~FrameStream2I(void) {}

	// get number of available frames
	// implementations may return -1 when the count is unknown
	[[nodiscard]] virtual int32_t size(void) = 0;

	// get next frame, or nullopt when none is available
	// TODO: optional instead?
	// data sharing? -> no, data is copied for each fsr, if concurrency supported
	[[nodiscard]] virtual std::optional<FrameType> pop(void) = 0;

	// returns true if there are readers (or we dont know)
	virtual bool push(const FrameType& value) = 0;
};
// A producer of frames: hands out readable sub streams to consumers.
template<typename FrameType>
struct FrameStream2SourceI {
	virtual ~FrameStream2SourceI(void) {}

	// create and register a new readable stream
	[[nodiscard]] virtual std::shared_ptr<FrameStream2I<FrameType>> subscribe(void) = 0;

	// deregister a stream previously returned by subscribe(); false if unknown
	virtual bool unsubscribe(const std::shared_ptr<FrameStream2I<FrameType>>& sub) = 0;
};
// A consumer of frames: hands out writable sub streams to producers.
template<typename FrameType>
struct FrameStream2SinkI {
	virtual ~FrameStream2SinkI(void) {}

	// create and register a new writable stream
	[[nodiscard]] virtual std::shared_ptr<FrameStream2I<FrameType>> subscribe(void) = 0;

	// deregister a stream previously returned by subscribe(); false if unknown
	virtual bool unsubscribe(const std::shared_ptr<FrameStream2I<FrameType>>& sub) = 0;
};
// Frame stream backed by a fixed-capacity single-producer single-consumer
// queue. A queue sized for roughly 1-2 seconds of frames is often sufficient.
template<typename FrameType>
struct QueuedFrameStream2 : public FrameStream2I<FrameType> {
	using frame_type = FrameType;

	rigtorp::SPSCQueue<FrameType> _queue;

	// discard values if queue full
	// will block if not lossy and full on push
	const bool _lossy {true};

	explicit QueuedFrameStream2(size_t queue_size, bool lossy = true) : _queue(queue_size), _lossy(lossy) {}

	// number of frames currently queued
	int32_t size(void) override {
		return static_cast<int32_t>(_queue.size());
	}

	// takes the oldest queued frame, or nullopt when the queue is empty
	std::optional<FrameType> pop(void) override {
		FrameType* head = _queue.front();
		if (head == nullptr) {
			return std::nullopt;
		}

		// move the value out before releasing the slot
		std::optional<FrameType> frame {std::move(*head)};
		_queue.pop();
		return frame;
	}

	// always reports readers present (the queue does not know)
	bool push(const FrameType& value) override {
		if (!_lossy) {
			// blocks until a slot is free
			_queue.push(value);
			return true;
		}

		// lossy: silently drop the frame when full
		[[maybe_unused]] const bool fitted = _queue.try_emplace(value);
		// TODO: maybe return ?
		return true;
	}
};
// implements a stream that pushes to all sub streams
// release all streams before destructing! // TODO: improve lifetime here, maybe some shared semaphore?
template<typename FrameType, typename SubStreamType = QueuedFrameStream2<FrameType>>
struct FrameStream2MultiSource : public FrameStream2SourceI<FrameType>, public FrameStream2I<FrameType> {
	using sub_stream_type_t = SubStreamType;

	// pointer stability
	std::vector<std::shared_ptr<SubStreamType>> _sub_streams;
	std::mutex _sub_stream_lock; // accessing the _sub_streams array needs to be exclusive
	// a simple lock here is ok, since this tends to be a rare operation,
	// except for the push, which is always on the same thread

	virtual ~FrameStream2MultiSource(void) {}

	// creates a new sub stream and registers it to receive pushed frames
	// TODO: forward args instead
	std::shared_ptr<FrameStream2I<FrameType>> subscribe(void) override {
		// TODO: args???
		size_t queue_size = 8;
		bool lossy = true;

		std::lock_guard lg{_sub_stream_lock};
		// make_shared: one allocation for object + control block
		// (was make_unique, whose conversion to shared_ptr pays a second
		// allocation for the control block)
		return _sub_streams.emplace_back(std::make_shared<SubStreamType>(queue_size, lossy));
	}

	// removes a sub stream previously returned by subscribe()
	// returns false if sub was not registered here
	bool unsubscribe(const std::shared_ptr<FrameStream2I<FrameType>>& sub) override {
		std::lock_guard lg{_sub_stream_lock};
		for (auto it = _sub_streams.begin(); it != _sub_streams.end(); it++) {
			if (*it == sub) {
				_sub_streams.erase(it);
				return true;
			}
		}
		return false; // ?
	}

	// stream interface

	// unknown, fan-out only
	int32_t size(void) override {
		// TODO: return something sensible?
		return -1;
	}

	std::optional<FrameType> pop(void) override {
		// nope
		assert(false && "this logic is very frame type specific, provide an impl");
		return std::nullopt;
	}

	// returns true if there are readers
	bool push(const FrameType& value) override {
		std::lock_guard lg{_sub_stream_lock};

		bool have_readers{false};
		for (auto& it : _sub_streams) {
			[[maybe_unused]] auto _ = it->push(value);
			have_readers = true; // even if queue full, we still continue believing in them
			// maybe consider push return value?
		}
		return have_readers;
	}
};

View File

@ -0,0 +1,289 @@
#include "./sdl_audio_frame_stream2.hpp"
#include <iostream>
// "thin" wrapper around sdl audio streams
// we don't need to get fancy, as they already provide everything we need
struct SDLAudioStreamReader : public AudioFrameStream2I {
	// owning handle to the sdl stream frames are read from
	std::unique_ptr<SDL_AudioStream, decltype(&SDL_DestroyAudioStream)> _stream;

	// next sequence number handed out; consumers use gaps to detect loss
	uint32_t _seq_counter {0};

	uint32_t _sample_rate {48'000};
	size_t _channels {0};
	SDL_AudioFormat _format {SDL_AUDIO_S16};

	// scratch buffer the sdl data is read into; Spans handed out by pop()
	// point into it and are only valid until the next pop()
	std::vector<int16_t> _buffer;

	SDLAudioStreamReader(void) : _stream(nullptr, nullptr) {}
	SDLAudioStreamReader(SDLAudioStreamReader&& other) :
		_stream(std::move(other._stream)),
		// BUG(fixed): _seq_counter was not carried over, restarting
		// sequence numbers at 0 after a move
		_seq_counter(other._seq_counter),
		_sample_rate(other._sample_rate),
		_channels(other._channels),
		_format(other._format),
		// BUG(fixed): the buffer was re-allocated via a function-local
		// `static` size (frozen with the first instance's channel count);
		// just move the existing storage instead
		_buffer(std::move(other._buffer))
	{
	}

	~SDLAudioStreamReader(void) {
		if (_stream) {
			SDL_UnbindAudioStream(_stream.get());
		}
	}

	int32_t size(void) override {
		//assert(_stream);
		// returns bytes
		//SDL_GetAudioStreamAvailable(_stream.get());
		return -1;
	}

	std::optional<AudioFrame> pop(void) override {
		assert(_stream);
		assert(_format == SDL_AUDIO_S16);

		// 960 samples per channel (20ms at 48kHz)
		// BUG(fixed): this was `static const`, so the size was frozen with
		// the channel count of whichever reader called pop() first
		const size_t buffer_size {960*_channels};
		_buffer.resize(buffer_size); // noop?

		const auto read_bytes = SDL_GetAudioStreamData(
			_stream.get(),
			_buffer.data(),
			_buffer.size()*sizeof(int16_t)
		);

		// no new frame yet, or error
		if (read_bytes <= 0) {
			return std::nullopt;
		}

		return AudioFrame {
			_seq_counter++,
			_sample_rate, _channels,
			Span<int16_t>(_buffer.data(), read_bytes/sizeof(int16_t)),
		};
	}

	bool push(const AudioFrame&) override {
		// TODO: make universal sdl stream wrapper (combine with SDLAudioOutputDeviceDefaultInstance)
		assert(false && "read only");
		return false;
	}
};
// Default: use the system default recording device.
SDLAudioInputDevice::SDLAudioInputDevice(void) : SDLAudioInputDevice(SDL_AUDIO_DEVICE_DEFAULT_RECORDING) {
}

// conf_device_id: sdl id of the recording device to use; the device is
// only actually opened once the first stream subscribes.
// Throws int(1) if the id is 0 (invalid).
SDLAudioInputDevice::SDLAudioInputDevice(SDL_AudioDeviceID conf_device_id) : _configured_device_id(conf_device_id) {
	if (_configured_device_id == 0) {
		// TODO: proper error handling
		throw int(1);
	}
}
SDLAudioInputDevice::~SDLAudioInputDevice(void) {
	// destroy reader streams first: their destructors unbind from the
	// (still open) virtual device
	_streams.clear();

	if (_virtual_device_id != 0) {
		SDL_CloseAudioDevice(_virtual_device_id);
		_virtual_device_id = 0;
	}
}
// Creates a new reader stream for this device.
// Lazily opens the device on the first subscription.
// Returns nullptr if the device or the sdl stream could not be created.
std::shared_ptr<FrameStream2I<AudioFrame>> SDLAudioInputDevice::subscribe(void) {
	// the format we want to consume; this is more like a hint to the
	// hardware, sdl converts for us
	const SDL_AudioSpec spec {
		SDL_AUDIO_S16,
		1, // TODO: conf
		48'000,
	};

	if (_virtual_device_id == 0) {
		// first stream, open device
		_virtual_device_id = SDL_OpenAudioDevice(_configured_device_id, &spec);
	}

	if (_virtual_device_id == 0) {
		std::cerr << "SDLAID error: failed opening device " << _configured_device_id << "\n";
		return nullptr;
	}

	// query the actual device format, so the per-stream converter goes
	// device format -> our spec
	SDL_AudioSpec device_spec = spec;
	// TODO: error check
	SDL_GetAudioDeviceFormat(_virtual_device_id, &device_spec, nullptr);

	auto* sdl_stream = SDL_CreateAudioStream(&device_spec, &spec);
	if (sdl_stream == nullptr) {
		// BUG(fixed): this was unchecked before, binding/wrapping a null stream
		std::cerr << "SDLAID error: failed creating audio stream\n";
		if (_streams.empty()) {
			// nobody else is using the device we (possibly just) opened
			SDL_CloseAudioDevice(_virtual_device_id);
			_virtual_device_id = 0;
		}
		return nullptr;
	}

	// error check
	SDL_BindAudioStream(_virtual_device_id, sdl_stream);

	auto new_stream = std::make_shared<SDLAudioStreamReader>();
	// TODO: move to ctr
	new_stream->_stream = {sdl_stream, &SDL_DestroyAudioStream};
	new_stream->_sample_rate = spec.freq;
	new_stream->_channels = spec.channels;
	new_stream->_format = spec.format;
	_streams.emplace_back(new_stream);

	return new_stream;
}
// Removes sub from the registered reader streams.
// Closes the audio device when the last stream is removed.
// Returns false if sub was not subscribed to this device.
bool SDLAudioInputDevice::unsubscribe(const std::shared_ptr<FrameStream2I<AudioFrame>>& sub) {
	for (auto it = _streams.cbegin(); it != _streams.cend(); it++) {
		if (*it == sub) {
			_streams.erase(it);

			if (_streams.empty()) {
				// last stream, close
				// TODO: make sure no shared ptr still exists???
				SDL_CloseAudioDevice(_virtual_device_id);
				std::cout << "SDLAID: closing device " << _virtual_device_id << " (" << _configured_device_id << ")\n";
				_virtual_device_id = 0;
			}

			return true;
		}
	}

	return false;
}
// does not need to be visible in the header
// Playback stream wrapper: push() feeds frames into an sdl stream bound
// to the default output device; pop() is not supported.
struct SDLAudioOutputDeviceDefaultInstance : public AudioFrameStream2I {
	std::unique_ptr<SDL_AudioStream, decltype(&SDL_DestroyAudioStream)> _stream;

	// format of the last pushed frame, used to detect format changes
	uint32_t _last_sample_rate {48'000};
	size_t _last_channels {0};
	SDL_AudioFormat _last_format {SDL_AUDIO_S16};

	// TODO: audio device
	SDLAudioOutputDeviceDefaultInstance(void);
	SDLAudioOutputDeviceDefaultInstance(SDLAudioOutputDeviceDefaultInstance&& other);

	~SDLAudioOutputDeviceDefaultInstance(void);

	int32_t size(void) override;
	std::optional<AudioFrame> pop(void) override;
	bool push(const AudioFrame& value) override;
};
SDLAudioOutputDeviceDefaultInstance::SDLAudioOutputDeviceDefaultInstance(void) : _stream(nullptr, nullptr) {
}

SDLAudioOutputDeviceDefaultInstance::SDLAudioOutputDeviceDefaultInstance(SDLAudioOutputDeviceDefaultInstance&& other) :
	_stream(std::move(other._stream)),
	// BUG(fixed): the cached format was dropped on move, which made the
	// next push() see a spurious "format changed" and reconfigure the stream
	_last_sample_rate(other._last_sample_rate),
	_last_channels(other._last_channels),
	_last_format(other._last_format)
{
}

SDLAudioOutputDeviceDefaultInstance::~SDLAudioOutputDeviceDefaultInstance(void) {
}

// unknown; sdl buffers internally
int32_t SDLAudioOutputDeviceDefaultInstance::size(void) {
	return -1;
}

std::optional<AudioFrame> SDLAudioOutputDeviceDefaultInstance::pop(void) {
	assert(false);
	// this is an output device, there is no data to pop
	return std::nullopt;
}
// Feeds one frame into the sdl playback stream.
// If the frame's format differs from the previously pushed frame, the
// stream's input format is reconfigured first (sdl converts to the device
// format for us).
// Returns false if no stream is open or sdl rejected the data.
bool SDLAudioOutputDeviceDefaultInstance::push(const AudioFrame& value) {
	if (!static_cast<bool>(_stream)) {
		return false;
	}

	// verify here the fame has the same sample type, channel count and sample freq
	// if something changed, we need to either use a temporary stream, just for conversion, or reopen _stream with the new params
	// because of data temporality, the second options looks like a better candidate
	if (
		value.sample_rate != _last_sample_rate ||
		value.channels != _last_channels ||
		(value.isF32() && _last_format != SDL_AUDIO_F32) ||
		(value.isS16() && _last_format != SDL_AUDIO_S16)
	) {
		const SDL_AudioSpec spec = {
			static_cast<SDL_AudioFormat>((value.isF32() ? SDL_AUDIO_F32 : SDL_AUDIO_S16)),
			static_cast<int>(value.channels),
			static_cast<int>(value.sample_rate)
		};
		// only the input side is changed (output side stays the device format)
		SDL_SetAudioStreamFormat(_stream.get(), &spec, nullptr);

		std::cerr << "SDLAOD: audio format changed\n";
	}

	if (value.isS16()) {
		auto data = value.getSpan<int16_t>();
		if (data.size == 0) {
			std::cerr << "empty audio frame??\n";
		}

		if (!SDL_PutAudioStreamData(_stream.get(), data.ptr, data.size * sizeof(int16_t))) {
			std::cerr << "put data error\n";
			return false; // return true?
		}
	} else if (value.isF32()) {
		auto data = value.getSpan<float>();
		if (data.size == 0) {
			std::cerr << "empty audio frame??\n";
		}

		if (!SDL_PutAudioStreamData(_stream.get(), data.ptr, data.size * sizeof(float))) {
			std::cerr << "put data error\n";
			return false; // return true?
		}
	}

	// remember the format for change detection on the next push
	_last_sample_rate = value.sample_rate;
	_last_channels = value.channels;
	_last_format = value.isF32() ? SDL_AUDIO_F32 : SDL_AUDIO_S16;

	return true;
}
SDLAudioOutputDeviceDefaultSink::~SDLAudioOutputDeviceDefaultSink(void) {
	// TODO: pause and close device?
}

// Opens a new sdl device stream on the default playback device and
// resumes it. Each subscriber gets its own stream; sdl does the mixing.
// Returns nullptr if opening failed.
std::shared_ptr<FrameStream2I<AudioFrame>> SDLAudioOutputDeviceDefaultSink::subscribe(void) {
	auto new_instance = std::make_shared<SDLAudioOutputDeviceDefaultInstance>();

	// initial input format; push() reconfigures it when frames differ
	constexpr SDL_AudioSpec spec = { SDL_AUDIO_S16, 1, 48000 };

	new_instance->_stream = {
		SDL_OpenAudioDeviceStream(SDL_AUDIO_DEVICE_DEFAULT_PLAYBACK, &spec, nullptr, nullptr),
		&SDL_DestroyAudioStream
	};
	new_instance->_last_sample_rate = spec.freq;
	new_instance->_last_channels = spec.channels;
	new_instance->_last_format = spec.format;

	if (!static_cast<bool>(new_instance->_stream)) {
		std::cerr << "SDL open audio device failed!\n";
		return nullptr;
	}

	// resume the device so pushed data actually plays
	// (devices opened via SDL_OpenAudioDeviceStream start paused per SDL docs)
	const auto audio_device_id = SDL_GetAudioStreamDevice(new_instance->_stream.get());
	SDL_ResumeAudioDevice(audio_device_id);

	return new_instance;
}

// No per-subscriber bookkeeping exists (each instance owns its own sdl
// stream), so dropping the shared_ptr is all the cleanup that is needed.
bool SDLAudioOutputDeviceDefaultSink::unsubscribe(const std::shared_ptr<FrameStream2I<AudioFrame>>& sub) {
	if (!sub) {
		return false;
	}

	return true;
}

View File

@ -0,0 +1,43 @@
#pragma once
#include "./frame_stream2.hpp"
#include "./audio_stream.hpp"
#include <SDL3/SDL.h>
#include <cstdint>
#include <vector>
// we don't have to multicast ourselves, because sdl streams and virtual devices already do this

// source
// opens device
// creates a sdl audio stream for each subscribed reader stream
struct SDLAudioInputDevice : public FrameStream2SourceI<AudioFrame> {
	// held by instances
	using sdl_stream_type = std::unique_ptr<SDL_AudioStream, decltype(&SDL_DestroyAudioStream)>;

	// device id the user configured; 0 is invalid (ctor throws)
	SDL_AudioDeviceID _configured_device_id {0};

	// logical device handle; 0 while closed.
	// opened with the first subscriber, closed again with the last.
	SDL_AudioDeviceID _virtual_device_id {0};

	// all currently subscribed reader streams
	std::vector<std::shared_ptr<FrameStream2I<AudioFrame>>> _streams;

	SDLAudioInputDevice(void);
	SDLAudioInputDevice(SDL_AudioDeviceID conf_device_id);
	~SDLAudioInputDevice(void);

	std::shared_ptr<FrameStream2I<AudioFrame>> subscribe(void) override;
	bool unsubscribe(const std::shared_ptr<FrameStream2I<AudioFrame>>& sub) override;
};
// sink
// constructs entirely new streams, since sdl handles sync and mixing for us (or should)
struct SDLAudioOutputDeviceDefaultSink : public FrameStream2SinkI<AudioFrame> {
	// TODO: pause device?

	~SDLAudioOutputDeviceDefaultSink(void);

	// opens a new sdl device stream on the default playback device;
	// returns nullptr on failure
	std::shared_ptr<FrameStream2I<AudioFrame>> subscribe(void) override;
	bool unsubscribe(const std::shared_ptr<FrameStream2I<AudioFrame>>& sub) override;
};

View File

@ -0,0 +1,167 @@
#include "./sdl_video_frame_stream2.hpp"
#include "SDL3/SDL_camera.h"
#include "SDL3/SDL_pixels.h"
#include <chrono>
#include <cstdint>
#include <iostream>
#include <memory>
#include <thread>
// TODO: move out and create lazy cam for each device
// Enumerates cameras, opens the first one, waits for permission and then
// starts a capture thread that pushes every acquired frame to all
// subscribed sub streams.
// Throws int(1) if no camera exists, int(2) if opening failed,
// int(3) if the user denied permission, int(5) if the format query failed.
SDLVideoCameraContent::SDLVideoCameraContent(void) {
	int devcount {0};
	//SDL_CameraDeviceID *devices = SDL_GetCameraDevices(&devcount);
	SDL_CameraID *devices = SDL_GetCameras(&devcount);
	std::cout << "SDL Camera Driver: " << SDL_GetCurrentCameraDriver() << "\n";

	if (devices == nullptr || devcount < 1) {
		throw int(1); // TODO: proper exception?
	}

	// debug: dump every camera and its supported formats
	std::cout << "### found cameras:\n";
	for (int i = 0; i < devcount; i++) {
		const SDL_CameraID device = devices[i];

		const char *name = SDL_GetCameraName(device);
		std::cout << " - Camera #" << i << ": " << name << "\n";

		int speccount {0};
		SDL_CameraSpec** specs = SDL_GetCameraSupportedFormats(device, &speccount);
		if (specs == nullptr) {
			std::cout << " - no supported spec\n";
		} else {
			for (int spec_i = 0; spec_i < speccount; spec_i++) {
				std::cout << " - " << specs[spec_i]->width << "x" << specs[spec_i]->height << "@" << float(specs[spec_i]->framerate_numerator)/specs[spec_i]->framerate_denominator << "fps " << SDL_GetPixelFormatName(specs[spec_i]->format) << "\n";
			}

			SDL_free(specs);
		}
	}

	{
		// experiments with forcing a specific spec; currently the camera is
		// opened with sdl's own default (nullptr) and the spec only queried
		SDL_CameraSpec spec {
			// FORCE a diffrent pixel format
			//SDL_PIXELFORMAT_RGBA8888,
			//SDL_PIXELFORMAT_UNKNOWN,
			//SDL_PIXELFORMAT_IYUV,
			SDL_PIXELFORMAT_YUY2,

			//SDL_COLORSPACE_UNKNOWN,
			//SDL_COLORSPACE_SRGB,
			//SDL_COLORSPACE_SRGB_LINEAR,
			SDL_COLORSPACE_YUV_DEFAULT,

			//1280, 720,
			//640, 360,
			//640, 480,
			696, 392,

			//1, 30
			30, 1
		};
		_camera = {
			//SDL_OpenCamera(devices[devcount-1], &spec),
			SDL_OpenCamera(devices[0], nullptr),
			//SDL_OpenCamera(devices[0], &spec),
			&SDL_CloseCamera
		};
		SDL_GetCameraFormat(_camera.get(), &spec);
	}
	SDL_free(devices);

	if (!static_cast<bool>(_camera)) {
		throw int(2);
	}

	// NOTE(review): this busy-waits (10ms polls) until the permission
	// dialog is answered; it never times out — confirm this is intended
	while (SDL_GetCameraPermissionState(_camera.get()) == 0) {
		std::cerr << "permission for camera not granted\n";
		std::this_thread::sleep_for(std::chrono::milliseconds(10));
	}

	if (SDL_GetCameraPermissionState(_camera.get()) < 0) {
		std::cerr << "user denied camera permission\n";
		throw int(3);
	}

	SDL_CameraSpec spec;
	float fps {1.f};
	if (!SDL_GetCameraFormat(_camera.get(), &spec)) {
		// meh
		throw int(5);
	} else {
		fps = float(spec.framerate_numerator)/float(spec.framerate_denominator);
		std::cout << "camera fps: " << fps << "fps (" << spec.framerate_numerator << "/" << spec.framerate_denominator << ")\n";
		auto* format_name = SDL_GetPixelFormatName(spec.format);
		std::cout << "camera format: " << format_name << "\n";
	}

	// capture loop: acquire -> push (copies per sub stream) -> release,
	// with sleeps derived from the camera fps
	_thread = std::thread([this, fps](void) {
		while (!_thread_should_quit) {
			Uint64 timestampNS = 0;

			SDL_Surface* sdl_frame_next = SDL_AcquireCameraFrame(_camera.get(), &timestampNS);

			// no new frame yet, or error
			if (sdl_frame_next == nullptr) {
				// only sleep 1/10, we expected a frame
				std::this_thread::sleep_for(std::chrono::milliseconds(int64_t((1000/fps) / 10)));
				continue;
			}

#if 0
			{ // test copy to trigger bug
				SDL_Surface* test_surf = SDL_CreateSurface(
					sdl_frame_next->w,
					sdl_frame_next->h,
					SDL_PIXELFORMAT_RGBA8888
				);

				assert(test_surf != nullptr);

				SDL_BlitSurface(sdl_frame_next, nullptr, test_surf, nullptr);

				SDL_DestroySurface(test_surf);
			}
#endif

			bool someone_listening {false};
			{
				// non-owning frame; push() copies the surface for each sub
				// stream, so the sdl frame can be released right after
				SDLVideoFrame new_frame_non_owning {
					timestampNS/1000,
					sdl_frame_next
				};

				// creates surface copies
				someone_listening = push(new_frame_non_owning);
			}
			SDL_ReleaseCameraFrame(_camera.get(), sdl_frame_next);

			if (someone_listening) {
				// double the interval on acquire
				std::this_thread::sleep_for(std::chrono::milliseconds(int64_t((1000/fps)*0.5)));
			} else {
				std::cerr << "i guess no one is listening\n";
				// we just sleep 4x as long, bc no one is listening
				std::this_thread::sleep_for(std::chrono::milliseconds(int64_t((1000/fps)*4)));
			}
		}
	});
}
SDLVideoCameraContent::~SDLVideoCameraContent(void) {
    // Stop the capture thread first and wait for it; it is the only other
    // user of _camera, so after join() we own the handle exclusively.
    _thread_should_quit = true;
    _thread.join();
    // TODO: what to do if readers are still present?

    // HACK: sdl is buggy and freezes otherwise. it is likely still possible, but rare to freeze here
    // flush unused frames
#if 1
    while (true) {
        SDL_Surface* sdl_frame_next = SDL_AcquireCameraFrame(_camera.get(), nullptr);
        if (sdl_frame_next != nullptr) {
            SDL_ReleaseCameraFrame(_camera.get(), sdl_frame_next);
        } else {
            break;
        }
    }
#endif
    // _camera itself is closed by the unique_ptr deleter (SDL_CloseCamera).
}

View File

@ -0,0 +1,71 @@
#pragma once
#include "./frame_stream2.hpp"
#include <SDL3/SDL.h>
#include <cstdint>
#include <thread>
inline void nopSurfaceDestructor(SDL_Surface*) {}
// this is very sdl specific
// A single video frame: an SDL surface plus a timestamp.
// Ownership of the surface is encoded in the unique_ptr's deleter:
// owning frames use SDL_DestroySurface, non-owning ones a no-op deleter.
struct SDLVideoFrame {
    // TODO: sequence numbering?

    // timestamp in micro seconds (nano is way too much)
    uint64_t timestampUS {0};

    std::unique_ptr<SDL_Surface, decltype(&SDL_DestroySurface)> surface {nullptr, &SDL_DestroySurface};

    // Non-owning constructor: wraps a surface someone else (e.g. the camera
    // driver) owns; the no-op deleter guarantees we never free it.
    SDLVideoFrame(
        uint64_t ts,
        SDL_Surface* surf
    ) {
        timestampUS = ts;
        surface = {surf, &nopSurfaceDestructor};
    }

    // Moves transfer the surface pointer (and its deleter) as-is.
    SDLVideoFrame(SDLVideoFrame&& other) = default;
    // BUGFIX: the user-declared (deleted) copy-assignment below suppresses the
    // implicit move-assignment; default it explicitly (Rule of Five).
    SDLVideoFrame& operator=(SDLVideoFrame&& other) = default;

    // Copy: deep-copies the surface, so a copy always owns its pixels,
    // even when copying from a non-owning frame.
    SDLVideoFrame(const SDLVideoFrame& other) {
        timestampUS = other.timestampUS;
        if (static_cast<bool>(other.surface)) {
            surface = {
                // NOTE: may be null on allocation failure; the copy then
                // simply carries no pixels
                SDL_DuplicateSurface(other.surface.get()),
                &SDL_DestroySurface
            };
        }
    }

    SDLVideoFrame& operator=(const SDLVideoFrame& other) = delete;
};
// default fan-out stream of video frames and its per-subscriber sub-stream type
using SDLVideoFrameStream2MultiSource = FrameStream2MultiSource<SDLVideoFrame>;
using SDLVideoFrameStream2 = SDLVideoFrameStream2MultiSource::sub_stream_type_t; // just use the default for now

// Video source backed by an SDL camera: a background thread acquires frames
// and pushes them to all subscribed sub-streams.
struct SDLVideoCameraContent : public SDLVideoFrameStream2MultiSource {
    // meh, empty default
    std::unique_ptr<SDL_Camera, decltype(&SDL_CloseCamera)> _camera {nullptr, &SDL_CloseCamera};

    // set by the destructor, polled by the capture thread
    std::atomic<bool> _thread_should_quit {false};
    std::thread _thread;

    // construct source and start thread
    // TODO: optimize so the thread is not always running
    SDLVideoCameraContent(void);

    // stops the thread and closes the camera
    ~SDLVideoCameraContent(void);

    // make only some of writer public
    using SDLVideoFrameStream2MultiSource::subscribe;
    using SDLVideoFrameStream2MultiSource::unsubscribe;
};

View File

@ -0,0 +1,15 @@
#pragma once
#include <solanaceae/util/span.hpp>
// most media that can be counted as "stream" comes in packets/frames/messages
// so this class provides an interface for ideal async fetching of frames
// Interface for pull-based frame readers (audio/video/...).
struct RawFrameStreamReaderI {
    // BUGFIX: implementations are used (and potentially deleted) through this
    // interface; without a virtual destructor that is undefined behaviour.
    virtual ~RawFrameStreamReaderI(void) = default;

    // return the number of ready frames in cache
    // returns -1 if unknown
    virtual int64_t have(void) = 0;

    // get next frame, empty if none
    virtual ByteSpan getNext(void) = 0;
};

View File

@ -0,0 +1,39 @@
#include "./stream_reader_sdl_audio.hpp"
// Opens the default capture device as an SDL audio stream configured for
// mono signed-16bit 48kHz and sizes the reusable sample buffer.
// buffer_size is the number of samples making up one full output frame.
SDLAudioFrameStreamReader::SDLAudioFrameStreamReader(int32_t buffer_size) : _buffer_size(buffer_size), _stream{nullptr, &SDL_DestroyAudioStream} {
    _buffer.resize(_buffer_size);

    const SDL_AudioSpec spec = { SDL_AUDIO_S16, 1, 48000 };
    _stream = {
        // NOTE(review): no error check; on failure _stream stays null and later
        // SDL_GetAudioStreamData() calls just report an error. Also, SDL3 device
        // streams start paused by default — presumably a resume call is needed
        // somewhere before data arrives; confirm against callers.
        SDL_OpenAudioDeviceStream(SDL_AUDIO_DEVICE_DEFAULT_CAPTURE, &spec, nullptr, nullptr),
        &SDL_DestroyAudioStream
    };
}
// Tries to complete one full frame (_buffer_size samples) from the stream.
// Returns a span over the whole frame on success, or an empty span when not
// enough data is available yet; partial data is kept for the next call.
// The returned span points into _buffer and is invalidated by the next call.
Span<int16_t> SDLAudioFrameStreamReader::getNextAudioFrame(void) {
    // bytes still missing for a complete frame
    const int32_t needed_bytes = (_buffer.size() - _remaining_size) * sizeof(int16_t);
    // append after the partial data carried over from the previous call
    const auto read_bytes = SDL_GetAudioStreamData(_stream.get(), _buffer.data()+_remaining_size, needed_bytes);
    if (read_bytes < 0) {
        // error
        return {};
    }

    if (read_bytes < needed_bytes) {
        // frame still incomplete; remember how much we have for next time
        // HACK: we are just assuming here that sdl never gives us half a sample!
        _remaining_size += read_bytes / sizeof(int16_t);
        return {};
    }

    // full frame assembled; the buffer starts fresh on the next call
    _remaining_size = 0;
    return Span<int16_t>{_buffer};
}
// The backing SDL audio stream gives no frame count ahead of time.
int64_t SDLAudioFrameStreamReader::have(void) {
    constexpr int64_t count_unknown {-1};
    return count_unknown;
}
// Type-erased variant of getNextAudioFrame(): same buffer, viewed as bytes.
ByteSpan SDLAudioFrameStreamReader::getNext(void) {
    const auto samples = getNextAudioFrame();
    const auto* as_bytes = reinterpret_cast<const uint8_t*>(samples.ptr);
    // NOTE(review): `size` is forwarded unscaled; if Span::size counts
    // elements, the byte span covers only half the data — confirm Span semantics.
    return ByteSpan{as_bytes, samples.size};
}

View File

@ -0,0 +1,31 @@
#pragma once
#include "./stream_reader.hpp"
#include <SDL3/SDL.h>
#include <cstdint>
#include <cstdio>
#include <memory>
struct SDLAudioFrameStreamReader : public RawFrameStreamReaderI {
// count samples per buffer
const int32_t _buffer_size {1024};
std::vector<int16_t> _buffer;
size_t _remaining_size {0}; // data still in buffer, that was remaining from last call and not enough to fill a full frame
// meh, empty default
std::unique_ptr<SDL_AudioStream, decltype(&SDL_DestroyAudioStream)> _stream;
// buffer_size in number of samples
SDLAudioFrameStreamReader(int32_t buffer_size = 1024);
// data owned by StreamReader, overwritten by next call to getNext*()
Span<int16_t> getNextAudioFrame(void);
public: // interface
int64_t have(void) override;
ByteSpan getNext(void) override;
};

View File

@ -0,0 +1,84 @@
#include "./stream_reader_sdl_video.hpp"
#include <iostream>
// Enumerates cameras, logs them, opens the first one with its default format
// and prints the negotiated frame interval.
// Throws int error codes on failure (TODO: proper exceptions).
SDLVideoFrameStreamReader::SDLVideoFrameStreamReader() : _camera{nullptr, &SDL_CloseCamera}, _surface{nullptr, &SDL_DestroySurface} {
    // enumerate
    int devcount = 0;
    SDL_CameraDeviceID *devices = SDL_GetCameraDevices(&devcount);
    if (devices == nullptr || devcount < 1) {
        SDL_free(devices); // BUGFIX: free the (possibly empty) list; safe on nullptr
        throw int(1); // TODO: proper exception?
    }

    std::cout << "### found cameras:\n";
    for (int i = 0; i < devcount; i++) {
        const SDL_CameraDeviceID device = devices[i];
        char *name = SDL_GetCameraDeviceName(device);
        std::cout << " - Camera #" << i << ": " << name << "\n";
        SDL_free(name);
    }

    _camera = {
        SDL_OpenCameraDevice(devices[0], nullptr),
        &SDL_CloseCamera
    };
    // BUGFIX: the device list is caller-owned and was leaked before
    SDL_free(devices);

    if (!static_cast<bool>(_camera)) {
        throw int(2);
    }

    SDL_CameraSpec spec;
    if (SDL_GetCameraFormat(_camera.get(), &spec) < 0) {
        // meh
    } else {
        // interval
        float interval = float(spec.interval_numerator)/float(spec.interval_denominator);
        std::cout << "camera interval: " << interval*1000 << "ms\n";
    }
}
// Acquires the newest camera frame and converts it to RGBA8888.
// Returns an empty (all-zero) frame when no new frame is ready or on error.
// The returned pixel span views into _surface, which stays locked and is
// replaced (invalidating the span) by the next call.
SDLVideoFrameStreamReader::VideoFrame SDLVideoFrameStreamReader::getNextVideoFrameRGBA(void) {
    if (!static_cast<bool>(_camera)) {
        return {};
    }

    Uint64 timestampNS = 0;
    SDL_Surface* frame_next = SDL_AcquireCameraFrame(_camera.get(), &timestampNS);
    // no new frame yet, or error
    if (frame_next == nullptr) {
        //std::cout << "failed acquiring frame\n";
        return {};
    }

    // TODO: investigate zero copy
    _surface = {
        SDL_ConvertSurfaceFormat(frame_next, SDL_PIXELFORMAT_RGBA8888),
        &SDL_DestroySurface
    };
    SDL_ReleaseCameraFrame(_camera.get(), frame_next);

    // BUGFIX: the conversion can fail; previously a null _surface
    // was dereferenced below
    if (!static_cast<bool>(_surface)) {
        return {};
    }

    SDL_LockSurface(_surface.get());

    return {
        _surface->w,
        _surface->h,
        timestampNS,
        {
            reinterpret_cast<const uint8_t*>(_surface->pixels),
            uint64_t(_surface->w*_surface->h*4) // rgba
        }
    };
}
// Frame availability cannot be queried from the camera backend.
int64_t SDLVideoFrameStreamReader::have(void) {
    constexpr int64_t count_unknown {-1};
    return count_unknown;
}
// Generic-interface variant: returns the raw RGBA pixels of the next frame,
// empty if no new frame is ready. Data is owned by this reader and is
// overwritten by the next call (same contract as the audio reader's getNext()).
ByteSpan SDLVideoFrameStreamReader::getNext(void) {
    // BUGFIX: was an unimplemented stub that always returned an empty span
    return getNextVideoFrameRGBA().data;
}

View File

@ -0,0 +1,34 @@
#pragma once
#include "./stream_reader.hpp"
#include <SDL3/SDL.h>
#include <cstdint>
#include <cstdio>
#include <memory>
// Pull-based camera reader: opens the first camera on construction and hands
// out RGBA frames on demand (see RawFrameStreamReaderI for the generic side).
struct SDLVideoFrameStreamReader : public RawFrameStreamReaderI {
    // meh, empty default
    std::unique_ptr<SDL_Camera, decltype(&SDL_CloseCamera)> _camera;
    // scratch surface holding the last converted frame; reused/replaced each call
    std::unique_ptr<SDL_Surface, decltype(&SDL_DestroySurface)> _surface;

    // opens the first enumerated camera; throws int error codes on failure
    SDLVideoFrameStreamReader(void);

    // a single decoded frame; data views into the reader-owned surface
    struct VideoFrame {
        int32_t width {0};
        int32_t height {0};
        uint64_t timestampNS {0};
        ByteSpan data;
    };

    // data owned by StreamReader, overwritten by next call to getNext*()
    VideoFrame getNextVideoFrameRGBA(void);

    public: // interface
    int64_t have(void) override;
    ByteSpan getNext(void) override;
};

718
src/debug_tox_call.cpp Normal file
View File

@ -0,0 +1,718 @@
#include "./debug_tox_call.hpp"
#include "./stream_manager.hpp"
#include "./content/audio_stream.hpp"
#include "./content/sdl_video_frame_stream2.hpp"
#include <SDL3/SDL.h>
#include <imgui/imgui.h>
#include <cstring>
#include <cstdint>
#include <iostream>
#include <memory>
#include <optional>
// fwd declaration, avoids pulling in the message framework header here;
// defined elsewhere, returns a timestamp in milliseconds (used below as ms -> us)
namespace Message {
    uint64_t getTimeMS();
}
// True for SDL's YUV FOURCC pixel formats — despite the name this covers
// packed (YUY2/UYVY/YVYU) as well as planar/semi-planar layouts, i.e.
// everything that cannot be treated as a plain RGB-ish surface.
static bool isFormatPlanar(SDL_PixelFormat f) {
    switch (f) {
        case SDL_PIXELFORMAT_YV12: // fall-through
        case SDL_PIXELFORMAT_IYUV:
        case SDL_PIXELFORMAT_YUY2:
        case SDL_PIXELFORMAT_UYVY:
        case SDL_PIXELFORMAT_YVYU:
        case SDL_PIXELFORMAT_NV12:
        case SDL_PIXELFORMAT_NV21:
        case SDL_PIXELFORMAT_P010:
            return true;
        default:
            return false;
    }
}
// Converts a packed YUY2 (4:2:2) surface to planar IYUV (4:2:0).
// Y is copied 1:1; U/V are averaged over each vertical 2-row pair.
// Returns a new caller-owned surface, or nullptr on error
// (wrong input format, odd width, allocation failure).
static SDL_Surface* convertYUY2_IYUV(SDL_Surface* surf) {
    if (surf->format != SDL_PIXELFORMAT_YUY2) {
        return nullptr;
    }

    if ((surf->w % 2) != 0) {
        SDL_SetError("YUY2->IYUV does not support odd widths");
        // hmmm, we dont handle odd widths
        return nullptr;
    }

    SDL_LockSurface(surf);

    SDL_Surface* conv_surf = SDL_CreateSurface(surf->w, surf->h, SDL_PIXELFORMAT_IYUV);
    if (conv_surf == nullptr) {
        // BUGFIX: allocation failure was dereferenced unchecked before
        SDL_UnlockSurface(surf);
        return nullptr;
    }
    SDL_LockSurface(conv_surf);

    // YUY2 is 4:2:2 packed
    // Y is simple, we just copy it over
    // U V are double the resolution (vertically), so we avg both
    // Packed mode: Y0+U0+Y1+V0 (1 plane)
    uint8_t* y_plane = static_cast<uint8_t*>(conv_surf->pixels);
    uint8_t* u_plane = static_cast<uint8_t*>(conv_surf->pixels) + conv_surf->w*conv_surf->h;
    uint8_t* v_plane = static_cast<uint8_t*>(conv_surf->pixels) + conv_surf->w*conv_surf->h + (conv_surf->w/2)*(conv_surf->h/2);

    const uint8_t* yuy2_data = static_cast<const uint8_t*>(surf->pixels);

    for (int y = 0; y < surf->h; y++) {
        // BUGFIX: address source rows via pitch; rows can be padded beyond w*2 bytes
        const uint8_t* yuy2_row = yuy2_data + y*surf->pitch;
        for (int x = 0; x < surf->w; x += 2) {
            // every pixel uses 2 bytes
            const uint8_t* yuy2_curser = yuy2_row + x*2;
            uint8_t src_y0 = yuy2_curser[0];
            uint8_t src_u = yuy2_curser[1];
            uint8_t src_y1 = yuy2_curser[2];
            uint8_t src_v = yuy2_curser[3];

            y_plane[y*conv_surf->w + x] = src_y0;
            y_plane[y*conv_surf->w + x+1] = src_y1;

            size_t uv_index = (y/2) * (conv_surf->w/2) + x/2;
            if (y % 2 == 0) {
                // first write
                u_plane[uv_index] = src_u;
                v_plane[uv_index] = src_v;
            } else {
                // second write, mix with existing value
                u_plane[uv_index] = (int(u_plane[uv_index]) + int(src_u)) / 2;
                v_plane[uv_index] = (int(v_plane[uv_index]) + int(src_v)) / 2;
            }
        }
    }

    SDL_UnlockSurface(conv_surf);
    SDL_UnlockSurface(surf);

    return conv_surf;
}
// Queued video stream that converts every pushed frame to _forced_format
// (IYUV by default) before queueing, so consumers always see one format.
struct PushConversionQueuedVideoStream : public QueuedFrameStream2<SDLVideoFrame> {
    SDL_PixelFormat _forced_format {SDL_PIXELFORMAT_IYUV};

    PushConversionQueuedVideoStream(size_t queue_size, bool lossy = true) : QueuedFrameStream2<SDLVideoFrame>(queue_size, lossy) {}
    ~PushConversionQueuedVideoStream(void) {}

    // Converts (if needed) and enqueues the frame.
    // Returns false when conversion fails or the queue rejects the frame.
    bool push(const SDLVideoFrame& value) override {
        SDL_Surface* converted_surf = value.surface.get();
        if (converted_surf->format != _forced_format) {
            if (converted_surf->format == SDL_PIXELFORMAT_YUY2 && _forced_format == SDL_PIXELFORMAT_IYUV) {
                // optimized custom impl (~3ms, vs ~60ms for the generic path below)
                converted_surf = convertYUY2_IYUV(converted_surf);
            } else if (isFormatPlanar(converted_surf->format)) {
                // meh, need to convert to rgb as a stopgap
                // TODO: fix sdl rgb->yuv conversion resulting in too dark (colorspace) issues
                SDL_Surface* tmp_conv_surf = SDL_ConvertSurfaceAndColorspace(converted_surf, SDL_PIXELFORMAT_RGB24, nullptr, SDL_COLORSPACE_RGB_DEFAULT, 0);
                // CONSISTENCY FIX: target _forced_format instead of hard-coded IYUV
                converted_surf = SDL_ConvertSurfaceAndColorspace(tmp_conv_surf, _forced_format, nullptr, SDL_COLORSPACE_YUV_DEFAULT, 0);
                SDL_DestroySurface(tmp_conv_surf);
            } else {
                // CONSISTENCY FIX: target _forced_format instead of hard-coded IYUV
                converted_surf = SDL_ConvertSurface(converted_surf, _forced_format);
            }

            if (converted_surf == nullptr) {
                // oh god
                std::cerr << "DTC error: failed to convert surface to IYUV: " << SDL_GetError() << "\n";
                return false;
            }
        }

        assert(converted_surf != nullptr);
        if (converted_surf != value.surface.get()) {
            // conversion allocated a new surface; hand its ownership to the frame
            // TODO: add ctr with uptr
            SDLVideoFrame new_value{value.timestampUS, nullptr};
            new_value.surface = {
                converted_surf,
                &SDL_DestroySurface
            };
            return QueuedFrameStream2<SDLVideoFrame>::push(std::move(new_value));
        } else {
            return QueuedFrameStream2<SDLVideoFrame>::push(value);
        }
    }
};
// exclusive sink: queues video frames for sending to a toxav friend call.
// Subscribing enables the video bitrate; unsubscribing/destruction sets it
// back to 0 (disabled).
// TODO: replace with something better than a queue
struct ToxAVCallVideoSink : public FrameStream2SinkI<SDLVideoFrame> {
    ToxAV& _toxav;

    // bitrate for enabled state
    uint32_t _video_bitrate {2};

    // toxav friend number this sink sends to
    uint32_t _fid;
    // the single exclusive writer; push() converts frames to IYUV
    std::shared_ptr<PushConversionQueuedVideoStream> _writer;

    ToxAVCallVideoSink(ToxAV& toxav, uint32_t fid) : _toxav(toxav), _fid(fid) {}
    ~ToxAVCallVideoSink(void) {
        if (_writer) {
            _writer = nullptr;
            // disable video sending on teardown
            _toxav.toxavVideoSetBitRate(_fid, 0);
        }
    }

    // sink
    // hands out the single writer; nullptr if already taken or if
    // enabling the bitrate failed
    std::shared_ptr<FrameStream2I<SDLVideoFrame>> subscribe(void) override {
        if (_writer) {
            // max 1 (exclusive, composite video somewhere else)
            return nullptr;
        }

        auto err = _toxav.toxavVideoSetBitRate(_fid, _video_bitrate);
        if (err != TOXAV_ERR_BIT_RATE_SET_OK) {
            return nullptr;
        }

        // 10 entries, lossy queue
        _writer = std::make_shared<PushConversionQueuedVideoStream>(10, true);

        return _writer;
    }

    // releases the writer and disables video (bitrate 0)
    bool unsubscribe(const std::shared_ptr<FrameStream2I<SDLVideoFrame>>& sub) override {
        if (!sub || !_writer) {
            // nah
            return false;
        }

        if (sub == _writer) {
            _writer = nullptr;

            /*auto err = */_toxav.toxavVideoSetBitRate(_fid, 0);
            // print warning? on error?

            return true;
        }

        // what
        return false;
    }
};
// TODO: make proper adapter
// Re-frames s16 audio pulled from an upstream FrameStream2 into fixed
// frame_length_ms sized frames, buffering any remainder between calls.
struct AudioStreamReFramer {
    // upstream to pull from; must be set (externally) before pop() is called
    FrameStream2I<AudioFrame>* _stream {nullptr};
    // desired output frame length
    uint32_t frame_length_ms {10};

    // sequence number stamped onto outgoing (re-framed) frames
    uint32_t own_seq_counter {0};

    // accumulator for samples not yet emitted
    std::vector<int16_t> buffer;
    size_t samples_in_buffer {0}; // absolute, so divide by ch for actual length

    // last seen input config; any change discards buffered samples
    uint32_t seq {0};
    uint32_t sample_rate {48'000};
    size_t channels {0};

    // Pulls at most one frame from upstream, appends it to the buffer, and
    // returns one fixed-size frame if enough samples are buffered,
    // std::nullopt otherwise.
    std::optional<AudioFrame> pop(void) {
        assert(_stream != nullptr);
        auto new_in = _stream->pop();
        if (new_in.has_value()) {
            auto& new_value = new_in.value();
            assert(new_value.isS16());
            if (!new_value.isS16()) {
                return std::nullopt;
            }

            if (
                (buffer.empty()) || // buffer not yet inited
                (sample_rate != new_value.sample_rate || channels != new_value.channels) // not the same
            ) {
                seq = 0;
                sample_rate = new_value.sample_rate;
                channels = new_value.channels;

                // buffer does not exist or config changed and we discard
                // preallocate to 2x desired buffer size
                buffer = std::vector<int16_t>(2 * (channels*sample_rate*frame_length_ms)/1000);
                samples_in_buffer = 0;
            }

            // TODO: this will be very important in the future
            // replace seq with timestapsUS like video??
#if 0
            // some time / seq comparison shit
            if (seq != 0 && new_value.seq != 0) {
                if (seq+1 != new_value.seq) {
                    // we skipped shit
                    // TODO: insert silence to pad?

                    // drop existing
                    samples_in_buffer = 0;
                }
            }
#endif

            // update to latest
            seq = new_value.seq;

            auto new_span = new_value.getSpan<int16_t>();

            //std::cout << "new incoming frame is " << new_value.getSpan<int16_t>().size/new_value.channels*1000/new_value.sample_rate << "ms\n";

            // now append
            // buffer too small -> grow just enough to fit the new samples
            if (buffer.size() - samples_in_buffer < new_value.getSpan<int16_t>().size) {
                buffer.resize(buffer.size() + new_value.getSpan<int16_t>().size - (buffer.size() - samples_in_buffer));
            }

            // TODO: memcpy
            for (size_t i = 0; i < new_span.size; i++) {
                buffer.at(samples_in_buffer+i) = new_span[i];
            }
            samples_in_buffer += new_span.size;
        } else if (buffer.empty() || samples_in_buffer == 0) {
            // first pop might result in invalid state
            return std::nullopt;
        }

        // samples needed for one output frame (all channels interleaved)
        const size_t desired_size {frame_length_ms * sample_rate * channels / 1000};

        // > threshold?
        if (samples_in_buffer < desired_size) {
            return std::nullopt;
        }

        std::vector<int16_t> return_buffer(desired_size);

        // copy data
        for (size_t i = 0; i < return_buffer.size(); i++) {
            return_buffer.at(i) = buffer.at(i);
        }

        // now crop buffer (meh)
        // move data from back to front
        for (size_t i = 0; i < samples_in_buffer-return_buffer.size(); i++) {
            buffer.at(i) = buffer.at(desired_size + i);
        }
        samples_in_buffer -= return_buffer.size();

        return AudioFrame{
            own_seq_counter++,
            sample_rate,
            channels,
            std::move(return_buffer),
        };
    }
};
// exclusive sink: queues audio frames for sending to a toxav friend call.
// Subscribing enables the audio bitrate; unsubscribing/destruction sets it
// back to 0 (disabled).
struct ToxAVCallAudioSink : public FrameStream2SinkI<AudioFrame> {
    ToxAV& _toxav;

    // bitrate for enabled state
    uint32_t _audio_bitrate {32};

    // toxav friend number this sink sends to
    uint32_t _fid;
    // the single exclusive writer (non-lossy queue, see subscribe())
    std::shared_ptr<QueuedFrameStream2<AudioFrame>> _writer;

    ToxAVCallAudioSink(ToxAV& toxav, uint32_t fid) : _toxav(toxav), _fid(fid) {}
    ~ToxAVCallAudioSink(void) {
        if (_writer) {
            _writer = nullptr;
            // disable audio sending on teardown
            _toxav.toxavAudioSetBitRate(_fid, 0);
        }
    }

    // sink
    // hands out the single writer; nullptr if already taken or if
    // enabling the bitrate failed
    std::shared_ptr<FrameStream2I<AudioFrame>> subscribe(void) override {
        if (_writer) {
            // max 1 (exclusive for now)
            return nullptr;
        }

        auto err = _toxav.toxavAudioSetBitRate(_fid, _audio_bitrate);
        if (err != TOXAV_ERR_BIT_RATE_SET_OK) {
            return nullptr;
        }

        // 16 entries, non-lossy
        _writer = std::make_shared<QueuedFrameStream2<AudioFrame>>(16, false);

        return _writer;
    }

    // releases the writer and disables audio (bitrate 0)
    bool unsubscribe(const std::shared_ptr<FrameStream2I<AudioFrame>>& sub) override {
        if (!sub || !_writer) {
            // nah
            return false;
        }

        if (sub == _writer) {
            _writer = nullptr;

            /*auto err = */_toxav.toxavAudioSetBitRate(_fid, 0);
            // print warning? on error?

            return true;
        }

        // what
        return false;
    }
};
// Subscribes to every toxav event this debug view reacts to.
DebugToxCall::DebugToxCall(ObjectStore2& os, ToxAV& toxav, TextureUploaderI& tu) : _os(os), _toxav(toxav), _tu(tu) {
    _toxav.subscribe(this, ToxAV_Event::friend_call);
    _toxav.subscribe(this, ToxAV_Event::friend_call_state);
    _toxav.subscribe(this, ToxAV_Event::friend_audio_bitrate);
    _toxav.subscribe(this, ToxAV_Event::friend_video_bitrate);
    _toxav.subscribe(this, ToxAV_Event::friend_audio_frame);
    _toxav.subscribe(this, ToxAV_Event::friend_video_frame);
}
// Tears down every per-call stream entity: fire the destroy event first,
// then remove the entity from the object store.
DebugToxCall::~DebugToxCall(void) {
    // destroy all calls/connections/sources/sinks here
    auto tear_down = [this](auto& handle) {
        if (static_cast<bool>(handle)) {
            _os.throwEventDestroy(handle);
            handle.destroy();
        }
    };

    for (auto& [fid, call] : _calls) {
        tear_down(call.incoming_vsrc);
        tear_down(call.incoming_asrc);
        tear_down(call.outgoing_vsink);
        tear_down(call.outgoing_asink);
    }
}
// Pumps queued outgoing frames from all registered toxav sinks into toxcore.
void DebugToxCall::tick(float) {
    // pump sinks to tox
    // TODO: own thread or direct on push (requires thread save toxcore)
    // TODO: pump at double the frame rate
    for (const auto& [oc, vsink] : _os.registry().view<ToxAVCallVideoSink*>().each()) {
        if (!vsink->_writer) {
            continue;
        }

        // drain at most 10 frames per tick to bound time spent here
        for (size_t i = 0; i < 10; i++) {
            auto new_frame_opt = vsink->_writer->pop();
            if (!new_frame_opt.has_value()) {
                break;
            }

            if (!new_frame_opt.value().surface) {
                // wtf?
                continue;
            }

            // conversion is done in the sinks stream
            SDL_Surface* surf = new_frame_opt.value().surface.get();
            assert(surf != nullptr);

            SDL_LockSurface(surf);
            // planes are consecutive: Y (w*h), U (w/2 * h/2), V (w/2 * h/2)
            _toxav.toxavVideoSendFrame(
                vsink->_fid,
                surf->w, surf->h,
                static_cast<const uint8_t*>(surf->pixels),
                static_cast<const uint8_t*>(surf->pixels) + surf->w * surf->h,
                static_cast<const uint8_t*>(surf->pixels) + surf->w * surf->h + (surf->w/2) * (surf->h/2)
            );
            SDL_UnlockSurface(surf);
        }
    }

    for (const auto& [oc, asink, asrf] : _os.registry().view<ToxAVCallAudioSink*, AudioStreamReFramer>().each()) {
        if (!asink->_writer) {
            continue;
        }

        // (re)point the reframer at this sink's writer before pulling
        asrf._stream = asink->_writer.get();

        for (size_t i = 0; i < 10; i++) {
            //auto new_frame_opt = asink->_writer->pop();
            auto new_frame_opt = asrf.pop();
            if (!new_frame_opt.has_value()) {
                break;
            }
            const auto& new_frame = new_frame_opt.value();
            assert(new_frame.isS16());

            //* @param sample_count Number of samples in this frame. Valid numbers here are
            //* `((sample rate) * (audio length) / 1000)`, where audio length can be
            //* 2.5, 5, 10, 20, 40 or 60 milliseconds.

            // we likely needs to subdivide/repackage
            // frame size should be an option exposed to the user
            // with 10ms as a default ?
            // the larger the frame size, the less overhead but the more delay

            auto err = _toxav.toxavAudioSendFrame(
                asink->_fid,
                new_frame.getSpan<int16_t>().ptr,
                new_frame.getSpan<int16_t>().size / new_frame.channels,
                new_frame.channels,
                new_frame.sample_rate
            );
            if (err != TOXAV_ERR_SEND_FRAME_OK) {
                std::cerr << "DTC: failed to send audio frame " << err << "\n";
            }
        }
    }
}
// Draws the toxav debug window: per call, offers "answer" (creating the
// call's stream sources/sinks) or "hang up" (destroying them again).
// Returns the desired delay until the next redraw.
float DebugToxCall::render(void) {
    float next_frame {2.f};
    if (ImGui::Begin("toxav debug")) {
        ImGui::Text("Calls:");
        ImGui::Indent();
        for (auto& [fid, call] : _calls) {
            ImGui::PushID(fid);

            ImGui::Text("fid:%d state:%d", fid, call.state);
            if (call.incoming) {
                ImGui::SameLine();
                if (ImGui::SmallButton("answer")) {
                    // answer with 0/0 bitrates; bitrates get enabled when
                    // something subscribes to the sinks
                    const auto ret = _toxav.toxavAnswer(fid, 0, 0);
                    //const auto ret = _toxav.toxavAnswer(fid, 0, 1); // 1mbit/s
                    //const auto ret = _toxav.toxavAnswer(fid, 0, 2); // 2mbit/s
                    //const auto ret = _toxav.toxavAnswer(fid, 0, 100); // 100mbit/s
                    //const auto ret = _toxav.toxavAnswer(fid, 0, 2500); // 2500mbit/s
                    if (ret == TOXAV_ERR_ANSWER_OK) {
                        call.incoming = false;

                        // create sinks
                        call.outgoing_vsink = {_os.registry(), _os.registry().create()};
                        {
                            auto new_vsink = std::make_unique<ToxAVCallVideoSink>(_toxav, fid);
                            call.outgoing_vsink.emplace<ToxAVCallVideoSink*>(new_vsink.get());
                            call.outgoing_vsink.emplace<Components::FrameStream2Sink<SDLVideoFrame>>(std::move(new_vsink));
                            call.outgoing_vsink.emplace<Components::StreamSink>(Components::StreamSink::create<SDLVideoFrame>("ToxAV Friend Call Outgoing Video"));
                            _os.throwEventConstruct(call.outgoing_vsink);
                        }
                        call.outgoing_asink = {_os.registry(), _os.registry().create()};
                        {
                            auto new_asink = std::make_unique<ToxAVCallAudioSink>(_toxav, fid);
                            call.outgoing_asink.emplace<ToxAVCallAudioSink*>(new_asink.get());
                            // reframer chops incoming frames into 10ms pieces for toxav
                            call.outgoing_asink.emplace<AudioStreamReFramer>().frame_length_ms = 10;
                            call.outgoing_asink.emplace<Components::FrameStream2Sink<AudioFrame>>(std::move(new_asink));
                            call.outgoing_asink.emplace<Components::StreamSink>(Components::StreamSink::create<AudioFrame>("ToxAV Friend Call Outgoing Audio"));
                            _os.throwEventConstruct(call.outgoing_asink);
                        }

                        // create sources (only for media the caller offered)
                        if (call.incoming_v) {
                            call.incoming_vsrc = {_os.registry(), _os.registry().create()};
                            {
                                auto new_vsrc = std::make_unique<SDLVideoFrameStream2MultiSource>();
                                call.incoming_vsrc.emplace<SDLVideoFrameStream2MultiSource*>(new_vsrc.get());
                                call.incoming_vsrc.emplace<Components::FrameStream2Source<SDLVideoFrame>>(std::move(new_vsrc));
                                call.incoming_vsrc.emplace<Components::StreamSource>(Components::StreamSource::create<SDLVideoFrame>("ToxAV Friend Call Incoming Video"));
                                _os.throwEventConstruct(call.incoming_vsrc);
                            }
                        }
                        if (call.incoming_a) {
                            call.incoming_asrc = {_os.registry(), _os.registry().create()};
                            {
                                auto new_asrc = std::make_unique<AudioFrameStream2MultiSource>();
                                call.incoming_asrc.emplace<AudioFrameStream2MultiSource*>(new_asrc.get());
                                call.incoming_asrc.emplace<Components::FrameStream2Source<AudioFrame>>(std::move(new_asrc));
                                call.incoming_asrc.emplace<Components::StreamSource>(Components::StreamSource::create<AudioFrame>("ToxAV Friend Call Incoming Audio"));
                                _os.throwEventConstruct(call.incoming_asrc);
                            }
                        }
                    }
                }
            } else if (call.state != TOXAV_FRIEND_CALL_STATE_FINISHED) {
                // active call: request frequent redraws
                next_frame = std::min(next_frame, 0.1f);
                ImGui::SameLine();
                if (ImGui::SmallButton("hang up")) {
                    const auto ret = _toxav.toxavCallControl(fid, TOXAV_CALL_CONTROL_CANCEL);
                    if (ret == TOXAV_ERR_CALL_CONTROL_OK) {
                        // we hung up
                        // not sure if its possible for toxcore to tell this us too when the other side does this at the same time?
                        call.state = TOXAV_FRIEND_CALL_STATE_FINISHED;

                        // TODO: stream manager disconnectAll()
                        if (static_cast<bool>(call.incoming_vsrc)) {
                            _os.throwEventDestroy(call.incoming_vsrc);
                            call.incoming_vsrc.destroy();
                        }
                        if (static_cast<bool>(call.incoming_asrc)) {
                            _os.throwEventDestroy(call.incoming_asrc);
                            call.incoming_asrc.destroy();
                        }

                        if (static_cast<bool>(call.outgoing_vsink)) {
                            _os.throwEventDestroy(call.outgoing_vsink);
                            call.outgoing_vsink.destroy();
                        }
                        if (static_cast<bool>(call.outgoing_asink)) {
                            _os.throwEventDestroy(call.outgoing_asink);
                            call.outgoing_asink.destroy();
                        }
                    }
                }

                //if (ImGui::BeginCombo("audio src", "---")) {
                //    ImGui::EndCombo();
                //}
                //if (ImGui::BeginCombo("video src", "---")) {
                //    ImGui::EndCombo();
                //}
            }
            ImGui::PopID();
        }
        ImGui::Unindent();
    }
    ImGui::End();

    return next_frame;
}
// A friend is calling us: record the pending incoming call and which media
// (audio/video) they offer. Accepting happens later via the UI in render().
bool DebugToxCall::onEvent(const Events::FriendCall& e) {
    Call& incoming_call = _calls[e.friend_number];
    incoming_call.state = TOXAV_FRIEND_CALL_STATE_NONE;
    incoming_call.incoming = true;
    incoming_call.incoming_a = e.audio_enabled;
    incoming_call.incoming_v = e.video_enabled;
    return true;
}
// Call state changed. On FINISHED or ERROR, tear down all stream entities
// belonging to that call (the destroy event is fired before the entity is
// actually removed).
bool DebugToxCall::onEvent(const Events::FriendCallState& e) {
    auto& call = _calls[e.friend_number];
    call.state = e.state;

    if (
        (call.state & TOXAV_FRIEND_CALL_STATE_FINISHED) != 0 ||
        (call.state & TOXAV_FRIEND_CALL_STATE_ERROR) != 0
    ) {
        if (static_cast<bool>(call.incoming_vsrc)) {
            _os.throwEventDestroy(call.incoming_vsrc);
            call.incoming_vsrc.destroy();
        }
        if (static_cast<bool>(call.incoming_asrc)) {
            _os.throwEventDestroy(call.incoming_asrc);
            call.incoming_asrc.destroy();
        }

        if (static_cast<bool>(call.outgoing_vsink)) {
            _os.throwEventDestroy(call.outgoing_vsink);
            call.outgoing_vsink.destroy();
        }
        if (static_cast<bool>(call.outgoing_asink)) {
            _os.throwEventDestroy(call.outgoing_asink);
            call.outgoing_asink.destroy();
        }
    }
    return true;
}
// Audio bitrate suggestions from toxav are not handled by this debug view.
bool DebugToxCall::onEvent(const Events::FriendAudioBitrate&) {
    return false;
}
// Video bitrate suggestions from toxav are not handled by this debug view.
bool DebugToxCall::onEvent(const Events::FriendVideoBitrate&) {
    return false;
}
// Incoming decoded audio: copy the pcm into the call's incoming audio source.
bool DebugToxCall::onEvent(const Events::FriendAudioFrame& e) {
    auto& call = _calls[e.friend_number];

    if (!static_cast<bool>(call.incoming_asrc)) {
        // missing src to put frame into ??
        return false;
    }

    assert(call.incoming_asrc.all_of<AudioFrameStream2MultiSource*>());
    assert(call.incoming_asrc.all_of<Components::FrameStream2Source<AudioFrame>>());

    call.num_a_frames++;

    call.incoming_asrc.get<AudioFrameStream2MultiSource*>()->push(AudioFrame{
        0, //seq (incoming toxav audio carries no sequence number here)
        e.sampling_rate,
        e.channels,
        std::vector<int16_t>(e.pcm.begin(), e.pcm.end()) // copy
    });
    return true;
}
// Incoming decoded video frame: copy the three planes into a fresh IYUV
// surface and push it into the call's incoming video source.
bool DebugToxCall::onEvent(const Events::FriendVideoFrame& e) {
    // TODO: skip if we dont know about this call
    auto& call = _calls[e.friend_number];

    if (!static_cast<bool>(call.incoming_vsrc)) {
        // missing src to put frame into ??
        return false;
    }

    assert(call.incoming_vsrc.all_of<SDLVideoFrameStream2MultiSource*>());
    assert(call.incoming_vsrc.all_of<Components::FrameStream2Source<SDLVideoFrame>>());

    call.num_v_frames++;

    auto* new_surf = SDL_CreateSurface(e.width, e.height, SDL_PIXELFORMAT_IYUV);
    assert(new_surf);
    if (SDL_LockSurface(new_surf)) {
        // copy the data
        // we know how the implementation works, its y u v consecutivlely
        // NOTE(review): destination rows are addressed with e.width, not
        // new_surf->pitch — assumes tightly packed IYUV planes; confirm

        // y plane, full resolution
        for (size_t y = 0; y < e.height; y++) {
            std::memcpy(
                //static_cast<uint8_t*>(new_surf->pixels) + new_surf->pitch*y,
                static_cast<uint8_t*>(new_surf->pixels) + e.width*y,
                e.y.ptr + e.ystride*y,
                e.width
            );
        }

        // u plane, quarter resolution (w/2 x h/2), right after y
        for (size_t y = 0; y < e.height/2; y++) {
            std::memcpy(
                static_cast<uint8_t*>(new_surf->pixels) + (e.width*e.height) + (e.width/2)*y,
                e.u.ptr + e.ustride*y,
                e.width/2
            );
        }

        // v plane, quarter resolution, right after u
        for (size_t y = 0; y < e.height/2; y++) {
            std::memcpy(
                static_cast<uint8_t*>(new_surf->pixels) + (e.width*e.height) + ((e.width/2)*(e.height/2)) + (e.width/2)*y,
                e.v.ptr + e.vstride*y,
                e.width/2
            );
        }

        SDL_UnlockSurface(new_surf);
    }

    call.incoming_vsrc.get<SDLVideoFrameStream2MultiSource*>()->push({
        // ms -> us
        Message::getTimeMS() * 1000, // TODO: make more precise
        new_surf
    });
    // push() takes/copies the frame, so we can drop our surface now
    SDL_DestroySurface(new_surf);

    return true;
}

52
src/debug_tox_call.hpp Normal file
View File

@ -0,0 +1,52 @@
#pragma once
//#include <solanaceae/object_store/fwd.hpp>
#include <solanaceae/object_store/object_store.hpp>
#include "./tox_av.hpp"
#include "./texture_uploader.hpp"
#include <map>
#include <cstdint>
// Debug UI + plumbing for toxav friend calls: tracks per-friend call state,
// creates the stream sources/sinks for each call and pumps outgoing frames.
class DebugToxCall : public ToxAVEventI {
    ObjectStore2& _os;
    ToxAV& _toxav;
    TextureUploaderI& _tu;

    // per-call bookkeeping plus the stream entities owned by that call
    struct Call {
        bool incoming {false}; // true while a call waits to be answered
        bool incoming_a {false}; // caller offers audio
        bool incoming_v {false}; // caller offers video
        uint32_t state {0}; // ? just last state ?

        uint32_t incomming_abr {0};
        uint32_t incomming_vbr {0};

        // received frame counters (debug display)
        size_t num_a_frames {0};
        size_t num_v_frames {0};

        // object-store entities for this call's streams (may be null handles)
        ObjectHandle incoming_vsrc;
        ObjectHandle incoming_asrc;

        ObjectHandle outgoing_vsink;
        ObjectHandle outgoing_asink;
    };
    // tox friend id -> call
    std::map<uint32_t, Call> _calls;

    public:
        DebugToxCall(ObjectStore2& os, ToxAV& toxav, TextureUploaderI& tu);
        ~DebugToxCall(void);

        // pumps queued outgoing av frames to toxcore
        void tick(float time_delta);

        // draws the debug window; returns the desired delay until next redraw
        float render(void);

    protected: // toxav events
        bool onEvent(const Events::FriendCall&) override;
        bool onEvent(const Events::FriendCallState&) override;
        bool onEvent(const Events::FriendAudioBitrate&) override;
        bool onEvent(const Events::FriendVideoBitrate&) override;
        bool onEvent(const Events::FriendAudioFrame&) override;
        bool onEvent(const Events::FriendVideoFrame&) override;
};

210
src/debug_video_tap.cpp Normal file
View File

@ -0,0 +1,210 @@
#include "./debug_video_tap.hpp"
#include <solanaceae/object_store/object_store.hpp>
#include <entt/entity/entity.hpp>
#include <SDL3/SDL.h>
#include <imgui/imgui.h>
#include "./content/sdl_video_frame_stream2.hpp"
#include <string>
#include <memory>
#include <iostream>
// Exclusive sink the video tap window reads from: at most one writer, with a
// single-slot lossy queue so only the most recent frame is kept.
struct DebugVideoTapSink : public FrameStream2SinkI<SDLVideoFrame> {
    std::shared_ptr<QueuedFrameStream2<SDLVideoFrame>> _writer;

    DebugVideoTapSink(void) {}
    ~DebugVideoTapSink(void) {}

    // sink
    // hands out the single writer, or nullptr if it is already taken
    std::shared_ptr<FrameStream2I<SDLVideoFrame>> subscribe(void) override {
        if (_writer) {
            // max 1 (exclusive)
            return nullptr;
        }
        _writer = std::make_shared<QueuedFrameStream2<SDLVideoFrame>>(1, true);
        return _writer;
    }

    // releases the writer iff `sub` is the one we handed out
    bool unsubscribe(const std::shared_ptr<FrameStream2I<SDLVideoFrame>>& sub) override {
        const bool is_our_writer =
            static_cast<bool>(sub) &&
            static_cast<bool>(_writer) &&
            sub == _writer;
        if (is_our_writer) {
            _writer = nullptr;
        }
        return is_our_writer;
    }
};
// Creates the tap's sink entity in the object store so video sources can be
// connected to it via the stream manager.
DebugVideoTap::DebugVideoTap(ObjectStore2& os, StreamManager& sm, TextureUploaderI& tu) : _os(os), _sm(sm), _tu(tu) {
    // post self as video sink
    _tap = {_os.registry(), _os.registry().create()};

    try {
        auto dvts = std::make_unique<DebugVideoTapSink>();
        _tap.emplace<DebugVideoTapSink*>(dvts.get()); // to get our data back
        _tap.emplace<Components::FrameStream2Sink<SDLVideoFrame>>(
            std::move(dvts)
        );
        _tap.emplace<Components::StreamSink>(Components::StreamSink::create<SDLVideoFrame>("DebugVideoTap"));
    } catch (...) {
        // best-effort: if any emplace throws, drop the partially set up entity
        _os.registry().destroy(_tap);
    }
}
// Removes our sink entity from the object store (if construction succeeded).
DebugVideoTap::~DebugVideoTap(void) {
    if (!static_cast<bool>(_tap)) {
        return;
    }
    _os.registry().destroy(_tap);
}
// Draws the tap window: drains frames from our sink into a texture, offers a
// source picker, and displays the (optionally mirrored) image.
// Returns the moving-average frame interval as a redraw-delay hint.
float DebugVideoTap::render(void) {
    if (ImGui::Begin("DebugVideoTap")) {
        { // first pull the latest img from sink and update the texture
            assert(static_cast<bool>(_tap));
            auto& dvtsw = _tap.get<DebugVideoTapSink*>()->_writer;
            if (dvtsw) {
                while (true) {
                    auto new_frame_opt = dvtsw->pop();
                    if (new_frame_opt.has_value()) {
                        // timing: exponential moving average of the frame interval
                        if (_v_last_ts == 0) {
                            _v_last_ts = new_frame_opt.value().timestampUS;
                        } else {
                            auto delta = int64_t(new_frame_opt.value().timestampUS) - int64_t(_v_last_ts);
                            _v_last_ts = new_frame_opt.value().timestampUS;

                            //delta = std::min<int64_t>(delta, 10*1000*1000);

                            if (_v_interval_avg == 0) {
                                _v_interval_avg = delta/1'000'000.f;
                            } else {
                                const float r = 0.2f;
                                _v_interval_avg = _v_interval_avg * (1.f-r) + (delta/1'000'000.f) * r;
                            }
                        }

                        SDL_Surface* new_frame_surf = new_frame_opt.value().surface.get();

                        SDL_Surface* converted_surf = new_frame_surf;
                        if (new_frame_surf->format != SDL_PIXELFORMAT_RGBA32) {
                            // we need to convert
                            // NOTE(review): a failed conversion returns nullptr and the
                            // assert below would dereference it — consider handling
                            converted_surf = SDL_ConvertSurfaceAndColorspace(new_frame_surf, SDL_PIXELFORMAT_RGBA32, nullptr, SDL_COLORSPACE_RGB_DEFAULT, 0);
                            assert(converted_surf->format == SDL_PIXELFORMAT_RGBA32);
                        }

                        SDL_LockSurface(converted_surf);
                        if (_tex == 0 || (int)_tex_w != converted_surf->w || (int)_tex_h != converted_surf->h) {
                            // first frame or size changed: (re)create the texture
                            _tu.destroy(_tex);
                            _tex = _tu.uploadRGBA(
                                static_cast<const uint8_t*>(converted_surf->pixels),
                                converted_surf->w,
                                converted_surf->h,
                                TextureUploaderI::LINEAR,
                                TextureUploaderI::STREAMING
                            );
                            _tex_w = converted_surf->w;
                            _tex_h = converted_surf->h;
                        } else {
                            // same size: update the existing texture in place
                            _tu.updateRGBA(_tex, static_cast<const uint8_t*>(converted_surf->pixels), converted_surf->w * converted_surf->h * 4);
                        }
                        SDL_UnlockSurface(converted_surf);

                        if (new_frame_surf != converted_surf) {
                            // clean up temp
                            SDL_DestroySurface(converted_surf);
                        }
                    } else {
                        break;
                    }
                }
            }
        }

        // list sources dropdown to connect too
        std::string preview_label {"none"};
        if (static_cast<bool>(_selected_src)) {
            preview_label = std::to_string(entt::to_integral(entt::to_entity(_selected_src.entity()))) + " (" + _selected_src.get<Components::StreamSource>().name + ")";
        }
        if (ImGui::BeginCombo("selected source", preview_label.c_str())) {
            if (ImGui::Selectable("none")) {
                switchTo({});
            }

            // only offer sources that produce SDLVideoFrame
            for (const auto& [oc, ss] : _os.registry().view<Components::StreamSource>().each()) {
                if (ss.frame_type_name != entt::type_name<SDLVideoFrame>::value()) {
                    continue;
                }

                std::string label = std::to_string(entt::to_integral(entt::to_entity(oc))) + " (" + ss.name + ")";
                if (ImGui::Selectable(label.c_str())) {
                    switchTo({_os.registry(), oc});
                }
            }

            ImGui::EndCombo();
        }

        //ImGui::SetNextItemWidth(0);
        ImGui::Checkbox("mirror", &_mirror);

        // img here
        if (_tex != 0) {
            ImGui::SameLine();
            ImGui::Text("moving avg interval: %f", _v_interval_avg);
            const float img_w = ImGui::GetContentRegionAvail().x;
            ImGui::Image(
                reinterpret_cast<ImTextureID>(_tex),
                ImVec2{img_w, img_w * float(_tex_h)/_tex_w},
                // mirroring flips the horizontal texture coordinates
                ImVec2{_mirror?1.f:0.f, 0},
                ImVec2{_mirror?0.f:1.f, 1}
            );
        }
    }
    ImGui::End();

    if (_v_interval_avg != 0) {
        return _v_interval_avg;
    } else {
        return 2.f;
    }
}
// Switch the tap to a different video source. Resets the preview texture
// and timing stats, disconnects the old source and connects the new one.
// An empty handle (or a failed connect) leaves the tap without a source.
void DebugVideoTap::switchTo(ObjectHandle o) {
	if (o == _selected_src) {
		std::cerr << "DVT: switch to same ...\n";
		return;
	}

	// drop the current texture and timing statistics
	_tu.destroy(_tex);
	_tex = 0;
	_v_last_ts = 0;
	_v_interval_avg = 0;

	// detach from the previous source, if any
	if (static_cast<bool>(_selected_src)) {
		_sm.disconnect<SDLVideoFrame>(_selected_src, _tap);
	}

	// attach to the new one; on failure (or none given) fall back to "no source"
	const bool connected = static_cast<bool>(o) && _sm.connect<SDLVideoFrame>(o, _tap);
	if (connected) {
		_selected_src = o;
		return;
	}

	std::cerr << "DVT: cleared video source\n";
	_selected_src = {};
}

34
src/debug_video_tap.hpp Normal file
View File

@ -0,0 +1,34 @@
#pragma once
#include <cstdint>
#include <solanaceae/object_store/fwd.hpp>
#include "./stream_manager.hpp"
#include "./texture_uploader.hpp"
// provides a sink and a small window displaying a SDLVideoFrame
class DebugVideoTap {
ObjectStore2& _os; // object store the tap sink and the sources live in
StreamManager& _sm; // used to (dis)connect source <-> tap
TextureUploaderI& _tu; // uploads/updates the preview texture
ObjectHandle _selected_src; // currently connected video source (empty = none)
ObjectHandle _tap; // our sink object that receives the frames
uint64_t _tex {0}; // preview texture handle, 0 = none
uint32_t _tex_w {0}; // current texture width in pixels
uint32_t _tex_h {0}; // current texture height in pixels
bool _mirror {false}; // flip horizontally
uint64_t _v_last_ts {0}; // us
float _v_interval_avg {0.f}; // s
public:
DebugVideoTap(ObjectStore2& os, StreamManager& sm, TextureUploaderI& tu);
~DebugVideoTap(void);
// draws the window; returns the estimated frame interval in seconds
float render(void);
// disconnects the current source and connects to o (empty handle = none)
void switchTo(ObjectHandle o);
};

View File

@ -12,6 +12,9 @@
#include "./start_screen.hpp"
#include "./content/sdl_video_frame_stream2.hpp"
#include "./content/sdl_audio_frame_stream2.hpp"
#include <filesystem>
#include <memory>
#include <iostream>
@ -35,7 +38,7 @@ int main(int argc, char** argv) {
// setup hints
#ifndef __ANDROID__
if (SDL_SetHint(SDL_HINT_VIDEO_ALLOW_SCREENSAVER, "1") != SDL_TRUE) {
if (!SDL_SetHint(SDL_HINT_VIDEO_ALLOW_SCREENSAVER, "1")) {
std::cerr << "Failed to set '" << SDL_HINT_VIDEO_ALLOW_SCREENSAVER << "' to 1\n";
}
#endif
@ -72,10 +75,51 @@ int main(int argc, char** argv) {
std::cout << "SDL Renderer: " << SDL_GetRendererName(renderer.get()) << "\n";
// optionally init audio and camera
if (!SDL_Init(SDL_INIT_AUDIO)) {
std::cerr << "SDL_Init AUDIO failed (" << SDL_GetError() << ")\n";
} else if (false) {
SDLAudioInputDevice aid;
auto reader = aid.subscribe();
auto writer = SDLAudioOutputDeviceDefaultSink{}.subscribe();
for (size_t i = 0; i < 200; i++) {
std::this_thread::sleep_for(std::chrono::milliseconds(10));
auto new_frame_opt = reader->pop();
if (new_frame_opt.has_value()) {
std::cout << "audio frame was seq:" << new_frame_opt.value().seq << " sr:" << new_frame_opt.value().sample_rate << " " << (new_frame_opt.value().isS16()?"S16":"F32") << " l:" << (new_frame_opt.value().isS16()?new_frame_opt.value().getSpan<int16_t>().size:new_frame_opt.value().getSpan<float>().size) << "\n";
writer->push(new_frame_opt.value());
} else {
std::cout << "no audio frame\n";
}
}
aid.unsubscribe(reader);
}
if (!SDL_Init(SDL_INIT_CAMERA)) {
std::cerr << "SDL_Init CAMERA failed (" << SDL_GetError() << ")\n";
} else if (false) { // HACK
std::cerr << "CAMERA initialized\n";
SDLVideoCameraContent vcc;
auto reader = vcc.subscribe();
for (size_t i = 0; i < 20; i++) {
std::this_thread::sleep_for(std::chrono::milliseconds(50));
auto new_frame_opt = reader->pop();
if (new_frame_opt.has_value()) {
std::cout << "video frame was " << new_frame_opt.value().surface->w << "x" << new_frame_opt.value().surface->h << " " << new_frame_opt.value().timestampUS << "us " << new_frame_opt.value().surface->format << "sf\n";
}
}
vcc.unsubscribe(reader);
}
std::cout << "after sdl video stuffery\n";
IMGUI_CHECKVERSION();
ImGui::CreateContext();
// TODO: test android behaviour
// -> its too big, dpi does not take eye-screen-distance into account
float display_scale = SDL_GetWindowDisplayScale(window.get());
if (display_scale < 0.001f) {
// error?

View File

@ -9,6 +9,11 @@
#include <SDL3/SDL.h>
#include "./content/sdl_video_frame_stream2.hpp"
#include "content/audio_stream.hpp"
#include "content/sdl_audio_frame_stream2.hpp"
#include "stream_manager.hpp"
#include <memory>
#include <cmath>
#include <string_view>
@ -19,11 +24,13 @@ MainScreen::MainScreen(SimpleConfigModel&& conf_, SDL_Renderer* renderer_, Theme
rmm(cr),
msnj{cr, {}, {}},
mts(rmm),
sm(os),
tc(save_path, save_password),
tpi(tc.getTox()),
ad(tc),
#if TOMATO_TOX_AV
tav(tc.getTox()),
dtc(os, tav, sdlrtu),
#endif
tcm(cr, tc, tc),
tmm(rmm, cr, tcm, tc, tc),
@ -40,6 +47,8 @@ MainScreen::MainScreen(SimpleConfigModel&& conf_, SDL_Renderer* renderer_, Theme
cg(conf, os, rmm, cr, sdlrtu, contact_tc, msg_tc, theme),
sw(conf),
osui(os),
smui(os, sm),
dvt(os, sm, sdlrtu),
tuiu(tc, conf),
tdch(tpi)
{
@ -136,6 +145,53 @@ MainScreen::MainScreen(SimpleConfigModel&& conf_, SDL_Renderer* renderer_, Theme
}
conf.dump();
{ // add system av devices
if (false) {
ObjectHandle vsrc {os.registry(), os.registry().create()};
try {
vsrc.emplace<Components::FrameStream2Source<SDLVideoFrame>>(
std::make_unique<SDLVideoCameraContent>()
);
vsrc.emplace<Components::StreamSource>(Components::StreamSource::create<SDLVideoFrame>("WebCam"));
os.throwEventConstruct(vsrc);
} catch (...) {
os.registry().destroy(vsrc);
}
}
if (true) { // audio in
ObjectHandle asrc {os.registry(), os.registry().create()};
try {
asrc.emplace<Components::FrameStream2Source<AudioFrame>>(
std::make_unique<SDLAudioInputDevice>()
);
asrc.emplace<Components::StreamSource>(Components::StreamSource::create<AudioFrame>("SDL Audio Default Recording Device"));
os.throwEventConstruct(asrc);
} catch (...) {
os.registry().destroy(asrc);
}
}
{ // audio out
ObjectHandle asink {os.registry(), os.registry().create()};
try {
asink.emplace<Components::FrameStream2Sink<AudioFrame>>(
std::make_unique<SDLAudioOutputDeviceDefaultSink>()
);
asink.emplace<Components::StreamSink>(Components::StreamSink::create<AudioFrame>("SDL Audio Default Playback Device"));
os.throwEventConstruct(asink);
} catch (...) {
os.registry().destroy(asink);
}
}
}
}
MainScreen::~MainScreen(void) {
@ -252,14 +308,19 @@ Screen* MainScreen::render(float time_delta, bool&) {
}
// ACTUALLY NOT IF RENDERED, MOVED LOGIC TO ABOVE
// it might unload textures, so it needs to be done before rendering
const float ctc_interval = contact_tc.update();
const float msgtc_interval = msg_tc.update();
float animation_interval = contact_tc.update();
animation_interval = std::min<float>(animation_interval, msg_tc.update());
const float cg_interval = cg.render(time_delta); // render
sw.render(); // render
osui.render();
smui.render();
animation_interval = std::min<float>(animation_interval, dvt.render());
tuiu.render(); // render
tdch.render(); // render
#if TOMATO_TOX_AV
animation_interval = std::min<float>(animation_interval, dtc.render());
#endif
{ // main window menubar injection
if (ImGui::Begin("tomato")) {
@ -440,8 +501,7 @@ Screen* MainScreen::render(float time_delta, bool&) {
// low delay time window
if (!_window_hidden && _time_since_event < curr_profile.low_delay_window) {
_render_interval = std::min<float>(_render_interval, ctc_interval);
_render_interval = std::min<float>(_render_interval, msgtc_interval);
_render_interval = std::min<float>(_render_interval, animation_interval);
_render_interval = std::clamp(
_render_interval,
@ -450,8 +510,7 @@ Screen* MainScreen::render(float time_delta, bool&) {
);
// mid delay time window
} else if (!_window_hidden && _time_since_event < curr_profile.mid_delay_window) {
_render_interval = std::min<float>(_render_interval, ctc_interval);
_render_interval = std::min<float>(_render_interval, msgtc_interval);
_render_interval = std::min<float>(_render_interval, animation_interval);
_render_interval = std::clamp(
_render_interval,
@ -476,8 +535,16 @@ Screen* MainScreen::render(float time_delta, bool&) {
Screen* MainScreen::tick(float time_delta, bool& quit) {
quit = !tc.iterate(time_delta); // compute
#if TOMATO_TOX_AV
tav.toxavIterate();
const float av_interval = tav.toxavIterationInterval()/1000.f;
dtc.tick(time_delta);
#endif
tcm.iterate(time_delta); // compute
const float sm_interval = sm.tick(time_delta);
const float fo_interval = tffom.tick(time_delta);
tam.iterate(); // compute
@ -510,6 +577,18 @@ Screen* MainScreen::tick(float time_delta, bool& quit) {
fo_interval
);
#if TOMATO_TOX_AV
_min_tick_interval = std::min<float>(
_min_tick_interval,
av_interval
);
#endif
_min_tick_interval = std::min<float>(
_min_tick_interval,
sm_interval
);
//std::cout << "MS: min tick interval: " << _min_tick_interval << "\n";
switch (_compute_perf_mode) {

View File

@ -16,6 +16,8 @@
#include <solanaceae/tox_messages/tox_message_manager.hpp>
#include <solanaceae/tox_messages/tox_transfer_manager.hpp>
#include "./stream_manager.hpp"
#include "./tox_client.hpp"
#include "./auto_dirty.hpp"
@ -30,12 +32,15 @@
#include "./chat_gui4.hpp"
#include "./chat_gui/settings_window.hpp"
#include "./object_store_ui.hpp"
#include "./stream_manager_ui.hpp"
#include "./debug_video_tap.hpp"
#include "./tox_ui_utils.hpp"
#include "./tox_dht_cap_histo.hpp"
#include "./tox_friend_faux_offline_messaging.hpp"
#if TOMATO_TOX_AV
#include "./tox_av.hpp"
#include "./debug_tox_call.hpp"
#endif
#include <string>
@ -58,12 +63,15 @@ struct MainScreen final : public Screen {
MessageSerializerNJ msnj;
MessageTimeSort mts;
StreamManager sm;
ToxEventLogger tel{std::cout};
ToxClient tc;
ToxPrivateImpl tpi;
AutoDirty ad;
#if TOMATO_TOX_AV
ToxAV tav;
DebugToxCall dtc;
#endif
ToxContactModel2 tcm;
ToxMessageManager tmm;
@ -86,6 +94,8 @@ struct MainScreen final : public Screen {
ChatGui4 cg;
SettingsWindow sw;
ObjectStoreUI osui;
StreamManagerUI smui;
DebugVideoTap dvt;
ToxUIUtils tuiu;
ToxDHTCapHisto tdch;

View File

@ -32,7 +32,7 @@ uint64_t SDLRendererTextureUploader::uploadRGBA(const uint8_t* data, uint32_t wi
SDL_UpdateTexture(tex, nullptr, surf->pixels, surf->pitch);
SDL_BlendMode surf_blend_mode = SDL_BLENDMODE_NONE;
if (SDL_GetSurfaceBlendMode(surf, &surf_blend_mode) == 0) {
if (SDL_GetSurfaceBlendMode(surf, &surf_blend_mode)) {
SDL_SetTextureBlendMode(tex, surf_blend_mode);
}

320
src/stream_manager.hpp Normal file
View File

@ -0,0 +1,320 @@
#pragma once
#include <solanaceae/object_store/fwd.hpp>
#include <solanaceae/object_store/object_store.hpp>
#include <entt/core/type_info.hpp>
#include "./content/frame_stream2.hpp"
#include <vector>
#include <memory>
#include <algorithm>
#include <thread>
#include <chrono>
// fwd
class StreamManager;
namespace Components {

// marks an object as a stream source producing frames of type frame_type_name
struct StreamSource {
std::string name; // human readable display name
std::string frame_type_name; // entt type name of the FrameType

// type erased connect; filled in by create<FrameType>() so the ui can
// connect without knowing the concrete frame type
std::function<bool(StreamManager&, Object, Object, bool)> connect_fn;

template<typename FrameType>
static StreamSource create(const std::string& name);
};

// marks an object as a stream sink accepting frames of type frame_type_name
struct StreamSink {
std::string name; // human readable display name
std::string frame_type_name; // entt type name of the FrameType

template<typename FrameType>
static StreamSink create(const std::string& name);
};

// the actual stream implementations, stored type erased behind unique_ptr
template<typename FrameType>
using FrameStream2Source = std::unique_ptr<FrameStream2SourceI<FrameType>>;

template<typename FrameType>
using FrameStream2Sink = std::unique_ptr<FrameStream2SinkI<FrameType>>;

} // Components
// Owns and pumps all active stream connections between source and sink
// objects living in the ObjectStore. Connections are pumped either on the
// main thread (via tick()) or each on their own worker thread.
class StreamManager {
	friend class StreamManagerUI; // TODO: make this go away
	ObjectStore2& _os;

	// one live src->sink link
	struct Connection {
		ObjectHandle src;
		ObjectHandle sink;

		struct Data {
			virtual ~Data(void) {}
		};
		std::unique_ptr<Data> data; // stores reader writer type erased
		std::function<void(Connection&)> pump_fn; // moves frames reader -> writer
		std::function<void(Connection&)> unsubscribe_fn; // detaches reader/writer from the streams

		bool on_main_thread {true};
		std::atomic_bool stop {false}; // disconnect
		std::atomic_bool finished {false}; // disconnect

		// pump thread
		std::thread pump_thread;

		// frame interval counters and estimates

		Connection(void) = default;
		Connection(
			ObjectHandle src_,
			ObjectHandle sink_,
			std::unique_ptr<Data>&& data_,
			std::function<void(Connection&)>&& pump_fn_,
			std::function<void(Connection&)>&& unsubscribe_fn_,
			bool on_main_thread_ = true
		) :
			src(src_),
			sink(sink_),
			data(std::move(data_)),
			pump_fn(std::move(pump_fn_)),
			unsubscribe_fn(std::move(unsubscribe_fn_)),
			on_main_thread(on_main_thread_)
		{
			if (!on_main_thread) {
				// start thread
				pump_thread = std::thread([this](void) {
					while (!stop) {
						pump_fn(*this);
						std::this_thread::sleep_for(std::chrono::milliseconds(5));
					}
					finished = true;
				});
			}
		}
	};
	std::vector<std::unique_ptr<Connection>> _connections;

	public:
	StreamManager(ObjectStore2& os) : _os(os) {}
	virtual ~StreamManager(void) {
		// stop all connections
		for (const auto& con : _connections) {
			con->stop = true;
			if (!con->on_main_thread) {
				con->pump_thread.join(); // we skip the finished check and wait
			}
			con->unsubscribe_fn(*con);
		}
	}

	// stream type is FrameStream2I<FrameType>
	// TODO: improve this design
	// src and sink need to be a FrameStream2MultiStream<FrameType>
	// returns false if the connection already exists, an object is missing
	// a required component, or subscribing to either stream fails
	template<typename FrameType>
	bool connect(Object src, Object sink, bool threaded = true) {
		auto res = std::find_if(
			_connections.cbegin(), _connections.cend(),
			[&](const auto& a) { return a->src == src && a->sink == sink; }
		);
		if (res != _connections.cend()) {
			// already exists
			return false;
		}

		auto h_src = _os.objectHandle(src);
		auto h_sink = _os.objectHandle(sink);
		if (!static_cast<bool>(h_src) || !static_cast<bool>(h_sink)) {
			// an object does not exist
			return false;
		}

		if (!h_src.all_of<Components::FrameStream2Source<FrameType>>()) {
			// src not stream source
			return false;
		}

		if (!h_sink.all_of<Components::FrameStream2Sink<FrameType>>()) {
			// sink not stream sink
			return false;
		}

		auto& src_stream = h_src.get<Components::FrameStream2Source<FrameType>>();
		auto& sink_stream = h_sink.get<Components::FrameStream2Sink<FrameType>>();

		// holds the two sub-stream endpoints, type erased for Connection
		struct inlineData : public Connection::Data {
			virtual ~inlineData(void) {}
			std::shared_ptr<FrameStream2I<FrameType>> reader;
			std::shared_ptr<FrameStream2I<FrameType>> writer;
		};

		auto our_data = std::make_unique<inlineData>();

		our_data->reader = src_stream->subscribe();
		if (!our_data->reader) {
			return false;
		}
		our_data->writer = sink_stream->subscribe();
		if (!our_data->writer) {
			// BUGFIX: don't leak the reader subscription when the sink
			// side fails to subscribe
			src_stream->unsubscribe(our_data->reader);
			return false;
		}

		_connections.push_back(std::make_unique<Connection>(
			h_src,
			h_sink,
			std::move(our_data),
			[](Connection& con) -> void {
				// there might be more stored
				for (size_t i = 0; i < 10; i++) {
					auto new_frame_opt = static_cast<inlineData*>(con.data.get())->reader->pop();
					// TODO: frame interval estimates
					if (new_frame_opt.has_value()) {
						static_cast<inlineData*>(con.data.get())->writer->push(new_frame_opt.value());
					} else {
						break;
					}
				}
			},
			[](Connection& con) -> void {
				// components might already be gone if the object was destroyed
				auto* src_stream_ptr = con.src.try_get<Components::FrameStream2Source<FrameType>>();
				if (src_stream_ptr != nullptr) {
					(*src_stream_ptr)->unsubscribe(static_cast<inlineData*>(con.data.get())->reader);
				}
				auto* sink_stream_ptr = con.sink.try_get<Components::FrameStream2Sink<FrameType>>();
				if (sink_stream_ptr != nullptr) {
					(*sink_stream_ptr)->unsubscribe(static_cast<inlineData*>(con.data.get())->writer);
				}
			},
			!threaded
		));

		return true;
	}

	// type erased connect: looks up the StreamSource/StreamSink components,
	// checks the frame types match and delegates to the typed
	// connect<FrameType>() through the source's connect_fn
	bool connect(Object src, Object sink, bool threaded = true) {
		auto h_src = _os.objectHandle(src);
		auto h_sink = _os.objectHandle(sink);
		if (!static_cast<bool>(h_src) || !static_cast<bool>(h_sink)) {
			// an object does not exist
			return false;
		}

		// get src and sink comps
		if (!h_src.all_of<Components::StreamSource>()) {
			// src not stream source
			return false;
		}

		if (!h_sink.all_of<Components::StreamSink>()) {
			// sink not stream sink
			return false;
		}

		const auto& ssrc = h_src.get<Components::StreamSource>();
		const auto& ssink = h_sink.get<Components::StreamSink>();

		// compare type
		if (ssrc.frame_type_name != ssink.frame_type_name) {
			return false;
		}

		// always fail in debug mode
		assert(static_cast<bool>(ssrc.connect_fn));
		if (!static_cast<bool>(ssrc.connect_fn)) {
			return false;
		}

		// use connect fn from src
		return ssrc.connect_fn(*this, src, sink, threaded);
	}

	// marks the connection for removal; actual teardown happens in tick()
	template<typename StreamType>
	bool disconnect(Object src, Object sink) {
		auto res = std::find_if(
			_connections.cbegin(), _connections.cend(),
			[&](const auto& a) { return a->src == src && a->sink == sink; }
		);
		if (res == _connections.cend()) {
			// not found
			return false;
		}

		// do disconnect
		(*res)->stop = true;

		return true;
	}

	// marks every connection involving o (as src or sink) for removal
	template<typename StreamType>
	bool disconnectAll(Object o) {
		bool succ {false};
		for (const auto& con : _connections) {
			if (con->src == o || con->sink == o) {
				con->stop = true;
				succ = true;
			}
		}

		return succ;
	}

	// do we need the time delta?
	// pumps all main-thread connections and reaps stopped ones;
	// returns the desired tick interval in seconds
	float tick(float) {
		// pump all mainthread connections
		for (auto it = _connections.begin(); it != _connections.end();) {
			auto& con = **it;

			if (!static_cast<bool>(con.src) || !static_cast<bool>(con.sink)) {
				// either side disappeard without disconnectAll
				// TODO: warn/error log
				con.stop = true;
			}

			if (con.on_main_thread) {
				con.pump_fn(con);
			}

			if (con.stop && (con.finished || con.on_main_thread)) {
				if (!con.on_main_thread) {
					assert(con.pump_thread.joinable());
					con.pump_thread.join();
				}
				con.unsubscribe_fn(con);
				it = _connections.erase(it);
			} else {
				it++;
			}
		}

		// return min over intervals instead
		return 0.01f;
	}
};
namespace Components {

	// definitions live down here because they need the complete StreamManager type

	template<typename FrameType>
	StreamSource StreamSource::create(const std::string& name) {
		StreamSource new_src;
		new_src.name = name;
		new_src.frame_type_name = std::string{entt::type_name<FrameType>::value()};
		// type erased trampoline into the typed connect
		new_src.connect_fn = [](StreamManager& sm, Object src_o, Object sink_o, bool threaded) {
			return sm.connect<FrameType>(src_o, sink_o, threaded);
		};
		return new_src;
	}

	template<typename FrameType>
	StreamSink StreamSink::create(const std::string& name) {
		StreamSink new_sink;
		new_sink.name = name;
		new_sink.frame_type_name = std::string{entt::type_name<FrameType>::value()};
		return new_sink;
	}

} // Components

222
src/stream_manager_ui.cpp Normal file
View File

@ -0,0 +1,222 @@
#include "./stream_manager_ui.hpp"
#include <solanaceae/object_store/object_store.hpp>
#include <imgui/imgui.h>
#include <string>
// ctor: only stores the references, no objects are created here
StreamManagerUI::StreamManagerUI(ObjectStore2& os, StreamManager& sm) : _os(os), _sm(sm) {
}
// Renders the stream manager UI: injects a toggle into the main "tomato"
// window's menubar, then (if enabled) draws a window listing stream
// sources, sinks and active connections, with connect/disconnect actions.
void StreamManagerUI::render(void) {
	{ // main window menubar injection
		// assumes the window "tomato" was rendered already by cg
		if (ImGui::Begin("tomato")) {
			if (ImGui::BeginMenuBar()) {
				// TODO: drop all menu sep?
				//ImGui::Separator(); // os already exists (very hacky)
				if (ImGui::BeginMenu("ObjectStore")) {
					// BUGFIX: label typo was "Stream Manger"
					if (ImGui::MenuItem("Stream Manager", nullptr, _show_window)) {
						_show_window = !_show_window;
					}
					ImGui::EndMenu();
				}
				ImGui::EndMenuBar();
			}
		}
		ImGui::End();
	}

	if (!_show_window) {
		return;
	}

	if (ImGui::Begin("StreamManagerUI", &_show_window)) {
		// TODO: node canvas

		// by frametype ??

		if (ImGui::CollapsingHeader("Sources", ImGuiTreeNodeFlags_DefaultOpen)) {
			// list sources
			// NOTE(review): both the sources and the sinks table use the
			// str_id "sources_and_sinks", presumably to share column state;
			// confirm this is intentional
			if (ImGui::BeginTable("sources_and_sinks", 4, ImGuiTableFlags_SizingFixedFit | ImGuiTableFlags_BordersInnerV)) {
				ImGui::TableSetupColumn("id");
				ImGui::TableSetupColumn("name");
				ImGui::TableSetupColumn("##conn");
				ImGui::TableSetupColumn("type");
				ImGui::TableHeadersRow();

				for (const auto& [oc, ss] : _os.registry().view<Components::StreamSource>().each()) {
					//ImGui::Text("src %d (%s)[%s]", entt::to_integral(entt::to_entity(oc)), ss.name.c_str(), ss.frame_type_name.c_str());
					ImGui::PushID(entt::to_integral(oc));

					ImGui::TableNextColumn();
					ImGui::Text("%d", entt::to_integral(entt::to_entity(oc)));

					const auto *ssrc = _os.registry().try_get<Components::StreamSource>(oc);
					ImGui::TableNextColumn();
					ImGui::TextUnformatted(ssrc!=nullptr?ssrc->name.c_str():"none");

					ImGui::TableNextColumn();
					if (ImGui::SmallButton("->")) {
						ImGui::OpenPopup("src_connect");
					}
					if (ImGui::BeginPopup("src_connect")) {
						if (ImGui::BeginMenu("connect to")) {
							// list all sinks with a matching frame type
							for (const auto& [oc_sink, s_sink] : _os.registry().view<Components::StreamSink>().each()) {
								if (s_sink.frame_type_name != ss.frame_type_name) {
									continue;
								}

								ImGui::PushID(entt::to_integral(oc_sink));

								// BUGFIX: this menu lists sinks, label prefix was "src "
								std::string sink_label {"sink "};
								sink_label += std::to_string(entt::to_integral(entt::to_entity(oc_sink)));
								sink_label += " (";
								sink_label += s_sink.name;
								sink_label += ")[";
								sink_label += s_sink.frame_type_name;
								sink_label += "]";
								if (ImGui::MenuItem(sink_label.c_str())) {
									_sm.connect(oc, oc_sink);
								}

								ImGui::PopID();
							}
							ImGui::EndMenu();
						}
						ImGui::EndPopup();
					}

					ImGui::TableNextColumn();
					ImGui::TextUnformatted(ssrc!=nullptr?ssrc->frame_type_name.c_str():"???");

					ImGui::PopID();
				}

				ImGui::EndTable();
			}
		} // sources header

		if (ImGui::CollapsingHeader("Sinks", ImGuiTreeNodeFlags_DefaultOpen)) {
			// list sinks
			if (ImGui::BeginTable("sources_and_sinks", 4, ImGuiTableFlags_SizingFixedFit | ImGuiTableFlags_BordersInnerV)) {
				ImGui::TableSetupColumn("id");
				ImGui::TableSetupColumn("name");
				ImGui::TableSetupColumn("##conn");
				ImGui::TableSetupColumn("type");
				ImGui::TableHeadersRow();

				for (const auto& [oc, ss] : _os.registry().view<Components::StreamSink>().each()) {
					ImGui::PushID(entt::to_integral(oc));

					ImGui::TableNextColumn();
					ImGui::Text("%d", entt::to_integral(entt::to_entity(oc)));

					const auto *ssink = _os.registry().try_get<Components::StreamSink>(oc);
					ImGui::TableNextColumn();
					ImGui::TextUnformatted(ssink!=nullptr?ssink->name.c_str():"none");

					ImGui::TableNextColumn();
					if (ImGui::SmallButton("->")) {
						ImGui::OpenPopup("sink_connect");
					}
					// ImGuiWindowFlags_AlwaysAutoResize | ImGuiWindowFlags_NoTitleBar | ImGuiWindowFlags_NoSavedSettings
					if (ImGui::BeginPopup("sink_connect")) {
						if (ImGui::BeginMenu("connect to")) {
							// list all sources with a matching frame type
							for (const auto& [oc_src, s_src] : _os.registry().view<Components::StreamSource>().each()) {
								if (s_src.frame_type_name != ss.frame_type_name) {
									continue;
								}

								ImGui::PushID(entt::to_integral(oc_src));

								std::string source_label {"src "};
								source_label += std::to_string(entt::to_integral(entt::to_entity(oc_src)));
								source_label += " (";
								source_label += s_src.name;
								source_label += ")[";
								source_label += s_src.frame_type_name;
								source_label += "]";
								if (ImGui::MenuItem(source_label.c_str())) {
									_sm.connect(oc_src, oc);
								}

								ImGui::PopID();
							}
							ImGui::EndMenu();
						}
						ImGui::EndPopup();
					}

					ImGui::TableNextColumn();
					ImGui::TextUnformatted(ssink!=nullptr?ssink->frame_type_name.c_str():"???");

					ImGui::PopID();
				}

				ImGui::EndTable();
			}
		} // sink header

		if (ImGui::CollapsingHeader("Connections", ImGuiTreeNodeFlags_DefaultOpen)) {
			// list connections
			if (ImGui::BeginTable("connections", 6, ImGuiTableFlags_SizingFixedFit | ImGuiTableFlags_BordersInnerV)) {
				ImGui::TableSetupColumn("##id"); // TODO: remove?
				ImGui::TableSetupColumn("##disco");
				ImGui::TableSetupColumn("##qdesc");
				ImGui::TableSetupColumn("from");
				ImGui::TableSetupColumn("to");
				ImGui::TableSetupColumn("type");
				ImGui::TableHeadersRow();

				for (size_t i = 0; i < _sm._connections.size(); i++) {
					const auto& con = _sm._connections[i];
					//ImGui::Text("con %d->%d", entt::to_integral(entt::to_entity(con->src.entity())), entt::to_integral(entt::to_entity(con->sink.entity())));
					ImGui::PushID(i);

					ImGui::TableNextColumn();
					ImGui::Text("%zu", i); // do connections have ids?

					ImGui::TableNextColumn();
					if (ImGui::SmallButton("X")) {
						// request disconnect; reaped by StreamManager::tick()
						con->stop = true;
					}

					ImGui::TableNextColumn();
					ImGui::Text("%d->%d", entt::to_integral(entt::to_entity(con->src.entity())), entt::to_integral(entt::to_entity(con->sink.entity())));

					const auto *ssrc = con->src.try_get<Components::StreamSource>();
					ImGui::TableNextColumn();
					ImGui::TextUnformatted(ssrc!=nullptr?ssrc->name.c_str():"none");

					const auto *ssink = con->sink.try_get<Components::StreamSink>();
					ImGui::TableNextColumn();
					ImGui::TextUnformatted(ssink!=nullptr?ssink->name.c_str():"none");

					ImGui::TableNextColumn();
					ImGui::TextUnformatted(
						(ssrc!=nullptr)?
							ssrc->frame_type_name.c_str():
							(ssink!=nullptr)?
								ssink->frame_type_name.c_str()
								:"???"
					);

					ImGui::PopID();
				}
				ImGui::EndTable();
			}
		} // con header
	}
	ImGui::End();
}

17
src/stream_manager_ui.hpp Normal file
View File

@ -0,0 +1,17 @@
#pragma once
#include <solanaceae/object_store/fwd.hpp>
#include "./stream_manager.hpp"
// small imgui ui for inspecting a StreamManager: lists stream sources,
// sinks and connections and allows connecting/disconnecting them
class StreamManagerUI {
ObjectStore2& _os; // object store holding the stream objects
StreamManager& _sm; // the manager being inspected/modified
bool _show_window {true}; // toggled via the "tomato" window menubar
public:
StreamManagerUI(ObjectStore2& os, StreamManager& sm);
// call once per frame
void render(void);
};

View File

@ -2,14 +2,85 @@
#include <cassert>
#include <cstdint>
#include <iostream>
// https://almogfx.bandcamp.com/track/crushed-w-cassade
// Readable accessors for the TOXAV_FRIEND_CALL_STATE_* bitmask.
struct ToxAVFriendCallState final {
	const uint32_t state {TOXAV_FRIEND_CALL_STATE_NONE};

	[[nodiscard]] bool is_error(void) const { return has(TOXAV_FRIEND_CALL_STATE_ERROR); }
	[[nodiscard]] bool is_finished(void) const { return has(TOXAV_FRIEND_CALL_STATE_FINISHED); }
	[[nodiscard]] bool is_sending_a(void) const { return has(TOXAV_FRIEND_CALL_STATE_SENDING_A); }
	[[nodiscard]] bool is_sending_v(void) const { return has(TOXAV_FRIEND_CALL_STATE_SENDING_V); }
	[[nodiscard]] bool is_accepting_a(void) const { return has(TOXAV_FRIEND_CALL_STATE_ACCEPTING_A); }
	[[nodiscard]] bool is_accepting_v(void) const { return has(TOXAV_FRIEND_CALL_STATE_ACCEPTING_V); }

	private:
	// true if the given flag bit is set
	[[nodiscard]] bool has(uint32_t flag) const { return (state & flag) != 0; }
};
// Creates the toxav instance for an existing Tox session and registers
// every toxav callback, forwarding each to the matching cb_* member
// function via the user_data pointer (this).
ToxAV::ToxAV(Tox* tox) : _tox(tox) {
Toxav_Err_New err_new {TOXAV_ERR_NEW_OK};
_tox_av = toxav_new(_tox, &err_new);
// TODO: throw
assert(err_new == TOXAV_ERR_NEW_OK);

// incoming call request
toxav_callback_call(
_tox_av,
+[](ToxAV*, uint32_t friend_number, bool audio_enabled, bool video_enabled, void *user_data) {
assert(user_data != nullptr);
static_cast<ToxAV*>(user_data)->cb_call(friend_number, audio_enabled, video_enabled);
},
this
);
// call state bitmask change
toxav_callback_call_state(
_tox_av,
+[](ToxAV*, uint32_t friend_number, uint32_t state, void *user_data) {
assert(user_data != nullptr);
static_cast<ToxAV*>(user_data)->cb_call_state(friend_number, state);
},
this
);
// peer suggested audio bitrate change
toxav_callback_audio_bit_rate(
_tox_av,
+[](ToxAV*, uint32_t friend_number, uint32_t audio_bit_rate, void *user_data) {
assert(user_data != nullptr);
static_cast<ToxAV*>(user_data)->cb_audio_bit_rate(friend_number, audio_bit_rate);
},
this
);
// peer suggested video bitrate change
toxav_callback_video_bit_rate(
_tox_av,
+[](ToxAV*, uint32_t friend_number, uint32_t video_bit_rate, void *user_data) {
assert(user_data != nullptr);
static_cast<ToxAV*>(user_data)->cb_video_bit_rate(friend_number, video_bit_rate);
},
this
);
// incoming audio frame
toxav_callback_audio_receive_frame(
_tox_av,
+[](ToxAV*, uint32_t friend_number, const int16_t pcm[], size_t sample_count, uint8_t channels, uint32_t sampling_rate, void *user_data) {
assert(user_data != nullptr);
static_cast<ToxAV*>(user_data)->cb_audio_receive_frame(friend_number, pcm, sample_count, channels, sampling_rate);
},
this
);
// incoming video frame (yuv planes with strides)
toxav_callback_video_receive_frame(
_tox_av,
+[](ToxAV*, uint32_t friend_number,
uint16_t width, uint16_t height,
const uint8_t y[/*! max(width, abs(ystride)) * height */],
const uint8_t u[/*! max(width/2, abs(ustride)) * (height/2) */],
const uint8_t v[/*! max(width/2, abs(vstride)) * (height/2) */],
int32_t ystride, int32_t ustride, int32_t vstride,
void *user_data
) {
assert(user_data != nullptr);
static_cast<ToxAV*>(user_data)->cb_video_receive_frame(friend_number, width, height, y, u, v, ystride, ustride, vstride);
},
this
);
}
// shuts down the toxav instance (the Tox* itself is owned elsewhere)
ToxAV::~ToxAV(void) {
toxav_kill(_tox_av);
}
@ -80,3 +151,101 @@ Toxav_Err_Bit_Rate_Set ToxAV::toxavVideoSetBitRate(uint32_t friend_number, uint3
return err;
}
void ToxAV::cb_call(uint32_t friend_number, bool audio_enabled, bool video_enabled) {
std::cerr << "TOXAV: receiving call f:" << friend_number << " a:" << audio_enabled << " v:" << video_enabled << "\n";
//Toxav_Err_Answer err_answer { TOXAV_ERR_ANSWER_OK };
//toxav_answer(_tox_av, friend_number, 0, 0, &err_answer);
//if (err_answer != TOXAV_ERR_ANSWER_OK) {
// std::cerr << "!!!!!!!! answer failed " << err_answer << "\n";
//}
dispatch(
ToxAV_Event::friend_call,
Events::FriendCall{
friend_number,
audio_enabled,
video_enabled,
}
);
}
void ToxAV::cb_call_state(uint32_t friend_number, uint32_t state) {
//ToxAVFriendCallState w_state{state};
//w_state.is_error();
std::cerr << "TOXAV: call state f:" << friend_number << " s:" << state << "\n";
dispatch(
ToxAV_Event::friend_call_state,
Events::FriendCallState{
friend_number,
state,
}
);
}
void ToxAV::cb_audio_bit_rate(uint32_t friend_number, uint32_t audio_bit_rate) {
std::cerr << "TOXAV: audio bitrate f:" << friend_number << " abr:" << audio_bit_rate << "\n";
dispatch(
ToxAV_Event::friend_audio_bitrate,
Events::FriendAudioBitrate{
friend_number,
audio_bit_rate,
}
);
}
void ToxAV::cb_video_bit_rate(uint32_t friend_number, uint32_t video_bit_rate) {
std::cerr << "TOXAV: video bitrate f:" << friend_number << " vbr:" << video_bit_rate << "\n";
dispatch(
ToxAV_Event::friend_video_bitrate,
Events::FriendVideoBitrate{
friend_number,
video_bit_rate,
}
);
}
void ToxAV::cb_audio_receive_frame(uint32_t friend_number, const int16_t pcm[], size_t sample_count, uint8_t channels, uint32_t sampling_rate) {
//std::cerr << "TOXAV: audio frame f:" << friend_number << " sc:" << sample_count << " ch:" << (int)channels << " sr:" << sampling_rate << "\n";
dispatch(
ToxAV_Event::friend_audio_frame,
Events::FriendAudioFrame{
friend_number,
Span<int16_t>(pcm, sample_count*channels), // TODO: is sample count *ch or /ch?
channels,
sampling_rate,
}
);
}
void ToxAV::cb_video_receive_frame(
uint32_t friend_number,
uint16_t width, uint16_t height,
const uint8_t y[/*! max(width, abs(ystride)) * height */],
const uint8_t u[/*! max(width/2, abs(ustride)) * (height/2) */],
const uint8_t v[/*! max(width/2, abs(vstride)) * (height/2) */],
int32_t ystride, int32_t ustride, int32_t vstride
) {
//std::cerr << "TOXAV: video frame f:" << friend_number << " w:" << width << " h:" << height << "\n";
dispatch(
ToxAV_Event::friend_video_frame,
Events::FriendVideoFrame{
friend_number,
width,
height,
Span<uint8_t>(y, std::max<int64_t>(width, std::abs(ystride)) * height),
Span<uint8_t>(u, std::max<int64_t>(width/2, std::abs(ustride)) * (height/2)),
Span<uint8_t>(v, std::max<int64_t>(width/2, std::abs(vstride)) * (height/2)),
ystride,
ustride,
vstride,
}
);
}

View File

@ -1,15 +1,98 @@
#pragma once
#include <solanaceae/util/span.hpp>
#include <solanaceae/util/event_provider.hpp>
#include <tox/toxav.h>
struct ToxAV {
namespace /*toxav*/ Events {
// incoming call request from a friend
struct FriendCall {
uint32_t friend_number;
bool audio_enabled;
bool video_enabled;
};
// TOXAV_FRIEND_CALL_STATE_* bitmask update for a call
struct FriendCallState {
uint32_t friend_number;
uint32_t state;
};
// peer suggested audio bitrate change
struct FriendAudioBitrate {
uint32_t friend_number;
uint32_t audio_bit_rate;
};
// peer suggested video bitrate change
struct FriendVideoBitrate {
uint32_t friend_number;
uint32_t video_bit_rate;
};
// received audio frame (interleaved pcm)
struct FriendAudioFrame {
uint32_t friend_number;
// non-owning view into toxav's callback buffer;
// only valid for the duration of the event dispatch
Span<int16_t> pcm;
//size_t sample_count;
uint8_t channels;
uint32_t sampling_rate;
};
// received video frame as y/u/v planes with per-plane strides
struct FriendVideoFrame {
uint32_t friend_number;
uint16_t width;
uint16_t height;
//const uint8_t y[[>! max(width, abs(ystride)) * height <]];
//const uint8_t u[[>! max(width/2, abs(ustride)) * (height/2) <]];
//const uint8_t v[[>! max(width/2, abs(vstride)) * (height/2) <]];
// mdspan would be nice here
// bc of the stride, span might be larger than the actual data it contains
// non-owning views; only valid for the duration of the event dispatch
Span<uint8_t> y;
Span<uint8_t> u;
Span<uint8_t> v;
int32_t ystride;
int32_t ustride;
int32_t vstride;
};
} // Event
// event ids used when dispatching through ToxAVEventProviderI;
// order mirrors the Events:: structs above
enum class ToxAV_Event : uint32_t {
friend_call,
friend_call_state,
friend_audio_bitrate,
friend_video_bitrate,
friend_audio_frame,
friend_video_frame,
MAX
};
// subscriber interface: override the onEvent overloads you care about;
// return true to mark the event as handled
struct ToxAVEventI {
using enumType = ToxAV_Event;
virtual ~ToxAVEventI(void) {}
virtual bool onEvent(const Events::FriendCall&) { return false; }
virtual bool onEvent(const Events::FriendCallState&) { return false; }
virtual bool onEvent(const Events::FriendAudioBitrate&) { return false; }
virtual bool onEvent(const Events::FriendVideoBitrate&) { return false; }
virtual bool onEvent(const Events::FriendAudioFrame&) { return false; }
virtual bool onEvent(const Events::FriendVideoFrame&) { return false; }
};
using ToxAVEventProviderI = EventProviderI<ToxAVEventI>;
struct ToxAV : public ToxAVEventProviderI{
Tox* _tox = nullptr;
ToxAV* _tox_av = nullptr;
static constexpr const char* version {"0"};
ToxAV(Tox* tox);
virtual ~ToxAV(void);
// interface
// if iterate is called on a different thread, it will fire events there
uint32_t toxavIterationInterval(void) const;
void toxavIterate(void);
@ -33,5 +116,21 @@ struct ToxAV {
//int32_t toxav_groupchat_disable_av(Tox *tox, uint32_t groupnumber);
//bool toxav_groupchat_av_enabled(Tox *tox, uint32_t groupnumber);
// toxav callbacks
void cb_call(uint32_t friend_number, bool audio_enabled, bool video_enabled);
void cb_call_state(uint32_t friend_number, uint32_t state);
void cb_audio_bit_rate(uint32_t friend_number, uint32_t audio_bit_rate);
void cb_video_bit_rate(uint32_t friend_number, uint32_t video_bit_rate);
void cb_audio_receive_frame(uint32_t friend_number, const int16_t pcm[], size_t sample_count, uint8_t channels, uint32_t sampling_rate);
void cb_video_receive_frame(
uint32_t friend_number,
uint16_t width, uint16_t height,
const uint8_t y[/*! max(width, abs(ystride)) * height */],
const uint8_t u[/*! max(width/2, abs(ustride)) * (height/2) */],
const uint8_t v[/*! max(width/2, abs(vstride)) * (height/2) */],
int32_t ystride, int32_t ustride, int32_t vstride
);
};