big stream progress

- stream manager (with bare bones ui)
- debug video tap
- toxav progress
- toxav debug ui
- some default devices
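A minimal consumer sketch (not part of the diff) of how the new subscribe()/unsubscribe() source/sink interfaces introduced below are meant to be wired together; the types and member functions are taken from this commit, while the header path, function name and the single-threaded polling loop are illustrative assumptions:

// sketch only: pump audio frames from the default SDL capture source to the
// default SDL output sink; real code would run this on its own thread and
// sleep between polls instead of draining once.
#include "./sdl_audio_frame_stream2.hpp" // assumed header name for the types below

void pump_mic_to_speakers(void) {
	SDLAudioInputDeviceDefault mic;           // FrameStream2SourceI<AudioFrame>
	SDLAudioOutputDeviceDefaultSink speakers; // FrameStream2SinkI<AudioFrame>

	auto reader = mic.subscribe();      // queued sub stream fed by the capture thread
	auto writer = speakers.subscribe(); // instance backed by an SDL audio device stream

	// drain whatever is currently queued and hand it to the output device
	while (auto frame = reader->pop()) {
		writer->push(*frame);
	}

	mic.unsubscribe(reader);
	speakers.unsubscribe(writer);
}
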
@@ -33,6 +33,20 @@ struct FrameStream2I {
 	virtual bool push(const FrameType& value) = 0;
 };
 
+template<typename FrameType>
+struct FrameStream2SourceI {
+	virtual ~FrameStream2SourceI(void) {}
+	[[nodiscard]] virtual std::shared_ptr<FrameStream2I<FrameType>> subscribe(void) = 0;
+	virtual bool unsubscribe(const std::shared_ptr<FrameStream2I<FrameType>>& sub) = 0;
+};
+
+template<typename FrameType>
+struct FrameStream2SinkI {
+	virtual ~FrameStream2SinkI(void) {}
+	[[nodiscard]] virtual std::shared_ptr<FrameStream2I<FrameType>> subscribe(void) = 0;
+	virtual bool unsubscribe(const std::shared_ptr<FrameStream2I<FrameType>>& sub) = 0;
+};
+
 // needs count frames queue size
 // having ~1-2sec buffer size is often sufficent
 template<typename FrameType>
@@ -76,33 +90,52 @@ struct QueuedFrameStream2 : public FrameStream2I<FrameType> {
 	}
 };
 
-// implements a stream that pops or pushes to all sub streams
-// you need to mind the direction you intend it to use
+// implements a stream that pushes to all sub streams
+// release all streams before destructing! // TODO: improve lifetime here, maybe some shared semaphore?
 template<typename FrameType, typename SubStreamType = QueuedFrameStream2<FrameType>>
-struct FrameStream2MultiStream : public FrameStream2I<FrameType> {
+struct FrameStream2MultiSource : public FrameStream2SourceI<FrameType>, public FrameStream2I<FrameType> {
 	using sub_stream_type_t = SubStreamType;
 
 	// pointer stability
-	std::vector<std::unique_ptr<SubStreamType>> _sub_streams;
+	std::vector<std::shared_ptr<SubStreamType>> _sub_streams;
 	std::mutex _sub_stream_lock; // accessing the _sub_streams array needs to be exclusive
 	// a simple lock here is ok, since this tends to be a rare operation,
 	// except for the push, which is always on the same thread
 
-	// TODO: forward args instead
-	SubStreamType* aquireSubStream(size_t queue_size = 10, bool lossy = true) {
+	virtual ~FrameStream2MultiSource(void) {}
+
+	//// TODO: forward args instead
+	//SubStreamType* aquireSubStream(size_t queue_size = 10, bool lossy = true) {
+	//	std::lock_guard lg{_sub_stream_lock};
+	//	return _sub_streams.emplace_back(std::make_unique<SubStreamType>(queue_size, lossy)).get();
+	//}
+	std::shared_ptr<FrameStream2I<FrameType>> subscribe(void) override {
+		// TODO: args???
+		size_t queue_size = 10;
+		bool lossy = true;
+
 		std::lock_guard lg{_sub_stream_lock};
-		return _sub_streams.emplace_back(std::make_unique<SubStreamType>(queue_size, lossy)).get();
+		return _sub_streams.emplace_back(std::make_unique<SubStreamType>(queue_size, lossy));
 	}
 
-	void releaseSubStream(SubStreamType* sub) {
+	//void releaseSubStream(SubStreamType* sub) {
+	//	std::lock_guard lg{_sub_stream_lock};
+	//	for (auto it = _sub_streams.begin(); it != _sub_streams.end(); it++) {
+	//		if (it->get() == sub) {
+	//			_sub_streams.erase(it);
+	//			break;
+	//		}
+	//	}
+	//}
+	bool unsubscribe(const std::shared_ptr<FrameStream2I<FrameType>>& sub) override {
 		std::lock_guard lg{_sub_stream_lock};
 		for (auto it = _sub_streams.begin(); it != _sub_streams.end(); it++) {
-			if (it->get() == sub) {
+			if (*it == sub) {
 				_sub_streams.erase(it);
-				break;
+				return true;
 			}
 		}
+		return false; // ?
 	}
 
 	// stream interface
@@ -113,6 +146,7 @@ struct FrameStream2MultiStream : public FrameStream2I<FrameType> {
 	}
 
 	std::optional<FrameType> pop(void) override {
 		// nope
 		assert(false && "this logic is very frame type specific, provide an impl");
 		return std::nullopt;
 	}
@@ -124,7 +124,7 @@ bool SDLAudioOutputDeviceDefaultInstance::push(const AudioFrame& value) {
 		std::cerr << "empty audio frame??\n";
 	}
 
-	if (SDL_PutAudioStreamData(_stream.get(), data.ptr, data.size * sizeof(int16_t)) < 0) {
+	if (!SDL_PutAudioStreamData(_stream.get(), data.ptr, data.size * sizeof(int16_t))) {
 		std::cerr << "put data error\n";
 		return false; // return true?
 	}
@@ -145,26 +145,40 @@ SDLAudioOutputDeviceDefaultInstance::SDLAudioOutputDeviceDefaultInstance(SDLAudi
 SDLAudioOutputDeviceDefaultInstance::~SDLAudioOutputDeviceDefaultInstance(void) {
 }
 
-SDLAudioOutputDeviceDefaultInstance SDLAudioOutputDeviceDefaultFactory::create(void) {
-	SDLAudioOutputDeviceDefaultInstance new_instance;
-
+SDLAudioOutputDeviceDefaultSink::~SDLAudioOutputDeviceDefaultSink(void) {
+	// TODO: pause and close device?
+}
+
+std::shared_ptr<FrameStream2I<AudioFrame>> SDLAudioOutputDeviceDefaultSink::subscribe(void) {
+	auto new_instance = std::make_shared<SDLAudioOutputDeviceDefaultInstance>();
+
 	constexpr SDL_AudioSpec spec = { SDL_AUDIO_S16, 1, 48000 };
 
-	new_instance._stream = {
+	new_instance->_stream = {
 		SDL_OpenAudioDeviceStream(SDL_AUDIO_DEVICE_DEFAULT_PLAYBACK, &spec, nullptr, nullptr),
 		&SDL_DestroyAudioStream
 	};
-	new_instance._last_sample_rate = spec.freq;
-	new_instance._last_channels = spec.channels;
-	new_instance._last_format = spec.format;
+	new_instance->_last_sample_rate = spec.freq;
+	new_instance->_last_channels = spec.channels;
+	new_instance->_last_format = spec.format;
 
-	if (!static_cast<bool>(new_instance._stream)) {
+	if (!static_cast<bool>(new_instance->_stream)) {
 		std::cerr << "SDL open audio device failed!\n";
 		return nullptr;
 	}
 
-	const auto audio_device_id = SDL_GetAudioStreamDevice(new_instance._stream.get());
+	const auto audio_device_id = SDL_GetAudioStreamDevice(new_instance->_stream.get());
 	SDL_ResumeAudioDevice(audio_device_id);
 
 	return new_instance;
 }
+
+bool SDLAudioOutputDeviceDefaultSink::unsubscribe(const std::shared_ptr<FrameStream2I<AudioFrame>>& sub) {
+	if (!sub) {
+		return false;
+	}
+
+	return true;
+}
@@ -11,13 +11,13 @@
 #include <thread>
 
 // we dont have to multicast ourself, because sdl streams and virtual devices already do this, but we do it anyway
-using AudioFrameStream2MultiStream = FrameStream2MultiStream<AudioFrame>;
-using AudioFrameStream2 = AudioFrameStream2MultiStream::sub_stream_type_t; // just use the default for now
+using AudioFrameStream2MultiSource = FrameStream2MultiSource<AudioFrame>;
+using AudioFrameStream2 = AudioFrameStream2MultiSource::sub_stream_type_t; // just use the default for now
 
 // object components?
 
 // source
-struct SDLAudioInputDeviceDefault : protected AudioFrameStream2MultiStream {
+struct SDLAudioInputDeviceDefault : protected AudioFrameStream2MultiSource {
 	std::unique_ptr<SDL_AudioStream, decltype(&SDL_DestroyAudioStream)> _stream;
 
 	std::atomic<bool> _thread_should_quit {false};
@@ -30,12 +30,12 @@ struct SDLAudioInputDeviceDefault : protected AudioFrameStream2MultiStream {
 	// stops the thread and closes the device?
 	~SDLAudioInputDeviceDefault(void);
 
-	using AudioFrameStream2MultiStream::aquireSubStream;
-	using AudioFrameStream2MultiStream::releaseSubStream;
+	using AudioFrameStream2MultiSource::subscribe;
+	using AudioFrameStream2MultiSource::unsubscribe;
 };
 
 // sink
-struct SDLAudioOutputDeviceDefaultInstance : protected AudioFrameStream2I {
+struct SDLAudioOutputDeviceDefaultInstance : public AudioFrameStream2I {
 	std::unique_ptr<SDL_AudioStream, decltype(&SDL_DestroyAudioStream)> _stream;
 
 	uint32_t _last_sample_rate {48'000};
@@ -53,9 +53,12 @@ struct SDLAudioOutputDeviceDefaultInstance : protected AudioFrameStream2I {
 };
 
 // constructs entirely new streams, since sdl handles sync and mixing for us (or should)
-struct SDLAudioOutputDeviceDefaultFactory {
+struct SDLAudioOutputDeviceDefaultSink : public FrameStream2SinkI<AudioFrame> {
 	// TODO: pause device?
 
-	SDLAudioOutputDeviceDefaultInstance create(void);
+	~SDLAudioOutputDeviceDefaultSink(void);
+
+	std::shared_ptr<FrameStream2I<AudioFrame>> subscribe(void) override;
+	bool unsubscribe(const std::shared_ptr<FrameStream2I<AudioFrame>>& sub) override;
 };
 
@@ -1,4 +1,6 @@
 #include "./sdl_video_frame_stream2.hpp"
+#include "SDL3/SDL_camera.h"
+#include "SDL3/SDL_pixels.h"
 
 #include <chrono>
 #include <cstdint>
@@ -6,6 +8,7 @@
 #include <memory>
 #include <thread>
 
+// TODO: move out and create lazy cam for each device
 SDLVideoCameraContent::SDLVideoCameraContent(void) {
 	int devcount {0};
 	//SDL_CameraDeviceID *devices = SDL_GetCameraDevices(&devcount);
@@ -25,7 +28,6 @@ SDLVideoCameraContent::SDLVideoCameraContent(void) {
 
 		int speccount {0};
 		SDL_CameraSpec** specs = SDL_GetCameraSupportedFormats(device, &speccount);
-		//SDL_CameraSpec* specs = SDL_GetCameraSupportedFormats(device, &speccount);
 		if (specs == nullptr) {
 			std::cout << " - no supported spec\n";
 		} else {
@@ -42,22 +44,29 @@ SDLVideoCameraContent::SDLVideoCameraContent(void) {
 			// FORCE a diffrent pixel format
 			//SDL_PIXELFORMAT_RGBA8888,
 			//SDL_PIXELFORMAT_UNKNOWN,
-			SDL_PIXELFORMAT_IYUV,
+			//SDL_PIXELFORMAT_IYUV,
+			SDL_PIXELFORMAT_YUY2,
 
-			SDL_COLORSPACE_SRGB,
 			//SDL_COLORSPACE_UNKNOWN,
+			//SDL_COLORSPACE_SRGB,
+			//SDL_COLORSPACE_SRGB_LINEAR,
+			SDL_COLORSPACE_YUV_DEFAULT,
 
 			//1280, 720,
 			//640, 360,
-			640, 480,
+			//640, 480,
+			696, 392,
 
 			//1, 30
 			30, 1
 		};
 		_camera = {
-			SDL_OpenCamera(devices[0], &spec),
+			//SDL_OpenCamera(devices[devcount-1], &spec),
+			SDL_OpenCamera(devices[0], nullptr),
+			//SDL_OpenCamera(devices[0], &spec),
 			&SDL_CloseCamera
 		};
+		SDL_GetCameraFormat(_camera.get(), &spec);
 	}
 	SDL_free(devices);
 	if (!static_cast<bool>(_camera)) {
@@ -76,8 +85,9 @@ SDLVideoCameraContent::SDLVideoCameraContent(void) {
 
 		SDL_CameraSpec spec;
 		float fps {1.f};
-		if (SDL_GetCameraFormat(_camera.get(), &spec) != 0) {
+		if (!SDL_GetCameraFormat(_camera.get(), &spec)) {
 			// meh
 			throw int(5);
 		} else {
 			fps = float(spec.framerate_numerator)/float(spec.framerate_denominator);
+			std::cout << "camera fps: " << fps << "fps (" << spec.framerate_numerator << "/" << spec.framerate_denominator << ")\n";
@@ -28,25 +28,28 @@ struct SDLVideoFrame {
 	SDLVideoFrame(const SDLVideoFrame& other) {
 		timestampNS = other.timestampNS;
 		if (static_cast<bool>(other.surface)) {
-			// TODO: use SDL_DuplicateSurface()
+			//surface = {
+			//	SDL_CreateSurface(
+			//		other.surface->w,
+			//		other.surface->h,
+			//		other.surface->format
+			//	),
+			//	&SDL_DestroySurface
+			//};
+			//SDL_BlitSurface(other.surface.get(), nullptr, surface.get(), nullptr);
 			surface = {
-				SDL_CreateSurface(
-					other.surface->w,
-					other.surface->h,
-					other.surface->format
-				),
-				&SDL_DestroySurface
+				SDL_DuplicateSurface(other.surface.get()),
+				&SDL_DestroySurface
 			};
-			SDL_BlitSurface(other.surface.get(), nullptr, surface.get(), nullptr);
 		}
 	}
 	SDLVideoFrame& operator=(const SDLVideoFrame& other) = delete;
 };
 
-using SDLVideoFrameStream2MultiStream = FrameStream2MultiStream<SDLVideoFrame>;
-using SDLVideoFrameStream2 = SDLVideoFrameStream2MultiStream::sub_stream_type_t; // just use the default for now
+using SDLVideoFrameStream2MultiSource = FrameStream2MultiSource<SDLVideoFrame>;
+using SDLVideoFrameStream2 = SDLVideoFrameStream2MultiSource::sub_stream_type_t; // just use the default for now
 
-struct SDLVideoCameraContent : protected SDLVideoFrameStream2MultiStream {
+struct SDLVideoCameraContent : public SDLVideoFrameStream2MultiSource {
 	// meh, empty default
 	std::unique_ptr<SDL_Camera, decltype(&SDL_CloseCamera)> _camera {nullptr, &SDL_CloseCamera};
 	std::atomic<bool> _thread_should_quit {false};
@@ -60,7 +63,7 @@ struct SDLVideoCameraContent : protected SDLVideoFrameStream2MultiStream {
 	~SDLVideoCameraContent(void);
 
 	// make only some of writer public
-	using SDLVideoFrameStream2MultiStream::aquireSubStream;
-	using SDLVideoFrameStream2MultiStream::releaseSubStream;
+	using SDLVideoFrameStream2MultiSource::subscribe;
+	using SDLVideoFrameStream2MultiSource::unsubscribe;
 };
 