Teaspeak-Server/server/src/client/web/VoiceBridge.cpp

#include <thread>
#include <chrono>
#include <memory>
#include <cstdarg>
#include <cstdio>
#include <cstring>
#include <dlfcn.h>

#include <misc/std_unique_ptr.h>
#include <log/LogUtils.h>
#include <misc/endianness.h>
#include "WebClient.h"

using namespace std;
using namespace ts;
using namespace ts::server;
using namespace ts::web;
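
/* Forwards log output of the pipes/WebRTC stack into the TeaSpeak logger.
 * The variadic arguments are rendered printf-style into a fixed stack buffer. */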
void VoiceBridge::callback_log(void* ptr, pipes::Logger::LogLevel level, const std::string& name, const std::string& message, ...) {
    constexpr size_t max_length = 1024 * 8;
    char buffer[max_length];

    va_list args;
    va_start(args, message);
    vsnprintf(buffer, max_length, message.c_str(), args);
    va_end(args);

    auto bridge = (VoiceBridge*) ptr;
    debugMessage(LOG_GENERAL, "{}[WebRTC][{}][{}] {}", CLIENT_STR_LOG_PREFIX_(bridge->owner()), level, name, string(buffer));
}
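
/* Lazily resolves the GLib main loop API via dlsym() and runs a single, shared
 * GMainLoop on a detached thread. The loop is handed to the peer connection as
 * its event loop (presumably consumed by the ICE/libnice layer). If the symbols
 * are unavailable, no loop is spawned and a warning is logged instead. */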
namespace gioloop {
    void* main_loop_;
    void*(*g_main_loop_new)(void* /* context */, bool /* is_running */);
    void(*g_main_loop_run)(void* /* loop */);
    void(*g_main_loop_unref)(void* /* loop */);
    void*(*g_main_loop_ref)(void* /* loop */);

    bool initialized{false};
    void initialize() {
        if(initialized) return;
        initialized = true;

        g_main_loop_new = (decltype(g_main_loop_new)) dlsym(nullptr, "g_main_loop_new");
        g_main_loop_run = (decltype(g_main_loop_run)) dlsym(nullptr, "g_main_loop_run");
        g_main_loop_ref = (decltype(g_main_loop_ref)) dlsym(nullptr, "g_main_loop_ref");
        g_main_loop_unref = (decltype(g_main_loop_unref)) dlsym(nullptr, "g_main_loop_unref");

        if(!g_main_loop_run || !g_main_loop_new || !g_main_loop_ref || !g_main_loop_unref) {
            logWarning(LOG_INSTANCE, "Missing g_main_loop_new, g_main_loop_run, g_main_loop_ref or g_main_loop_unref functions. Could not spawn main loop.");
            g_main_loop_run = nullptr;
            g_main_loop_new = nullptr;
            return;
        }

        main_loop_ = g_main_loop_new(nullptr, false);
        if(!main_loop_) {
            logError(LOG_INSTANCE, "Failed to spawn new event loop for the web client.");
            return;
        }

        std::thread([]{
            g_main_loop_run(main_loop_);
        }).detach();
    }

    std::shared_ptr<GMainLoop> loop() {
        return std::shared_ptr<GMainLoop>{(GMainLoop*) g_main_loop_ref(main_loop_), g_main_loop_unref};
    }
}
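
/* Builds the peer connection configuration for this client from the web config
 * (ICE port range, optional STUN server, allowed transports, UPnP) and attaches
 * the shared GLib event loop as well as the log forwarder above. */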
VoiceBridge::VoiceBridge(const shared_ptr<WebClient>& owner) : _owner(owner) {
    auto config = make_shared<rtc::PeerConnection::Config>();
    config->nice_config = make_shared<rtc::NiceWrapper::Config>();
    config->nice_config->ice_port_range = {config::web::webrtc_port_min, config::web::webrtc_port_max};
    if(config::web::stun_enabled)
        config->nice_config->stun_server = { config::web::stun_host, config::web::stun_port };
    config->nice_config->allow_ice_udp = config::web::udp_enabled;
    config->nice_config->allow_ice_tcp = config::web::tcp_enabled;
    config->nice_config->use_upnp = config::web::enable_upnp;

    gioloop::initialize();
    config->event_loop = gioloop::loop();
    /*
    config->nice_config->main_loop = std::shared_ptr<GMainLoop>(g_main_loop_new(nullptr, false), g_main_loop_unref);
    std::thread(g_main_loop_run, config->nice_config->main_loop.get()).detach();
    */

    config->logger = make_shared<pipes::Logger>();
    config->logger->callback_log = VoiceBridge::callback_log;
    config->logger->callback_argument = this;

    //config->sctp.local_port = 5202; //Firefox doesn't support a different port :D
    this->connection = make_unique<rtc::PeerConnection>(config);
}
VoiceBridge::~VoiceBridge() {
    __asm__("nop");
}
int VoiceBridge::server_id() {
    auto locked = this->_owner.lock();
    return locked ? locked->getServerId() : 0;
}

std::shared_ptr<server::WebClient> VoiceBridge::owner() {
    return this->_owner.lock();
}
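
/* Initializes the underlying peer connection and wires up its callbacks:
 * local ICE candidates, newly negotiated streams and setup failures. */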
bool VoiceBridge::initialize(std::string &error) {
    if(!this->connection->initialize(error)) return false;

    this->connection->callback_ice_candidate = [&](const rtc::IceCandidate& candidate) {
        if(!candidate.is_finished_candidate()) {
            if(auto callback{this->callback_ice_candidate}; callback)
                callback(candidate);
        } else {
            if(auto callback{this->callback_ice_candidate_finished}; callback)
                callback(candidate.sdpMid, candidate.sdpMLineIndex);
        }
    };
    this->connection->callback_new_stream = [&](const std::shared_ptr<rtc::Channel> &channel) { this->handle_media_stream(channel); }; //bind(&VoiceBridge::handle_media_stream, this, placeholders::_1); => crash
    this->connection->callback_setup_fail = [&](rtc::PeerConnection::ConnectionComponent comp, const std::string& reason) {
        debugMessage(this->server_id(), "{} WebRTC setup failed! Component {} ({})", CLIENT_STR_LOG_PREFIX_(this->owner()), comp, reason);
        if(this->callback_failed)
            this->callback_failed();
    };
    return true;
}
bool VoiceBridge::parse_offer(const std::string &sdp) {
    this->offer_timestamp = chrono::system_clock::now();

    string error;
    return this->connection->apply_offer(error, sdp);
}

int VoiceBridge::apply_ice(const std::deque<std::shared_ptr<rtc::IceCandidate>>& candidates) {
    return this->connection->apply_ice_candidates(candidates);
}

void VoiceBridge::remote_ice_finished() {
    this->connection->remote_candidates_finished();
}

std::string VoiceBridge::generate_answer() {
    return this->connection->generate_answer(false);
}
void VoiceBridge::execute_tick() {
    if(!this->voice_channel_) {
        /* Abort the setup when no voice channel has been established within 20 seconds after the offer. */
        if(this->offer_timestamp.time_since_epoch().count() > 0 && this->offer_timestamp + chrono::seconds{20} < chrono::system_clock::now()) {
            this->offer_timestamp = chrono::system_clock::time_point();
            this->connection->callback_setup_fail(rtc::PeerConnection::ConnectionComponent::BASE, "setup timeout");
        }
    }
}
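
/* Invoked for every stream announced in the client's SDP offer. Application
 * channels carry the data channels handled below; audio channels are accepted
 * when they offer Opus. The first audio stream is treated as the voice stream,
 * the second one as the whisper stream. */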
void VoiceBridge::handle_media_stream(const std::shared_ptr<rtc::Channel> &undefined_stream) {
    if(undefined_stream->type() == rtc::CHANTYPE_APPLICATION) {
        auto stream = dynamic_pointer_cast<rtc::ApplicationChannel>(undefined_stream);
        if(!stream) return;

        stream->callback_datachannel_new = [&](const std::shared_ptr<rtc::DataChannel> &channel) { this->handle_data_channel(channel); }; //bind(&VoiceBridge::handle_data_channel, this, placeholders::_1); => may crash?
    } else if(undefined_stream->type() == rtc::CHANTYPE_AUDIO) {
        auto stream = dynamic_pointer_cast<rtc::AudioChannel>(undefined_stream);
        if(!stream) return;

        logTrace(this->server_id(), "Audio channel extensions:");
        for(const auto& ex : stream->list_extensions()) {
            logTrace(this->server_id(), " - {}: {}", ex->id, ex->name);
        }
        stream->register_local_extension("urn:ietf:params:rtp-hdrext:ssrc-audio-level");

        /* Only accept the Opus codec offered by the client. */
        for(const auto& codec : stream->list_codecs()) {
            if(codec->type == rtc::codec::Codec::OPUS) {
                codec->accepted = true;
                break;
            }
        }

        if(!this->incoming_voice_channel_.lock()) {
            debugMessage(this->server_id(), "Received the client's voice audio stream.");
            this->incoming_voice_channel_ = stream;
            stream->incoming_data_handler = [&](const std::shared_ptr<rtc::MediaChannel> &channel, const pipes::buffer_view &data, size_t payload_offset) {
                this->handle_audio_voice_data(channel, data, payload_offset);
            };
        } else if(!this->incoming_whisper_channel_.lock()) {
            debugMessage(this->server_id(), "Received the client's whisper audio stream.");
            this->incoming_whisper_channel_ = stream;
            stream->incoming_data_handler = [&](const std::shared_ptr<rtc::MediaChannel> &channel, const pipes::buffer_view &data, size_t payload_offset) {
                this->handle_audio_voice_whisper_data(channel, data, payload_offset);
            };
        } else {
            debugMessage(this->server_id(), "The client's SDP offer contains more than two voice channels.");
        }
    } else {
        logError(this->server_id(), "Got offer for an unknown channel of type {}", undefined_stream->type());
    }
}
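
/* Called once the client has opened a WebRTC data channel. "main"/"voice"
 * carries the regular voice packets, "voice-whisper" the whisper packets.
 * The leading byte is forwarded as a flag together with the remaining payload. */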
void VoiceBridge::handle_data_channel(const std::shared_ptr<rtc::DataChannel> &channel) {
    if(channel->lable() == "main" || channel->lable() == "voice") {
        this->voice_channel_ = channel;
        debugMessage(this->server_id(), "{} Got voice channel!", CLIENT_STR_LOG_PREFIX_(this->owner()));
        this->callback_initialized();

        weak_ptr<rtc::DataChannel> weak_channel = channel;
        channel->callback_binary = [&, weak_channel](const pipes::buffer_view& buffer) {
            if(buffer.length() < 2)
                return;

            this->callback_voice_data(buffer.view(2), buffer[0] == 1);
        };
        channel->callback_close = [&, weak_channel] {
            auto channel_ref = weak_channel.lock();
            if(channel_ref == this->voice_channel_) {
                this->voice_channel_ = nullptr;
                //TODO may callback?
                debugMessage(this->server_id(), "{} Voice channel disconnected!", CLIENT_STR_LOG_PREFIX_(this->owner()));
            }
        };
    } else if(channel->lable() == "voice-whisper") {
        this->voice_whisper_channel_ = channel;
        debugMessage(this->server_id(), "{} Got voice whisper channel", CLIENT_STR_LOG_PREFIX_(this->owner()));

        weak_ptr<rtc::DataChannel> weak_channel = channel;
        channel->callback_binary = [&, weak_channel](const pipes::buffer_view& buffer) {
            if(buffer.length() < 1)
                return;

            this->callback_voice_whisper_data(buffer.view(1), buffer[0] == 1);
        };
        channel->callback_close = [&, weak_channel] {
            auto channel_ref = weak_channel.lock();
            if(channel_ref == this->voice_whisper_channel_) {
                this->voice_whisper_channel_ = nullptr;
                debugMessage(this->server_id(), "{} Voice whisper channel has been closed.", CLIENT_STR_LOG_PREFIX_(this->owner()));
            }
        };
    }
}
void VoiceBridge::handle_audio_voice_data(const std::shared_ptr<rtc::MediaChannel> &channel, const pipes::buffer_view &data, size_t payload_offset) {
    if(channel->codec->type != rtc::codec::Codec::OPUS) {
        //debugMessage(this->server_id(), "{} Got unknown codec ({})!", CLIENT_STR_LOG_PREFIX_(this->owner()), channel->codec->type);
        return;
    }

    this->handle_audio_voice_x_data(&this->voice_state, data, payload_offset);
}

void VoiceBridge::handle_audio_voice_whisper_data(const std::shared_ptr<rtc::MediaChannel> &channel, const pipes::buffer_view &data, size_t payload_offset) {
    if(channel->codec->type != rtc::codec::Codec::OPUS) {
        return;
    }

    this->handle_audio_voice_x_data(&this->whisper_state, data, payload_offset);
}
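
/* Repackages an incoming RTP payload into the internal voice packet layout:
 * a big-endian 16 bit sequence number followed by the codec byte (5, presumably
 * Opus Music) and the raw Opus payload. When the RTP "ssrc-audio-level" extension
 * reports silence (level 127, i.e. -127 dBov), a single header-only packet is
 * emitted once instead of forwarding further data. */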
void VoiceBridge::handle_audio_voice_x_data(VoiceStateData *state, const pipes::buffer_view &data, size_t payload_offset) {
    bool is_silence{false};

    auto audio_channel = state->channel.lock();
    if(!audio_channel) {
        return;
    }

    /* Evaluate the RTP "ssrc-audio-level" header extension; a level of 127 (-127 dBov) means silence. */
    for(const auto& ext : audio_channel->list_extensions(rtc::direction::incoming)) {
        if(ext->name == "urn:ietf:params:rtp-hdrext:ssrc-audio-level") {
            int level;
            if(rtc::protocol::rtp_header_extension_parse_audio_level(data, ext->id, &level) == 0) {
                //debugMessage(this->server_id(), "Audio level: {}", level);
                if(level == 127) {
                    is_silence = true;
                    break;
                }
            }
            break;
        }
    }

    if(is_silence) {
        if(state->muted) {
            /* the muted state is already set */
            return;
        }
        state->muted = true;

        /* Emit a single header-only packet to signal the end of the stream. */
        auto target_buffer = buffer::allocate_buffer(3);
        le2be16(state->sequence_packet_id++, (char*) target_buffer.data_ptr());
        target_buffer[2] = 5;

        state->callback(target_buffer, false);
    } else {
        if(state->muted) {
            state->muted = false;
        }

        auto target_buffer = buffer::allocate_buffer(data.length() - payload_offset + 3);
        le2be16(state->sequence_packet_id++, (char*) target_buffer.data_ptr());
        target_buffer[2] = 5;
        memcpy(&target_buffer[3], &data[payload_offset], data.length() - payload_offset);

        state->callback(target_buffer, state->sequence_packet_id < 7);
    }
}