Update to 1.4.11

This commit is contained in:
WolverinDEV 2020-09-24 22:06:52 +02:00
parent a8b1299c4e
commit 4a49d36ece
19 changed files with 783 additions and 197 deletions

2
github

@ -1 +1 @@
Subproject commit 27579459d13317200cb9d49f2908b92c22031b11
Subproject commit 4fa1ab237cd12b53de46fe82d31c942513c619bd

View File

@ -122,5 +122,5 @@ function deploy_client() {
#install_npm
#compile_scripts
#compile_native
package_client
#deploy_client
#package_client
deploy_client

View File

@ -2,8 +2,8 @@ import {createErrorModal} from "tc-shared/ui/elements/Modal";
import * as electron from "electron";
import {tr, tra} from "tc-shared/i18n/localize";
import {server_connections} from "tc-shared/ui/frames/connection_handlers";
import {handle_connect_request} from "tc-shared/main";
import {server_connections} from "tc-shared/ConnectionManager";
electron.ipcRenderer.on('connect', (event, url) => handle_native_connect_request(url));
@ -25,8 +25,9 @@ function handle_native_connect_request(url_string: string) {
}
let connection = server_connections.active_connection();
if(connection.connected)
if(connection.connected) {
connection = server_connections.spawn_server_connection();
}
handle_connect_request({
address: url.host + ":" + (url.searchParams.has("port") ? url.searchParams.get("port") : "9987"),

View File

@ -1,8 +1,8 @@
import {Settings, settings} from "tc-shared/settings";
import {server_connections} from "tc-shared/ui/frames/connection_handlers";
import {tr} from "tc-shared/i18n/localize";
import {Arguments, process_args} from "../shared/process-arguments";
import {remote} from "electron";
import {server_connections} from "tc-shared/ConnectionManager";
window.onbeforeunload = event => {
if(settings.static(Settings.KEY_DISABLE_UNLOAD_DIALOG))

View File

@ -0,0 +1,175 @@
import * as loader from "tc-loader";
import {Stage} from "tc-loader";
import {MenuItemConstructorOptions, NativeImage, remote, Tray} from "electron";
import {clientIconClassToImage} from "./IconHelper";
import {ClientIcon} from "svg-sprites/client-icons";
import {ConnectionHandler, ConnectionState} from "tc-shared/ConnectionHandler";
import {server_connections} from "tc-shared/ConnectionManager";
import {tr} from "tc-shared/i18n/localize";
import {global_client_actions} from "tc-shared/events/GlobalEvents";
const kTrayGlobalUniqueId = "9ccaf91c-a54f-45e0-b061-c50c9f7864ca";
let tray: Tray;
let eventListener = [];
let defaultIcon: NativeImage;
async function initializeTray() {
    /* Build the default tray icon from the client sprite sheet and convert it
       into a PNG-backed NativeImage before handing it to the remote process. */
    const logoImage = clientIconClassToImage(ClientIcon.TeaspeakLogo);
    defaultIcon = remote.nativeImage.createFromBuffer(logoImage.toPNG());

    tray = new remote.Tray(defaultIcon);
    tray.setTitle("TeaSpeak - Client");
    tray.on("double-click", () => remote.getCurrentWindow().show());

    /* Re-render the tray whenever the active handler changes and render the
       "no connection" state right away. */
    server_connections.events().on("notify_active_handler_changed", event => initializeConnection(event.newHandler));
    initializeConnection(undefined);
}
/*
 * (Re)binds the tray icon, tooltip and context menu to the given connection handler.
 * Passing undefined renders the disconnected default state. Invoked once at startup
 * and again whenever the active connection handler changes.
 */
function initializeConnection(connection: ConnectionHandler) {
/* Dispose listeners registered for the previously active handler.
   NOTE(review): entries are presumably unsubscribe callbacks returned by the
   event registries' .on() — confirm against the Registry implementation. */
eventListener.forEach(callback => callback());
eventListener = [];
/* Cached state, kept up to date by the event listeners registered below. */
let showClientStatus = connection?.connection_state === ConnectionState.CONNECTED;
let clientStatusIcon: ClientIcon = connection?.getClient().getStatusIcon();
/* Refresh the tray image and tooltip from the cached state. */
const updateTray = () => {
if(showClientStatus) {
/* Sprite-derived image is converted to a PNG-backed image for the remote Tray. */
let icon = clientIconClassToImage(clientStatusIcon);
icon = remote.nativeImage.createFromBuffer(icon.toPNG());
tray.setImage(icon);
tray.setToolTip("TeaSpeak - Client\nConnected to " + connection.channelTree.server.properties.virtualserver_name);
} else {
tray.setImage(defaultIcon);
tray.setToolTip("TeaSpeak - Client");
}
}
/* Rebuild the full context menu to reflect the current connection/mute state. */
const updateContextMenu = () => {
let items: MenuItemConstructorOptions[] = [];
items.push(
/* NOTE(review): "seperator" is a typo, but separator items don't display their
   label, so it is harmless — confirm before normalizing. */
{ label: tr("Show TeaClient"), type: "normal", icon: defaultIcon, click: () => remote.getCurrentWindow().show() },
{ label: "seperator", type: "separator" },
);
items.push(
{
label: tr("Connect to server"),
type: "normal",
icon: clientIconClassToImage(ClientIcon.Connect),
click: () => {
/* Open in a new tab when the current handler is already connected. */
global_client_actions.fire("action_open_window_connect", { newTab: connection?.connected });
remote.getCurrentWindow().show();
}
},
{
label: tr("Disconnect from current server"),
type: "normal",
icon: clientIconClassToImage(ClientIcon.Disconnect),
click: () => connection.disconnectFromServer(),
enabled: connection?.connected
},
{ label: "seperator", type: "separator" },
)
/* Microphone/speaker toggles are only offered while a handler exists. */
if(connection) {
if(connection.isMicrophoneDisabled()) {
items.push({
label: tr("Enable microphone"),
type: "normal",
icon: clientIconClassToImage(ClientIcon.ActivateMicrophone),
checked: true,
click: () => connection.setMicrophoneMuted(false)
});
} else if(connection.isMicrophoneMuted()) {
items.push({
label: tr("Unmute microphone"),
type: "normal",
icon: clientIconClassToImage(ClientIcon.InputMuted),
checked: true,
click: () => {
/* Unmute and reacquire the input device; result intentionally ignored. */
connection.setMicrophoneMuted(false);
connection.acquireInputHardware().then(() => {});
}
});
} else {
items.push({
label: tr("Mute microphone"),
type: "normal",
icon: clientIconClassToImage(ClientIcon.InputMuted),
checked: false,
click: () => connection.setMicrophoneMuted(true)
});
}
if(connection.isSpeakerMuted()) {
items.push({
label: tr("Unmute speaker/headphones"),
type: "normal",
icon: clientIconClassToImage(ClientIcon.OutputMuted),
checked: true,
click: () => connection.setSpeakerMuted(false)
});
} else {
items.push({
label: tr("Mute speaker/headphones"),
type: "normal",
icon: clientIconClassToImage(ClientIcon.OutputMuted),
checked: true,
click: () => connection.setSpeakerMuted(false)
});
}
items.push(
{ label: "seperator", type: "separator" }
);
}
items.push(
{ label: tr("Quit"), type: "normal", icon: clientIconClassToImage(ClientIcon.CloseButton), click: () => remote.getCurrentWindow().close() }
);
tray.setContextMenu(remote.Menu.buildFromTemplate(items));
};
if(connection) {
/* Tooltip shows the server name, so track server property updates. */
eventListener.push(connection.channelTree.server.events.on("notify_properties_updated", event => {
if("virtualserver_name" in event.updated_properties) {
updateTray();
}
}));
/* Connection state drives both the tray icon and the menu's enabled items. */
eventListener.push(connection.events().on("notify_connection_state_changed", event => {
showClientStatus = event.new_state === ConnectionState.CONNECTED;
updateTray();
updateContextMenu();
}));
/* Own client's status icon (talking/muted/away) mirrors into the tray image. */
eventListener.push(connection.getClient().events.on("notify_status_icon_changed", event => {
clientStatusIcon = event.newIcon;
updateTray();
}));
/* Away/mute state changes only affect the context menu entries. */
eventListener.push(connection.events().on("notify_state_updated", event => {
switch (event.state) {
case "away":
case "microphone":
case "speaker":
updateContextMenu();
break;
}
}));
}
/* Render the initial state for this handler. */
updateContextMenu();
updateTray();
}
/* Create the tray icon while the JavaScript side is initializing. */
loader.register_task(Stage.JAVASCRIPT_INITIALIZING, {
name: "tray bar",
function: initializeTray,
priority: 10
});
/* Destroy the tray on unload; NOTE(review): presumably avoids a stale icon
   surviving a window reload — confirm. */
window.addEventListener("unload", () => tray.destroy());

View File

@ -1,5 +1,6 @@
import {
AbstractInput,
FilterMode,
InputConsumer,
InputConsumerType,
InputEvents,
@ -14,6 +15,7 @@ import {Filter, FilterType, FilterTypeClass} from "tc-shared/voice/Filter";
import {NativeFilter, NStateFilter, NThresholdFilter, NVoiceLevelFilter} from "./AudioFilter";
import {IDevice} from "tc-shared/audio/recorder";
import {LogCategory, logWarn} from "tc-shared/log";
import NativeFilterMode = audio.record.FilterMode;
export class NativeInput implements AbstractInput {
readonly events: Registry<InputEvents>;
@ -90,8 +92,9 @@ export class NativeInput implements AbstractInput {
}
async setDeviceId(device: string | undefined): Promise<void> {
if(this.deviceId === device)
if(this.deviceId === device) {
return;
}
this.deviceId = device;
await this.stop();
@ -160,8 +163,10 @@ export class NativeInput implements AbstractInput {
}
async setConsumer(consumer: InputConsumer): Promise<void> {
if(typeof(consumer) !== "undefined")
if(typeof(consumer) !== "undefined") {
throw "we only support native consumers!"; // TODO: May create a general wrapper?
}
return;
}
@ -172,6 +177,40 @@ export class NativeInput implements AbstractInput {
getVolume(): number {
return this.nativeHandle.get_volume();
}
getFilterMode(): FilterMode {
    /* Translate the native enum into the shared FilterMode.
       Unknown native values deliberately fall back to Filter. */
    const nativeMode = this.nativeConsumer.get_filter_mode();
    if(nativeMode === NativeFilterMode.Block) {
        return FilterMode.Block;
    } else if(nativeMode === NativeFilterMode.Bypass) {
        return FilterMode.Bypass;
    } else {
        return FilterMode.Filter;
    }
}
setFilterMode(mode: FilterMode) {
    /* Map the shared FilterMode onto the native enum before handing it down.
       An unrecognized mode leaves the value undefined, matching the original switch. */
    let nativeMode: NativeFilterMode;
    if(mode === FilterMode.Filter) {
        nativeMode = NativeFilterMode.Filter;
    } else if(mode === FilterMode.Bypass) {
        nativeMode = NativeFilterMode.Bypass;
    } else if(mode === FilterMode.Block) {
        nativeMode = NativeFilterMode.Block;
    }
    this.nativeConsumer.set_filter_mode(nativeMode);
}
}
export class NativeLevelMeter implements LevelMeter {
@ -236,11 +275,11 @@ export class NativeLevelMeter implements LevelMeter {
this._filter = undefined;
}
device(): IDevice {
getDevice(): IDevice {
return this._device;
}
set_observer(callback: (value: number) => any) {
setObserver(callback: (value: number) => any) {
this._callback = callback;
}
}

View File

@ -17,12 +17,12 @@ import {
} from "tc-native/connection";
import {ConnectionCommandHandler} from "tc-shared/connection/CommandHandler";
import {HandshakeHandler} from "tc-shared/connection/HandshakeHandler";
import {ServerAddress} from "tc-shared/ui/server";
import {TeaSpeakHandshakeHandler} from "tc-shared/profiles/identities/TeamSpeakIdentity";
import {VoiceConnection} from "./VoiceConnection";
import {NativeVoiceConnectionWrapper} from "./VoiceConnection";
import {AbstractVoiceConnection} from "tc-shared/connection/VoiceConnection";
import {LogCategory, logDebug, logWarn} from "tc-shared/log";
import {ErrorCode} from "tc-shared/connection/ErrorCode";
import {ServerAddress} from "tc-shared/tree/Server";
interface ErrorCodeListener {
callback: (result: CommandResult) => void;
@ -166,7 +166,7 @@ class ErrorCommandHandler extends AbstractCommandHandler {
export class ServerConnection extends AbstractServerConnection {
private _native_handle: NativeServerConnection;
private readonly _voice_connection: VoiceConnection;
private readonly _voice_connection: NativeVoiceConnectionWrapper;
private _do_teamspeak: boolean;
@ -204,10 +204,9 @@ export class ServerConnection extends AbstractServerConnection {
arguments: args
});
};
this._voice_connection = new VoiceConnection(this, this._native_handle._voice_connection);
this._voice_connection = new NativeVoiceConnectionWrapper(this, this._native_handle._voice_connection);
this.command_helper.initialize();
this._voice_connection.setup();
}
native_handle() : NativeServerConnection {

View File

@ -1,84 +1,173 @@
import {ServerConnection} from "./ServerConnection";
import {NativeVoiceConnection} from "tc-native/connection";
import {RecorderProfile} from "tc-shared/voice/RecorderProfile";
import {tr} from "tc-shared/i18n/localize";
import {LogCategory} from "tc-shared/log";
import * as log from "tc-shared/log";
import {
AbstractVoiceConnection,
LatencySettings,
VoiceClient,
VoiceConnectionStatus
VoiceConnectionStatus,
WhisperSessionInitializer
} from "tc-shared/connection/VoiceConnection";
import {RecorderProfile} from "tc-shared/voice/RecorderProfile";
import {NativeVoiceClient, NativeVoiceConnection, PlayerState} from "tc-native/connection";
import {ServerConnection} from "./ServerConnection";
import {VoiceClient} from "tc-shared/voice/VoiceClient";
import {WhisperSession, WhisperTarget} from "tc-shared/voice/VoiceWhisper";
import {NativeInput} from "../audio/AudioRecorder";
import {ConnectionState} from "tc-shared/ConnectionHandler";
import {VoicePlayerEvents, VoicePlayerLatencySettings, VoicePlayerState} from "tc-shared/voice/VoicePlayer";
import {Registry} from "tc-shared/events";
import {LogCategory, logInfo, logWarn} from "tc-shared/log";
import {tr} from "tc-shared/i18n/localize";
export class VoiceConnection extends AbstractVoiceConnection {
readonly connection: ServerConnection;
readonly handle: NativeVoiceConnection;
export class NativeVoiceConnectionWrapper extends AbstractVoiceConnection {
private readonly serverConnectionStateChangedListener;
private readonly native: NativeVoiceConnection;
private _audio_source: RecorderProfile;
private localAudioStarted = false;
private connectionState: VoiceConnectionStatus;
private currentRecorder: RecorderProfile;
private registeredVoiceClients: {[key: number]: NativeVoiceClientWrapper} = {};
private currentlyReplayingAudio = false;
private readonly voiceClientStateChangedEventListener;
constructor(connection: ServerConnection, voice: NativeVoiceConnection) {
super(connection);
this.connection = connection;
this.handle = voice;
this.native = voice;
this.serverConnectionStateChangedListener = () => {
if(this.connection.getConnectionState() === ConnectionState.CONNECTED) {
this.setConnectionState(VoiceConnectionStatus.Connected);
} else {
this.setConnectionState(VoiceConnectionStatus.Disconnected);
}
}
this.connection.events.on("notify_connection_state_changed", this.serverConnectionStateChangedListener);
this.connectionState = VoiceConnectionStatus.Disconnected;
this.voiceClientStateChangedEventListener = this.handleVoiceClientStateChange.bind(this);
}
setup() { }
destroy() {
this.connection.events.off("notify_connection_state_changed", this.serverConnectionStateChangedListener);
}
async acquire_voice_recorder(recorder: RecorderProfile | undefined, enforce?: boolean) {
if(this._audio_source === recorder && !enforce)
getConnectionState(): VoiceConnectionStatus {
return this.connectionState;
}
getFailedMessage(): string {
/* the native voice connection can't fail */
return "this message should never appear";
}
private setConnectionState(state: VoiceConnectionStatus) {
if(this.connectionState === state) {
return;
if(this._audio_source)
await this._audio_source.unmount();
if(recorder) {
if(!(recorder.input instanceof NativeInput))
throw "Recorder input must be an instance of NativeInput!";
await recorder.unmount();
}
this.handleVoiceEnded();
this._audio_source = recorder;
const oldState = this.connectionState;
this.connectionState = state;
this.events.fire("notify_connection_status_changed", { oldStatus: oldState, newStatus: state });
}
if(recorder) {
recorder.current_handler = this.connection.client;
encodingSupported(codec: number): boolean {
return this.native.encoding_supported(codec);
}
recorder.callback_unmount = () => {
this._audio_source = undefined;
this.handle.set_audio_source(undefined);
this.connection.client.update_voice_status(undefined);
};
decodingSupported(codec: number): boolean {
return this.native.decoding_supported(codec);
}
recorder.callback_start = this.on_voice_started.bind(this);
recorder.callback_stop = this.handleVoiceEnded.bind(this);
(recorder as any).callback_support_change = () => {
this.connection.client.update_voice_status(undefined);
};
this.handle.set_audio_source((recorder.input as NativeInput).getNativeConsumer());
async acquireVoiceRecorder(recorder: RecorderProfile | undefined): Promise<void> {
if(this.currentRecorder === recorder) {
return;
}
this.connection.client.update_voice_status(undefined);
if(this.currentRecorder) {
await this.currentRecorder.unmount();
}
this.handleVoiceEndEvent();
this.currentRecorder = recorder;
try {
if(recorder) {
if(!(recorder.input instanceof NativeInput)) {
this.currentRecorder = undefined;
throw "Recorder input must be an instance of NativeInput!";
}
await recorder.unmount();
recorder.current_handler = this.connection.client;
recorder.callback_unmount = () => {
this.currentRecorder = undefined;
this.native.set_audio_source(undefined);
this.handleVoiceEndEvent();
};
recorder.callback_start = this.handleVoiceStartEvent.bind(this);
recorder.callback_stop = this.handleVoiceEndEvent.bind(this);
this.native.set_audio_source(recorder.input.getNativeConsumer());
}
} catch(error) {
this.currentRecorder = undefined;
throw error;
}
this.events.fire("notify_recorder_changed", {})
}
voice_playback_support() : boolean {
return this.connection.connected();
voiceRecorder(): RecorderProfile {
return this.currentRecorder;
}
voice_send_support() : boolean {
return this.connection.connected();
getEncoderCodec(): number {
return this.native.get_encoder_codec();
}
private current_channel_codec() : number {
setEncoderCodec(codec: number) {
this.native.set_encoder_codec(codec);
}
isReplayingVoice(): boolean {
return this.currentlyReplayingAudio;
}
private setReplayingVoice(status: boolean) {
if(status === this.currentlyReplayingAudio) {
return;
}
this.currentlyReplayingAudio = status;
this.events.fire("notify_voice_replay_state_change", { replaying: status });
}
private handleVoiceClientStateChange() {
this.setReplayingVoice(this.availableVoiceClients().findIndex(client => client.getState() === VoicePlayerState.PLAYING || client.getState() === VoicePlayerState.BUFFERING) !== -1);
}
private handleVoiceStartEvent() {
const chandler = this.connection.client;
return (chandler.getClient().currentChannel() || {properties: { channel_codec: 4}}).properties.channel_codec;
if(chandler.isMicrophoneMuted()) {
logWarn(LogCategory.VOICE, tr("Received local voice started event, even thou we're muted!"));
return;
}
this.native.enable_voice_send(true);
this.localAudioStarted = true;
logInfo(LogCategory.VOICE, tr("Local voice started"));
const ch = chandler.getClient();
if(ch) ch.speaking = true;
}
private handleVoiceEnded() {
private handleVoiceEndEvent() {
this.native.enable_voice_send(false);
const chandler = this.connection.client;
chandler.getClient().speaking = false;
const ch = chandler.getClient();
if(ch) ch.speaking = false;
if(!chandler.connected)
return false;
@ -86,97 +175,148 @@ export class VoiceConnection extends AbstractVoiceConnection {
if(chandler.isMicrophoneMuted())
return false;
console.log(tr("Local voice ended"));
//TODO Send end? (Or is this already an automated thing?)
logInfo(LogCategory.VOICE, tr("Local voice ended"));
this.localAudioStarted = false;
}
private on_voice_started() {
const chandler = this.connection.client;
if(chandler.isMicrophoneMuted()) {
/* evil hack due to the settings :D */
log.warn(LogCategory.VOICE, tr("Received local voice started event, even thou we're muted! Do not send any voice."));
if(this.handle) {
this.handle.enable_voice_send(false);
}
return;
}
log.info(LogCategory.VOICE, tr("Local voice started (Native)"));
this.handle.enable_voice_send(true);
const ch = chandler.getClient();
if(ch) ch.speaking = true;
availableVoiceClients(): NativeVoiceClientWrapper[] {
return Object.keys(this.registeredVoiceClients).map(clientId => this.registeredVoiceClients[clientId]);
}
getConnectionState(): VoiceConnectionStatus {
return VoiceConnectionStatus.Connected;
registerVoiceClient(clientId: number) {
const client = new NativeVoiceClientWrapper(this.native.register_client(clientId));
client.events.on("notify_state_changed", this.voiceClientStateChangedEventListener);
this.registeredVoiceClients[clientId] = client;
return client;
}
voice_recorder(): RecorderProfile {
return this._audio_source;
unregisterVoiceClient(client: VoiceClient) {
if(!(client instanceof NativeVoiceClientWrapper))
throw "invalid client type";
delete this.registeredVoiceClients[client.getClientId()];
this.native.unregister_client(client.getClientId());
client.destroy();
}
available_clients(): VoiceClient[] {
return this.handle.available_clients().map(e => Object.assign(e, {
support_latency_settings() { return true; },
reset_latency_settings() {
const stream = this.get_stream();
stream.set_buffer_latency(0.080);
stream.set_buffer_max_latency(0.5);
return this.latency_settings();
},
latency_settings(settings?: LatencySettings) : LatencySettings {
const stream = this.get_stream();
if(typeof settings !== "undefined") {
stream.set_buffer_latency(settings.min_buffer / 1000);
stream.set_buffer_max_latency(settings.max_buffer / 100);
}
return {
max_buffer: Math.floor(stream.get_buffer_max_latency() * 1000),
min_buffer: Math.floor(stream.get_buffer_latency() * 1000)
};
},
support_flush() { return true; },
flush() {
const stream = this.get_stream();
stream.flush_buffer();
}
}) as any); /* cast to any because of: Type 'import("/mnt/d/TeaSpeak/client_linux/client/imports/shared-app/connection/ConnectionBase").voice.PlayerState' is not assignable to type 'import("tc-native/connection").PlayerState' */
stopAllVoiceReplays() {
this.availableVoiceClients().forEach(client => client.abortReplay());
}
find_client(client_id: number) : VoiceClient | undefined {
for(const client of this.available_clients())
if(client.client_id === client_id)
return client;
/* whisper API */
getWhisperSessionInitializer(): WhisperSessionInitializer | undefined {
return undefined;
}
unregister_client(client: VoiceClient): Promise<void> {
this.handle.unregister_client(client.client_id);
return Promise.resolve();
getWhisperSessions(): WhisperSession[] {
return [];
}
register_client(client_id: number): VoiceClient {
const client = this.handle.register_client(client_id);
const c = this.find_client(client_id);
c.reset_latency_settings();
return c;
getWhisperTarget(): WhisperTarget | undefined {
return undefined;
}
decoding_supported(codec: number): boolean {
return this.handle.decoding_supported(codec);
setWhisperSessionInitializer(initializer: WhisperSessionInitializer | undefined) {
}
encoding_supported(codec: number): boolean {
return this.handle.encoding_supported(codec);
startWhisper(target: WhisperTarget): Promise<void> {
return Promise.resolve(undefined);
}
get_encoder_codec(): number {
return this.handle.get_encoder_codec();
dropWhisperSession(session: WhisperSession) {
}
set_encoder_codec(codec: number) {
return this.handle.set_encoder_codec(codec);
stopWhisper() {
}
}
/*
 * Adapts a native voice client handle to the shared VoiceClient interface,
 * mirroring the native player state into VoicePlayerEvents notifications.
 * Latency values are exposed in milliseconds while the native stream API
 * works in seconds (hence the /1000 and *1000 conversions below).
 */
class NativeVoiceClientWrapper implements VoiceClient {
private readonly native: NativeVoiceClient;
readonly events: Registry<VoicePlayerEvents>;
/* Last state reported via callback_state_changed; starts as STOPPED. */
private playerState: VoicePlayerState;
constructor(native: NativeVoiceClient) {
this.events = new Registry<VoicePlayerEvents>();
this.native = native;
this.playerState = VoicePlayerState.STOPPED;
/* Forward native player state transitions into the shared state enum. */
this.native.callback_state_changed = state => {
switch (state) {
case PlayerState.BUFFERING:
this.setState(VoicePlayerState.BUFFERING);
break;
case PlayerState.PLAYING:
this.setState(VoicePlayerState.PLAYING);
break;
case PlayerState.STOPPED:
this.setState(VoicePlayerState.STOPPED);
break;
case PlayerState.STOPPING:
this.setState(VoicePlayerState.STOPPING);
break;
}
}
}
/* Releases the event registry; the native handle itself is not freed here. */
destroy() {
this.events.destroy();
}
abortReplay() {
this.native.abort_replay();
}
/* Drops any buffered but not yet replayed audio. */
flushBuffer() {
this.native.get_stream().flush_buffer();
}
getClientId(): number {
return this.native.client_id;
}
getState(): VoicePlayerState {
return this.playerState;
}
/* Update the cached state and notify listeners; no-op when unchanged. */
private setState(state: VoicePlayerState) {
if(this.playerState === state) {
return;
}
const oldState = this.playerState;
this.playerState = state;
this.events.fire("notify_state_changed", { oldState: oldState, newState: state });
}
setVolume(volume: number) {
this.native.set_volume(volume);
}
getVolume(): number {
return this.native.get_volume();
}
/* Restore the default buffer latency window (80ms min, 500ms max). */
resetLatencySettings() {
const stream = this.native.get_stream();
stream.set_buffer_latency(0.080);
stream.set_buffer_max_latency(0.5);
}
/* settings are in milliseconds; the native stream expects seconds. */
setLatencySettings(settings: VoicePlayerLatencySettings) {
const stream = this.native.get_stream();
stream.set_buffer_latency(settings.minBufferTime / 1000);
stream.set_buffer_max_latency(settings.maxBufferTime / 1000);
}
/* Converts the native seconds back to the millisecond-based settings object. */
getLatencySettings(): Readonly<VoicePlayerLatencySettings> {
const stream = this.native.get_stream();
return {
maxBufferTime: stream.get_buffer_max_latency() * 1000,
minBufferTime: stream.get_buffer_latency() * 1000
};
}
}

View File

@ -0,0 +1,180 @@
import {ServerConnection} from "./ServerConnection";
import {NativeVoiceConnection} from "tc-native/connection";
import {RecorderProfile} from "tc-shared/voice/RecorderProfile";
import {tr} from "tc-shared/i18n/localize";
import {LogCategory} from "tc-shared/log";
import * as log from "tc-shared/log";
import {
AbstractVoiceConnection,
VoiceConnectionStatus
} from "tc-shared/connection/VoiceConnection";
import {NativeInput} from "../audio/AudioRecorder";
/*
 * Legacy native voice connection wrapper. Bridges the shared
 * AbstractVoiceConnection contract onto the tc-native voice handle.
 */
export class VoiceConnection extends AbstractVoiceConnection {
    readonly connection: ServerConnection;
    readonly handle: NativeVoiceConnection;

    /* Recorder profile currently feeding the native audio source (undefined when detached). */
    private _audio_source: RecorderProfile;

    constructor(connection: ServerConnection, voice: NativeVoiceConnection) {
        super(connection);
        this.connection = connection;
        this.handle = voice;
    }

    setup() { }

    /**
     * Mount the given recorder profile as the local voice source.
     * Unmounts the previous profile, rewires start/stop/unmount callbacks and
     * attaches the recorder's native consumer to the native voice handle.
     *
     * @param recorder new profile, or undefined to detach voice input
     * @param enforce remount even when the profile is already active
     * @throws when the recorder's input is not a NativeInput
     */
    async acquire_voice_recorder(recorder: RecorderProfile | undefined, enforce?: boolean) {
        if(this._audio_source === recorder && !enforce)
            return;

        if(this._audio_source)
            await this._audio_source.unmount();

        if(recorder) {
            if(!(recorder.input instanceof NativeInput))
                throw "Recorder input must be an instance of NativeInput!";
            await recorder.unmount();
        }

        /* Make sure any ongoing transmission is flagged as ended before swapping sources. */
        this.handleVoiceEnded();
        this._audio_source = recorder;

        if(recorder) {
            recorder.current_handler = this.connection.client;

            recorder.callback_unmount = () => {
                this._audio_source = undefined;
                this.handle.set_audio_source(undefined);
                this.connection.client.update_voice_status(undefined);
            };

            recorder.callback_start = this.on_voice_started.bind(this);
            recorder.callback_stop = this.handleVoiceEnded.bind(this);

            (recorder as any).callback_support_change = () => {
                this.connection.client.update_voice_status(undefined);
            };

            this.handle.set_audio_source((recorder.input as NativeInput).getNativeConsumer());
        }

        this.connection.client.update_voice_status(undefined);
    }

    voice_playback_support() : boolean {
        return this.connection.connected();
    }

    voice_send_support() : boolean {
        return this.connection.connected();
    }

    /* Codec of the channel the own client resides in; falls back to codec id 4. */
    private current_channel_codec() : number {
        const chandler = this.connection.client;
        return (chandler.getClient().currentChannel() || {properties: { channel_codec: 4}}).properties.channel_codec;
    }

    /* Recorder stop callback: clear the speaking flag and log the transition. */
    private handleVoiceEnded() {
        const chandler = this.connection.client;
        chandler.getClient().speaking = false;

        if(!chandler.connected)
            return false;
        if(chandler.isMicrophoneMuted())
            return false;

        console.log(tr("Local voice ended"));
        //TODO Send end? (Or is this already an automated thing?)
    }

    /* Recorder start callback: enable native voice send unless we're muted. */
    private on_voice_started() {
        const chandler = this.connection.client;
        if(chandler.isMicrophoneMuted()) {
            /* evil hack due to the settings :D */
            log.warn(LogCategory.VOICE, tr("Received local voice started event, even thou we're muted! Do not send any voice."));
            if(this.handle) {
                this.handle.enable_voice_send(false);
            }
            return;
        }
        log.info(LogCategory.VOICE, tr("Local voice started (Native)"));
        this.handle.enable_voice_send(true);

        const ch = chandler.getClient();
        if(ch) ch.speaking = true;
    }

    getConnectionState(): VoiceConnectionStatus {
        return VoiceConnectionStatus.Connected;
    }

    voice_recorder(): RecorderProfile {
        return this._audio_source;
    }

    /**
     * Wrap every native voice client with the latency/flush helper API.
     * The helper API exposes latency in milliseconds; the native stream
     * works in seconds, hence the conversions.
     */
    available_clients(): VoiceClient[] {
        return this.handle.available_clients().map(e => Object.assign(e, {
            support_latency_settings() { return true; },
            reset_latency_settings() {
                const stream = this.get_stream();
                stream.set_buffer_latency(0.080);
                stream.set_buffer_max_latency(0.5);
                return this.latency_settings();
            },
            latency_settings(settings?: LatencySettings) : LatencySettings {
                const stream = this.get_stream();
                if(typeof settings !== "undefined") {
                    stream.set_buffer_latency(settings.min_buffer / 1000);
                    /* Bugfix: previously divided by 100, inflating the applied max
                       latency tenfold (the getter below converts back with * 1000). */
                    stream.set_buffer_max_latency(settings.max_buffer / 1000);
                }
                return {
                    max_buffer: Math.floor(stream.get_buffer_max_latency() * 1000),
                    min_buffer: Math.floor(stream.get_buffer_latency() * 1000)
                };
            },
            support_flush() { return true; },
            flush() {
                const stream = this.get_stream();
                stream.flush_buffer();
            }
        }) as any); /* cast to any because of: Type 'import("/mnt/d/TeaSpeak/client_linux/client/imports/shared-app/connection/ConnectionBase").voice.PlayerState' is not assignable to type 'import("tc-native/connection").PlayerState' */
    }

    find_client(client_id: number) : VoiceClient | undefined {
        for(const client of this.available_clients())
            if(client.client_id === client_id)
                return client;
        return undefined;
    }

    unregister_client(client: VoiceClient): Promise<void> {
        this.handle.unregister_client(client.client_id);
        return Promise.resolve();
    }

    /* Register the client natively, then return the wrapped view with defaults applied. */
    register_client(client_id: number): VoiceClient {
        this.handle.register_client(client_id);
        const c = this.find_client(client_id);
        c.reset_latency_settings();
        return c;
    }

    decoding_supported(codec: number): boolean {
        return this.handle.decoding_supported(codec);
    }

    encoding_supported(codec: number): boolean {
        return this.handle.encoding_supported(codec);
    }

    get_encoder_codec(): number {
        return this.handle.get_encoder_codec();
    }

    set_encoder_codec(codec: number) {
        return this.handle.set_encoder_codec(codec);
    }
}

View File

@ -1,10 +1,9 @@
import {ServerAddress} from "tc-shared/ui/server";
import * as loader from "tc-loader";
import {AddressTarget, ResolveOptions} from "tc-shared/dns";
import * as dns_handler from "tc-native/dns";
import {ServerAddress} from "tc-shared/tree/Server";
export function supported() { return true; }
export async function resolve_address(address: ServerAddress, _options?: ResolveOptions) : Promise<AddressTarget> {
/* backwards compatibility */
if(typeof(address) === "string") {
@ -26,6 +25,7 @@ export async function resolve_address(address: ServerAddress, _options?: Resolve
});
})
}
export function supported() { return true; }
loader.register_task(loader.Stage.JAVASCRIPT_INITIALIZING, {
name: "Native DNS initialized",

View File

@ -165,6 +165,7 @@ loader.register_task(loader.Stage.JAVASCRIPT_INITIALIZING, {
await import("./hooks/ChangeLogClient");
await import("./UnloadHandler");
await import("./WindowsTrayHandler");
} catch (error) {
console.log(error);
window.displayCriticalError("Failed to load native extensions: " + error);

View File

@ -184,6 +184,12 @@ declare module "tc-native/connection" {
}
export namespace record {
enum FilterMode {
Bypass,
Filter,
Block
}
export interface ConsumeFilter {
get_name() : string;
}
@ -229,6 +235,9 @@ declare module "tc-native/connection" {
create_filter_threshold(threshold: number) : ThresholdConsumeFilter;
create_filter_state() : StateConsumeFilter;
set_filter_mode(mode: FilterMode);
get_filter_mode() : FilterMode;
callback_data: (buffer: Float32Array) => any;
callback_ended: () => any;
callback_started: () => any;

View File

@ -24,6 +24,9 @@ NAN_MODULE_INIT(AudioConsumerWrapper::Init) {
Nan::SetPrototypeMethod(klass, "create_filter_threshold", AudioConsumerWrapper::_create_filter_threshold);
Nan::SetPrototypeMethod(klass, "create_filter_state", AudioConsumerWrapper::_create_filter_state);
Nan::SetPrototypeMethod(klass, "get_filter_mode", AudioConsumerWrapper::_get_filter_mode);
Nan::SetPrototypeMethod(klass, "set_filter_mode", AudioConsumerWrapper::_set_filter_mode);
constructor_template().Reset(klass);
constructor().Reset(Nan::GetFunction(klass).ToLocalChecked());
}
@ -92,7 +95,7 @@ void AudioConsumerWrapper::do_wrap(const v8::Local<v8::Object> &obj) {
v8::Local<v8::Value> argv[1];
argv[0] = js_fbuffer;
callback_function.As<v8::Function>()->Call(Nan::GetCurrentContext(), Nan::Undefined(), 1, argv);
(void) callback_function.As<v8::Function>()->Call(Nan::GetCurrentContext(), Nan::Undefined(), 1, argv);
}
});
@ -103,7 +106,7 @@ void AudioConsumerWrapper::do_wrap(const v8::Local<v8::Object> &obj) {
v8::Local<v8::Value> callback_function = Nan::Get(handle, Nan::New<v8::String>("callback_ended").ToLocalChecked()).FromMaybe(v8::Local<v8::Value>{});
if(callback_function.IsEmpty() || callback_function->IsNullOrUndefined() || !callback_function->IsFunction())
return;
callback_function.As<v8::Function>()->Call(Nan::GetCurrentContext(), Nan::Undefined(), 0, nullptr);
(void) callback_function.As<v8::Function>()->Call(Nan::GetCurrentContext(), Nan::Undefined(), 0, nullptr);
});
this->_call_started = Nan::async_callback([&]{
@ -113,7 +116,7 @@ void AudioConsumerWrapper::do_wrap(const v8::Local<v8::Object> &obj) {
v8::Local<v8::Value> callback_function = Nan::Get(handle, Nan::New<v8::String>("callback_started").ToLocalChecked()).FromMaybe(v8::Local<v8::Value>{});
if(callback_function.IsEmpty() || callback_function->IsNullOrUndefined() || !callback_function->IsFunction())
return;
callback_function.As<v8::Function>()->Call(Nan::GetCurrentContext(), Nan::Undefined(), 0, nullptr);
(void) callback_function.As<v8::Function>()->Call(Nan::GetCurrentContext(), Nan::Undefined(), 0, nullptr);
});
Nan::Set(this->handle(), Nan::New<v8::String>("frame_size").ToLocalChecked(), Nan::New<v8::Number>((uint32_t) this->_handle->frame_size));
@ -131,32 +134,43 @@ void AudioConsumerWrapper::unbind() {
void AudioConsumerWrapper::process_data(const void *buffer, size_t samples) {
lock_guard lock(this->execute_lock);
auto filters = this->filters();
for(const auto& filter : filters) {
auto _filter = filter->filter();
if(!_filter) continue;
bool should_process{true};
if(this->filter_mode_ == FilterMode::FILTER) {
auto filters = this->filters();
for(const auto& filter : filters) {
auto _filter = filter->filter();
if(!_filter) continue;
if(_filter->frame_size() != samples) {
cerr << "Tried to use a filter, but frame size does not match!" << endl;
continue;
}
if(!_filter->process(buffer)) {
if(!this->last_consumed) {
this->last_consumed = true;
this->_call_ended();
unique_lock native_read_lock(this->native_read_callback_lock);
if(this->native_read_callback) {
auto callback = this->native_read_callback; /* copy */
native_read_lock.unlock();
callback(nullptr, 0); /* notify end */
}
}
return;
}
if(_filter->frame_size() != samples) {
cerr << "Tried to use a filter, but frame size does not match!" << endl;
continue;
}
if(!_filter->process(buffer)) {
should_process = false;
break;
}
}
} else if(this->filter_mode_ != FilterMode::BYPASS) {
should_process = false;
}
if(this->last_consumed)
this->_call_started();
if(!should_process) {
if(!this->last_consumed) {
this->last_consumed = true;
this->_call_ended();
unique_lock native_read_lock(this->native_read_callback_lock);
if(this->native_read_callback) {
auto callback = this->native_read_callback; /* copy */
native_read_lock.unlock();
callback(nullptr, 0); /* notify end */
}
}
return;
}
if(this->last_consumed) {
this->_call_started();
}
this->last_consumed = false;
{
@ -177,7 +191,7 @@ void AudioConsumerWrapper::process_data(const void *buffer, size_t samples) {
buf->sample_count = samples;
{
lock_guard lock(this->_data_lock);
lock_guard data_lock{this->_data_lock};
this->_data_entries.push_back(move(buf));
}
this->_call_data();
@ -199,8 +213,8 @@ std::shared_ptr<AudioFilterWrapper> AudioConsumerWrapper::create_filter(const st
}
{
lock_guard lock(this->_filters_lock);
this->_filters.push_back(result);
lock_guard lock(this->filter_mutex_);
this->filter_.push_back(result);
}
return result;
@ -209,20 +223,22 @@ std::shared_ptr<AudioFilterWrapper> AudioConsumerWrapper::create_filter(const st
void AudioConsumerWrapper::delete_filter(const AudioFilterWrapper* filter) {
shared_ptr<AudioFilterWrapper> handle; /* need to keep the handle 'till everything has been finished */
{
lock_guard lock(this->_filters_lock);
for(auto& c : this->_filters) {
lock_guard lock(this->filter_mutex_);
for(auto& c : this->filter_) {
if(&*c == filter) {
handle = c;
break;
}
}
if(!handle)
return;
if(!handle) {
return;
}
{
auto it = find(this->_filters.begin(), this->_filters.end(), handle);
if(it != this->_filters.end())
this->_filters.erase(it);
auto it = find(this->filter_.begin(), this->filter_.end(), handle);
if(it != this->filter_.end()) {
this->filter_.erase(it);
}
}
}
@ -319,4 +335,21 @@ NAN_METHOD(AudioConsumerWrapper::_create_filter_state) {
auto object = handle->create_filter("state", filter);
info.GetReturnValue().Set(object->handle());
}
NAN_METHOD(AudioConsumerWrapper::_get_filter_mode) {
auto handle = ObjectWrap::Unwrap<AudioConsumerWrapper>(info.Holder());
info.GetReturnValue().Set((int) handle->filter_mode_);
}
NAN_METHOD(AudioConsumerWrapper::_set_filter_mode) {
auto handle = ObjectWrap::Unwrap<AudioConsumerWrapper>(info.Holder());
if(info.Length() != 1 || !info[0]->IsNumber()) {
Nan::ThrowError("invalid argument");
return;
}
auto value = info[0].As<v8::Number>()->ToInteger()->Value();
handle->filter_mode_ = (FilterMode) value;
}

View File

@ -17,15 +17,12 @@ namespace tc {
namespace recorder {
class AudioFilterWrapper;
class AudioRecorderWrapper;
/*
get_filters() : ConsumeFilter[];
register_filter(filter: ConsumeFilter);
unregister_filter(filter: ConsumeFilter);
create_filter_vad() : VADConsumeFilter;
create_filter_threshold() : ThresholdConsumeFilter;
*/
enum FilterMode {
BYPASS,
FILTER,
BLOCK
};
class AudioConsumerWrapper : public Nan::ObjectWrap {
friend class AudioRecorderWrapper;
@ -52,14 +49,19 @@ namespace tc {
static NAN_METHOD(_create_filter_threshold);
static NAN_METHOD(_create_filter_state);
static NAN_METHOD(_get_filter_mode);
static NAN_METHOD(_set_filter_mode);
std::shared_ptr<AudioFilterWrapper> create_filter(const std::string& /* name */, const std::shared_ptr<filter::Filter>& /* filter impl */);
void delete_filter(const AudioFilterWrapper*);
inline std::deque<std::shared_ptr<AudioFilterWrapper>> filters() {
std::lock_guard lock(this->_filters_lock);
return this->_filters;
std::lock_guard lock(this->filter_mutex_);
return this->filter_;
}
inline FilterMode filter_mode() const { return this->filter_mode_; }
inline std::shared_ptr<AudioConsumer> native_consumer() { return this->_handle; }
std::mutex native_read_callback_lock;
@ -70,8 +72,9 @@ namespace tc {
std::mutex execute_lock;
std::shared_ptr<AudioConsumer> _handle;
std::mutex _filters_lock;
std::deque<std::shared_ptr<AudioFilterWrapper>> _filters;
std::mutex filter_mutex_;
std::deque<std::shared_ptr<AudioFilterWrapper>> filter_;
FilterMode filter_mode_{FilterMode::FILTER};
bool last_consumed = false;
void do_wrap(const v8::Local<v8::Object>& /* object */);
@ -95,10 +98,6 @@ namespace tc {
Nan::callback_t<> _call_data;
Nan::callback_t<> _call_ended;
Nan::callback_t<> _call_started;
/*
callback_data: (buffer: Float32Array) => any;
callback_ended: () => any;
*/
};
}
}

View File

@ -66,8 +66,9 @@ AudioFilterWrapper::~AudioFilterWrapper() {
log_free("AudioFilterWrapper", this);
auto threshold_filter = dynamic_pointer_cast<filter::ThresholdFilter>(this->_filter);
if(threshold_filter)
threshold_filter->on_analyze = nullptr;
if(threshold_filter) {
threshold_filter->on_analyze = nullptr;
}
this->_callback_analyzed.Reset();
}
@ -284,7 +285,6 @@ NAN_METHOD(AudioFilterWrapper::_set_analyze_filter) {
}
}
NAN_METHOD(AudioFilterWrapper::_is_consuming) {
auto handle = ObjectWrap::Unwrap<AudioFilterWrapper>(info.Holder());
if(!handle->_filter) {

View File

@ -27,7 +27,7 @@ namespace tc {
}
AudioFilterWrapper(const std::string& name, const std::shared_ptr<filter::Filter>& /* handle */);
virtual ~AudioFilterWrapper();
~AudioFilterWrapper() override;
static NAN_METHOD(_get_name);

View File

@ -181,6 +181,16 @@ NAN_MODULE_INIT(init) {
audio::recorder::AudioRecorderWrapper::Init(namespace_record);
audio::recorder::AudioConsumerWrapper::Init(namespace_record);
audio::recorder::AudioFilterWrapper::Init(namespace_record);
{
auto enum_object = Nan::New<v8::Object>();
ENUM_SET(enum_object, "Bypass", audio::recorder::FilterMode::BYPASS);
ENUM_SET(enum_object, "Filter", audio::recorder::FilterMode::FILTER);
ENUM_SET(enum_object, "Block", audio::recorder::BLOCK);
Nan::DefineOwnProperty(namespace_record, Nan::New<v8::String>("FilterMode").ToLocalChecked(), enum_object, v8::DontDelete);
}
Nan::Set(namespace_audio, Nan::New<v8::String>("record").ToLocalChecked(), namespace_record);
}
{

View File

@ -84,7 +84,7 @@ namespace tc::connection {
void process_packet(uint16_t packet_id, const pipes::buffer_view& /* buffer */, codec::value /* codec */, bool /* head */);
void execute_tick();
inline float get_volume() { return this->volume_; }
inline float get_volume() const { return this->volume_; }
inline void set_volume(float value) { this->volume_ = value; }
inline state::value state() { return this->state_; }
@ -121,7 +121,7 @@ namespace tc::connection {
uint16_t last_packet_id{0xFFFF}; /* the first packet id is 0 so one packet before is 0xFFFF */
std::chrono::system_clock::time_point last_packet_timestamp;
inline std::chrono::system_clock::time_point stream_timeout() {
inline std::chrono::system_clock::time_point stream_timeout() const {
return this->last_packet_timestamp + std::chrono::milliseconds{1000};
}

View File

@ -1,6 +1,6 @@
{
"name": "TeaClient",
"version": "1.4.10",
"version": "1.4.11",
"description": "",
"main": "main.js",
"scripts": {