Some audio fixes

WolverinDEV 2020-08-21 13:37:10 +02:00
parent 93b4521673
commit f3add08a92
33 changed files with 823 additions and 790 deletions

View File

@ -121,6 +121,6 @@ function deploy_client() {
#install_npm
#compile_scripts
-compile_native
-package_client
+#compile_native
+#package_client
deploy_client

View File

@ -20,9 +20,9 @@ import {Arguments, process_args} from "../../shared/process-arguments";
import * as electron from "electron";
import {PassThrough} from "stream";
import ErrnoException = NodeJS.ErrnoException;
-import * as winmgr from "../window";
import {reference_app} from "../main_window";
import * as url from "url";
+import {loadWindowBounds, startTrackWindowBounds} from "../../shared/window";

const is_debug = false;
export function server_url() : string {
@ -697,9 +697,10 @@ export async function execute_graphical(channel: string, ask_install: boolean) :
window.webContents.openDevTools();
}
await new Promise(resolve => window.on('ready-to-show', resolve));
+await loadWindowBounds('update-installer', window);
+startTrackWindowBounds('update-installer', window);
window.show();
-await winmgr.apply_bounds('update-installer', window);
-winmgr.track_bounds('update-installer', window);
const current_vers = await current_version();
console.log("Current version: " + current_vers.toString(true));

View File

@ -1,6 +1,4 @@
import {BrowserWindow, Menu, MenuItem, MessageBoxOptions, app, dialog} from "electron";
-import * as electron from "electron";
-import * as winmgr from "./window";
import * as path from "path";

let app_references = 0;
@ -21,6 +19,7 @@ import * as updater from "./app-updater";
import * as loader from "./ui-loader";
import * as crash_handler from "../crash_handler";
import * as url from "url";
+import {loadWindowBounds, startTrackWindowBounds} from "../shared/window";

// Keep a global reference of the window object, if you don't, the window will
// be closed automatically when the JavaScript object is garbage collected.
@ -62,8 +61,8 @@ function spawn_main_window(entry_point: string) {
main_window.once('ready-to-show', () => {
main_window.show();
-winmgr.apply_bounds('main-window', main_window).then(() => {
-winmgr.track_bounds('main-window', main_window);
+loadWindowBounds('main-window', main_window).then(() => {
+startTrackWindowBounds('main-window', main_window);
main_window.focus();
loader.ui.cleanup();

View File

@ -23,4 +23,4 @@ ipcMain.on('basic-action', (event, action, ...args: any[]) => {
} else if(action === "reload-window") {
window.reload();
}
});

View File

@ -5,8 +5,8 @@ import {screen} from "electron";
import {Arguments, process_args} from "../../shared/process-arguments";
import * as loader from "./loader";
import * as updater from "../app-updater";
-import * as winmgr from "../window";
import * as url from "url";
+import {loadWindowBounds, startTrackWindowBounds} from "../../shared/window";

export namespace ui {
let gui: electron.BrowserWindow;
@ -124,8 +124,9 @@ export namespace ui {
console.log("Setting UI position to %ox%o", x, y); console.log("Setting UI position to %ox%o", x, y);
if(typeof x === "number" && typeof y === "number") if(typeof x === "number" && typeof y === "number")
gui.setPosition(x, y); gui.setPosition(x, y);
winmgr.apply_bounds('ui-load-window', gui, undefined, { apply_size: false }).then(() => {
winmgr.track_bounds('ui-load-window', gui); loadWindowBounds('ui-load-window', gui, undefined, { applySize: false }).then(() => {
startTrackWindowBounds('ui-load-window', gui);
const call_loader = () => load_files().catch(reject); const call_loader = () => load_files().catch(reject);
if(!process_args.has_flag(...Arguments.DISABLE_ANIMATION)) if(!process_args.has_flag(...Arguments.DISABLE_ANIMATION))

View File

@ -1,6 +1,6 @@
import * as electron from "electron";
import * as path from "path";
-import * as winmgr from "../window";
+import {loadWindowBounds, startTrackWindowBounds} from "../../shared/window";

let global_window: electron.BrowserWindow;
let global_window_promise: Promise<void>;
@ -49,8 +49,8 @@ export async function open_preview(url: string) {
});

try {
-await winmgr.apply_bounds('url-preview', global_window);
-winmgr.track_bounds('url-preview', global_window);
+await loadWindowBounds('url-preview', global_window);
+startTrackWindowBounds('url-preview', global_window);
await new Promise((resolve, reject) => {
const timeout = setTimeout(() => reject("timeout"), 5000);

View File

@ -1,15 +1,15 @@
import {AbstractExternalModalController} from "tc-shared/ui/react-elements/external-modal/Controller";
-import {setExternalModalControllerFactory} from "tc-shared/ui/react-elements/external-modal";
import * as ipc from "tc-shared/ipc/BrowserIPC";
import * as log from "tc-shared/log";
import {LogCategory} from "tc-shared/log";
-import * as loader from "tc-loader";
-import {Stage} from "tc-loader";
import {BrowserWindow, remote} from "electron";
import {tr} from "tc-shared/i18n/localize";
import * as path from "path";
+import {Arguments, process_args} from "../shared/process-arguments";
+import {Popout2ControllerMessages, PopoutIPCMessage} from "tc-shared/ui/react-elements/external-modal/IPCMessage";
+import {loadWindowBounds, startTrackWindowBounds} from "../shared/window";

-class ExternalModalController extends AbstractExternalModalController {
+export class ExternalModalController extends AbstractExternalModalController {
private window: BrowserWindow;

constructor(a, b, c) {
@ -26,21 +26,33 @@ class ExternalModalController extends AbstractExternalModalController {
autoHideMenuBar: true,
webPreferences: {
-nodeIntegration: true
+nodeIntegration: true,
},
icon: path.join(__dirname, "..", "..", "resources", "logo.ico"),
minWidth: 600,
-minHeight: 300
+minHeight: 300,
+frame: false,
+transparent: true,
+show: true
});

+loadWindowBounds("modal-" + this.modalType, this.window).then(() => {
+startTrackWindowBounds("modal-" + this.modalType, this.window);
+});

+if(process_args.has_flag(Arguments.DEV_TOOLS))
+this.window.webContents.openDevTools();

const parameters = {
"loader-target": "manifest",
"chunk": "modal-external",
"modal-target": this.modalType,
"ipc-channel": this.ipcChannel.channelId,
"ipc-address": ipc.getInstance().getLocalAddress(),
-//"disableGlobalContextMenu": is_debug ? 1 : 0,
-//"loader-abort": is_debug ? 1 : 0,
+"loader-abort": 0,
+"animation-short": 1
};

const baseUrl = location.origin + location.pathname + "?";
@ -54,7 +66,6 @@ class ExternalModalController extends AbstractExternalModalController {
return false;
}

-this.window.show();
this.window.on("closed", () => {
this.window = undefined;
this.handleWindowClosed();
@ -73,12 +84,26 @@ class ExternalModalController extends AbstractExternalModalController {
protected focusWindow(): void {
this.window?.focus();
}
-}
-loader.register_task(Stage.JAVASCRIPT_INITIALIZING, {
-priority: 50,
-name: "external modal controller factory setup",
-function: async () => {
-setExternalModalControllerFactory((modal, events, userData) => new ExternalModalController(modal, events, userData));
-}
-});
+protected handleTypedIPCMessage<T extends Popout2ControllerMessages>(type: T, payload: PopoutIPCMessage[T]) {
+super.handleTypedIPCMessage(type, payload);
+switch (type) {
+case "invoke-modal-action":
+const data = payload as PopoutIPCMessage["invoke-modal-action"];
+switch (data.action) {
+case "close":
+this.destroy();
+break;
+case "minimize":
+this.window?.minimize();
+break;
+}
+break;
+case "hello-popout":
+break;
+}
+}
+}

View File

@ -0,0 +1,60 @@
import {Settings, settings} from "tc-shared/settings";
import {server_connections} from "tc-shared/ui/frames/connection_handlers";
import {tr} from "tc-shared/i18n/localize";
import {Arguments, process_args} from "../shared/process-arguments";
import {remote} from "electron";
const unloadListener = event => {
if(settings.static(Settings.KEY_DISABLE_UNLOAD_DIALOG))
return;
const active_connections = server_connections.all_connections().filter(e => e.connected);
if(active_connections.length == 0) return;
const do_exit = (closeWindow: boolean) => {
const dp = server_connections.all_connections().map(e => {
if(e.serverConnection.connected())
return e.serverConnection.disconnect(tr("client closed"))
.catch(error => {
console.warn(tr("Failed to disconnect from server %s on client close: %o"),
e.serverConnection.remote_address().host + ":" + e.serverConnection.remote_address().port,
error
);
});
return Promise.resolve();
});
if(closeWindow) {
const exit = () => {
const {remote} = window.require('electron');
remote.getCurrentWindow().close();
};
Promise.all(dp).then(exit);
/* force exit after 2500ms */
setTimeout(exit, 2500);
}
};
if(process_args.has_flag(Arguments.DEBUG)) {
do_exit(false);
return;
}
remote.dialog.showMessageBox(remote.getCurrentWindow(), {
type: 'question',
buttons: ['Yes', 'No'],
title: 'Confirm',
message: 'Are you really sure?\nYou\'re still connected!'
}).then(result => {
if(result.response === 0) {
/* prevent quitting because we try to disconnect */
window.removeEventListener("beforeunload", unloadListener);
do_exit(true);
}
});
event.preventDefault();
}
window.addEventListener("beforeunload", unloadListener);

View File

@ -0,0 +1,243 @@
import {audio} from "tc-native/connection";
import {FilterType, StateFilter, ThresholdFilter, VoiceLevelFilter} from "tc-shared/voice/Filter";
import {NativeInput} from "./AudioRecorder";
export abstract class NativeFilter {
readonly priority: number;
handle: NativeInput;
enabled: boolean = false;
protected constructor(handle, priority: number) {
this.handle = handle;
this.priority = priority;
}
abstract initialize();
abstract finalize();
isEnabled(): boolean {
return this.enabled;
}
setEnabled(flag: boolean): void {
if(this.enabled === flag)
return;
this.enabled = flag;
if(this.enabled) {
this.initialize();
} else {
this.finalize();
}
}
}
export class NThresholdFilter extends NativeFilter implements ThresholdFilter {
static readonly frames_per_second = 1 / (960 / 48000);
readonly type: FilterType.THRESHOLD;
private filter: audio.record.ThresholdConsumeFilter;
private _margin_frames: number = 25; /* 120ms */
private _threshold: number = 50;
private _callback_level: any;
private _attack_smooth = 0;
private _release_smooth = 0;
private levelCallbacks: ((level: number) => void)[] = [];
constructor(handle, priority: number) {
super(handle, priority);
Object.defineProperty(this, 'callback_level', {
get(): any {
return this._callback_level;
}, set(v: any): void {
if(v === this._callback_level)
return;
this._callback_level = v;
if(this.filter)
this.filter.set_analyze_filter(v);
},
enumerable: true,
configurable: false,
})
}
getMarginFrames(): number {
return this.filter ? this.filter.get_margin_time() * NThresholdFilter.frames_per_second : this._margin_frames;
}
getThreshold(): number {
return this.filter ? this.filter.get_threshold() : this._threshold;
}
setMarginFrames(value: number) {
this._margin_frames = value;
if(this.filter)
this.filter.set_margin_time(value / 960 / 1000);
}
getAttackSmooth(): number {
return this.filter ? this.filter.get_attack_smooth() : this._attack_smooth;
}
getReleaseSmooth(): number {
return this.filter ? this.filter.get_release_smooth() : this._release_smooth;
}
setAttackSmooth(value: number) {
this._attack_smooth = value;
if(this.filter)
this.filter.set_attack_smooth(value);
}
setReleaseSmooth(value: number) {
this._release_smooth = value;
if(this.filter)
this.filter.set_release_smooth(value);
}
setThreshold(value: number): Promise<void> {
if(typeof(value) === "string")
value = parseInt(value); /* yes... this happens */
this._threshold = value;
if(this.filter)
this.filter.set_threshold(value);
return Promise.resolve();
}
finalize() {
if(this.filter) {
if(this.handle.getNativeConsumer())
this.handle.getNativeConsumer().unregister_filter(this.filter);
this.filter = undefined;
}
}
initialize() {
const consumer = this.handle.getNativeConsumer();
if(!consumer)
return;
this.finalize();
this.filter = consumer.create_filter_threshold(this._threshold);
if(this._callback_level)
this.filter.set_analyze_filter(this._callback_level);
this.filter.set_margin_time(this._margin_frames / NThresholdFilter.frames_per_second);
this.filter.set_attack_smooth(this._attack_smooth);
this.filter.set_release_smooth(this._release_smooth);
}
registerLevelCallback(callback: (value: number) => void) {
this.levelCallbacks.push(callback);
}
removeLevelCallback(callback: (value: number) => void) {
const index = this.levelCallbacks.indexOf(callback);
if(index === -1) return;
this.levelCallbacks.splice(index, 1);
}
}
export class NStateFilter extends NativeFilter implements StateFilter {
readonly type: FilterType.STATE;
private filter: audio.record.StateConsumeFilter;
private active = false;
constructor(handle, priority: number) {
super(handle, priority);
}
finalize() {
if(this.filter) {
const consumer = this.handle.getNativeConsumer();
consumer?.unregister_filter(this.filter);
this.filter = undefined;
}
}
initialize() {
const consumer = this.handle.getNativeConsumer();
if(!consumer)
return;
this.finalize();
this.filter = consumer.create_filter_state();
this.filter.set_consuming(this.active);
}
isActive(): boolean {
return this.active;
}
setState(state: boolean) {
if(this.active === state)
return;
this.active = state;
if(this.filter) {
this.filter.set_consuming(state);
}
}
}
export class NVoiceLevelFilter extends NativeFilter implements VoiceLevelFilter {
static readonly frames_per_second = 1 / (960 / 48000);
readonly type: FilterType.VOICE_LEVEL;
private filter: audio.record.VADConsumeFilter;
private level = 3;
private _margin_frames = 6;
constructor(handle, priority: number) {
super(handle, priority);
}
finalize() {
if(this.filter) {
const consumer = this.handle.getNativeConsumer();
consumer?.unregister_filter(this.filter);
this.filter = undefined;
}
}
initialize() {
const consumer = this.handle.getNativeConsumer();
if(!consumer)
return;
this.finalize();
this.filter = consumer.create_filter_vad(this.level);
this.filter.set_margin_time(this._margin_frames / NVoiceLevelFilter.frames_per_second);
}
getLevel(): number {
return this.level;
}
setLevel(value: number) {
if(this.level === value)
return;
this.level = value;
if(this.filter) {
this.finalize();
this.initialize();
}
}
setMarginFrames(value: number) {
this._margin_frames = value;
if(this.filter)
this.filter.set_margin_time(value / NVoiceLevelFilter.frames_per_second);
}
getMarginFrames(): number {
return this.filter ? this.filter.get_margin_time() * NVoiceLevelFilter.frames_per_second : this._margin_frames;
}
}

View File

@ -1,445 +1,188 @@
import {
-filter,
AbstractInput,
-InputDevice,
-InputState,
InputConsumer,
-InputConsumerType, InputStartResult, LevelMeter
+InputConsumerType,
+InputEvents,
+InputStartResult,
+InputState,
+LevelMeter
} from "tc-shared/voice/RecorderBase";
import {audio} from "tc-native/connection";
import {tr} from "tc-shared/i18n/localize";
+import {Registry} from "tc-shared/events";
+import {Filter, FilterType, FilterTypeClass} from "tc-shared/voice/Filter";
+import {NativeFilter, NStateFilter, NThresholdFilter, NVoiceLevelFilter} from "./AudioFilter";
+import {IDevice} from "tc-shared/audio/recorder";
+import {LogCategory, logWarn} from "tc-shared/log";

-interface NativeDevice extends InputDevice {
-device_index: number;
-native: any;
-}
let _device_cache: NativeDevice[] = undefined;
export function devices() : InputDevice[] {
//TODO: Handle device updates!
if(!audio.initialized()) return [];
return _device_cache || (_device_cache = audio.available_devices().filter(e => e.input_supported || e.input_default).map(e => {
return {
unique_id: e.device_id,
channels: 2, /* TODO */
default_input: e.input_default,
supported: e.input_supported,
name: e.name,
driver: e.driver,
sample_rate: 48000, /* TODO! */
native: e
} as NativeDevice
}));
}
export function device_refresh_available() : boolean { return false; }
export function refresh_devices() : Promise<void> { throw "not supported yet!"; }
export function create_input() : AbstractInput {
return new NativeInput();
}
namespace filters {
export abstract class NativeFilter implements filter.Filter {
type: filter.Type;
handle: NativeInput;
enabled: boolean = false;
protected constructor(handle, type) { this.handle = handle; this.type = type; }
abstract initialize();
abstract finalize();
is_enabled(): boolean { return this.enabled; }
}
export class NThresholdFilter extends NativeFilter implements filter.ThresholdFilter {
static readonly frames_per_second = 1 / (960 / 48000);
private filter: audio.record.ThresholdConsumeFilter;
private _margin_frames: number = 25; /* 120ms */
private _threshold: number = 50;
private _callback_level: any;
private _attack_smooth = 0;
private _release_smooth = 0;
callback_level: (level: number) => any;
constructor(handle) {
super(handle, filter.Type.THRESHOLD);
Object.defineProperty(this, 'callback_level', {
get(): any {
return this._callback_level;
}, set(v: any): void {
if(v === this._callback_level)
return;
this._callback_level = v;
if(this.filter)
this.filter.set_analyze_filter(v);
},
enumerable: true,
configurable: false,
})
}
get_margin_frames(): number {
return this.filter ? this.filter.get_margin_time() * NThresholdFilter.frames_per_second : this._margin_frames;
}
get_threshold(): number {
return this.filter ? this.filter.get_threshold() : this._threshold;
}
set_margin_frames(value: number) {
this._margin_frames = value;
if(this.filter)
this.filter.set_margin_time(value / 960 / 1000);
}
get_attack_smooth(): number {
return this.filter ? this.filter.get_attack_smooth() : this._attack_smooth;
}
get_release_smooth(): number {
return this.filter ? this.filter.get_release_smooth() : this._release_smooth;
}
set_attack_smooth(value: number) {
this._attack_smooth = value;
if(this.filter)
this.filter.set_attack_smooth(value);
}
set_release_smooth(value: number) {
this._release_smooth = value;
if(this.filter)
this.filter.set_release_smooth(value);
}
set_threshold(value: number): Promise<void> {
if(typeof(value) === "string")
value = parseInt(value); /* yes... this happens */
this._threshold = value;
if(this.filter)
this.filter.set_threshold(value);
return Promise.resolve();
}
finalize() {
if(this.filter) {
if(this.handle.consumer)
this.handle.consumer.unregister_filter(this.filter);
this.filter = undefined;
}
}
initialize() {
if(!this.handle.consumer)
return;
this.finalize();
this.filter = this.handle.consumer.create_filter_threshold(this._threshold);
if(this._callback_level)
this.filter.set_analyze_filter(this._callback_level);
this.filter.set_margin_time(this._margin_frames / NThresholdFilter.frames_per_second);
this.filter.set_attack_smooth(this._attack_smooth);
this.filter.set_release_smooth(this._release_smooth);
}
}
export class NStateFilter extends NativeFilter implements filter.StateFilter {
private filter: audio.record.StateConsumeFilter;
private active = false;
constructor(handle) {
super(handle, filter.Type.STATE);
}
finalize() {
if(this.filter) {
if(this.handle.consumer)
this.handle.consumer.unregister_filter(this.filter);
this.filter = undefined;
}
}
initialize() {
if(!this.handle.consumer)
return;
this.finalize();
this.filter = this.handle.consumer.create_filter_state();
this.filter.set_consuming(this.active);
}
is_active(): boolean {
return this.active;
}
async set_state(state: boolean): Promise<void> {
if(this.active === state)
return;
this.active = state;
if(this.filter)
this.filter.set_consuming(state);
}
}
export class NVoiceLevelFilter extends NativeFilter implements filter.VoiceLevelFilter {
static readonly frames_per_second = 1 / (960 / 48000);
private filter: audio.record.VADConsumeFilter;
private level = 3;
private _margin_frames = 6;
constructor(handle) {
super(handle, filter.Type.VOICE_LEVEL);
}
finalize() {
if(this.filter) {
if(this.handle.consumer)
this.handle.consumer.unregister_filter(this.filter);
this.filter = undefined;
}
}
initialize() {
if(!this.handle.consumer)
return;
this.finalize();
this.filter = this.handle.consumer.create_filter_vad(this.level);
this.filter.set_margin_time(this._margin_frames / NVoiceLevelFilter.frames_per_second);
}
get_level(): number {
return this.level;
}
set_level(value: number) {
if(this.level === value)
return;
this.level = value;
if(this.filter) {
this.finalize();
this.initialize();
}
}
set_margin_frames(value: number) {
this._margin_frames = value;
if(this.filter)
this.filter.set_margin_time(value / NVoiceLevelFilter.frames_per_second);
}
get_margin_frames(): number {
return this.filter ? this.filter.get_margin_time() * NVoiceLevelFilter.frames_per_second : this._margin_frames;
}
}
}
export class NativeInput implements AbstractInput { export class NativeInput implements AbstractInput {
private handle: audio.record.AudioRecorder; readonly events: Registry<InputEvents>;
consumer: audio.record.AudioConsumer;
private _current_device: InputDevice; private nativeHandle: audio.record.AudioRecorder;
private _current_state: InputState = InputState.PAUSED; private nativeConsumer: audio.record.AudioConsumer;
callback_begin: () => any; private state: InputState;
callback_end: () => any; private deviceId: string | undefined;
private filters: filters.NativeFilter[] = []; private registeredFilters: (Filter & NativeFilter)[] = [];
private filtered = false;
constructor() { constructor() {
this.handle = audio.record.create_recorder(); this.events = new Registry<InputEvents>();
this.consumer = this.handle.create_consumer(); this.nativeHandle = audio.record.create_recorder();
this.consumer.callback_ended = () => {
if(this._current_state !== InputState.RECORDING)
return;
this._current_state = InputState.DRY; this.nativeConsumer = this.nativeHandle.create_consumer();
if(this.callback_end) this.nativeConsumer.callback_ended = () => {
this.callback_end(); this.filtered = true;
this.events.fire("notify_voice_end");
}; };
this.consumer.callback_started = () => { this.nativeConsumer.callback_started = () => {
if(this._current_state !== InputState.DRY) this.filtered = false;
return; this.events.fire("notify_voice_start");
this._current_state = InputState.RECORDING;
if(this.callback_begin)
this.callback_begin();
}; };
this._current_state = InputState.PAUSED; this.state = InputState.PAUSED;
} }
/* TODO: some kind of finalize? */ async start(): Promise<InputStartResult> {
current_consumer(): InputConsumer | undefined { if(this.state === InputState.RECORDING) {
return { logWarn(LogCategory.VOICE, tr("Tried to start an input recorder twice."));
type: InputConsumerType.NATIVE return InputStartResult.EOK;
}; }
this.state = InputState.INITIALIZING;
try {
const state = await new Promise<audio.record.DeviceSetResult>(resolve => this.nativeHandle.set_device(this.deviceId, resolve));
if(state !== "success") {
if(state === "invalid-device") {
return InputStartResult.EDEVICEUNKNOWN;
} else if(state === undefined) {
throw tr("invalid set device result state");
}
throw state;
}
await new Promise((resolve, reject) => this.nativeHandle.start(result => {
if(result === true) {
resolve();
} else {
reject(typeof result === "string" ? result : tr("failed to start input"));
}
}));
this.state = InputState.RECORDING;
return InputStartResult.EOK;
} finally {
if(this.state === InputState.INITIALIZING) {
this.state = InputState.PAUSED;
}
}
} }
async set_consumer(consumer: InputConsumer): Promise<void> { async stop(): Promise<void> {
if(typeof(consumer) !== "undefined") if(this.state === InputState.PAUSED)
throw "we only support native consumers!"; /* TODO: May create a general wrapper? */
return;
}
async set_device(_device: InputDevice | undefined): Promise<void> {
if(_device === this._current_device)
return; return;
this._current_device = _device; this.nativeHandle.stop();
try { this.state = InputState.PAUSED;
await new Promise(resolve => this.handle.set_device(this._current_device ? this._current_device.unique_id : undefined, resolve));
if(this._current_state !== InputState.PAUSED && this._current_device)
await new Promise((resolve, reject) => {
this.handle.start(flag => {
if(typeof flag === "boolean" && flag)
resolve();
else
reject(typeof flag === "string" ? flag : "failed to start");
});
});
} catch(error) {
console.warn(tr("Failed to start playback on new input device (%o)"), error);
throw error;
}
} }
current_device(): InputDevice | undefined { async setDeviceId(device: string | undefined): Promise<void> {
return this._current_device; if(this.deviceId === device)
return;
this.deviceId = device;
await this.stop();
} }
current_state(): InputState { currentDeviceId(): string | undefined {
return this._current_state; return this.deviceId;
} }
disable_filter(type: filter.Type) { isFiltered(): boolean {
const filter = this.get_filter(type) as filters.NativeFilter; return this.filtered;
if(filter.is_enabled())
filter.enabled = false;
filter.finalize();
} }
enable_filter(type: filter.Type) { removeFilter(filter: Filter) {
const filter = this.get_filter(type) as filters.NativeFilter; const index = this.registeredFilters.indexOf(filter as any);
if(!filter.is_enabled()) { if(index === -1) return;
filter.enabled = true;
filter.initialize(); const [ f ] = this.registeredFilters.splice(index, 1);
} f.finalize();
} }
clear_filter() { createFilter<T extends FilterType>(type: T, priority: number): FilterTypeClass<T> {
for(const filter of this.filters) { let filter;
filter.enabled = false;
filter.finalize();
}
}
get_filter(type: filter.Type): filter.Filter | undefined {
for(const filter of this.filters)
if(filter.type === type)
return filter;
let _filter: filters.NativeFilter;
switch (type) { switch (type) {
case filter.Type.THRESHOLD: case FilterType.STATE:
_filter = new filters.NThresholdFilter(this); filter = new NStateFilter(this, priority);
break; break;
case filter.Type.STATE:
_filter = new filters.NStateFilter(this); case FilterType.THRESHOLD:
filter = new NThresholdFilter(this, priority);
break; break;
case filter.Type.VOICE_LEVEL:
_filter = new filters.NVoiceLevelFilter(this); case FilterType.VOICE_LEVEL:
filter = new NVoiceLevelFilter(this, priority);
break; break;
default:
throw "this filter isn't supported!";
} }
this.filters.push(_filter);
return _filter; this.registeredFilters.push(filter);
return filter;
} }
supports_filter(type: filter.Type) : boolean { supportsFilter(type: FilterType): boolean {
switch (type) { switch (type) {
case filter.Type.THRESHOLD: case FilterType.VOICE_LEVEL:
case filter.Type.STATE: case FilterType.THRESHOLD:
case filter.Type.VOICE_LEVEL: case FilterType.STATE:
return true; return true;
default: default:
return false; return false;
} }
} }
async start(): Promise<InputStartResult> { currentState(): InputState {
try { return this.state;
await this.stop();
} catch(error) {
console.warn(tr("Failed to stop old record session before start (%o)"), error);
}
this._current_state = InputState.DRY;
try {
if(this._current_device)
await new Promise((resolve, reject) => {
this.handle.start(flag => {
if(flag)
resolve();
else
reject("start failed");
});
});
for(const filter of this.filters)
if(filter.is_enabled())
filter.initialize();
return InputStartResult.EOK;
} catch(error) {
this._current_state = InputState.PAUSED;
throw error;
}
} }
async stop(): Promise<void> { currentConsumer(): InputConsumer | undefined {
this.handle.stop(); return {
for(const filter of this.filters) type: InputConsumerType.NATIVE
filter.finalize(); };
if(this.callback_end)
this.callback_end();
this._current_state = InputState.PAUSED;
} }
get_volume(): number { getNativeConsumer() : audio.record.AudioConsumer {
return this.handle.get_volume(); return this.nativeConsumer;
} }
set_volume(volume: number) { async setConsumer(consumer: InputConsumer): Promise<void> {
this.handle.set_volume(volume); if(typeof(consumer) !== "undefined")
throw "we only support native consumers!"; // TODO: May create a general wrapper?
return;
}
setVolume(volume: number) {
this.nativeHandle.set_volume(volume);
}
getVolume(): number {
return this.nativeHandle.get_volume();
} }
} }
export async function create_levelmeter(device: InputDevice) : Promise<LevelMeter> { export class NativeLevelMeter implements LevelMeter {
const meter = new NativeLevelmenter(device as any); readonly _device: IDevice;
await meter.initialize();
return meter;
}
class NativeLevelmenter implements LevelMeter {
readonly _device: NativeDevice;
private _callback: (num: number) => any; private _callback: (num: number) => any;
private _recorder: audio.record.AudioRecorder; private _recorder: audio.record.AudioRecorder;
private _consumer: audio.record.AudioConsumer; private _consumer: audio.record.AudioConsumer;
private _filter: audio.record.ThresholdConsumeFilter; private _filter: audio.record.ThresholdConsumeFilter;
constructor(device: NativeDevice) { constructor(device: IDevice) {
this._device = device; this._device = device;
} }
@ -452,7 +195,7 @@ class NativeLevelmenter implements LevelMeter {
this._filter.set_attack_smooth(.75); this._filter.set_attack_smooth(.75);
this._filter.set_release_smooth(.75); this._filter.set_release_smooth(.75);
await new Promise(resolve => this._recorder.set_device(this._device ? this._device.unique_id : undefined, resolve)); await new Promise(resolve => this._recorder.set_device(this._device.deviceId, resolve));
await new Promise((resolve, reject) => { await new Promise((resolve, reject) => {
this._recorder.start(flag => { this._recorder.start(flag => {
if (typeof flag === "boolean" && flag) if (typeof flag === "boolean" && flag)
@ -475,22 +218,25 @@ class NativeLevelmenter implements LevelMeter {
}); });
} }
destory() { destroy() {
if (this._filter) { if (this._filter) {
this._filter.set_analyze_filter(undefined); this._filter.set_analyze_filter(undefined);
this._consumer.unregister_filter(this._filter); this._consumer.unregister_filter(this._filter);
} }
if (this._consumer)
if (this._consumer) {
this._recorder.delete_consumer(this._consumer); this._recorder.delete_consumer(this._consumer);
this._recorder.stop(); }
this._recorder.set_device(undefined, () => {
}); /* -1 := No device */ if(this._recorder) {
this._recorder.stop();
}
this._recorder = undefined; this._recorder = undefined;
this._consumer = undefined; this._consumer = undefined;
this._filter = undefined; this._filter = undefined;
} }
device(): InputDevice { device(): IDevice {
return this._device; return this._device;
} }

View File

@ -0,0 +1,69 @@
import {AbstractDeviceList, DeviceListEvents, IDevice, PermissionState} from "tc-shared/audio/recorder";
import {Registry} from "tc-shared/events";
import * as loader from "tc-loader";
import {audio} from "tc-native/connection";
interface NativeIDevice extends IDevice {
isDefault: boolean
}
class InputDeviceList extends AbstractDeviceList {
private cachedDevices: NativeIDevice[];
constructor() {
super();
this.setPermissionState("granted");
}
isRefreshAvailable(): boolean {
return false;
}
async refresh(): Promise<void> {
throw "not supported";
}
async requestPermissions(): Promise<PermissionState> {
return "granted";
}
getDefaultDeviceId(): string {
return this.getDevices().find(e => e.isDefault)?.deviceId || "default";
}
getDevices(): NativeIDevice[] {
if(this.cachedDevices)
return this.cachedDevices;
this.cachedDevices = audio.available_devices()
.filter(e => e.input_supported || e.input_default)
.filter(e => e.driver !== "Windows WDM-KS") /* If we're using WDM-KS and opening the microphone view, for some reason the channels get blocked an never release.... */
.map(device => {
return {
deviceId: device.device_id,
name: device.name,
driver: device.driver,
isDefault: device.input_default
}
});
this.setState("healthy");
return this.cachedDevices;
}
getEvents(): Registry<DeviceListEvents> {
return this.events;
}
}
export let inputDeviceList;
loader.register_task(loader.Stage.JAVASCRIPT_INITIALIZING, {
function: async () => {
inputDeviceList = new InputDeviceList();
inputDeviceList.getDevices();
},
priority: 80,
name: "initialize input devices"
});

View File

@ -1,8 +0,0 @@
import * as handler from "../../audio/AudioRecorder";
export const devices = handler.devices;
export const device_refresh_available = handler.device_refresh_available;
export const refresh_devices = handler.refresh_devices;
export const create_input = handler.create_input;
export const create_levelmeter = handler.create_levelmeter;

View File

@ -2,8 +2,7 @@ import {AbstractCommandHandler, AbstractCommandHandlerBoss} from "tc-shared/conn
import {
AbstractServerConnection, CommandOptionDefaults, CommandOptions,
ConnectionStateListener,
-ServerCommand,
-voice
+ServerCommand
} from "tc-shared/connection/ConnectionBase";
import {CommandResult} from "tc-shared/connection/ServerConnectionDeclaration";
import {tr} from "tc-shared/i18n/localize";
@ -13,8 +12,8 @@ import {ConnectionCommandHandler} from "tc-shared/connection/CommandHandler";
import {HandshakeHandler} from "tc-shared/connection/HandshakeHandler";
import {ServerAddress} from "tc-shared/ui/server";
import {TeaSpeakHandshakeHandler} from "tc-shared/profiles/identities/TeamSpeakIdentity";
-import AbstractVoiceConnection = voice.AbstractVoiceConnection;
import {VoiceConnection} from "./VoiceConnection";
+import {AbstractVoiceConnection} from "tc-shared/connection/VoiceConnection";

class ErrorCommandHandler extends AbstractCommandHandler {
private _handle: ServerConnection;
@ -236,7 +235,7 @@ export class ServerConnection extends AbstractServerConnection {
return true;
}

-voice_connection(): AbstractVoiceConnection {
+getVoiceConnection(): AbstractVoiceConnection {
return this._voice_connection;
}
@ -303,17 +302,3 @@ export class NativeConnectionCommandBoss extends AbstractCommandHandlerBoss {
super(connection);
}
}

-/* override the "normal" connection */
-export function spawn_server_connection(handle: ConnectionHandler) : AbstractServerConnection {
-console.log("Spawning native connection");
-return new ServerConnection(handle); /* will be overridden by the client */
-}
-
-export function destroy_server_connection(handle: AbstractServerConnection) {
-if(!(handle instanceof ServerConnection))
-throw "invalid handle";
-//TODO: Here!
-console.log("Call to destroy a server connection");
-}

View File

@ -1,13 +1,15 @@
-import {voice} from "tc-shared/connection/ConnectionBase";
-import AbstractVoiceConnection = voice.AbstractVoiceConnection;
import {ServerConnection} from "./ServerConnection";
import {NativeVoiceConnection} from "tc-native/connection";
import {RecorderProfile} from "tc-shared/voice/RecorderProfile";
import {tr} from "tc-shared/i18n/localize";
import {LogCategory} from "tc-shared/log";
import * as log from "tc-shared/log";
-import VoiceClient = voice.VoiceClient;
-import LatencySettings = voice.LatencySettings;
+import {
+AbstractVoiceConnection,
+LatencySettings,
+VoiceClient,
+VoiceConnectionStatus
+} from "tc-shared/connection/VoiceConnection";
import {NativeInput} from "../audio/AudioRecorder";

export class VoiceConnection extends AbstractVoiceConnection {
@ -56,7 +58,7 @@ export class VoiceConnection extends AbstractVoiceConnection {
this.connection.client.update_voice_status(undefined);
};

-this.handle.set_audio_source((recorder.input as NativeInput).consumer);
+this.handle.set_audio_source((recorder.input as NativeInput).getNativeConsumer());
}
this.connection.client.update_voice_status(undefined);
}
@ -99,15 +101,15 @@ export class VoiceConnection extends AbstractVoiceConnection {
return;
}

-log.info(LogCategory.VOICE, tr("Local voice started"));
+log.info(LogCategory.VOICE, tr("Local voice started (Native)"));
this.handle.enable_voice_send(true);
const ch = chandler.getClient();
if(ch) ch.speaking = true;
}

-connected(): boolean {
-return true; /* we cant be disconnected at any time! */
+getConnectionState(): VoiceConnectionStatus {
+return VoiceConnectionStatus.Connected;
}

voice_recorder(): RecorderProfile {

View File

@ -0,0 +1,20 @@
import {AudioRecorderBacked, DeviceList, IDevice, setRecorderBackend} from "tc-shared/audio/recorder";
import {AbstractInput, LevelMeter} from "tc-shared/voice/RecorderBase";
import {inputDeviceList} from "../audio/InputDeviceList";
import {NativeInput, NativeLevelMeter} from "../audio/AudioRecorder";
setRecorderBackend(new class implements AudioRecorderBacked {
createInput(): AbstractInput {
return new NativeInput();
}
async createLevelMeter(device: IDevice): Promise<LevelMeter> {
const meter = new NativeLevelMeter(device);
await meter.initialize();
return meter;
}
getDeviceList(): DeviceList {
return inputDeviceList;
}
});

View File

@ -0,0 +1,12 @@
import * as loader from "tc-loader";
import {Stage} from "tc-loader";
import {setExternalModalControllerFactory} from "tc-shared/ui/react-elements/external-modal";
import {ExternalModalController} from "../ExternalModalHandler";
loader.register_task(Stage.JAVASCRIPT_INITIALIZING, {
priority: 50,
name: "external modal controller factory setup",
function: async () => {
setExternalModalControllerFactory((modal, events, userData) => new ExternalModalController(modal, events, userData));
}
});

View File

@ -0,0 +1,25 @@
import {ServerConnectionFactory, setServerConnectionFactory} from "tc-shared/connection/ConnectionFactory";
import {ConnectionHandler} from "tc-shared/ConnectionHandler";
import {AbstractServerConnection} from "tc-shared/connection/ConnectionBase";
import * as loader from "tc-loader";
import {Stage} from "tc-loader";
import {ServerConnection} from "../connection/ServerConnection";
loader.register_task(Stage.JAVASCRIPT_INITIALIZING, {
priority: 50,
name: "server connection factory setup",
function: async () => {
setServerConnectionFactory(new class implements ServerConnectionFactory {
create(client: ConnectionHandler): AbstractServerConnection {
return new ServerConnection(client);
}
destroy(instance: AbstractServerConnection) {
if(!(instance instanceof ServerConnection))
throw "invalid handle";
instance.finalize();
}
});
}
});

View File

@ -28,8 +28,6 @@ declare global {
impl_display_critical_error: any;
displayCriticalError: any;
teaclient_initialize: any;
-open_connected_question: () => Promise<boolean>;
}
}
@ -56,8 +54,7 @@ loader.register_task(loader.Stage.INITIALIZING, {
loader.register_task(loader.Stage.INITIALIZING, {
name: "teaclient initialize logging",
function: async () => {
-const logger = require("./logger");
-logger.setup();
+(await import("./Logger")).setup();
},
priority: 80
});
@ -100,19 +97,11 @@ loader.register_task(loader.Stage.INITIALIZING, {
if(process_args.has_value(Arguments.DUMMY_CRASH_RENDERER))
crash_handler.handler.crash();

-if(!process_args.has_flag(Arguments.DEBUG)) {
-window.open_connected_question = () => remote.dialog.showMessageBox(remote.getCurrentWindow(), {
-type: 'question',
-buttons: ['Yes', 'No'],
-title: 'Confirm',
-message: 'Are you really sure?\nYou\'re still connected!'
-}).then(result => result.response === 0);
-}

/* loader url setup */
{
const baseUrl = process_args.value(Arguments.SERVER_URL);
-if(typeof baseUrl === "string") {
+console.error(process_args.value(Arguments.UPDATER_UI_LOAD_TYPE));
+if(typeof baseUrl === "string" && parseFloat((process_args.value(Arguments.UPDATER_UI_LOAD_TYPE)?.toString() || "").trim()) === 3) {
loader.config.baseUrl = baseUrl;
}
}
@ -160,7 +149,12 @@ loader.register_task(loader.Stage.JAVASCRIPT_INITIALIZING, {
await import("./SingleInstanceHandler"); await import("./SingleInstanceHandler");
await import("./IconHelper"); await import("./IconHelper");
await import("./connection/FileTransfer"); await import("./connection/FileTransfer");
await import("./ExternalModalHandler");
await import("./hooks/AudioInput");
await import("./hooks/ExternalModal");
await import("./hooks/ServerConnection");
await import("./UnloadHandler");
} catch (error) { } catch (error) {
console.log(error); console.log(error);
window.displayCriticalError("Failed to load native extensions: " + error); window.displayCriticalError("Failed to load native extensions: " + error);

View File

@ -2,27 +2,27 @@ import * as electron from "electron";
import * as fs from "fs-extra";
import * as path from "path";

-/* We read/write to this file every time again because this file could be used by multible processes */
+/* We read/write to this file every time again because this file could be used by multiple processes */
-const data_file: string = path.join(electron.app.getPath('userData'), "window-bounds.json");
+const data_file: string = path.join((electron.app || electron.remote.app).getPath('userData'), "window-bounds.json");

import BrowserWindow = Electron.BrowserWindow;
import Rectangle = Electron.Rectangle;

-let _changed_data: {[key: string]:Rectangle} = {};
-let _changed_saver: NodeJS.Timer;
+let changedData: {[key: string]:Rectangle} = {};
+let changedDataSaveTimeout: NodeJS.Timer;

export async function save_changes() {
-clearTimeout(_changed_saver);
+clearTimeout(changedDataSaveTimeout);
try {
const data = (await fs.pathExists(data_file) ? await fs.readJson(data_file) : {}) || {};
-Object.assign(data, _changed_data);
+Object.assign(data, changedData);
await fs.ensureFile(data_file);
await fs.writeJson(data_file, data);
path_exists = true;
-_changed_data = {};
+changedData = {};
} catch(error) {
console.warn("Failed to save window bounds: %o", error);
}
@ -51,44 +51,47 @@ export async function get_last_bounds(key: string) : Promise<Rectangle> {
}
}

-export function track_bounds(key: string, window: BrowserWindow) {
+export function startTrackWindowBounds(windowId: string, window: BrowserWindow) {
const events = ['move', 'moved', 'resize'];
-const update_bounds = () => {
-_changed_data[key] = window.getBounds();
-clearTimeout(_changed_saver);
-_changed_saver = setTimeout(save_changes, 1000);
+const onWindowBoundsChanged = () => {
+changedData[windowId] = window.getBounds();
+clearTimeout(changedDataSaveTimeout);
+changedDataSaveTimeout = setTimeout(save_changes, 1000);
};

for(const event of events)
-window.on(event as any, update_bounds);
+window.on(event as any, onWindowBoundsChanged);

window.on('closed', () => {
for(const event of events)
-window.removeListener(event as any, update_bounds);
-})
+window.removeListener(event as any, onWindowBoundsChanged);
+});
}

-export async function apply_bounds(key: string, window: BrowserWindow, bounds?: Rectangle, options?: { apply_size?: boolean; apply_position?: boolean }) {
+export async function loadWindowBounds(windowId: string, window: BrowserWindow, bounds?: Rectangle, options?: { applySize?: boolean; applyPosition?: boolean }) {
-const screen = electron.screen;
+const screen = electron.screen || electron.remote.screen;
-if(!bounds)
-bounds = await get_last_bounds(key);
+if(!bounds) {
+bounds = await get_last_bounds(windowId);
+}
-if(!options)
+if(!options) {
options = {};
+}

const original_bounds = window.getBounds();
-if(typeof(options.apply_size) !== "boolean" || options.apply_size) {
+if(typeof(options.applySize) !== "boolean" || options.applySize) {
let height = bounds.height > 0 ? bounds.height : original_bounds.height;
let width = bounds.width > 0 ? bounds.width : original_bounds.width;
if(height != original_bounds.height || width != original_bounds.width)
window.setSize(width, height, true);
}

-if(typeof(options.apply_position) !== "boolean" || options.apply_position) {
+if(typeof(options.applyPosition) !== "boolean" || options.applyPosition) {
let x = typeof(bounds.x) === "number" ? bounds.x : original_bounds.x;
let y = typeof(bounds.y) === "number" ? bounds.y : original_bounds.y;
@ -101,7 +104,7 @@ export async function apply_bounds(key: string, window: BrowserWindow, bounds?:
flag_invalid = flag_invalid || bounds.y > x || (bounds.y + bounds.height) < y;
if(!flag_invalid) {
window.setPosition(x, y, true);
-console.log("Updating position for %s", key);
+console.log("Updating position for %s", windowId);
}
}
}

View File

@ -57,7 +57,13 @@ function(setup_nodejs)
function(add_nodejs_module NAME)
message("Registering module ${NAME}")
_add_nodejs_module(${NAME} ${ARGN})
-target_compile_features(${NAME} PUBLIC cxx_std_17)
+if(MSVC)
+set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /std:c++17")
+else()
+target_compile_features(${NAME} PUBLIC cxx_std_17)
+endif()
set_target_properties(${NAME}
PROPERTIES
LIBRARY_OUTPUT_DIRECTORY "${NODE_LIB_DIRECTORY}/"
@ -108,6 +114,7 @@ if (MSVC)
)
foreach(CompilerFlag ${CompilerFlags})
string(REPLACE "/MD" "/MT" ${CompilerFlag} "${${CompilerFlag}}")
+string(REGEX REPLACE "/O\\S+($| )" "/02" ${CompilerFlag} "${${CompilerFlag}}")
endforeach()
#add_compile_options("/MTd")
add_compile_options("/EHsc") #We require exception handling

View File

@ -620,7 +620,6 @@ function(add_nodejs_module NAME)
CXX_VISIBILITY_PRESET hidden
POSITION_INDEPENDENT_CODE TRUE
CMAKE_CXX_STANDARD_REQUIRED TRUE
-CXX_STANDARD 11
)

# Handle link flag cases properly

View File

@ -3,15 +3,15 @@ set(MODULE_NAME "teaclient_ppt")
set(SOURCE_FILES src/KeyboardHook.cpp)
if (MSVC)
set(SOURCE_FILES ${SOURCE_FILES} src/Win32KeyboardHook.cpp src/Win32KeyboardHookLL.cpp src/Win32KeyboardRawInput.cpp)
-add_definitions(-DUSING_UV_SHARED)
else()
-add_definitions(-DHAVE_X11)
set(SOURCE_FILES ${SOURCE_FILES} src/X11KeyboardHook.cpp)
endif()

add_nodejs_module(${MODULE_NAME} binding.cc ${SOURCE_FILES})
if (WIN32)
-target_compile_definitions(${MODULE_NAME} PUBLIC /O2)
+target_compile_definitions(${MODULE_NAME} PRIVATE -DUSING_UV_SHARED)
+else()
+target_compile_definitions(${MODULE_NAME} PRIVATE -DHAVE_X11)
endif()

add_executable(Hook-Test ${SOURCE_FILES} test/HookTest.cpp)

View File

@ -151,7 +151,7 @@ set(REQUIRED_LIBRARIES
spdlog::spdlog_header_only
Nan::Helpers
)

if (SOUNDIO_BACKED)
list(APPEND REQUIRED_LIBRARIES soundio::static)
@ -167,15 +167,12 @@ else()
asound
jack.a
pthread
)
endif()

add_definitions(-DNO_OPEN_SSL)
target_link_libraries(${MODULE_NAME} ${REQUIRED_LIBRARIES})
target_compile_definitions(${MODULE_NAME} PUBLIC -DNODEJS_API)
-if (WIN32)
-target_compile_definitions(${MODULE_NAME} PUBLIC /O2)
-endif()

add_executable(Audio-Test ${SOURCE_FILES} test/audio/main.cpp)
target_link_libraries(Audio-Test ${REQUIRED_LIBRARIES})

View File

@ -234,9 +234,10 @@ declare module "tc-native/connection" {
callback_started: () => any;
}

+export type DeviceSetResult = "success" | "invalid-device";

export interface AudioRecorder {
get_device() : string;
-set_device(device_id: string, callback: () => void); /* Recorder needs to be started afterwards */
+set_device(device_id: string, callback: (result: DeviceSetResult) => void); /* Recorder needs to be started afterwards */

start(callback: (result: boolean | string) => void);
started() : boolean;

View File

@ -2,6 +2,7 @@
// Created by WolverinDEV on 09/08/2020.
//

+#include <cmath>
#include "AudioGain.h"
#include "../logger.h"
@ -18,7 +19,7 @@ bool tc::audio::apply_gain(void *vp_buffer, size_t channel_count, size_t sample_
if(value > 1.f) {
log_debug(category::audio, tr("Audio gain apply clipped: {}"), (float) value);
-value = 1.f;
+value = isinf(value) ? 0 : 1.f;
audio_clipped = true;
}
}

View File

@ -11,165 +11,19 @@ using namespace std;
using namespace tc;
using namespace tc::audio;
#if false
class AudioInputSource {
public:
constexpr static auto kChannelCount{2};
constexpr static auto kSampleRate{48000};
explicit AudioInputSource(PaHostApiIndex index) : device_index{index} {}
~AudioInputSource() = default;
/* its blocking! */
void register_consumer(AudioInput* consumer) {
std::lock_guard lock{this->registered_inputs_lock};
if(find(this->registered_inputs.begin(), this->registered_inputs.end(), consumer) != this->registered_inputs.end())
return;
this->registered_inputs.push_back(consumer);
}
/* its blocking */
void remove_consumer(AudioInput* consumer) {
std::lock_guard lock{this->registered_inputs_lock};
auto index = find(this->registered_inputs.begin(), this->registered_inputs.end(), consumer);
if(index == this->registered_inputs.end())
return;
this->registered_inputs.erase(index);
if(!this->registered_inputs.empty())
return;
}
/* this could take a bit longer! */
bool begin_recording(std::string& error) {
std::lock_guard lock{this->state_lock};
if(this->state == RECORDING) return true;
if(this->state != STOPPED) {
if(this->state == DELETED) {
error = "stream has been deleted";
return false;
}
error = "invalid state";
return false;
}
this->current_device = Pa_GetDeviceInfo(this->device_index);
if(!this->current_device) {
error = "failed to get device info";
return false;
}
PaStreamParameters parameters{};
memset(&parameters, 0, sizeof(parameters));
parameters.channelCount = (int) kChannelCount;
parameters.device = this->device_index;
parameters.sampleFormat = paFloat32;
parameters.suggestedLatency = this->current_device->defaultLowOutputLatency;
auto err = Pa_OpenStream(
&this->input_stream,
&parameters,
nullptr,
(double) kSampleRate,
paFramesPerBufferUnspecified,
paClipOff,
&AudioInputSource::pa_audio_callback,
this);
if(err != paNoError) {
this->input_stream = nullptr;
error = to_string(err) + "/" + Pa_GetErrorText(err);
return false;
}
err = Pa_StartStream(this->input_stream);
if(err != paNoError) {
error = "recording failed " + to_string(err) + "/" + Pa_GetErrorText(err);
err = Pa_CloseStream(this->input_stream);
if(err != paNoError)
log_critical(category::audio, tr("Failed to close opened pa stream. This will cause memory leaks. Error: {}/{}"), err, Pa_GetErrorText(err));
return false;
}
this->state = RECORDING;
return true;
}
void stop_recording_if_possible() {
std::lock_guard lock{this->state_lock};
if(this->state != RECORDING) return;
{
std::lock_guard client_lock{this->registered_inputs_lock};
if(!this->registered_inputs.empty()) return;
}
this->state = STOPPED;
if(Pa_IsStreamActive(this->input_stream))
Pa_AbortStream(this->input_stream);
auto error = Pa_CloseStream(this->input_stream);
if(error != paNoError)
log_error(category::audio, tr("Failed to close PA stream: {}"), error);
this->input_stream = nullptr;
}
const PaDeviceIndex device_index;
private:
static int pa_audio_callback(const void *input, void *output, unsigned long frameCount, const PaStreamCallbackTimeInfo* timeInfo, PaStreamCallbackFlags statusFlags, void* _input_source) {
if(!input) return 0; /* this should never happen */
auto input_source = (AudioInputSource*) _input_source;
std::lock_guard lock{input_source->registered_inputs_lock};
for(auto& client : input_source->registered_inputs)
client->consume(input, frameCount, 2);
return 0;
}
std::mutex state_lock{};
enum _state {
STOPPED,
RECORDING,
DELETED
} state{STOPPED};
PaStream* input_stream{nullptr};
const PaDeviceInfo* current_device = nullptr;
std::mutex registered_inputs_lock{};
std::vector<AudioInput*> registered_inputs{};
};
std::mutex input_sources_lock{};
static std::deque<std::shared_ptr<AudioInputSource>> input_sources{};
std::shared_ptr<AudioInputSource> get_input_source(PaDeviceIndex device_index, bool create = true) {
std::lock_guard sources_lock{input_sources_lock};
for(const auto& input : input_sources)
if(input->device_index == device_index)
return input;
if(!create)
return nullptr;
auto input = std::make_shared<AudioInputSource>(device_index);
input_sources.push_back(std::make_shared<AudioInputSource>(device_index));
return input;
}
#endif
AudioConsumer::AudioConsumer(tc::audio::AudioInput *handle, size_t channel_count, size_t sample_rate, size_t frame_size) : AudioConsumer::AudioConsumer(tc::audio::AudioInput *handle, size_t channel_count, size_t sample_rate, size_t frame_size) :
handle(handle), handle(handle),
channel_count(channel_count), channel_count(channel_count),
sample_rate(sample_rate) , sample_rate(sample_rate) ,
frame_size(frame_size) { frame_size(frame_size) {
if(this->frame_size > 0) { if(this->frame_size > 0) {
this->reframer = make_unique<Reframer>(channel_count, frame_size); this->reframer = std::make_unique<Reframer>(channel_count, frame_size);
this->reframer->on_frame = [&](const void* buffer) { this->handle_framed_data(buffer, this->frame_size); }; this->reframer->on_frame = [&](const void* buffer) { this->handle_framed_data(buffer, this->frame_size); };
} }
} }
void AudioConsumer::handle_framed_data(const void *buffer, size_t samples) { void AudioConsumer::handle_framed_data(const void *buffer, size_t samples) {
unique_lock read_callback_lock(this->on_read_lock); std::unique_lock read_callback_lock(this->on_read_lock);
auto function = this->on_read; /* copy */ auto function = this->on_read; /* copy */
read_callback_lock.unlock(); read_callback_lock.unlock();
if(!function) if(!function)
@ -189,7 +43,7 @@ AudioInput::AudioInput(size_t channels, size_t rate) : _channel_count(channels),
AudioInput::~AudioInput() { AudioInput::~AudioInput() {
this->close_device(); this->close_device();
{ {
lock_guard lock(this->consumers_lock); std::lock_guard lock(this->consumers_lock);
for(const auto& consumer : this->_consumers) for(const auto& consumer : this->_consumers)
consumer->handle = nullptr; consumer->handle = nullptr;
} }
@ -197,7 +51,7 @@ AudioInput::~AudioInput() {
} }
void AudioInput::set_device(const std::shared_ptr<AudioDevice> &device) { void AudioInput::set_device(const std::shared_ptr<AudioDevice> &device) {
lock_guard lock(this->input_source_lock); std::lock_guard lock(this->input_source_lock);
if(device == this->input_device) return; if(device == this->input_device) return;
this->close_device(); this->close_device();
@ -205,7 +59,7 @@ void AudioInput::set_device(const std::shared_ptr<AudioDevice> &device) {
} }
void AudioInput::close_device() { void AudioInput::close_device() {
lock_guard lock(this->input_source_lock); std::lock_guard lock(this->input_source_lock);
if(this->input_recorder) { if(this->input_recorder) {
this->input_recorder->remove_consumer(this); this->input_recorder->remove_consumer(this);
this->input_recorder->stop_if_possible(); this->input_recorder->stop_if_possible();
@ -216,12 +70,15 @@ void AudioInput::close_device() {
} }
bool AudioInput::record(std::string& error) { bool AudioInput::record(std::string& error) {
lock_guard lock(this->input_source_lock); std::lock_guard lock(this->input_source_lock);
if(!this->input_device) { if(!this->input_device) {
error = "no device"; error = "no device";
return false; return false;
} }
if(this->input_recorder) return true;
if(this->input_recorder) {
return true;
}
this->input_recorder = this->input_device->record(); this->input_recorder = this->input_device->record();
if(!this->input_recorder) { if(!this->input_recorder) {
@ -255,9 +112,9 @@ void AudioInput::stop() {
} }
std::shared_ptr<AudioConsumer> AudioInput::create_consumer(size_t frame_length) { std::shared_ptr<AudioConsumer> AudioInput::create_consumer(size_t frame_length) {
auto result = shared_ptr<AudioConsumer>(new AudioConsumer(this, this->_channel_count, this->_sample_rate, frame_length)); auto result = std::shared_ptr<AudioConsumer>(new AudioConsumer(this, this->_channel_count, this->_sample_rate, frame_length));
{ {
lock_guard lock(this->consumers_lock); std::lock_guard lock(this->consumers_lock);
this->_consumers.push_back(result); this->_consumers.push_back(result);
} }
return result; return result;
@ -265,7 +122,7 @@ std::shared_ptr<AudioConsumer> AudioInput::create_consumer(size_t frame_length)
void AudioInput::delete_consumer(const std::shared_ptr<AudioConsumer> &source) { void AudioInput::delete_consumer(const std::shared_ptr<AudioConsumer> &source) {
{ {
lock_guard lock(this->consumers_lock); std::lock_guard lock(this->consumers_lock);
auto it = find(this->_consumers.begin(), this->_consumers.end(), source); auto it = find(this->_consumers.begin(), this->_consumers.end(), source);
if(it != this->_consumers.end()) if(it != this->_consumers.end())
this->_consumers.erase(it); this->_consumers.erase(it);
@ -314,13 +171,13 @@ void AudioInput::consume(const void *input, size_t frameCount, size_t channels)
audio::apply_gain(this->resample_buffer, this->_channel_count, frameCount, this->_volume); audio::apply_gain(this->resample_buffer, this->_channel_count, frameCount, this->_volume);
} }
auto begin = chrono::system_clock::now(); auto begin = std::chrono::system_clock::now();
for(const auto& consumer : this->consumers()) for(const auto& consumer : this->consumers())
consumer->process_data(input, frameCount); consumer->process_data(input, frameCount);
auto end = chrono::system_clock::now(); auto end = std::chrono::system_clock::now();
auto ms = chrono::duration_cast<chrono::milliseconds>(end - begin).count(); auto ms = std::chrono::duration_cast<std::chrono::milliseconds>(end - begin).count();
if(ms > 5) { if(ms > 5) {
log_warn(category::audio, tr("Processing of audio input needed {}ms. This could be an issue!"), chrono::duration_cast<chrono::milliseconds>(end - begin).count()); log_warn(category::audio, tr("Processing of audio input needed {}ms. This could be an issue!"), std::chrono::duration_cast<chrono::milliseconds>(end - begin).count());
} }
} }
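The consume() hunk above measures how long the registered consumers take and logs a warning once processing exceeds 5 ms. A self-contained sketch of that timing guard, with an invented helper name and a steady clock (the commit itself measures with system_clock; steady_clock is used here only because it cannot jump backwards):

#include <chrono>
#include <cstdio>
#include <functional>

// Run one processing step and warn when it takes longer than the given budget.
void run_with_budget(const std::function<void()> &process, std::chrono::milliseconds budget) {
    const auto begin = std::chrono::steady_clock::now();
    process();
    const auto end = std::chrono::steady_clock::now();

    const auto elapsed = std::chrono::duration_cast<std::chrono::milliseconds>(end - begin);
    if(elapsed > budget) {
        std::printf("Processing of audio input needed %lldms. This could be an issue!\n",
                    (long long) elapsed.count());
    }
}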

View File

@ -39,7 +39,7 @@ namespace tc::audio {
class AudioInput : public AudioDeviceRecord::Consumer { class AudioInput : public AudioDeviceRecord::Consumer {
friend class ::AudioInputSource; friend class ::AudioInputSource;
public: public:
AudioInput(size_t /* channels */, size_t /* rate */); AudioInput(size_t /* channels */, size_t /* sample rate */);
virtual ~AudioInput(); virtual ~AudioInput();
void set_device(const std::shared_ptr<AudioDevice>& /* device */); void set_device(const std::shared_ptr<AudioDevice>& /* device */);
@ -58,10 +58,10 @@ namespace tc::audio {
std::shared_ptr<AudioConsumer> create_consumer(size_t /* frame size */); std::shared_ptr<AudioConsumer> create_consumer(size_t /* frame size */);
void delete_consumer(const std::shared_ptr<AudioConsumer>& /* source */); void delete_consumer(const std::shared_ptr<AudioConsumer>& /* source */);
inline size_t channel_count() { return this->_channel_count; } [[nodiscard]] inline size_t channel_count() const { return this->_channel_count; }
inline size_t sample_rate() { return this->_sample_rate; } [[nodiscard]] inline size_t sample_rate() const { return this->_sample_rate; }
inline float volume() { return this->_volume; } [[nodiscard]] inline float volume() const { return this->_volume; }
inline void set_volume(float value) { this->_volume = value; } inline void set_volume(float value) { this->_volume = value; }
private: private:
void consume(const void *, size_t, size_t) override; void consume(const void *, size_t, size_t) override;
@ -71,15 +71,15 @@ namespace tc::audio {
std::mutex consumers_lock; std::mutex consumers_lock;
std::deque<std::shared_ptr<AudioConsumer>> _consumers; std::deque<std::shared_ptr<AudioConsumer>> _consumers;
std::unique_ptr<AudioResampler> _resampler{nullptr};
std::recursive_mutex input_source_lock; std::recursive_mutex input_source_lock;
std::unique_ptr<AudioResampler> _resampler{nullptr};
std::shared_ptr<AudioDevice> input_device{}; std::shared_ptr<AudioDevice> input_device{};
void* resample_buffer{nullptr}; void* resample_buffer{nullptr};
size_t resample_buffer_size{0}; size_t resample_buffer_size{0};
float _volume = 1.f; float _volume{1.f};
std::shared_ptr<AudioDeviceRecord> input_recorder{}; std::shared_ptr<AudioDeviceRecord> input_recorder{};
}; };
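The header now marks its trivial getters [[nodiscard]] and const, so discarding the returned value or calling them through a const reference produces a compiler diagnostic instead of silently doing nothing. A hedged illustration of that accessor style with an invented class:

#include <cstddef>

class Meter {                               /* illustrative only, not part of this commit */
    public:
        [[nodiscard]] std::size_t sample_rate() const { return this->sample_rate_; }
        [[nodiscard]] float volume() const { return this->volume_; }
        void set_volume(float value) { this->volume_ = value; }

    private:
        std::size_t sample_rate_{48000};
        float volume_{1.f};                 /* brace-initialised, matching the _volume{1.f} change above */
};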

View File

@ -149,23 +149,28 @@ NAN_METHOD(AudioRecorderWrapper::_set_device) {
return; return;
} }
unique_ptr<Nan::Persistent<v8::Function>> _callback = make_unique<Nan::Persistent<v8::Function>>(info[1].As<v8::Function>());
unique_ptr<Nan::Persistent<v8::Object>> _recorder = make_unique<Nan::Persistent<v8::Object>>(info.Holder());
auto call_callback = [call = std::move(_callback), recorder = move(_recorder)](const std::string& status) {
Nan::HandleScope scope;
auto callback_function = call->Get(Nan::GetCurrentContext()->GetIsolate());
v8::Local<v8::Value> args[1];
args[0] = Nan::LocalStringUTF8(status);
(void) callback_function->Call(Nan::GetCurrentContext(), Nan::Undefined(), 1, args);
recorder->Reset();
call->Reset();
};
auto device = is_null_device ? nullptr : audio::find_device_by_id(*Nan::Utf8String(info[0]), true); auto device = is_null_device ? nullptr : audio::find_device_by_id(*Nan::Utf8String(info[0]), true);
if(!device && !is_null_device) { if(!device && !is_null_device) {
Nan::ThrowError("invalid device id"); call_callback("invalid-device");
return; return;
} }
unique_ptr<Nan::Persistent<v8::Function>> _callback = make_unique<Nan::Persistent<v8::Function>>(info[1].As<v8::Function>()); auto _async_callback = Nan::async_callback([callback = std::move(call_callback)] {
unique_ptr<Nan::Persistent<v8::Object>> _recorder = make_unique<Nan::Persistent<v8::Object>>(info.Holder()); callback("success");
auto _async_callback = Nan::async_callback([call = std::move(_callback), recorder = move(_recorder)] {
Nan::HandleScope scope;
auto callback_function = call->Get(Nan::GetCurrentContext()->GetIsolate());
(void) callback_function->Call(Nan::GetCurrentContext(), Nan::Undefined(), 0, nullptr);
recorder->Reset();
call->Reset();
}).option_destroyed_execute(true); }).option_destroyed_execute(true);
std::thread([_async_callback, input, device]{ std::thread([_async_callback, input, device]{
@ -189,10 +194,11 @@ NAN_METHOD(AudioRecorderWrapper::_start) {
std::string error{}; std::string error{};
v8::Local<v8::Value> argv[1]; v8::Local<v8::Value> argv[1];
if(input->record(error)) if(input->record(error)) {
argv[0] = Nan::New<v8::Boolean>(true); argv[0] = Nan::New<v8::Boolean>(true);
else } else {
argv[0] = Nan::LocalString(error); argv[0] = Nan::LocalString(error);
}
(void) info[0].As<v8::Function>()->Call(Nan::GetCurrentContext(), Nan::Undefined(), 1, argv); (void) info[0].As<v8::Function>()->Call(Nan::GetCurrentContext(), Nan::Undefined(), 1, argv);
} }
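In the _set_device hunk the completion callback is now built before the device lookup, so an unknown device id is reported back through that same callback ("invalid-device") instead of throwing into JavaScript. The shape of that pattern in plain C++, with every name invented for illustration and a stand-in lookup instead of the project's audio::find_device_by_id:

#include <functional>
#include <string>
#include <thread>
#include <utility>

// Resolve a device asynchronously and report every outcome through one callback.
void set_device_sketch(const std::string &device_id,
                       std::function<void(const std::string & /* status */)> callback) {
    const bool is_null_device = device_id.empty();
    const bool device_known = is_null_device || device_id == "default"; /* stand-in lookup */

    if(!device_known) {
        callback("invalid-device");          /* early exits use the same reporting path */
        return;
    }

    std::thread([callback = std::move(callback)] {
        /* ... apply the device on a worker thread ... */
        callback("success");
    }).detach();
}

Routing errors through the callback keeps the JavaScript side to a single result handler for both the synchronous and asynchronous failure cases.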

View File

@ -4,61 +4,59 @@
#include <mutex> #include <mutex>
#include <deque> #include <deque>
namespace tc { namespace tc::audio {
namespace audio { class AudioInput;
class AudioInput;
namespace recorder { namespace recorder {
class AudioConsumerWrapper; class AudioConsumerWrapper;
extern NAN_MODULE_INIT(init_js); extern NAN_MODULE_INIT(init_js);
extern NAN_METHOD(create_recorder); extern NAN_METHOD(create_recorder);
class AudioRecorderWrapper : public Nan::ObjectWrap { class AudioRecorderWrapper : public Nan::ObjectWrap {
public: public:
static NAN_MODULE_INIT(Init); static NAN_MODULE_INIT(Init);
static NAN_METHOD(NewInstance); static NAN_METHOD(NewInstance);
static inline Nan::Persistent<v8::Function> & constructor() { static inline Nan::Persistent<v8::Function> & constructor() {
static Nan::Persistent<v8::Function> my_constructor; static Nan::Persistent<v8::Function> my_constructor;
return my_constructor; return my_constructor;
} }
explicit AudioRecorderWrapper(std::shared_ptr<AudioInput> /* input */); explicit AudioRecorderWrapper(std::shared_ptr<AudioInput> /* input */);
~AudioRecorderWrapper() override; ~AudioRecorderWrapper() override;
static NAN_METHOD(_get_device); static NAN_METHOD(_get_device);
static NAN_METHOD(_set_device); static NAN_METHOD(_set_device);
static NAN_METHOD(_start); static NAN_METHOD(_start);
static NAN_METHOD(_started); static NAN_METHOD(_started);
static NAN_METHOD(_stop); static NAN_METHOD(_stop);
static NAN_METHOD(_create_consumer); static NAN_METHOD(_create_consumer);
static NAN_METHOD(_get_consumers); static NAN_METHOD(_get_consumers);
static NAN_METHOD(_delete_consumer); static NAN_METHOD(_delete_consumer);
static NAN_METHOD(_set_volume); static NAN_METHOD(_set_volume);
static NAN_METHOD(_get_volume); static NAN_METHOD(_get_volume);
std::shared_ptr<AudioConsumerWrapper> create_consumer(); std::shared_ptr<AudioConsumerWrapper> create_consumer();
void delete_consumer(const AudioConsumerWrapper*); void delete_consumer(const AudioConsumerWrapper*);
inline std::deque<std::shared_ptr<AudioConsumerWrapper>> consumers() { inline std::deque<std::shared_ptr<AudioConsumerWrapper>> consumers() {
std::lock_guard lock(this->_consumer_lock); std::lock_guard lock(this->_consumer_lock);
return this->_consumers; return this->_consumers;
} }
void do_wrap(const v8::Local<v8::Object>& /* obj */); void do_wrap(const v8::Local<v8::Object>& /* obj */);
inline void js_ref() { this->Ref(); } inline void js_ref() { this->Ref(); }
inline void js_unref() { this->Unref(); } inline void js_unref() { this->Unref(); }
private: private:
std::shared_ptr<AudioInput> _input; std::shared_ptr<AudioInput> _input;
std::mutex _consumer_lock; std::mutex _consumer_lock;
std::deque<std::shared_ptr<AudioConsumerWrapper>> _consumers; std::deque<std::shared_ptr<AudioConsumerWrapper>> _consumers;
}; };
} }
}
} }
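The header above collapses the nested namespace tc { namespace audio { ... } } into the C++17 nested-namespace form namespace tc::audio { ... }; the enclosed declarations themselves are unchanged. A minimal before/after illustration:

// Pre-C++17 spelling:
namespace tc { namespace audio {
    class AudioInput;
} }

// C++17 nested-namespace definition, as used after this commit:
namespace tc::audio {
    class AudioInput;   /* any further declarations stay exactly as they were */
}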

View File

@ -21,7 +21,7 @@ namespace tc {
friend inline std::shared_ptr<_Tp> std::static_pointer_cast(const std::shared_ptr<_Up>& __r) noexcept; friend inline std::shared_ptr<_Tp> std::static_pointer_cast(const std::shared_ptr<_Up>& __r) noexcept;
friend class VoiceConnection; friend class VoiceConnection;
public: public:
VoiceSender(VoiceConnection*); explicit VoiceSender(VoiceConnection*);
virtual ~VoiceSender(); virtual ~VoiceSender();
codec::value get_codec() { return this->_current_codec; } codec::value get_codec() { return this->_current_codec; }
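Marking the single-argument VoiceSender constructor explicit stops a raw VoiceConnection* from implicitly converting into a VoiceSender. A small stand-alone example of the difference, with invented types:

struct Connection {};

struct Sender {
    explicit Sender(Connection * /* connection */) {}
};

void transmit(const Sender & /* sender */) {}

int main() {
    Connection connection{};
    transmit(Sender{&connection});   // construction must be spelled out
    // transmit(&connection);        // would only compile without `explicit`
    return 0;
}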

View File

@ -37,17 +37,17 @@ NAN_MODULE_INIT(VoiceConnectionWrap::Init) {
Nan::SetPrototypeMethod(klass, "decoding_supported", VoiceConnectionWrap::_decoding_supported); Nan::SetPrototypeMethod(klass, "decoding_supported", VoiceConnectionWrap::_decoding_supported);
Nan::SetPrototypeMethod(klass, "encoding_supported", VoiceConnectionWrap::_encoding_supported); Nan::SetPrototypeMethod(klass, "encoding_supported", VoiceConnectionWrap::_encoding_supported);
Nan::SetPrototypeMethod(klass, "register_client", VoiceConnectionWrap::_register_client); Nan::SetPrototypeMethod(klass, "register_client", VoiceConnectionWrap::register_client);
Nan::SetPrototypeMethod(klass, "available_clients", VoiceConnectionWrap::_available_clients); Nan::SetPrototypeMethod(klass, "available_clients", VoiceConnectionWrap::available_clients);
Nan::SetPrototypeMethod(klass, "unregister_client", VoiceConnectionWrap::_unregister_client); Nan::SetPrototypeMethod(klass, "unregister_client", VoiceConnectionWrap::unregister_client);
Nan::SetPrototypeMethod(klass, "audio_source", VoiceConnectionWrap::_audio_source); Nan::SetPrototypeMethod(klass, "audio_source", VoiceConnectionWrap::audio_source);
Nan::SetPrototypeMethod(klass, "set_audio_source", VoiceConnectionWrap::_set_audio_source); Nan::SetPrototypeMethod(klass, "set_audio_source", VoiceConnectionWrap::set_audio_source);
Nan::SetPrototypeMethod(klass, "get_encoder_codec", VoiceConnectionWrap::_get_encoder_codec); Nan::SetPrototypeMethod(klass, "get_encoder_codec", VoiceConnectionWrap::get_encoder_codec);
Nan::SetPrototypeMethod(klass, "set_encoder_codec", VoiceConnectionWrap::_set_encoder_codec); Nan::SetPrototypeMethod(klass, "set_encoder_codec", VoiceConnectionWrap::set_encoder_codec);
Nan::SetPrototypeMethod(klass, "enable_voice_send", VoiceConnectionWrap::_enable_voice_send); Nan::SetPrototypeMethod(klass, "enable_voice_send", VoiceConnectionWrap::enable_voice_send);
constructor().Reset(Nan::GetFunction(klass).ToLocalChecked()); constructor().Reset(Nan::GetFunction(klass).ToLocalChecked());
} }
@ -81,17 +81,15 @@ NAN_METHOD(VoiceConnectionWrap::_decoding_supported) {
info.GetReturnValue().Set(codec >= 4 && codec <= 5); /* ignore SPEX currently :/ */ info.GetReturnValue().Set(codec >= 4 && codec <= 5); /* ignore SPEX currently :/ */
} }
NAN_METHOD(VoiceConnectionWrap::_register_client) {
return ObjectWrap::Unwrap<VoiceConnectionWrap>(info.Holder())->register_client(info);
}
NAN_METHOD(VoiceConnectionWrap::register_client) { NAN_METHOD(VoiceConnectionWrap::register_client) {
auto connection = ObjectWrap::Unwrap<VoiceConnectionWrap>(info.Holder());
if(info.Length() != 1) { if(info.Length() != 1) {
Nan::ThrowError("invalid argument count"); Nan::ThrowError("invalid argument count");
return; return;
} }
auto id = info[0]->Uint32Value(Nan::GetCurrentContext()).FromMaybe(0); auto id = info[0]->Uint32Value(Nan::GetCurrentContext()).FromMaybe(0);
auto handle = this->handle.lock(); auto handle = connection->handle.lock();
if(!handle) { if(!handle) {
Nan::ThrowError("handle has been deallocated"); Nan::ThrowError("handle has been deallocated");
return; return;
@ -106,14 +104,10 @@ NAN_METHOD(VoiceConnectionWrap::register_client) {
info.GetReturnValue().Set(client->js_handle()); info.GetReturnValue().Set(client->js_handle());
} }
NAN_METHOD(VoiceConnectionWrap::_available_clients) {
return ObjectWrap::Unwrap<VoiceConnectionWrap>(info.Holder())->available_clients(info);
}
NAN_METHOD(VoiceConnectionWrap::available_clients) { NAN_METHOD(VoiceConnectionWrap::available_clients) {
auto handle = this->handle.lock(); auto connection = ObjectWrap::Unwrap<VoiceConnectionWrap>(info.Holder());
auto handle = connection->handle.lock();
if(!handle) { if(!handle) {
Nan::ThrowError("handle has been deallocated"); Nan::ThrowError("handle has been deallocated");
return; return;
@ -128,20 +122,20 @@ NAN_METHOD(VoiceConnectionWrap::available_clients) {
info.GetReturnValue().Set(result); info.GetReturnValue().Set(result);
} }
NAN_METHOD(VoiceConnectionWrap::_unregister_client) {
return ObjectWrap::Unwrap<VoiceConnectionWrap>(info.Holder())->unregister_client(info);
}
NAN_METHOD(VoiceConnectionWrap::unregister_client) { NAN_METHOD(VoiceConnectionWrap::unregister_client) {
auto connection = ObjectWrap::Unwrap<VoiceConnectionWrap>(info.Holder());
if(info.Length() != 1) { if(info.Length() != 1) {
Nan::ThrowError("invalid argument count"); Nan::ThrowError("invalid argument count");
return; return;
} }
auto id = info[0]->Uint32Value(Nan::GetCurrentContext()).FromMaybe(0); auto id = info[0]->Uint32Value(Nan::GetCurrentContext()).FromMaybe(0);
auto handle = this->handle.lock(); auto handle = connection->handle.lock();
if(!handle) { if(!handle) {
Nan::ThrowError("handle has been deallocated"); Nan::ThrowError("handle has been deallocated");
return; return;
} }
auto client = handle->find_client(id); auto client = handle->find_client(id);
if(!client) { if(!client) {
Nan::ThrowError("missing client"); Nan::ThrowError("missing client");
@ -152,16 +146,14 @@ NAN_METHOD(VoiceConnectionWrap::unregister_client) {
handle->delete_client(client); handle->delete_client(client);
} }
NAN_METHOD(VoiceConnectionWrap::_audio_source) { NAN_METHOD(VoiceConnectionWrap::audio_source) {
auto client = ObjectWrap::Unwrap<VoiceConnectionWrap>(info.Holder()); auto client = ObjectWrap::Unwrap<VoiceConnectionWrap>(info.Holder());
info.GetReturnValue().Set(client->_voice_recoder_handle.Get(info.GetIsolate())); info.GetReturnValue().Set(client->_voice_recoder_handle.Get(info.GetIsolate()));
} }
NAN_METHOD(VoiceConnectionWrap::_set_audio_source) {
ObjectWrap::Unwrap<VoiceConnectionWrap>(info.Holder())->set_audio_source(info);
}
NAN_METHOD(VoiceConnectionWrap::set_audio_source) { NAN_METHOD(VoiceConnectionWrap::set_audio_source) {
auto connection = ObjectWrap::Unwrap<VoiceConnectionWrap>(info.Holder());
if(info.Length() != 1) { if(info.Length() != 1) {
Nan::ThrowError("invalid argument count"); Nan::ThrowError("invalid argument count");
return; return;
@ -173,36 +165,38 @@ NAN_METHOD(VoiceConnectionWrap::set_audio_source) {
return; return;
} }
if(!this->handle.lock()) { auto handle = connection->handle.lock();
if(!handle) {
Nan::ThrowError("handle has been deallocated"); Nan::ThrowError("handle has been deallocated");
return; return;
} }
this->release_recorder(); connection->release_recorder();
if(!info[0]->IsNullOrUndefined()) { if(!info[0]->IsNullOrUndefined()) {
this->_voice_recoder_ptr = ObjectWrap::Unwrap<audio::recorder::AudioConsumerWrapper>(info[0]->ToObject(Nan::GetCurrentContext()).ToLocalChecked()); connection->_voice_recoder_ptr = ObjectWrap::Unwrap<audio::recorder::AudioConsumerWrapper>(info[0]->ToObject(Nan::GetCurrentContext()).ToLocalChecked());
this->_voice_recoder_handle.Reset(info[0]->ToObject(Nan::GetCurrentContext()).ToLocalChecked()); connection->_voice_recoder_handle.Reset(info[0]->ToObject(Nan::GetCurrentContext()).ToLocalChecked());
auto native_consumer = this->_voice_recoder_ptr->native_consumer(); auto native_consumer = connection->_voice_recoder_ptr->native_consumer();
weak_ptr weak_handle = this->handle; weak_ptr weak_handle = handle;
auto sample_rate = native_consumer->sample_rate; auto sample_rate = native_consumer->sample_rate;
auto channels = native_consumer->channel_count; auto channels = native_consumer->channel_count;
lock_guard read_lock(this->_voice_recoder_ptr->native_read_callback_lock); lock_guard read_lock(connection->_voice_recoder_ptr->native_read_callback_lock);
this->_voice_recoder_ptr->native_read_callback = [weak_handle, sample_rate, channels](const void* buffer, size_t length) { connection->_voice_recoder_ptr->native_read_callback = [weak_handle, sample_rate, channels](const void* buffer, size_t length) {
auto handle = weak_handle.lock(); auto handle = weak_handle.lock();
if(!handle) { if(!handle) {
log_warn(category::audio, tr("Missing voice connection handle. Dropping input!")); log_warn(category::audio, tr("Missing voice connection handle. Dropping input!"));
return; return;
} }
shared_ptr<VoiceSender> sender = handle->voice_sender(); auto sender = handle->voice_sender();
if(sender) { if(sender) {
if(length > 0 && buffer) if(length > 0 && buffer) {
sender->send_data(buffer, length, sample_rate, channels); sender->send_data(buffer, length, sample_rate, channels);
else } else {
sender->send_stop(); sender->send_stop();
}
} else { } else {
log_warn(category::audio, tr("Missing voice connection audio sender. Dropping input!")); log_warn(category::audio, tr("Missing voice connection audio sender. Dropping input!"));
return; return;
@ -211,9 +205,9 @@ NAN_METHOD(VoiceConnectionWrap::set_audio_source) {
} }
} }
NAN_METHOD(VoiceConnectionWrap::_get_encoder_codec) { NAN_METHOD(VoiceConnectionWrap::get_encoder_codec) {
auto _this = ObjectWrap::Unwrap<VoiceConnectionWrap>(info.Holder()); auto connection = ObjectWrap::Unwrap<VoiceConnectionWrap>(info.Holder());
auto handle = _this->handle.lock(); auto handle = connection->handle.lock();
if(!handle) { if(!handle) {
Nan::ThrowError("handle has been deallocated"); Nan::ThrowError("handle has been deallocated");
return; return;
@ -222,9 +216,9 @@ NAN_METHOD(VoiceConnectionWrap::_get_encoder_codec) {
info.GetReturnValue().Set(handle->get_encoder_codec()); info.GetReturnValue().Set(handle->get_encoder_codec());
} }
NAN_METHOD(VoiceConnectionWrap::_set_encoder_codec) { NAN_METHOD(VoiceConnectionWrap::set_encoder_codec) {
auto _this = ObjectWrap::Unwrap<VoiceConnectionWrap>(info.Holder()); auto connection = ObjectWrap::Unwrap<VoiceConnectionWrap>(info.Holder());
auto handle = _this->handle.lock(); auto handle = connection->handle.lock();
if(!handle) { if(!handle) {
Nan::ThrowError("handle has been deallocated"); Nan::ThrowError("handle has been deallocated");
return; return;
@ -239,7 +233,7 @@ NAN_METHOD(VoiceConnectionWrap::_set_encoder_codec) {
handle->set_encoder_codec((uint8_t) info[0]->NumberValue(Nan::GetCurrentContext()).FromMaybe(0)); handle->set_encoder_codec((uint8_t) info[0]->NumberValue(Nan::GetCurrentContext()).FromMaybe(0));
} }
NAN_METHOD(VoiceConnectionWrap::_enable_voice_send) { NAN_METHOD(VoiceConnectionWrap::enable_voice_send) {
auto _this = ObjectWrap::Unwrap<VoiceConnectionWrap>(info.Holder()); auto _this = ObjectWrap::Unwrap<VoiceConnectionWrap>(info.Holder());
auto handle = _this->handle.lock(); auto handle = _this->handle.lock();
if(!handle) { if(!handle) {

View File

@ -29,7 +29,7 @@ namespace tc {
} }
explicit VoiceConnectionWrap(const std::shared_ptr<VoiceConnection>&); explicit VoiceConnectionWrap(const std::shared_ptr<VoiceConnection>&);
virtual ~VoiceConnectionWrap(); ~VoiceConnectionWrap() override;
void do_wrap(const v8::Local<v8::Object>&); void do_wrap(const v8::Local<v8::Object>&);
private: private:
@ -37,20 +37,16 @@ namespace tc {
static NAN_METHOD(_encoding_supported); static NAN_METHOD(_encoding_supported);
static NAN_METHOD(_decoding_supported); static NAN_METHOD(_decoding_supported);
static NAN_METHOD(_register_client); static NAN_METHOD(register_client);
NAN_METHOD(register_client); static NAN_METHOD(available_clients);
static NAN_METHOD(_available_clients); static NAN_METHOD(unregister_client);
NAN_METHOD(available_clients);
static NAN_METHOD(_unregister_client);
NAN_METHOD(unregister_client);
static NAN_METHOD(_audio_source); static NAN_METHOD(audio_source);
static NAN_METHOD(_set_audio_source); static NAN_METHOD(set_audio_source);
NAN_METHOD(set_audio_source);
static NAN_METHOD(_get_encoder_codec); static NAN_METHOD(get_encoder_codec);
static NAN_METHOD(_set_encoder_codec); static NAN_METHOD(set_encoder_codec);
static NAN_METHOD(_enable_voice_send); static NAN_METHOD(enable_voice_send);
void release_recorder(); void release_recorder();
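The wrapper now exposes one static NAN_METHOD per operation and unwraps the C++ instance inside it, instead of keeping a static stub that forwards to a non-static member method. A hedged sketch of that pattern with an invented wrapper class; only the ObjectWrap::Unwrap / info.Holder() calls are taken from the diff above:

#include <nan.h>

class ExampleWrap : public Nan::ObjectWrap {
    public:
        // One static entry point; the wrapped instance is recovered from the holder.
        static NAN_METHOD(tick) {
            auto *instance = Nan::ObjectWrap::Unwrap<ExampleWrap>(info.Holder());
            instance->counter_ += 1;
            info.GetReturnValue().Set(Nan::New<v8::Number>(instance->counter_));
        }

    private:
        int counter_{0};
};

Folding the stub into the static method removes one indirection per call and keeps the argument validation and the handle.lock() check in a single place.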

View File

@ -1,6 +1,6 @@
{ {
"name": "TeaClient", "name": "TeaClient",
"version": "1.4.9", "version": "1.4.10",
"description": "", "description": "",
"main": "main.js", "main": "main.js",
"scripts": { "scripts": {