v0.0.43: FC-P3-T4 — voice calls via WZP audio bridge
Web client:
- After call goes "active", connects to WZP web bridge WS
- Mic capture: getUserMedia → ScriptProcessor → PCM int16 frames → WS
- Playback: WS → PCM int16 → Float32 → AudioContext.createBufferSource
- Room name derived from peer fingerprint (deterministic)
- Relay address fetched from /v1/wzp/relay-config
- Audio auto-starts on accept/answer, auto-stops on hangup/reject
- startAudio()/stopAudio() manage full lifecycle

TUI:
- /call shows "Audio: use web client for voice (TUI audio coming soon)"
- Signaling works, audio requires web client for now

This completes the last critical task — voice calls work end-to-end:
User A calls → signaling via featherChat WS → User B accepts → both connect to WZP relay → audio flows

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
10
warzone/Cargo.lock
generated
10
warzone/Cargo.lock
generated
@@ -2956,7 +2956,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "warzone-client"
|
||||
version = "0.0.42"
|
||||
version = "0.0.43"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"argon2",
|
||||
@@ -2989,7 +2989,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "warzone-mule"
|
||||
version = "0.0.42"
|
||||
version = "0.0.43"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"clap",
|
||||
@@ -2998,7 +2998,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "warzone-protocol"
|
||||
version = "0.0.42"
|
||||
version = "0.0.43"
|
||||
dependencies = [
|
||||
"base64",
|
||||
"bincode",
|
||||
@@ -3023,7 +3023,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "warzone-server"
|
||||
version = "0.0.42"
|
||||
version = "0.0.43"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"axum",
|
||||
@@ -3054,7 +3054,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "warzone-wasm"
|
||||
version = "0.0.42"
|
||||
version = "0.0.43"
|
||||
dependencies = [
|
||||
"base64",
|
||||
"bincode",
|
||||
|
||||
@@ -9,7 +9,7 @@ members = [
|
||||
]
|
||||
|
||||
[workspace.package]
|
||||
version = "0.0.42"
|
||||
version = "0.0.43"
|
||||
edition = "2021"
|
||||
license = "MIT"
|
||||
rust-version = "1.75"
|
||||
|
||||
@@ -571,6 +571,7 @@ impl App {
|
||||
.map(|s| if s.len() > 16 { format!("{}...", &s[..16]) } else { s.to_string() })
|
||||
.unwrap_or_default();
|
||||
self.add_message(ChatLine { sender: "system".into(), text: format!("📞 Calling {}...", display), is_system: true, is_self: false, message_id: None, sender_fp: None, timestamp: Local::now() });
|
||||
self.add_message(ChatLine { sender: "system".into(), text: "Audio: use web client for voice (TUI audio coming soon)".into(), is_system: true, is_self: false, message_id: None, sender_fp: None, timestamp: Local::now() });
|
||||
self.call_state = Some(super::types::CallInfo {
|
||||
peer_fp: peer_fp_clean.clone(),
|
||||
peer_display: display.clone(),
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "warzone-protocol"
|
||||
version = "0.0.42"
|
||||
version = "0.0.43"
|
||||
edition = "2021"
|
||||
license = "MIT"
|
||||
description = "Core crypto & wire protocol for featherChat (Warzone messenger)"
|
||||
|
||||
@@ -50,7 +50,7 @@ async fn pwa_manifest() -> impl IntoResponse {
|
||||
|
||||
async fn service_worker() -> impl IntoResponse {
|
||||
([(header::CONTENT_TYPE, "application/javascript")], r##"
|
||||
const CACHE = 'wz-v24';
|
||||
const CACHE = 'wz-v25';
|
||||
const SHELL = ['/', '/wasm/warzone_wasm.js', '/wasm/warzone_wasm_bg.wasm', '/icon.svg', '/manifest.json'];
|
||||
|
||||
self.addEventListener('install', e => {
|
||||
@@ -287,7 +287,7 @@ let pollTimer = null;
|
||||
let ws = null; // WebSocket connection
|
||||
let wasmReady = false;
|
||||
|
||||
const VERSION = '0.0.42';
|
||||
const VERSION = '0.0.43';
|
||||
let DEBUG = true; // toggle with /debug command
|
||||
|
||||
// ── Receipt tracking ──
|
||||
@@ -1304,6 +1304,7 @@ function acceptCall() {
|
||||
payload.set(signalBytes, header.length);
|
||||
ws.send(payload);
|
||||
addSys('Call accepted');
|
||||
startAudio();
|
||||
}
|
||||
} catch(e) { addSys('Accept failed: ' + e.message); }
|
||||
}
|
||||
@@ -1321,6 +1322,7 @@ function rejectCall() {
|
||||
ws.send(payload);
|
||||
}
|
||||
} catch(e) {}
|
||||
stopAudio();
|
||||
addSys('Call rejected');
|
||||
callState = 'idle';
|
||||
callPeer = null;
|
||||
@@ -1340,6 +1342,7 @@ function hangupCall() {
|
||||
ws.send(payload);
|
||||
}
|
||||
} catch(e) {}
|
||||
stopAudio();
|
||||
addSys('Call ended');
|
||||
callState = 'idle';
|
||||
callPeer = null;
|
||||
@@ -1366,11 +1369,13 @@ function handleCallSignal(signal) {
|
||||
callState = 'active';
|
||||
updateCallUI();
|
||||
addSys('Call connected!');
|
||||
startAudio();
|
||||
}
|
||||
break;
|
||||
case 'hangup':
|
||||
case 'reject':
|
||||
if (callState !== 'idle') {
|
||||
stopAudio();
|
||||
addSys('Call ended' + (type === 'reject' ? ' (rejected)' : ''));
|
||||
callState = 'idle';
|
||||
callPeer = null;
|
||||
@@ -1393,6 +1398,146 @@ function handleCallSignal(signal) {
|
||||
}
|
||||
}
|
||||
|
||||
// ═══════════════════════════════════════════════
// SECTION: Audio Bridge (WZP integration)
// ═══════════════════════════════════════════════

// Module-level audio state. Created by startAudio() when a call goes
// active, and torn down (reset to null) by stopAudio().
let audioWs = null;      // WebSocket to the WZP relay room (binary PCM frames)
let audioCtx = null;     // AudioContext at 48 kHz, used for both capture and playback
let mediaStream = null;  // MediaStream from getUserMedia (microphone)
let captureNode = null;  // ScriptProcessorNode that pushes int16 PCM frames to audioWs
let playbackNode = null; // playback bookkeeping object set in audioWs.onopen ({ queue: [] })
||||
async function startAudio() {
  // Establish the voice path for the current call:
  //   1. fetch the WZP relay address from the featherChat server,
  //   2. request the microphone,
  //   3. open a WebSocket to the relay room derived from the peer fingerprint,
  //   4. stream mic PCM out and play incoming PCM back.
  // Any failure is reported via addSys() and aborts audio setup; the
  // signaling call itself is left untouched.

  // 1. Fetch relay config.
  let relayAddr;
  try {
    const resp = await fetch(SERVER + '/v1/wzp/relay-config');
    const data = await resp.json();
    relayAddr = data.relay_addr;
    dbg('Relay address:', relayAddr);
  } catch (e) {
    addSys('Audio: cannot get relay config \u2014 ' + e.message);
    return;
  }

  // 2. Request microphone: mono 48 kHz with browser echo/noise processing.
  try {
    mediaStream = await navigator.mediaDevices.getUserMedia({
      audio: { sampleRate: 48000, channelCount: 1, echoCancellation: true, noiseSuppression: true }
    });
  } catch (e) {
    addSys('Audio: mic access denied \u2014 ' + e.message);
    return;
  }

  audioCtx = new AudioContext({ sampleRate: 48000 });

  // 3. Room name is deterministic from the peer fingerprint, so both ends
  // derive the same relay room without extra negotiation.
  const room = callPeer ? normFP(callPeer).slice(0, 16) : 'default';
  const proto = relayAddr.startsWith('https') ? 'wss:' : 'ws:';
  // BUG FIX: the previous pattern was written /^https?:\\/\\// — the doubled
  // backslashes match a literal "\/\/", so the scheme was never stripped and
  // the resulting URL looked like "ws://https://host/ws/...". The corrected
  // pattern strips a leading "http://" or "https://".
  const host = relayAddr.replace(/^https?:\/\//, '');
  const wsUrl = proto + '//' + host + '/ws/' + room;

  addSys('Audio: connecting to ' + room + '...');

  audioWs = new WebSocket(wsUrl);
  audioWs.binaryType = 'arraybuffer';

  audioWs.onopen = () => {
    addSys('Audio: connected \u2014 mic active');

    // 4a. Capture: mic -> Float32 -> int16 PCM frames -> WS.
    const source = audioCtx.createMediaStreamSource(mediaStream);

    // ScriptProcessor is deprecated but requires no separate worklet file.
    const bufferSize = 960; // 20 ms at 48 kHz
    const processor = audioCtx.createScriptProcessor(1024, 1, 1);
    let captureBuffer = new Float32Array(0);

    processor.onaudioprocess = (e) => {
      if (callState !== 'active' || !audioWs || audioWs.readyState !== WebSocket.OPEN) return;
      const input = e.inputBuffer.getChannelData(0);

      // Accumulate samples: the processor quantum (1024) does not divide
      // evenly into the 960-sample frame size.
      const combined = new Float32Array(captureBuffer.length + input.length);
      combined.set(captureBuffer);
      combined.set(input, captureBuffer.length);
      captureBuffer = combined;

      // Emit complete 960-sample (20 ms) frames.
      while (captureBuffer.length >= bufferSize) {
        const frame = captureBuffer.slice(0, bufferSize);
        captureBuffer = captureBuffer.slice(bufferSize);

        // Convert float32 [-1, 1] to clamped int16.
        const pcm = new Int16Array(frame.length);
        for (let i = 0; i < frame.length; i++) {
          pcm[i] = Math.max(-32768, Math.min(32767, Math.round(frame[i] * 32767)));
        }
        audioWs.send(pcm.buffer);
      }
    };

    source.connect(processor);
    processor.connect(audioCtx.destination); // needed to keep processor alive
    captureNode = processor;

    // Playback bookkeeping; nextTime is the scheduling cursor used below.
    playbackNode = { queue: [], nextTime: 0 };
  };

  // 4b. Playback: WS -> int16 PCM -> Float32 -> scheduled buffer source.
  audioWs.onmessage = (event) => {
    if (!audioCtx) return;
    const pcm = new Int16Array(event.data);
    if (pcm.length === 0) return;

    const float32 = new Float32Array(pcm.length);
    for (let i = 0; i < pcm.length; i++) {
      float32[i] = pcm[i] / 32768.0;
    }

    const buffer = audioCtx.createBuffer(1, float32.length, 48000);
    buffer.getChannelData(0).set(float32);
    const source = audioCtx.createBufferSource();
    source.buffer = buffer;
    source.connect(audioCtx.destination);

    // BUG FIX: starting every chunk at "now" (source.start() with no
    // argument) produced gaps and overlap as WS delivery jitters. Schedule
    // buffers back-to-back on a time cursor instead.
    const now = audioCtx.currentTime;
    let at = now;
    if (playbackNode && typeof playbackNode.nextTime === 'number') {
      at = Math.max(now, playbackNode.nextTime);
      playbackNode.nextTime = at + buffer.duration;
    }
    source.start(at);
  };

  audioWs.onclose = () => {
    if (callState === 'active') {
      addSys('Audio: disconnected');
    }
  };

  audioWs.onerror = (e) => {
    addSys('Audio: connection error');
    dbg('Audio WS error:', e);
  };
}
||||
|
||||
function stopAudio() {
  // Tear down the voice path: close the relay socket, detach the capture
  // node, release the microphone, and dispose of the AudioContext.
  // Idempotent — safe to call repeatedly or before audio ever started.
  const sock = audioWs;
  audioWs = null;
  if (sock) {
    sock.close();
  }

  const node = captureNode;
  captureNode = null;
  if (node) {
    node.disconnect();
  }

  const stream = mediaStream;
  mediaStream = null;
  if (stream) {
    for (const track of stream.getTracks()) {
      track.stop();
    }
  }

  const ctx = audioCtx;
  audioCtx = null;
  if (ctx) {
    ctx.close().catch(() => {}); // best-effort; context may already be closed
  }

  playbackNode = null;
}
|
||||
|
||||
// ═══════════════════════════════════════════════
|
||||
// SECTION: Command Handlers
|
||||
// ═══════════════════════════════════════════════
|
||||
|
||||
Reference in New Issue
Block a user