Compare commits
7 Commits
7806d4ec04
...
feature/wz
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
1d33f3ed4e | ||
|
|
2de6e19956 | ||
|
|
ec437afbce | ||
|
|
137e7973c4 | ||
|
|
55d4004f86 | ||
|
|
09a18b086b | ||
|
|
f3c8e11995 |
@@ -1,5 +0,0 @@
|
||||
[target.aarch64-linux-android]
|
||||
linker = "aarch64-linux-android26-clang"
|
||||
|
||||
[target.armv7-linux-androideabi]
|
||||
linker = "armv7a-linux-androideabi26-clang"
|
||||
@@ -2,55 +2,187 @@ name: Build Release Binaries
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- 'feat/*'
|
||||
tags:
|
||||
- 'v*'
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
targets:
|
||||
description: 'Targets to build (comma-separated: amd64,arm64,armv7,mac-arm64)'
|
||||
required: false
|
||||
default: 'amd64'
|
||||
|
||||
env:
|
||||
CARGO_TERM_COLOR: always
|
||||
|
||||
jobs:
|
||||
# Always builds on push tags. On manual dispatch, reads inputs.
|
||||
build-amd64:
|
||||
if: >-
|
||||
github.event_name == 'push' ||
|
||||
contains(github.event.inputs.targets, 'amd64')
|
||||
runs-on: ubuntu-latest
|
||||
container:
|
||||
image: catthehacker/ubuntu:act-latest
|
||||
image: rust:1-bookworm
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Init submodules
|
||||
run: |
|
||||
git config --global url."https://git.manko.yoga/".insteadOf "ssh://git@git.manko.yoga:222/"
|
||||
git submodule update --init --recursive
|
||||
- name: Install dependencies
|
||||
run: apt-get update && apt-get install -y cmake pkg-config libasound2-dev
|
||||
|
||||
- name: Install Rust + dependencies
|
||||
run: |
|
||||
curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y
|
||||
source "$HOME/.cargo/env"
|
||||
apt-get update && apt-get install -y cmake pkg-config libasound2-dev ninja-build
|
||||
rustc --version
|
||||
- name: Cache cargo
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: |
|
||||
~/.cargo/registry
|
||||
~/.cargo/git
|
||||
target
|
||||
key: cargo-amd64-${{ hashFiles('Cargo.lock') }}
|
||||
restore-keys: cargo-amd64-
|
||||
|
||||
- name: Build relay + tools
|
||||
- name: Build headless binaries
|
||||
run: cargo build --release --bin wzp-relay --bin wzp-client --bin wzp-bench --bin wzp-web
|
||||
|
||||
- name: Build audio client
|
||||
run: |
|
||||
source "$HOME/.cargo/env"
|
||||
cargo build --release --bin wzp-relay --bin wzp-client --bin wzp-bench --bin wzp-web
|
||||
cargo build --release --bin wzp-client --features audio
|
||||
cp target/release/wzp-client target/release/wzp-client-audio
|
||||
cargo build --release --bin wzp-client
|
||||
|
||||
- name: Run tests
|
||||
run: |
|
||||
source "$HOME/.cargo/env"
|
||||
cargo test --workspace --lib
|
||||
run: cargo test --workspace --lib
|
||||
|
||||
- name: Upload to rustypaste
|
||||
env:
|
||||
PASTE_AUTH: ${{ secrets.PASTE_AUTH }}
|
||||
PASTE_URL: ${{ secrets.PASTE_URL }}
|
||||
- name: Package
|
||||
run: |
|
||||
tar czf /tmp/wzp-linux-amd64.tar.gz \
|
||||
-C target/release wzp-relay wzp-client wzp-web wzp-bench
|
||||
ls -lh /tmp/wzp-linux-amd64.tar.gz
|
||||
LINK=$(curl -sF "file=@/tmp/wzp-linux-amd64.tar.gz" \
|
||||
-H "Authorization: ${PASTE_AUTH}" \
|
||||
"https://${PASTE_URL}")
|
||||
echo "Download: ${LINK}"
|
||||
mkdir -p dist/wzp-linux-amd64
|
||||
cp target/release/wzp-relay dist/wzp-linux-amd64/
|
||||
cp target/release/wzp-client dist/wzp-linux-amd64/
|
||||
cp target/release/wzp-client-audio dist/wzp-linux-amd64/
|
||||
cp target/release/wzp-web dist/wzp-linux-amd64/
|
||||
cp target/release/wzp-bench dist/wzp-linux-amd64/
|
||||
cp -r crates/wzp-web/static dist/wzp-linux-amd64/
|
||||
cd dist && tar czf wzp-linux-amd64.tar.gz wzp-linux-amd64/
|
||||
|
||||
- name: Upload artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: wzp-linux-amd64
|
||||
path: dist/wzp-linux-amd64.tar.gz
|
||||
|
||||
build-arm64:
|
||||
if: >-
|
||||
github.event_name == 'push' ||
|
||||
contains(github.event.inputs.targets, 'arm64')
|
||||
runs-on: ubuntu-latest
|
||||
container:
|
||||
image: rust:1-bookworm
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Install cross-compilation tools
|
||||
run: |
|
||||
dpkg --add-architecture arm64
|
||||
apt-get update
|
||||
apt-get install -y cmake pkg-config gcc-aarch64-linux-gnu libc6-dev-arm64-cross
|
||||
rustup target add aarch64-unknown-linux-gnu
|
||||
|
||||
- name: Cache cargo
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: |
|
||||
~/.cargo/registry
|
||||
~/.cargo/git
|
||||
target
|
||||
key: cargo-arm64-${{ hashFiles('Cargo.lock') }}
|
||||
restore-keys: cargo-arm64-
|
||||
|
||||
- name: Build
|
||||
env:
|
||||
CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_LINKER: aarch64-linux-gnu-gcc
|
||||
CC_aarch64_unknown_linux_gnu: aarch64-linux-gnu-gcc
|
||||
run: |
|
||||
cargo build --release --target aarch64-unknown-linux-gnu \
|
||||
--bin wzp-relay --bin wzp-client --bin wzp-bench --bin wzp-web
|
||||
|
||||
- name: Package
|
||||
run: |
|
||||
mkdir -p dist/wzp-linux-arm64
|
||||
cp target/aarch64-unknown-linux-gnu/release/wzp-relay dist/wzp-linux-arm64/
|
||||
cp target/aarch64-unknown-linux-gnu/release/wzp-client dist/wzp-linux-arm64/
|
||||
cp target/aarch64-unknown-linux-gnu/release/wzp-web dist/wzp-linux-arm64/
|
||||
cp target/aarch64-unknown-linux-gnu/release/wzp-bench dist/wzp-linux-arm64/
|
||||
cp -r crates/wzp-web/static dist/wzp-linux-arm64/
|
||||
cd dist && tar czf wzp-linux-arm64.tar.gz wzp-linux-arm64/
|
||||
|
||||
- name: Upload artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: wzp-linux-arm64
|
||||
path: dist/wzp-linux-arm64.tar.gz
|
||||
|
||||
build-armv7:
|
||||
if: >-
|
||||
github.event_name == 'push' ||
|
||||
contains(github.event.inputs.targets, 'armv7')
|
||||
runs-on: ubuntu-latest
|
||||
container:
|
||||
image: rust:1-bookworm
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Install cross-compilation tools
|
||||
run: |
|
||||
dpkg --add-architecture armhf
|
||||
apt-get update
|
||||
apt-get install -y cmake pkg-config gcc-arm-linux-gnueabihf libc6-dev-armhf-cross
|
||||
rustup target add armv7-unknown-linux-gnueabihf
|
||||
|
||||
- name: Cache cargo
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: |
|
||||
~/.cargo/registry
|
||||
~/.cargo/git
|
||||
target
|
||||
key: cargo-armv7-${{ hashFiles('Cargo.lock') }}
|
||||
restore-keys: cargo-armv7-
|
||||
|
||||
- name: Build
|
||||
env:
|
||||
CARGO_TARGET_ARMV7_UNKNOWN_LINUX_GNUEABIHF_LINKER: arm-linux-gnueabihf-gcc
|
||||
CC_armv7_unknown_linux_gnueabihf: arm-linux-gnueabihf-gcc
|
||||
run: |
|
||||
cargo build --release --target armv7-unknown-linux-gnueabihf \
|
||||
--bin wzp-relay --bin wzp-client --bin wzp-bench --bin wzp-web
|
||||
|
||||
- name: Package
|
||||
run: |
|
||||
mkdir -p dist/wzp-linux-armv7
|
||||
cp target/armv7-unknown-linux-gnueabihf/release/wzp-relay dist/wzp-linux-armv7/
|
||||
cp target/armv7-unknown-linux-gnueabihf/release/wzp-client dist/wzp-linux-armv7/
|
||||
cp target/armv7-unknown-linux-gnueabihf/release/wzp-web dist/wzp-linux-armv7/
|
||||
cp target/armv7-unknown-linux-gnueabihf/release/wzp-bench dist/wzp-linux-armv7/
|
||||
cp -r crates/wzp-web/static dist/wzp-linux-armv7/
|
||||
cd dist && tar czf wzp-linux-armv7.tar.gz wzp-linux-armv7/
|
||||
|
||||
- name: Upload artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: wzp-linux-armv7
|
||||
path: dist/wzp-linux-armv7.tar.gz
|
||||
|
||||
# Release job — creates a release with all artifacts when a tag is pushed
|
||||
release:
|
||||
if: startsWith(github.ref, 'refs/tags/v')
|
||||
needs: [build-amd64]
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Download all artifacts
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
path: artifacts
|
||||
|
||||
- name: Create release
|
||||
uses: softprops/action-gh-release@v2
|
||||
with:
|
||||
files: artifacts/**/*.tar.gz
|
||||
generate_release_notes: true
|
||||
|
||||
3147
Cargo.lock
generated
3147
Cargo.lock
generated
File diff suppressed because it is too large
Load Diff
24
Cargo.toml
24
Cargo.toml
@@ -9,8 +9,7 @@ members = [
|
||||
"crates/wzp-relay",
|
||||
"crates/wzp-client",
|
||||
"crates/wzp-web",
|
||||
"crates/wzp-android",
|
||||
"desktop/src-tauri",
|
||||
"crates/wzp-wasm",
|
||||
]
|
||||
|
||||
[workspace.package]
|
||||
@@ -54,24 +53,3 @@ wzp-fec = { path = "crates/wzp-fec" }
|
||||
wzp-crypto = { path = "crates/wzp-crypto" }
|
||||
wzp-transport = { path = "crates/wzp-transport" }
|
||||
wzp-client = { path = "crates/wzp-client" }
|
||||
|
||||
# Fast dev profile: optimized but with debug info and incremental compilation.
|
||||
# Use with: cargo run --profile dev-fast
|
||||
[profile.dev-fast]
|
||||
inherits = "dev"
|
||||
opt-level = 2
|
||||
|
||||
# Optimize heavy compute deps even in debug builds —
|
||||
# real-time audio needs < 20ms per frame, impossible unoptimized.
|
||||
[profile.dev.package.nnnoiseless]
|
||||
opt-level = 3
|
||||
[profile.dev.package.audiopus_sys]
|
||||
opt-level = 3
|
||||
[profile.dev.package.audiopus]
|
||||
opt-level = 3
|
||||
[profile.dev.package.raptorq]
|
||||
opt-level = 3
|
||||
[profile.dev.package.wzp-codec]
|
||||
opt-level = 3
|
||||
[profile.dev.package.wzp-fec]
|
||||
opt-level = 3
|
||||
|
||||
6
android/.gitignore
vendored
6
android/.gitignore
vendored
@@ -1,6 +0,0 @@
|
||||
.gradle/
|
||||
build/
|
||||
app/build/
|
||||
app/src/main/jniLibs/
|
||||
local.properties
|
||||
keystore/*.jks
|
||||
Binary file not shown.
@@ -1,85 +0,0 @@
|
||||
plugins {
|
||||
id("com.android.application")
|
||||
id("org.jetbrains.kotlin.android")
|
||||
}
|
||||
|
||||
android {
|
||||
namespace = "com.wzp.phone"
|
||||
compileSdk = 34
|
||||
|
||||
defaultConfig {
|
||||
applicationId = "com.wzp.phone"
|
||||
minSdk = 26 // AAudio requires API 26
|
||||
targetSdk = 34
|
||||
versionCode = 1
|
||||
versionName = "0.1.0"
|
||||
ndk { abiFilters += listOf("arm64-v8a") }
|
||||
}
|
||||
|
||||
signingConfigs {
|
||||
create("release") {
|
||||
storeFile = file("${project.rootDir}/keystore/wzp-release.jks")
|
||||
storePassword = "wzphone2024"
|
||||
keyAlias = "wzp-release"
|
||||
keyPassword = "wzphone2024"
|
||||
}
|
||||
getByName("debug") {
|
||||
storeFile = file("${project.rootDir}/keystore/wzp-debug.jks")
|
||||
storePassword = "android"
|
||||
keyAlias = "wzp-debug"
|
||||
keyPassword = "android"
|
||||
}
|
||||
}
|
||||
|
||||
buildTypes {
|
||||
debug {
|
||||
signingConfig = signingConfigs.getByName("debug")
|
||||
isDebuggable = true
|
||||
}
|
||||
release {
|
||||
signingConfig = signingConfigs.getByName("release")
|
||||
isMinifyEnabled = false
|
||||
proguardFiles(
|
||||
getDefaultProguardFile("proguard-android-optimize.txt"),
|
||||
"proguard-rules.pro"
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
compileOptions {
|
||||
sourceCompatibility = JavaVersion.VERSION_1_8
|
||||
targetCompatibility = JavaVersion.VERSION_1_8
|
||||
}
|
||||
|
||||
kotlinOptions {
|
||||
jvmTarget = "1.8"
|
||||
}
|
||||
|
||||
buildFeatures { compose = true }
|
||||
composeOptions { kotlinCompilerExtensionVersion = "1.5.8" }
|
||||
|
||||
ndkVersion = "26.1.10909125"
|
||||
}
|
||||
|
||||
// cargo-ndk integration: build the Rust native library for Android targets
|
||||
tasks.register<Exec>("cargoNdkBuild") {
|
||||
workingDir = file("${project.rootDir}/..")
|
||||
commandLine(
|
||||
"cargo", "ndk",
|
||||
"-t", "arm64-v8a",
|
||||
"-o", "${project.projectDir}/src/main/jniLibs",
|
||||
"build", "--release", "-p", "wzp-android"
|
||||
)
|
||||
}
|
||||
|
||||
// Skip cargo-ndk in CI/Docker — .so is pre-built into jniLibs
|
||||
// tasks.named("preBuild") { dependsOn("cargoNdkBuild") }
|
||||
|
||||
dependencies {
|
||||
implementation("androidx.core:core-ktx:1.12.0")
|
||||
implementation("androidx.lifecycle:lifecycle-runtime-ktx:2.7.0")
|
||||
implementation("androidx.activity:activity-compose:1.8.2")
|
||||
implementation(platform("androidx.compose:compose-bom:2024.01.00"))
|
||||
implementation("androidx.compose.ui:ui")
|
||||
implementation("androidx.compose.material3:material3")
|
||||
}
|
||||
9
android/app/proguard-rules.pro
vendored
9
android/app/proguard-rules.pro
vendored
@@ -1,9 +0,0 @@
|
||||
# WZPhone ProGuard rules
|
||||
|
||||
# Keep JNI native methods
|
||||
-keepclasseswithmembernames class * {
|
||||
native <methods>;
|
||||
}
|
||||
|
||||
# Keep the WZP engine bridge class
|
||||
-keep class com.wzp.phone.engine.** { *; }
|
||||
@@ -1,33 +0,0 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<manifest xmlns:android="http://schemas.android.com/apk/res/android">
|
||||
<uses-permission android:name="android.permission.INTERNET" />
|
||||
<uses-permission android:name="android.permission.RECORD_AUDIO" />
|
||||
<uses-permission android:name="android.permission.FOREGROUND_SERVICE" />
|
||||
<uses-permission android:name="android.permission.FOREGROUND_SERVICE_MICROPHONE" />
|
||||
<uses-permission android:name="android.permission.WAKE_LOCK" />
|
||||
<uses-permission android:name="android.permission.ACCESS_NETWORK_STATE" />
|
||||
<uses-permission android:name="android.permission.BLUETOOTH_CONNECT" />
|
||||
<uses-permission android:name="android.permission.MODIFY_AUDIO_SETTINGS" />
|
||||
|
||||
<application
|
||||
android:name="com.wzp.WzpApplication"
|
||||
android:label="WZ Phone"
|
||||
android:supportsRtl="true"
|
||||
android:theme="@android:style/Theme.Material.Light.NoActionBar">
|
||||
|
||||
<activity
|
||||
android:name="com.wzp.ui.call.CallActivity"
|
||||
android:exported="true"
|
||||
android:launchMode="singleTask">
|
||||
<intent-filter>
|
||||
<action android:name="android.intent.action.MAIN" />
|
||||
<category android:name="android.intent.category.LAUNCHER" />
|
||||
</intent-filter>
|
||||
</activity>
|
||||
|
||||
<service
|
||||
android:name="com.wzp.service.CallService"
|
||||
android:foregroundServiceType="microphone"
|
||||
android:exported="false" />
|
||||
</application>
|
||||
</manifest>
|
||||
@@ -1,38 +0,0 @@
|
||||
package com.wzp
|
||||
|
||||
import android.app.Application
|
||||
import android.app.NotificationChannel
|
||||
import android.app.NotificationManager
|
||||
import android.os.Build
|
||||
|
||||
/**
|
||||
* Application entry point for WarzonePhone.
|
||||
*
|
||||
* Creates the notification channel required for the foreground [com.wzp.service.CallService].
|
||||
*/
|
||||
class WzpApplication : Application() {
|
||||
|
||||
override fun onCreate() {
|
||||
super.onCreate()
|
||||
createNotificationChannel()
|
||||
}
|
||||
|
||||
private fun createNotificationChannel() {
|
||||
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
|
||||
val channel = NotificationChannel(
|
||||
CHANNEL_ID,
|
||||
"Active Call",
|
||||
NotificationManager.IMPORTANCE_LOW
|
||||
).apply {
|
||||
description = "Shown while a VoIP call is in progress"
|
||||
setShowBadge(false)
|
||||
}
|
||||
val nm = getSystemService(NotificationManager::class.java)
|
||||
nm.createNotificationChannel(channel)
|
||||
}
|
||||
}
|
||||
|
||||
companion object {
|
||||
const val CHANNEL_ID = "wzp_call_channel"
|
||||
}
|
||||
}
|
||||
@@ -1,204 +0,0 @@
|
||||
package com.wzp.audio
|
||||
|
||||
import android.Manifest
|
||||
import android.content.Context
|
||||
import android.content.pm.PackageManager
|
||||
import android.media.AudioAttributes
|
||||
import android.media.AudioFormat
|
||||
import android.media.AudioRecord
|
||||
import android.media.AudioTrack
|
||||
import android.media.MediaRecorder
|
||||
import android.util.Log
|
||||
import androidx.core.content.ContextCompat
|
||||
import com.wzp.engine.WzpEngine
|
||||
import kotlin.math.pow
|
||||
|
||||
/**
|
||||
* Audio pipeline that captures mic audio and plays received audio using
|
||||
* Android AudioRecord/AudioTrack APIs running on JVM threads.
|
||||
*
|
||||
* PCM samples are shuttled to/from the Rust engine via JNI ring buffers:
|
||||
* - Capture: AudioRecord → WzpEngine.writeAudio() → Rust encoder → network
|
||||
* - Playout: network → Rust decoder → WzpEngine.readAudio() → AudioTrack
|
||||
*
|
||||
* All audio is 48kHz, mono, 16-bit PCM (matching Opus codec requirements).
|
||||
*/
|
||||
class AudioPipeline(private val context: Context) {
|
||||
|
||||
companion object {
|
||||
private const val TAG = "AudioPipeline"
|
||||
private const val SAMPLE_RATE = 48000
|
||||
private const val CHANNEL_IN = AudioFormat.CHANNEL_IN_MONO
|
||||
private const val CHANNEL_OUT = AudioFormat.CHANNEL_OUT_MONO
|
||||
private const val ENCODING = AudioFormat.ENCODING_PCM_16BIT
|
||||
/** 20ms frame at 48kHz = 960 samples */
|
||||
private const val FRAME_SAMPLES = 960
|
||||
}
|
||||
|
||||
@Volatile
|
||||
private var running = false
|
||||
/** Playout (incoming voice) gain in dB. 0 = unity. */
|
||||
@Volatile
|
||||
var playoutGainDb: Float = 0f
|
||||
/** Capture (mic) gain in dB. 0 = unity. */
|
||||
@Volatile
|
||||
var captureGainDb: Float = 0f
|
||||
private var captureThread: Thread? = null
|
||||
private var playoutThread: Thread? = null
|
||||
|
||||
fun start(engine: WzpEngine) {
|
||||
if (running) return
|
||||
running = true
|
||||
|
||||
captureThread = Thread({
|
||||
runCapture(engine)
|
||||
// Park thread forever — exiting triggers a libcrypto TLS destructor
|
||||
// crash (SIGSEGV in OPENSSL_free) on Android when a JNI-calling thread exits.
|
||||
parkThread()
|
||||
}, "wzp-capture").apply {
|
||||
isDaemon = true
|
||||
priority = Thread.MAX_PRIORITY
|
||||
start()
|
||||
}
|
||||
|
||||
playoutThread = Thread({
|
||||
runPlayout(engine)
|
||||
parkThread()
|
||||
}, "wzp-playout").apply {
|
||||
isDaemon = true
|
||||
priority = Thread.MAX_PRIORITY
|
||||
start()
|
||||
}
|
||||
|
||||
Log.i(TAG, "audio pipeline started")
|
||||
}
|
||||
|
||||
fun stop() {
|
||||
running = false
|
||||
// Don't join — threads are parked as daemons to avoid native TLS crash
|
||||
captureThread = null
|
||||
playoutThread = null
|
||||
Log.i(TAG, "audio pipeline stopped")
|
||||
}
|
||||
|
||||
private fun applyGain(pcm: ShortArray, count: Int, db: Float) {
|
||||
if (db == 0f) return
|
||||
val linear = 10f.pow(db / 20f)
|
||||
for (i in 0 until count) {
|
||||
pcm[i] = (pcm[i] * linear).toInt().coerceIn(-32000, 32000).toShort()
|
||||
}
|
||||
}
|
||||
|
||||
private fun parkThread() {
|
||||
try {
|
||||
Thread.sleep(Long.MAX_VALUE)
|
||||
} catch (_: InterruptedException) {
|
||||
// process exiting
|
||||
}
|
||||
}
|
||||
|
||||
private fun runCapture(engine: WzpEngine) {
|
||||
if (ContextCompat.checkSelfPermission(context, Manifest.permission.RECORD_AUDIO)
|
||||
!= PackageManager.PERMISSION_GRANTED
|
||||
) {
|
||||
Log.e(TAG, "RECORD_AUDIO permission not granted, capture disabled")
|
||||
return
|
||||
}
|
||||
|
||||
val minBuf = AudioRecord.getMinBufferSize(SAMPLE_RATE, CHANNEL_IN, ENCODING)
|
||||
val bufSize = maxOf(minBuf, FRAME_SAMPLES * 2 * 4) // at least 4 frames
|
||||
|
||||
val recorder = try {
|
||||
AudioRecord(
|
||||
MediaRecorder.AudioSource.VOICE_COMMUNICATION,
|
||||
SAMPLE_RATE,
|
||||
CHANNEL_IN,
|
||||
ENCODING,
|
||||
bufSize
|
||||
)
|
||||
} catch (e: SecurityException) {
|
||||
Log.e(TAG, "AudioRecord SecurityException: ${e.message}")
|
||||
return
|
||||
}
|
||||
|
||||
if (recorder.state != AudioRecord.STATE_INITIALIZED) {
|
||||
Log.e(TAG, "AudioRecord failed to initialize")
|
||||
recorder.release()
|
||||
return
|
||||
}
|
||||
|
||||
recorder.startRecording()
|
||||
Log.i(TAG, "capture started: ${SAMPLE_RATE}Hz mono, buf=$bufSize")
|
||||
|
||||
val pcm = ShortArray(FRAME_SAMPLES)
|
||||
try {
|
||||
while (running) {
|
||||
val read = recorder.read(pcm, 0, FRAME_SAMPLES)
|
||||
if (read > 0) {
|
||||
applyGain(pcm, read, captureGainDb)
|
||||
engine.writeAudio(pcm)
|
||||
} else if (read < 0) {
|
||||
Log.e(TAG, "AudioRecord.read error: $read")
|
||||
break
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
recorder.stop()
|
||||
recorder.release()
|
||||
Log.i(TAG, "capture stopped")
|
||||
}
|
||||
}
|
||||
|
||||
private fun runPlayout(engine: WzpEngine) {
|
||||
val minBuf = AudioTrack.getMinBufferSize(SAMPLE_RATE, CHANNEL_OUT, ENCODING)
|
||||
val bufSize = maxOf(minBuf, FRAME_SAMPLES * 2 * 4)
|
||||
|
||||
val track = AudioTrack.Builder()
|
||||
.setAudioAttributes(
|
||||
AudioAttributes.Builder()
|
||||
.setUsage(AudioAttributes.USAGE_VOICE_COMMUNICATION)
|
||||
.setContentType(AudioAttributes.CONTENT_TYPE_SPEECH)
|
||||
.build()
|
||||
)
|
||||
.setAudioFormat(
|
||||
AudioFormat.Builder()
|
||||
.setSampleRate(SAMPLE_RATE)
|
||||
.setChannelMask(CHANNEL_OUT)
|
||||
.setEncoding(ENCODING)
|
||||
.build()
|
||||
)
|
||||
.setBufferSizeInBytes(bufSize)
|
||||
.setTransferMode(AudioTrack.MODE_STREAM)
|
||||
.build()
|
||||
|
||||
if (track.state != AudioTrack.STATE_INITIALIZED) {
|
||||
Log.e(TAG, "AudioTrack failed to initialize")
|
||||
track.release()
|
||||
return
|
||||
}
|
||||
|
||||
track.play()
|
||||
Log.i(TAG, "playout started: ${SAMPLE_RATE}Hz mono, buf=$bufSize")
|
||||
|
||||
val pcm = ShortArray(FRAME_SAMPLES)
|
||||
val silence = ShortArray(FRAME_SAMPLES) // pre-allocated silence
|
||||
try {
|
||||
while (running) {
|
||||
val read = engine.readAudio(pcm)
|
||||
if (read >= FRAME_SAMPLES) {
|
||||
applyGain(pcm, read, playoutGainDb)
|
||||
track.write(pcm, 0, read)
|
||||
} else {
|
||||
// Not enough decoded audio — write silence to keep stream alive
|
||||
track.write(silence, 0, FRAME_SAMPLES)
|
||||
// Sleep briefly to avoid busy-spinning
|
||||
Thread.sleep(5)
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
track.stop()
|
||||
track.release()
|
||||
Log.i(TAG, "playout stopped")
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,142 +0,0 @@
|
||||
package com.wzp.audio
|
||||
|
||||
import android.content.Context
|
||||
import android.media.AudioDeviceCallback
|
||||
import android.media.AudioDeviceInfo
|
||||
import android.media.AudioManager
|
||||
import android.os.Handler
|
||||
import android.os.Looper
|
||||
|
||||
/**
|
||||
* Manages audio routing between earpiece, speaker, and Bluetooth devices.
|
||||
*
|
||||
* Wraps [AudioManager] operations and listens for device connection changes
|
||||
* via [AudioDeviceCallback] (API 23+).
|
||||
*
|
||||
* Usage:
|
||||
* 1. Call [register] when the call starts
|
||||
* 2. Use [setSpeaker] and [setBluetoothSco] to switch routes
|
||||
* 3. Call [unregister] when the call ends
|
||||
*/
|
||||
class AudioRouteManager(context: Context) {
|
||||
|
||||
private val audioManager = context.getSystemService(Context.AUDIO_SERVICE) as AudioManager
|
||||
private val mainHandler = Handler(Looper.getMainLooper())
|
||||
|
||||
/** Listener for audio route changes. */
|
||||
var onRouteChanged: ((AudioRoute) -> Unit)? = null
|
||||
|
||||
/** Current active route. */
|
||||
var currentRoute: AudioRoute = AudioRoute.EARPIECE
|
||||
private set
|
||||
|
||||
// -- Device callback (API 23+) -------------------------------------------
|
||||
|
||||
private val deviceCallback = object : AudioDeviceCallback() {
|
||||
override fun onAudioDevicesAdded(addedDevices: Array<out AudioDeviceInfo>) {
|
||||
for (device in addedDevices) {
|
||||
if (device.type == AudioDeviceInfo.TYPE_BLUETOOTH_SCO) {
|
||||
// A Bluetooth headset was connected — optionally auto-switch
|
||||
onRouteChanged?.invoke(AudioRoute.BLUETOOTH)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
override fun onAudioDevicesRemoved(removedDevices: Array<out AudioDeviceInfo>) {
|
||||
for (device in removedDevices) {
|
||||
if (device.type == AudioDeviceInfo.TYPE_BLUETOOTH_SCO) {
|
||||
// Bluetooth disconnected — fall back to earpiece or speaker
|
||||
val fallback = if (audioManager.isSpeakerphoneOn) {
|
||||
AudioRoute.SPEAKER
|
||||
} else {
|
||||
AudioRoute.EARPIECE
|
||||
}
|
||||
currentRoute = fallback
|
||||
onRouteChanged?.invoke(fallback)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// -- Public API -----------------------------------------------------------
|
||||
|
||||
/** Register the device callback. Call when a call starts. */
|
||||
fun register() {
|
||||
audioManager.registerAudioDeviceCallback(deviceCallback, mainHandler)
|
||||
}
|
||||
|
||||
/** Unregister the device callback and release Bluetooth SCO. Call when the call ends. */
|
||||
fun unregister() {
|
||||
audioManager.unregisterAudioDeviceCallback(deviceCallback)
|
||||
stopBluetoothSco()
|
||||
}
|
||||
|
||||
/**
|
||||
* Enable or disable the loudspeaker.
|
||||
*
|
||||
* When enabling speaker, Bluetooth SCO is disconnected.
|
||||
*/
|
||||
@Suppress("DEPRECATION")
|
||||
fun setSpeaker(enabled: Boolean) {
|
||||
if (enabled) {
|
||||
stopBluetoothSco()
|
||||
}
|
||||
audioManager.isSpeakerphoneOn = enabled
|
||||
currentRoute = if (enabled) AudioRoute.SPEAKER else AudioRoute.EARPIECE
|
||||
onRouteChanged?.invoke(currentRoute)
|
||||
}
|
||||
|
||||
/**
|
||||
* Enable or disable Bluetooth SCO (Synchronous Connection Oriented) audio.
|
||||
*
|
||||
* When enabling Bluetooth, the speaker is turned off.
|
||||
*/
|
||||
@Suppress("DEPRECATION")
|
||||
fun setBluetoothSco(enabled: Boolean) {
|
||||
if (enabled) {
|
||||
audioManager.isSpeakerphoneOn = false
|
||||
audioManager.startBluetoothSco()
|
||||
audioManager.isBluetoothScoOn = true
|
||||
currentRoute = AudioRoute.BLUETOOTH
|
||||
} else {
|
||||
stopBluetoothSco()
|
||||
currentRoute = AudioRoute.EARPIECE
|
||||
}
|
||||
onRouteChanged?.invoke(currentRoute)
|
||||
}
|
||||
|
||||
/** Check whether a Bluetooth SCO device is currently connected. */
|
||||
fun isBluetoothAvailable(): Boolean {
|
||||
val devices = audioManager.getDevices(AudioManager.GET_DEVICES_OUTPUTS)
|
||||
return devices.any { it.type == AudioDeviceInfo.TYPE_BLUETOOTH_SCO }
|
||||
}
|
||||
|
||||
/** List available output audio routes. */
|
||||
fun availableRoutes(): List<AudioRoute> {
|
||||
val routes = mutableListOf(AudioRoute.EARPIECE, AudioRoute.SPEAKER)
|
||||
if (isBluetoothAvailable()) {
|
||||
routes.add(AudioRoute.BLUETOOTH)
|
||||
}
|
||||
return routes
|
||||
}
|
||||
|
||||
// -- Internal -------------------------------------------------------------
|
||||
|
||||
@Suppress("DEPRECATION")
|
||||
private fun stopBluetoothSco() {
|
||||
if (audioManager.isBluetoothScoOn) {
|
||||
audioManager.isBluetoothScoOn = false
|
||||
audioManager.stopBluetoothSco()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/** Audio output route. */
|
||||
enum class AudioRoute {
|
||||
/** Phone earpiece (default for calls). */
|
||||
EARPIECE,
|
||||
/** Built-in loudspeaker. */
|
||||
SPEAKER,
|
||||
/** Bluetooth SCO headset/headphones. */
|
||||
BLUETOOTH
|
||||
}
|
||||
@@ -1,135 +0,0 @@
|
||||
package com.wzp.data
|
||||
|
||||
import android.content.Context
|
||||
import android.content.SharedPreferences
|
||||
import com.wzp.ui.call.ServerEntry
|
||||
import org.json.JSONArray
|
||||
import org.json.JSONObject
|
||||
import java.security.SecureRandom
|
||||
|
||||
/**
|
||||
* Persists user settings via SharedPreferences.
|
||||
*
|
||||
* Stores: servers, default server index, room name, alias, gain values,
|
||||
* IPv6 preference, and the identity seed (hex-encoded 32 bytes).
|
||||
*/
|
||||
class SettingsRepository(context: Context) {
|
||||
|
||||
private val prefs: SharedPreferences =
|
||||
context.applicationContext.getSharedPreferences("wzp_settings", Context.MODE_PRIVATE)
|
||||
|
||||
companion object {
|
||||
private const val KEY_SERVERS = "servers_json"
|
||||
private const val KEY_SELECTED_SERVER = "selected_server"
|
||||
private const val KEY_ROOM = "room_name"
|
||||
private const val KEY_ALIAS = "alias"
|
||||
private const val KEY_PLAYOUT_GAIN = "playout_gain_db"
|
||||
private const val KEY_CAPTURE_GAIN = "capture_gain_db"
|
||||
private const val KEY_PREFER_IPV6 = "prefer_ipv6"
|
||||
private const val KEY_IDENTITY_SEED = "identity_seed_hex"
|
||||
}
|
||||
|
||||
// --- Servers ---
|
||||
|
||||
fun saveServers(servers: List<ServerEntry>) {
|
||||
val arr = JSONArray()
|
||||
servers.forEach { entry ->
|
||||
arr.put(JSONObject().apply {
|
||||
put("address", entry.address)
|
||||
put("label", entry.label)
|
||||
})
|
||||
}
|
||||
prefs.edit().putString(KEY_SERVERS, arr.toString()).apply()
|
||||
}
|
||||
|
||||
fun loadServers(): List<ServerEntry>? {
|
||||
val json = prefs.getString(KEY_SERVERS, null) ?: return null
|
||||
return try {
|
||||
val arr = JSONArray(json)
|
||||
(0 until arr.length()).map { i ->
|
||||
val obj = arr.getJSONObject(i)
|
||||
ServerEntry(obj.getString("address"), obj.getString("label"))
|
||||
}
|
||||
} catch (_: Exception) { null }
|
||||
}
|
||||
|
||||
fun saveSelectedServer(index: Int) {
|
||||
prefs.edit().putInt(KEY_SELECTED_SERVER, index).apply()
|
||||
}
|
||||
|
||||
fun loadSelectedServer(): Int = prefs.getInt(KEY_SELECTED_SERVER, 0)
|
||||
|
||||
// --- Room ---
|
||||
|
||||
fun saveRoom(name: String) { prefs.edit().putString(KEY_ROOM, name).apply() }
|
||||
fun loadRoom(): String = prefs.getString(KEY_ROOM, "android") ?: "android"
|
||||
|
||||
// --- Alias ---
|
||||
|
||||
fun saveAlias(alias: String) { prefs.edit().putString(KEY_ALIAS, alias).apply() }
|
||||
|
||||
/**
|
||||
* Load alias, generating a random name on first launch.
|
||||
*/
|
||||
fun getOrCreateAlias(): String {
|
||||
val existing = prefs.getString(KEY_ALIAS, null)
|
||||
if (!existing.isNullOrEmpty()) return existing
|
||||
val name = generateRandomName()
|
||||
prefs.edit().putString(KEY_ALIAS, name).apply()
|
||||
return name
|
||||
}
|
||||
|
||||
private fun generateRandomName(): String {
|
||||
val adjectives = listOf(
|
||||
"Swift", "Silent", "Brave", "Calm", "Dark", "Fierce", "Ghost",
|
||||
"Iron", "Lucky", "Noble", "Quick", "Sharp", "Storm", "Wild",
|
||||
"Cold", "Bright", "Lone", "Red", "Grey", "Frosty", "Dusty",
|
||||
"Rusty", "Neon", "Void", "Solar", "Lunar", "Cyber", "Pixel",
|
||||
"Sonic", "Hyper", "Turbo", "Nano", "Mega", "Ultra", "Zinc"
|
||||
)
|
||||
val nouns = listOf(
|
||||
"Wolf", "Hawk", "Fox", "Bear", "Lynx", "Crow", "Viper",
|
||||
"Cobra", "Tiger", "Eagle", "Shark", "Raven", "Falcon", "Otter",
|
||||
"Mantis", "Panda", "Jackal", "Badger", "Heron", "Bison",
|
||||
"Condor", "Coyote", "Gecko", "Hornet", "Marten", "Osprey",
|
||||
"Parrot", "Puma", "Raptor", "Stork", "Toucan", "Walrus"
|
||||
)
|
||||
val adj = adjectives.random()
|
||||
val noun = nouns.random()
|
||||
return "$adj $noun"
|
||||
}
|
||||
|
||||
// --- Gain ---
|
||||
|
||||
fun savePlayoutGain(db: Float) { prefs.edit().putFloat(KEY_PLAYOUT_GAIN, db).apply() }
|
||||
fun loadPlayoutGain(): Float = prefs.getFloat(KEY_PLAYOUT_GAIN, 0f)
|
||||
|
||||
fun saveCaptureGain(db: Float) { prefs.edit().putFloat(KEY_CAPTURE_GAIN, db).apply() }
|
||||
fun loadCaptureGain(): Float = prefs.getFloat(KEY_CAPTURE_GAIN, 0f)
|
||||
|
||||
// --- IPv6 ---
|
||||
|
||||
fun savePreferIPv6(prefer: Boolean) { prefs.edit().putBoolean(KEY_PREFER_IPV6, prefer).apply() }
|
||||
fun loadPreferIPv6(): Boolean = prefs.getBoolean(KEY_PREFER_IPV6, false)
|
||||
|
||||
// --- Identity seed ---
|
||||
|
||||
/**
|
||||
* Get or generate the identity seed. On first call, generates a random
|
||||
* 32-byte seed and persists it. Subsequent calls return the same seed.
|
||||
*/
|
||||
fun getOrCreateSeedHex(): String {
|
||||
val existing = prefs.getString(KEY_IDENTITY_SEED, null)
|
||||
if (!existing.isNullOrEmpty()) return existing
|
||||
val seed = ByteArray(32).also { SecureRandom().nextBytes(it) }
|
||||
val hex = seed.joinToString("") { "%02x".format(it) }
|
||||
prefs.edit().putString(KEY_IDENTITY_SEED, hex).apply()
|
||||
return hex
|
||||
}
|
||||
|
||||
fun loadSeedHex(): String = prefs.getString(KEY_IDENTITY_SEED, "") ?: ""
|
||||
|
||||
fun saveSeedHex(hex: String) {
|
||||
prefs.edit().putString(KEY_IDENTITY_SEED, hex).apply()
|
||||
}
|
||||
}
|
||||
@@ -1,96 +0,0 @@
|
||||
package com.wzp.engine
|
||||
|
||||
import org.json.JSONArray
|
||||
import org.json.JSONObject
|
||||
|
||||
/**
|
||||
* Snapshot of call statistics, mirroring the Rust `CallStats` struct.
|
||||
*
|
||||
* Constructed from the JSON string returned by [WzpEngine.getStats].
|
||||
*/
|
||||
data class CallStats(
|
||||
/** Current call state ordinal (see [CallStateConstants]). */
|
||||
val state: Int = 0,
|
||||
/** Call duration in seconds. */
|
||||
val durationSecs: Double = 0.0,
|
||||
/** Quality tier: 0 = Good, 1 = Degraded, 2 = Catastrophic. */
|
||||
val qualityTier: Int = 0,
|
||||
/** Observed packet loss percentage (0..100). */
|
||||
val lossPct: Float = 0f,
|
||||
/** Smoothed round-trip time in milliseconds. */
|
||||
val rttMs: Int = 0,
|
||||
/** Jitter in milliseconds. */
|
||||
val jitterMs: Int = 0,
|
||||
/** Current jitter buffer depth in packets. */
|
||||
val jitterBufferDepth: Int = 0,
|
||||
/** Total frames encoded since call start. */
|
||||
val framesEncoded: Long = 0,
|
||||
/** Total frames decoded since call start. */
|
||||
val framesDecoded: Long = 0,
|
||||
/** Number of playout underruns (buffer empty when audio was needed). */
|
||||
val underruns: Long = 0,
|
||||
/** Frames recovered by FEC. */
|
||||
val fecRecovered: Long = 0,
|
||||
/** Current mic audio level (RMS, 0-32767). */
|
||||
val audioLevel: Int = 0,
|
||||
/** Number of participants in the room. */
|
||||
val roomParticipantCount: Int = 0,
|
||||
/** Participants in the room (fingerprint + optional alias). */
|
||||
val roomParticipants: List<RoomMember> = emptyList(),
|
||||
) {
|
||||
/** Human-readable quality label. */
|
||||
val qualityLabel: String
|
||||
get() = when (qualityTier) {
|
||||
0 -> "Good"
|
||||
1 -> "Degraded"
|
||||
2 -> "Catastrophic"
|
||||
else -> "Unknown"
|
||||
}
|
||||
|
||||
companion object {
|
||||
private fun parseParticipants(arr: JSONArray?): List<RoomMember> {
|
||||
if (arr == null) return emptyList()
|
||||
return (0 until arr.length()).map { i ->
|
||||
val o = arr.getJSONObject(i)
|
||||
RoomMember(
|
||||
fingerprint = o.optString("fingerprint", ""),
|
||||
alias = o.optString("alias", null)
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/** Deserialise from the JSON string produced by the native engine. */
|
||||
fun fromJson(json: String): CallStats {
|
||||
return try {
|
||||
val obj = JSONObject(json)
|
||||
CallStats(
|
||||
state = obj.optInt("state", 0),
|
||||
durationSecs = obj.optDouble("duration_secs", 0.0),
|
||||
qualityTier = obj.optInt("quality_tier", 0),
|
||||
lossPct = obj.optDouble("loss_pct", 0.0).toFloat(),
|
||||
rttMs = obj.optInt("rtt_ms", 0),
|
||||
jitterMs = obj.optInt("jitter_ms", 0),
|
||||
jitterBufferDepth = obj.optInt("jitter_buffer_depth", 0),
|
||||
framesEncoded = obj.optLong("frames_encoded", 0),
|
||||
framesDecoded = obj.optLong("frames_decoded", 0),
|
||||
underruns = obj.optLong("underruns", 0),
|
||||
fecRecovered = obj.optLong("fec_recovered", 0),
|
||||
audioLevel = obj.optInt("audio_level", 0),
|
||||
roomParticipantCount = obj.optInt("room_participant_count", 0),
|
||||
roomParticipants = parseParticipants(obj.optJSONArray("room_participants"))
|
||||
)
|
||||
} catch (e: Exception) {
|
||||
CallStats()
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
data class RoomMember(
|
||||
val fingerprint: String,
|
||||
val alias: String? = null
|
||||
) {
|
||||
/** Short display name: alias if set, otherwise first 8 chars of fingerprint. */
|
||||
val displayName: String
|
||||
get() = alias ?: fingerprint.take(8)
|
||||
}
|
||||
@@ -1,32 +0,0 @@
|
||||
package com.wzp.engine
|
||||
|
||||
/**
|
||||
* Callback interface for VoIP engine events.
|
||||
*
|
||||
* All callbacks are invoked on the main/UI thread.
|
||||
*/
|
||||
interface WzpCallback {
|
||||
|
||||
/**
|
||||
* Called when the call state changes.
|
||||
*
|
||||
* @param state one of [CallStateConstants]: IDLE(0), CONNECTING(1), ACTIVE(2),
|
||||
* RECONNECTING(3), CLOSED(4)
|
||||
*/
|
||||
fun onCallStateChanged(state: Int)
|
||||
|
||||
/**
|
||||
* Called when the network quality tier changes.
|
||||
*
|
||||
* @param tier 0 = Good, 1 = Degraded, 2 = Catastrophic
|
||||
*/
|
||||
fun onQualityTierChanged(tier: Int)
|
||||
|
||||
/**
|
||||
* Called when an error occurs in the native engine.
|
||||
*
|
||||
* @param code numeric error code (negative)
|
||||
* @param message human-readable description
|
||||
*/
|
||||
fun onError(code: Int, message: String)
|
||||
}
|
||||
@@ -1,149 +0,0 @@
|
||||
package com.wzp.engine
|
||||
|
||||
/**
|
||||
* Native VoIP engine wrapper. Delegates all work to libwzp_android.so via JNI.
|
||||
*
|
||||
* Lifecycle:
|
||||
* 1. Construct with a [WzpCallback]
|
||||
* 2. Call [init] to create the native engine
|
||||
* 3. Call [startCall] to begin a VoIP session
|
||||
* 4. Use [setMute], [setSpeaker], [getStats], [forceProfile] during the call
|
||||
* 5. Call [stopCall] to end the session
|
||||
* 6. Call [destroy] when the engine is no longer needed
|
||||
*
|
||||
* Thread safety: all methods must be called from the same thread (typically main).
|
||||
*/
|
||||
class WzpEngine(private val callback: WzpCallback) {
|
||||
|
||||
/** Opaque pointer to the native EngineHandle. 0 means not initialised. */
|
||||
private var nativeHandle: Long = 0L
|
||||
|
||||
/** Whether the engine has been initialised. */
|
||||
val isInitialized: Boolean get() = nativeHandle != 0L
|
||||
|
||||
/** Create the native engine. Must be called before any other method. */
|
||||
fun init() {
|
||||
check(nativeHandle == 0L) { "Engine already initialized" }
|
||||
nativeHandle = nativeInit()
|
||||
check(nativeHandle != 0L) { "Native engine creation failed" }
|
||||
}
|
||||
|
||||
/**
|
||||
* Start a call.
|
||||
*
|
||||
* @param relayAddr relay server address (host:port)
|
||||
* @param room room identifier (used as QUIC SNI)
|
||||
* @param seedHex 64-char hex-encoded 32-byte identity seed (empty = random)
|
||||
* @param token authentication token (empty = no auth)
|
||||
* @param alias display name sent to relay for room participant list
|
||||
* @return 0 on success, negative error code on failure
|
||||
*/
|
||||
fun startCall(relayAddr: String, room: String, seedHex: String = "", token: String = "", alias: String = ""): Int {
|
||||
check(nativeHandle != 0L) { "Engine not initialized" }
|
||||
val result = nativeStartCall(nativeHandle, relayAddr, room, seedHex, token, alias)
|
||||
if (result == 0) {
|
||||
callback.onCallStateChanged(CallStateConstants.CONNECTING)
|
||||
} else {
|
||||
callback.onError(result, "Failed to start call")
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
/** Stop the active call. Safe to call when no call is active. */
|
||||
fun stopCall() {
|
||||
if (nativeHandle != 0L) {
|
||||
nativeStopCall(nativeHandle)
|
||||
callback.onCallStateChanged(CallStateConstants.CLOSED)
|
||||
}
|
||||
}
|
||||
|
||||
/** Mute or unmute the microphone. */
|
||||
fun setMute(muted: Boolean) {
|
||||
if (nativeHandle != 0L) nativeSetMute(nativeHandle, muted)
|
||||
}
|
||||
|
||||
/** Enable or disable loudspeaker mode. */
|
||||
fun setSpeaker(speaker: Boolean) {
|
||||
if (nativeHandle != 0L) nativeSetSpeaker(nativeHandle, speaker)
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Get current call statistics as a JSON string.
|
||||
*
|
||||
* @return JSON-serialised [CallStats], or `"{}"` if the engine is not initialised.
|
||||
*/
|
||||
fun getStats(): String {
|
||||
if (nativeHandle == 0L) return "{}"
|
||||
return try {
|
||||
nativeGetStats(nativeHandle) ?: "{}"
|
||||
} catch (_: Exception) {
|
||||
"{}"
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Force a quality profile, overriding adaptive selection.
|
||||
*
|
||||
* @param profile 0 = GOOD, 1 = DEGRADED, 2 = CATASTROPHIC
|
||||
*/
|
||||
fun forceProfile(profile: Int) {
|
||||
if (nativeHandle != 0L) nativeForceProfile(nativeHandle, profile)
|
||||
}
|
||||
|
||||
/** Destroy the native engine and free all resources. The instance must not be reused. */
|
||||
fun destroy() {
|
||||
if (nativeHandle != 0L) {
|
||||
nativeDestroy(nativeHandle)
|
||||
nativeHandle = 0L
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Write captured PCM samples into the engine's capture ring buffer.
|
||||
* Called from the AudioRecord capture thread.
|
||||
*/
|
||||
fun writeAudio(pcm: ShortArray): Int {
|
||||
if (nativeHandle == 0L) return 0
|
||||
return nativeWriteAudio(nativeHandle, pcm)
|
||||
}
|
||||
|
||||
/**
|
||||
* Read decoded PCM samples from the engine's playout ring buffer.
|
||||
* Called from the AudioTrack playout thread.
|
||||
*/
|
||||
fun readAudio(pcm: ShortArray): Int {
|
||||
if (nativeHandle == 0L) return 0
|
||||
return nativeReadAudio(nativeHandle, pcm)
|
||||
}
|
||||
|
||||
// -- JNI native methods --------------------------------------------------
|
||||
|
||||
private external fun nativeInit(): Long
|
||||
private external fun nativeStartCall(
|
||||
handle: Long, relay: String, room: String, seed: String, token: String, alias: String
|
||||
): Int
|
||||
private external fun nativeStopCall(handle: Long)
|
||||
private external fun nativeSetMute(handle: Long, muted: Boolean)
|
||||
private external fun nativeSetSpeaker(handle: Long, speaker: Boolean)
|
||||
private external fun nativeGetStats(handle: Long): String?
|
||||
private external fun nativeForceProfile(handle: Long, profile: Int)
|
||||
private external fun nativeWriteAudio(handle: Long, pcm: ShortArray): Int
|
||||
private external fun nativeReadAudio(handle: Long, pcm: ShortArray): Int
|
||||
private external fun nativeDestroy(handle: Long)
|
||||
|
||||
companion object {
|
||||
init {
|
||||
System.loadLibrary("wzp_android")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/** Integer constants matching the Rust [CallState] enum ordinals. */
|
||||
object CallStateConstants {
|
||||
const val IDLE = 0
|
||||
const val CONNECTING = 1
|
||||
const val ACTIVE = 2
|
||||
const val RECONNECTING = 3
|
||||
const val CLOSED = 4
|
||||
}
|
||||
@@ -1,172 +0,0 @@
|
||||
package com.wzp.service
|
||||
|
||||
import android.app.Notification
|
||||
import android.app.PendingIntent
|
||||
import android.app.Service
|
||||
import android.content.Context
|
||||
import android.content.Intent
|
||||
import android.media.AudioManager
|
||||
import android.net.wifi.WifiManager
|
||||
import android.os.IBinder
|
||||
import android.os.PowerManager
|
||||
import androidx.core.app.NotificationCompat
|
||||
import com.wzp.WzpApplication
|
||||
import com.wzp.ui.call.CallActivity
|
||||
|
||||
/**
|
||||
* Foreground service that keeps the VoIP call alive when the app is backgrounded.
|
||||
*
|
||||
* Responsibilities:
|
||||
* - Shows a persistent notification during the call
|
||||
* - Acquires a partial wake lock so the CPU stays on
|
||||
* - Acquires a Wi-Fi lock to prevent Wi-Fi from going to sleep
|
||||
* - Sets [AudioManager] mode to [AudioManager.MODE_IN_COMMUNICATION]
|
||||
* - Releases all resources when the call ends
|
||||
*/
|
||||
class CallService : Service() {
|
||||
|
||||
private var wakeLock: PowerManager.WakeLock? = null
|
||||
private var wifiLock: WifiManager.WifiLock? = null
|
||||
private var previousAudioMode: Int = AudioManager.MODE_NORMAL
|
||||
|
||||
// -- Lifecycle ------------------------------------------------------------
|
||||
|
||||
override fun onCreate() {
|
||||
super.onCreate()
|
||||
acquireWakeLock()
|
||||
acquireWifiLock()
|
||||
setAudioMode()
|
||||
}
|
||||
|
||||
override fun onStartCommand(intent: Intent?, flags: Int, startId: Int): Int {
|
||||
when (intent?.action) {
|
||||
ACTION_STOP -> {
|
||||
onStopFromNotification?.invoke()
|
||||
stopSelf()
|
||||
return START_NOT_STICKY
|
||||
}
|
||||
}
|
||||
|
||||
startForeground(NOTIFICATION_ID, buildNotification())
|
||||
return START_STICKY
|
||||
}
|
||||
|
||||
override fun onDestroy() {
|
||||
restoreAudioMode()
|
||||
releaseWifiLock()
|
||||
releaseWakeLock()
|
||||
super.onDestroy()
|
||||
}
|
||||
|
||||
override fun onBind(intent: Intent?): IBinder? = null
|
||||
|
||||
// -- Notification ---------------------------------------------------------
|
||||
|
||||
private fun buildNotification(): Notification {
|
||||
// Tapping the notification returns to the call screen
|
||||
val contentIntent = PendingIntent.getActivity(
|
||||
this,
|
||||
0,
|
||||
Intent(this, CallActivity::class.java).apply {
|
||||
flags = Intent.FLAG_ACTIVITY_SINGLE_TOP
|
||||
},
|
||||
PendingIntent.FLAG_IMMUTABLE or PendingIntent.FLAG_UPDATE_CURRENT
|
||||
)
|
||||
|
||||
// "End call" action button
|
||||
val stopIntent = PendingIntent.getService(
|
||||
this,
|
||||
1,
|
||||
Intent(this, CallService::class.java).apply { action = ACTION_STOP },
|
||||
PendingIntent.FLAG_IMMUTABLE or PendingIntent.FLAG_UPDATE_CURRENT
|
||||
)
|
||||
|
||||
return NotificationCompat.Builder(this, WzpApplication.CHANNEL_ID)
|
||||
.setContentTitle("WZ Phone")
|
||||
.setContentText("Call in progress")
|
||||
.setSmallIcon(android.R.drawable.ic_menu_call)
|
||||
.setOngoing(true)
|
||||
.setContentIntent(contentIntent)
|
||||
.addAction(android.R.drawable.ic_menu_close_clear_cancel, "End Call", stopIntent)
|
||||
.setCategory(NotificationCompat.CATEGORY_CALL)
|
||||
.setPriority(NotificationCompat.PRIORITY_LOW)
|
||||
.build()
|
||||
}
|
||||
|
||||
// -- Wake lock ------------------------------------------------------------
|
||||
|
||||
private fun acquireWakeLock() {
|
||||
val pm = getSystemService(Context.POWER_SERVICE) as PowerManager
|
||||
wakeLock = pm.newWakeLock(
|
||||
PowerManager.PARTIAL_WAKE_LOCK,
|
||||
"wzp:call_wake_lock"
|
||||
).apply {
|
||||
acquire(MAX_CALL_DURATION_MS)
|
||||
}
|
||||
}
|
||||
|
||||
private fun releaseWakeLock() {
|
||||
wakeLock?.let {
|
||||
if (it.isHeld) it.release()
|
||||
}
|
||||
wakeLock = null
|
||||
}
|
||||
|
||||
// -- Wi-Fi lock -----------------------------------------------------------
|
||||
|
||||
@Suppress("DEPRECATION")
|
||||
private fun acquireWifiLock() {
|
||||
val wm = applicationContext.getSystemService(Context.WIFI_SERVICE) as WifiManager
|
||||
wifiLock = wm.createWifiLock(
|
||||
WifiManager.WIFI_MODE_FULL_HIGH_PERF,
|
||||
"wzp:call_wifi_lock"
|
||||
).apply {
|
||||
acquire()
|
||||
}
|
||||
}
|
||||
|
||||
private fun releaseWifiLock() {
|
||||
wifiLock?.let {
|
||||
if (it.isHeld) it.release()
|
||||
}
|
||||
wifiLock = null
|
||||
}
|
||||
|
||||
// -- Audio mode -----------------------------------------------------------
|
||||
|
||||
private fun setAudioMode() {
|
||||
val am = getSystemService(Context.AUDIO_SERVICE) as AudioManager
|
||||
previousAudioMode = am.mode
|
||||
am.mode = AudioManager.MODE_IN_COMMUNICATION
|
||||
}
|
||||
|
||||
private fun restoreAudioMode() {
|
||||
val am = getSystemService(Context.AUDIO_SERVICE) as AudioManager
|
||||
am.mode = previousAudioMode
|
||||
}
|
||||
|
||||
// -- Static helpers -------------------------------------------------------
|
||||
|
||||
companion object {
|
||||
private const val NOTIFICATION_ID = 1001
|
||||
private const val ACTION_STOP = "com.wzp.service.STOP"
|
||||
private const val MAX_CALL_DURATION_MS = 4L * 60 * 60 * 1000 // 4 hours
|
||||
|
||||
/** Called when the user taps "End Call" in the notification. */
|
||||
var onStopFromNotification: (() -> Unit)? = null
|
||||
|
||||
/** Start the foreground call service. */
|
||||
fun start(context: Context) {
|
||||
val intent = Intent(context, CallService::class.java)
|
||||
context.startForegroundService(intent)
|
||||
}
|
||||
|
||||
/** Stop the foreground call service. */
|
||||
fun stop(context: Context) {
|
||||
val intent = Intent(context, CallService::class.java).apply {
|
||||
action = ACTION_STOP
|
||||
}
|
||||
context.startService(intent)
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,99 +0,0 @@
|
||||
package com.wzp.ui.call
|
||||
|
||||
import android.Manifest
|
||||
import android.content.pm.PackageManager
|
||||
import android.os.Bundle
|
||||
import android.widget.Toast
|
||||
import androidx.activity.ComponentActivity
|
||||
import androidx.activity.compose.setContent
|
||||
import androidx.activity.result.contract.ActivityResultContracts
|
||||
import androidx.activity.viewModels
|
||||
import androidx.compose.material3.MaterialTheme
|
||||
import androidx.compose.material3.darkColorScheme
|
||||
import androidx.compose.material3.dynamicDarkColorScheme
|
||||
import androidx.compose.material3.dynamicLightColorScheme
|
||||
import androidx.compose.material3.lightColorScheme
|
||||
import androidx.compose.foundation.isSystemInDarkTheme
|
||||
import androidx.compose.runtime.Composable
|
||||
import androidx.compose.runtime.getValue
|
||||
import androidx.compose.runtime.mutableStateOf
|
||||
import androidx.compose.runtime.remember
|
||||
import androidx.compose.runtime.setValue
|
||||
import androidx.compose.ui.platform.LocalContext
|
||||
import androidx.core.content.ContextCompat
|
||||
import com.wzp.ui.settings.SettingsScreen
|
||||
|
||||
/**
|
||||
* Main activity hosting the in-call Compose UI.
|
||||
*
|
||||
* Call lifecycle (wake lock, Wi-Fi lock, audio mode, notification)
|
||||
* is managed by [com.wzp.service.CallService] foreground service.
|
||||
*/
|
||||
class CallActivity : ComponentActivity() {
|
||||
|
||||
private val viewModel: CallViewModel by viewModels()
|
||||
|
||||
private val audioPermissionLauncher = registerForActivityResult(
|
||||
ActivityResultContracts.RequestPermission()
|
||||
) { granted ->
|
||||
if (!granted) {
|
||||
Toast.makeText(this, "Microphone permission is required for calls", Toast.LENGTH_LONG).show()
|
||||
}
|
||||
}
|
||||
|
||||
override fun onCreate(savedInstanceState: Bundle?) {
|
||||
super.onCreate(savedInstanceState)
|
||||
|
||||
viewModel.setContext(this)
|
||||
|
||||
setContent {
|
||||
WzpTheme {
|
||||
var showSettings by remember { mutableStateOf(false) }
|
||||
if (showSettings) {
|
||||
SettingsScreen(
|
||||
viewModel = viewModel,
|
||||
onBack = { showSettings = false }
|
||||
)
|
||||
} else {
|
||||
InCallScreen(
|
||||
viewModel = viewModel,
|
||||
onHangUp = { viewModel.stopCall() },
|
||||
onOpenSettings = { showSettings = true }
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (ContextCompat.checkSelfPermission(this, Manifest.permission.RECORD_AUDIO)
|
||||
!= PackageManager.PERMISSION_GRANTED
|
||||
) {
|
||||
audioPermissionLauncher.launch(Manifest.permission.RECORD_AUDIO)
|
||||
}
|
||||
}
|
||||
|
||||
override fun onDestroy() {
|
||||
super.onDestroy()
|
||||
if (isFinishing) {
|
||||
viewModel.stopCall()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Composable
|
||||
fun WzpTheme(content: @Composable () -> Unit) {
|
||||
val darkTheme = isSystemInDarkTheme()
|
||||
val context = LocalContext.current
|
||||
|
||||
val colorScheme = when {
|
||||
android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.S -> {
|
||||
if (darkTheme) dynamicDarkColorScheme(context) else dynamicLightColorScheme(context)
|
||||
}
|
||||
darkTheme -> darkColorScheme()
|
||||
else -> lightColorScheme()
|
||||
}
|
||||
|
||||
MaterialTheme(
|
||||
colorScheme = colorScheme,
|
||||
content = content
|
||||
)
|
||||
}
|
||||
@@ -1,362 +0,0 @@
|
||||
package com.wzp.ui.call
|
||||
|
||||
import android.content.Context
|
||||
import android.util.Log
|
||||
import androidx.lifecycle.ViewModel
|
||||
import androidx.lifecycle.viewModelScope
|
||||
import com.wzp.audio.AudioPipeline
|
||||
import com.wzp.audio.AudioRouteManager
|
||||
import com.wzp.data.SettingsRepository
|
||||
import com.wzp.engine.CallStats
|
||||
import com.wzp.service.CallService
|
||||
import com.wzp.engine.WzpCallback
|
||||
import com.wzp.engine.WzpEngine
|
||||
import kotlinx.coroutines.Job
|
||||
import kotlinx.coroutines.delay
|
||||
import kotlinx.coroutines.flow.MutableStateFlow
|
||||
import kotlinx.coroutines.flow.StateFlow
|
||||
import kotlinx.coroutines.flow.asStateFlow
|
||||
import kotlinx.coroutines.isActive
|
||||
import kotlinx.coroutines.launch
|
||||
import java.net.Inet4Address
|
||||
import java.net.Inet6Address
|
||||
import java.net.InetAddress
|
||||
|
||||
data class ServerEntry(val address: String, val label: String)
|
||||
|
||||
class CallViewModel : ViewModel(), WzpCallback {
|
||||
|
||||
private var engine: WzpEngine? = null
|
||||
private var engineInitialized = false
|
||||
private var audioPipeline: AudioPipeline? = null
|
||||
private var audioRouteManager: AudioRouteManager? = null
|
||||
private var audioStarted = false
|
||||
private var appContext: Context? = null
|
||||
private var settings: SettingsRepository? = null
|
||||
|
||||
private val _callState = MutableStateFlow(0)
|
||||
val callState: StateFlow<Int> get() = _callState.asStateFlow()
|
||||
|
||||
private val _isMuted = MutableStateFlow(false)
|
||||
val isMuted: StateFlow<Boolean> = _isMuted.asStateFlow()
|
||||
|
||||
private val _isSpeaker = MutableStateFlow(false)
|
||||
val isSpeaker: StateFlow<Boolean> = _isSpeaker.asStateFlow()
|
||||
|
||||
private val _stats = MutableStateFlow(CallStats())
|
||||
val stats: StateFlow<CallStats> = _stats.asStateFlow()
|
||||
|
||||
private val _qualityTier = MutableStateFlow(0)
|
||||
val qualityTier: StateFlow<Int> = _qualityTier.asStateFlow()
|
||||
|
||||
private val _errorMessage = MutableStateFlow<String?>(null)
|
||||
val errorMessage: StateFlow<String?> = _errorMessage.asStateFlow()
|
||||
|
||||
private val _roomName = MutableStateFlow(DEFAULT_ROOM)
|
||||
val roomName: StateFlow<String> = _roomName.asStateFlow()
|
||||
|
||||
private val _selectedServer = MutableStateFlow(0)
|
||||
val selectedServer: StateFlow<Int> = _selectedServer.asStateFlow()
|
||||
|
||||
private val _servers = MutableStateFlow(DEFAULT_SERVERS.toList())
|
||||
val servers: StateFlow<List<ServerEntry>> = _servers.asStateFlow()
|
||||
|
||||
private val _preferIPv6 = MutableStateFlow(false)
|
||||
val preferIPv6: StateFlow<Boolean> = _preferIPv6.asStateFlow()
|
||||
|
||||
private val _playoutGainDb = MutableStateFlow(0f)
|
||||
val playoutGainDb: StateFlow<Float> = _playoutGainDb.asStateFlow()
|
||||
|
||||
private val _captureGainDb = MutableStateFlow(0f)
|
||||
val captureGainDb: StateFlow<Float> = _captureGainDb.asStateFlow()
|
||||
|
||||
private val _alias = MutableStateFlow("")
|
||||
val alias: StateFlow<String> = _alias.asStateFlow()
|
||||
|
||||
private val _seedHex = MutableStateFlow("")
|
||||
val seedHex: StateFlow<String> = _seedHex.asStateFlow()
|
||||
|
||||
private var statsJob: Job? = null
|
||||
|
||||
companion object {
|
||||
private const val TAG = "WzpCall"
|
||||
val DEFAULT_SERVERS = listOf(
|
||||
ServerEntry("172.16.81.175:4433", "LAN (172.16.81.175)"),
|
||||
ServerEntry("193.180.213.68:4433", "Pangolin (IP)"),
|
||||
)
|
||||
const val DEFAULT_ROOM = "android"
|
||||
}
|
||||
|
||||
fun setContext(context: Context) {
|
||||
val appCtx = context.applicationContext
|
||||
appContext = appCtx
|
||||
if (audioPipeline == null) {
|
||||
audioPipeline = AudioPipeline(appCtx)
|
||||
}
|
||||
if (audioRouteManager == null) {
|
||||
audioRouteManager = AudioRouteManager(appCtx)
|
||||
}
|
||||
if (settings == null) {
|
||||
settings = SettingsRepository(appCtx)
|
||||
loadSettings()
|
||||
}
|
||||
}
|
||||
|
||||
private fun loadSettings() {
|
||||
val s = settings ?: return
|
||||
s.loadServers()?.let { saved ->
|
||||
if (saved.isNotEmpty()) _servers.value = saved
|
||||
}
|
||||
_selectedServer.value = s.loadSelectedServer().coerceIn(0, _servers.value.lastIndex)
|
||||
_roomName.value = s.loadRoom()
|
||||
_alias.value = s.getOrCreateAlias()
|
||||
_preferIPv6.value = s.loadPreferIPv6()
|
||||
_playoutGainDb.value = s.loadPlayoutGain()
|
||||
_captureGainDb.value = s.loadCaptureGain()
|
||||
_seedHex.value = s.getOrCreateSeedHex()
|
||||
}
|
||||
|
||||
fun selectServer(index: Int) {
|
||||
if (index in _servers.value.indices) {
|
||||
_selectedServer.value = index
|
||||
settings?.saveSelectedServer(index)
|
||||
}
|
||||
}
|
||||
|
||||
fun setPreferIPv6(prefer: Boolean) {
|
||||
_preferIPv6.value = prefer
|
||||
settings?.savePreferIPv6(prefer)
|
||||
}
|
||||
|
||||
fun addServer(hostPort: String, label: String) {
|
||||
val current = _servers.value.toMutableList()
|
||||
current.add(ServerEntry(hostPort, label))
|
||||
_servers.value = current
|
||||
settings?.saveServers(current)
|
||||
}
|
||||
|
||||
fun removeServer(index: Int) {
|
||||
if (index < DEFAULT_SERVERS.size) return // don't remove built-in servers
|
||||
val current = _servers.value.toMutableList()
|
||||
if (index in current.indices) {
|
||||
current.removeAt(index)
|
||||
_servers.value = current
|
||||
if (_selectedServer.value >= current.size) {
|
||||
_selectedServer.value = 0
|
||||
}
|
||||
settings?.saveServers(current)
|
||||
settings?.saveSelectedServer(_selectedServer.value)
|
||||
}
|
||||
}
|
||||
|
||||
fun setRoomName(name: String) {
|
||||
_roomName.value = name
|
||||
settings?.saveRoom(name)
|
||||
}
|
||||
|
||||
fun setPlayoutGainDb(db: Float) {
|
||||
_playoutGainDb.value = db
|
||||
audioPipeline?.playoutGainDb = db
|
||||
settings?.savePlayoutGain(db)
|
||||
}
|
||||
|
||||
fun setCaptureGainDb(db: Float) {
|
||||
_captureGainDb.value = db
|
||||
audioPipeline?.captureGainDb = db
|
||||
settings?.saveCaptureGain(db)
|
||||
}
|
||||
|
||||
fun setAlias(alias: String) {
|
||||
_alias.value = alias
|
||||
settings?.saveAlias(alias)
|
||||
}
|
||||
|
||||
fun restoreSeed(hex: String) {
|
||||
_seedHex.value = hex
|
||||
settings?.saveSeedHex(hex)
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolve DNS hostname to IP address on the Kotlin/Android side,
|
||||
* since Rust's DNS resolution may not work on Android.
|
||||
* Returns "ip:port" string.
|
||||
*/
|
||||
private fun resolveToIp(hostPort: String): String {
|
||||
val parts = hostPort.split(":")
|
||||
if (parts.size != 2) return hostPort
|
||||
val host = parts[0]
|
||||
val port = parts[1]
|
||||
|
||||
// Already an IP address — return as-is
|
||||
if (host.matches(Regex("""\d+\.\d+\.\d+\.\d+"""))) return hostPort
|
||||
if (host.contains(":")) return hostPort // IPv6 literal
|
||||
|
||||
return try {
|
||||
val addresses = InetAddress.getAllByName(host)
|
||||
val preferV6 = _preferIPv6.value
|
||||
val picked = if (preferV6) {
|
||||
addresses.firstOrNull { it is Inet6Address } ?: addresses.firstOrNull { it is Inet4Address }
|
||||
} else {
|
||||
addresses.firstOrNull { it is Inet4Address } ?: addresses.firstOrNull { it is Inet6Address }
|
||||
}
|
||||
if (picked != null) {
|
||||
val ip = picked.hostAddress ?: host
|
||||
val formatted = if (picked is Inet6Address) "[$ip]:$port" else "$ip:$port"
|
||||
formatted
|
||||
} else {
|
||||
hostPort
|
||||
}
|
||||
} catch (_: Exception) {
|
||||
hostPort // resolution failed — pass through and let Rust try
|
||||
}
|
||||
}
|
||||
|
||||
/** Tear down engine and audio. Pass stopService=true to also stop the foreground service. */
|
||||
private fun teardown(stopService: Boolean = true) {
|
||||
Log.i(TAG, "teardown: stopping audio, stopService=$stopService")
|
||||
CallService.onStopFromNotification = null
|
||||
stopAudio()
|
||||
stopStatsPolling()
|
||||
Log.i(TAG, "teardown: stopping engine")
|
||||
try { engine?.stopCall() } catch (e: Exception) { Log.w(TAG, "stopCall err: $e") }
|
||||
try { engine?.destroy() } catch (e: Exception) { Log.w(TAG, "destroy err: $e") }
|
||||
engine = null
|
||||
engineInitialized = false
|
||||
_callState.value = 0
|
||||
if (stopService) {
|
||||
try { appContext?.let { CallService.stop(it) } } catch (_: Exception) {}
|
||||
}
|
||||
Log.i(TAG, "teardown: done")
|
||||
}
|
||||
|
||||
fun startCall() {
|
||||
val serverEntry = _servers.value[_selectedServer.value]
|
||||
val room = _roomName.value
|
||||
Log.i(TAG, "startCall: server=${serverEntry.address} room=$room")
|
||||
try {
|
||||
// Teardown previous call but don't stop the service (we're about to restart it)
|
||||
teardown(stopService = false)
|
||||
|
||||
Log.i(TAG, "startCall: creating engine")
|
||||
engine = WzpEngine(this)
|
||||
engine!!.init()
|
||||
engineInitialized = true
|
||||
_callState.value = 1
|
||||
_errorMessage.value = null
|
||||
try { appContext?.let { CallService.start(it) } } catch (e: Exception) {
|
||||
Log.w(TAG, "service start err: $e")
|
||||
}
|
||||
startStatsPolling()
|
||||
|
||||
viewModelScope.launch(kotlinx.coroutines.Dispatchers.IO) {
|
||||
try {
|
||||
val relay = resolveToIp(serverEntry.address)
|
||||
val seed = _seedHex.value
|
||||
val name = _alias.value
|
||||
Log.i(TAG, "startCall: resolved=$relay, alias=$name, calling engine.startCall")
|
||||
val result = engine?.startCall(relay, room, seedHex = seed, alias = name) ?: -1
|
||||
Log.i(TAG, "startCall: engine returned $result")
|
||||
// Only wire up notification callback after engine is running
|
||||
CallService.onStopFromNotification = { stopCall() }
|
||||
if (result != 0) {
|
||||
_callState.value = 0
|
||||
_errorMessage.value = "Failed to start call (code $result)"
|
||||
appContext?.let { CallService.stop(it) }
|
||||
}
|
||||
} catch (e: Exception) {
|
||||
Log.e(TAG, "startCall IO error", e)
|
||||
_callState.value = 0
|
||||
_errorMessage.value = "Engine error: ${e.message}"
|
||||
appContext?.let { CallService.stop(it) }
|
||||
}
|
||||
}
|
||||
} catch (e: Exception) {
|
||||
Log.e(TAG, "startCall error", e)
|
||||
_callState.value = 0
|
||||
_errorMessage.value = "Engine error: ${e.message}"
|
||||
appContext?.let { CallService.stop(it) }
|
||||
}
|
||||
}
|
||||
|
||||
fun stopCall() {
|
||||
Log.i(TAG, "stopCall")
|
||||
teardown()
|
||||
}
|
||||
|
||||
fun toggleMute() {
|
||||
val newMuted = !_isMuted.value
|
||||
_isMuted.value = newMuted
|
||||
try { engine?.setMute(newMuted) } catch (_: Exception) {}
|
||||
}
|
||||
|
||||
fun toggleSpeaker() {
|
||||
val newSpeaker = !_isSpeaker.value
|
||||
_isSpeaker.value = newSpeaker
|
||||
audioRouteManager?.setSpeaker(newSpeaker)
|
||||
}
|
||||
|
||||
fun clearError() { _errorMessage.value = null }
|
||||
|
||||
// WzpCallback
|
||||
override fun onCallStateChanged(state: Int) { _callState.value = state }
|
||||
override fun onQualityTierChanged(tier: Int) { _qualityTier.value = tier }
|
||||
override fun onError(code: Int, message: String) { _errorMessage.value = "Error $code: $message" }
|
||||
|
||||
private fun startAudio() {
|
||||
if (audioStarted) return
|
||||
val e = engine ?: return
|
||||
val ctx = appContext ?: return
|
||||
// Create a fresh pipeline each call to avoid stale threads
|
||||
audioPipeline = AudioPipeline(ctx).also {
|
||||
it.playoutGainDb = _playoutGainDb.value
|
||||
it.captureGainDb = _captureGainDb.value
|
||||
it.start(e)
|
||||
}
|
||||
audioRouteManager?.register()
|
||||
audioStarted = true
|
||||
}
|
||||
|
||||
private fun stopAudio() {
|
||||
if (!audioStarted) return
|
||||
audioPipeline?.stop()
|
||||
audioPipeline = null
|
||||
audioRouteManager?.unregister()
|
||||
audioRouteManager?.setSpeaker(false)
|
||||
_isSpeaker.value = false
|
||||
audioStarted = false
|
||||
}
|
||||
|
||||
private fun startStatsPolling() {
|
||||
statsJob?.cancel()
|
||||
statsJob = viewModelScope.launch {
|
||||
while (isActive) {
|
||||
try {
|
||||
val json = engine?.getStats() ?: "{}"
|
||||
if (json.isNotEmpty()) {
|
||||
Log.d(TAG, "raw: $json")
|
||||
val s = CallStats.fromJson(json)
|
||||
_stats.value = s
|
||||
if (s.state != 0) {
|
||||
_callState.value = s.state
|
||||
}
|
||||
if (s.state == 2 && !audioStarted) {
|
||||
startAudio()
|
||||
}
|
||||
}
|
||||
} catch (_: Exception) {}
|
||||
delay(500L)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private fun stopStatsPolling() {
|
||||
statsJob?.cancel()
|
||||
statsJob = null
|
||||
}
|
||||
|
||||
override fun onCleared() {
|
||||
super.onCleared()
|
||||
Log.i(TAG, "onCleared")
|
||||
teardown()
|
||||
}
|
||||
}
|
||||
@@ -1,600 +0,0 @@
|
||||
package com.wzp.ui.call
|
||||
|
||||
import androidx.compose.foundation.background
|
||||
import androidx.compose.foundation.layout.Arrangement
|
||||
import androidx.compose.foundation.layout.Box
|
||||
import androidx.compose.foundation.layout.Column
|
||||
import androidx.compose.foundation.layout.ExperimentalLayoutApi
|
||||
import androidx.compose.foundation.layout.FlowRow
|
||||
import androidx.compose.foundation.layout.Row
|
||||
import androidx.compose.foundation.layout.Spacer
|
||||
import androidx.compose.foundation.layout.fillMaxSize
|
||||
import androidx.compose.foundation.layout.fillMaxWidth
|
||||
import androidx.compose.foundation.layout.height
|
||||
import androidx.compose.foundation.layout.padding
|
||||
import androidx.compose.foundation.layout.size
|
||||
import androidx.compose.foundation.layout.width
|
||||
import androidx.compose.foundation.rememberScrollState
|
||||
import androidx.compose.foundation.shape.CircleShape
|
||||
import androidx.compose.foundation.shape.RoundedCornerShape
|
||||
import androidx.compose.foundation.verticalScroll
|
||||
import androidx.compose.material3.AlertDialog
|
||||
import androidx.compose.material3.Button
|
||||
import androidx.compose.material3.ButtonDefaults
|
||||
import androidx.compose.material3.FilledIconButton
|
||||
import androidx.compose.material3.FilledTonalIconButton
|
||||
import androidx.compose.material3.IconButtonDefaults
|
||||
import androidx.compose.material3.LinearProgressIndicator
|
||||
import androidx.compose.material3.MaterialTheme
|
||||
import androidx.compose.material3.OutlinedButton
|
||||
import androidx.compose.material3.OutlinedTextField
|
||||
import androidx.compose.material3.Slider
|
||||
import androidx.compose.material3.Surface
|
||||
import androidx.compose.material3.Switch
|
||||
import androidx.compose.material3.Text
|
||||
import androidx.compose.material3.TextButton
|
||||
import androidx.compose.runtime.Composable
|
||||
import androidx.compose.runtime.collectAsState
|
||||
import androidx.compose.runtime.getValue
|
||||
import androidx.compose.runtime.mutableStateOf
|
||||
import androidx.compose.runtime.remember
|
||||
import androidx.compose.runtime.setValue
|
||||
import androidx.compose.ui.Alignment
|
||||
import androidx.compose.ui.Modifier
|
||||
import androidx.compose.ui.draw.clip
|
||||
import androidx.compose.ui.graphics.Color
|
||||
import androidx.compose.ui.text.font.FontWeight
|
||||
import androidx.compose.ui.text.style.TextAlign
|
||||
import androidx.compose.ui.unit.dp
|
||||
import androidx.compose.ui.unit.sp
|
||||
import com.wzp.engine.CallStats
|
||||
import kotlin.math.roundToInt
|
||||
|
||||
@OptIn(ExperimentalLayoutApi::class)
|
||||
@Composable
|
||||
fun InCallScreen(
|
||||
viewModel: CallViewModel,
|
||||
onHangUp: () -> Unit,
|
||||
onOpenSettings: () -> Unit = {}
|
||||
) {
|
||||
val callState by viewModel.callState.collectAsState()
|
||||
val isMuted by viewModel.isMuted.collectAsState()
|
||||
val isSpeaker by viewModel.isSpeaker.collectAsState()
|
||||
val stats by viewModel.stats.collectAsState()
|
||||
val qualityTier by viewModel.qualityTier.collectAsState()
|
||||
val errorMessage by viewModel.errorMessage.collectAsState()
|
||||
val roomName by viewModel.roomName.collectAsState()
|
||||
val selectedServer by viewModel.selectedServer.collectAsState()
|
||||
val servers by viewModel.servers.collectAsState()
|
||||
val preferIPv6 by viewModel.preferIPv6.collectAsState()
|
||||
val playoutGainDb by viewModel.playoutGainDb.collectAsState()
|
||||
val captureGainDb by viewModel.captureGainDb.collectAsState()
|
||||
|
||||
var showAddServerDialog by remember { mutableStateOf(false) }
|
||||
|
||||
Surface(
|
||||
modifier = Modifier.fillMaxSize(),
|
||||
color = MaterialTheme.colorScheme.background
|
||||
) {
|
||||
Column(
|
||||
modifier = Modifier
|
||||
.fillMaxSize()
|
||||
.padding(24.dp)
|
||||
.verticalScroll(rememberScrollState()),
|
||||
horizontalAlignment = Alignment.CenterHorizontally
|
||||
) {
|
||||
// Settings button (top-right)
|
||||
if (callState == 0) {
|
||||
Row(modifier = Modifier.fillMaxWidth(), horizontalArrangement = Arrangement.End) {
|
||||
TextButton(onClick = onOpenSettings) {
|
||||
Text("Settings")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Spacer(modifier = Modifier.height(if (callState == 0) 16.dp else 48.dp))
|
||||
|
||||
Text(
|
||||
text = "WZ Phone",
|
||||
style = MaterialTheme.typography.headlineMedium.copy(
|
||||
fontWeight = FontWeight.Bold
|
||||
),
|
||||
color = MaterialTheme.colorScheme.primary
|
||||
)
|
||||
|
||||
Spacer(modifier = Modifier.height(8.dp))
|
||||
|
||||
CallStateLabel(callState)
|
||||
|
||||
if (callState == 0) {
|
||||
Spacer(modifier = Modifier.height(32.dp))
|
||||
|
||||
// Server selector
|
||||
Text(
|
||||
text = "Server",
|
||||
style = MaterialTheme.typography.labelSmall,
|
||||
color = MaterialTheme.colorScheme.onSurfaceVariant
|
||||
)
|
||||
Spacer(modifier = Modifier.height(4.dp))
|
||||
FlowRow(
|
||||
modifier = Modifier.fillMaxWidth(),
|
||||
horizontalArrangement = Arrangement.Center
|
||||
) {
|
||||
servers.forEachIndexed { idx, entry ->
|
||||
val isSelected = selectedServer == idx
|
||||
FilledTonalIconButton(
|
||||
onClick = { viewModel.selectServer(idx) },
|
||||
modifier = Modifier
|
||||
.padding(2.dp)
|
||||
.height(36.dp)
|
||||
.width(140.dp),
|
||||
shape = RoundedCornerShape(8.dp),
|
||||
colors = if (isSelected) {
|
||||
IconButtonDefaults.filledTonalIconButtonColors(
|
||||
containerColor = MaterialTheme.colorScheme.primaryContainer,
|
||||
contentColor = MaterialTheme.colorScheme.onPrimaryContainer
|
||||
)
|
||||
} else {
|
||||
IconButtonDefaults.filledTonalIconButtonColors()
|
||||
}
|
||||
) {
|
||||
Text(
|
||||
text = entry.label,
|
||||
style = MaterialTheme.typography.labelSmall,
|
||||
maxLines = 1
|
||||
)
|
||||
}
|
||||
}
|
||||
// + Add button
|
||||
OutlinedButton(
|
||||
onClick = { showAddServerDialog = true },
|
||||
modifier = Modifier
|
||||
.padding(2.dp)
|
||||
.height(36.dp),
|
||||
shape = RoundedCornerShape(8.dp)
|
||||
) {
|
||||
Text("+", style = MaterialTheme.typography.labelMedium)
|
||||
}
|
||||
}
|
||||
|
||||
// IPv4/IPv6 preference
|
||||
Spacer(modifier = Modifier.height(8.dp))
|
||||
Row(
|
||||
verticalAlignment = Alignment.CenterVertically,
|
||||
horizontalArrangement = Arrangement.Center
|
||||
) {
|
||||
Text(
|
||||
text = "IPv4",
|
||||
style = MaterialTheme.typography.labelSmall,
|
||||
color = if (!preferIPv6) MaterialTheme.colorScheme.primary
|
||||
else MaterialTheme.colorScheme.onSurfaceVariant
|
||||
)
|
||||
Switch(
|
||||
checked = preferIPv6,
|
||||
onCheckedChange = { viewModel.setPreferIPv6(it) },
|
||||
modifier = Modifier.padding(horizontal = 8.dp)
|
||||
)
|
||||
Text(
|
||||
text = "IPv6",
|
||||
style = MaterialTheme.typography.labelSmall,
|
||||
color = if (preferIPv6) MaterialTheme.colorScheme.primary
|
||||
else MaterialTheme.colorScheme.onSurfaceVariant
|
||||
)
|
||||
}
|
||||
|
||||
// Selected server address
|
||||
Spacer(modifier = Modifier.height(4.dp))
|
||||
Text(
|
||||
text = servers.getOrNull(selectedServer)?.address ?: "",
|
||||
style = MaterialTheme.typography.bodySmall,
|
||||
color = MaterialTheme.colorScheme.onSurfaceVariant
|
||||
)
|
||||
|
||||
Spacer(modifier = Modifier.height(8.dp))
|
||||
OutlinedTextField(
|
||||
value = roomName,
|
||||
onValueChange = { viewModel.setRoomName(it) },
|
||||
label = { Text("Room") },
|
||||
singleLine = true,
|
||||
modifier = Modifier.fillMaxWidth(0.6f)
|
||||
)
|
||||
|
||||
Spacer(modifier = Modifier.height(24.dp))
|
||||
|
||||
Button(
|
||||
onClick = { viewModel.startCall() },
|
||||
modifier = Modifier
|
||||
.size(120.dp)
|
||||
.clip(CircleShape),
|
||||
shape = CircleShape,
|
||||
colors = ButtonDefaults.buttonColors(
|
||||
containerColor = Color(0xFF4CAF50)
|
||||
)
|
||||
) {
|
||||
Text(
|
||||
text = "CALL",
|
||||
style = MaterialTheme.typography.titleLarge.copy(
|
||||
fontWeight = FontWeight.Bold
|
||||
),
|
||||
color = Color.White
|
||||
)
|
||||
}
|
||||
|
||||
errorMessage?.let { err ->
|
||||
Spacer(modifier = Modifier.height(16.dp))
|
||||
Text(
|
||||
text = err,
|
||||
style = MaterialTheme.typography.bodySmall,
|
||||
color = MaterialTheme.colorScheme.error
|
||||
)
|
||||
}
|
||||
} else {
|
||||
// In-call UI
|
||||
Spacer(modifier = Modifier.height(16.dp))
|
||||
|
||||
DurationDisplay(stats.durationSecs)
|
||||
|
||||
Spacer(modifier = Modifier.height(24.dp))
|
||||
|
||||
QualityIndicator(qualityTier, stats.qualityLabel)
|
||||
|
||||
if (stats.roomParticipantCount > 0) {
|
||||
Spacer(modifier = Modifier.height(8.dp))
|
||||
Text(
|
||||
text = "${stats.roomParticipantCount} in room",
|
||||
style = MaterialTheme.typography.bodySmall,
|
||||
color = MaterialTheme.colorScheme.onSurfaceVariant
|
||||
)
|
||||
stats.roomParticipants.forEach { member ->
|
||||
Text(
|
||||
text = member.displayName,
|
||||
style = MaterialTheme.typography.labelSmall,
|
||||
color = MaterialTheme.colorScheme.onSurfaceVariant
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
Spacer(modifier = Modifier.height(32.dp))
|
||||
|
||||
AudioLevelBar(stats.audioLevel)
|
||||
|
||||
Spacer(modifier = Modifier.height(16.dp))
|
||||
|
||||
// Gain sliders
|
||||
GainSlider(
|
||||
label = "Voice Volume",
|
||||
gainDb = playoutGainDb,
|
||||
onGainChange = { viewModel.setPlayoutGainDb(it) }
|
||||
)
|
||||
Spacer(modifier = Modifier.height(4.dp))
|
||||
GainSlider(
|
||||
label = "Mic Gain",
|
||||
gainDb = captureGainDb,
|
||||
onGainChange = { viewModel.setCaptureGainDb(it) }
|
||||
)
|
||||
|
||||
Spacer(modifier = Modifier.height(32.dp))
|
||||
|
||||
ControlRow(
|
||||
isMuted = isMuted,
|
||||
isSpeaker = isSpeaker,
|
||||
onToggleMute = viewModel::toggleMute,
|
||||
onToggleSpeaker = viewModel::toggleSpeaker,
|
||||
onHangUp = {
|
||||
viewModel.stopCall()
|
||||
}
|
||||
)
|
||||
|
||||
Spacer(modifier = Modifier.height(32.dp))
|
||||
|
||||
StatsOverlay(stats)
|
||||
|
||||
Spacer(modifier = Modifier.height(16.dp))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (showAddServerDialog) {
|
||||
AddServerDialog(
|
||||
onDismiss = { showAddServerDialog = false },
|
||||
onAdd = { host, port, label ->
|
||||
viewModel.addServer("$host:$port", label)
|
||||
showAddServerDialog = false
|
||||
}
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
@Composable
|
||||
private fun AddServerDialog(
|
||||
onDismiss: () -> Unit,
|
||||
onAdd: (host: String, port: String, label: String) -> Unit
|
||||
) {
|
||||
var host by remember { mutableStateOf("") }
|
||||
var port by remember { mutableStateOf("4433") }
|
||||
var label by remember { mutableStateOf("") }
|
||||
|
||||
AlertDialog(
|
||||
onDismissRequest = onDismiss,
|
||||
title = { Text("Add Server") },
|
||||
text = {
|
||||
Column {
|
||||
OutlinedTextField(
|
||||
value = host,
|
||||
onValueChange = { host = it },
|
||||
label = { Text("Host (IP or domain)") },
|
||||
singleLine = true,
|
||||
modifier = Modifier.fillMaxWidth()
|
||||
)
|
||||
Spacer(modifier = Modifier.height(8.dp))
|
||||
OutlinedTextField(
|
||||
value = port,
|
||||
onValueChange = { port = it },
|
||||
label = { Text("Port") },
|
||||
singleLine = true,
|
||||
modifier = Modifier.fillMaxWidth()
|
||||
)
|
||||
Spacer(modifier = Modifier.height(8.dp))
|
||||
OutlinedTextField(
|
||||
value = label,
|
||||
onValueChange = { label = it },
|
||||
label = { Text("Label (optional)") },
|
||||
singleLine = true,
|
||||
modifier = Modifier.fillMaxWidth()
|
||||
)
|
||||
}
|
||||
},
|
||||
confirmButton = {
|
||||
TextButton(
|
||||
onClick = {
|
||||
if (host.isNotBlank()) {
|
||||
val displayLabel = label.ifBlank { host }
|
||||
onAdd(host.trim(), port.trim(), displayLabel)
|
||||
}
|
||||
}
|
||||
) { Text("Add") }
|
||||
},
|
||||
dismissButton = {
|
||||
TextButton(onClick = onDismiss) { Text("Cancel") }
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
@Composable
|
||||
private fun CallStateLabel(state: Int) {
|
||||
val label = when (state) {
|
||||
0 -> "Ready to connect"
|
||||
1 -> "Connecting..."
|
||||
2 -> "Active"
|
||||
3 -> "Reconnecting..."
|
||||
4 -> "Call Ended"
|
||||
else -> "Unknown"
|
||||
}
|
||||
val color = when (state) {
|
||||
2 -> Color(0xFF4CAF50)
|
||||
1, 3 -> Color(0xFFFFC107)
|
||||
else -> MaterialTheme.colorScheme.onSurfaceVariant
|
||||
}
|
||||
Text(
|
||||
text = label,
|
||||
style = MaterialTheme.typography.titleMedium,
|
||||
color = color
|
||||
)
|
||||
}
|
||||
|
||||
@Composable
|
||||
private fun DurationDisplay(durationSecs: Double) {
|
||||
val totalSeconds = durationSecs.roundToInt()
|
||||
val minutes = totalSeconds / 60
|
||||
val seconds = totalSeconds % 60
|
||||
Text(
|
||||
text = "%02d:%02d".format(minutes, seconds),
|
||||
style = MaterialTheme.typography.displayLarge.copy(
|
||||
fontWeight = FontWeight.Light,
|
||||
letterSpacing = 4.sp
|
||||
),
|
||||
color = MaterialTheme.colorScheme.onBackground
|
||||
)
|
||||
}
|
||||
|
||||
@Composable
|
||||
private fun QualityIndicator(tier: Int, label: String) {
|
||||
val dotColor = when (tier) {
|
||||
0 -> Color(0xFF4CAF50)
|
||||
1 -> Color(0xFFFFC107)
|
||||
2 -> Color(0xFFF44336)
|
||||
else -> Color.Gray
|
||||
}
|
||||
Row(
|
||||
verticalAlignment = Alignment.CenterVertically,
|
||||
horizontalArrangement = Arrangement.Center
|
||||
) {
|
||||
Box(
|
||||
modifier = Modifier
|
||||
.size(12.dp)
|
||||
.clip(CircleShape)
|
||||
.background(dotColor)
|
||||
)
|
||||
Spacer(modifier = Modifier.width(8.dp))
|
||||
Text(
|
||||
text = label,
|
||||
style = MaterialTheme.typography.bodyMedium,
|
||||
color = MaterialTheme.colorScheme.onSurfaceVariant
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
@Composable
|
||||
private fun AudioLevelBar(audioLevel: Int) {
|
||||
val level = if (audioLevel > 0) {
|
||||
(audioLevel.toFloat() / 8000f).coerceIn(0.02f, 1f)
|
||||
} else {
|
||||
0f
|
||||
}
|
||||
Column(horizontalAlignment = Alignment.CenterHorizontally) {
|
||||
Text(
|
||||
text = "Audio Level",
|
||||
style = MaterialTheme.typography.labelSmall,
|
||||
color = MaterialTheme.colorScheme.onSurfaceVariant
|
||||
)
|
||||
Spacer(modifier = Modifier.height(4.dp))
|
||||
LinearProgressIndicator(
|
||||
progress = level,
|
||||
modifier = Modifier
|
||||
.fillMaxWidth(0.6f)
|
||||
.height(6.dp)
|
||||
.clip(RoundedCornerShape(3.dp)),
|
||||
color = MaterialTheme.colorScheme.primary,
|
||||
trackColor = MaterialTheme.colorScheme.surfaceVariant,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
@Composable
|
||||
private fun GainSlider(label: String, gainDb: Float, onGainChange: (Float) -> Unit) {
|
||||
Column(
|
||||
modifier = Modifier.fillMaxWidth(0.8f),
|
||||
horizontalAlignment = Alignment.CenterHorizontally
|
||||
) {
|
||||
val sign = if (gainDb >= 0) "+" else ""
|
||||
Text(
|
||||
text = "$label: ${sign}${"%.0f".format(gainDb)} dB",
|
||||
style = MaterialTheme.typography.labelSmall,
|
||||
color = MaterialTheme.colorScheme.onSurfaceVariant
|
||||
)
|
||||
Spacer(modifier = Modifier.height(4.dp))
|
||||
Slider(
|
||||
value = gainDb,
|
||||
onValueChange = { onGainChange(Math.round(it).toFloat()) },
|
||||
valueRange = -20f..20f,
|
||||
steps = 0,
|
||||
modifier = Modifier.fillMaxWidth()
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
@Composable
|
||||
private fun ControlRow(
|
||||
isMuted: Boolean,
|
||||
isSpeaker: Boolean,
|
||||
onToggleMute: () -> Unit,
|
||||
onToggleSpeaker: () -> Unit,
|
||||
onHangUp: () -> Unit
|
||||
) {
|
||||
Row(
|
||||
modifier = Modifier.fillMaxWidth(),
|
||||
horizontalArrangement = Arrangement.SpaceEvenly,
|
||||
verticalAlignment = Alignment.CenterVertically
|
||||
) {
|
||||
FilledTonalIconButton(
|
||||
onClick = onToggleMute,
|
||||
modifier = Modifier.size(56.dp),
|
||||
colors = if (isMuted) {
|
||||
IconButtonDefaults.filledTonalIconButtonColors(
|
||||
containerColor = MaterialTheme.colorScheme.errorContainer,
|
||||
contentColor = MaterialTheme.colorScheme.onErrorContainer
|
||||
)
|
||||
} else {
|
||||
IconButtonDefaults.filledTonalIconButtonColors()
|
||||
}
|
||||
) {
|
||||
Text(
|
||||
text = if (isMuted) "MIC\nOFF" else "MIC",
|
||||
textAlign = TextAlign.Center,
|
||||
style = MaterialTheme.typography.labelSmall,
|
||||
lineHeight = 12.sp
|
||||
)
|
||||
}
|
||||
|
||||
FilledIconButton(
|
||||
onClick = onHangUp,
|
||||
modifier = Modifier.size(72.dp),
|
||||
shape = CircleShape,
|
||||
colors = IconButtonDefaults.filledIconButtonColors(
|
||||
containerColor = Color(0xFFF44336),
|
||||
contentColor = Color.White
|
||||
)
|
||||
) {
|
||||
Text(
|
||||
text = "END",
|
||||
style = MaterialTheme.typography.titleMedium.copy(
|
||||
fontWeight = FontWeight.Bold
|
||||
)
|
||||
)
|
||||
}
|
||||
|
||||
FilledTonalIconButton(
|
||||
onClick = onToggleSpeaker,
|
||||
modifier = Modifier.size(56.dp),
|
||||
colors = if (isSpeaker) {
|
||||
IconButtonDefaults.filledTonalIconButtonColors(
|
||||
containerColor = MaterialTheme.colorScheme.primaryContainer,
|
||||
contentColor = MaterialTheme.colorScheme.onPrimaryContainer
|
||||
)
|
||||
} else {
|
||||
IconButtonDefaults.filledTonalIconButtonColors()
|
||||
}
|
||||
) {
|
||||
Text(
|
||||
text = if (isSpeaker) "SPK\nON" else "SPK",
|
||||
textAlign = TextAlign.Center,
|
||||
style = MaterialTheme.typography.labelSmall,
|
||||
lineHeight = 12.sp
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Composable
|
||||
private fun StatsOverlay(stats: CallStats) {
|
||||
Surface(
|
||||
modifier = Modifier.fillMaxWidth(),
|
||||
color = MaterialTheme.colorScheme.surfaceVariant.copy(alpha = 0.5f),
|
||||
shape = RoundedCornerShape(8.dp)
|
||||
) {
|
||||
Column(
|
||||
modifier = Modifier.padding(12.dp),
|
||||
horizontalAlignment = Alignment.CenterHorizontally
|
||||
) {
|
||||
Text(
|
||||
text = "Stats",
|
||||
style = MaterialTheme.typography.labelSmall,
|
||||
color = MaterialTheme.colorScheme.onSurfaceVariant
|
||||
)
|
||||
Spacer(modifier = Modifier.height(4.dp))
|
||||
Row(
|
||||
modifier = Modifier.fillMaxWidth(),
|
||||
horizontalArrangement = Arrangement.SpaceEvenly
|
||||
) {
|
||||
StatItem("Loss", "%.1f%%".format(stats.lossPct))
|
||||
StatItem("RTT", "${stats.rttMs}ms")
|
||||
StatItem("Jitter", "${stats.jitterMs}ms")
|
||||
}
|
||||
Spacer(modifier = Modifier.height(4.dp))
|
||||
Row(
|
||||
modifier = Modifier.fillMaxWidth(),
|
||||
horizontalArrangement = Arrangement.SpaceEvenly
|
||||
) {
|
||||
StatItem("Sent", "${stats.framesEncoded}")
|
||||
StatItem("Recv", "${stats.framesDecoded}")
|
||||
StatItem("FEC", "${stats.fecRecovered}")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Composable
|
||||
private fun StatItem(label: String, value: String) {
|
||||
Column(horizontalAlignment = Alignment.CenterHorizontally) {
|
||||
Text(
|
||||
text = value,
|
||||
style = MaterialTheme.typography.bodySmall.copy(fontWeight = FontWeight.Medium),
|
||||
color = MaterialTheme.colorScheme.onSurface
|
||||
)
|
||||
Text(
|
||||
text = label,
|
||||
style = MaterialTheme.typography.labelSmall,
|
||||
color = MaterialTheme.colorScheme.onSurfaceVariant
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -1,437 +0,0 @@
|
||||
package com.wzp.ui.settings
|
||||
|
||||
import android.content.ClipData
|
||||
import android.content.ClipboardManager
|
||||
import android.content.Context
|
||||
import android.widget.Toast
|
||||
import androidx.compose.foundation.layout.Arrangement
|
||||
import androidx.compose.foundation.layout.Column
|
||||
import androidx.compose.foundation.layout.ExperimentalLayoutApi
|
||||
import androidx.compose.foundation.layout.FlowRow
|
||||
import androidx.compose.foundation.layout.Row
|
||||
import androidx.compose.foundation.layout.Spacer
|
||||
import androidx.compose.foundation.layout.fillMaxSize
|
||||
import androidx.compose.foundation.layout.fillMaxWidth
|
||||
import androidx.compose.foundation.layout.height
|
||||
import androidx.compose.foundation.layout.padding
|
||||
import androidx.compose.foundation.layout.width
|
||||
import androidx.compose.foundation.rememberScrollState
|
||||
import androidx.compose.foundation.shape.RoundedCornerShape
|
||||
import androidx.compose.foundation.verticalScroll
|
||||
import androidx.compose.material3.AlertDialog
|
||||
import androidx.compose.material3.Button
|
||||
import androidx.compose.material3.ButtonDefaults
|
||||
import androidx.compose.material3.FilledTonalButton
|
||||
import androidx.compose.material3.FilledTonalIconButton
|
||||
import androidx.compose.material3.Divider
|
||||
import androidx.compose.material3.IconButtonDefaults
|
||||
import androidx.compose.material3.MaterialTheme
|
||||
import androidx.compose.material3.OutlinedButton
|
||||
import androidx.compose.material3.OutlinedTextField
|
||||
import androidx.compose.material3.Slider
|
||||
import androidx.compose.material3.Surface
|
||||
import androidx.compose.material3.Switch
|
||||
import androidx.compose.material3.Text
|
||||
import androidx.compose.material3.TextButton
|
||||
import androidx.compose.runtime.Composable
|
||||
import androidx.compose.runtime.collectAsState
|
||||
import androidx.compose.runtime.getValue
|
||||
import androidx.compose.runtime.mutableStateOf
|
||||
import androidx.compose.runtime.remember
|
||||
import androidx.compose.runtime.setValue
|
||||
import androidx.compose.ui.Alignment
|
||||
import androidx.compose.ui.Modifier
|
||||
import androidx.compose.ui.graphics.Color
|
||||
import androidx.compose.ui.platform.LocalContext
|
||||
import androidx.compose.ui.text.font.FontFamily
|
||||
import androidx.compose.ui.text.font.FontWeight
|
||||
import androidx.compose.ui.unit.dp
|
||||
import com.wzp.ui.call.CallViewModel
|
||||
|
||||
@OptIn(ExperimentalLayoutApi::class)
|
||||
@Composable
|
||||
fun SettingsScreen(
|
||||
viewModel: CallViewModel,
|
||||
onBack: () -> Unit
|
||||
) {
|
||||
val context = LocalContext.current
|
||||
val servers by viewModel.servers.collectAsState()
|
||||
val selectedServer by viewModel.selectedServer.collectAsState()
|
||||
val roomName by viewModel.roomName.collectAsState()
|
||||
val preferIPv6 by viewModel.preferIPv6.collectAsState()
|
||||
val playoutGainDb by viewModel.playoutGainDb.collectAsState()
|
||||
val captureGainDb by viewModel.captureGainDb.collectAsState()
|
||||
val alias by viewModel.alias.collectAsState()
|
||||
val seedHex by viewModel.seedHex.collectAsState()
|
||||
|
||||
var showAddServerDialog by remember { mutableStateOf(false) }
|
||||
var showRestoreKeyDialog by remember { mutableStateOf(false) }
|
||||
|
||||
Surface(
|
||||
modifier = Modifier.fillMaxSize(),
|
||||
color = MaterialTheme.colorScheme.background
|
||||
) {
|
||||
Column(
|
||||
modifier = Modifier
|
||||
.fillMaxSize()
|
||||
.padding(24.dp)
|
||||
.verticalScroll(rememberScrollState())
|
||||
) {
|
||||
// Header
|
||||
Row(
|
||||
modifier = Modifier.fillMaxWidth(),
|
||||
verticalAlignment = Alignment.CenterVertically
|
||||
) {
|
||||
TextButton(onClick = onBack) {
|
||||
Text("< Back")
|
||||
}
|
||||
Spacer(modifier = Modifier.weight(1f))
|
||||
Text(
|
||||
text = "Settings",
|
||||
style = MaterialTheme.typography.headlineSmall.copy(
|
||||
fontWeight = FontWeight.Bold
|
||||
),
|
||||
color = MaterialTheme.colorScheme.primary
|
||||
)
|
||||
Spacer(modifier = Modifier.weight(1f))
|
||||
// Balance the back button
|
||||
Spacer(modifier = Modifier.width(64.dp))
|
||||
}
|
||||
|
||||
Spacer(modifier = Modifier.height(24.dp))
|
||||
|
||||
// --- Identity ---
|
||||
SectionHeader("Identity")
|
||||
|
||||
OutlinedTextField(
|
||||
value = alias,
|
||||
onValueChange = { viewModel.setAlias(it) },
|
||||
label = { Text("Display Name") },
|
||||
singleLine = true,
|
||||
modifier = Modifier.fillMaxWidth()
|
||||
)
|
||||
|
||||
Spacer(modifier = Modifier.height(16.dp))
|
||||
|
||||
// Fingerprint display
|
||||
val fingerprint = if (seedHex.length >= 16) seedHex.take(16).uppercase() else "Not generated"
|
||||
Text(
|
||||
text = "Fingerprint",
|
||||
style = MaterialTheme.typography.labelSmall,
|
||||
color = MaterialTheme.colorScheme.onSurfaceVariant
|
||||
)
|
||||
Text(
|
||||
text = fingerprint.chunked(4).joinToString(" "),
|
||||
style = MaterialTheme.typography.bodyMedium.copy(
|
||||
fontFamily = FontFamily.Monospace
|
||||
),
|
||||
color = MaterialTheme.colorScheme.onSurface
|
||||
)
|
||||
|
||||
Spacer(modifier = Modifier.height(12.dp))
|
||||
|
||||
// Key backup/restore
|
||||
Row(horizontalArrangement = Arrangement.spacedBy(8.dp)) {
|
||||
FilledTonalButton(onClick = {
|
||||
val clipboard = context.getSystemService(Context.CLIPBOARD_SERVICE) as ClipboardManager
|
||||
clipboard.setPrimaryClip(ClipData.newPlainText("WZP Key", seedHex))
|
||||
Toast.makeText(context, "Key copied to clipboard", Toast.LENGTH_SHORT).show()
|
||||
}) {
|
||||
Text("Copy Key")
|
||||
}
|
||||
OutlinedButton(onClick = { showRestoreKeyDialog = true }) {
|
||||
Text("Restore Key")
|
||||
}
|
||||
}
|
||||
|
||||
Spacer(modifier = Modifier.height(24.dp))
|
||||
Divider()
|
||||
Spacer(modifier = Modifier.height(16.dp))
|
||||
|
||||
// --- Audio ---
|
||||
SectionHeader("Audio Defaults")
|
||||
|
||||
GainSlider(
|
||||
label = "Voice Volume",
|
||||
gainDb = playoutGainDb,
|
||||
onGainChange = { viewModel.setPlayoutGainDb(it) }
|
||||
)
|
||||
Spacer(modifier = Modifier.height(4.dp))
|
||||
GainSlider(
|
||||
label = "Mic Gain",
|
||||
gainDb = captureGainDb,
|
||||
onGainChange = { viewModel.setCaptureGainDb(it) }
|
||||
)
|
||||
|
||||
Spacer(modifier = Modifier.height(24.dp))
|
||||
Divider()
|
||||
Spacer(modifier = Modifier.height(16.dp))
|
||||
|
||||
// --- Servers ---
|
||||
SectionHeader("Servers")
|
||||
|
||||
FlowRow(
|
||||
modifier = Modifier.fillMaxWidth(),
|
||||
horizontalArrangement = Arrangement.Start,
|
||||
verticalArrangement = Arrangement.spacedBy(4.dp)
|
||||
) {
|
||||
servers.forEachIndexed { idx, entry ->
|
||||
val isSelected = selectedServer == idx
|
||||
Row(verticalAlignment = Alignment.CenterVertically) {
|
||||
FilledTonalIconButton(
|
||||
onClick = { viewModel.selectServer(idx) },
|
||||
modifier = Modifier
|
||||
.padding(end = 2.dp)
|
||||
.height(36.dp)
|
||||
.width(140.dp),
|
||||
shape = RoundedCornerShape(8.dp),
|
||||
colors = if (isSelected) {
|
||||
IconButtonDefaults.filledTonalIconButtonColors(
|
||||
containerColor = MaterialTheme.colorScheme.primaryContainer,
|
||||
contentColor = MaterialTheme.colorScheme.onPrimaryContainer
|
||||
)
|
||||
} else {
|
||||
IconButtonDefaults.filledTonalIconButtonColors()
|
||||
}
|
||||
) {
|
||||
Text(
|
||||
text = entry.label,
|
||||
style = MaterialTheme.typography.labelSmall,
|
||||
maxLines = 1
|
||||
)
|
||||
}
|
||||
// Show remove button for non-default servers
|
||||
if (idx >= 2) {
|
||||
TextButton(
|
||||
onClick = { viewModel.removeServer(idx) },
|
||||
modifier = Modifier.height(36.dp)
|
||||
) {
|
||||
Text("X", color = MaterialTheme.colorScheme.error)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Spacer(modifier = Modifier.height(8.dp))
|
||||
OutlinedButton(
|
||||
onClick = { showAddServerDialog = true },
|
||||
shape = RoundedCornerShape(8.dp)
|
||||
) {
|
||||
Text("+ Add Server")
|
||||
}
|
||||
|
||||
// Show selected server address
|
||||
Spacer(modifier = Modifier.height(8.dp))
|
||||
Text(
|
||||
text = "Default: ${servers.getOrNull(selectedServer)?.address ?: "none"}",
|
||||
style = MaterialTheme.typography.bodySmall,
|
||||
color = MaterialTheme.colorScheme.onSurfaceVariant
|
||||
)
|
||||
|
||||
Spacer(modifier = Modifier.height(24.dp))
|
||||
Divider()
|
||||
Spacer(modifier = Modifier.height(16.dp))
|
||||
|
||||
// --- Network ---
|
||||
SectionHeader("Network")
|
||||
|
||||
Row(
|
||||
verticalAlignment = Alignment.CenterVertically,
|
||||
modifier = Modifier.fillMaxWidth()
|
||||
) {
|
||||
Text(
|
||||
text = "Prefer IPv6",
|
||||
style = MaterialTheme.typography.bodyMedium,
|
||||
modifier = Modifier.weight(1f)
|
||||
)
|
||||
Switch(
|
||||
checked = preferIPv6,
|
||||
onCheckedChange = { viewModel.setPreferIPv6(it) }
|
||||
)
|
||||
}
|
||||
|
||||
Spacer(modifier = Modifier.height(24.dp))
|
||||
Divider()
|
||||
Spacer(modifier = Modifier.height(16.dp))
|
||||
|
||||
// --- Room ---
|
||||
SectionHeader("Room")
|
||||
|
||||
OutlinedTextField(
|
||||
value = roomName,
|
||||
onValueChange = { viewModel.setRoomName(it) },
|
||||
label = { Text("Default Room") },
|
||||
singleLine = true,
|
||||
modifier = Modifier.fillMaxWidth()
|
||||
)
|
||||
|
||||
Spacer(modifier = Modifier.height(32.dp))
|
||||
}
|
||||
}
|
||||
|
||||
if (showAddServerDialog) {
|
||||
AddServerDialog(
|
||||
onDismiss = { showAddServerDialog = false },
|
||||
onAdd = { host, port, label ->
|
||||
viewModel.addServer("$host:$port", label)
|
||||
showAddServerDialog = false
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
if (showRestoreKeyDialog) {
|
||||
RestoreKeyDialog(
|
||||
onDismiss = { showRestoreKeyDialog = false },
|
||||
onRestore = { hex ->
|
||||
viewModel.restoreSeed(hex)
|
||||
showRestoreKeyDialog = false
|
||||
Toast.makeText(context, "Key restored", Toast.LENGTH_SHORT).show()
|
||||
}
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
@Composable
|
||||
private fun SectionHeader(title: String) {
|
||||
Text(
|
||||
text = title,
|
||||
style = MaterialTheme.typography.titleMedium.copy(fontWeight = FontWeight.Bold),
|
||||
color = MaterialTheme.colorScheme.primary
|
||||
)
|
||||
Spacer(modifier = Modifier.height(8.dp))
|
||||
}
|
||||
|
||||
@Composable
|
||||
private fun GainSlider(label: String, gainDb: Float, onGainChange: (Float) -> Unit) {
|
||||
Column(
|
||||
modifier = Modifier.fillMaxWidth(),
|
||||
horizontalAlignment = Alignment.CenterHorizontally
|
||||
) {
|
||||
val sign = if (gainDb >= 0) "+" else ""
|
||||
Text(
|
||||
text = "$label: ${sign}${"%.0f".format(gainDb)} dB",
|
||||
style = MaterialTheme.typography.labelSmall,
|
||||
color = MaterialTheme.colorScheme.onSurfaceVariant
|
||||
)
|
||||
Slider(
|
||||
value = gainDb,
|
||||
onValueChange = { onGainChange(Math.round(it).toFloat()) },
|
||||
valueRange = -20f..20f,
|
||||
steps = 0,
|
||||
modifier = Modifier.fillMaxWidth()
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
@Composable
|
||||
private fun AddServerDialog(
|
||||
onDismiss: () -> Unit,
|
||||
onAdd: (host: String, port: String, label: String) -> Unit
|
||||
) {
|
||||
var host by remember { mutableStateOf("") }
|
||||
var port by remember { mutableStateOf("4433") }
|
||||
var label by remember { mutableStateOf("") }
|
||||
|
||||
AlertDialog(
|
||||
onDismissRequest = onDismiss,
|
||||
title = { Text("Add Server") },
|
||||
text = {
|
||||
Column {
|
||||
OutlinedTextField(
|
||||
value = host,
|
||||
onValueChange = { host = it },
|
||||
label = { Text("Host (IP or domain)") },
|
||||
singleLine = true,
|
||||
modifier = Modifier.fillMaxWidth()
|
||||
)
|
||||
Spacer(modifier = Modifier.height(8.dp))
|
||||
OutlinedTextField(
|
||||
value = port,
|
||||
onValueChange = { port = it },
|
||||
label = { Text("Port") },
|
||||
singleLine = true,
|
||||
modifier = Modifier.fillMaxWidth()
|
||||
)
|
||||
Spacer(modifier = Modifier.height(8.dp))
|
||||
OutlinedTextField(
|
||||
value = label,
|
||||
onValueChange = { label = it },
|
||||
label = { Text("Label (optional)") },
|
||||
singleLine = true,
|
||||
modifier = Modifier.fillMaxWidth()
|
||||
)
|
||||
}
|
||||
},
|
||||
confirmButton = {
|
||||
TextButton(
|
||||
onClick = {
|
||||
if (host.isNotBlank()) {
|
||||
val displayLabel = label.ifBlank { host }
|
||||
onAdd(host.trim(), port.trim(), displayLabel)
|
||||
}
|
||||
}
|
||||
) { Text("Add") }
|
||||
},
|
||||
dismissButton = {
|
||||
TextButton(onClick = onDismiss) { Text("Cancel") }
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
@Composable
|
||||
private fun RestoreKeyDialog(
|
||||
onDismiss: () -> Unit,
|
||||
onRestore: (hex: String) -> Unit
|
||||
) {
|
||||
var keyInput by remember { mutableStateOf("") }
|
||||
var error by remember { mutableStateOf<String?>(null) }
|
||||
|
||||
AlertDialog(
|
||||
onDismissRequest = onDismiss,
|
||||
title = { Text("Restore Identity Key") },
|
||||
text = {
|
||||
Column {
|
||||
Text(
|
||||
text = "Paste your 64-character hex key below. This will replace your current identity.",
|
||||
style = MaterialTheme.typography.bodySmall,
|
||||
color = MaterialTheme.colorScheme.onSurfaceVariant
|
||||
)
|
||||
Spacer(modifier = Modifier.height(8.dp))
|
||||
OutlinedTextField(
|
||||
value = keyInput,
|
||||
onValueChange = {
|
||||
keyInput = it.trim().lowercase()
|
||||
error = null
|
||||
},
|
||||
label = { Text("Identity Key (hex)") },
|
||||
singleLine = true,
|
||||
modifier = Modifier.fillMaxWidth(),
|
||||
isError = error != null
|
||||
)
|
||||
error?.let {
|
||||
Text(
|
||||
text = it,
|
||||
style = MaterialTheme.typography.bodySmall,
|
||||
color = MaterialTheme.colorScheme.error
|
||||
)
|
||||
}
|
||||
}
|
||||
},
|
||||
confirmButton = {
|
||||
TextButton(
|
||||
onClick = {
|
||||
val cleaned = keyInput.replace("\\s".toRegex(), "")
|
||||
if (cleaned.length != 64 || !cleaned.all { it in '0'..'9' || it in 'a'..'f' }) {
|
||||
error = "Key must be exactly 64 hex characters"
|
||||
} else {
|
||||
onRestore(cleaned)
|
||||
}
|
||||
}
|
||||
) { Text("Restore") }
|
||||
},
|
||||
dismissButton = {
|
||||
TextButton(onClick = onDismiss) { Text("Cancel") }
|
||||
}
|
||||
)
|
||||
}
|
||||
@@ -1,4 +0,0 @@
|
||||
plugins {
|
||||
id("com.android.application") version "8.2.0" apply false
|
||||
id("org.jetbrains.kotlin.android") version "1.9.22" apply false
|
||||
}
|
||||
@@ -1,4 +0,0 @@
|
||||
org.gradle.jvmargs=-Xmx2048m -Dfile.encoding=UTF-8
|
||||
android.useAndroidX=true
|
||||
kotlin.code.style=official
|
||||
android.nonTransitiveRClass=true
|
||||
BIN
android/gradle/wrapper/gradle-wrapper.jar
vendored
BIN
android/gradle/wrapper/gradle-wrapper.jar
vendored
Binary file not shown.
@@ -1,6 +0,0 @@
|
||||
distributionBase=GRADLE_USER_HOME
|
||||
distributionPath=wrapper/dists
|
||||
distributionUrl=https\://services.gradle.org/distributions/gradle-8.5-bin.zip
|
||||
networkTimeout=10000
|
||||
zipStoreBase=GRADLE_USER_HOME
|
||||
zipStorePath=wrapper/dists
|
||||
5
android/gradlew
vendored
5
android/gradlew
vendored
@@ -1,5 +0,0 @@
|
||||
#!/bin/sh
|
||||
# Gradle wrapper script
|
||||
APP_HOME=$(cd "$(dirname "$0")" && pwd)
|
||||
CLASSPATH="$APP_HOME/gradle/wrapper/gradle-wrapper.jar"
|
||||
exec java -classpath "$CLASSPATH" org.gradle.wrapper.GradleWrapperMain "$@"
|
||||
@@ -1,18 +0,0 @@
|
||||
pluginManagement {
|
||||
repositories {
|
||||
google()
|
||||
mavenCentral()
|
||||
gradlePluginPortal()
|
||||
}
|
||||
}
|
||||
|
||||
dependencyResolutionManagement {
|
||||
repositoriesMode.set(RepositoriesMode.FAIL_ON_PROJECT_REPOS)
|
||||
repositories {
|
||||
google()
|
||||
mavenCentral()
|
||||
}
|
||||
}
|
||||
|
||||
rootProject.name = "WZPhone"
|
||||
include(":app")
|
||||
@@ -1,33 +0,0 @@
|
||||
[package]
|
||||
name = "wzp-android"
|
||||
version.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
rust-version.workspace = true
|
||||
description = "WarzonePhone Android native VoIP engine — Oboe audio, JNI bridge, call pipeline"
|
||||
|
||||
[lib]
|
||||
crate-type = ["cdylib", "rlib"]
|
||||
|
||||
[dependencies]
|
||||
wzp-proto = { workspace = true }
|
||||
wzp-codec = { workspace = true }
|
||||
wzp-fec = { workspace = true }
|
||||
wzp-crypto = { workspace = true }
|
||||
wzp-transport = { workspace = true }
|
||||
tokio = { workspace = true }
|
||||
tracing = { workspace = true }
|
||||
tracing-subscriber = { workspace = true }
|
||||
bytes = { workspace = true }
|
||||
serde = { workspace = true }
|
||||
serde_json = "1"
|
||||
thiserror = { workspace = true }
|
||||
async-trait = { workspace = true }
|
||||
anyhow = "1"
|
||||
libc = "0.2"
|
||||
jni = { version = "0.21", default-features = false }
|
||||
rand = { workspace = true }
|
||||
rustls = { version = "0.23", default-features = false, features = ["ring"] }
|
||||
|
||||
[build-dependencies]
|
||||
cc = "1"
|
||||
@@ -1,154 +0,0 @@
|
||||
use std::path::PathBuf;
|
||||
|
||||
fn main() {
|
||||
let target = std::env::var("TARGET").unwrap_or_default();
|
||||
|
||||
if target.contains("android") {
|
||||
// Override broken static getauxval from compiler-rt that crashes
|
||||
// in shared libraries. Must be compiled first to take link priority.
|
||||
cc::Build::new()
|
||||
.file("cpp/getauxval_fix.c")
|
||||
.compile("getauxval_fix");
|
||||
|
||||
let oboe_dir = fetch_oboe();
|
||||
match oboe_dir {
|
||||
Some(oboe_path) => {
|
||||
println!("cargo:warning=Building with Oboe from {:?}", oboe_path);
|
||||
|
||||
let mut build = cc::Build::new();
|
||||
build
|
||||
.cpp(true)
|
||||
.std("c++17")
|
||||
// Use shared libc++ — avoids pulling in static libc stubs
|
||||
// that crash in shared libraries (getauxval, pthread_create, etc.)
|
||||
.cpp_link_stdlib(Some("c++_shared"))
|
||||
.include("cpp")
|
||||
.include(oboe_path.join("include"))
|
||||
.include(oboe_path.join("src"))
|
||||
.define("WZP_HAS_OBOE", None)
|
||||
.file("cpp/oboe_bridge.cpp");
|
||||
|
||||
// Compile all Oboe source files
|
||||
let src_dir = oboe_path.join("src");
|
||||
add_cpp_files_recursive(&mut build, &src_dir);
|
||||
|
||||
build.compile("oboe_bridge");
|
||||
}
|
||||
None => {
|
||||
println!("cargo:warning=Oboe not found, building with stub");
|
||||
cc::Build::new()
|
||||
.cpp(true)
|
||||
.std("c++17")
|
||||
.cpp_link_stdlib(Some("c++_shared"))
|
||||
.file("cpp/oboe_stub.cpp")
|
||||
.include("cpp")
|
||||
.compile("oboe_bridge");
|
||||
}
|
||||
}
|
||||
|
||||
// Dynamic C++ runtime — libc++_shared.so must be in jniLibs alongside
|
||||
// libwzp_android.so. We copy it there from the NDK sysroot.
|
||||
//
|
||||
// WHY NOT STATIC: libc++_static.a + libc++abi.a transitively pull in
|
||||
// object files from libc.a (static libc) which contain broken stubs for
|
||||
// getauxval, __init_tcb, pthread_create, etc. These stubs only work in
|
||||
// statically-linked executables. In shared libraries loaded by dlopen(),
|
||||
// they SIGSEGV because the static libc init hasn't run.
|
||||
// Google's official recommendation: use libc++_shared.so for native libs.
|
||||
if let Ok(ndk) = std::env::var("ANDROID_NDK_HOME") {
|
||||
let arch = if target.contains("aarch64") {
|
||||
"aarch64-linux-android"
|
||||
} else if target.contains("armv7") {
|
||||
"arm-linux-androideabi"
|
||||
} else if target.contains("x86_64") {
|
||||
"x86_64-linux-android"
|
||||
} else {
|
||||
"aarch64-linux-android"
|
||||
};
|
||||
let lib_dir = format!(
|
||||
"{ndk}/toolchains/llvm/prebuilt/linux-x86_64/sysroot/usr/lib/{arch}"
|
||||
);
|
||||
println!("cargo:rustc-link-search=native={lib_dir}");
|
||||
|
||||
// Copy libc++_shared.so to the jniLibs directory
|
||||
let shared_so = format!("{lib_dir}/libc++_shared.so");
|
||||
if std::path::Path::new(&shared_so).exists() {
|
||||
let jni_abi = if target.contains("aarch64") {
|
||||
"arm64-v8a"
|
||||
} else if target.contains("armv7") {
|
||||
"armeabi-v7a"
|
||||
} else {
|
||||
"arm64-v8a"
|
||||
};
|
||||
// Try to copy to the Gradle jniLibs directory
|
||||
let manifest = std::env::var("CARGO_MANIFEST_DIR").unwrap_or_default();
|
||||
let jni_dir = format!(
|
||||
"{manifest}/../../android/app/src/main/jniLibs/{jni_abi}"
|
||||
);
|
||||
if let Ok(_) = std::fs::create_dir_all(&jni_dir) {
|
||||
let _ = std::fs::copy(&shared_so, format!("{jni_dir}/libc++_shared.so"));
|
||||
println!("cargo:warning=Copied libc++_shared.so to {jni_dir}");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Oboe needs liblog and libOpenSLES from Android
|
||||
println!("cargo:rustc-link-lib=log");
|
||||
println!("cargo:rustc-link-lib=OpenSLES");
|
||||
} else {
|
||||
// Non-Android: always use stub
|
||||
cc::Build::new()
|
||||
.cpp(true)
|
||||
.std("c++17")
|
||||
.file("cpp/oboe_stub.cpp")
|
||||
.include("cpp")
|
||||
.compile("oboe_bridge");
|
||||
}
|
||||
}
|
||||
|
||||
/// Recursively add all .cpp files from a directory to a cc::Build.
|
||||
fn add_cpp_files_recursive(build: &mut cc::Build, dir: &std::path::Path) {
|
||||
if !dir.is_dir() {
|
||||
return;
|
||||
}
|
||||
for entry in std::fs::read_dir(dir).unwrap() {
|
||||
let entry = entry.unwrap();
|
||||
let path = entry.path();
|
||||
if path.is_dir() {
|
||||
add_cpp_files_recursive(build, &path);
|
||||
} else if path.extension().map_or(false, |e| e == "cpp") {
|
||||
build.file(&path);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Try to find or fetch Oboe headers + source.
|
||||
fn fetch_oboe() -> Option<PathBuf> {
|
||||
let out_dir = PathBuf::from(std::env::var("OUT_DIR").unwrap());
|
||||
let oboe_dir = out_dir.join("oboe");
|
||||
|
||||
if oboe_dir.join("include").join("oboe").join("Oboe.h").exists() {
|
||||
return Some(oboe_dir);
|
||||
}
|
||||
|
||||
let status = std::process::Command::new("git")
|
||||
.args([
|
||||
"clone",
|
||||
"--depth=1",
|
||||
"--branch=1.8.1",
|
||||
"https://github.com/google/oboe.git",
|
||||
oboe_dir.to_str().unwrap(),
|
||||
])
|
||||
.status();
|
||||
|
||||
match status {
|
||||
Ok(s) if s.success() => {
|
||||
if oboe_dir.join("include").join("oboe").join("Oboe.h").exists() {
|
||||
Some(oboe_dir)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
@@ -1,21 +0,0 @@
|
||||
// Override the broken static getauxval from compiler-rt/CRT.
|
||||
// The static version reads from __libc_auxv which is NULL in shared libs
|
||||
// loaded via dlopen, causing SIGSEGV in init_have_lse_atomics at load time.
|
||||
// This version calls the real bionic getauxval via dlsym.
|
||||
#ifdef __ANDROID__
|
||||
#include <dlfcn.h>
|
||||
#include <stdint.h>
|
||||
|
||||
typedef unsigned long (*getauxval_fn)(unsigned long);
|
||||
|
||||
unsigned long getauxval(unsigned long type) {
|
||||
static getauxval_fn real_getauxval = (getauxval_fn)0;
|
||||
if (!real_getauxval) {
|
||||
real_getauxval = (getauxval_fn)dlsym((void*)-1L /* RTLD_DEFAULT */, "getauxval");
|
||||
if (!real_getauxval) {
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
return real_getauxval(type);
|
||||
}
|
||||
#endif
|
||||
@@ -1,278 +0,0 @@
|
||||
// Full Oboe implementation for Android
|
||||
// This file is compiled only when targeting Android
|
||||
|
||||
#include "oboe_bridge.h"
|
||||
|
||||
#ifdef __ANDROID__
|
||||
#include <oboe/Oboe.h>
|
||||
#include <android/log.h>
|
||||
#include <cstring>
|
||||
#include <atomic>
|
||||
|
||||
#define LOG_TAG "wzp-oboe"
|
||||
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO, LOG_TAG, __VA_ARGS__)
|
||||
#define LOGW(...) __android_log_print(ANDROID_LOG_WARN, LOG_TAG, __VA_ARGS__)
|
||||
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR, LOG_TAG, __VA_ARGS__)
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Ring buffer helpers (SPSC, lock-free)
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
static inline int32_t ring_available_read(const wzp_atomic_int* write_idx,
|
||||
const wzp_atomic_int* read_idx,
|
||||
int32_t capacity) {
|
||||
int32_t w = std::atomic_load_explicit(write_idx, std::memory_order_acquire);
|
||||
int32_t r = std::atomic_load_explicit(read_idx, std::memory_order_relaxed);
|
||||
int32_t avail = w - r;
|
||||
if (avail < 0) avail += capacity;
|
||||
return avail;
|
||||
}
|
||||
|
||||
static inline int32_t ring_available_write(const wzp_atomic_int* write_idx,
|
||||
const wzp_atomic_int* read_idx,
|
||||
int32_t capacity) {
|
||||
return capacity - 1 - ring_available_read(write_idx, read_idx, capacity);
|
||||
}
|
||||
|
||||
static inline void ring_write(int16_t* buf, int32_t capacity,
|
||||
wzp_atomic_int* write_idx, const wzp_atomic_int* read_idx,
|
||||
const int16_t* src, int32_t count) {
|
||||
int32_t w = std::atomic_load_explicit(write_idx, std::memory_order_relaxed);
|
||||
for (int32_t i = 0; i < count; i++) {
|
||||
buf[w] = src[i];
|
||||
w++;
|
||||
if (w >= capacity) w = 0;
|
||||
}
|
||||
std::atomic_store_explicit(write_idx, w, std::memory_order_release);
|
||||
}
|
||||
|
||||
static inline void ring_read(int16_t* buf, int32_t capacity,
|
||||
const wzp_atomic_int* write_idx, wzp_atomic_int* read_idx,
|
||||
int16_t* dst, int32_t count) {
|
||||
int32_t r = std::atomic_load_explicit(read_idx, std::memory_order_relaxed);
|
||||
for (int32_t i = 0; i < count; i++) {
|
||||
dst[i] = buf[r];
|
||||
r++;
|
||||
if (r >= capacity) r = 0;
|
||||
}
|
||||
std::atomic_store_explicit(read_idx, r, std::memory_order_release);
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Global state
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
static std::shared_ptr<oboe::AudioStream> g_capture_stream;
|
||||
static std::shared_ptr<oboe::AudioStream> g_playout_stream;
|
||||
static const WzpOboeRings* g_rings = nullptr;
|
||||
static std::atomic<bool> g_running{false};
|
||||
static std::atomic<float> g_capture_latency_ms{0.0f};
|
||||
static std::atomic<float> g_playout_latency_ms{0.0f};
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Capture callback
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
class CaptureCallback : public oboe::AudioStreamDataCallback {
|
||||
public:
|
||||
oboe::DataCallbackResult onAudioReady(
|
||||
oboe::AudioStream* stream,
|
||||
void* audioData,
|
||||
int32_t numFrames) override {
|
||||
if (!g_running.load(std::memory_order_relaxed) || !g_rings) {
|
||||
return oboe::DataCallbackResult::Stop;
|
||||
}
|
||||
|
||||
const int16_t* src = static_cast<const int16_t*>(audioData);
|
||||
int32_t avail = ring_available_write(g_rings->capture_write_idx,
|
||||
g_rings->capture_read_idx,
|
||||
g_rings->capture_capacity);
|
||||
int32_t to_write = (numFrames < avail) ? numFrames : avail;
|
||||
if (to_write > 0) {
|
||||
ring_write(g_rings->capture_buf, g_rings->capture_capacity,
|
||||
g_rings->capture_write_idx, g_rings->capture_read_idx,
|
||||
src, to_write);
|
||||
}
|
||||
|
||||
// Update latency estimate
|
||||
auto result = stream->calculateLatencyMillis();
|
||||
if (result) {
|
||||
g_capture_latency_ms.store(static_cast<float>(result.value()),
|
||||
std::memory_order_relaxed);
|
||||
}
|
||||
|
||||
return oboe::DataCallbackResult::Continue;
|
||||
}
|
||||
};
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Playout callback
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
class PlayoutCallback : public oboe::AudioStreamDataCallback {
|
||||
public:
|
||||
oboe::DataCallbackResult onAudioReady(
|
||||
oboe::AudioStream* stream,
|
||||
void* audioData,
|
||||
int32_t numFrames) override {
|
||||
if (!g_running.load(std::memory_order_relaxed) || !g_rings) {
|
||||
memset(audioData, 0, numFrames * sizeof(int16_t));
|
||||
return oboe::DataCallbackResult::Stop;
|
||||
}
|
||||
|
||||
int16_t* dst = static_cast<int16_t*>(audioData);
|
||||
int32_t avail = ring_available_read(g_rings->playout_write_idx,
|
||||
g_rings->playout_read_idx,
|
||||
g_rings->playout_capacity);
|
||||
int32_t to_read = (numFrames < avail) ? numFrames : avail;
|
||||
|
||||
if (to_read > 0) {
|
||||
ring_read(g_rings->playout_buf, g_rings->playout_capacity,
|
||||
g_rings->playout_write_idx, g_rings->playout_read_idx,
|
||||
dst, to_read);
|
||||
}
|
||||
// Fill remainder with silence on underrun
|
||||
if (to_read < numFrames) {
|
||||
memset(dst + to_read, 0, (numFrames - to_read) * sizeof(int16_t));
|
||||
}
|
||||
|
||||
// Update latency estimate
|
||||
auto result = stream->calculateLatencyMillis();
|
||||
if (result) {
|
||||
g_playout_latency_ms.store(static_cast<float>(result.value()),
|
||||
std::memory_order_relaxed);
|
||||
}
|
||||
|
||||
return oboe::DataCallbackResult::Continue;
|
||||
}
|
||||
};
|
||||
|
||||
static CaptureCallback g_capture_cb;
|
||||
static PlayoutCallback g_playout_cb;
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Public C API
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
int wzp_oboe_start(const WzpOboeConfig* config, const WzpOboeRings* rings) {
|
||||
if (g_running.load(std::memory_order_relaxed)) {
|
||||
LOGW("wzp_oboe_start: already running");
|
||||
return -1;
|
||||
}
|
||||
|
||||
g_rings = rings;
|
||||
|
||||
// Build capture stream
|
||||
oboe::AudioStreamBuilder captureBuilder;
|
||||
captureBuilder.setDirection(oboe::Direction::Input)
|
||||
->setPerformanceMode(oboe::PerformanceMode::LowLatency)
|
||||
->setSharingMode(oboe::SharingMode::Exclusive)
|
||||
->setFormat(oboe::AudioFormat::I16)
|
||||
->setChannelCount(config->channel_count)
|
||||
->setSampleRate(config->sample_rate)
|
||||
->setFramesPerDataCallback(config->frames_per_burst)
|
||||
->setInputPreset(oboe::InputPreset::VoiceCommunication)
|
||||
->setDataCallback(&g_capture_cb);
|
||||
|
||||
oboe::Result result = captureBuilder.openStream(g_capture_stream);
|
||||
if (result != oboe::Result::OK) {
|
||||
LOGE("Failed to open capture stream: %s", oboe::convertToText(result));
|
||||
return -2;
|
||||
}
|
||||
|
||||
// Build playout stream
|
||||
oboe::AudioStreamBuilder playoutBuilder;
|
||||
playoutBuilder.setDirection(oboe::Direction::Output)
|
||||
->setPerformanceMode(oboe::PerformanceMode::LowLatency)
|
||||
->setSharingMode(oboe::SharingMode::Exclusive)
|
||||
->setFormat(oboe::AudioFormat::I16)
|
||||
->setChannelCount(config->channel_count)
|
||||
->setSampleRate(config->sample_rate)
|
||||
->setFramesPerDataCallback(config->frames_per_burst)
|
||||
->setUsage(oboe::Usage::VoiceCommunication)
|
||||
->setDataCallback(&g_playout_cb);
|
||||
|
||||
result = playoutBuilder.openStream(g_playout_stream);
|
||||
if (result != oboe::Result::OK) {
|
||||
LOGE("Failed to open playout stream: %s", oboe::convertToText(result));
|
||||
g_capture_stream->close();
|
||||
g_capture_stream.reset();
|
||||
return -3;
|
||||
}
|
||||
|
||||
g_running.store(true, std::memory_order_release);
|
||||
|
||||
// Start both streams
|
||||
result = g_capture_stream->requestStart();
|
||||
if (result != oboe::Result::OK) {
|
||||
LOGE("Failed to start capture: %s", oboe::convertToText(result));
|
||||
g_running.store(false, std::memory_order_release);
|
||||
g_capture_stream->close();
|
||||
g_playout_stream->close();
|
||||
g_capture_stream.reset();
|
||||
g_playout_stream.reset();
|
||||
return -4;
|
||||
}
|
||||
|
||||
result = g_playout_stream->requestStart();
|
||||
if (result != oboe::Result::OK) {
|
||||
LOGE("Failed to start playout: %s", oboe::convertToText(result));
|
||||
g_running.store(false, std::memory_order_release);
|
||||
g_capture_stream->requestStop();
|
||||
g_capture_stream->close();
|
||||
g_playout_stream->close();
|
||||
g_capture_stream.reset();
|
||||
g_playout_stream.reset();
|
||||
return -5;
|
||||
}
|
||||
|
||||
LOGI("Oboe started: sr=%d burst=%d ch=%d",
|
||||
config->sample_rate, config->frames_per_burst, config->channel_count);
|
||||
return 0;
|
||||
}
|
||||
|
||||
void wzp_oboe_stop(void) {
|
||||
g_running.store(false, std::memory_order_release);
|
||||
|
||||
if (g_capture_stream) {
|
||||
g_capture_stream->requestStop();
|
||||
g_capture_stream->close();
|
||||
g_capture_stream.reset();
|
||||
}
|
||||
if (g_playout_stream) {
|
||||
g_playout_stream->requestStop();
|
||||
g_playout_stream->close();
|
||||
g_playout_stream.reset();
|
||||
}
|
||||
|
||||
g_rings = nullptr;
|
||||
LOGI("Oboe stopped");
|
||||
}
|
||||
|
||||
float wzp_oboe_capture_latency_ms(void) {
|
||||
return g_capture_latency_ms.load(std::memory_order_relaxed);
|
||||
}
|
||||
|
||||
float wzp_oboe_playout_latency_ms(void) {
|
||||
return g_playout_latency_ms.load(std::memory_order_relaxed);
|
||||
}
|
||||
|
||||
int wzp_oboe_is_running(void) {
|
||||
return g_running.load(std::memory_order_relaxed) ? 1 : 0;
|
||||
}
|
||||
|
||||
#else
|
||||
// Non-Android fallback — should not be reached; oboe_stub.cpp is used instead.
|
||||
// Provide empty implementations just in case.
|
||||
|
||||
int wzp_oboe_start(const WzpOboeConfig* config, const WzpOboeRings* rings) {
|
||||
(void)config; (void)rings;
|
||||
return -99;
|
||||
}
|
||||
|
||||
void wzp_oboe_stop(void) {}
|
||||
float wzp_oboe_capture_latency_ms(void) { return 0.0f; }
|
||||
float wzp_oboe_playout_latency_ms(void) { return 0.0f; }
|
||||
int wzp_oboe_is_running(void) { return 0; }
|
||||
|
||||
#endif // __ANDROID__
|
||||
@@ -1,43 +0,0 @@
|
||||
#ifndef WZP_OBOE_BRIDGE_H
|
||||
#define WZP_OBOE_BRIDGE_H
|
||||
|
||||
#include <stdint.h>
|
||||
|
||||
#ifdef __cplusplus
|
||||
#include <atomic>
|
||||
typedef std::atomic<int32_t> wzp_atomic_int;
|
||||
extern "C" {
|
||||
#else
|
||||
#include <stdatomic.h>
|
||||
typedef atomic_int wzp_atomic_int;
|
||||
#endif
|
||||
|
||||
typedef struct {
|
||||
int32_t sample_rate;
|
||||
int32_t frames_per_burst;
|
||||
int32_t channel_count;
|
||||
} WzpOboeConfig;
|
||||
|
||||
typedef struct {
|
||||
int16_t* capture_buf;
|
||||
int32_t capture_capacity;
|
||||
wzp_atomic_int* capture_write_idx;
|
||||
wzp_atomic_int* capture_read_idx;
|
||||
|
||||
int16_t* playout_buf;
|
||||
int32_t playout_capacity;
|
||||
wzp_atomic_int* playout_write_idx;
|
||||
wzp_atomic_int* playout_read_idx;
|
||||
} WzpOboeRings;
|
||||
|
||||
int wzp_oboe_start(const WzpOboeConfig* config, const WzpOboeRings* rings);
|
||||
void wzp_oboe_stop(void);
|
||||
float wzp_oboe_capture_latency_ms(void);
|
||||
float wzp_oboe_playout_latency_ms(void);
|
||||
int wzp_oboe_is_running(void);
|
||||
|
||||
#ifdef __cplusplus
|
||||
}
|
||||
#endif
|
||||
|
||||
#endif // WZP_OBOE_BRIDGE_H
|
||||
@@ -1,27 +0,0 @@
|
||||
// Stub implementation for non-Android host builds (testing, cargo check, etc.)
|
||||
|
||||
#include "oboe_bridge.h"
|
||||
#include <stdio.h>
|
||||
|
||||
int wzp_oboe_start(const WzpOboeConfig* config, const WzpOboeRings* rings) {
|
||||
(void)config;
|
||||
(void)rings;
|
||||
fprintf(stderr, "wzp_oboe_start: stub (not on Android)\n");
|
||||
return 0;
|
||||
}
|
||||
|
||||
void wzp_oboe_stop(void) {
|
||||
fprintf(stderr, "wzp_oboe_stop: stub (not on Android)\n");
|
||||
}
|
||||
|
||||
float wzp_oboe_capture_latency_ms(void) {
|
||||
return 0.0f;
|
||||
}
|
||||
|
||||
float wzp_oboe_playout_latency_ms(void) {
|
||||
return 0.0f;
|
||||
}
|
||||
|
||||
int wzp_oboe_is_running(void) {
|
||||
return 0;
|
||||
}
|
||||
@@ -1,424 +0,0 @@
|
||||
//! Lock-free SPSC ring buffer audio backend for Android (Oboe).
|
||||
//!
|
||||
//! The ring buffers are shared between Rust and C++: the Oboe callbacks
|
||||
//! (running on a high-priority audio thread) read/write directly into
|
||||
//! the buffers via atomic indices, while the Rust codec thread on the
|
||||
//! other side does the same.
|
||||
|
||||
use std::sync::atomic::{AtomicI32, Ordering};
|
||||
|
||||
use tracing::info;
|
||||
#[allow(unused_imports)]
|
||||
use tracing::warn;
|
||||
|
||||
/// Number of samples per 20 ms frame at 48 kHz mono.
|
||||
pub const FRAME_SAMPLES: usize = 960;
|
||||
|
||||
/// Default ring buffer capacity: 8 frames = 160 ms at 48 kHz.
|
||||
const RING_CAPACITY: usize = 7680;
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// FFI declarations matching oboe_bridge.h
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
#[repr(C)]
|
||||
#[allow(non_snake_case)]
|
||||
struct WzpOboeConfig {
|
||||
sample_rate: i32,
|
||||
frames_per_burst: i32,
|
||||
channel_count: i32,
|
||||
}
|
||||
|
||||
#[repr(C)]
|
||||
#[allow(non_snake_case)]
|
||||
struct WzpOboeRings {
|
||||
capture_buf: *mut i16,
|
||||
capture_capacity: i32,
|
||||
capture_write_idx: *mut AtomicI32,
|
||||
capture_read_idx: *mut AtomicI32,
|
||||
|
||||
playout_buf: *mut i16,
|
||||
playout_capacity: i32,
|
||||
playout_write_idx: *mut AtomicI32,
|
||||
playout_read_idx: *mut AtomicI32,
|
||||
}
|
||||
|
||||
unsafe impl Send for WzpOboeRings {}
|
||||
unsafe impl Sync for WzpOboeRings {}
|
||||
|
||||
unsafe extern "C" {
|
||||
fn wzp_oboe_start(config: *const WzpOboeConfig, rings: *const WzpOboeRings) -> i32;
|
||||
fn wzp_oboe_stop();
|
||||
fn wzp_oboe_capture_latency_ms() -> f32;
|
||||
fn wzp_oboe_playout_latency_ms() -> f32;
|
||||
fn wzp_oboe_is_running() -> i32;
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// SPSC Ring Buffer
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// Single-producer single-consumer lock-free ring buffer.
|
||||
///
|
||||
/// The producer calls `write()` and the consumer calls `read()`.
|
||||
/// Atomics use acquire/release ordering to ensure correct visibility
|
||||
/// across the Oboe audio thread and the Rust codec thread.
|
||||
pub struct RingBuffer {
|
||||
buf: Vec<i16>,
|
||||
capacity: usize,
|
||||
write_idx: AtomicI32,
|
||||
read_idx: AtomicI32,
|
||||
}
|
||||
|
||||
impl RingBuffer {
|
||||
/// Create a new ring buffer with the given capacity (in samples).
|
||||
///
|
||||
/// The actual usable capacity is `capacity - 1` to distinguish
|
||||
/// full from empty.
|
||||
pub fn new(capacity: usize) -> Self {
|
||||
Self {
|
||||
buf: vec![0i16; capacity],
|
||||
capacity,
|
||||
write_idx: AtomicI32::new(0),
|
||||
read_idx: AtomicI32::new(0),
|
||||
}
|
||||
}
|
||||
|
||||
/// Number of samples available to read.
|
||||
pub fn available_read(&self) -> usize {
|
||||
let w = self.write_idx.load(Ordering::Acquire);
|
||||
let r = self.read_idx.load(Ordering::Relaxed);
|
||||
let avail = w - r;
|
||||
if avail < 0 {
|
||||
(avail + self.capacity as i32) as usize
|
||||
} else {
|
||||
avail as usize
|
||||
}
|
||||
}
|
||||
|
||||
/// Number of samples that can be written before the buffer is full.
|
||||
pub fn available_write(&self) -> usize {
|
||||
self.capacity - 1 - self.available_read()
|
||||
}
|
||||
|
||||
/// Write samples into the ring buffer (producer side).
|
||||
///
|
||||
/// Returns the number of samples actually written (may be less than
|
||||
/// `data.len()` if the buffer is nearly full).
|
||||
pub fn write(&self, data: &[i16]) -> usize {
|
||||
let avail = self.available_write();
|
||||
let count = data.len().min(avail);
|
||||
if count == 0 {
|
||||
return 0;
|
||||
}
|
||||
|
||||
let mut w = self.write_idx.load(Ordering::Relaxed) as usize;
|
||||
let cap = self.capacity;
|
||||
let buf_ptr = self.buf.as_ptr() as *mut i16;
|
||||
|
||||
for i in 0..count {
|
||||
// SAFETY: w is always in [0, capacity) and we are the sole producer.
|
||||
unsafe {
|
||||
*buf_ptr.add(w) = data[i];
|
||||
}
|
||||
w += 1;
|
||||
if w >= cap {
|
||||
w = 0;
|
||||
}
|
||||
}
|
||||
|
||||
self.write_idx.store(w as i32, Ordering::Release);
|
||||
count
|
||||
}
|
||||
|
||||
/// Read samples from the ring buffer (consumer side).
|
||||
///
|
||||
/// Returns the number of samples actually read (may be less than
|
||||
/// `out.len()` if the buffer doesn't have enough data).
|
||||
pub fn read(&self, out: &mut [i16]) -> usize {
|
||||
let avail = self.available_read();
|
||||
let count = out.len().min(avail);
|
||||
if count == 0 {
|
||||
return 0;
|
||||
}
|
||||
|
||||
let mut r = self.read_idx.load(Ordering::Relaxed) as usize;
|
||||
let cap = self.capacity;
|
||||
let buf_ptr = self.buf.as_ptr();
|
||||
|
||||
for i in 0..count {
|
||||
// SAFETY: r is always in [0, capacity) and we are the sole consumer.
|
||||
unsafe {
|
||||
out[i] = *buf_ptr.add(r);
|
||||
}
|
||||
r += 1;
|
||||
if r >= cap {
|
||||
r = 0;
|
||||
}
|
||||
}
|
||||
|
||||
self.read_idx.store(r as i32, Ordering::Release);
|
||||
count
|
||||
}
|
||||
|
||||
/// Get a raw pointer to the buffer data (for FFI).
|
||||
fn buf_ptr(&self) -> *mut i16 {
|
||||
self.buf.as_ptr() as *mut i16
|
||||
}
|
||||
|
||||
/// Get a raw pointer to the write index atomic (for FFI).
|
||||
fn write_idx_ptr(&self) -> *mut AtomicI32 {
|
||||
&self.write_idx as *const AtomicI32 as *mut AtomicI32
|
||||
}
|
||||
|
||||
/// Get a raw pointer to the read index atomic (for FFI).
|
||||
fn read_idx_ptr(&self) -> *mut AtomicI32 {
|
||||
&self.read_idx as *const AtomicI32 as *mut AtomicI32
|
||||
}
|
||||
}
|
||||
|
||||
// SAFETY: The ring buffer is designed for SPSC use where producer and consumer
|
||||
// are on different threads. The atomic indices provide the synchronization.
|
||||
unsafe impl Send for RingBuffer {}
|
||||
unsafe impl Sync for RingBuffer {}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Oboe Backend
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// Oboe-based audio backend for Android.
|
||||
///
|
||||
/// Owns two SPSC ring buffers (capture and playout) that are shared with
|
||||
/// the C++ Oboe callbacks via raw pointers. The Oboe callbacks run on
|
||||
/// high-priority audio threads managed by the Android audio system.
|
||||
pub struct OboeBackend {
|
||||
capture_ring: RingBuffer,
|
||||
playout_ring: RingBuffer,
|
||||
started: bool,
|
||||
}
|
||||
|
||||
impl OboeBackend {
|
||||
/// Create a new backend with default ring buffer sizes (160 ms each).
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
capture_ring: RingBuffer::new(RING_CAPACITY),
|
||||
playout_ring: RingBuffer::new(RING_CAPACITY),
|
||||
started: false,
|
||||
}
|
||||
}
|
||||
|
||||
/// Start Oboe audio streams.
|
||||
///
|
||||
/// This sets up the ring buffer pointers and calls into the C++ layer
|
||||
/// to open and start the capture and playout Oboe streams.
|
||||
pub fn start(&mut self) -> Result<(), anyhow::Error> {
|
||||
if self.started {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let config = WzpOboeConfig {
|
||||
sample_rate: 48_000,
|
||||
frames_per_burst: FRAME_SAMPLES as i32,
|
||||
channel_count: 1,
|
||||
};
|
||||
|
||||
let rings = WzpOboeRings {
|
||||
capture_buf: self.capture_ring.buf_ptr(),
|
||||
capture_capacity: self.capture_ring.capacity as i32,
|
||||
capture_write_idx: self.capture_ring.write_idx_ptr(),
|
||||
capture_read_idx: self.capture_ring.read_idx_ptr(),
|
||||
|
||||
playout_buf: self.playout_ring.buf_ptr(),
|
||||
playout_capacity: self.playout_ring.capacity as i32,
|
||||
playout_write_idx: self.playout_ring.write_idx_ptr(),
|
||||
playout_read_idx: self.playout_ring.read_idx_ptr(),
|
||||
};
|
||||
|
||||
let ret = unsafe { wzp_oboe_start(&config, &rings) };
|
||||
if ret != 0 {
|
||||
return Err(anyhow::anyhow!("wzp_oboe_start failed with code {}", ret));
|
||||
}
|
||||
|
||||
self.started = true;
|
||||
info!("Oboe backend started");
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Stop Oboe audio streams.
|
||||
pub fn stop(&mut self) {
|
||||
if !self.started {
|
||||
return;
|
||||
}
|
||||
unsafe { wzp_oboe_stop() };
|
||||
self.started = false;
|
||||
info!("Oboe backend stopped");
|
||||
}
|
||||
|
||||
/// Read captured audio samples from the capture ring buffer.
|
||||
///
|
||||
/// Returns the number of samples actually read. The caller should
|
||||
/// provide a buffer of at least `FRAME_SAMPLES` (960) samples.
|
||||
pub fn read_capture(&self, out: &mut [i16]) -> usize {
|
||||
self.capture_ring.read(out)
|
||||
}
|
||||
|
||||
/// Write audio samples to the playout ring buffer.
|
||||
///
|
||||
/// Returns the number of samples actually written.
|
||||
pub fn write_playout(&self, samples: &[i16]) -> usize {
|
||||
self.playout_ring.write(samples)
|
||||
}
|
||||
|
||||
/// Get the current capture latency in milliseconds (from Oboe).
|
||||
#[allow(unused)]
|
||||
pub fn capture_latency_ms(&self) -> f32 {
|
||||
unsafe { wzp_oboe_capture_latency_ms() }
|
||||
}
|
||||
|
||||
/// Get the current playout latency in milliseconds (from Oboe).
|
||||
#[allow(unused)]
|
||||
pub fn playout_latency_ms(&self) -> f32 {
|
||||
unsafe { wzp_oboe_playout_latency_ms() }
|
||||
}
|
||||
|
||||
/// Check if the Oboe streams are currently running.
|
||||
#[allow(unused)]
|
||||
pub fn is_running(&self) -> bool {
|
||||
unsafe { wzp_oboe_is_running() != 0 }
|
||||
}
|
||||
}
|
||||
|
||||
impl Drop for OboeBackend {
|
||||
fn drop(&mut self) {
|
||||
self.stop();
|
||||
}
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Thread affinity / priority helpers
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// Pin the current thread to the highest-numbered CPU cores (big cores on
|
||||
/// ARM big.LITTLE architectures). Falls back silently on failure.
|
||||
#[allow(unused)]
|
||||
pub fn pin_to_big_core() {
|
||||
#[cfg(target_os = "android")]
|
||||
{
|
||||
unsafe {
|
||||
let num_cpus = libc::sysconf(libc::_SC_NPROCESSORS_ONLN);
|
||||
if num_cpus <= 0 {
|
||||
warn!("pin_to_big_core: could not determine CPU count");
|
||||
return;
|
||||
}
|
||||
let num_cpus = num_cpus as usize;
|
||||
|
||||
// Target the upper half of CPUs (big cores on most big.LITTLE SoCs)
|
||||
let start = num_cpus / 2;
|
||||
let mut set: libc::cpu_set_t = std::mem::zeroed();
|
||||
libc::CPU_ZERO(&mut set);
|
||||
for cpu in start..num_cpus {
|
||||
libc::CPU_SET(cpu, &mut set);
|
||||
}
|
||||
|
||||
let ret = libc::sched_setaffinity(
|
||||
0, // current thread
|
||||
std::mem::size_of::<libc::cpu_set_t>(),
|
||||
&set,
|
||||
);
|
||||
if ret != 0 {
|
||||
warn!("sched_setaffinity failed: {}", std::io::Error::last_os_error());
|
||||
} else {
|
||||
info!(start, num_cpus, "pinned to big cores");
|
||||
}
|
||||
}
|
||||
}
|
||||
#[cfg(not(target_os = "android"))]
|
||||
{
|
||||
// No-op on non-Android
|
||||
}
|
||||
}
|
||||
|
||||
/// Attempt to set SCHED_FIFO real-time priority for the current thread.
|
||||
/// Falls back silently on failure (requires appropriate permissions on Android).
|
||||
#[allow(unused)]
|
||||
pub fn set_realtime_priority() {
|
||||
#[cfg(target_os = "android")]
|
||||
{
|
||||
unsafe {
|
||||
let param = libc::sched_param {
|
||||
sched_priority: 2, // Low RT priority — enough for audio, safe
|
||||
};
|
||||
let ret = libc::sched_setscheduler(0, libc::SCHED_FIFO, ¶m);
|
||||
if ret != 0 {
|
||||
warn!(
|
||||
"sched_setscheduler(SCHED_FIFO) failed: {}",
|
||||
std::io::Error::last_os_error()
|
||||
);
|
||||
} else {
|
||||
info!("set SCHED_FIFO priority 2");
|
||||
}
|
||||
}
|
||||
}
|
||||
#[cfg(not(target_os = "android"))]
|
||||
{
|
||||
// No-op on non-Android
|
||||
}
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Tests
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn ring_buffer_write_read() {
|
||||
let ring = RingBuffer::new(16);
|
||||
let data = [1i16, 2, 3, 4, 5];
|
||||
assert_eq!(ring.write(&data), 5);
|
||||
assert_eq!(ring.available_read(), 5);
|
||||
|
||||
let mut out = [0i16; 5];
|
||||
assert_eq!(ring.read(&mut out), 5);
|
||||
assert_eq!(out, [1, 2, 3, 4, 5]);
|
||||
assert_eq!(ring.available_read(), 0);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn ring_buffer_wraparound() {
|
||||
let ring = RingBuffer::new(8);
|
||||
let data = [10i16, 20, 30, 40, 50, 60]; // 6 samples, capacity 8 (usable 7)
|
||||
assert_eq!(ring.write(&data), 6);
|
||||
|
||||
let mut out = [0i16; 4];
|
||||
assert_eq!(ring.read(&mut out), 4);
|
||||
assert_eq!(out, [10, 20, 30, 40]);
|
||||
|
||||
// Now write more, which should wrap around
|
||||
let data2 = [70i16, 80, 90, 100];
|
||||
assert_eq!(ring.write(&data2), 4);
|
||||
|
||||
let mut out2 = [0i16; 6];
|
||||
assert_eq!(ring.read(&mut out2), 6);
|
||||
assert_eq!(out2, [50, 60, 70, 80, 90, 100]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn ring_buffer_full() {
|
||||
let ring = RingBuffer::new(4); // usable capacity = 3
|
||||
let data = [1i16, 2, 3, 4, 5];
|
||||
assert_eq!(ring.write(&data), 3); // Only 3 fit
|
||||
assert_eq!(ring.available_write(), 0);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn oboe_backend_stub_start_stop() {
|
||||
let mut backend = OboeBackend::new();
|
||||
backend.start().expect("stub start should succeed");
|
||||
assert!(backend.started);
|
||||
backend.stop();
|
||||
assert!(!backend.started);
|
||||
}
|
||||
}
|
||||
@@ -1,91 +0,0 @@
|
||||
//! Lock-free SPSC ring buffers for audio PCM transfer between
|
||||
//! Kotlin AudioRecord/AudioTrack threads and the Rust engine.
|
||||
//!
|
||||
//! These use a simple spin-free design: the producer writes and advances
|
||||
//! a write cursor, the consumer reads and advances a read cursor.
|
||||
//! Both cursors are atomic so no mutex is needed.
|
||||
|
||||
use std::sync::atomic::{AtomicUsize, Ordering};
|
||||
|
||||
/// Ring buffer capacity in i16 samples.
|
||||
/// 960 samples * 10 frames = ~200ms of audio at 48kHz mono.
|
||||
const RING_CAPACITY: usize = 960 * 10;
|
||||
|
||||
/// Lock-free single-producer single-consumer ring buffer for i16 PCM samples.
|
||||
pub struct AudioRing {
|
||||
buf: Box<[i16; RING_CAPACITY]>,
|
||||
write_pos: AtomicUsize,
|
||||
read_pos: AtomicUsize,
|
||||
}
|
||||
|
||||
// SAFETY: AudioRing is designed for SPSC — one thread writes, one reads.
|
||||
// The atomics ensure visibility. The buffer itself is never accessed
|
||||
// from the same index by both threads simultaneously because the
|
||||
// producer only writes to positions between write_pos and read_pos,
|
||||
// and the consumer only reads from positions between read_pos and write_pos.
|
||||
unsafe impl Send for AudioRing {}
|
||||
unsafe impl Sync for AudioRing {}
|
||||
|
||||
impl AudioRing {
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
buf: Box::new([0i16; RING_CAPACITY]),
|
||||
write_pos: AtomicUsize::new(0),
|
||||
read_pos: AtomicUsize::new(0),
|
||||
}
|
||||
}
|
||||
|
||||
/// Number of samples available to read.
|
||||
pub fn available(&self) -> usize {
|
||||
let w = self.write_pos.load(Ordering::Acquire);
|
||||
let r = self.read_pos.load(Ordering::Acquire);
|
||||
w.wrapping_sub(r)
|
||||
}
|
||||
|
||||
/// Number of samples that can be written without overwriting.
|
||||
pub fn free_space(&self) -> usize {
|
||||
RING_CAPACITY - self.available()
|
||||
}
|
||||
|
||||
/// Write samples into the ring. Returns number of samples written.
|
||||
/// Drops oldest samples if the ring is full.
|
||||
pub fn write(&self, samples: &[i16]) -> usize {
|
||||
let w = self.write_pos.load(Ordering::Relaxed);
|
||||
let count = samples.len().min(RING_CAPACITY);
|
||||
|
||||
for i in 0..count {
|
||||
let idx = (w + i) % RING_CAPACITY;
|
||||
// SAFETY: We're the only writer, and the reader won't read
|
||||
// past read_pos which we haven't advanced past yet.
|
||||
unsafe {
|
||||
let ptr = self.buf.as_ptr() as *mut i16;
|
||||
*ptr.add(idx) = samples[i];
|
||||
}
|
||||
}
|
||||
|
||||
self.write_pos.store(w.wrapping_add(count), Ordering::Release);
|
||||
|
||||
// If we overwrote unread data, advance read_pos
|
||||
if self.available() > RING_CAPACITY {
|
||||
let new_read = self.write_pos.load(Ordering::Relaxed).wrapping_sub(RING_CAPACITY);
|
||||
self.read_pos.store(new_read, Ordering::Release);
|
||||
}
|
||||
|
||||
count
|
||||
}
|
||||
|
||||
/// Read samples from the ring into `out`. Returns number of samples read.
|
||||
pub fn read(&self, out: &mut [i16]) -> usize {
|
||||
let avail = self.available();
|
||||
let count = out.len().min(avail);
|
||||
|
||||
let r = self.read_pos.load(Ordering::Relaxed);
|
||||
for i in 0..count {
|
||||
let idx = (r + i) % RING_CAPACITY;
|
||||
out[i] = unsafe { *self.buf.as_ptr().add(idx) };
|
||||
}
|
||||
|
||||
self.read_pos.store(r.wrapping_add(count), Ordering::Release);
|
||||
count
|
||||
}
|
||||
}
|
||||
@@ -1,15 +0,0 @@
|
||||
//! Engine commands sent from the JNI/UI thread to the engine.
|
||||
|
||||
use wzp_proto::QualityProfile;
|
||||
|
||||
/// Commands that can be sent to the running engine.
|
||||
pub enum EngineCommand {
|
||||
/// Mute or unmute the microphone.
|
||||
SetMute(bool),
|
||||
/// Enable or disable speaker (loudspeaker) mode.
|
||||
SetSpeaker(bool),
|
||||
/// Force a specific quality profile (overrides adaptive logic).
|
||||
ForceProfile(QualityProfile),
|
||||
/// Stop the call and shut down the engine.
|
||||
Stop,
|
||||
}
|
||||
@@ -1,580 +0,0 @@
|
||||
//! Engine orchestrator — manages the call lifecycle.
|
||||
//!
|
||||
//! IMPORTANT: On Android, pthread_create crashes in shared libraries due to
|
||||
//! static bionic stubs in the Rust std prebuilt rlibs. ALL work must happen
|
||||
//! on the JNI calling thread or via the tokio current_thread runtime.
|
||||
//! No std::thread::spawn or tokio multi_thread allowed.
|
||||
//!
|
||||
//! Audio capture and playout happen on Kotlin JVM threads via AudioRecord
|
||||
//! and AudioTrack. PCM samples are transferred through lock-free ring buffers.
|
||||
|
||||
use std::net::SocketAddr;
|
||||
use std::sync::atomic::{AtomicBool, AtomicU16, AtomicU32, Ordering};
|
||||
use std::sync::{Arc, Mutex};
|
||||
use std::time::Instant;
|
||||
|
||||
use bytes::Bytes;
|
||||
use tracing::{error, info, warn};
|
||||
use wzp_codec::agc::AutoGainControl;
|
||||
use wzp_codec::opus_dec::OpusDecoder;
|
||||
use wzp_codec::opus_enc::OpusEncoder;
|
||||
use wzp_crypto::{KeyExchange, WarzoneKeyExchange};
|
||||
use wzp_fec::{RaptorQFecDecoder, RaptorQFecEncoder};
|
||||
use wzp_proto::{
|
||||
AudioDecoder, AudioEncoder, CodecId, FecDecoder, FecEncoder,
|
||||
MediaHeader, MediaPacket, MediaTransport, QualityProfile, SignalMessage,
|
||||
};
|
||||
|
||||
use crate::audio_ring::AudioRing;
|
||||
use crate::commands::EngineCommand;
|
||||
use crate::stats::{CallState, CallStats};
|
||||
|
||||
/// Opus frame size at 48kHz mono, 20ms = 960 samples.
|
||||
const FRAME_SAMPLES: usize = 960;
|
||||
|
||||
/// Configuration to start a call.
|
||||
pub struct CallStartConfig {
|
||||
pub profile: QualityProfile,
|
||||
pub relay_addr: String,
|
||||
pub room: String,
|
||||
pub auth_token: Vec<u8>,
|
||||
pub identity_seed: [u8; 32],
|
||||
pub alias: Option<String>,
|
||||
}
|
||||
|
||||
impl Default for CallStartConfig {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
profile: QualityProfile::GOOD,
|
||||
relay_addr: String::new(),
|
||||
room: String::new(),
|
||||
auth_token: Vec::new(),
|
||||
identity_seed: [0u8; 32],
|
||||
alias: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) struct EngineState {
|
||||
pub running: AtomicBool,
|
||||
pub muted: AtomicBool,
|
||||
pub stats: Mutex<CallStats>,
|
||||
pub command_tx: std::sync::mpsc::Sender<EngineCommand>,
|
||||
pub command_rx: Mutex<Option<std::sync::mpsc::Receiver<EngineCommand>>>,
|
||||
/// Ring buffer: Kotlin AudioRecord → Rust encoder
|
||||
pub capture_ring: AudioRing,
|
||||
/// Ring buffer: Rust decoder → Kotlin AudioTrack
|
||||
pub playout_ring: AudioRing,
|
||||
/// Current audio level (RMS) for UI display, updated by capture path.
|
||||
pub audio_level_rms: AtomicU32,
|
||||
}
|
||||
|
||||
pub struct WzpEngine {
|
||||
pub(crate) state: Arc<EngineState>,
|
||||
tokio_runtime: Option<tokio::runtime::Runtime>,
|
||||
call_start: Option<Instant>,
|
||||
}
|
||||
|
||||
impl WzpEngine {
|
||||
pub fn new() -> Self {
|
||||
let (tx, rx) = std::sync::mpsc::channel();
|
||||
let state = Arc::new(EngineState {
|
||||
running: AtomicBool::new(false),
|
||||
muted: AtomicBool::new(false),
|
||||
stats: Mutex::new(CallStats::default()),
|
||||
command_tx: tx,
|
||||
command_rx: Mutex::new(Some(rx)),
|
||||
capture_ring: AudioRing::new(),
|
||||
playout_ring: AudioRing::new(),
|
||||
audio_level_rms: AtomicU32::new(0),
|
||||
});
|
||||
Self {
|
||||
state,
|
||||
tokio_runtime: None,
|
||||
call_start: None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn start_call(&mut self, config: CallStartConfig) -> Result<(), anyhow::Error> {
|
||||
if self.state.running.load(Ordering::Acquire) {
|
||||
return Err(anyhow::anyhow!("call already active"));
|
||||
}
|
||||
|
||||
{
|
||||
let mut stats = self.state.stats.lock().unwrap();
|
||||
*stats = CallStats {
|
||||
state: CallState::Connecting,
|
||||
..Default::default()
|
||||
};
|
||||
}
|
||||
|
||||
let runtime = tokio::runtime::Builder::new_current_thread()
|
||||
.enable_all()
|
||||
.build()?;
|
||||
|
||||
let relay_addr: SocketAddr = config.relay_addr.parse().map_err(|e| {
|
||||
anyhow::anyhow!("invalid relay address '{}': {e}", config.relay_addr)
|
||||
})?;
|
||||
|
||||
let room = config.room.clone();
|
||||
let identity_seed = config.identity_seed;
|
||||
let profile = config.profile;
|
||||
let alias = config.alias.clone();
|
||||
let state = self.state.clone();
|
||||
|
||||
self.state.running.store(true, Ordering::Release);
|
||||
self.call_start = Some(Instant::now());
|
||||
|
||||
let state_clone = state.clone();
|
||||
runtime.block_on(async move {
|
||||
if let Err(e) = run_call(relay_addr, &room, &identity_seed, profile, alias.as_deref(), state_clone).await
|
||||
{
|
||||
error!("call failed: {e}");
|
||||
}
|
||||
});
|
||||
|
||||
state.running.store(false, Ordering::Release);
|
||||
{
|
||||
let mut stats = state.stats.lock().unwrap();
|
||||
stats.state = CallState::Closed;
|
||||
}
|
||||
|
||||
self.tokio_runtime = Some(runtime);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn stop_call(&mut self) {
|
||||
self.state.running.store(false, Ordering::Release);
|
||||
let _ = self.state.command_tx.send(EngineCommand::Stop);
|
||||
if let Some(rt) = self.tokio_runtime.take() {
|
||||
rt.shutdown_background();
|
||||
}
|
||||
self.call_start = None;
|
||||
}
|
||||
|
||||
pub fn set_mute(&self, muted: bool) {
|
||||
self.state.muted.store(muted, Ordering::Relaxed);
|
||||
}
|
||||
|
||||
pub fn set_speaker(&self, _enabled: bool) {}
|
||||
|
||||
pub fn force_profile(&self, _profile: QualityProfile) {}
|
||||
|
||||
pub fn get_stats(&self) -> CallStats {
|
||||
let mut stats = self.state.stats.lock().unwrap().clone();
|
||||
if let Some(start) = self.call_start {
|
||||
stats.duration_secs = start.elapsed().as_secs_f64();
|
||||
}
|
||||
stats.audio_level = self.state.audio_level_rms.load(Ordering::Relaxed);
|
||||
stats
|
||||
}
|
||||
|
||||
pub fn is_active(&self) -> bool {
|
||||
self.state.running.load(Ordering::Acquire)
|
||||
}
|
||||
|
||||
pub fn write_audio(&self, samples: &[i16]) -> usize {
|
||||
if self.state.muted.load(Ordering::Relaxed) {
|
||||
return samples.len();
|
||||
}
|
||||
// Compute RMS for audio level display
|
||||
if !samples.is_empty() {
|
||||
let sum_sq: f64 = samples.iter().map(|&s| (s as f64) * (s as f64)).sum();
|
||||
let rms = (sum_sq / samples.len() as f64).sqrt() as u32;
|
||||
self.state.audio_level_rms.store(rms, Ordering::Relaxed);
|
||||
}
|
||||
self.state.capture_ring.write(samples)
|
||||
}
|
||||
|
||||
pub fn read_audio(&self, out: &mut [i16]) -> usize {
|
||||
self.state.playout_ring.read(out)
|
||||
}
|
||||
|
||||
pub fn destroy(mut self) {
|
||||
self.stop_call();
|
||||
}
|
||||
}
|
||||
|
||||
impl Drop for WzpEngine {
|
||||
fn drop(&mut self) {
|
||||
self.stop_call();
|
||||
}
|
||||
}
|
||||
|
||||
/// Run the full call lifecycle: connect, handshake, send/recv media with Opus + FEC.
|
||||
async fn run_call(
|
||||
relay_addr: SocketAddr,
|
||||
room: &str,
|
||||
identity_seed: &[u8; 32],
|
||||
profile: QualityProfile,
|
||||
alias: Option<&str>,
|
||||
state: Arc<EngineState>,
|
||||
) -> Result<(), anyhow::Error> {
|
||||
let _ = rustls::crypto::ring::default_provider().install_default();
|
||||
|
||||
let bind_addr: SocketAddr = "0.0.0.0:0".parse().unwrap();
|
||||
let endpoint = wzp_transport::create_endpoint(bind_addr, None)?;
|
||||
|
||||
let sni = if room.is_empty() { "android" } else { room };
|
||||
info!(%relay_addr, sni, "connecting to relay...");
|
||||
let client_cfg = wzp_transport::client_config();
|
||||
let conn = wzp_transport::connect(&endpoint, relay_addr, sni, client_cfg).await?;
|
||||
info!("QUIC connected to relay");
|
||||
|
||||
let transport = Arc::new(wzp_transport::QuinnTransport::new(conn));
|
||||
|
||||
// Crypto handshake
|
||||
let mut kx = WarzoneKeyExchange::from_identity_seed(identity_seed);
|
||||
let ephemeral_pub = kx.generate_ephemeral();
|
||||
let identity_pub = kx.identity_public_key();
|
||||
|
||||
let mut sign_data = Vec::with_capacity(42);
|
||||
sign_data.extend_from_slice(&ephemeral_pub);
|
||||
sign_data.extend_from_slice(b"call-offer");
|
||||
let signature = kx.sign(&sign_data);
|
||||
|
||||
let offer = SignalMessage::CallOffer {
|
||||
identity_pub,
|
||||
ephemeral_pub,
|
||||
signature,
|
||||
supported_profiles: vec![
|
||||
QualityProfile::GOOD,
|
||||
QualityProfile::DEGRADED,
|
||||
QualityProfile::CATASTROPHIC,
|
||||
],
|
||||
alias: alias.map(|s| s.to_string()),
|
||||
};
|
||||
transport.send_signal(&offer).await?;
|
||||
info!("CallOffer sent, waiting for CallAnswer...");
|
||||
|
||||
let answer = transport
|
||||
.recv_signal()
|
||||
.await?
|
||||
.ok_or_else(|| anyhow::anyhow!("connection closed before CallAnswer"))?;
|
||||
|
||||
let relay_ephemeral_pub = match answer {
|
||||
SignalMessage::CallAnswer { ephemeral_pub, .. } => ephemeral_pub,
|
||||
other => {
|
||||
return Err(anyhow::anyhow!(
|
||||
"expected CallAnswer, got {:?}",
|
||||
std::mem::discriminant(&other)
|
||||
))
|
||||
}
|
||||
};
|
||||
|
||||
let _session = kx.derive_session(&relay_ephemeral_pub)?;
|
||||
info!("handshake complete, call active");
|
||||
|
||||
{
|
||||
let mut stats = state.stats.lock().unwrap();
|
||||
stats.state = CallState::Active;
|
||||
}
|
||||
|
||||
// Initialize Opus codec
|
||||
let mut encoder =
|
||||
OpusEncoder::new(profile).map_err(|e| anyhow::anyhow!("opus encoder init: {e}"))?;
|
||||
let mut decoder =
|
||||
OpusDecoder::new(profile).map_err(|e| anyhow::anyhow!("opus decoder init: {e}"))?;
|
||||
|
||||
// Initialize FEC encoder/decoder
|
||||
let mut fec_enc = wzp_fec::create_encoder(&profile);
|
||||
let mut fec_dec = wzp_fec::create_decoder(&profile);
|
||||
|
||||
// AGC: normalize volume on both capture and playout paths
|
||||
let mut capture_agc = AutoGainControl::new();
|
||||
let mut playout_agc = AutoGainControl::new();
|
||||
|
||||
info!(
|
||||
fec_ratio = profile.fec_ratio,
|
||||
frames_per_block = profile.frames_per_block,
|
||||
"codec + FEC + AGC initialized (48kHz mono, 20ms frames)"
|
||||
);
|
||||
|
||||
let seq = AtomicU16::new(0);
|
||||
let ts = AtomicU32::new(0);
|
||||
let transport_recv = transport.clone();
|
||||
|
||||
// Pre-allocate buffers
|
||||
let mut capture_buf = vec![0i16; FRAME_SAMPLES];
|
||||
let mut encode_buf = vec![0u8; encoder.max_frame_bytes()];
|
||||
let mut frame_in_block: u8 = 0;
|
||||
let mut block_id: u8 = 0;
|
||||
|
||||
// Send task: capture ring → Opus encode → FEC → MediaPackets
|
||||
let send_task = async {
|
||||
info!("send task started (Opus + RaptorQ FEC)");
|
||||
loop {
|
||||
if !state.running.load(Ordering::Relaxed) {
|
||||
break;
|
||||
}
|
||||
|
||||
let avail = state.capture_ring.available();
|
||||
if avail < FRAME_SAMPLES {
|
||||
tokio::time::sleep(std::time::Duration::from_millis(5)).await;
|
||||
continue;
|
||||
}
|
||||
|
||||
let read = state.capture_ring.read(&mut capture_buf);
|
||||
if read < FRAME_SAMPLES {
|
||||
continue;
|
||||
}
|
||||
|
||||
// AGC: normalize capture volume before encoding
|
||||
capture_agc.process_frame(&mut capture_buf);
|
||||
|
||||
// Opus encode
|
||||
let encoded_len = match encoder.encode(&capture_buf, &mut encode_buf) {
|
||||
Ok(n) => n,
|
||||
Err(e) => {
|
||||
warn!("opus encode error: {e}");
|
||||
continue;
|
||||
}
|
||||
};
|
||||
let encoded = &encode_buf[..encoded_len];
|
||||
|
||||
// Build source packet
|
||||
let s = seq.fetch_add(1, Ordering::Relaxed);
|
||||
let t = ts.fetch_add(FRAME_SAMPLES as u32, Ordering::Relaxed);
|
||||
|
||||
let source_pkt = MediaPacket {
|
||||
header: MediaHeader {
|
||||
version: 0,
|
||||
is_repair: false,
|
||||
codec_id: profile.codec,
|
||||
has_quality_report: false,
|
||||
fec_ratio_encoded: MediaHeader::encode_fec_ratio(profile.fec_ratio),
|
||||
seq: s,
|
||||
timestamp: t,
|
||||
fec_block: block_id,
|
||||
fec_symbol: frame_in_block,
|
||||
reserved: 0,
|
||||
csrc_count: 0,
|
||||
},
|
||||
payload: Bytes::copy_from_slice(encoded),
|
||||
quality_report: None,
|
||||
};
|
||||
|
||||
// Send source packet
|
||||
if let Err(e) = transport.send_media(&source_pkt).await {
|
||||
error!("send error: {e}");
|
||||
break;
|
||||
}
|
||||
|
||||
// Feed encoded frame to FEC encoder
|
||||
if let Err(e) = fec_enc.add_source_symbol(encoded) {
|
||||
warn!("fec add_source error: {e}");
|
||||
}
|
||||
frame_in_block += 1;
|
||||
|
||||
// When block is full, generate repair packets
|
||||
if frame_in_block >= profile.frames_per_block {
|
||||
match fec_enc.generate_repair(profile.fec_ratio) {
|
||||
Ok(repairs) => {
|
||||
let repair_count = repairs.len();
|
||||
for (sym_idx, repair_data) in repairs {
|
||||
let rs = seq.fetch_add(1, Ordering::Relaxed);
|
||||
let repair_pkt = MediaPacket {
|
||||
header: MediaHeader {
|
||||
version: 0,
|
||||
is_repair: true,
|
||||
codec_id: profile.codec,
|
||||
has_quality_report: false,
|
||||
fec_ratio_encoded: MediaHeader::encode_fec_ratio(
|
||||
profile.fec_ratio,
|
||||
),
|
||||
seq: rs,
|
||||
timestamp: t,
|
||||
fec_block: block_id,
|
||||
fec_symbol: sym_idx,
|
||||
reserved: 0,
|
||||
csrc_count: 0,
|
||||
},
|
||||
payload: Bytes::from(repair_data),
|
||||
quality_report: None,
|
||||
};
|
||||
if let Err(e) = transport.send_media(&repair_pkt).await {
|
||||
error!("send repair error: {e}");
|
||||
break;
|
||||
}
|
||||
}
|
||||
if repair_count > 0 && (block_id % 50 == 0 || block_id == 0) {
|
||||
info!(
|
||||
block_id,
|
||||
repair_count,
|
||||
fec_ratio = profile.fec_ratio,
|
||||
"FEC block complete"
|
||||
);
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
warn!("fec generate_repair error: {e}");
|
||||
}
|
||||
}
|
||||
|
||||
let _ = fec_enc.finalize_block();
|
||||
block_id = block_id.wrapping_add(1);
|
||||
frame_in_block = 0;
|
||||
}
|
||||
|
||||
if s % 500 == 0 {
|
||||
info!(seq = s, block_id, frame_in_block, "sending");
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// Pre-allocate decode buffer
|
||||
let mut decode_buf = vec![0i16; FRAME_SAMPLES];
|
||||
|
||||
// Recv task: MediaPackets → FEC decode → Opus decode → playout ring
|
||||
let recv_task = async {
|
||||
let mut frames_decoded: u64 = 0;
|
||||
let mut fec_recovered: u64 = 0;
|
||||
info!("recv task started (Opus + RaptorQ FEC)");
|
||||
loop {
|
||||
if !state.running.load(Ordering::Relaxed) {
|
||||
break;
|
||||
}
|
||||
match transport_recv.recv_media().await {
|
||||
Ok(Some(pkt)) => {
|
||||
let is_repair = pkt.header.is_repair;
|
||||
let pkt_block = pkt.header.fec_block;
|
||||
let pkt_symbol = pkt.header.fec_symbol;
|
||||
|
||||
// Feed every packet (source + repair) to FEC decoder
|
||||
let _ = fec_dec.add_symbol(
|
||||
pkt_block,
|
||||
pkt_symbol,
|
||||
is_repair,
|
||||
&pkt.payload,
|
||||
);
|
||||
|
||||
// Source packets: decode directly
|
||||
if !is_repair {
|
||||
match decoder.decode(&pkt.payload, &mut decode_buf) {
|
||||
Ok(samples) => {
|
||||
// AGC on playout — normalizes received audio volume
|
||||
playout_agc.process_frame(&mut decode_buf[..samples]);
|
||||
state.playout_ring.write(&decode_buf[..samples]);
|
||||
frames_decoded += 1;
|
||||
}
|
||||
Err(e) => {
|
||||
warn!("opus decode error: {e}");
|
||||
if let Ok(samples) = decoder.decode_lost(&mut decode_buf) {
|
||||
playout_agc.process_frame(&mut decode_buf[..samples]);
|
||||
state.playout_ring.write(&decode_buf[..samples]);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Try FEC recovery for this block
|
||||
// (useful when source packets were lost but repair arrived)
|
||||
if let Ok(Some(recovered_frames)) = fec_dec.try_decode(pkt_block) {
|
||||
// FEC recovered the block — any previously missing frames
|
||||
// are now available. In a full jitter buffer implementation,
|
||||
// we'd insert recovered frames at the right position.
|
||||
// For now, log recovery for telemetry.
|
||||
fec_recovered += recovered_frames.len() as u64;
|
||||
if fec_recovered % 50 == 1 {
|
||||
info!(
|
||||
fec_recovered,
|
||||
block = pkt_block,
|
||||
frames = recovered_frames.len(),
|
||||
"FEC block recovered"
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// Expire old blocks to prevent memory growth
|
||||
if pkt_block > 3 {
|
||||
fec_dec.expire_before(pkt_block.wrapping_sub(3));
|
||||
}
|
||||
|
||||
if frames_decoded == 1 || frames_decoded % 500 == 0 {
|
||||
info!(frames_decoded, fec_recovered, "recv stats");
|
||||
}
|
||||
|
||||
let mut stats = state.stats.lock().unwrap();
|
||||
stats.frames_decoded = frames_decoded;
|
||||
stats.fec_recovered = fec_recovered;
|
||||
}
|
||||
Ok(None) => {
|
||||
info!("relay disconnected");
|
||||
break;
|
||||
}
|
||||
Err(e) => {
|
||||
error!("recv error: {e}");
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// Stats task — polls path quality + quinn RTT every 500ms
|
||||
let transport_stats = transport.clone();
|
||||
let stats_task = async {
|
||||
loop {
|
||||
if !state.running.load(Ordering::Relaxed) {
|
||||
break;
|
||||
}
|
||||
// Feed quinn's QUIC-level RTT into our path monitor
|
||||
let quic_rtt_ms = transport_stats.connection().stats().path.rtt.as_millis() as u32;
|
||||
if quic_rtt_ms > 0 {
|
||||
transport_stats.feed_rtt(quic_rtt_ms);
|
||||
}
|
||||
let pq = transport_stats.path_quality();
|
||||
{
|
||||
let mut stats = state.stats.lock().unwrap();
|
||||
stats.frames_encoded = seq.load(Ordering::Relaxed) as u64;
|
||||
stats.loss_pct = pq.loss_pct;
|
||||
stats.rtt_ms = quic_rtt_ms;
|
||||
stats.jitter_ms = pq.jitter_ms;
|
||||
}
|
||||
tokio::time::sleep(std::time::Duration::from_millis(500)).await;
|
||||
}
|
||||
};
|
||||
|
||||
// Signal recv task — listens for RoomUpdate and other signaling messages
|
||||
let transport_signal = transport.clone();
|
||||
let state_signal = state.clone();
|
||||
let signal_task = async {
|
||||
loop {
|
||||
match transport_signal.recv_signal().await {
|
||||
Ok(Some(SignalMessage::RoomUpdate { count, participants })) => {
|
||||
info!(count, "RoomUpdate received");
|
||||
let members: Vec<crate::stats::RoomMember> = participants
|
||||
.iter()
|
||||
.map(|p| crate::stats::RoomMember {
|
||||
fingerprint: p.fingerprint.clone(),
|
||||
alias: p.alias.clone(),
|
||||
})
|
||||
.collect();
|
||||
let mut stats = state_signal.stats.lock().unwrap();
|
||||
stats.room_participant_count = count;
|
||||
stats.room_participants = members;
|
||||
}
|
||||
Ok(Some(msg)) => {
|
||||
info!("signal received: {:?}", std::mem::discriminant(&msg));
|
||||
}
|
||||
Ok(None) => {
|
||||
info!("signal stream closed");
|
||||
break;
|
||||
}
|
||||
Err(e) => {
|
||||
warn!("signal recv error: {e}");
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
tokio::select! {
|
||||
_ = send_task => {}
|
||||
_ = recv_task => {}
|
||||
_ = stats_task => {}
|
||||
_ = signal_task => {}
|
||||
}
|
||||
|
||||
transport.close().await.ok();
|
||||
Ok(())
|
||||
}
|
||||
@@ -1,240 +0,0 @@
|
||||
//! JNI bridge for Android — thin layer between Kotlin and the WzpEngine.
|
||||
|
||||
use std::panic;
|
||||
|
||||
use jni::objects::{JClass, JObject, JString};
|
||||
use jni::sys::{jboolean, jint, jlong, jstring};
|
||||
use jni::JNIEnv;
|
||||
use tracing::{error, info};
|
||||
use wzp_proto::QualityProfile;
|
||||
|
||||
use crate::engine::{CallStartConfig, WzpEngine};
|
||||
|
||||
/// Opaque engine handle passed to/from Kotlin as a `jlong`.
|
||||
struct EngineHandle {
|
||||
engine: WzpEngine,
|
||||
}
|
||||
|
||||
/// Recover the `EngineHandle` from a raw handle value.
|
||||
unsafe fn handle_ref(handle: jlong) -> &'static mut EngineHandle {
|
||||
unsafe { &mut *(handle as *mut EngineHandle) }
|
||||
}
|
||||
|
||||
fn profile_from_int(value: jint) -> QualityProfile {
|
||||
match value {
|
||||
1 => QualityProfile::DEGRADED,
|
||||
2 => QualityProfile::CATASTROPHIC,
|
||||
_ => QualityProfile::GOOD,
|
||||
}
|
||||
}
|
||||
|
||||
#[unsafe(no_mangle)]
|
||||
pub unsafe extern "system" fn Java_com_wzp_engine_WzpEngine_nativeInit(
|
||||
_env: JNIEnv,
|
||||
_class: JClass,
|
||||
) -> jlong {
|
||||
let result = panic::catch_unwind(|| {
|
||||
let handle = Box::new(EngineHandle {
|
||||
engine: WzpEngine::new(),
|
||||
});
|
||||
Box::into_raw(handle) as jlong
|
||||
});
|
||||
match result {
|
||||
Ok(h) => h,
|
||||
Err(_) => 0,
|
||||
}
|
||||
}
|
||||
|
||||
#[unsafe(no_mangle)]
|
||||
pub unsafe extern "system" fn Java_com_wzp_engine_WzpEngine_nativeStartCall(
|
||||
mut env: JNIEnv,
|
||||
_class: JClass,
|
||||
handle: jlong,
|
||||
relay_addr_j: JString,
|
||||
room_j: JString,
|
||||
seed_hex_j: JString,
|
||||
token_j: JString,
|
||||
alias_j: JString,
|
||||
) -> jint {
|
||||
let result = panic::catch_unwind(panic::AssertUnwindSafe(|| {
|
||||
let relay_addr: String = env.get_string(&relay_addr_j).map(|s| s.into()).unwrap_or_default();
|
||||
let room: String = env.get_string(&room_j).map(|s| s.into()).unwrap_or_default();
|
||||
let seed_hex: String = env.get_string(&seed_hex_j).map(|s| s.into()).unwrap_or_default();
|
||||
let token: String = env.get_string(&token_j).map(|s| s.into()).unwrap_or_default();
|
||||
let alias: String = env.get_string(&alias_j).map(|s| s.into()).unwrap_or_default();
|
||||
|
||||
let h = unsafe { handle_ref(handle) };
|
||||
|
||||
// Parse hex seed
|
||||
let mut identity_seed = [0u8; 32];
|
||||
if seed_hex.len() == 64 {
|
||||
for i in 0..32 {
|
||||
if let Ok(byte) = u8::from_str_radix(&seed_hex[i * 2..i * 2 + 2], 16) {
|
||||
identity_seed[i] = byte;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// Generate random seed if not provided
|
||||
use rand::RngCore;
|
||||
rand::thread_rng().fill_bytes(&mut identity_seed);
|
||||
}
|
||||
|
||||
let config = CallStartConfig {
|
||||
profile: QualityProfile::GOOD,
|
||||
relay_addr,
|
||||
room,
|
||||
auth_token: if token.is_empty() { Vec::new() } else { token.into_bytes() },
|
||||
identity_seed,
|
||||
alias: if alias.is_empty() { None } else { Some(alias) },
|
||||
};
|
||||
|
||||
match h.engine.start_call(config) {
|
||||
Ok(()) => 0,
|
||||
Err(e) => {
|
||||
error!("start_call failed: {e}");
|
||||
-1
|
||||
}
|
||||
}
|
||||
}));
|
||||
|
||||
match result {
|
||||
Ok(code) => code,
|
||||
Err(_) => -1,
|
||||
}
|
||||
}
|
||||
|
||||
#[unsafe(no_mangle)]
|
||||
pub unsafe extern "system" fn Java_com_wzp_engine_WzpEngine_nativeStopCall(
|
||||
_env: JNIEnv,
|
||||
_class: JClass,
|
||||
handle: jlong,
|
||||
) {
|
||||
let _ = panic::catch_unwind(panic::AssertUnwindSafe(|| {
|
||||
let h = unsafe { handle_ref(handle) };
|
||||
h.engine.stop_call();
|
||||
}));
|
||||
}
|
||||
|
||||
#[unsafe(no_mangle)]
|
||||
pub unsafe extern "system" fn Java_com_wzp_engine_WzpEngine_nativeSetMute(
|
||||
_env: JNIEnv,
|
||||
_class: JClass,
|
||||
handle: jlong,
|
||||
muted: jboolean,
|
||||
) {
|
||||
let _ = panic::catch_unwind(panic::AssertUnwindSafe(|| {
|
||||
let h = unsafe { handle_ref(handle) };
|
||||
h.engine.set_mute(muted != 0);
|
||||
}));
|
||||
}
|
||||
|
||||
#[unsafe(no_mangle)]
|
||||
pub unsafe extern "system" fn Java_com_wzp_engine_WzpEngine_nativeSetSpeaker(
|
||||
_env: JNIEnv,
|
||||
_class: JClass,
|
||||
handle: jlong,
|
||||
speaker: jboolean,
|
||||
) {
|
||||
let _ = panic::catch_unwind(panic::AssertUnwindSafe(|| {
|
||||
let h = unsafe { handle_ref(handle) };
|
||||
h.engine.set_speaker(speaker != 0);
|
||||
}));
|
||||
}
|
||||
|
||||
#[unsafe(no_mangle)]
|
||||
pub unsafe extern "system" fn Java_com_wzp_engine_WzpEngine_nativeGetStats<'a>(
|
||||
mut env: JNIEnv<'a>,
|
||||
_class: JClass,
|
||||
handle: jlong,
|
||||
) -> jstring {
|
||||
let result = panic::catch_unwind(panic::AssertUnwindSafe(|| {
|
||||
let h = unsafe { handle_ref(handle) };
|
||||
let stats = h.engine.get_stats();
|
||||
serde_json::to_string(&stats).unwrap_or_else(|_| "{}".to_string())
|
||||
}));
|
||||
|
||||
let json = match result {
|
||||
Ok(s) => s,
|
||||
Err(_) => "{}".to_string(),
|
||||
};
|
||||
|
||||
env.new_string(&json)
|
||||
.map(|s| s.into_raw())
|
||||
.unwrap_or(JObject::null().into_raw())
|
||||
}
|
||||
|
||||
#[unsafe(no_mangle)]
|
||||
pub unsafe extern "system" fn Java_com_wzp_engine_WzpEngine_nativeForceProfile(
|
||||
_env: JNIEnv,
|
||||
_class: JClass,
|
||||
handle: jlong,
|
||||
profile: jint,
|
||||
) {
|
||||
let _ = panic::catch_unwind(panic::AssertUnwindSafe(|| {
|
||||
let h = unsafe { handle_ref(handle) };
|
||||
let qp = profile_from_int(profile);
|
||||
h.engine.force_profile(qp);
|
||||
}));
|
||||
}
|
||||
|
||||
/// Write captured PCM samples from Kotlin AudioRecord into the engine's capture ring.
|
||||
/// pcm is a Java short[] array.
|
||||
#[unsafe(no_mangle)]
|
||||
pub unsafe extern "system" fn Java_com_wzp_engine_WzpEngine_nativeWriteAudio(
|
||||
env: JNIEnv,
|
||||
_class: JClass,
|
||||
handle: jlong,
|
||||
pcm: jni::objects::JShortArray,
|
||||
) -> jint {
|
||||
let result = panic::catch_unwind(panic::AssertUnwindSafe(|| {
|
||||
let h = unsafe { handle_ref(handle) };
|
||||
let len = env.get_array_length(&pcm).unwrap_or(0) as usize;
|
||||
if len == 0 {
|
||||
return 0;
|
||||
}
|
||||
let mut buf = vec![0i16; len];
|
||||
// GetShortArrayRegion copies Java array into our buffer
|
||||
if env.get_short_array_region(&pcm, 0, &mut buf).is_err() {
|
||||
return 0;
|
||||
}
|
||||
h.engine.write_audio(&buf) as jint
|
||||
}));
|
||||
result.unwrap_or(0)
|
||||
}
|
||||
|
||||
/// Read decoded PCM samples from the engine's playout ring for Kotlin AudioTrack.
|
||||
/// pcm is a Java short[] array to fill. Returns number of samples actually read.
|
||||
#[unsafe(no_mangle)]
|
||||
pub unsafe extern "system" fn Java_com_wzp_engine_WzpEngine_nativeReadAudio(
|
||||
env: JNIEnv,
|
||||
_class: JClass,
|
||||
handle: jlong,
|
||||
pcm: jni::objects::JShortArray,
|
||||
) -> jint {
|
||||
let result = panic::catch_unwind(panic::AssertUnwindSafe(|| {
|
||||
let h = unsafe { handle_ref(handle) };
|
||||
let len = env.get_array_length(&pcm).unwrap_or(0) as usize;
|
||||
if len == 0 {
|
||||
return 0;
|
||||
}
|
||||
let mut buf = vec![0i16; len];
|
||||
let read = h.engine.read_audio(&mut buf);
|
||||
if read > 0 {
|
||||
let _ = env.set_short_array_region(&pcm, 0, &buf[..read]);
|
||||
}
|
||||
read as jint
|
||||
}));
|
||||
result.unwrap_or(0)
|
||||
}
|
||||
|
||||
#[unsafe(no_mangle)]
|
||||
pub unsafe extern "system" fn Java_com_wzp_engine_WzpEngine_nativeDestroy(
|
||||
_env: JNIEnv,
|
||||
_class: JClass,
|
||||
handle: jlong,
|
||||
) {
|
||||
let _ = panic::catch_unwind(panic::AssertUnwindSafe(|| {
|
||||
let h = unsafe { Box::from_raw(handle as *mut EngineHandle) };
|
||||
drop(h);
|
||||
}));
|
||||
}
|
||||
@@ -1,18 +0,0 @@
|
||||
//! WarzonePhone Android native VoIP engine.
|
||||
//!
|
||||
//! Provides:
|
||||
//! - Oboe audio backend with lock-free SPSC ring buffers
|
||||
//! - Engine orchestrator managing call lifecycle
|
||||
//! - Codec pipeline thread (encode/decode/FEC/jitter)
|
||||
//! - Call statistics and command interface
|
||||
//!
|
||||
//! On non-Android targets, the Oboe C++ layer compiles as a stub,
|
||||
//! allowing `cargo check` and unit tests on the host.
|
||||
|
||||
pub mod audio_android;
|
||||
pub mod audio_ring;
|
||||
pub mod commands;
|
||||
pub mod engine;
|
||||
pub mod pipeline;
|
||||
pub mod stats;
|
||||
pub mod jni_bridge;
|
||||
@@ -1,262 +0,0 @@
|
||||
//! Codec pipeline — encode/decode with FEC and jitter buffer.
|
||||
//!
|
||||
//! Runs on a dedicated thread, processing 20 ms frames at 48 kHz.
|
||||
//! The pipeline is NOT Send/Sync (Opus encoder state) — it is owned
|
||||
//! exclusively by the codec thread.
|
||||
|
||||
use tracing::{debug, warn};
|
||||
use wzp_codec::{AdaptiveDecoder, AdaptiveEncoder, AutoGainControl, EchoCanceller};
|
||||
use wzp_fec::{RaptorQFecDecoder, RaptorQFecEncoder};
|
||||
use wzp_proto::jitter::{JitterBuffer, PlayoutResult};
|
||||
use wzp_proto::quality::AdaptiveQualityController;
|
||||
use wzp_proto::traits::{AudioDecoder, AudioEncoder, FecDecoder, FecEncoder};
|
||||
use wzp_proto::traits::QualityController;
|
||||
use wzp_proto::{MediaPacket, QualityProfile};
|
||||
|
||||
use crate::audio_android::FRAME_SAMPLES;
|
||||
|
||||
/// Maximum encoded frame size (Opus worst case at highest bitrate).
|
||||
const MAX_ENCODED_BYTES: usize = 1275;
|
||||
|
||||
/// Pipeline statistics snapshot.
|
||||
#[derive(Clone, Debug, Default)]
|
||||
pub struct PipelineStats {
|
||||
pub frames_encoded: u64,
|
||||
pub frames_decoded: u64,
|
||||
pub underruns: u64,
|
||||
pub jitter_depth: usize,
|
||||
pub quality_tier: u8,
|
||||
}
|
||||
|
||||
/// The codec pipeline: encode, FEC, jitter buffer, decode.
|
||||
///
|
||||
/// This struct is owned by the codec thread and not shared.
|
||||
pub struct Pipeline {
|
||||
encoder: AdaptiveEncoder,
|
||||
decoder: AdaptiveDecoder,
|
||||
fec_encoder: RaptorQFecEncoder,
|
||||
fec_decoder: RaptorQFecDecoder,
|
||||
jitter_buffer: JitterBuffer,
|
||||
quality_ctrl: AdaptiveQualityController,
|
||||
/// Acoustic echo canceller applied before encoding.
|
||||
aec: EchoCanceller,
|
||||
/// Automatic gain control applied before encoding.
|
||||
agc: AutoGainControl,
|
||||
/// Last decoded PCM frame, used as the AEC far-end reference.
|
||||
last_decoded_farend: Option<Vec<i16>>,
|
||||
// Pre-allocated scratch buffers
|
||||
capture_buf: Vec<i16>,
|
||||
#[allow(dead_code)]
|
||||
playout_buf: Vec<i16>,
|
||||
encode_out: Vec<u8>,
|
||||
// Stats counters
|
||||
frames_encoded: u64,
|
||||
frames_decoded: u64,
|
||||
underruns: u64,
|
||||
}
|
||||
|
||||
impl Pipeline {
|
||||
/// Create a new pipeline configured for the given quality profile.
|
||||
pub fn new(profile: QualityProfile) -> Result<Self, anyhow::Error> {
|
||||
let encoder = AdaptiveEncoder::new(profile)
|
||||
.map_err(|e| anyhow::anyhow!("encoder init: {e}"))?;
|
||||
let decoder = AdaptiveDecoder::new(profile)
|
||||
.map_err(|e| anyhow::anyhow!("decoder init: {e}"))?;
|
||||
let fec_encoder =
|
||||
RaptorQFecEncoder::with_defaults(profile.frames_per_block as usize);
|
||||
let fec_decoder =
|
||||
RaptorQFecDecoder::with_defaults(profile.frames_per_block as usize);
|
||||
let jitter_buffer = JitterBuffer::new(10, 250, 3);
|
||||
let quality_ctrl = AdaptiveQualityController::new();
|
||||
|
||||
Ok(Self {
|
||||
encoder,
|
||||
decoder,
|
||||
fec_encoder,
|
||||
fec_decoder,
|
||||
jitter_buffer,
|
||||
quality_ctrl,
|
||||
aec: EchoCanceller::new(48000, 100), // 100 ms echo tail
|
||||
agc: AutoGainControl::new(),
|
||||
last_decoded_farend: None,
|
||||
capture_buf: vec![0i16; FRAME_SAMPLES],
|
||||
playout_buf: vec![0i16; FRAME_SAMPLES],
|
||||
encode_out: vec![0u8; MAX_ENCODED_BYTES],
|
||||
frames_encoded: 0,
|
||||
frames_decoded: 0,
|
||||
underruns: 0,
|
||||
})
|
||||
}
|
||||
|
||||
/// Encode a PCM frame into a compressed packet.
|
||||
///
|
||||
/// If `muted` is true, a silence frame is encoded (all zeros).
|
||||
/// Returns the encoded bytes, or `None` on encoder error.
|
||||
pub fn encode_frame(&mut self, pcm: &[i16], muted: bool) -> Option<Vec<u8>> {
|
||||
let input = if muted {
|
||||
// Zero the capture buffer for silence
|
||||
for s in self.capture_buf.iter_mut() {
|
||||
*s = 0;
|
||||
}
|
||||
&self.capture_buf[..]
|
||||
} else {
|
||||
// Feed the last decoded playout as AEC far-end reference.
|
||||
if let Some(ref farend) = self.last_decoded_farend {
|
||||
self.aec.feed_farend(farend);
|
||||
}
|
||||
|
||||
// Apply AEC + AGC to the captured PCM.
|
||||
let len = pcm.len().min(self.capture_buf.len());
|
||||
self.capture_buf[..len].copy_from_slice(&pcm[..len]);
|
||||
self.aec.process_frame(&mut self.capture_buf[..len]);
|
||||
self.agc.process_frame(&mut self.capture_buf[..len]);
|
||||
&self.capture_buf[..len]
|
||||
};
|
||||
|
||||
match self.encoder.encode(input, &mut self.encode_out) {
|
||||
Ok(n) => {
|
||||
self.frames_encoded += 1;
|
||||
let encoded = self.encode_out[..n].to_vec();
|
||||
|
||||
// Feed into FEC encoder
|
||||
if let Err(e) = self.fec_encoder.add_source_symbol(&encoded) {
|
||||
warn!("FEC encode error: {e}");
|
||||
}
|
||||
|
||||
Some(encoded)
|
||||
}
|
||||
Err(e) => {
|
||||
warn!("encode error: {e}");
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Feed a received media packet into the jitter buffer.
|
||||
pub fn feed_packet(&mut self, packet: MediaPacket) {
|
||||
// Feed FEC symbols if present
|
||||
let header = &packet.header;
|
||||
if header.fec_block != 0 || header.fec_symbol != 0 {
|
||||
let is_repair = header.is_repair;
|
||||
if let Err(e) = self.fec_decoder.add_symbol(
|
||||
header.fec_block,
|
||||
header.fec_symbol,
|
||||
is_repair,
|
||||
&packet.payload,
|
||||
) {
|
||||
debug!("FEC symbol feed error: {e}");
|
||||
}
|
||||
}
|
||||
|
||||
self.jitter_buffer.push(packet);
|
||||
}
|
||||
|
||||
/// Decode the next frame from the jitter buffer.
|
||||
///
|
||||
/// Returns decoded PCM samples, or `None` if the buffer is not ready.
|
||||
/// Decoded PCM is also stored as the AEC far-end reference for the next
|
||||
/// encode cycle.
|
||||
pub fn decode_frame(&mut self) -> Option<Vec<i16>> {
|
||||
let result = match self.jitter_buffer.pop() {
|
||||
PlayoutResult::Packet(pkt) => {
|
||||
let mut pcm = vec![0i16; FRAME_SAMPLES];
|
||||
match self.decoder.decode(&pkt.payload, &mut pcm) {
|
||||
Ok(n) => {
|
||||
self.frames_decoded += 1;
|
||||
pcm.truncate(n);
|
||||
Some(pcm)
|
||||
}
|
||||
Err(e) => {
|
||||
warn!("decode error: {e}");
|
||||
// Attempt PLC
|
||||
self.generate_plc()
|
||||
}
|
||||
}
|
||||
}
|
||||
PlayoutResult::Missing { seq } => {
|
||||
debug!(seq, "jitter buffer: missing packet, generating PLC");
|
||||
self.generate_plc()
|
||||
}
|
||||
PlayoutResult::NotReady => {
|
||||
self.underruns += 1;
|
||||
None
|
||||
}
|
||||
};
|
||||
|
||||
// Save decoded PCM as far-end reference for AEC.
|
||||
if let Some(ref pcm) = result {
|
||||
self.last_decoded_farend = Some(pcm.clone());
|
||||
}
|
||||
|
||||
result
|
||||
}
|
||||
|
||||
/// Generate packet loss concealment output.
|
||||
fn generate_plc(&mut self) -> Option<Vec<i16>> {
|
||||
let mut pcm = vec![0i16; FRAME_SAMPLES];
|
||||
match self.decoder.decode_lost(&mut pcm) {
|
||||
Ok(n) => {
|
||||
self.frames_decoded += 1;
|
||||
pcm.truncate(n);
|
||||
Some(pcm)
|
||||
}
|
||||
Err(e) => {
|
||||
warn!("PLC error: {e}");
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Feed a quality report into the adaptive quality controller.
|
||||
///
|
||||
/// Returns a new profile if a tier transition occurred.
|
||||
#[allow(unused)]
|
||||
pub fn observe_quality(
|
||||
&mut self,
|
||||
report: &wzp_proto::QualityReport,
|
||||
) -> Option<QualityProfile> {
|
||||
let new_profile = self.quality_ctrl.observe(report);
|
||||
if let Some(ref profile) = new_profile {
|
||||
if let Err(e) = self.encoder.set_profile(*profile) {
|
||||
warn!("encoder set_profile error: {e}");
|
||||
}
|
||||
if let Err(e) = self.decoder.set_profile(*profile) {
|
||||
warn!("decoder set_profile error: {e}");
|
||||
}
|
||||
}
|
||||
new_profile
|
||||
}
|
||||
|
||||
/// Force a specific quality profile.
|
||||
#[allow(unused)]
|
||||
pub fn force_profile(&mut self, profile: QualityProfile) {
|
||||
self.quality_ctrl.force_profile(profile);
|
||||
if let Err(e) = self.encoder.set_profile(profile) {
|
||||
warn!("encoder set_profile error: {e}");
|
||||
}
|
||||
if let Err(e) = self.decoder.set_profile(profile) {
|
||||
warn!("decoder set_profile error: {e}");
|
||||
}
|
||||
}
|
||||
|
||||
/// Get current pipeline statistics.
|
||||
pub fn stats(&self) -> PipelineStats {
|
||||
PipelineStats {
|
||||
frames_encoded: self.frames_encoded,
|
||||
frames_decoded: self.frames_decoded,
|
||||
underruns: self.underruns,
|
||||
jitter_depth: self.jitter_buffer.stats().current_depth,
|
||||
quality_tier: self.quality_ctrl.tier() as u8,
|
||||
}
|
||||
}
|
||||
|
||||
/// Enable or disable acoustic echo cancellation.
|
||||
pub fn set_aec_enabled(&mut self, enabled: bool) {
|
||||
self.aec.set_enabled(enabled);
|
||||
}
|
||||
|
||||
/// Enable or disable automatic gain control.
|
||||
pub fn set_agc_enabled(&mut self, enabled: bool) {
|
||||
self.agc.set_enabled(enabled);
|
||||
}
|
||||
}
|
||||
@@ -1,67 +0,0 @@
|
||||
//! Call statistics for the Android engine.
|
||||
|
||||
/// State of the call.
|
||||
/// Serializes as integer for easy parsing on the Kotlin side:
|
||||
/// 0=Idle, 1=Connecting, 2=Active, 3=Reconnecting, 4=Closed
|
||||
#[derive(Clone, Debug, Default, PartialEq, Eq)]
|
||||
pub enum CallState {
|
||||
#[default]
|
||||
Idle,
|
||||
Connecting,
|
||||
Active,
|
||||
Reconnecting,
|
||||
Closed,
|
||||
}
|
||||
|
||||
impl serde::Serialize for CallState {
|
||||
fn serialize<S: serde::Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
|
||||
let n: u8 = match self {
|
||||
CallState::Idle => 0,
|
||||
CallState::Connecting => 1,
|
||||
CallState::Active => 2,
|
||||
CallState::Reconnecting => 3,
|
||||
CallState::Closed => 4,
|
||||
};
|
||||
serializer.serialize_u8(n)
|
||||
}
|
||||
}
|
||||
|
||||
/// Aggregated call statistics, serializable for JNI bridge.
|
||||
#[derive(Clone, Debug, Default, serde::Serialize)]
|
||||
pub struct CallStats {
|
||||
/// Current call state.
|
||||
pub state: CallState,
|
||||
/// Call duration in seconds.
|
||||
pub duration_secs: f64,
|
||||
/// Current quality tier (0=GOOD, 1=DEGRADED, 2=CATASTROPHIC).
|
||||
pub quality_tier: u8,
|
||||
/// Observed packet loss percentage.
|
||||
pub loss_pct: f32,
|
||||
/// Smoothed round-trip time in milliseconds.
|
||||
pub rtt_ms: u32,
|
||||
/// Jitter in milliseconds.
|
||||
pub jitter_ms: u32,
|
||||
/// Current jitter buffer depth in packets.
|
||||
pub jitter_buffer_depth: usize,
|
||||
/// Total frames encoded since call start.
|
||||
pub frames_encoded: u64,
|
||||
/// Total frames decoded since call start.
|
||||
pub frames_decoded: u64,
|
||||
/// Number of playout underruns (buffer empty when audio needed).
|
||||
pub underruns: u64,
|
||||
/// Frames recovered by FEC.
|
||||
pub fec_recovered: u64,
|
||||
/// Current mic audio level (RMS of i16 samples, 0-32767).
|
||||
pub audio_level: u32,
|
||||
/// Number of participants in the room (from last RoomUpdate).
|
||||
pub room_participant_count: u32,
|
||||
/// Participant list (fingerprint + optional alias) serialized as JSON array.
|
||||
pub room_participants: Vec<RoomMember>,
|
||||
}
|
||||
|
||||
/// A room member entry, serialized into the stats JSON.
|
||||
#[derive(Clone, Debug, Default, serde::Serialize)]
|
||||
pub struct RoomMember {
|
||||
pub fingerprint: String,
|
||||
pub alias: Option<String>,
|
||||
}
|
||||
@@ -23,13 +23,10 @@ serde_json = "1"
|
||||
chrono = "0.4"
|
||||
rustls = { version = "0.23", default-features = false, features = ["ring", "std"] }
|
||||
cpal = { version = "0.15", optional = true }
|
||||
coreaudio-rs = { version = "0.11", optional = true }
|
||||
libc = "0.2"
|
||||
|
||||
[features]
|
||||
default = []
|
||||
audio = ["cpal"]
|
||||
vpio = ["coreaudio-rs"]
|
||||
|
||||
[[bin]]
|
||||
name = "wzp-client"
|
||||
|
||||
@@ -3,10 +3,12 @@
|
||||
//! Both structs use 48 kHz, mono, i16 format to match the WarzonePhone codec
|
||||
//! pipeline. Frames are 960 samples (20 ms at 48 kHz).
|
||||
//!
|
||||
//! Audio callbacks are **lock-free**: they read/write directly to an `AudioRing`
|
||||
//! (atomic SPSC ring buffer). No Mutex, no channel, no allocation on the hot path.
|
||||
//! The cpal `Stream` type is not `Send`, so each struct spawns a dedicated OS
|
||||
//! thread that owns the stream. The public API exposes only `Send + Sync`
|
||||
//! channel handles.
|
||||
|
||||
use std::sync::atomic::{AtomicBool, Ordering};
|
||||
use std::sync::mpsc;
|
||||
use std::sync::Arc;
|
||||
|
||||
use anyhow::{anyhow, Context};
|
||||
@@ -14,8 +16,6 @@ use cpal::traits::{DeviceTrait, HostTrait, StreamTrait};
|
||||
use cpal::{SampleFormat, SampleRate, StreamConfig};
|
||||
use tracing::{info, warn};
|
||||
|
||||
use crate::audio_ring::AudioRing;
|
||||
|
||||
/// Number of samples per 20 ms frame at 48 kHz mono.
|
||||
pub const FRAME_SAMPLES: usize = 960;
|
||||
|
||||
@@ -23,25 +23,23 @@ pub const FRAME_SAMPLES: usize = 960;
|
||||
// AudioCapture
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// Captures microphone input via CPAL and writes PCM into a lock-free ring buffer.
|
||||
/// Captures microphone input and yields 960-sample PCM frames.
|
||||
///
|
||||
/// The cpal stream lives on a dedicated OS thread; this handle is `Send + Sync`.
|
||||
pub struct AudioCapture {
|
||||
ring: Arc<AudioRing>,
|
||||
rx: mpsc::Receiver<Vec<i16>>,
|
||||
running: Arc<AtomicBool>,
|
||||
}
|
||||
|
||||
impl AudioCapture {
|
||||
/// Create and start capturing from the default input device at 48 kHz mono.
|
||||
pub fn start() -> Result<Self, anyhow::Error> {
|
||||
let ring = Arc::new(AudioRing::new());
|
||||
let (tx, rx) = mpsc::sync_channel::<Vec<i16>>(64);
|
||||
let running = Arc::new(AtomicBool::new(true));
|
||||
|
||||
let (init_tx, init_rx) = std::sync::mpsc::sync_channel::<Result<(), String>>(1);
|
||||
|
||||
let ring_cb = ring.clone();
|
||||
let running_clone = running.clone();
|
||||
|
||||
let (init_tx, init_rx) = mpsc::sync_channel::<Result<(), String>>(1);
|
||||
|
||||
std::thread::Builder::new()
|
||||
.name("wzp-audio-capture".into())
|
||||
.spawn(move || {
|
||||
@@ -61,51 +59,53 @@ impl AudioCapture {
|
||||
|
||||
let use_f32 = !supports_i16_input(&device)?;
|
||||
|
||||
let buf = Arc::new(std::sync::Mutex::new(
|
||||
Vec::<i16>::with_capacity(FRAME_SAMPLES),
|
||||
));
|
||||
let err_cb = |e: cpal::StreamError| {
|
||||
warn!("input stream error: {e}");
|
||||
};
|
||||
|
||||
let logged_cb_size = Arc::new(AtomicBool::new(false));
|
||||
|
||||
let stream = if use_f32 {
|
||||
let ring = ring_cb.clone();
|
||||
let buf = buf.clone();
|
||||
let tx = tx.clone();
|
||||
let running = running_clone.clone();
|
||||
let logged = logged_cb_size.clone();
|
||||
device.build_input_stream(
|
||||
&config,
|
||||
move |data: &[f32], _: &cpal::InputCallbackInfo| {
|
||||
if !running.load(Ordering::Relaxed) {
|
||||
return;
|
||||
}
|
||||
if !logged.swap(true, Ordering::Relaxed) {
|
||||
eprintln!("[audio] capture callback: {} f32 samples", data.len());
|
||||
}
|
||||
let mut tmp = [0i16; FRAME_SAMPLES];
|
||||
for chunk in data.chunks(FRAME_SAMPLES) {
|
||||
let n = chunk.len();
|
||||
for i in 0..n {
|
||||
tmp[i] = f32_to_i16(chunk[i]);
|
||||
let mut lock = buf.lock().unwrap();
|
||||
for &s in data {
|
||||
lock.push(f32_to_i16(s));
|
||||
if lock.len() == FRAME_SAMPLES {
|
||||
let frame = lock.drain(..).collect();
|
||||
let _ = tx.try_send(frame);
|
||||
}
|
||||
ring.write(&tmp[..n]);
|
||||
}
|
||||
},
|
||||
err_cb,
|
||||
None,
|
||||
)?
|
||||
} else {
|
||||
let ring = ring_cb.clone();
|
||||
let buf = buf.clone();
|
||||
let tx = tx.clone();
|
||||
let running = running_clone.clone();
|
||||
let logged = logged_cb_size.clone();
|
||||
device.build_input_stream(
|
||||
&config,
|
||||
move |data: &[i16], _: &cpal::InputCallbackInfo| {
|
||||
if !running.load(Ordering::Relaxed) {
|
||||
return;
|
||||
}
|
||||
if !logged.swap(true, Ordering::Relaxed) {
|
||||
eprintln!("[audio] capture callback: {} i16 samples", data.len());
|
||||
let mut lock = buf.lock().unwrap();
|
||||
for &s in data {
|
||||
lock.push(s);
|
||||
if lock.len() == FRAME_SAMPLES {
|
||||
let frame = lock.drain(..).collect();
|
||||
let _ = tx.try_send(frame);
|
||||
}
|
||||
}
|
||||
ring.write(data);
|
||||
},
|
||||
err_cb,
|
||||
None,
|
||||
@@ -114,6 +114,7 @@ impl AudioCapture {
|
||||
|
||||
stream.play().context("failed to start input stream")?;
|
||||
|
||||
// Signal success to the caller before parking.
|
||||
let _ = init_tx.send(Ok(()));
|
||||
|
||||
// Keep stream alive until stopped.
|
||||
@@ -134,12 +135,15 @@ impl AudioCapture {
|
||||
.map_err(|_| anyhow!("capture thread exited before signaling"))?
|
||||
.map_err(|e| anyhow!("{e}"))?;
|
||||
|
||||
Ok(Self { ring, running })
|
||||
Ok(Self { rx, running })
|
||||
}
|
||||
|
||||
/// Get a reference to the capture ring buffer for direct polling.
|
||||
pub fn ring(&self) -> &Arc<AudioRing> {
|
||||
&self.ring
|
||||
/// Read the next frame of 960 PCM samples (blocking until available).
|
||||
///
|
||||
/// Returns `None` when the stream has been stopped or the channel is
|
||||
/// disconnected.
|
||||
pub fn read_frame(&self) -> Option<Vec<i16>> {
|
||||
self.rx.recv().ok()
|
||||
}
|
||||
|
||||
/// Stop capturing.
|
||||
@@ -148,35 +152,27 @@ impl AudioCapture {
|
||||
}
|
||||
}
|
||||
|
||||
impl Drop for AudioCapture {
|
||||
fn drop(&mut self) {
|
||||
self.stop();
|
||||
}
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// AudioPlayback
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// Plays PCM through the default output device, reading from a lock-free ring buffer.
|
||||
/// Plays PCM frames through the default output device at 48 kHz mono.
|
||||
///
|
||||
/// The cpal stream lives on a dedicated OS thread; this handle is `Send + Sync`.
|
||||
pub struct AudioPlayback {
|
||||
ring: Arc<AudioRing>,
|
||||
tx: mpsc::SyncSender<Vec<i16>>,
|
||||
running: Arc<AtomicBool>,
|
||||
}
|
||||
|
||||
impl AudioPlayback {
|
||||
/// Create and start playback on the default output device at 48 kHz mono.
|
||||
pub fn start() -> Result<Self, anyhow::Error> {
|
||||
let ring = Arc::new(AudioRing::new());
|
||||
let (tx, rx) = mpsc::sync_channel::<Vec<i16>>(64);
|
||||
let running = Arc::new(AtomicBool::new(true));
|
||||
|
||||
let (init_tx, init_rx) = std::sync::mpsc::sync_channel::<Result<(), String>>(1);
|
||||
|
||||
let ring_cb = ring.clone();
|
||||
let running_clone = running.clone();
|
||||
|
||||
let (init_tx, init_rx) = mpsc::sync_channel::<Result<(), String>>(1);
|
||||
|
||||
std::thread::Builder::new()
|
||||
.name("wzp-audio-playback".into())
|
||||
.spawn(move || {
|
||||
@@ -196,40 +192,62 @@ impl AudioPlayback {
|
||||
|
||||
let use_f32 = !supports_i16_output(&device)?;
|
||||
|
||||
// Shared ring of samples the cpal callback drains from.
|
||||
let ring = Arc::new(std::sync::Mutex::new(
|
||||
std::collections::VecDeque::<i16>::with_capacity(FRAME_SAMPLES * 8),
|
||||
));
|
||||
|
||||
// Background drainer: moves frames from the mpsc channel into the ring.
|
||||
{
|
||||
let ring = ring.clone();
|
||||
let running = running_clone.clone();
|
||||
std::thread::Builder::new()
|
||||
.name("wzp-playback-drain".into())
|
||||
.spawn(move || {
|
||||
while running.load(Ordering::Relaxed) {
|
||||
match rx.recv_timeout(std::time::Duration::from_millis(100)) {
|
||||
Ok(frame) => {
|
||||
let mut lock = ring.lock().unwrap();
|
||||
lock.extend(frame);
|
||||
while lock.len() > FRAME_SAMPLES * 16 {
|
||||
lock.pop_front();
|
||||
}
|
||||
}
|
||||
Err(mpsc::RecvTimeoutError::Timeout) => {}
|
||||
Err(mpsc::RecvTimeoutError::Disconnected) => break,
|
||||
}
|
||||
}
|
||||
})?;
|
||||
}
|
||||
|
||||
let err_cb = |e: cpal::StreamError| {
|
||||
warn!("output stream error: {e}");
|
||||
};
|
||||
|
||||
let stream = if use_f32 {
|
||||
let ring = ring_cb.clone();
|
||||
let ring = ring.clone();
|
||||
device.build_output_stream(
|
||||
&config,
|
||||
move |data: &mut [f32], _: &cpal::OutputCallbackInfo| {
|
||||
let mut tmp = [0i16; FRAME_SAMPLES];
|
||||
for chunk in data.chunks_mut(FRAME_SAMPLES) {
|
||||
let n = chunk.len();
|
||||
let read = ring.read(&mut tmp[..n]);
|
||||
for i in 0..read {
|
||||
chunk[i] = i16_to_f32(tmp[i]);
|
||||
}
|
||||
// Fill remainder with silence if ring underran
|
||||
for i in read..n {
|
||||
chunk[i] = 0.0;
|
||||
}
|
||||
let mut lock = ring.lock().unwrap();
|
||||
for sample in data.iter_mut() {
|
||||
*sample = match lock.pop_front() {
|
||||
Some(s) => i16_to_f32(s),
|
||||
None => 0.0,
|
||||
};
|
||||
}
|
||||
},
|
||||
err_cb,
|
||||
None,
|
||||
)?
|
||||
} else {
|
||||
let ring = ring_cb.clone();
|
||||
let ring = ring.clone();
|
||||
device.build_output_stream(
|
||||
&config,
|
||||
move |data: &mut [i16], _: &cpal::OutputCallbackInfo| {
|
||||
let read = ring.read(data);
|
||||
// Fill remainder with silence if ring underran
|
||||
for sample in &mut data[read..] {
|
||||
*sample = 0;
|
||||
let mut lock = ring.lock().unwrap();
|
||||
for sample in data.iter_mut() {
|
||||
*sample = lock.pop_front().unwrap_or(0);
|
||||
}
|
||||
},
|
||||
err_cb,
|
||||
@@ -239,6 +257,7 @@ impl AudioPlayback {
|
||||
|
||||
stream.play().context("failed to start output stream")?;
|
||||
|
||||
// Signal success to the caller before parking.
|
||||
let _ = init_tx.send(Ok(()));
|
||||
|
||||
// Keep stream alive until stopped.
|
||||
@@ -259,12 +278,12 @@ impl AudioPlayback {
|
||||
.map_err(|_| anyhow!("playback thread exited before signaling"))?
|
||||
.map_err(|e| anyhow!("{e}"))?;
|
||||
|
||||
Ok(Self { ring, running })
|
||||
Ok(Self { tx, running })
|
||||
}
|
||||
|
||||
/// Get a reference to the playout ring buffer for direct writing.
|
||||
pub fn ring(&self) -> &Arc<AudioRing> {
|
||||
&self.ring
|
||||
/// Write a frame of PCM samples for playback.
|
||||
pub fn write_frame(&self, pcm: &[i16]) {
|
||||
let _ = self.tx.try_send(pcm.to_vec());
|
||||
}
|
||||
|
||||
/// Stop playback.
|
||||
@@ -273,16 +292,11 @@ impl AudioPlayback {
|
||||
}
|
||||
}
|
||||
|
||||
impl Drop for AudioPlayback {
|
||||
fn drop(&mut self) {
|
||||
self.stop();
|
||||
}
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Helpers
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// Check if the input device supports i16 at 48 kHz mono.
|
||||
fn supports_i16_input(device: &cpal::Device) -> Result<bool, anyhow::Error> {
|
||||
let supported = device
|
||||
.supported_input_configs()
|
||||
@@ -299,6 +313,7 @@ fn supports_i16_input(device: &cpal::Device) -> Result<bool, anyhow::Error> {
|
||||
Ok(false)
|
||||
}
|
||||
|
||||
/// Check if the output device supports i16 at 48 kHz mono.
|
||||
fn supports_i16_output(device: &cpal::Device) -> Result<bool, anyhow::Error> {
|
||||
let supported = device
|
||||
.supported_output_configs()
|
||||
|
||||
@@ -1,89 +0,0 @@
|
||||
//! Lock-free SPSC ring buffer for audio PCM transfer between
|
||||
//! CPAL audio callbacks and the Rust engine.
|
||||
//!
|
||||
//! Identical design to wzp-android's audio_ring: the producer writes and
|
||||
//! advances a write cursor, the consumer reads and advances a read cursor.
|
||||
//! Both cursors are atomic — no mutex, no blocking on the audio thread.
|
||||
|
||||
use std::sync::atomic::{AtomicUsize, Ordering};
|
||||
|
||||
/// Ring buffer capacity in i16 samples.
|
||||
/// 960 samples * 10 frames = ~200ms of audio at 48kHz mono.
|
||||
const RING_CAPACITY: usize = 960 * 10;
|
||||
|
||||
/// Lock-free single-producer single-consumer ring buffer for i16 PCM samples.
|
||||
pub struct AudioRing {
|
||||
buf: Box<[i16; RING_CAPACITY]>,
|
||||
write_pos: AtomicUsize,
|
||||
read_pos: AtomicUsize,
|
||||
}
|
||||
|
||||
// SAFETY: AudioRing is designed for SPSC — one thread writes, one reads.
|
||||
// The atomics ensure visibility. The buffer itself is never accessed
|
||||
// from the same index by both threads simultaneously because the
|
||||
// producer only writes to positions between write_pos and read_pos,
|
||||
// and the consumer only reads from positions between read_pos and write_pos.
|
||||
unsafe impl Send for AudioRing {}
|
||||
unsafe impl Sync for AudioRing {}
|
||||
|
||||
impl AudioRing {
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
buf: Box::new([0i16; RING_CAPACITY]),
|
||||
write_pos: AtomicUsize::new(0),
|
||||
read_pos: AtomicUsize::new(0),
|
||||
}
|
||||
}
|
||||
|
||||
/// Number of samples available to read.
|
||||
pub fn available(&self) -> usize {
|
||||
let w = self.write_pos.load(Ordering::Acquire);
|
||||
let r = self.read_pos.load(Ordering::Acquire);
|
||||
w.wrapping_sub(r)
|
||||
}
|
||||
|
||||
/// Write samples into the ring. Returns number of samples written.
|
||||
/// Drops oldest samples if the ring is full.
|
||||
pub fn write(&self, samples: &[i16]) -> usize {
|
||||
let w = self.write_pos.load(Ordering::Relaxed);
|
||||
let count = samples.len().min(RING_CAPACITY);
|
||||
|
||||
for i in 0..count {
|
||||
let idx = (w + i) % RING_CAPACITY;
|
||||
unsafe {
|
||||
let ptr = self.buf.as_ptr() as *mut i16;
|
||||
*ptr.add(idx) = samples[i];
|
||||
}
|
||||
}
|
||||
|
||||
self.write_pos
|
||||
.store(w.wrapping_add(count), Ordering::Release);
|
||||
|
||||
// If we overwrote unread data, advance read_pos
|
||||
if self.available() > RING_CAPACITY {
|
||||
let new_read = self
|
||||
.write_pos
|
||||
.load(Ordering::Relaxed)
|
||||
.wrapping_sub(RING_CAPACITY);
|
||||
self.read_pos.store(new_read, Ordering::Release);
|
||||
}
|
||||
|
||||
count
|
||||
}
|
||||
|
||||
/// Read samples from the ring into `out`. Returns number of samples read.
|
||||
pub fn read(&self, out: &mut [i16]) -> usize {
|
||||
let avail = self.available();
|
||||
let count = out.len().min(avail);
|
||||
|
||||
let r = self.read_pos.load(Ordering::Relaxed);
|
||||
for i in 0..count {
|
||||
let idx = (r + i) % RING_CAPACITY;
|
||||
out[i] = unsafe { *self.buf.as_ptr().add(idx) };
|
||||
}
|
||||
|
||||
self.read_pos
|
||||
.store(r.wrapping_add(count), Ordering::Release);
|
||||
count
|
||||
}
|
||||
}
|
||||
@@ -1,179 +0,0 @@
|
||||
//! macOS Voice Processing I/O — uses Apple's VoiceProcessingIO audio unit
|
||||
//! for hardware-accelerated echo cancellation, AGC, and noise suppression.
|
||||
//!
|
||||
//! VoiceProcessingIO is a combined input+output unit that knows what's going
|
||||
//! to the speaker, so it can cancel the echo from the mic signal internally.
|
||||
//! This is the same engine FaceTime and other Apple apps use.
|
||||
|
||||
use std::sync::atomic::{AtomicBool, Ordering};
|
||||
use std::sync::Arc;
|
||||
|
||||
use anyhow::Context;
|
||||
use coreaudio::audio_unit::audio_format::LinearPcmFlags;
|
||||
use coreaudio::audio_unit::render_callback::{self, data};
|
||||
use coreaudio::audio_unit::{AudioUnit, Element, IOType, SampleFormat, Scope, StreamFormat};
|
||||
use coreaudio::sys;
|
||||
use tracing::info;
|
||||
|
||||
use crate::audio_ring::AudioRing;
|
||||
|
||||
/// Number of samples per 20 ms frame at 48 kHz mono.
|
||||
pub const FRAME_SAMPLES: usize = 960;
|
||||
|
||||
/// Combined capture + playback via macOS VoiceProcessingIO.
|
||||
///
|
||||
/// The OS handles AEC internally — no manual far-end feeding needed.
|
||||
pub struct VpioAudio {
|
||||
capture_ring: Arc<AudioRing>,
|
||||
playout_ring: Arc<AudioRing>,
|
||||
_audio_unit: AudioUnit,
|
||||
running: Arc<AtomicBool>,
|
||||
}
|
||||
|
||||
impl VpioAudio {
|
||||
/// Start VoiceProcessingIO with AEC enabled.
|
||||
pub fn start() -> Result<Self, anyhow::Error> {
|
||||
let capture_ring = Arc::new(AudioRing::new());
|
||||
let playout_ring = Arc::new(AudioRing::new());
|
||||
let running = Arc::new(AtomicBool::new(true));
|
||||
|
||||
let mut au = AudioUnit::new(IOType::VoiceProcessingIO)
|
||||
.context("failed to create VoiceProcessingIO audio unit")?;
|
||||
|
||||
// Must uninitialize before configuring properties.
|
||||
au.uninitialize()
|
||||
.context("failed to uninitialize VPIO for configuration")?;
|
||||
|
||||
// Enable input (mic) on Element::Input (bus 1).
|
||||
let enable: u32 = 1;
|
||||
au.set_property(
|
||||
sys::kAudioOutputUnitProperty_EnableIO,
|
||||
Scope::Input,
|
||||
Element::Input,
|
||||
Some(&enable),
|
||||
)
|
||||
.context("failed to enable VPIO input")?;
|
||||
|
||||
// Output (speaker) is enabled by default on VPIO, but be explicit.
|
||||
au.set_property(
|
||||
sys::kAudioOutputUnitProperty_EnableIO,
|
||||
Scope::Output,
|
||||
Element::Output,
|
||||
Some(&enable),
|
||||
)
|
||||
.context("failed to enable VPIO output")?;
|
||||
|
||||
// Configure stream format: 48kHz mono f32 non-interleaved
|
||||
let stream_format = StreamFormat {
|
||||
sample_rate: 48_000.0,
|
||||
sample_format: SampleFormat::F32,
|
||||
flags: LinearPcmFlags::IS_FLOAT
|
||||
| LinearPcmFlags::IS_PACKED
|
||||
| LinearPcmFlags::IS_NON_INTERLEAVED,
|
||||
channels: 1,
|
||||
};
|
||||
|
||||
let asbd = stream_format.to_asbd();
|
||||
|
||||
// Input: set format on Output scope of Input element
|
||||
// (= the format the AU delivers to us from the mic)
|
||||
au.set_property(
|
||||
sys::kAudioUnitProperty_StreamFormat,
|
||||
Scope::Output,
|
||||
Element::Input,
|
||||
Some(&asbd),
|
||||
)
|
||||
.context("failed to set input stream format")?;
|
||||
|
||||
// Output: set format on Input scope of Output element
|
||||
// (= the format we feed to the AU for the speaker)
|
||||
au.set_property(
|
||||
sys::kAudioUnitProperty_StreamFormat,
|
||||
Scope::Input,
|
||||
Element::Output,
|
||||
Some(&asbd),
|
||||
)
|
||||
.context("failed to set output stream format")?;
|
||||
|
||||
// Set up input callback (mic capture with AEC applied)
|
||||
let cap_ring = capture_ring.clone();
|
||||
let cap_running = running.clone();
|
||||
let logged = Arc::new(AtomicBool::new(false));
|
||||
au.set_input_callback(
|
||||
move |args: render_callback::Args<data::NonInterleaved<f32>>| {
|
||||
if !cap_running.load(Ordering::Relaxed) {
|
||||
return Ok(());
|
||||
}
|
||||
let mut buffers = args.data.channels();
|
||||
if let Some(ch) = buffers.next() {
|
||||
if !logged.swap(true, Ordering::Relaxed) {
|
||||
eprintln!("[vpio] capture callback: {} f32 samples", ch.len());
|
||||
}
|
||||
let mut tmp = [0i16; FRAME_SAMPLES];
|
||||
for chunk in ch.chunks(FRAME_SAMPLES) {
|
||||
let n = chunk.len();
|
||||
for i in 0..n {
|
||||
tmp[i] = (chunk[i].clamp(-1.0, 1.0) * i16::MAX as f32) as i16;
|
||||
}
|
||||
cap_ring.write(&tmp[..n]);
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
},
|
||||
)
|
||||
.context("failed to set input callback")?;
|
||||
|
||||
// Set up output callback (speaker playback — AEC uses this as reference)
|
||||
let play_ring = playout_ring.clone();
|
||||
au.set_render_callback(
|
||||
move |mut args: render_callback::Args<data::NonInterleaved<f32>>| {
|
||||
let mut buffers = args.data.channels_mut();
|
||||
if let Some(ch) = buffers.next() {
|
||||
let mut tmp = [0i16; FRAME_SAMPLES];
|
||||
for chunk in ch.chunks_mut(FRAME_SAMPLES) {
|
||||
let n = chunk.len();
|
||||
let read = play_ring.read(&mut tmp[..n]);
|
||||
for i in 0..read {
|
||||
chunk[i] = tmp[i] as f32 / i16::MAX as f32;
|
||||
}
|
||||
for i in read..n {
|
||||
chunk[i] = 0.0;
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
},
|
||||
)
|
||||
.context("failed to set render callback")?;
|
||||
|
||||
au.initialize().context("failed to initialize VoiceProcessingIO")?;
|
||||
au.start().context("failed to start VoiceProcessingIO")?;
|
||||
|
||||
info!("VoiceProcessingIO started (OS-level AEC enabled)");
|
||||
|
||||
Ok(Self {
|
||||
capture_ring,
|
||||
playout_ring,
|
||||
_audio_unit: au,
|
||||
running,
|
||||
})
|
||||
}
|
||||
|
||||
pub fn capture_ring(&self) -> &Arc<AudioRing> {
|
||||
&self.capture_ring
|
||||
}
|
||||
|
||||
pub fn playout_ring(&self) -> &Arc<AudioRing> {
|
||||
&self.playout_ring
|
||||
}
|
||||
|
||||
pub fn stop(&self) {
|
||||
self.running.store(false, Ordering::Relaxed);
|
||||
}
|
||||
}
|
||||
|
||||
impl Drop for VpioAudio {
|
||||
fn drop(&mut self) {
|
||||
self.stop();
|
||||
}
|
||||
}
|
||||
@@ -7,7 +7,7 @@ use std::time::{Duration, Instant};
|
||||
use bytes::Bytes;
|
||||
use tracing::{debug, info, warn};
|
||||
|
||||
use wzp_codec::{AutoGainControl, ComfortNoise, EchoCanceller, NoiseSupressor, SilenceDetector};
|
||||
use wzp_codec::{ComfortNoise, NoiseSupressor, SilenceDetector};
|
||||
use wzp_fec::{RaptorQFecDecoder, RaptorQFecEncoder};
|
||||
use wzp_proto::jitter::{JitterBuffer, PlayoutResult};
|
||||
use wzp_proto::packet::{MediaHeader, MediaPacket, MiniFrameContext};
|
||||
@@ -42,9 +42,6 @@ pub struct CallConfig {
|
||||
/// When enabled, only every 50th frame carries a full 12-byte MediaHeader;
|
||||
/// intermediate frames use a compact 4-byte MiniHeader.
|
||||
pub mini_frames_enabled: bool,
|
||||
/// AEC far-end delay compensation in milliseconds (default: 40).
|
||||
/// Compensates for the round-trip audio latency from playout to mic capture.
|
||||
pub aec_delay_ms: u32,
|
||||
/// Enable adaptive jitter buffer (default: true).
|
||||
///
|
||||
/// When true, the jitter buffer target depth is automatically adjusted
|
||||
@@ -66,7 +63,6 @@ impl Default for CallConfig {
|
||||
noise_suppression: true,
|
||||
mini_frames_enabled: true,
|
||||
adaptive_jitter: true,
|
||||
aec_delay_ms: 40,
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -211,10 +207,6 @@ pub struct CallEncoder {
|
||||
frame_in_block: u8,
|
||||
/// Timestamp counter (ms).
|
||||
timestamp_ms: u32,
|
||||
/// Acoustic echo canceller (removes speaker echo from mic signal).
|
||||
aec: EchoCanceller,
|
||||
/// Automatic gain control (normalises mic level).
|
||||
agc: AutoGainControl,
|
||||
/// Silence detector for suppression.
|
||||
silence_detector: SilenceDetector,
|
||||
/// Whether silence suppression is enabled.
|
||||
@@ -245,8 +237,6 @@ impl CallEncoder {
|
||||
block_id: 0,
|
||||
frame_in_block: 0,
|
||||
timestamp_ms: 0,
|
||||
aec: EchoCanceller::with_delay(48000, 60, config.aec_delay_ms),
|
||||
agc: AutoGainControl::new(),
|
||||
silence_detector: SilenceDetector::new(
|
||||
config.silence_threshold_rms,
|
||||
config.silence_hangover_frames,
|
||||
@@ -284,21 +274,15 @@ impl CallEncoder {
|
||||
/// Input: 48kHz mono PCM, frame size depends on profile (960 for 20ms, 1920 for 40ms).
|
||||
/// Output: one or more MediaPackets to send.
|
||||
pub fn encode_frame(&mut self, pcm: &[i16]) -> Result<Vec<MediaPacket>, anyhow::Error> {
|
||||
// Copy PCM into a mutable buffer for the processing pipeline.
|
||||
let mut pcm_buf = pcm.to_vec();
|
||||
|
||||
// Step 1: Echo cancellation (far-end reference must have been fed already).
|
||||
self.aec.process_frame(&mut pcm_buf);
|
||||
|
||||
// Step 2: Automatic gain control (normalise mic level).
|
||||
self.agc.process_frame(&mut pcm_buf);
|
||||
|
||||
// Step 3: Noise suppression (RNNoise).
|
||||
if self.denoiser.is_enabled() {
|
||||
self.denoiser.process(&mut pcm_buf);
|
||||
}
|
||||
|
||||
let pcm = &pcm_buf[..];
|
||||
// Noise suppression: denoise the PCM before silence detection and encoding.
|
||||
let pcm = if self.denoiser.is_enabled() {
|
||||
let mut buf = pcm.to_vec();
|
||||
self.denoiser.process(&mut buf);
|
||||
buf
|
||||
} else {
|
||||
pcm.to_vec()
|
||||
};
|
||||
let pcm = &pcm[..];
|
||||
|
||||
// Silence suppression: skip encoding silent frames, periodically send CN.
|
||||
if self.suppression_enabled && self.silence_detector.is_silent(pcm) {
|
||||
@@ -416,24 +400,6 @@ impl CallEncoder {
|
||||
self.frame_in_block = 0;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Feed decoded playout audio as the echo reference signal.
|
||||
///
|
||||
/// Must be called with each decoded frame BEFORE the corresponding
|
||||
/// microphone frame is processed.
|
||||
pub fn feed_aec_farend(&mut self, farend: &[i16]) {
|
||||
self.aec.feed_farend(farend);
|
||||
}
|
||||
|
||||
/// Enable or disable acoustic echo cancellation.
|
||||
pub fn set_aec_enabled(&mut self, enabled: bool) {
|
||||
self.aec.set_enabled(enabled);
|
||||
}
|
||||
|
||||
/// Enable or disable automatic gain control.
|
||||
pub fn set_agc_enabled(&mut self, enabled: bool) {
|
||||
self.agc.set_enabled(enabled);
|
||||
}
|
||||
}
|
||||
|
||||
/// Manages the recv/decode side of a call.
|
||||
|
||||
@@ -14,7 +14,7 @@
|
||||
use std::net::SocketAddr;
|
||||
use std::sync::Arc;
|
||||
|
||||
use tracing::{error, info, warn};
|
||||
use tracing::{error, info};
|
||||
|
||||
use wzp_client::call::{CallConfig, CallDecoder, CallEncoder};
|
||||
use wzp_proto::MediaTransport;
|
||||
@@ -45,30 +45,12 @@ struct CliArgs {
|
||||
seed_hex: Option<String>,
|
||||
mnemonic: Option<String>,
|
||||
room: Option<String>,
|
||||
raw_room: bool,
|
||||
alias: Option<String>,
|
||||
no_denoise: bool,
|
||||
no_aec: bool,
|
||||
no_agc: bool,
|
||||
no_fec: bool,
|
||||
no_silence: bool,
|
||||
direct_playout: bool,
|
||||
aec_delay_ms: Option<u32>,
|
||||
os_aec: bool,
|
||||
token: Option<String>,
|
||||
_metrics_file: Option<String>,
|
||||
}
|
||||
|
||||
/// Default identity file path: ~/.wzp/identity
|
||||
fn default_identity_path() -> std::path::PathBuf {
|
||||
let home = std::env::var("HOME").unwrap_or_else(|_| ".".to_string());
|
||||
std::path::PathBuf::from(home).join(".wzp").join("identity")
|
||||
}
|
||||
|
||||
impl CliArgs {
|
||||
/// Resolve the identity seed from --seed, --mnemonic, or persistent file.
|
||||
///
|
||||
/// Priority: --seed > --mnemonic > ~/.wzp/identity > generate + save.
|
||||
/// Resolve the identity seed from --seed, --mnemonic, or generate a new one.
|
||||
pub fn resolve_seed(&self) -> wzp_crypto::Seed {
|
||||
if let Some(ref hex_str) = self.seed_hex {
|
||||
let seed = wzp_crypto::Seed::from_hex(hex_str).expect("invalid --seed hex");
|
||||
@@ -83,30 +65,10 @@ impl CliArgs {
|
||||
info!(fingerprint = %fp, "identity from --mnemonic");
|
||||
seed
|
||||
} else {
|
||||
let path = default_identity_path();
|
||||
// Try loading existing identity
|
||||
if path.exists() {
|
||||
if let Ok(hex_str) = std::fs::read_to_string(&path) {
|
||||
let hex_str = hex_str.trim();
|
||||
if let Ok(seed) = wzp_crypto::Seed::from_hex(hex_str) {
|
||||
let id = seed.derive_identity();
|
||||
let fp = id.public_identity().fingerprint;
|
||||
info!(fingerprint = %fp, path = %path.display(), "loaded persistent identity");
|
||||
return seed;
|
||||
}
|
||||
}
|
||||
}
|
||||
// Generate new and save
|
||||
let seed = wzp_crypto::Seed::generate();
|
||||
let id = seed.derive_identity();
|
||||
let fp = id.public_identity().fingerprint;
|
||||
if let Some(parent) = path.parent() {
|
||||
std::fs::create_dir_all(parent).ok();
|
||||
}
|
||||
// Encode seed as hex manually (avoid dep on `hex` crate in binary)
|
||||
let hex_str: String = seed.0.iter().map(|b| format!("{b:02x}")).collect();
|
||||
std::fs::write(&path, hex_str).ok();
|
||||
info!(fingerprint = %fp, path = %path.display(), "generated and saved new identity");
|
||||
info!(fingerprint = %fp, "generated ephemeral identity");
|
||||
seed
|
||||
}
|
||||
}
|
||||
@@ -124,16 +86,6 @@ fn parse_args() -> CliArgs {
|
||||
let mut seed_hex = None;
|
||||
let mut mnemonic = None;
|
||||
let mut room = None;
|
||||
let mut raw_room = false;
|
||||
let mut alias = None;
|
||||
let mut no_denoise = false;
|
||||
let mut no_aec = false;
|
||||
let mut no_agc = false;
|
||||
let mut no_fec = false;
|
||||
let mut no_silence = false;
|
||||
let mut direct_playout = false;
|
||||
let mut aec_delay_ms = None;
|
||||
let mut os_aec = false;
|
||||
let mut token = None;
|
||||
let mut metrics_file = None;
|
||||
let mut relay_str = None;
|
||||
@@ -178,27 +130,6 @@ fn parse_args() -> CliArgs {
|
||||
i += 1;
|
||||
room = Some(args.get(i).expect("--room requires a name").to_string());
|
||||
}
|
||||
"--raw-room" => raw_room = true,
|
||||
"--no-denoise" => no_denoise = true,
|
||||
"--no-aec" => no_aec = true,
|
||||
"--no-agc" => no_agc = true,
|
||||
"--no-fec" => no_fec = true,
|
||||
"--no-silence" => no_silence = true,
|
||||
"--direct-playout" | "--android" => direct_playout = true,
|
||||
"--os-aec" => os_aec = true,
|
||||
"--aec-delay" => {
|
||||
i += 1;
|
||||
aec_delay_ms = Some(
|
||||
args.get(i)
|
||||
.expect("--aec-delay requires milliseconds")
|
||||
.parse()
|
||||
.expect("--aec-delay value must be a number"),
|
||||
);
|
||||
}
|
||||
"--alias" => {
|
||||
i += 1;
|
||||
alias = Some(args.get(i).expect("--alias requires a name").to_string());
|
||||
}
|
||||
"--token" => {
|
||||
i += 1;
|
||||
token = Some(args.get(i).expect("--token requires a value").to_string());
|
||||
@@ -252,22 +183,10 @@ fn parse_args() -> CliArgs {
|
||||
eprintln!(" --seed <hex> Identity seed (64 hex chars, featherChat compatible)");
|
||||
eprintln!(" --mnemonic <words...> Identity seed as BIP39 mnemonic (24 words)");
|
||||
eprintln!(" --room <name> Room name (hashed for privacy before sending)");
|
||||
eprintln!(" --raw-room Send room name as-is (no hash, for Android compat)");
|
||||
eprintln!(" --alias <name> Display name shown to other participants");
|
||||
eprintln!(" --no-denoise Disable RNNoise noise suppression");
|
||||
eprintln!(" --no-aec Disable acoustic echo cancellation");
|
||||
eprintln!(" --no-agc Disable automatic gain control");
|
||||
eprintln!(" --no-fec Disable forward error correction");
|
||||
eprintln!(" --no-silence Disable silence suppression");
|
||||
eprintln!(" --direct-playout Bypass jitter buffer (decode on recv, like Android)");
|
||||
eprintln!(" --aec-delay <ms> AEC far-end delay compensation (default: 40ms)");
|
||||
eprintln!(" --os-aec Use macOS VoiceProcessingIO for hardware AEC (requires --vpio feature)");
|
||||
eprintln!(" --android Alias for --no-denoise --no-silence --direct-playout");
|
||||
eprintln!(" --token <token> featherChat bearer token for relay auth");
|
||||
eprintln!(" --metrics-file <path> Write JSONL telemetry to file (1 line/sec)");
|
||||
eprintln!(" (48kHz mono s16le, play with ffplay -f s16le -ar 48000 -ch_layout mono file.raw)");
|
||||
eprintln!();
|
||||
eprintln!("Identity is auto-saved to ~/.wzp/identity on first run.");
|
||||
eprintln!("Default relay: 127.0.0.1:4433");
|
||||
std::process::exit(0);
|
||||
}
|
||||
@@ -300,16 +219,6 @@ fn parse_args() -> CliArgs {
|
||||
seed_hex,
|
||||
mnemonic,
|
||||
room,
|
||||
raw_room,
|
||||
alias,
|
||||
no_denoise,
|
||||
no_aec,
|
||||
no_agc,
|
||||
no_fec,
|
||||
no_silence,
|
||||
direct_playout,
|
||||
aec_delay_ms,
|
||||
os_aec,
|
||||
token,
|
||||
_metrics_file: metrics_file,
|
||||
}
|
||||
@@ -341,14 +250,8 @@ async fn main() -> anyhow::Result<()> {
|
||||
"WarzonePhone client"
|
||||
);
|
||||
|
||||
// Compute SNI from room name.
|
||||
// --raw-room sends the name as-is (for Android compat — Android doesn't hash).
|
||||
// Default behaviour hashes for privacy.
|
||||
// Hash room name for SNI privacy (or "default" if none specified)
|
||||
let sni = match &cli.room {
|
||||
Some(name) if cli.raw_room => {
|
||||
info!(room = %name, "using raw room name as SNI (no hash)");
|
||||
name.clone()
|
||||
}
|
||||
Some(name) => {
|
||||
let hashed = wzp_crypto::hash_room_name(name);
|
||||
info!(room = %name, hashed = %hashed, "room name hashed for SNI");
|
||||
@@ -384,24 +287,13 @@ async fn main() -> anyhow::Result<()> {
|
||||
let _crypto_session = wzp_client::handshake::perform_handshake(
|
||||
&*transport,
|
||||
&seed.0,
|
||||
cli.alias.as_deref(),
|
||||
).await?;
|
||||
info!("crypto handshake complete");
|
||||
|
||||
if cli.live {
|
||||
#[cfg(feature = "audio")]
|
||||
{
|
||||
let audio_opts = AudioOpts {
|
||||
no_denoise: cli.no_denoise || cli.direct_playout,
|
||||
no_aec: cli.no_aec,
|
||||
no_agc: cli.no_agc,
|
||||
no_fec: cli.no_fec,
|
||||
no_silence: cli.no_silence || cli.direct_playout,
|
||||
direct_playout: cli.direct_playout,
|
||||
aec_delay_ms: cli.aec_delay_ms,
|
||||
os_aec: cli.os_aec,
|
||||
};
|
||||
return run_live(transport, audio_opts).await;
|
||||
return run_live(transport).await;
|
||||
}
|
||||
#[cfg(not(feature = "audio"))]
|
||||
{
|
||||
@@ -450,7 +342,8 @@ async fn run_silence(transport: Arc<wzp_transport::QuinnTransport>) -> anyhow::R
|
||||
}
|
||||
total_bytes += pkt.payload.len() as u64;
|
||||
if let Err(e) = transport.send_media(pkt).await {
|
||||
warn!("send_media error (dropping packet): {e}");
|
||||
error!("send error: {e}");
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (i + 1) % 50 == 0 {
|
||||
@@ -535,7 +428,8 @@ async fn run_file_mode(
|
||||
total_source += 1;
|
||||
}
|
||||
if let Err(e) = send_transport.send_media(pkt).await {
|
||||
warn!("send_media error (dropping packet): {e}");
|
||||
error!("send error: {e}");
|
||||
return;
|
||||
}
|
||||
}
|
||||
if (frame_idx + 1) % 250 == 0 {
|
||||
@@ -654,519 +548,78 @@ async fn run_file_mode(
|
||||
}
|
||||
|
||||
/// Live mode: capture from mic, encode, send; receive, decode, play.
|
||||
///
|
||||
/// Architecture (mirrors wzp-android/engine.rs):
|
||||
/// CPAL capture callback → AudioRing → send task (5ms poll) → QUIC
|
||||
/// QUIC → recv task → jitter buffer → decode tick (20ms) → AudioRing → CPAL playback callback
|
||||
///
|
||||
/// All lock-free: CPAL callbacks use atomic ring buffers, no Mutex on the audio path.
|
||||
/// RAII guard for terminal raw mode. Restores on drop.
|
||||
struct RawModeGuard {
|
||||
orig: libc::termios,
|
||||
}
|
||||
|
||||
impl RawModeGuard {
|
||||
fn enter() -> Option<Self> {
|
||||
unsafe {
|
||||
let mut orig: libc::termios = std::mem::zeroed();
|
||||
if libc::tcgetattr(libc::STDIN_FILENO, &mut orig) != 0 {
|
||||
return None;
|
||||
}
|
||||
let mut raw = orig;
|
||||
// ICANON: character-at-a-time input
|
||||
// ECHO: don't echo typed characters
|
||||
// ISIG: let us handle Ctrl+C as a byte
|
||||
raw.c_lflag &= !(libc::ICANON | libc::ECHO | libc::ISIG);
|
||||
// IXON: disable Ctrl+S/Ctrl+Q flow control so we receive them
|
||||
raw.c_iflag &= !libc::IXON;
|
||||
raw.c_cc[libc::VMIN] = 1;
|
||||
raw.c_cc[libc::VTIME] = 0;
|
||||
libc::tcsetattr(libc::STDIN_FILENO, libc::TCSANOW, &raw);
|
||||
Some(Self { orig })
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Drop for RawModeGuard {
|
||||
fn drop(&mut self) {
|
||||
unsafe {
|
||||
libc::tcsetattr(libc::STDIN_FILENO, libc::TCSANOW, &self.orig);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
struct AudioOpts {
|
||||
no_denoise: bool,
|
||||
no_aec: bool,
|
||||
no_agc: bool,
|
||||
no_fec: bool,
|
||||
no_silence: bool,
|
||||
direct_playout: bool,
|
||||
aec_delay_ms: Option<u32>,
|
||||
os_aec: bool,
|
||||
}
|
||||
|
||||
#[cfg(feature = "audio")]
|
||||
async fn run_live(
|
||||
transport: Arc<wzp_transport::QuinnTransport>,
|
||||
opts: AudioOpts,
|
||||
) -> anyhow::Result<()> {
|
||||
use std::sync::Arc as StdArc;
|
||||
use std::sync::atomic::{AtomicBool, Ordering};
|
||||
async fn run_live(transport: Arc<wzp_transport::QuinnTransport>) -> anyhow::Result<()> {
|
||||
use wzp_client::audio_io::{AudioCapture, AudioPlayback};
|
||||
use wzp_client::audio_ring::AudioRing;
|
||||
use wzp_client::call::JitterTelemetry;
|
||||
|
||||
// Audio I/O: either VPIO (OS-level AEC) or separate CPAL streams.
|
||||
#[cfg(all(target_os = "macos", feature = "vpio"))]
|
||||
let vpio;
|
||||
let (capture_ring, playout_ring) = if opts.os_aec {
|
||||
#[cfg(all(target_os = "macos", feature = "vpio"))]
|
||||
{
|
||||
vpio = wzp_client::audio_vpio::VpioAudio::start()?;
|
||||
(vpio.capture_ring().clone(), vpio.playout_ring().clone())
|
||||
}
|
||||
#[cfg(all(target_os = "macos", not(feature = "vpio")))]
|
||||
{
|
||||
anyhow::bail!("--os-aec requires the 'vpio' feature (build with: cargo build --features audio,vpio)");
|
||||
}
|
||||
#[cfg(target_os = "windows")]
|
||||
{
|
||||
warn!("--os-aec on Windows is experimental and not yet tested");
|
||||
warn!("Windows Voice Capture DSP (MFT) AEC is not yet implemented");
|
||||
warn!("falling back to CPAL without AEC — please report issues");
|
||||
let capture = AudioCapture::start()?;
|
||||
let playback = AudioPlayback::start()?;
|
||||
let cr = capture.ring().clone();
|
||||
let pr = playback.ring().clone();
|
||||
std::mem::forget(capture);
|
||||
std::mem::forget(playback);
|
||||
(cr, pr)
|
||||
}
|
||||
#[cfg(target_os = "linux")]
|
||||
{
|
||||
warn!("--os-aec on Linux is experimental and not yet tested");
|
||||
warn!("PipeWire/PulseAudio echo-cancel module AEC is not yet implemented");
|
||||
warn!("falling back to CPAL without AEC — please report issues");
|
||||
let capture = AudioCapture::start()?;
|
||||
let playback = AudioPlayback::start()?;
|
||||
let cr = capture.ring().clone();
|
||||
let pr = playback.ring().clone();
|
||||
std::mem::forget(capture);
|
||||
std::mem::forget(playback);
|
||||
(cr, pr)
|
||||
}
|
||||
} else {
|
||||
let capture = AudioCapture::start()?;
|
||||
let playback = AudioPlayback::start()?;
|
||||
let cr = capture.ring().clone();
|
||||
let pr = playback.ring().clone();
|
||||
// Keep handles alive (streams stop when dropped)
|
||||
std::mem::forget(capture);
|
||||
std::mem::forget(playback);
|
||||
(cr, pr)
|
||||
};
|
||||
info!(os_aec = opts.os_aec, "audio I/O started — press Ctrl+C to stop");
|
||||
let capture = AudioCapture::start()?;
|
||||
let playback = AudioPlayback::start()?;
|
||||
info!("Audio I/O started — press Ctrl+C to stop");
|
||||
|
||||
// Far-end reference ring (only used when NOT using OS AEC).
|
||||
let farend_ring = StdArc::new(AudioRing::new());
|
||||
|
||||
let running = StdArc::new(AtomicBool::new(true));
|
||||
let mic_muted = StdArc::new(AtomicBool::new(false));
|
||||
let spk_muted = StdArc::new(AtomicBool::new(false));
|
||||
|
||||
// --- Signal handler: set running=false on first Ctrl+C, force-quit on second ---
|
||||
let signal_running = running.clone();
|
||||
tokio::spawn(async move {
|
||||
tokio::signal::ctrl_c().await.ok();
|
||||
eprintln!(); // newline after ^C
|
||||
info!("Ctrl+C received, shutting down...");
|
||||
signal_running.store(false, Ordering::SeqCst);
|
||||
|
||||
tokio::signal::ctrl_c().await.ok();
|
||||
eprintln!("\nForce quit");
|
||||
std::process::exit(1);
|
||||
});
|
||||
|
||||
let config = CallConfig {
|
||||
noise_suppression: !opts.no_denoise,
|
||||
suppression_enabled: !opts.no_silence,
|
||||
aec_delay_ms: opts.aec_delay_ms.unwrap_or(40),
|
||||
..CallConfig::default()
|
||||
};
|
||||
{
|
||||
let mut flags = Vec::new();
|
||||
if opts.no_denoise { flags.push("denoise"); }
|
||||
if opts.no_aec { flags.push("aec"); }
|
||||
if opts.no_agc { flags.push("agc"); }
|
||||
if opts.no_fec { flags.push("fec"); }
|
||||
if opts.no_silence { flags.push("silence"); }
|
||||
if opts.direct_playout { flags.push("jitter-buffer (direct playout)"); }
|
||||
if !flags.is_empty() {
|
||||
info!(disabled = %flags.join(", "), "audio processing overrides");
|
||||
}
|
||||
}
|
||||
|
||||
// --- Send task: poll capture ring → encode → send via async ---
|
||||
let send_transport = transport.clone();
|
||||
let send_running = running.clone();
|
||||
let send_mic_muted = mic_muted.clone();
|
||||
let no_aec = opts.no_aec || opts.os_aec; // OS AEC replaces software AEC
|
||||
let no_agc = opts.no_agc;
|
||||
let _no_fec = opts.no_fec;
|
||||
let send_farend = farend_ring.clone();
|
||||
let send_task = async move {
|
||||
let mut encoder = CallEncoder::new(&config);
|
||||
if no_aec { encoder.set_aec_enabled(false); }
|
||||
if no_agc { encoder.set_agc_enabled(false); }
|
||||
let mut capture_buf = vec![0i16; FRAME_SAMPLES];
|
||||
let mut farend_buf = vec![0i16; FRAME_SAMPLES];
|
||||
let mut frames_sent: u64 = 0;
|
||||
let mut frames_dropped: u64 = 0;
|
||||
let mut send_errors: u64 = 0;
|
||||
let mut last_send_err = std::time::Instant::now();
|
||||
let mut polls: u64 = 0;
|
||||
let mut last_diag = std::time::Instant::now();
|
||||
|
||||
loop {
|
||||
if !send_running.load(Ordering::Relaxed) {
|
||||
break;
|
||||
}
|
||||
|
||||
let avail = capture_ring.available();
|
||||
if avail < FRAME_SAMPLES {
|
||||
tokio::time::sleep(std::time::Duration::from_millis(5)).await;
|
||||
polls += 1;
|
||||
// Diagnostic every 2 seconds
|
||||
if last_diag.elapsed().as_secs() >= 2 {
|
||||
info!(avail, polls, frames_sent, "send: ring starved (avail < {FRAME_SAMPLES})");
|
||||
last_diag = std::time::Instant::now();
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
let read = capture_ring.read(&mut capture_buf);
|
||||
if read < FRAME_SAMPLES {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Mic mute: zero out capture buffer (still encode + send silence to keep stream alive)
|
||||
if send_mic_muted.load(Ordering::Relaxed) {
|
||||
capture_buf.fill(0);
|
||||
}
|
||||
|
||||
// Feed AEC far-end reference: what was played through the speaker.
|
||||
// Must be called BEFORE encode_frame processes the mic signal.
|
||||
if !no_aec {
|
||||
while send_farend.available() >= FRAME_SAMPLES {
|
||||
send_farend.read(&mut farend_buf);
|
||||
encoder.feed_aec_farend(&farend_buf);
|
||||
}
|
||||
}
|
||||
|
||||
let t0 = std::time::Instant::now();
|
||||
let packets = match encoder.encode_frame(&capture_buf) {
|
||||
Ok(p) => p,
|
||||
Err(e) => {
|
||||
error!("encode error: {e}");
|
||||
continue;
|
||||
}
|
||||
};
|
||||
let encode_us = t0.elapsed().as_micros();
|
||||
|
||||
let mut dropped = false;
|
||||
for pkt in &packets {
|
||||
if let Err(e) = send_transport.send_media(pkt).await {
|
||||
send_errors += 1;
|
||||
frames_dropped += 1;
|
||||
dropped = true;
|
||||
if send_errors <= 3 || last_send_err.elapsed().as_secs() >= 1 {
|
||||
warn!(send_errors, frames_dropped,
|
||||
"send_media error (dropping packet): {e}");
|
||||
last_send_err = std::time::Instant::now();
|
||||
let rt_handle = tokio::runtime::Handle::current();
|
||||
let send_handle = std::thread::Builder::new()
|
||||
.name("wzp-send-loop".into())
|
||||
.spawn(move || {
|
||||
let config = CallConfig::default();
|
||||
let mut encoder = CallEncoder::new(&config);
|
||||
loop {
|
||||
let frame = match capture.read_frame() {
|
||||
Some(f) => f,
|
||||
None => break,
|
||||
};
|
||||
let packets = match encoder.encode_frame(&frame) {
|
||||
Ok(p) => p,
|
||||
Err(e) => {
|
||||
error!("encode error: {e}");
|
||||
continue;
|
||||
}
|
||||
};
|
||||
for pkt in &packets {
|
||||
if let Err(e) = rt_handle.block_on(send_transport.send_media(pkt)) {
|
||||
error!("send error: {e}");
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
})?;
|
||||
|
||||
if !dropped {
|
||||
send_errors = 0; // reset on success
|
||||
}
|
||||
frames_sent += 1;
|
||||
if frames_sent <= 5 || frames_sent % 500 == 0 {
|
||||
info!(frames_sent, encode_us, pkts = packets.len(), "send progress");
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// --- Recv + playout ---
|
||||
let recv_transport = transport.clone();
|
||||
let recv_running = running.clone();
|
||||
let recv_spk_muted = spk_muted.clone();
|
||||
let direct_playout = opts.direct_playout;
|
||||
|
||||
// Direct playout: decode on recv, write straight to playout ring (like Android).
|
||||
// Jitter buffer mode: ingest into jitter buffer, decode on 20ms tick.
|
||||
let recv_task = {
|
||||
let playout_ring = playout_ring.clone();
|
||||
let farend_ring = farend_ring.clone();
|
||||
let config = CallConfig::default();
|
||||
let decoder = StdArc::new(tokio::sync::Mutex::new(CallDecoder::new(&config)));
|
||||
let decoder_recv = decoder.clone();
|
||||
|
||||
async move {
|
||||
let mut packets_received: u64 = 0;
|
||||
let mut recv_errors: u64 = 0;
|
||||
let mut timeouts: u64 = 0;
|
||||
// For direct playout: raw Opus decoder + AGC
|
||||
let mut opus_dec = if direct_playout {
|
||||
Some(wzp_codec::create_decoder(wzp_proto::QualityProfile::GOOD))
|
||||
} else {
|
||||
None
|
||||
};
|
||||
let mut playout_agc = wzp_codec::AutoGainControl::new();
|
||||
let mut pcm_buf = vec![0i16; FRAME_SAMPLES];
|
||||
|
||||
loop {
|
||||
if !recv_running.load(Ordering::Relaxed) {
|
||||
break;
|
||||
}
|
||||
let result = tokio::time::timeout(
|
||||
std::time::Duration::from_millis(100),
|
||||
recv_transport.recv_media(),
|
||||
)
|
||||
.await;
|
||||
match result {
|
||||
Ok(Ok(Some(pkt))) => {
|
||||
packets_received += 1;
|
||||
|
||||
if direct_playout {
|
||||
// Android path: decode immediately, AGC, write to ring
|
||||
if !pkt.header.is_repair {
|
||||
if let Some(ref mut dec) = opus_dec {
|
||||
match dec.decode(&pkt.payload, &mut pcm_buf) {
|
||||
Ok(n) => {
|
||||
if !no_agc {
|
||||
playout_agc.process_frame(&mut pcm_buf[..n]);
|
||||
}
|
||||
// Always feed AEC (even when speaker muted)
|
||||
farend_ring.write(&pcm_buf[..n]);
|
||||
// Speaker mute: don't write to playout ring
|
||||
if !recv_spk_muted.load(Ordering::Relaxed) {
|
||||
playout_ring.write(&pcm_buf[..n]);
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
if let Ok(n) = dec.decode_lost(&mut pcm_buf) {
|
||||
if !recv_spk_muted.load(Ordering::Relaxed) {
|
||||
playout_ring.write(&pcm_buf[..n]);
|
||||
}
|
||||
}
|
||||
if packets_received < 10 {
|
||||
warn!("decode error: {e}");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// Jitter buffer path
|
||||
let mut dec = decoder_recv.lock().await;
|
||||
dec.ingest(pkt);
|
||||
}
|
||||
|
||||
if packets_received == 1 || packets_received % 500 == 0 {
|
||||
info!(packets_received, direct_playout, "recv progress");
|
||||
}
|
||||
timeouts = 0;
|
||||
}
|
||||
Ok(Ok(None)) => {
|
||||
info!("connection closed");
|
||||
break;
|
||||
}
|
||||
Ok(Err(e)) => {
|
||||
let msg = e.to_string();
|
||||
if msg.contains("closed") || msg.contains("reset") {
|
||||
error!("recv fatal: {e}");
|
||||
break;
|
||||
}
|
||||
recv_errors += 1;
|
||||
if recv_errors <= 3 {
|
||||
warn!("recv error (continuing): {e}");
|
||||
}
|
||||
}
|
||||
Err(_) => {
|
||||
timeouts += 1;
|
||||
if timeouts == 50 {
|
||||
info!("recv: no media packets received in 5s");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// Playout tick — only used when NOT in direct playout mode
|
||||
let playout_running = running.clone();
|
||||
let playout_task = async move {
|
||||
if direct_playout {
|
||||
// Direct playout handles everything in recv_task — just park here
|
||||
loop {
|
||||
tokio::time::sleep(std::time::Duration::from_secs(1)).await;
|
||||
if !playout_running.load(Ordering::Relaxed) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
let recv_handle = tokio::spawn(async move {
|
||||
let config = CallConfig::default();
|
||||
let mut decoder = CallDecoder::new(&config);
|
||||
let mut pcm_buf = vec![0i16; FRAME_SAMPLES];
|
||||
let mut interval = tokio::time::interval(std::time::Duration::from_millis(20));
|
||||
interval.set_missed_tick_behavior(tokio::time::MissedTickBehavior::Skip);
|
||||
let mut telemetry = JitterTelemetry::new(5);
|
||||
loop {
|
||||
interval.tick().await;
|
||||
if !playout_running.load(Ordering::Relaxed) {
|
||||
break;
|
||||
}
|
||||
|
||||
let mut decoded_this_tick = 0;
|
||||
while let Some(n) = decoder.decode_next(&mut pcm_buf) {
|
||||
playout_ring.write(&pcm_buf[..n]);
|
||||
decoded_this_tick += 1;
|
||||
if decoded_this_tick >= 2 {
|
||||
match recv_transport.recv_media().await {
|
||||
Ok(Some(pkt)) => {
|
||||
let is_repair = pkt.header.is_repair;
|
||||
decoder.ingest(pkt);
|
||||
// Only decode for source packets (1 source = 1 audio frame).
|
||||
// Repair packets feed the FEC decoder but don't produce audio.
|
||||
if !is_repair {
|
||||
if let Some(_n) = decoder.decode_next(&mut pcm_buf) {
|
||||
playback.write_frame(&pcm_buf);
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(None) => {
|
||||
info!("connection closed");
|
||||
break;
|
||||
}
|
||||
Err(e) => {
|
||||
error!("recv error: {e}");
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
telemetry.maybe_log(decoder.stats());
|
||||
}
|
||||
};
|
||||
});
|
||||
|
||||
// --- Signal task: listen for RoomUpdate and display presence ---
|
||||
let signal_transport = transport.clone();
|
||||
let signal_running = running.clone();
|
||||
let signal_task = async move {
|
||||
loop {
|
||||
if !signal_running.load(Ordering::Relaxed) {
|
||||
break;
|
||||
}
|
||||
let result = tokio::time::timeout(
|
||||
std::time::Duration::from_millis(200),
|
||||
signal_transport.recv_signal(),
|
||||
)
|
||||
.await;
|
||||
match result {
|
||||
Ok(Ok(Some(wzp_proto::SignalMessage::RoomUpdate { participants, .. }))) => {
|
||||
// Dedup by (fingerprint, alias) — same peer may appear multiple times
|
||||
let mut seen = std::collections::HashSet::new();
|
||||
let unique: Vec<_> = participants
|
||||
.iter()
|
||||
.filter(|p| seen.insert((&p.fingerprint, &p.alias)))
|
||||
.collect();
|
||||
info!(count = unique.len(), "room update");
|
||||
for p in &unique {
|
||||
let name = p
|
||||
.alias
|
||||
.as_deref()
|
||||
.unwrap_or("(no alias)");
|
||||
let fp = if p.fingerprint.is_empty() {
|
||||
"(no fingerprint)"
|
||||
} else {
|
||||
&p.fingerprint
|
||||
};
|
||||
info!(" participant: {name} [{fp}]");
|
||||
}
|
||||
}
|
||||
Ok(Ok(Some(msg))) => {
|
||||
info!("signal: {:?}", std::mem::discriminant(&msg));
|
||||
}
|
||||
Ok(Ok(None)) => {
|
||||
info!("signal stream closed");
|
||||
break;
|
||||
}
|
||||
Ok(Err(e)) => {
|
||||
error!("signal recv error: {e}");
|
||||
break;
|
||||
}
|
||||
Err(_) => {} // timeout — loop and check running flag
|
||||
}
|
||||
}
|
||||
};
|
||||
tokio::signal::ctrl_c().await?;
|
||||
info!("Shutting down...");
|
||||
|
||||
// --- Keyboard task: Ctrl+M = toggle mic mute, Ctrl+S = toggle speaker mute ---
|
||||
let kb_running = running.clone();
|
||||
let kb_mic = mic_muted.clone();
|
||||
let kb_spk = spk_muted.clone();
|
||||
let keyboard_task = async move {
|
||||
use tokio::io::AsyncReadExt;
|
||||
|
||||
// Put terminal in raw mode so we get individual keypresses
|
||||
let _raw_guard = RawModeGuard::enter();
|
||||
|
||||
let mut stdin = tokio::io::stdin();
|
||||
let mut buf = [0u8; 1];
|
||||
loop {
|
||||
if !kb_running.load(Ordering::Relaxed) {
|
||||
break;
|
||||
}
|
||||
match tokio::time::timeout(
|
||||
std::time::Duration::from_millis(200),
|
||||
stdin.read(&mut buf),
|
||||
)
|
||||
.await
|
||||
{
|
||||
Ok(Ok(1)) => match buf[0] {
|
||||
b'm' | b'M' | 0x0D => {
|
||||
// 'm' or Ctrl+M
|
||||
let was = kb_mic.fetch_xor(true, Ordering::SeqCst);
|
||||
let state = if !was { "MUTED" } else { "unmuted" };
|
||||
eprintln!("\r[mic {state}]");
|
||||
}
|
||||
b's' | b'S' | 0x13 => {
|
||||
// 's' or Ctrl+S
|
||||
let was = kb_spk.fetch_xor(true, Ordering::SeqCst);
|
||||
let state = if !was { "MUTED" } else { "unmuted" };
|
||||
eprintln!("\r[speaker {state}]");
|
||||
}
|
||||
0x03 => {
|
||||
// Ctrl+C
|
||||
eprintln!();
|
||||
info!("Ctrl+C received, shutting down...");
|
||||
kb_running.store(false, Ordering::SeqCst);
|
||||
break;
|
||||
}
|
||||
b'q' | b'Q' => {
|
||||
eprintln!("\r[quit]");
|
||||
kb_running.store(false, Ordering::SeqCst);
|
||||
break;
|
||||
}
|
||||
_ => {}
|
||||
},
|
||||
Ok(Ok(_)) | Ok(Err(_)) => break,
|
||||
Err(_) => {} // timeout
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// --- Run all tasks, exit when any finishes (or running flag cleared by Ctrl+C) ---
|
||||
tokio::select! {
|
||||
_ = send_task => info!("send task ended"),
|
||||
_ = recv_task => info!("recv task ended"),
|
||||
_ = playout_task => info!("playout task ended"),
|
||||
_ = signal_task => info!("signal task ended"),
|
||||
_ = keyboard_task => info!("keyboard task ended"),
|
||||
}
|
||||
|
||||
running.store(false, Ordering::SeqCst);
|
||||
// Audio streams stop when their handles are dropped (via mem::forget above or VPIO drop).
|
||||
|
||||
// Give transport 2s to close gracefully, then bail
|
||||
match tokio::time::timeout(std::time::Duration::from_secs(2), transport.close()).await {
|
||||
Ok(Ok(())) => info!("done"),
|
||||
Ok(Err(e)) => info!("close error (non-fatal): {e}"),
|
||||
Err(_) => info!("close timed out, exiting anyway"),
|
||||
}
|
||||
recv_handle.abort();
|
||||
drop(send_handle);
|
||||
transport.close().await?;
|
||||
info!("done");
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -109,15 +109,12 @@ pub fn signal_to_call_type(signal: &SignalMessage) -> CallSignalType {
|
||||
SignalMessage::RouteResponse { .. } => CallSignalType::Offer, // reuse
|
||||
SignalMessage::SessionForward { .. } => CallSignalType::Offer, // reuse
|
||||
SignalMessage::SessionForwardAck { .. } => CallSignalType::Offer, // reuse
|
||||
SignalMessage::RoomUpdate { .. } => CallSignalType::Offer, // reuse
|
||||
SignalMessage::SetAlias { .. } => CallSignalType::Offer, // reuse
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use wzp_proto::QualityProfile;
|
||||
|
||||
#[test]
|
||||
fn payload_roundtrip() {
|
||||
|
||||
@@ -17,7 +17,6 @@ use wzp_proto::{MediaTransport, QualityProfile, SignalMessage};
|
||||
pub async fn perform_handshake(
|
||||
transport: &dyn MediaTransport,
|
||||
seed: &[u8; 32],
|
||||
alias: Option<&str>,
|
||||
) -> Result<Box<dyn CryptoSession>, anyhow::Error> {
|
||||
// 1. Create key exchange from identity seed
|
||||
let mut kx = WarzoneKeyExchange::from_identity_seed(seed);
|
||||
@@ -42,7 +41,6 @@ pub async fn perform_handshake(
|
||||
QualityProfile::DEGRADED,
|
||||
QualityProfile::CATASTROPHIC,
|
||||
],
|
||||
alias: alias.map(|s| s.to_string()),
|
||||
};
|
||||
transport.send_signal(&offer).await?;
|
||||
|
||||
|
||||
@@ -8,10 +8,6 @@
|
||||
|
||||
#[cfg(feature = "audio")]
|
||||
pub mod audio_io;
|
||||
#[cfg(feature = "audio")]
|
||||
pub mod audio_ring;
|
||||
#[cfg(feature = "vpio")]
|
||||
pub mod audio_vpio;
|
||||
pub mod bench;
|
||||
pub mod call;
|
||||
pub mod drift_test;
|
||||
|
||||
@@ -14,7 +14,7 @@ use crate::codec2_dec::Codec2Decoder;
|
||||
use crate::codec2_enc::Codec2Encoder;
|
||||
use crate::opus_dec::OpusDecoder;
|
||||
use crate::opus_enc::OpusEncoder;
|
||||
use crate::resample::{Downsampler48to8, Upsampler8to48};
|
||||
use crate::resample;
|
||||
|
||||
// ─── Helpers ─────────────────────────────────────────────────────────────────
|
||||
|
||||
@@ -54,7 +54,6 @@ pub struct AdaptiveEncoder {
|
||||
opus: OpusEncoder,
|
||||
codec2: Codec2Encoder,
|
||||
active: CodecId,
|
||||
downsampler: Downsampler48to8,
|
||||
}
|
||||
|
||||
impl AdaptiveEncoder {
|
||||
@@ -67,7 +66,6 @@ impl AdaptiveEncoder {
|
||||
opus,
|
||||
codec2,
|
||||
active: profile.codec,
|
||||
downsampler: Downsampler48to8::new(),
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -76,7 +74,7 @@ impl AudioEncoder for AdaptiveEncoder {
|
||||
fn encode(&mut self, pcm: &[i16], out: &mut [u8]) -> Result<usize, CodecError> {
|
||||
if is_codec2(self.active) {
|
||||
// Downsample 48 kHz → 8 kHz then encode via Codec2.
|
||||
let pcm_8k = self.downsampler.process(pcm);
|
||||
let pcm_8k = resample::resample_48k_to_8k(pcm);
|
||||
self.codec2.encode(&pcm_8k, out)
|
||||
} else {
|
||||
self.opus.encode(pcm, out)
|
||||
@@ -128,7 +126,6 @@ pub struct AdaptiveDecoder {
|
||||
opus: OpusDecoder,
|
||||
codec2: Codec2Decoder,
|
||||
active: CodecId,
|
||||
upsampler: Upsampler8to48,
|
||||
}
|
||||
|
||||
impl AdaptiveDecoder {
|
||||
@@ -141,7 +138,6 @@ impl AdaptiveDecoder {
|
||||
opus,
|
||||
codec2,
|
||||
active: profile.codec,
|
||||
upsampler: Upsampler8to48::new(),
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -153,7 +149,7 @@ impl AudioDecoder for AdaptiveDecoder {
|
||||
let c2_samples = self.codec2_frame_samples();
|
||||
let mut buf_8k = vec![0i16; c2_samples];
|
||||
let n = self.codec2.decode(encoded, &mut buf_8k)?;
|
||||
let pcm_48k = self.upsampler.process(&buf_8k[..n]);
|
||||
let pcm_48k = resample::resample_8k_to_48k(&buf_8k[..n]);
|
||||
let out_len = pcm_48k.len().min(pcm.len());
|
||||
pcm[..out_len].copy_from_slice(&pcm_48k[..out_len]);
|
||||
Ok(out_len)
|
||||
@@ -167,7 +163,7 @@ impl AudioDecoder for AdaptiveDecoder {
|
||||
let c2_samples = self.codec2_frame_samples();
|
||||
let mut buf_8k = vec![0i16; c2_samples];
|
||||
let n = self.codec2.decode_lost(&mut buf_8k)?;
|
||||
let pcm_48k = self.upsampler.process(&buf_8k[..n]);
|
||||
let pcm_48k = resample::resample_8k_to_48k(&buf_8k[..n]);
|
||||
let out_len = pcm_48k.len().min(pcm.len());
|
||||
pcm[..out_len].copy_from_slice(&pcm_48k[..out_len]);
|
||||
Ok(out_len)
|
||||
|
||||
@@ -1,335 +0,0 @@
|
||||
//! Acoustic Echo Cancellation — delay-compensated leaky NLMS with
|
||||
//! Geigel double-talk detection.
|
||||
//!
|
||||
//! Key insight: on a laptop, the round-trip audio latency (playout → speaker
|
||||
//! → air → mic → capture) is 30–50ms. The far-end reference must be delayed
|
||||
//! by this amount so the adaptive filter models the *echo path*, not the
|
||||
//! *system delay + echo path*.
|
||||
//!
|
||||
//! The leaky coefficient decay prevents the filter from diverging when the
|
||||
//! echo path changes (e.g. hand near laptop) or when the delay estimate
|
||||
//! is slightly off.
|
||||
|
||||
/// Delay-compensated leaky NLMS echo canceller with Geigel DTD.
pub struct EchoCanceller {
    // --- Adaptive filter ---
    /// FIR coefficients modelling the echo path (length = `filter_len`).
    filter: Vec<f32>,
    /// Number of taps; equals `sample_rate * filter_ms / 1000`.
    filter_len: usize,
    /// Circular buffer of far-end reference samples (after delay).
    far_buf: Vec<f32>,
    /// Next write position in `far_buf` (wraps modulo `filter_len`).
    far_pos: usize,
    /// NLMS step size.
    mu: f32,
    /// Leakage factor: coefficients are multiplied by (1 - leak) each frame.
    /// Prevents unbounded growth / divergence. 0.0001 is gentle.
    leak: f32,
    /// When false, `process_frame` is a no-op passthrough.
    enabled: bool,

    // --- Delay buffer ---
    /// Raw far-end samples before delay compensation.
    delay_ring: Vec<f32>,
    /// Monotonically increasing write counter (indexed modulo `delay_cap`).
    delay_write: usize,
    /// Monotonically increasing read counter (indexed modulo `delay_cap`).
    delay_read: usize,
    /// Delay in samples (e.g. 1920 = 40ms at 48kHz).
    delay_samples: usize,
    /// Capacity of the delay ring.
    delay_cap: usize,

    // --- Double-talk detection (Geigel) ---
    /// Peak far-end level over the last filter_len samples.
    far_peak: f32,
    /// Geigel threshold: if |near| > threshold * far_peak, assume double-talk.
    geigel_threshold: f32,
    /// Holdover counter: keep DTD active for a few frames after detection.
    dtd_holdover: u32,
    /// Number of frames the DTD holdover lasts once triggered.
    dtd_hold_frames: u32,
}
|
||||
|
||||
impl EchoCanceller {
    /// Create a new echo canceller with the default 40 ms delay compensation.
    ///
    /// * `sample_rate` — typically 48000
    /// * `filter_ms` — echo-tail length in milliseconds (60ms recommended)
    pub fn new(sample_rate: u32, filter_ms: u32) -> Self {
        Self::with_delay(sample_rate, filter_ms, 40)
    }

    /// Create an echo canceller with an explicit far-end delay.
    ///
    /// * `delay_ms` — far-end delay compensation in milliseconds (40ms for laptops)
    pub fn with_delay(sample_rate: u32, filter_ms: u32, delay_ms: u32) -> Self {
        let filter_len = (sample_rate as usize) * (filter_ms as usize) / 1000;
        let delay_samples = (sample_rate as usize) * (delay_ms as usize) / 1000;
        // Delay ring must hold at least delay_samples + one frame (960) of headroom.
        let delay_cap = delay_samples + (sample_rate as usize / 10); // +100ms headroom
        Self {
            filter: vec![0.0; filter_len],
            filter_len,
            far_buf: vec![0.0; filter_len],
            far_pos: 0,
            mu: 0.01,
            leak: 0.0001,
            enabled: true,

            delay_ring: vec![0.0; delay_cap],
            delay_write: 0,
            delay_read: 0,
            delay_samples,
            delay_cap,

            far_peak: 0.0,
            geigel_threshold: 0.7,
            dtd_holdover: 0,
            dtd_hold_frames: 5,
        }
    }

    /// Feed far-end (speaker) samples. These go into the delay buffer first;
    /// once enough samples have accumulated, they are released to the filter's
    /// circular buffer with the correct delay offset.
    ///
    /// NOTE(review): `delay_write`/`delay_read` grow without bound and are only
    /// reduced modulo `delay_cap` on access — fine on 64-bit, but on a 32-bit
    /// target they would wrap after ~24h of 48 kHz audio; confirm target width.
    pub fn feed_farend(&mut self, farend: &[i16]) {
        // Write raw samples into the delay ring.
        for &s in farend {
            self.delay_ring[self.delay_write % self.delay_cap] = s as f32;
            self.delay_write += 1;
        }

        // Release delayed samples to the filter's far-end buffer.
        while self.delay_available() >= 1 {
            let sample = self.delay_ring[self.delay_read % self.delay_cap];
            self.delay_read += 1;

            self.far_buf[self.far_pos] = sample;
            self.far_pos = (self.far_pos + 1) % self.filter_len;

            // Track peak far-end level for Geigel DTD.
            let abs_s = sample.abs();
            if abs_s > self.far_peak {
                self.far_peak = abs_s;
            }
        }

        // Decay far_peak slowly (avoids stale peak from a loud burst long ago).
        self.far_peak *= 0.9995;
    }

    /// Number of delayed samples available to release.
    ///
    /// Returns how many samples are buffered *beyond* the configured delay;
    /// until `delay_samples` have accumulated, nothing is released (0).
    fn delay_available(&self) -> usize {
        let buffered = self.delay_write - self.delay_read;
        if buffered > self.delay_samples {
            buffered - self.delay_samples
        } else {
            0
        }
    }

    /// Process a near-end (microphone) frame, removing the estimated echo.
    ///
    /// The frame is modified in place. Returns an ERLE-like ratio
    /// sqrt(input energy / output energy): > 1.0 means energy was removed;
    /// 1.0 is returned immediately when the canceller is disabled, and a
    /// capped 100.0 when the residual is essentially silent.
    pub fn process_frame(&mut self, nearend: &mut [i16]) -> f32 {
        if !self.enabled {
            return 1.0;
        }

        let n = nearend.len();
        let fl = self.filter_len;

        // --- Geigel double-talk detection ---
        // If any near-end sample exceeds threshold * far_peak, assume
        // the local speaker is active and freeze adaptation.
        // (The `far_peak > 100.0` guard avoids triggering on noise floor.)
        let mut is_doubletalk = self.dtd_holdover > 0;
        if !is_doubletalk {
            let threshold_level = self.geigel_threshold * self.far_peak;
            for &s in nearend.iter() {
                if (s as f32).abs() > threshold_level && self.far_peak > 100.0 {
                    is_doubletalk = true;
                    self.dtd_holdover = self.dtd_hold_frames;
                    break;
                }
            }
        }
        if self.dtd_holdover > 0 {
            self.dtd_holdover -= 1;
        }

        // Check if far-end is active (otherwise nothing to cancel).
        let far_active = self.far_peak > 100.0;

        // --- Leaky coefficient decay ---
        // Applied once per frame for efficiency.
        let decay = 1.0 - self.leak;
        for c in self.filter.iter_mut() {
            *c *= decay;
        }

        // Frame energies for the ERLE-style return value.
        let mut sum_near_sq: f64 = 0.0;
        let mut sum_err_sq: f64 = 0.0;

        for i in 0..n {
            let near_f = nearend[i] as f32;

            // Position of far-end "now" for this near-end sample.
            // `fl * ((n / fl) + 2)` adds enough whole wraps that the
            // `+ i - n` term cannot underflow usize before the `% fl`.
            // NOTE(review): this assumes far_pos corresponds to "end of the
            // current near-end frame" — verify against the capture/playout
            // clock alignment in the caller.
            let base = (self.far_pos + fl * ((n / fl) + 2) + i - n) % fl;

            // --- Echo estimation: dot(filter, far_end_window) ---
            let mut echo_est: f32 = 0.0;
            let mut power: f32 = 0.0;

            for k in 0..fl {
                let fe_idx = (base + fl - k) % fl;
                let fe = self.far_buf[fe_idx];
                echo_est += self.filter[k] * fe;
                power += fe * fe;
            }

            let error = near_f - echo_est;

            // --- NLMS adaptation (only when far-end active & no double-talk) ---
            // `power > 10.0` skips adaptation on a near-empty reference window;
            // `+ 1.0` regularises the normalisation denominator.
            if far_active && !is_doubletalk && power > 10.0 {
                let step = self.mu * error / (power + 1.0);
                for k in 0..fl {
                    let fe_idx = (base + fl - k) % fl;
                    self.filter[k] += step * self.far_buf[fe_idx];
                }
            }

            // Hard-limit the residual into i16 range before writing back.
            let out = error.clamp(-32768.0, 32767.0);
            nearend[i] = out as i16;

            sum_near_sq += (near_f as f64).powi(2);
            sum_err_sq += (out as f64).powi(2);
        }

        // Near-silent residual → report a capped, large cancellation ratio.
        if sum_err_sq < 1.0 {
            100.0
        } else {
            (sum_near_sq / sum_err_sq).sqrt() as f32
        }
    }

    /// Enable or disable echo cancellation (disabled = passthrough).
    pub fn set_enabled(&mut self, enabled: bool) {
        self.enabled = enabled;
    }

    /// Returns whether cancellation is currently enabled.
    pub fn is_enabled(&self) -> bool {
        self.enabled
    }

    /// Reset all adaptive state (filter, buffers, DTD) to the initial zeros.
    /// Configuration (rates, delay, thresholds) is preserved.
    pub fn reset(&mut self) {
        self.filter.iter_mut().for_each(|c| *c = 0.0);
        self.far_buf.iter_mut().for_each(|s| *s = 0.0);
        self.far_pos = 0;
        self.far_peak = 0.0;
        self.delay_ring.iter_mut().for_each(|s| *s = 0.0);
        self.delay_write = 0;
        self.delay_read = 0;
        self.dtd_holdover = 0;
    }
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    // Sizing: sample counts derive from sample_rate * ms / 1000.
    #[test]
    fn creates_with_correct_sizes() {
        let aec = EchoCanceller::with_delay(48000, 60, 40);
        assert_eq!(aec.filter_len, 2880); // 60ms @ 48kHz
        assert_eq!(aec.delay_samples, 1920); // 40ms @ 48kHz
    }

    // Disabled canceller must not touch the samples at all.
    #[test]
    fn passthrough_when_disabled() {
        let mut aec = EchoCanceller::new(48000, 60);
        aec.set_enabled(false);

        let original: Vec<i16> = (0..960).map(|i| (i * 10) as i16).collect();
        let mut frame = original.clone();
        aec.process_frame(&mut frame);
        assert_eq!(frame, original);
    }

    // All-zero far-end and near-end must produce all-zero output.
    #[test]
    fn silence_passthrough() {
        let mut aec = EchoCanceller::with_delay(48000, 30, 0);
        aec.feed_farend(&vec![0i16; 960]);
        let mut frame = vec![0i16; 960];
        aec.process_frame(&mut frame);
        assert!(frame.iter().all(|&s| s == 0));
    }

    #[test]
    fn reduces_echo_with_no_delay() {
        // Simulate: far-end plays, echo arrives at mic attenuated by ~50%
        // (realistic — speaker to mic on laptop loses volume).
        let mut aec = EchoCanceller::with_delay(48000, 10, 0);

        let frame_len = 480;
        // Continuous 300 Hz tone; `offset` keeps phase across frames.
        let make_tone = |offset: usize| -> Vec<i16> {
            (0..frame_len)
                .map(|i| {
                    let t = (offset + i) as f64 / 48000.0;
                    (5000.0 * (2.0 * std::f64::consts::PI * 300.0 * t).sin()) as i16
                })
                .collect()
        };

        let mut last_erle = 1.0f32;
        for frame_idx in 0..100 {
            let farend = make_tone(frame_idx * frame_len);
            aec.feed_farend(&farend);

            // Near-end = attenuated copy of far-end (echo at ~50% volume).
            let mut nearend: Vec<i16> = farend.iter().map(|&s| s / 2).collect();
            last_erle = aec.process_frame(&mut nearend);
        }

        // After 100 frames of adaptation, some echo must have been removed.
        assert!(
            last_erle > 1.0,
            "expected ERLE > 1.0 after adaptation, got {last_erle}"
        );
    }

    #[test]
    fn preserves_nearend_during_doubletalk() {
        let mut aec = EchoCanceller::with_delay(48000, 30, 0);

        // 440 Hz near-end speech-stand-in at high level.
        let frame_len = 960;
        let nearend: Vec<i16> = (0..frame_len)
            .map(|i| {
                let t = i as f64 / 48000.0;
                (10000.0 * (2.0 * std::f64::consts::PI * 440.0 * t).sin()) as i16
            })
            .collect();

        // Feed silence as far-end (no echo source).
        aec.feed_farend(&vec![0i16; frame_len]);

        let mut frame = nearend.clone();
        aec.process_frame(&mut frame);

        // With no far-end energy, near-end energy must survive ~intact.
        let input_energy: f64 = nearend.iter().map(|&s| (s as f64).powi(2)).sum();
        let output_energy: f64 = frame.iter().map(|&s| (s as f64).powi(2)).sum();
        let ratio = output_energy / input_energy;

        assert!(
            ratio > 0.8,
            "near-end speech should be preserved, energy ratio = {ratio:.3}"
        );
    }

    #[test]
    fn delay_buffer_holds_samples() {
        let mut aec = EchoCanceller::with_delay(48000, 10, 20);
        // 20ms delay = 960 samples @ 48kHz.
        // After feeding, feed_farend auto-drains available samples to far_buf.
        // So delay_available() is always 0 after feed_farend returns.
        // Instead, verify far_pos advances only after the delay is filled.

        // Feed 960 samples (= delay amount). No samples released yet.
        aec.feed_farend(&vec![1i16; 960]);
        // far_buf should still be all zeros (nothing released).
        assert!(aec.far_buf.iter().all(|&s| s == 0.0), "nothing should be released yet");

        // Feed 480 more. 480 should be released to far_buf.
        aec.feed_farend(&vec![2i16; 480]);
        let non_zero = aec.far_buf.iter().filter(|&&s| s != 0.0).count();
        assert!(non_zero > 0, "samples should have been released to far_buf");
    }
}
|
||||
@@ -1,219 +0,0 @@
|
||||
//! Automatic Gain Control (AGC) with two-stage smoothing.
|
||||
//!
|
||||
//! Uses a fast attack / slow release envelope follower to keep the
|
||||
//! output signal near a configurable target RMS level. This prevents
|
||||
//! both clipping (when the speaker is too loud) and inaudibility (when
|
||||
//! the speaker is too quiet or far from the mic).
|
||||
|
||||
/// Two-stage automatic gain control.
///
/// The gain is adjusted per-frame based on the measured RMS energy,
/// with a fast attack (gain decreases quickly when signal gets louder)
/// and a slow release (gain increases gradually when signal gets quieter).
pub struct AutoGainControl {
    /// RMS level the output is steered toward (i16 units, ~-20 dBFS at 3000).
    target_rms: f64,
    /// Smoothed gain currently applied to samples (1.0 = unity).
    current_gain: f64,
    /// Lower clamp for the instantaneous desired gain.
    min_gain: f64,
    /// Upper clamp for the instantaneous desired gain.
    max_gain: f64,
    /// Smoothing coefficient used when the gain must *decrease* (fast attack).
    attack_alpha: f64,
    /// Smoothing coefficient used when the gain may *increase* (slow release).
    release_alpha: f64,
    /// When false, `process_frame` leaves samples untouched.
    enabled: bool,
}
|
||||
|
||||
impl AutoGainControl {
    /// Create a new AGC with sensible VoIP defaults.
    pub fn new() -> Self {
        Self {
            target_rms: 3000.0, // ~-20 dBFS for i16
            current_gain: 1.0,
            min_gain: 0.5,
            max_gain: 32.0,
            attack_alpha: 0.3, // fast attack
            release_alpha: 0.02, // slow release
            enabled: true,
        }
    }

    /// Process a frame of PCM audio in-place, applying gain adjustment.
    ///
    /// Measures frame RMS, steers `current_gain` toward `target_rms / rms`
    /// (fast when reducing, slow when raising), then scales every sample with
    /// a hard limit. No-op when disabled or when the frame is near-silent.
    pub fn process_frame(&mut self, pcm: &mut [i16]) {
        if !self.enabled {
            return;
        }

        // Compute RMS of the frame.
        let rms = Self::compute_rms(pcm);

        // Don't amplify near-silence — it would just boost noise.
        // (Gain is also left untouched, so silence can't ramp it up.)
        if rms < 10.0 {
            return;
        }

        // Desired instantaneous gain.
        let desired_gain = (self.target_rms / rms).clamp(self.min_gain, self.max_gain);

        // Smooth the gain transition.
        let alpha = if desired_gain < self.current_gain {
            // Signal is louder than target → reduce gain quickly (attack).
            self.attack_alpha
        } else {
            // Signal is quieter than target → raise gain slowly (release).
            self.release_alpha
        };

        // One-pole smoothing toward the desired gain.
        self.current_gain = self.current_gain * (1.0 - alpha) + desired_gain * alpha;

        // Apply gain to each sample with hard limiting at ±31000 (~0.946 * i16::MAX).
        const LIMIT: f64 = 31000.0;
        let gain = self.current_gain;
        for sample in pcm.iter_mut() {
            let amplified = (*sample as f64) * gain;
            let clamped = amplified.clamp(-LIMIT, LIMIT);
            *sample = clamped as i16;
        }
    }

    /// Enable or disable the AGC.
    pub fn set_enabled(&mut self, enabled: bool) {
        self.enabled = enabled;
    }

    /// Returns whether the AGC is currently enabled.
    pub fn is_enabled(&self) -> bool {
        self.enabled
    }

    /// Current gain expressed in dB (20·log10; 0 dB at unity gain).
    pub fn current_gain_db(&self) -> f64 {
        20.0 * self.current_gain.log10()
    }

    /// Compute the RMS (root mean square) of a PCM buffer.
    /// Returns 0.0 for an empty buffer.
    fn compute_rms(pcm: &[i16]) -> f64 {
        if pcm.is_empty() {
            return 0.0;
        }
        let sum_sq: f64 = pcm.iter().map(|&s| (s as f64) * (s as f64)).sum();
        (sum_sq / pcm.len() as f64).sqrt()
    }
}
|
||||
|
||||
impl Default for AutoGainControl {
|
||||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    // Fresh AGC: enabled, unity gain.
    #[test]
    fn agc_creates_with_defaults() {
        let agc = AutoGainControl::new();
        assert!(agc.is_enabled());
        assert!((agc.current_gain - 1.0).abs() < f64::EPSILON);
    }

    // Disabled AGC must not modify samples.
    #[test]
    fn agc_passthrough_when_disabled() {
        let mut agc = AutoGainControl::new();
        agc.set_enabled(false);

        let original: Vec<i16> = (0..960).map(|i| (i * 5) as i16).collect();
        let mut frame = original.clone();
        agc.process_frame(&mut frame);

        assert_eq!(frame, original);
    }

    // The rms < 10.0 guard: silence is neither amplified nor adapts the gain.
    #[test]
    fn agc_does_not_amplify_silence() {
        let mut agc = AutoGainControl::new();
        let mut frame = vec![0i16; 960];
        agc.process_frame(&mut frame);
        assert!(frame.iter().all(|&s| s == 0));
        // Gain should remain at initial value.
        assert!((agc.current_gain - 1.0).abs() < f64::EPSILON);
    }

    #[test]
    fn agc_amplifies_quiet_signal() {
        let mut agc = AutoGainControl::new();

        // Very quiet signal (RMS ~ 50).
        let mut frame: Vec<i16> = (0..960)
            .map(|i| {
                let t = i as f64 / 48000.0;
                (50.0 * (2.0 * std::f64::consts::PI * 440.0 * t).sin()) as i16
            })
            .collect();

        // Process several frames to let the gain ramp up.
        // Note: each iteration feeds back the amplified output.
        for _ in 0..50 {
            let mut f = frame.clone();
            agc.process_frame(&mut f);
            frame = f;
        }

        // Gain should have increased past 1.0.
        assert!(
            agc.current_gain > 1.05,
            "expected gain > 1.05 for quiet signal, got {}",
            agc.current_gain
        );
    }

    #[test]
    fn agc_attenuates_loud_signal() {
        let mut agc = AutoGainControl::new();

        // Loud signal (RMS ~ 20000).
        let frame: Vec<i16> = (0..960)
            .map(|i| {
                let t = i as f64 / 48000.0;
                (28000.0 * (2.0 * std::f64::consts::PI * 440.0 * t).sin()) as i16
            })
            .collect();

        // Process several frames (same input each time; output discarded).
        for _ in 0..20 {
            let mut f = frame.clone();
            agc.process_frame(&mut f);
        }

        // Gain should have decreased below 1.0.
        assert!(
            agc.current_gain < 1.0,
            "expected gain < 1.0 for loud signal, got {}",
            agc.current_gain
        );
    }

    // The hard limiter must cap output even when a high gain meets a loud frame.
    #[test]
    fn agc_output_within_limits() {
        let mut agc = AutoGainControl::new();
        // Force a high gain by processing many quiet frames first.
        for _ in 0..100 {
            let mut f: Vec<i16> = vec![100; 960];
            agc.process_frame(&mut f);
        }

        // Now send a louder frame — output should still be within ±31000.
        let mut frame: Vec<i16> = vec![20000; 960];
        agc.process_frame(&mut frame);
        assert!(
            frame.iter().all(|&s| s.abs() <= 31000),
            "output samples must be within ±31000"
        );
    }

    // 20·log10(1.0) == 0 dB.
    #[test]
    fn agc_gain_db_at_unity() {
        let agc = AutoGainControl::new();
        let db = agc.current_gain_db();
        assert!(
            db.abs() < 0.01,
            "expected ~0 dB at unity gain, got {db}"
        );
    }
}
|
||||
@@ -10,8 +10,6 @@
|
||||
//! trait-object encoders/decoders that handle adaptive switching internally.
|
||||
|
||||
pub mod adaptive;
|
||||
pub mod aec;
|
||||
pub mod agc;
|
||||
pub mod codec2_dec;
|
||||
pub mod codec2_enc;
|
||||
pub mod denoise;
|
||||
@@ -21,8 +19,6 @@ pub mod resample;
|
||||
pub mod silence;
|
||||
|
||||
pub use adaptive::{AdaptiveDecoder, AdaptiveEncoder};
|
||||
pub use aec::EchoCanceller;
|
||||
pub use agc::AutoGainControl;
|
||||
pub use denoise::NoiseSupressor;
|
||||
pub use silence::{ComfortNoise, SilenceDetector};
|
||||
pub use wzp_proto::{AudioDecoder, AudioEncoder, CodecId, QualityProfile};
|
||||
|
||||
@@ -40,11 +40,6 @@ impl OpusEncoder {
|
||||
.set_signal(Signal::Voice)
|
||||
.map_err(|e| CodecError::EncodeFailed(format!("set signal: {e}")))?;
|
||||
|
||||
// Default complexity 7 — good quality/CPU trade-off for VoIP
|
||||
enc.inner
|
||||
.set_complexity(7)
|
||||
.map_err(|e| CodecError::EncodeFailed(format!("set complexity: {e}")))?;
|
||||
|
||||
Ok(enc)
|
||||
}
|
||||
|
||||
@@ -61,21 +56,6 @@ impl OpusEncoder {
|
||||
pub fn frame_samples(&self) -> usize {
|
||||
(48_000 * self.frame_duration_ms as usize) / 1000
|
||||
}
|
||||
|
||||
/// Set the encoder complexity (0-10). Higher values produce better quality
|
||||
/// at the cost of more CPU. Default is 7.
|
||||
pub fn set_complexity(&mut self, complexity: i32) {
|
||||
let c = (complexity as u8).min(10);
|
||||
let _ = self.inner.set_complexity(c);
|
||||
}
|
||||
|
||||
/// Hint the encoder about expected packet loss percentage (0-100).
|
||||
///
|
||||
/// Higher values cause the encoder to use more redundancy to survive
|
||||
/// packet loss, at the expense of slightly higher bitrate.
|
||||
pub fn set_expected_loss(&mut self, loss_pct: u8) {
|
||||
let _ = self.inner.set_packet_loss_perc(loss_pct.min(100));
|
||||
}
|
||||
}
|
||||
|
||||
impl AudioEncoder for OpusEncoder {
|
||||
|
||||
@@ -1,258 +1,55 @@
|
||||
//! Windowed-sinc FIR resampler for 48 kHz <-> 8 kHz conversion.
|
||||
//! Simple linear resampler for 48 kHz <-> 8 kHz conversion.
|
||||
//!
|
||||
//! Provides both stateless free functions (backward-compatible) and stateful
|
||||
//! `Downsampler48to8` / `Upsampler8to48` structs that maintain overlap history
|
||||
//! between frames for glitch-free streaming.
|
||||
//! These are basic implementations suitable for voice. For higher quality,
|
||||
//! replace with the `rubato` crate later.
|
||||
|
||||
use std::f64::consts::PI;
|
||||
|
||||
// ─── FIR kernel parameters ─────────────────────────────────────────────────
|
||||
|
||||
/// Number of FIR taps in the anti-alias / interpolation filter.
|
||||
const FIR_TAPS: usize = 48;
|
||||
/// Kaiser window beta parameter — controls sidelobe attenuation.
|
||||
const KAISER_BETA: f64 = 8.0;
|
||||
/// Cutoff frequency in Hz for the low-pass filter (just below 4 kHz Nyquist of 8 kHz).
|
||||
const CUTOFF_HZ: f64 = 3800.0;
|
||||
/// Working sample rate in Hz.
|
||||
const SAMPLE_RATE: f64 = 48000.0;
|
||||
/// Decimation / interpolation ratio between 48 kHz and 8 kHz.
|
||||
const RATIO: usize = 6;
|
||||
|
||||
// ─── Kaiser window helpers ─────────────────────────────────────────────────
|
||||
|
||||
/// Zeroth-order modified Bessel function of the first kind, I₀(x).
|
||||
/// Downsample from 48 kHz to 8 kHz (6:1 decimation with averaging).
|
||||
///
|
||||
/// Computed via the well-known power-series expansion, converging rapidly
|
||||
/// for the moderate values of x used in Kaiser window design.
|
||||
fn bessel_i0(x: f64) -> f64 {
|
||||
let mut sum = 1.0f64;
|
||||
let mut term = 1.0f64;
|
||||
let half_x = x / 2.0;
|
||||
for k in 1..=25 {
|
||||
term *= (half_x / k as f64) * (half_x / k as f64);
|
||||
sum += term;
|
||||
if term < 1e-12 * sum {
|
||||
break;
|
||||
}
|
||||
/// Each output sample is the average of 6 consecutive input samples,
|
||||
/// providing basic anti-aliasing via a box filter.
|
||||
pub fn resample_48k_to_8k(input: &[i16]) -> Vec<i16> {
|
||||
const RATIO: usize = 6;
|
||||
let out_len = input.len() / RATIO;
|
||||
let mut output = Vec::with_capacity(out_len);
|
||||
|
||||
for chunk in input.chunks_exact(RATIO) {
|
||||
let sum: i32 = chunk.iter().map(|&s| s as i32).sum();
|
||||
output.push((sum / RATIO as i32) as i16);
|
||||
}
|
||||
sum
|
||||
|
||||
output
|
||||
}
|
||||
|
||||
/// Build a windowed-sinc low-pass FIR kernel.
|
||||
/// Upsample from 8 kHz to 48 kHz (1:6 interpolation with linear interp).
|
||||
///
|
||||
/// Returns `FIR_TAPS` coefficients normalised so that the DC gain is exactly 1.0.
|
||||
fn build_fir_kernel() -> [f64; FIR_TAPS] {
|
||||
let mut kernel = [0.0f64; FIR_TAPS];
|
||||
let m = (FIR_TAPS - 1) as f64;
|
||||
let fc = CUTOFF_HZ / SAMPLE_RATE; // normalised cutoff (0..0.5)
|
||||
let beta_denom = bessel_i0(KAISER_BETA);
|
||||
/// Linearly interpolates between each pair of input samples to produce
|
||||
/// 6 output samples per input sample.
|
||||
pub fn resample_8k_to_48k(input: &[i16]) -> Vec<i16> {
|
||||
const RATIO: usize = 6;
|
||||
if input.is_empty() {
|
||||
return Vec::new();
|
||||
}
|
||||
|
||||
for i in 0..FIR_TAPS {
|
||||
// Sinc
|
||||
let n = i as f64 - m / 2.0;
|
||||
let sinc = if n.abs() < 1e-12 {
|
||||
2.0 * fc
|
||||
let out_len = input.len() * RATIO;
|
||||
let mut output = Vec::with_capacity(out_len);
|
||||
|
||||
for i in 0..input.len() {
|
||||
let current = input[i] as i32;
|
||||
let next = if i + 1 < input.len() {
|
||||
input[i + 1] as i32
|
||||
} else {
|
||||
(2.0 * PI * fc * n).sin() / (PI * n)
|
||||
current // hold last sample
|
||||
};
|
||||
|
||||
// Kaiser window
|
||||
let t = 2.0 * i as f64 / m - 1.0; // range [-1, 1]
|
||||
let kaiser = bessel_i0(KAISER_BETA * (1.0 - t * t).max(0.0).sqrt()) / beta_denom;
|
||||
|
||||
kernel[i] = sinc * kaiser;
|
||||
}
|
||||
|
||||
// Normalise to unity DC gain.
|
||||
let sum: f64 = kernel.iter().sum();
|
||||
if sum.abs() > 1e-15 {
|
||||
for k in kernel.iter_mut() {
|
||||
*k /= sum;
|
||||
for j in 0..RATIO {
|
||||
let interp = current + (next - current) * j as i32 / RATIO as i32;
|
||||
output.push(interp as i16);
|
||||
}
|
||||
}
|
||||
|
||||
kernel
|
||||
output
|
||||
}
|
||||
|
||||
// ─── Stateful Downsampler 48→8 ─────────────────────────────────────────────
|
||||
|
||||
/// Stateful polyphase FIR downsampler from 48 kHz to 8 kHz.
|
||||
///
|
||||
/// Maintains `FIR_TAPS - 1` samples of history between successive calls to
|
||||
/// `process()` for seamless frame boundaries.
|
||||
pub struct Downsampler48to8 {
|
||||
kernel: [f64; FIR_TAPS],
|
||||
history: Vec<f64>,
|
||||
}
|
||||
|
||||
impl Downsampler48to8 {
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
kernel: build_fir_kernel(),
|
||||
history: vec![0.0; FIR_TAPS - 1],
|
||||
}
|
||||
}
|
||||
|
||||
/// Downsample a block of 48 kHz samples to 8 kHz.
|
||||
///
|
||||
/// The input length should be a multiple of 6; any trailing samples that
|
||||
/// don't form a complete output sample are consumed into the history.
|
||||
pub fn process(&mut self, input: &[i16]) -> Vec<i16> {
|
||||
let hist_len = self.history.len(); // FIR_TAPS - 1
|
||||
let total_len = hist_len + input.len();
|
||||
|
||||
// Build a working buffer: history ++ input (as f64).
|
||||
let mut work = Vec::with_capacity(total_len);
|
||||
work.extend_from_slice(&self.history);
|
||||
work.extend(input.iter().map(|&s| s as f64));
|
||||
|
||||
let out_len = input.len() / RATIO;
|
||||
let mut output = Vec::with_capacity(out_len);
|
||||
|
||||
for i in 0..out_len {
|
||||
// The centre of the filter for output sample i sits at
|
||||
// position hist_len + i*RATIO in the work buffer (aligning
|
||||
// with the first new input sample at decimation phase 0).
|
||||
let centre = hist_len + i * RATIO;
|
||||
let start = centre + 1 - FIR_TAPS; // may be 0 for the first few
|
||||
|
||||
let mut acc = 0.0f64;
|
||||
for k in 0..FIR_TAPS {
|
||||
let idx = start + k;
|
||||
if idx < work.len() {
|
||||
acc += work[idx] * self.kernel[k];
|
||||
}
|
||||
}
|
||||
output.push(acc.round().clamp(-32768.0, 32767.0) as i16);
|
||||
}
|
||||
|
||||
// Update history: keep the last (FIR_TAPS - 1) samples from work.
|
||||
if work.len() >= hist_len {
|
||||
self.history
|
||||
.copy_from_slice(&work[work.len() - hist_len..]);
|
||||
} else {
|
||||
// Input was shorter than history — shift.
|
||||
let shift = hist_len - work.len();
|
||||
self.history.copy_within(shift.., 0);
|
||||
for (i, &v) in work.iter().enumerate() {
|
||||
self.history[hist_len - work.len() + i] = v;
|
||||
}
|
||||
}
|
||||
|
||||
output
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for Downsampler48to8 {
|
||||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
||||
|
||||
// ─── Stateful Upsampler 8→48 ───────────────────────────────────────────────
|
||||
|
||||
/// Stateful FIR upsampler from 8 kHz to 48 kHz.
|
||||
///
|
||||
/// Inserts zeros between input samples (zero-stuffing), then applies the
|
||||
/// low-pass FIR to remove imaging, with gain compensation of `RATIO`.
|
||||
pub struct Upsampler8to48 {
|
||||
kernel: [f64; FIR_TAPS],
|
||||
history: Vec<f64>,
|
||||
}
|
||||
|
||||
impl Upsampler8to48 {
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
kernel: build_fir_kernel(),
|
||||
history: vec![0.0; FIR_TAPS - 1],
|
||||
}
|
||||
}
|
||||
|
||||
/// Upsample a block of 8 kHz samples to 48 kHz.
|
||||
pub fn process(&mut self, input: &[i16]) -> Vec<i16> {
|
||||
let hist_len = self.history.len(); // FIR_TAPS - 1
|
||||
|
||||
// Zero-stuff: insert RATIO-1 zeros between each input sample.
|
||||
let stuffed_len = input.len() * RATIO;
|
||||
let total_len = hist_len + stuffed_len;
|
||||
|
||||
let mut work = Vec::with_capacity(total_len);
|
||||
work.extend_from_slice(&self.history);
|
||||
for &s in input {
|
||||
work.push(s as f64);
|
||||
for _ in 1..RATIO {
|
||||
work.push(0.0);
|
||||
}
|
||||
}
|
||||
|
||||
let out_len = stuffed_len;
|
||||
let mut output = Vec::with_capacity(out_len);
|
||||
|
||||
// The gain factor compensates for the zeros introduced by stuffing.
|
||||
let gain = RATIO as f64;
|
||||
|
||||
for i in 0..out_len {
|
||||
let centre = hist_len + i;
|
||||
let start = centre + 1 - FIR_TAPS;
|
||||
|
||||
let mut acc = 0.0f64;
|
||||
for k in 0..FIR_TAPS {
|
||||
let idx = start + k;
|
||||
if idx < work.len() {
|
||||
acc += work[idx] * self.kernel[k];
|
||||
}
|
||||
}
|
||||
acc *= gain;
|
||||
output.push(acc.round().clamp(-32768.0, 32767.0) as i16);
|
||||
}
|
||||
|
||||
// Update history.
|
||||
if work.len() >= hist_len {
|
||||
self.history
|
||||
.copy_from_slice(&work[work.len() - hist_len..]);
|
||||
} else {
|
||||
let shift = hist_len - work.len();
|
||||
self.history.copy_within(shift.., 0);
|
||||
for (i, &v) in work.iter().enumerate() {
|
||||
self.history[hist_len - work.len() + i] = v;
|
||||
}
|
||||
}
|
||||
|
||||
output
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for Upsampler8to48 {
|
||||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
||||
|
||||
// ─── Backward-compatible free functions ─────────────────────────────────────
|
||||
|
||||
/// Downsample from 48 kHz to 8 kHz (6:1 decimation with FIR anti-alias filter).
|
||||
///
|
||||
/// This is a convenience wrapper that creates a temporary [`Downsampler48to8`].
|
||||
/// For streaming use, prefer the stateful struct to avoid edge artefacts between
|
||||
/// frames.
|
||||
pub fn resample_48k_to_8k(input: &[i16]) -> Vec<i16> {
|
||||
let mut ds = Downsampler48to8::new();
|
||||
ds.process(input)
|
||||
}
|
||||
|
||||
/// Upsample from 8 kHz to 48 kHz (1:6 interpolation with FIR imaging filter).
|
||||
///
|
||||
/// This is a convenience wrapper that creates a temporary [`Upsampler8to48`].
|
||||
/// For streaming use, prefer the stateful struct to avoid edge artefacts between
|
||||
/// frames.
|
||||
pub fn resample_8k_to_48k(input: &[i16]) -> Vec<i16> {
|
||||
let mut us = Upsampler8to48::new();
|
||||
us.process(input)
|
||||
}
|
||||
|
||||
// ─── Tests ──────────────────────────────────────────────────────────────────
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
@@ -269,28 +66,12 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn dc_signal_preserved() {
|
||||
// A constant signal should survive resampling (approximately).
|
||||
// A constant signal should survive resampling
|
||||
let input = vec![1000i16; 960];
|
||||
let down = resample_48k_to_8k(&input);
|
||||
// Allow some edge transient — check that the middle samples are close.
|
||||
let mid_start = down.len() / 4;
|
||||
let mid_end = 3 * down.len() / 4;
|
||||
for &s in &down[mid_start..mid_end] {
|
||||
assert!(
|
||||
(s - 1000).abs() < 50,
|
||||
"DC downsampled sample {s} too far from 1000"
|
||||
);
|
||||
}
|
||||
|
||||
assert!(down.iter().all(|&s| s == 1000));
|
||||
let up = resample_8k_to_48k(&down);
|
||||
let mid_start_up = up.len() / 4;
|
||||
let mid_end_up = 3 * up.len() / 4;
|
||||
for &s in &up[mid_start_up..mid_end_up] {
|
||||
assert!(
|
||||
(s - 1000).abs() < 100,
|
||||
"DC upsampled sample {s} too far from 1000"
|
||||
);
|
||||
}
|
||||
assert!(up.iter().all(|&s| s == 1000));
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -298,40 +79,4 @@ mod tests {
|
||||
assert!(resample_48k_to_8k(&[]).is_empty());
|
||||
assert!(resample_8k_to_48k(&[]).is_empty());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn stateful_downsampler_produces_correct_length() {
|
||||
let mut ds = Downsampler48to8::new();
|
||||
let out = ds.process(&vec![0i16; 960]);
|
||||
assert_eq!(out.len(), 160);
|
||||
let out2 = ds.process(&vec![0i16; 960]);
|
||||
assert_eq!(out2.len(), 160);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn stateful_upsampler_produces_correct_length() {
|
||||
let mut us = Upsampler8to48::new();
|
||||
let out = us.process(&vec![0i16; 160]);
|
||||
assert_eq!(out.len(), 960);
|
||||
let out2 = us.process(&vec![0i16; 160]);
|
||||
assert_eq!(out2.len(), 960);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn fir_kernel_has_unity_dc_gain() {
|
||||
let kernel = build_fir_kernel();
|
||||
let sum: f64 = kernel.iter().sum();
|
||||
assert!(
|
||||
(sum - 1.0).abs() < 1e-10,
|
||||
"FIR kernel DC gain should be 1.0, got {sum}"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn bessel_i0_known_values() {
|
||||
// I₀(0) = 1
|
||||
assert!((bessel_i0(0.0) - 1.0).abs() < 1e-12);
|
||||
// I₀(1) ≈ 1.2660658
|
||||
assert!((bessel_i0(1.0) - 1.2660658).abs() < 1e-5);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
use std::collections::BTreeMap;
|
||||
use std::time::{Duration, Instant};
|
||||
|
||||
use crate::packet::MediaPacket;
|
||||
|
||||
@@ -21,29 +20,19 @@ pub struct AdaptivePlayoutDelay {
|
||||
max_delay: usize,
|
||||
/// Exponential moving average of inter-packet arrival jitter (ms).
|
||||
jitter_ema: f64,
|
||||
/// EMA smoothing factor for jitter increases (fast reaction).
|
||||
alpha_up: f64,
|
||||
/// EMA smoothing factor for jitter decreases (slow decay).
|
||||
alpha_down: f64,
|
||||
/// EMA smoothing factor (0.0-1.0, lower = smoother).
|
||||
alpha: f64,
|
||||
/// Last packet arrival timestamp (for computing inter-arrival jitter).
|
||||
last_arrival_ms: Option<u64>,
|
||||
/// Last packet expected timestamp.
|
||||
last_expected_ms: Option<u64>,
|
||||
/// Safety margin added to jitter-derived target (in packets).
|
||||
safety_margin: f64,
|
||||
/// Instant when a jitter spike was detected (handoff detection).
|
||||
spike_detected_at: Option<Instant>,
|
||||
/// Duration to hold max_delay after a spike is detected.
|
||||
spike_cooldown: Duration,
|
||||
/// Multiplier of jitter_ema that constitutes a spike.
|
||||
spike_threshold_multiplier: f64,
|
||||
}
|
||||
|
||||
/// Frame duration in milliseconds (20ms Opus/Codec2 frames).
|
||||
const FRAME_DURATION_MS: f64 = 20.0;
|
||||
/// Default safety margin in packets.
|
||||
const DEFAULT_SAFETY_MARGIN: f64 = 2.0;
|
||||
/// Default EMA smoothing factor (used for both up/down in non-mobile mode).
|
||||
/// Safety margin added to jitter-derived target (in packets).
|
||||
const SAFETY_MARGIN_PACKETS: f64 = 2.0;
|
||||
/// Default EMA smoothing factor.
|
||||
const DEFAULT_ALPHA: f64 = 0.05;
|
||||
|
||||
impl AdaptivePlayoutDelay {
|
||||
@@ -57,14 +46,9 @@ impl AdaptivePlayoutDelay {
|
||||
min_delay,
|
||||
max_delay,
|
||||
jitter_ema: 0.0,
|
||||
alpha_up: DEFAULT_ALPHA,
|
||||
alpha_down: DEFAULT_ALPHA,
|
||||
alpha: DEFAULT_ALPHA,
|
||||
last_arrival_ms: None,
|
||||
last_expected_ms: None,
|
||||
safety_margin: DEFAULT_SAFETY_MARGIN,
|
||||
spike_detected_at: None,
|
||||
spike_cooldown: Duration::from_secs(2),
|
||||
spike_threshold_multiplier: 3.0,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -80,38 +64,13 @@ impl AdaptivePlayoutDelay {
|
||||
let expected_delta = expected_ms as f64 - last_expected as f64;
|
||||
let jitter = (actual_delta - expected_delta).abs();
|
||||
|
||||
// Spike detection: check before EMA update
|
||||
if self.jitter_ema > 0.0
|
||||
&& jitter > self.jitter_ema * self.spike_threshold_multiplier
|
||||
{
|
||||
self.spike_detected_at = Some(Instant::now());
|
||||
}
|
||||
// Update EMA
|
||||
self.jitter_ema = self.alpha * jitter + (1.0 - self.alpha) * self.jitter_ema;
|
||||
|
||||
// Asymmetric EMA update
|
||||
let alpha = if jitter > self.jitter_ema {
|
||||
self.alpha_up
|
||||
} else {
|
||||
self.alpha_down
|
||||
};
|
||||
self.jitter_ema = alpha * jitter + (1.0 - alpha) * self.jitter_ema;
|
||||
|
||||
// Check if spike cooldown has expired
|
||||
if let Some(spike_time) = self.spike_detected_at {
|
||||
if spike_time.elapsed() >= self.spike_cooldown {
|
||||
self.spike_detected_at = None;
|
||||
}
|
||||
}
|
||||
|
||||
// If within spike cooldown, return max_delay
|
||||
if self.spike_detected_at.is_some() {
|
||||
self.target_delay = self.max_delay;
|
||||
} else {
|
||||
// Convert jitter estimate to target delay in packets
|
||||
let raw_target =
|
||||
(self.jitter_ema / FRAME_DURATION_MS).ceil() + self.safety_margin;
|
||||
self.target_delay =
|
||||
(raw_target as usize).clamp(self.min_delay, self.max_delay);
|
||||
}
|
||||
// Convert jitter estimate to target delay in packets
|
||||
let raw_target = (self.jitter_ema / FRAME_DURATION_MS).ceil() + SAFETY_MARGIN_PACKETS;
|
||||
self.target_delay =
|
||||
(raw_target as usize).clamp(self.min_delay, self.max_delay);
|
||||
}
|
||||
|
||||
self.last_arrival_ms = Some(arrival_ms);
|
||||
@@ -128,28 +87,6 @@ impl AdaptivePlayoutDelay {
|
||||
pub fn jitter_estimate_ms(&self) -> f64 {
|
||||
self.jitter_ema
|
||||
}
|
||||
|
||||
/// Enable or disable mobile mode, adjusting parameters for cellular networks.
|
||||
///
|
||||
/// Mobile mode uses:
|
||||
/// - Asymmetric alpha (fast up=0.3, slow down=0.02) for quicker spike detection
|
||||
/// - Higher safety margin (3.0 packets) to absorb handoff jitter
|
||||
/// - Spike detection with 2-second cooldown at 3x threshold
|
||||
pub fn set_mobile_mode(&mut self, enabled: bool) {
|
||||
if enabled {
|
||||
self.safety_margin = 3.0;
|
||||
self.alpha_up = 0.3;
|
||||
self.alpha_down = 0.02;
|
||||
self.spike_threshold_multiplier = 3.0;
|
||||
self.spike_cooldown = Duration::from_secs(2);
|
||||
} else {
|
||||
self.safety_margin = DEFAULT_SAFETY_MARGIN;
|
||||
self.alpha_up = DEFAULT_ALPHA;
|
||||
self.alpha_down = DEFAULT_ALPHA;
|
||||
self.spike_threshold_multiplier = 3.0;
|
||||
self.spike_cooldown = Duration::from_secs(2);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
@@ -454,11 +391,6 @@ impl JitterBuffer {
|
||||
self.adaptive.as_ref()
|
||||
}
|
||||
|
||||
/// Get a mutable reference to the adaptive playout delay estimator.
|
||||
pub fn adaptive_delay_mut(&mut self) -> Option<&mut AdaptivePlayoutDelay> {
|
||||
self.adaptive.as_mut()
|
||||
}
|
||||
|
||||
/// Adjust target depth based on observed jitter.
|
||||
pub fn set_target_depth(&mut self, depth: usize) {
|
||||
self.target_depth = depth.min(self.max_depth);
|
||||
@@ -788,29 +720,4 @@ mod tests {
|
||||
let ad = jb.adaptive_delay().unwrap();
|
||||
assert_eq!(ad.target_delay(), 3);
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------
|
||||
// Mobile mode tests
|
||||
// ---------------------------------------------------------------
|
||||
|
||||
#[test]
|
||||
fn mobile_mode_increases_safety_margin() {
|
||||
let mut apd = AdaptivePlayoutDelay::new(3, 50);
|
||||
apd.set_mobile_mode(true);
|
||||
assert_eq!(apd.safety_margin, 3.0);
|
||||
assert_eq!(apd.alpha_up, 0.3);
|
||||
assert_eq!(apd.alpha_down, 0.02);
|
||||
|
||||
apd.set_mobile_mode(false);
|
||||
assert_eq!(apd.safety_margin, DEFAULT_SAFETY_MARGIN);
|
||||
assert_eq!(apd.alpha_up, DEFAULT_ALPHA);
|
||||
assert_eq!(apd.alpha_down, DEFAULT_ALPHA);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn mobile_mode_accessible_via_jitter_buffer() {
|
||||
let mut jb = JitterBuffer::new_adaptive(3, 50);
|
||||
jb.adaptive_delay_mut().unwrap().set_mobile_mode(true);
|
||||
assert_eq!(jb.adaptive_delay().unwrap().safety_margin, 3.0);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -26,9 +26,9 @@ pub use codec_id::{CodecId, QualityProfile};
|
||||
pub use error::*;
|
||||
pub use packet::{
|
||||
HangupReason, MediaHeader, MediaPacket, MiniFrameContext, MiniHeader, QualityReport,
|
||||
RoomParticipant, SignalMessage, TrunkEntry, TrunkFrame, FRAME_TYPE_FULL, FRAME_TYPE_MINI,
|
||||
SignalMessage, TrunkEntry, TrunkFrame, FRAME_TYPE_FULL, FRAME_TYPE_MINI,
|
||||
};
|
||||
pub use bandwidth::{BandwidthEstimator, CongestionState};
|
||||
pub use quality::{AdaptiveQualityController, NetworkContext, Tier};
|
||||
pub use quality::{AdaptiveQualityController, Tier};
|
||||
pub use session::{Session, SessionEvent, SessionState};
|
||||
pub use traits::*;
|
||||
|
||||
@@ -548,9 +548,6 @@ pub enum SignalMessage {
|
||||
signature: Vec<u8>,
|
||||
/// Supported quality profiles.
|
||||
supported_profiles: Vec<crate::QualityProfile>,
|
||||
/// Optional display name set by the caller.
|
||||
#[serde(default)]
|
||||
alias: Option<String>,
|
||||
},
|
||||
|
||||
/// Call acceptance (analogous to Warzone's WireMessage::CallAnswer).
|
||||
@@ -648,28 +645,6 @@ pub enum SignalMessage {
|
||||
session_id: String,
|
||||
room_name: String,
|
||||
},
|
||||
|
||||
/// Room membership update — sent by relay to all participants when someone joins or leaves.
|
||||
RoomUpdate {
|
||||
/// Current participant count.
|
||||
count: u32,
|
||||
/// List of participants currently in the room.
|
||||
participants: Vec<RoomParticipant>,
|
||||
},
|
||||
|
||||
/// Set or update the client's display name.
|
||||
/// Sent by client after joining; relay updates the participant entry and
|
||||
/// re-broadcasts a RoomUpdate to all participants.
|
||||
SetAlias { alias: String },
|
||||
}
|
||||
|
||||
/// A participant entry in a RoomUpdate message.
|
||||
#[derive(Clone, Debug, Serialize, Deserialize)]
|
||||
pub struct RoomParticipant {
|
||||
/// Identity fingerprint (hex string, stable across reconnects if seed is persisted).
|
||||
pub fingerprint: String,
|
||||
/// Optional display name set by the client.
|
||||
pub alias: Option<String>,
|
||||
}
|
||||
|
||||
/// Reasons for ending a call.
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
use std::collections::VecDeque;
|
||||
use std::time::{Duration, Instant};
|
||||
|
||||
use crate::packet::QualityReport;
|
||||
use crate::traits::QualityController;
|
||||
@@ -25,71 +24,24 @@ impl Tier {
|
||||
}
|
||||
}
|
||||
|
||||
/// Determine which tier a quality report belongs to (default/WiFi thresholds).
|
||||
/// Determine which tier a quality report belongs to.
|
||||
pub fn classify(report: &QualityReport) -> Self {
|
||||
Self::classify_with_context(report, NetworkContext::Unknown)
|
||||
}
|
||||
|
||||
/// Classify with network-context-aware thresholds.
|
||||
pub fn classify_with_context(report: &QualityReport, context: NetworkContext) -> Self {
|
||||
let loss = report.loss_percent();
|
||||
let rtt = report.rtt_ms();
|
||||
|
||||
match context {
|
||||
NetworkContext::CellularLte
|
||||
| NetworkContext::Cellular5g
|
||||
| NetworkContext::Cellular3g => {
|
||||
// Tighter thresholds for cellular networks
|
||||
if loss > 25.0 || rtt > 500 {
|
||||
Self::Catastrophic
|
||||
} else if loss > 8.0 || rtt > 300 {
|
||||
Self::Degraded
|
||||
} else {
|
||||
Self::Good
|
||||
}
|
||||
}
|
||||
NetworkContext::WiFi | NetworkContext::Unknown => {
|
||||
// Original thresholds
|
||||
if loss > 40.0 || rtt > 600 {
|
||||
Self::Catastrophic
|
||||
} else if loss > 10.0 || rtt > 400 {
|
||||
Self::Degraded
|
||||
} else {
|
||||
Self::Good
|
||||
}
|
||||
}
|
||||
if loss > 40.0 || rtt > 600 {
|
||||
Self::Catastrophic
|
||||
} else if loss > 10.0 || rtt > 400 {
|
||||
Self::Degraded
|
||||
} else {
|
||||
Self::Good
|
||||
}
|
||||
}
|
||||
|
||||
/// Return the next lower (worse) tier, or None if already at the worst.
|
||||
pub fn downgrade(self) -> Option<Tier> {
|
||||
match self {
|
||||
Self::Good => Some(Self::Degraded),
|
||||
Self::Degraded => Some(Self::Catastrophic),
|
||||
Self::Catastrophic => None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Describes the network transport type for context-aware quality decisions.
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
|
||||
pub enum NetworkContext {
|
||||
WiFi,
|
||||
CellularLte,
|
||||
Cellular5g,
|
||||
Cellular3g,
|
||||
Unknown,
|
||||
}
|
||||
|
||||
impl Default for NetworkContext {
|
||||
fn default() -> Self {
|
||||
Self::Unknown
|
||||
}
|
||||
}
|
||||
|
||||
/// Adaptive quality controller with hysteresis to prevent tier flapping.
|
||||
///
|
||||
/// - Downgrade: 3 consecutive reports in a worse tier (2 on cellular)
|
||||
/// - Downgrade: 3 consecutive reports in a worse tier
|
||||
/// - Upgrade: 10 consecutive reports in a better tier
|
||||
pub struct AdaptiveQualityController {
|
||||
current_tier: Tier,
|
||||
@@ -102,26 +54,14 @@ pub struct AdaptiveQualityController {
|
||||
history: VecDeque<QualityReport>,
|
||||
/// Whether the profile was manually forced (disables adaptive logic).
|
||||
forced: bool,
|
||||
/// Current network context for threshold selection.
|
||||
network_context: NetworkContext,
|
||||
/// FEC boost expiry time (set during network handoff).
|
||||
fec_boost_until: Option<Instant>,
|
||||
/// FEC boost amount to add during handoff recovery window.
|
||||
fec_boost_amount: f32,
|
||||
}
|
||||
|
||||
/// Threshold for downgrading (fast reaction to degradation).
|
||||
const DOWNGRADE_THRESHOLD: u32 = 3;
|
||||
/// Threshold for downgrading on cellular networks (even faster).
|
||||
const CELLULAR_DOWNGRADE_THRESHOLD: u32 = 2;
|
||||
/// Threshold for upgrading (slow, cautious improvement).
|
||||
const UPGRADE_THRESHOLD: u32 = 10;
|
||||
/// Maximum history window size.
|
||||
const HISTORY_SIZE: usize = 20;
|
||||
/// Default FEC boost amount during handoff recovery.
|
||||
const DEFAULT_FEC_BOOST: f32 = 0.2;
|
||||
/// Duration of FEC boost after a network handoff.
|
||||
const FEC_BOOST_DURATION_SECS: u64 = 10;
|
||||
|
||||
impl AdaptiveQualityController {
|
||||
pub fn new() -> Self {
|
||||
@@ -132,9 +72,6 @@ impl AdaptiveQualityController {
|
||||
consecutive_down: 0,
|
||||
history: VecDeque::with_capacity(HISTORY_SIZE),
|
||||
forced: false,
|
||||
network_context: NetworkContext::default(),
|
||||
fec_boost_until: None,
|
||||
fec_boost_amount: DEFAULT_FEC_BOOST,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -143,69 +80,6 @@ impl AdaptiveQualityController {
|
||||
self.current_tier
|
||||
}
|
||||
|
||||
/// Get the current network context.
|
||||
pub fn network_context(&self) -> NetworkContext {
|
||||
self.network_context
|
||||
}
|
||||
|
||||
/// Signal a network transport change (e.g., WiFi to cellular handoff).
|
||||
///
|
||||
/// When switching from WiFi to any cellular type, this preemptively
|
||||
/// downgrades one quality tier and activates a temporary FEC boost.
|
||||
pub fn signal_network_change(&mut self, new_context: NetworkContext) {
|
||||
let old = self.network_context;
|
||||
self.network_context = new_context;
|
||||
|
||||
let new_is_cellular = matches!(
|
||||
new_context,
|
||||
NetworkContext::CellularLte | NetworkContext::Cellular5g | NetworkContext::Cellular3g
|
||||
);
|
||||
|
||||
// If switching from WiFi to cellular, preemptively downgrade one tier
|
||||
if old == NetworkContext::WiFi && new_is_cellular {
|
||||
if let Some(lower_tier) = self.current_tier.downgrade() {
|
||||
self.current_tier = lower_tier;
|
||||
self.current_profile = lower_tier.profile();
|
||||
}
|
||||
// Reset counters to avoid stale hysteresis state
|
||||
self.consecutive_up = 0;
|
||||
self.consecutive_down = 0;
|
||||
// Un-force so adaptive logic resumes
|
||||
self.forced = false;
|
||||
}
|
||||
|
||||
// Activate FEC boost for any network change
|
||||
self.fec_boost_until = Some(Instant::now() + Duration::from_secs(FEC_BOOST_DURATION_SECS));
|
||||
}
|
||||
|
||||
/// Returns the FEC boost amount if within the handoff recovery window, 0.0 otherwise.
|
||||
///
|
||||
/// Callers should add this to their base FEC ratio during the boost window.
|
||||
pub fn fec_boost(&self) -> f32 {
|
||||
if let Some(until) = self.fec_boost_until {
|
||||
if Instant::now() < until {
|
||||
return self.fec_boost_amount;
|
||||
}
|
||||
}
|
||||
0.0
|
||||
}
|
||||
|
||||
/// Reset the hysteresis counters.
|
||||
pub fn reset_counters(&mut self) {
|
||||
self.consecutive_up = 0;
|
||||
self.consecutive_down = 0;
|
||||
}
|
||||
|
||||
/// Get the effective downgrade threshold based on network context.
|
||||
fn downgrade_threshold(&self) -> u32 {
|
||||
match self.network_context {
|
||||
NetworkContext::CellularLte
|
||||
| NetworkContext::Cellular5g
|
||||
| NetworkContext::Cellular3g => CELLULAR_DOWNGRADE_THRESHOLD,
|
||||
_ => DOWNGRADE_THRESHOLD,
|
||||
}
|
||||
}
|
||||
|
||||
fn try_transition(&mut self, observed_tier: Tier) -> Option<QualityProfile> {
|
||||
if observed_tier == self.current_tier {
|
||||
self.consecutive_up = 0;
|
||||
@@ -222,7 +96,7 @@ impl AdaptiveQualityController {
|
||||
if is_worse {
|
||||
self.consecutive_up = 0;
|
||||
self.consecutive_down += 1;
|
||||
if self.consecutive_down >= self.downgrade_threshold() {
|
||||
if self.consecutive_down >= DOWNGRADE_THRESHOLD {
|
||||
self.current_tier = observed_tier;
|
||||
self.current_profile = observed_tier.profile();
|
||||
self.consecutive_down = 0;
|
||||
@@ -268,7 +142,7 @@ impl QualityController for AdaptiveQualityController {
|
||||
return None;
|
||||
}
|
||||
|
||||
let observed = Tier::classify_with_context(report, self.network_context);
|
||||
let observed = Tier::classify(report);
|
||||
self.try_transition(observed)
|
||||
}
|
||||
|
||||
@@ -372,110 +246,4 @@ mod tests {
|
||||
assert_eq!(Tier::classify(&make_report(50.0, 200)), Tier::Catastrophic);
|
||||
assert_eq!(Tier::classify(&make_report(5.0, 700)), Tier::Catastrophic);
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------
|
||||
// Network context tests
|
||||
// ---------------------------------------------------------------
|
||||
|
||||
#[test]
|
||||
fn cellular_tighter_thresholds() {
|
||||
// 12% loss: Good on WiFi, Degraded on cellular
|
||||
let report = make_report(12.0, 200);
|
||||
assert_eq!(
|
||||
Tier::classify_with_context(&report, NetworkContext::WiFi),
|
||||
Tier::Degraded
|
||||
);
|
||||
assert_eq!(
|
||||
Tier::classify_with_context(&report, NetworkContext::CellularLte),
|
||||
Tier::Degraded
|
||||
);
|
||||
|
||||
// 9% loss: Good on WiFi, Degraded on cellular
|
||||
let report = make_report(9.0, 200);
|
||||
assert_eq!(
|
||||
Tier::classify_with_context(&report, NetworkContext::WiFi),
|
||||
Tier::Good
|
||||
);
|
||||
assert_eq!(
|
||||
Tier::classify_with_context(&report, NetworkContext::CellularLte),
|
||||
Tier::Degraded
|
||||
);
|
||||
|
||||
// 30% loss: Degraded on WiFi, Catastrophic on cellular
|
||||
let report = make_report(30.0, 200);
|
||||
assert_eq!(
|
||||
Tier::classify_with_context(&report, NetworkContext::WiFi),
|
||||
Tier::Degraded
|
||||
);
|
||||
assert_eq!(
|
||||
Tier::classify_with_context(&report, NetworkContext::Cellular3g),
|
||||
Tier::Catastrophic
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn cellular_rtt_thresholds() {
|
||||
// RTT 350ms: Good on WiFi, Degraded on cellular
|
||||
let report = make_report(2.0, 348); // rtt_4ms rounds so use 348
|
||||
assert_eq!(
|
||||
Tier::classify_with_context(&report, NetworkContext::WiFi),
|
||||
Tier::Good
|
||||
);
|
||||
assert_eq!(
|
||||
Tier::classify_with_context(&report, NetworkContext::CellularLte),
|
||||
Tier::Degraded
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn cellular_faster_downgrade() {
|
||||
let mut ctrl = AdaptiveQualityController::new();
|
||||
ctrl.signal_network_change(NetworkContext::CellularLte);
|
||||
// Reset tier back to Good for testing downgrade threshold
|
||||
ctrl.current_tier = Tier::Good;
|
||||
ctrl.current_profile = Tier::Good.profile();
|
||||
|
||||
// On cellular, downgrade threshold is 2 instead of 3
|
||||
let bad = make_report(50.0, 200);
|
||||
assert!(ctrl.observe(&bad).is_none()); // 1st bad
|
||||
let result = ctrl.observe(&bad); // 2nd bad — should trigger on cellular
|
||||
assert!(result.is_some());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn signal_network_change_preemptive_downgrade() {
|
||||
let mut ctrl = AdaptiveQualityController::new();
|
||||
assert_eq!(ctrl.tier(), Tier::Good);
|
||||
|
||||
// Switch from WiFi to cellular
|
||||
ctrl.network_context = NetworkContext::WiFi;
|
||||
ctrl.signal_network_change(NetworkContext::CellularLte);
|
||||
|
||||
// Should have downgraded one tier: Good -> Degraded
|
||||
assert_eq!(ctrl.tier(), Tier::Degraded);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn signal_network_change_fec_boost() {
|
||||
let mut ctrl = AdaptiveQualityController::new();
|
||||
assert_eq!(ctrl.fec_boost(), 0.0);
|
||||
|
||||
ctrl.signal_network_change(NetworkContext::CellularLte);
|
||||
|
||||
// FEC boost should be active
|
||||
assert!(ctrl.fec_boost() > 0.0);
|
||||
assert_eq!(ctrl.fec_boost(), DEFAULT_FEC_BOOST);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn tier_downgrade() {
|
||||
assert_eq!(Tier::Good.downgrade(), Some(Tier::Degraded));
|
||||
assert_eq!(Tier::Degraded.downgrade(), Some(Tier::Catastrophic));
|
||||
assert_eq!(Tier::Catastrophic.downgrade(), None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn network_context_default() {
|
||||
assert_eq!(NetworkContext::default(), NetworkContext::Unknown);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -15,27 +15,25 @@ use wzp_proto::{MediaTransport, QualityProfile, SignalMessage};
|
||||
/// 5. Derive shared ChaCha20-Poly1305 session
|
||||
/// 6. Send `CallAnswer` back
|
||||
///
|
||||
/// Returns the derived `CryptoSession`, the chosen `QualityProfile`, the caller's fingerprint,
|
||||
/// and the caller's alias (if provided in CallOffer).
|
||||
/// Returns the derived `CryptoSession` and the chosen `QualityProfile`.
|
||||
pub async fn accept_handshake(
|
||||
transport: &dyn MediaTransport,
|
||||
seed: &[u8; 32],
|
||||
) -> Result<(Box<dyn CryptoSession>, QualityProfile, String, Option<String>), anyhow::Error> {
|
||||
) -> Result<(Box<dyn CryptoSession>, QualityProfile), anyhow::Error> {
|
||||
// 1. Receive CallOffer
|
||||
let offer = transport
|
||||
.recv_signal()
|
||||
.await?
|
||||
.ok_or_else(|| anyhow::anyhow!("connection closed before receiving CallOffer"))?;
|
||||
|
||||
let (caller_identity_pub, caller_ephemeral_pub, caller_signature, supported_profiles, caller_alias) =
|
||||
let (caller_identity_pub, caller_ephemeral_pub, caller_signature, supported_profiles) =
|
||||
match offer {
|
||||
SignalMessage::CallOffer {
|
||||
identity_pub,
|
||||
ephemeral_pub,
|
||||
signature,
|
||||
supported_profiles,
|
||||
alias,
|
||||
} => (identity_pub, ephemeral_pub, signature, supported_profiles, alias),
|
||||
} => (identity_pub, ephemeral_pub, signature, supported_profiles),
|
||||
other => {
|
||||
return Err(anyhow::anyhow!(
|
||||
"expected CallOffer, got {:?}",
|
||||
@@ -78,13 +76,7 @@ pub async fn accept_handshake(
|
||||
};
|
||||
transport.send_signal(&answer).await?;
|
||||
|
||||
// Derive caller fingerprint from their identity public key (first 8 bytes as hex)
|
||||
let caller_fp = caller_identity_pub[..8]
|
||||
.iter()
|
||||
.map(|b| format!("{b:02x}"))
|
||||
.collect::<String>();
|
||||
|
||||
Ok((session, chosen_profile, caller_fp, caller_alias))
|
||||
Ok((session, chosen_profile))
|
||||
}
|
||||
|
||||
/// Select the best quality profile from those the caller supports.
|
||||
|
||||
@@ -431,7 +431,7 @@ async fn main() -> anyhow::Result<()> {
|
||||
|
||||
// Crypto handshake: verify client identity + negotiate quality profile
|
||||
let handshake_start = std::time::Instant::now();
|
||||
let (_crypto_session, _chosen_profile, caller_fp, caller_alias) = match wzp_relay::handshake::accept_handshake(
|
||||
let (_crypto_session, _chosen_profile) = match wzp_relay::handshake::accept_handshake(
|
||||
&*transport,
|
||||
&relay_seed_bytes,
|
||||
).await {
|
||||
@@ -448,13 +448,10 @@ async fn main() -> anyhow::Result<()> {
|
||||
}
|
||||
};
|
||||
|
||||
// Use the caller's identity fingerprint from the handshake
|
||||
let participant_fp = authenticated_fp.clone().unwrap_or(caller_fp);
|
||||
|
||||
// Register in presence registry
|
||||
{
|
||||
if let Some(ref fp) = authenticated_fp {
|
||||
let mut reg = presence.lock().await;
|
||||
reg.register_local(&participant_fp, None, Some(room_name.clone()));
|
||||
reg.register_local(fp, None, Some(room_name.clone()));
|
||||
}
|
||||
|
||||
info!(%addr, room = %room_name, "client joining");
|
||||
@@ -505,21 +502,14 @@ async fn main() -> anyhow::Result<()> {
|
||||
|
||||
let participant_id = {
|
||||
let mut mgr = room_mgr.lock().await;
|
||||
match mgr.join(
|
||||
&room_name,
|
||||
addr,
|
||||
room::ParticipantSender::Quic(transport.clone()),
|
||||
Some(&participant_fp),
|
||||
caller_alias.as_deref(),
|
||||
) {
|
||||
Ok((id, update, senders)) => {
|
||||
match mgr.join(&room_name, addr, room::ParticipantSender::Quic(transport.clone()), authenticated_fp.as_deref()) {
|
||||
Ok(id) => {
|
||||
metrics.active_rooms.set(mgr.list().len() as i64);
|
||||
drop(mgr); // release lock before async broadcast
|
||||
room::broadcast_signal(&senders, &update).await;
|
||||
id
|
||||
}
|
||||
Err(e) => {
|
||||
error!(%addr, room = %room_name, "room join denied: {e}");
|
||||
// Clean up the session we just created
|
||||
metrics.active_sessions.dec();
|
||||
let mut smgr = session_mgr.lock().await;
|
||||
smgr.remove_session(session_id);
|
||||
|
||||
@@ -67,24 +67,11 @@ impl ParticipantSender {
|
||||
}
|
||||
}
|
||||
|
||||
/// Broadcast a signal message to a list of participant senders.
|
||||
pub async fn broadcast_signal(senders: &[ParticipantSender], msg: &wzp_proto::SignalMessage) {
|
||||
for sender in senders {
|
||||
if let ParticipantSender::Quic(t) = sender {
|
||||
if let Err(e) = t.send_signal(msg).await {
|
||||
warn!("broadcast_signal error: {e}");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// A participant in a room.
|
||||
struct Participant {
|
||||
id: ParticipantId,
|
||||
_addr: std::net::SocketAddr,
|
||||
sender: ParticipantSender,
|
||||
fingerprint: Option<String>,
|
||||
alias: Option<String>,
|
||||
}
|
||||
|
||||
/// A room holding multiple participants.
|
||||
@@ -99,16 +86,10 @@ impl Room {
|
||||
}
|
||||
}
|
||||
|
||||
fn add(
|
||||
&mut self,
|
||||
addr: std::net::SocketAddr,
|
||||
sender: ParticipantSender,
|
||||
fingerprint: Option<String>,
|
||||
alias: Option<String>,
|
||||
) -> ParticipantId {
|
||||
fn add(&mut self, addr: std::net::SocketAddr, sender: ParticipantSender) -> ParticipantId {
|
||||
let id = next_id();
|
||||
info!(room_size = self.participants.len() + 1, participant = id, %addr, "joined room");
|
||||
self.participants.push(Participant { id, _addr: addr, sender, fingerprint, alias });
|
||||
self.participants.push(Participant { id, _addr: addr, sender });
|
||||
id
|
||||
}
|
||||
|
||||
@@ -125,33 +106,6 @@ impl Room {
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Build a RoomUpdate participant list.
|
||||
fn participant_list(&self) -> Vec<wzp_proto::packet::RoomParticipant> {
|
||||
self.participants
|
||||
.iter()
|
||||
.map(|p| wzp_proto::packet::RoomParticipant {
|
||||
fingerprint: p.fingerprint.clone().unwrap_or_default(),
|
||||
alias: p.alias.clone(),
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Get all senders (for broadcasting to everyone including the joiner).
|
||||
fn all_senders(&self) -> Vec<ParticipantSender> {
|
||||
self.participants.iter().map(|p| p.sender.clone()).collect()
|
||||
}
|
||||
|
||||
/// Update a participant's alias. Returns true if the participant was found.
|
||||
fn set_alias(&mut self, id: ParticipantId, alias: String) -> bool {
|
||||
if let Some(p) = self.participants.iter_mut().find(|p| p.id == id) {
|
||||
info!(participant = id, %alias, "alias updated");
|
||||
p.alias = Some(alias);
|
||||
true
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
fn is_empty(&self) -> bool {
|
||||
self.participants.is_empty()
|
||||
}
|
||||
@@ -211,27 +165,20 @@ impl RoomManager {
|
||||
}
|
||||
}
|
||||
|
||||
/// Join a room. Returns (participant_id, room_update_msg, all_senders) for broadcasting.
|
||||
/// Join a room. Returns the participant ID or an error if unauthorized.
|
||||
pub fn join(
|
||||
&mut self,
|
||||
room_name: &str,
|
||||
addr: std::net::SocketAddr,
|
||||
sender: ParticipantSender,
|
||||
fingerprint: Option<&str>,
|
||||
alias: Option<&str>,
|
||||
) -> Result<(ParticipantId, wzp_proto::SignalMessage, Vec<ParticipantSender>), String> {
|
||||
) -> Result<ParticipantId, String> {
|
||||
if !self.is_authorized(room_name, fingerprint) {
|
||||
warn!(room = room_name, fingerprint = ?fingerprint, "unauthorized room join attempt");
|
||||
return Err("not authorized for this room".to_string());
|
||||
}
|
||||
let room = self.rooms.entry(room_name.to_string()).or_insert_with(Room::new);
|
||||
let id = room.add(addr, sender, fingerprint.map(|s| s.to_string()), alias.map(|s| s.to_string()));
|
||||
let update = wzp_proto::SignalMessage::RoomUpdate {
|
||||
count: room.len() as u32,
|
||||
participants: room.participant_list(),
|
||||
};
|
||||
let senders = room.all_senders();
|
||||
Ok((id, update, senders))
|
||||
Ok(room.add(addr, sender))
|
||||
}
|
||||
|
||||
/// Join a room via WebSocket. Convenience wrapper around `join()`.
|
||||
@@ -242,48 +189,18 @@ impl RoomManager {
|
||||
sender: tokio::sync::mpsc::Sender<Bytes>,
|
||||
fingerprint: Option<&str>,
|
||||
) -> Result<ParticipantId, String> {
|
||||
let (id, _update, _senders) = self.join(room_name, addr, ParticipantSender::WebSocket(sender), fingerprint, None)?;
|
||||
Ok(id)
|
||||
self.join(room_name, addr, ParticipantSender::WebSocket(sender), fingerprint)
|
||||
}
|
||||
|
||||
/// Leave a room. Returns (room_update_msg, remaining_senders) for broadcasting, or None if room is now empty.
|
||||
pub fn leave(&mut self, room_name: &str, participant_id: ParticipantId) -> Option<(wzp_proto::SignalMessage, Vec<ParticipantSender>)> {
|
||||
/// Leave a room. Removes the room if empty.
|
||||
pub fn leave(&mut self, room_name: &str, participant_id: ParticipantId) {
|
||||
if let Some(room) = self.rooms.get_mut(room_name) {
|
||||
room.remove(participant_id);
|
||||
if room.is_empty() {
|
||||
self.rooms.remove(room_name);
|
||||
info!(room = room_name, "room closed (empty)");
|
||||
return None;
|
||||
}
|
||||
let update = wzp_proto::SignalMessage::RoomUpdate {
|
||||
count: room.len() as u32,
|
||||
participants: room.participant_list(),
|
||||
};
|
||||
let senders = room.all_senders();
|
||||
Some((update, senders))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// Update a participant's alias and return a RoomUpdate + senders for broadcasting.
|
||||
pub fn set_alias(
|
||||
&mut self,
|
||||
room_name: &str,
|
||||
participant_id: ParticipantId,
|
||||
alias: String,
|
||||
) -> Option<(wzp_proto::SignalMessage, Vec<ParticipantSender>)> {
|
||||
if let Some(room) = self.rooms.get_mut(room_name) {
|
||||
if room.set_alias(participant_id, alias) {
|
||||
let update = wzp_proto::SignalMessage::RoomUpdate {
|
||||
count: room.len() as u32,
|
||||
participants: room.participant_list(),
|
||||
};
|
||||
let senders = room.all_senders();
|
||||
return Some((update, senders));
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
|
||||
/// Get senders for all OTHER participants in a room.
|
||||
@@ -405,119 +322,73 @@ async fn run_participant_plain(
|
||||
session_id: &str,
|
||||
) {
|
||||
let addr = transport.connection().remote_address();
|
||||
let mut packets_forwarded = 0u64;
|
||||
|
||||
// Media forwarding task
|
||||
let media_room_mgr = room_mgr.clone();
|
||||
let media_room_name = room_name.clone();
|
||||
let media_transport = transport.clone();
|
||||
let media_metrics = metrics.clone();
|
||||
let media_session_id = session_id.to_string();
|
||||
let media_task = async move {
|
||||
let mut packets_forwarded = 0u64;
|
||||
loop {
|
||||
let pkt = match media_transport.recv_media().await {
|
||||
Ok(Some(pkt)) => pkt,
|
||||
Ok(None) => {
|
||||
info!(%addr, participant = participant_id, "disconnected");
|
||||
break;
|
||||
}
|
||||
Err(e) => {
|
||||
let msg = e.to_string();
|
||||
if msg.contains("timed out") || msg.contains("reset") || msg.contains("closed") {
|
||||
info!(%addr, participant = participant_id, "connection closed: {e}");
|
||||
} else {
|
||||
error!(%addr, participant = participant_id, "recv error: {e}");
|
||||
}
|
||||
break;
|
||||
}
|
||||
};
|
||||
|
||||
if let Some(ref report) = pkt.quality_report {
|
||||
media_metrics.update_session_quality(&media_session_id, report);
|
||||
loop {
|
||||
let pkt = match transport.recv_media().await {
|
||||
Ok(Some(pkt)) => pkt,
|
||||
Ok(None) => {
|
||||
info!(%addr, participant = participant_id, "disconnected");
|
||||
break;
|
||||
}
|
||||
|
||||
let others = {
|
||||
let mgr = media_room_mgr.lock().await;
|
||||
mgr.others(&media_room_name, participant_id)
|
||||
};
|
||||
|
||||
let pkt_bytes = pkt.payload.len() as u64;
|
||||
for other in &others {
|
||||
match other {
|
||||
ParticipantSender::Quic(t) => {
|
||||
let _ = t.send_media(&pkt).await;
|
||||
}
|
||||
ParticipantSender::WebSocket(_) => {
|
||||
let _ = other.send_raw(&pkt.payload).await;
|
||||
}
|
||||
Err(e) => {
|
||||
let msg = e.to_string();
|
||||
if msg.contains("timed out") || msg.contains("reset") || msg.contains("closed") {
|
||||
info!(%addr, participant = participant_id, "connection closed: {e}");
|
||||
} else {
|
||||
error!(%addr, participant = participant_id, "recv error: {e}");
|
||||
}
|
||||
break;
|
||||
}
|
||||
};
|
||||
|
||||
let fan_out = others.len() as u64;
|
||||
media_metrics.packets_forwarded.inc_by(fan_out);
|
||||
media_metrics.bytes_forwarded.inc_by(pkt_bytes * fan_out);
|
||||
packets_forwarded += 1;
|
||||
if packets_forwarded % 500 == 0 {
|
||||
let room_size = {
|
||||
let mgr = media_room_mgr.lock().await;
|
||||
mgr.room_size(&media_room_name)
|
||||
};
|
||||
info!(
|
||||
room = %media_room_name,
|
||||
participant = participant_id,
|
||||
forwarded = packets_forwarded,
|
||||
room_size,
|
||||
"participant stats"
|
||||
);
|
||||
}
|
||||
// Update per-session quality metrics if a quality report is present
|
||||
if let Some(ref report) = pkt.quality_report {
|
||||
metrics.update_session_quality(session_id, report);
|
||||
}
|
||||
};
|
||||
|
||||
// Signal handling task — processes SetAlias and other in-call signals
|
||||
let signal_room_mgr = room_mgr.clone();
|
||||
let signal_room_name = room_name.clone();
|
||||
let signal_transport = transport.clone();
|
||||
let signal_task = async move {
|
||||
loop {
|
||||
match signal_transport.recv_signal().await {
|
||||
Ok(Some(wzp_proto::SignalMessage::SetAlias { alias })) => {
|
||||
info!(%addr, participant = participant_id, %alias, "SetAlias received");
|
||||
let mut mgr = signal_room_mgr.lock().await;
|
||||
if let Some((update, senders)) =
|
||||
mgr.set_alias(&signal_room_name, participant_id, alias)
|
||||
{
|
||||
drop(mgr);
|
||||
broadcast_signal(&senders, &update).await;
|
||||
}
|
||||
// Get current list of other participants
|
||||
let others = {
|
||||
let mgr = room_mgr.lock().await;
|
||||
mgr.others(&room_name, participant_id)
|
||||
};
|
||||
|
||||
// Forward to all others
|
||||
let pkt_bytes = pkt.payload.len() as u64;
|
||||
for other in &others {
|
||||
match other {
|
||||
ParticipantSender::Quic(t) => {
|
||||
let _ = t.send_media(&pkt).await;
|
||||
}
|
||||
Ok(Some(wzp_proto::SignalMessage::Hangup { .. })) => {
|
||||
info!(%addr, participant = participant_id, "hangup received");
|
||||
break;
|
||||
}
|
||||
Ok(Some(msg)) => {
|
||||
info!(%addr, participant = participant_id, "signal: {:?}", std::mem::discriminant(&msg));
|
||||
}
|
||||
Ok(None) => break,
|
||||
Err(e) => {
|
||||
warn!(%addr, participant = participant_id, "signal recv error: {e}");
|
||||
break;
|
||||
ParticipantSender::WebSocket(_) => {
|
||||
// WS clients receive raw payload bytes
|
||||
let _ = other.send_raw(&pkt.payload).await;
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// Run both in parallel — exit when either finishes (disconnection)
|
||||
tokio::select! {
|
||||
_ = media_task => {}
|
||||
_ = signal_task => {}
|
||||
let fan_out = others.len() as u64;
|
||||
metrics.packets_forwarded.inc_by(fan_out);
|
||||
metrics.bytes_forwarded.inc_by(pkt_bytes * fan_out);
|
||||
packets_forwarded += 1;
|
||||
if packets_forwarded % 500 == 0 {
|
||||
let room_size = {
|
||||
let mgr = room_mgr.lock().await;
|
||||
mgr.room_size(&room_name)
|
||||
};
|
||||
info!(
|
||||
room = %room_name,
|
||||
participant = participant_id,
|
||||
forwarded = packets_forwarded,
|
||||
room_size,
|
||||
"participant stats"
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// Clean up — leave room and broadcast update to remaining participants
|
||||
// Clean up
|
||||
let mut mgr = room_mgr.lock().await;
|
||||
if let Some((update, senders)) = mgr.leave(&room_name, participant_id) {
|
||||
drop(mgr); // release lock before async broadcast
|
||||
broadcast_signal(&senders, &update).await;
|
||||
}
|
||||
mgr.leave(&room_name, participant_id);
|
||||
}
|
||||
|
||||
/// Trunked forwarding loop — batches outgoing packets per peer.
|
||||
@@ -626,10 +497,7 @@ async fn run_participant_trunked(
|
||||
}
|
||||
|
||||
let mut mgr = room_mgr.lock().await;
|
||||
if let Some((update, senders)) = mgr.leave(&room_name, participant_id) {
|
||||
drop(mgr);
|
||||
broadcast_signal(&senders, &update).await;
|
||||
}
|
||||
mgr.leave(&room_name, participant_id);
|
||||
}
|
||||
|
||||
/// Parse up to the first 2 bytes of a hex session-id string into `[u8; 2]`.
|
||||
|
||||
@@ -136,11 +136,6 @@ impl PathMonitor {
|
||||
}
|
||||
}
|
||||
|
||||
/// Get raw packet counts for debugging.
|
||||
pub fn counts(&self) -> (u64, u64) {
|
||||
(self.total_sent, self.total_received)
|
||||
}
|
||||
|
||||
/// Estimate bandwidth in kbps from bytes received over time.
|
||||
fn estimate_bandwidth_kbps(&self) -> u32 {
|
||||
if let (Some(first), Some(last)) = (self.first_recv_time_ms, self.last_recv_time_ms) {
|
||||
@@ -154,27 +149,6 @@ impl PathMonitor {
|
||||
}
|
||||
0
|
||||
}
|
||||
|
||||
/// Detect whether a network handoff likely occurred.
|
||||
///
|
||||
/// Returns `true` if the most recent RTT jitter measurement exceeds 3x
|
||||
/// the EWMA-smoothed jitter average, which is characteristic of a cellular
|
||||
/// network handoff (tower switch, WiFi-to-cellular transition, etc.).
|
||||
pub fn detect_handoff(&self) -> bool {
|
||||
// We need at least two RTT observations to have a meaningful jitter value,
|
||||
// and the EWMA must be non-zero to avoid division/multiplication by zero.
|
||||
if self.jitter_ewma <= 0.0 {
|
||||
return false;
|
||||
}
|
||||
|
||||
if let (Some(last_rtt), Some(_)) = (self.last_rtt_ms, Some(self.rtt_ewma)) {
|
||||
// Compute the most recent instantaneous jitter (RTT deviation from EWMA)
|
||||
let instant_jitter = (last_rtt - self.rtt_ewma).abs();
|
||||
instant_jitter > self.jitter_ewma * 3.0
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for PathMonitor {
|
||||
|
||||
@@ -33,16 +33,6 @@ impl QuinnTransport {
|
||||
&self.connection
|
||||
}
|
||||
|
||||
/// Feed an external RTT observation (e.g. from QUIC path stats) into the path monitor.
|
||||
pub fn feed_rtt(&self, rtt_ms: u32) {
|
||||
self.path_monitor.lock().unwrap().observe_rtt(rtt_ms);
|
||||
}
|
||||
|
||||
/// Get raw packet counts from path monitor (sent, received).
|
||||
pub fn monitor_counts(&self) -> (u64, u64) {
|
||||
self.path_monitor.lock().unwrap().counts()
|
||||
}
|
||||
|
||||
/// Get the maximum datagram payload size, if datagrams are supported.
|
||||
pub fn max_datagram_size(&self) -> Option<usize> {
|
||||
datagram::max_datagram_payload(&self.connection)
|
||||
|
||||
25
crates/wzp-wasm/Cargo.toml
Normal file
25
crates/wzp-wasm/Cargo.toml
Normal file
@@ -0,0 +1,25 @@
|
||||
[package]
|
||||
name = "wzp-wasm"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
description = "WarzonePhone WASM bindings — FEC (RaptorQ) + crypto (ChaCha20-Poly1305, X25519)"
|
||||
|
||||
[lib]
|
||||
crate-type = ["cdylib", "rlib"]
|
||||
|
||||
[dependencies]
|
||||
wasm-bindgen = "0.2"
|
||||
raptorq = "2"
|
||||
js-sys = "0.3"
|
||||
|
||||
# Crypto (ChaCha20-Poly1305 + X25519 key exchange)
|
||||
chacha20poly1305 = "0.10"
|
||||
hkdf = "0.12"
|
||||
sha2 = "0.10"
|
||||
x25519-dalek = { version = "2", features = ["static_secrets"] }
|
||||
rand = "0.8"
|
||||
getrandom = { version = "0.2", features = ["js"] } # CRITICAL for WASM randomness
|
||||
|
||||
[profile.release]
|
||||
opt-level = "s"
|
||||
lto = true
|
||||
692
crates/wzp-wasm/src/lib.rs
Normal file
692
crates/wzp-wasm/src/lib.rs
Normal file
@@ -0,0 +1,692 @@
|
||||
//! WarzonePhone WASM bindings.
|
||||
//!
|
||||
//! Exports two subsystems for browser-side usage:
|
||||
//!
|
||||
//! **FEC** — RaptorQ forward error correction (encode/decode).
|
||||
//! Audio frames are padded to a fixed symbol size (default 256 bytes) with a
|
||||
//! 2-byte little-endian length prefix, matching the native wzp-fec wire format.
|
||||
//!
|
||||
//! Wire format per symbol:
|
||||
//! [block_id:1][symbol_idx:1][is_repair:1][symbol_data:symbol_size]
|
||||
//!
|
||||
//! Encoder output: concatenated symbols in the above format when a block completes.
|
||||
//! Decoder input: individual symbols in the above format.
|
||||
//! Decoder output: concatenated original source data (length-prefix stripped).
|
||||
//!
|
||||
//! **Crypto** — X25519 key exchange + ChaCha20-Poly1305 AEAD encryption.
|
||||
//! Mirrors `wzp-crypto` nonce/session/handshake logic so WASM and native
|
||||
//! peers produce interoperable ciphertext.
|
||||
|
||||
use wasm_bindgen::prelude::*;
|
||||
use raptorq::{
|
||||
EncodingPacket, ObjectTransmissionInformation, PayloadId, SourceBlockDecoder,
|
||||
SourceBlockEncoder,
|
||||
};
|
||||
|
||||
/// Header size prepended to each symbol on the wire: block_id + symbol_idx + is_repair.
|
||||
const HEADER_SIZE: usize = 3;
|
||||
|
||||
/// Length prefix size inside each padded symbol (u16 LE), matching wzp-fec.
|
||||
const LEN_PREFIX: usize = 2;
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Encoder
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
#[wasm_bindgen]
|
||||
pub struct WzpFecEncoder {
|
||||
block_id: u8,
|
||||
frames_per_block: usize,
|
||||
symbol_size: usize,
|
||||
source_symbols: Vec<Vec<u8>>,
|
||||
}
|
||||
|
||||
#[wasm_bindgen]
|
||||
impl WzpFecEncoder {
|
||||
/// Create a new FEC encoder.
|
||||
///
|
||||
/// * `block_size` — number of source symbols (audio frames) per FEC block.
|
||||
/// * `symbol_size` — padded byte size of each symbol (default 256).
|
||||
#[wasm_bindgen(constructor)]
|
||||
pub fn new(block_size: usize, symbol_size: usize) -> Self {
|
||||
Self {
|
||||
block_id: 0,
|
||||
frames_per_block: block_size,
|
||||
symbol_size,
|
||||
source_symbols: Vec::with_capacity(block_size),
|
||||
}
|
||||
}
|
||||
|
||||
/// Add a source symbol (audio frame).
|
||||
///
|
||||
/// Returns encoded packets (all source + repair) when the block is complete,
|
||||
/// or `undefined` if the block is still accumulating.
|
||||
///
|
||||
/// Each returned packet carries the 3-byte header:
|
||||
/// `[block_id][symbol_idx][is_repair]` followed by `symbol_size` bytes.
|
||||
pub fn add_symbol(&mut self, data: &[u8]) -> Option<Vec<u8>> {
|
||||
self.source_symbols.push(data.to_vec());
|
||||
|
||||
if self.source_symbols.len() >= self.frames_per_block {
|
||||
Some(self.encode_block())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// Force-flush the current (possibly partial) block.
|
||||
///
|
||||
/// Returns all source + repair symbols with headers, or empty vec if no
|
||||
/// symbols have been accumulated.
|
||||
pub fn flush(&mut self) -> Vec<u8> {
|
||||
if self.source_symbols.is_empty() {
|
||||
return Vec::new();
|
||||
}
|
||||
self.encode_block()
|
||||
}
|
||||
|
||||
/// Internal: encode accumulated source symbols into a block, generate repair,
|
||||
/// and return the concatenated wire-format output.
|
||||
fn encode_block(&mut self) -> Vec<u8> {
|
||||
let ss = self.symbol_size;
|
||||
let num_source = self.source_symbols.len();
|
||||
let block_id = self.block_id;
|
||||
|
||||
// Build length-prefixed, padded block data (matches wzp-fec format).
|
||||
let block_data = self.build_block_data();
|
||||
|
||||
let config =
|
||||
ObjectTransmissionInformation::with_defaults(block_data.len() as u64, ss as u16);
|
||||
let encoder = SourceBlockEncoder::new(block_id, &config, &block_data);
|
||||
|
||||
// Generate source packets.
|
||||
let source_packets = encoder.source_packets();
|
||||
|
||||
// Generate repair packets — 50% overhead by default.
|
||||
let num_repair = ((num_source as f32) * 0.5).ceil() as u32;
|
||||
let repair_packets = encoder.repair_packets(0, num_repair);
|
||||
|
||||
// Allocate output buffer.
|
||||
let total_packets = source_packets.len() + repair_packets.len();
|
||||
let packet_wire_size = HEADER_SIZE + ss;
|
||||
let mut output = Vec::with_capacity(total_packets * packet_wire_size);
|
||||
|
||||
// Write source symbols.
|
||||
for (i, pkt) in source_packets.iter().enumerate() {
|
||||
output.push(block_id);
|
||||
output.push(i as u8);
|
||||
output.push(0); // is_repair = false
|
||||
let pkt_data = pkt.data();
|
||||
let copy_len = pkt_data.len().min(ss);
|
||||
output.extend_from_slice(&pkt_data[..copy_len]);
|
||||
// Pad if shorter.
|
||||
if copy_len < ss {
|
||||
output.resize(output.len() + (ss - copy_len), 0);
|
||||
}
|
||||
}
|
||||
|
||||
// Write repair symbols.
|
||||
for (i, pkt) in repair_packets.iter().enumerate() {
|
||||
output.push(block_id);
|
||||
output.push((num_source + i) as u8);
|
||||
output.push(1); // is_repair = true
|
||||
let pkt_data = pkt.data();
|
||||
let copy_len = pkt_data.len().min(ss);
|
||||
output.extend_from_slice(&pkt_data[..copy_len]);
|
||||
if copy_len < ss {
|
||||
output.resize(output.len() + (ss - copy_len), 0);
|
||||
}
|
||||
}
|
||||
|
||||
// Advance block.
|
||||
self.block_id = self.block_id.wrapping_add(1);
|
||||
self.source_symbols.clear();
|
||||
|
||||
output
|
||||
}
|
||||
|
||||
/// Build the contiguous, length-prefixed block data buffer.
|
||||
fn build_block_data(&self) -> Vec<u8> {
|
||||
let ss = self.symbol_size;
|
||||
let mut data = vec![0u8; self.source_symbols.len() * ss];
|
||||
for (i, sym) in self.source_symbols.iter().enumerate() {
|
||||
let max_payload = ss - LEN_PREFIX;
|
||||
let payload_len = sym.len().min(max_payload);
|
||||
let offset = i * ss;
|
||||
data[offset..offset + LEN_PREFIX]
|
||||
.copy_from_slice(&(payload_len as u16).to_le_bytes());
|
||||
data[offset + LEN_PREFIX..offset + LEN_PREFIX + payload_len]
|
||||
.copy_from_slice(&sym[..payload_len]);
|
||||
}
|
||||
data
|
||||
}
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Decoder
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// Per-block decoder state.
|
||||
struct BlockState {
|
||||
packets: Vec<EncodingPacket>,
|
||||
decoded: bool,
|
||||
result: Option<Vec<u8>>,
|
||||
}
|
||||
|
||||
#[wasm_bindgen]
|
||||
pub struct WzpFecDecoder {
|
||||
frames_per_block: usize,
|
||||
symbol_size: usize,
|
||||
blocks: Vec<(u8, BlockState)>, // poor man's map (no std HashMap in tiny WASM)
|
||||
}
|
||||
|
||||
#[wasm_bindgen]
|
||||
impl WzpFecDecoder {
|
||||
/// Create a new FEC decoder.
|
||||
///
|
||||
/// * `block_size` — expected number of source symbols per block.
|
||||
/// * `symbol_size` — padded byte size of each symbol (must match encoder).
|
||||
#[wasm_bindgen(constructor)]
|
||||
pub fn new(block_size: usize, symbol_size: usize) -> Self {
|
||||
Self {
|
||||
frames_per_block: block_size,
|
||||
symbol_size,
|
||||
blocks: Vec::new(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Feed a received symbol.
|
||||
///
|
||||
/// Returns the decoded block (concatenated original frames, unpadded) if
|
||||
/// enough symbols have been received to recover the block, or `undefined`.
|
||||
pub fn add_symbol(
|
||||
&mut self,
|
||||
block_id: u8,
|
||||
symbol_idx: u8,
|
||||
_is_repair: bool,
|
||||
data: &[u8],
|
||||
) -> Option<Vec<u8>> {
|
||||
let ss = self.symbol_size;
|
||||
|
||||
// Pad incoming data to symbol_size.
|
||||
let mut padded = vec![0u8; ss];
|
||||
let len = data.len().min(ss);
|
||||
padded[..len].copy_from_slice(&data[..len]);
|
||||
|
||||
let esi = symbol_idx as u32;
|
||||
let packet = EncodingPacket::new(PayloadId::new(block_id, esi), padded);
|
||||
|
||||
// Find or create block state.
|
||||
let block = self.get_or_create_block(block_id);
|
||||
|
||||
if block.decoded {
|
||||
return block.result.clone();
|
||||
}
|
||||
|
||||
block.packets.push(packet);
|
||||
|
||||
// Attempt decode.
|
||||
self.try_decode(block_id)
|
||||
}
|
||||
|
||||
/// Try to decode a block; returns the original frames if successful.
|
||||
fn try_decode(&mut self, block_id: u8) -> Option<Vec<u8>> {
|
||||
let ss = self.symbol_size;
|
||||
let num_source = self.frames_per_block;
|
||||
let block_length = (num_source as u64) * (ss as u64);
|
||||
|
||||
let block = self.get_block_mut(block_id)?;
|
||||
if block.decoded {
|
||||
return block.result.clone();
|
||||
}
|
||||
|
||||
let config =
|
||||
ObjectTransmissionInformation::with_defaults(block_length, ss as u16);
|
||||
let mut decoder = SourceBlockDecoder::new(block_id, &config, block_length);
|
||||
|
||||
let decoded = decoder.decode(block.packets.clone());
|
||||
|
||||
match decoded {
|
||||
Some(data) => {
|
||||
// Extract original frames by stripping length prefixes.
|
||||
let mut output = Vec::new();
|
||||
for i in 0..num_source {
|
||||
let offset = i * ss;
|
||||
if offset + LEN_PREFIX > data.len() {
|
||||
break;
|
||||
}
|
||||
let payload_len = u16::from_le_bytes([
|
||||
data[offset],
|
||||
data[offset + 1],
|
||||
]) as usize;
|
||||
let payload_start = offset + LEN_PREFIX;
|
||||
let payload_end = (payload_start + payload_len).min(data.len());
|
||||
output.extend_from_slice(&data[payload_start..payload_end]);
|
||||
}
|
||||
|
||||
let block = self.get_block_mut(block_id).unwrap();
|
||||
block.decoded = true;
|
||||
block.result = Some(output.clone());
|
||||
Some(output)
|
||||
}
|
||||
None => None,
|
||||
}
|
||||
}
|
||||
|
||||
fn get_or_create_block(&mut self, block_id: u8) -> &mut BlockState {
|
||||
if let Some(pos) = self.blocks.iter().position(|(id, _)| *id == block_id) {
|
||||
return &mut self.blocks[pos].1;
|
||||
}
|
||||
self.blocks.push((
|
||||
block_id,
|
||||
BlockState {
|
||||
packets: Vec::new(),
|
||||
decoded: false,
|
||||
result: None,
|
||||
},
|
||||
));
|
||||
let last = self.blocks.len() - 1;
|
||||
&mut self.blocks[last].1
|
||||
}
|
||||
|
||||
fn get_block_mut(&mut self, block_id: u8) -> Option<&mut BlockState> {
|
||||
self.blocks
|
||||
.iter_mut()
|
||||
.find(|(id, _)| *id == block_id)
|
||||
.map(|(_, state)| state)
|
||||
}
|
||||
}
|
||||
|
||||
// =========================================================================
|
||||
// Crypto — X25519 key exchange
|
||||
// =========================================================================
|
||||
|
||||
/// X25519 key exchange: generate ephemeral keypair and derive shared secret.
|
||||
///
|
||||
/// Usage from JS:
|
||||
/// ```js
|
||||
/// const kx = new WzpKeyExchange();
|
||||
/// const ourPub = kx.public_key(); // Uint8Array(32)
|
||||
/// // ... send ourPub to peer, receive peerPub ...
|
||||
/// const secret = kx.derive_shared_secret(peerPub); // Uint8Array(32)
|
||||
/// const session = new WzpCryptoSession(secret);
|
||||
/// ```
|
||||
#[wasm_bindgen]
|
||||
pub struct WzpKeyExchange {
|
||||
secret: x25519_dalek::StaticSecret,
|
||||
public: x25519_dalek::PublicKey,
|
||||
}
|
||||
|
||||
#[wasm_bindgen]
|
||||
impl WzpKeyExchange {
|
||||
/// Generate a new random X25519 keypair.
|
||||
#[wasm_bindgen(constructor)]
|
||||
pub fn new() -> Self {
|
||||
let secret = x25519_dalek::StaticSecret::random_from_rng(rand::rngs::OsRng);
|
||||
let public = x25519_dalek::PublicKey::from(&secret);
|
||||
Self { secret, public }
|
||||
}
|
||||
|
||||
/// Our public key (32 bytes).
|
||||
pub fn public_key(&self) -> Vec<u8> {
|
||||
self.public.as_bytes().to_vec()
|
||||
}
|
||||
|
||||
/// Derive a 32-byte session key from the peer's public key.
|
||||
///
|
||||
/// Raw DH output is expanded via HKDF-SHA256 with info="warzone-session-key",
|
||||
/// matching `wzp-crypto::handshake::WarzoneKeyExchange::derive_session`.
|
||||
pub fn derive_shared_secret(&self, peer_public: &[u8]) -> Result<Vec<u8>, JsValue> {
|
||||
if peer_public.len() != 32 {
|
||||
return Err(JsValue::from_str("peer public key must be 32 bytes"));
|
||||
}
|
||||
let mut peer_bytes = [0u8; 32];
|
||||
peer_bytes.copy_from_slice(peer_public);
|
||||
let peer_pk = x25519_dalek::PublicKey::from(peer_bytes);
|
||||
|
||||
// Rebuild secret from bytes (StaticSecret doesn't impl Clone).
|
||||
let secret_bytes = self.secret.to_bytes();
|
||||
let secret_clone = x25519_dalek::StaticSecret::from(secret_bytes);
|
||||
let shared = secret_clone.diffie_hellman(&peer_pk);
|
||||
|
||||
// HKDF expand — same derivation as wzp-crypto handshake.rs
|
||||
use hkdf::Hkdf;
|
||||
use sha2::Sha256;
|
||||
let hk = Hkdf::<Sha256>::new(None, shared.as_bytes());
|
||||
let mut session_key = [0u8; 32];
|
||||
hk.expand(b"warzone-session-key", &mut session_key)
|
||||
.expect("HKDF expand should not fail for 32-byte output");
|
||||
|
||||
Ok(session_key.to_vec())
|
||||
}
|
||||
}
|
||||
|
||||
// =========================================================================
|
||||
// Crypto — ChaCha20-Poly1305 AEAD session
|
||||
// =========================================================================
|
||||
|
||||
/// Build a 12-byte nonce (mirrors `wzp-crypto::nonce::build_nonce`).
|
||||
///
|
||||
/// Layout: `session_id[4] || seq(u32 BE) || direction(1) || pad(3 zero)`.
|
||||
fn build_nonce(session_id: &[u8; 4], seq: u32, direction: u8) -> [u8; 12] {
|
||||
let mut nonce = [0u8; 12];
|
||||
nonce[0..4].copy_from_slice(session_id);
|
||||
nonce[4..8].copy_from_slice(&seq.to_be_bytes());
|
||||
nonce[8] = direction;
|
||||
nonce
|
||||
}
|
||||
|
||||
/// Symmetric encryption session using ChaCha20-Poly1305.
|
||||
///
|
||||
/// Mirrors `wzp-crypto::session::ChaChaSession` for WASM. Nonce derivation
|
||||
/// and key setup are identical so WASM and native peers interoperate.
|
||||
#[wasm_bindgen]
|
||||
pub struct WzpCryptoSession {
|
||||
cipher: chacha20poly1305::ChaCha20Poly1305,
|
||||
session_id: [u8; 4],
|
||||
send_seq: u32,
|
||||
recv_seq: u32,
|
||||
}
|
||||
|
||||
#[wasm_bindgen]
|
||||
impl WzpCryptoSession {
|
||||
/// Create from a 32-byte shared secret (output of `WzpKeyExchange.derive_shared_secret`).
|
||||
#[wasm_bindgen(constructor)]
|
||||
pub fn new(shared_secret: &[u8]) -> Result<WzpCryptoSession, JsValue> {
|
||||
if shared_secret.len() != 32 {
|
||||
return Err(JsValue::from_str("shared secret must be 32 bytes"));
|
||||
}
|
||||
|
||||
use chacha20poly1305::KeyInit;
|
||||
use sha2::Digest;
|
||||
|
||||
let session_id_hash = sha2::Sha256::digest(shared_secret);
|
||||
let mut session_id = [0u8; 4];
|
||||
session_id.copy_from_slice(&session_id_hash[..4]);
|
||||
|
||||
let cipher = chacha20poly1305::ChaCha20Poly1305::new_from_slice(shared_secret)
|
||||
.map_err(|e| JsValue::from_str(&format!("invalid key: {}", e)))?;
|
||||
|
||||
Ok(Self {
|
||||
cipher,
|
||||
session_id,
|
||||
send_seq: 0,
|
||||
recv_seq: 0,
|
||||
})
|
||||
}
|
||||
|
||||
/// Encrypt a media payload with AAD (typically the 12-byte MediaHeader).
|
||||
///
|
||||
/// Returns `ciphertext || poly1305_tag` (plaintext.len() + 16 bytes).
|
||||
pub fn encrypt(&mut self, header_aad: &[u8], plaintext: &[u8]) -> Result<Vec<u8>, JsValue> {
|
||||
use chacha20poly1305::aead::{Aead, Payload};
|
||||
use chacha20poly1305::Nonce;
|
||||
|
||||
let nonce_bytes = build_nonce(&self.session_id, self.send_seq, 0); // 0 = Send
|
||||
let nonce = Nonce::from_slice(&nonce_bytes);
|
||||
|
||||
let payload = Payload {
|
||||
msg: plaintext,
|
||||
aad: header_aad,
|
||||
};
|
||||
|
||||
let ciphertext = self
|
||||
.cipher
|
||||
.encrypt(nonce, payload)
|
||||
.map_err(|_| JsValue::from_str("encryption failed"))?;
|
||||
|
||||
self.send_seq = self.send_seq.wrapping_add(1);
|
||||
Ok(ciphertext)
|
||||
}
|
||||
|
||||
/// Decrypt a media payload with AAD.
|
||||
///
|
||||
/// Returns plaintext on success, or throws on auth failure.
|
||||
pub fn decrypt(&mut self, header_aad: &[u8], ciphertext: &[u8]) -> Result<Vec<u8>, JsValue> {
|
||||
use chacha20poly1305::aead::{Aead, Payload};
|
||||
use chacha20poly1305::Nonce;
|
||||
|
||||
// direction=0 (Send) matches the sender's nonce — same as native code.
|
||||
let nonce_bytes = build_nonce(&self.session_id, self.recv_seq, 0);
|
||||
let nonce = Nonce::from_slice(&nonce_bytes);
|
||||
|
||||
let payload = Payload {
|
||||
msg: ciphertext,
|
||||
aad: header_aad,
|
||||
};
|
||||
|
||||
let plaintext = self
|
||||
.cipher
|
||||
.decrypt(nonce, payload)
|
||||
.map_err(|_| JsValue::from_str("decryption failed — bad key or corrupted data"))?;
|
||||
|
||||
self.recv_seq = self.recv_seq.wrapping_add(1);
|
||||
Ok(plaintext)
|
||||
}
|
||||
|
||||
/// Current send sequence number (for diagnostics / UI stats).
|
||||
pub fn send_seq(&self) -> u32 {
|
||||
self.send_seq
|
||||
}
|
||||
|
||||
/// Current receive sequence number (for diagnostics / UI stats).
|
||||
pub fn recv_seq(&self) -> u32 {
|
||||
self.recv_seq
|
||||
}
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Tests (native only — not compiled to WASM)
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn encode_decode_roundtrip() {
|
||||
let block_size = 5;
|
||||
let symbol_size = 256;
|
||||
|
||||
let mut encoder = WzpFecEncoder::new(block_size, symbol_size);
|
||||
let mut decoder = WzpFecDecoder::new(block_size, symbol_size);
|
||||
|
||||
// Create test frames of varying sizes.
|
||||
let frames: Vec<Vec<u8>> = (0..block_size)
|
||||
.map(|i| vec![(i as u8).wrapping_mul(37).wrapping_add(7); 80 + i * 10])
|
||||
.collect();
|
||||
|
||||
// Feed frames to encoder; last one triggers block encoding.
|
||||
let mut wire_data = None;
|
||||
for frame in &frames {
|
||||
wire_data = encoder.add_symbol(frame);
|
||||
}
|
||||
let wire_data = wire_data.expect("block should be complete");
|
||||
|
||||
// Parse wire packets and feed to decoder.
|
||||
let packet_size = HEADER_SIZE + symbol_size;
|
||||
assert_eq!(wire_data.len() % packet_size, 0);
|
||||
|
||||
let mut result = None;
|
||||
for chunk in wire_data.chunks(packet_size) {
|
||||
let blk_id = chunk[0];
|
||||
let sym_idx = chunk[1];
|
||||
let is_repair = chunk[2] != 0;
|
||||
let sym_data = &chunk[HEADER_SIZE..];
|
||||
if let Some(decoded) = decoder.add_symbol(blk_id, sym_idx, is_repair, sym_data) {
|
||||
result = Some(decoded);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
let decoded_data = result.expect("should decode with all symbols");
|
||||
|
||||
// Verify: decoded data should be all original frames concatenated.
|
||||
let mut expected = Vec::new();
|
||||
for frame in &frames {
|
||||
expected.extend_from_slice(frame);
|
||||
}
|
||||
assert_eq!(decoded_data, expected);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn decode_with_packet_loss() {
|
||||
let block_size = 5;
|
||||
let symbol_size = 256;
|
||||
|
||||
let mut encoder = WzpFecEncoder::new(block_size, symbol_size);
|
||||
let mut decoder = WzpFecDecoder::new(block_size, symbol_size);
|
||||
|
||||
let frames: Vec<Vec<u8>> = (0..block_size)
|
||||
.map(|i| vec![(i as u8).wrapping_mul(37).wrapping_add(7); 100])
|
||||
.collect();
|
||||
|
||||
let mut wire_data = None;
|
||||
for frame in &frames {
|
||||
wire_data = encoder.add_symbol(frame);
|
||||
}
|
||||
let wire_data = wire_data.unwrap();
|
||||
|
||||
let packet_size = HEADER_SIZE + symbol_size;
|
||||
let packets: Vec<&[u8]> = wire_data.chunks(packet_size).collect();
|
||||
|
||||
// Drop 2 source packets (simulate 40% source loss).
|
||||
// We have 5 source + 3 repair = 8 packets. Drop packets at index 1 and 3.
|
||||
let mut result = None;
|
||||
for (i, chunk) in packets.iter().enumerate() {
|
||||
if i == 1 || i == 3 {
|
||||
continue; // simulate loss
|
||||
}
|
||||
let blk_id = chunk[0];
|
||||
let sym_idx = chunk[1];
|
||||
let is_repair = chunk[2] != 0;
|
||||
let sym_data = &chunk[HEADER_SIZE..];
|
||||
if let Some(decoded) = decoder.add_symbol(blk_id, sym_idx, is_repair, sym_data) {
|
||||
result = Some(decoded);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
let decoded_data = result.expect("should recover with FEC despite 2 lost packets");
|
||||
|
||||
let mut expected = Vec::new();
|
||||
for frame in &frames {
|
||||
expected.extend_from_slice(frame);
|
||||
}
|
||||
assert_eq!(decoded_data, expected);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn flush_partial_block() {
|
||||
let mut encoder = WzpFecEncoder::new(5, 256);
|
||||
|
||||
// Add only 3 of 5 expected symbols, then flush.
|
||||
encoder.add_symbol(&[1; 50]);
|
||||
encoder.add_symbol(&[2; 60]);
|
||||
encoder.add_symbol(&[3; 70]);
|
||||
|
||||
let wire_data = encoder.flush();
|
||||
assert!(!wire_data.is_empty());
|
||||
|
||||
// Verify block_id advanced.
|
||||
assert_eq!(encoder.block_id, 1);
|
||||
}
|
||||
|
||||
// -- Crypto tests -------------------------------------------------------
|
||||
|
||||
#[test]
|
||||
fn crypto_encrypt_decrypt_roundtrip() {
|
||||
let key = [0x42u8; 32];
|
||||
let mut alice = WzpCryptoSession::new(&key).unwrap();
|
||||
let mut bob = WzpCryptoSession::new(&key).unwrap();
|
||||
|
||||
let header = b"test-header";
|
||||
let plaintext = b"hello warzone from wasm";
|
||||
|
||||
let ciphertext = alice.encrypt(header, plaintext).unwrap();
|
||||
let decrypted = bob.decrypt(header, &ciphertext).unwrap();
|
||||
|
||||
assert_eq!(&decrypted, plaintext);
|
||||
}
|
||||
|
||||
// NOTE: crypto_wrong_aad_fails and crypto_wrong_key_fails return
|
||||
// Err(JsValue) which aborts on non-wasm32 (JsValue::from_str uses an
|
||||
// extern "C" shim that panics with "cannot unwind"). These tests are
|
||||
// gated to wasm32-only; on native the encrypt/decrypt roundtrip and
|
||||
// nonce-layout tests provide sufficient coverage.
|
||||
|
||||
#[cfg(target_arch = "wasm32")]
|
||||
#[test]
|
||||
fn crypto_wrong_aad_fails() {
|
||||
let key = [0x42u8; 32];
|
||||
let mut alice = WzpCryptoSession::new(&key).unwrap();
|
||||
let mut bob = WzpCryptoSession::new(&key).unwrap();
|
||||
|
||||
let ciphertext = alice.encrypt(b"correct", b"secret").unwrap();
|
||||
let result = bob.decrypt(b"wrong", &ciphertext);
|
||||
assert!(result.is_err());
|
||||
}
|
||||
|
||||
#[cfg(target_arch = "wasm32")]
|
||||
#[test]
|
||||
fn crypto_wrong_key_fails() {
|
||||
let mut alice = WzpCryptoSession::new(&[0xAA; 32]).unwrap();
|
||||
let mut eve = WzpCryptoSession::new(&[0xBB; 32]).unwrap();
|
||||
|
||||
let ciphertext = alice.encrypt(b"hdr", b"secret").unwrap();
|
||||
let result = eve.decrypt(b"hdr", &ciphertext);
|
||||
assert!(result.is_err());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn crypto_multiple_packets() {
|
||||
let key = [0x42u8; 32];
|
||||
let mut alice = WzpCryptoSession::new(&key).unwrap();
|
||||
let mut bob = WzpCryptoSession::new(&key).unwrap();
|
||||
|
||||
for i in 0..100u32 {
|
||||
let msg = format!("message {}", i);
|
||||
let ct = alice.encrypt(b"hdr", msg.as_bytes()).unwrap();
|
||||
let pt = bob.decrypt(b"hdr", &ct).unwrap();
|
||||
assert_eq!(pt, msg.as_bytes());
|
||||
}
|
||||
assert_eq!(alice.send_seq(), 100);
|
||||
assert_eq!(bob.recv_seq(), 100);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn key_exchange_roundtrip() {
|
||||
let alice_kx = WzpKeyExchange::new();
|
||||
let bob_kx = WzpKeyExchange::new();
|
||||
|
||||
let alice_secret = alice_kx
|
||||
.derive_shared_secret(&bob_kx.public_key())
|
||||
.unwrap();
|
||||
let bob_secret = bob_kx
|
||||
.derive_shared_secret(&alice_kx.public_key())
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(alice_secret, bob_secret);
|
||||
assert_eq!(alice_secret.len(), 32);
|
||||
|
||||
// Verify the derived secret actually works for encrypt/decrypt.
|
||||
let mut alice_session = WzpCryptoSession::new(&alice_secret).unwrap();
|
||||
let mut bob_session = WzpCryptoSession::new(&bob_secret).unwrap();
|
||||
|
||||
let ct = alice_session.encrypt(b"hdr", b"hello").unwrap();
|
||||
let pt = bob_session.decrypt(b"hdr", &ct).unwrap();
|
||||
assert_eq!(&pt, b"hello");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn nonce_layout_matches_native() {
|
||||
// Verify our build_nonce matches wzp-crypto::nonce::build_nonce layout.
|
||||
let sid = [0xAA, 0xBB, 0xCC, 0xDD];
|
||||
let seq: u32 = 0x00000100;
|
||||
let nonce = build_nonce(&sid, seq, 1); // 1 = Recv direction
|
||||
assert_eq!(&nonce[0..4], &[0xAA, 0xBB, 0xCC, 0xDD]);
|
||||
assert_eq!(&nonce[4..8], &[0x00, 0x00, 0x01, 0x00]);
|
||||
assert_eq!(nonce[8], 1);
|
||||
assert_eq!(&nonce[9..12], &[0, 0, 0]);
|
||||
}
|
||||
}
|
||||
@@ -10,6 +10,10 @@
|
||||
.container { text-align: center; max-width: 420px; padding: 2rem; }
|
||||
h1 { font-size: 1.5rem; margin-bottom: 0.5rem; color: #00d4ff; }
|
||||
.subtitle { color: #888; font-size: 0.85rem; margin-bottom: 1.5rem; }
|
||||
.variant-badge { display: inline-block; background: #2a2a4a; border: 1px solid #444; color: #00d4ff; font-size: 0.65rem; padding: 0.15rem 0.5rem; border-radius: 4px; margin-left: 0.4rem; vertical-align: middle; font-family: monospace; letter-spacing: 0.05em; }
|
||||
.variant-selector { margin-bottom: 1.2rem; display: flex; gap: 0.8rem; justify-content: center; flex-wrap: wrap; }
|
||||
.variant-selector label { font-size: 0.75rem; color: #888; cursor: pointer; display: flex; align-items: center; gap: 0.25rem; }
|
||||
.variant-selector input[type="radio"] { accent-color: #00d4ff; }
|
||||
.room-input { margin-bottom: 1.5rem; }
|
||||
.room-input input { background: #2a2a4a; border: 1px solid #444; color: #e0e0e0; padding: 0.6rem 1rem; font-size: 1rem; border-radius: 8px; width: 200px; text-align: center; }
|
||||
.room-input input:focus { outline: none; border-color: #00d4ff; }
|
||||
@@ -31,15 +35,22 @@
|
||||
</head>
|
||||
<body>
|
||||
<div class="container">
|
||||
<h1>WarzonePhone</h1>
|
||||
<h1>WarzonePhone <span class="variant-badge" id="variantBadge">PURE</span></h1>
|
||||
<p class="subtitle">Lossy VoIP Protocol</p>
|
||||
|
||||
<div class="variant-selector">
|
||||
<label><input type="radio" name="variant" value="pure"> Pure JS</label>
|
||||
<label><input type="radio" name="variant" value="hybrid"> Hybrid</label>
|
||||
<label><input type="radio" name="variant" value="full"> Full WASM</label>
|
||||
</div>
|
||||
|
||||
<div class="room-input">
|
||||
<label for="room">Room</label>
|
||||
<input type="text" id="room" placeholder="enter room name" value="">
|
||||
</div>
|
||||
<button id="callBtn" onclick="toggleCall()">Connect</button>
|
||||
<button id="callBtn">Connect</button>
|
||||
<div class="controls" id="controls" style="display:none;">
|
||||
<label><input type="checkbox" id="pttMode" onchange="togglePTT()"> Radio mode (push-to-talk)</label>
|
||||
<label><input type="checkbox" id="pttMode"> Radio mode (push-to-talk)</label>
|
||||
</div>
|
||||
<button id="pttBtn">Hold to Talk</button>
|
||||
<div class="level"><div class="level-bar" id="levelBar"></div></div>
|
||||
@@ -47,302 +58,158 @@
|
||||
<div class="stats" id="stats"></div>
|
||||
</div>
|
||||
|
||||
<script src="js/wzp-core.js"></script>
|
||||
<script>
|
||||
const SAMPLE_RATE = 48000;
|
||||
const FRAME_SIZE = 960;
|
||||
|
||||
let ws = null;
|
||||
let audioCtx = null;
|
||||
let mediaStream = null;
|
||||
let captureNode = null;
|
||||
let playbackNode = null;
|
||||
let active = false;
|
||||
let transmitting = true; // in open-mic mode, always transmitting
|
||||
let pttMode = false;
|
||||
let framesSent = 0;
|
||||
let framesRecv = 0;
|
||||
let startTime = 0;
|
||||
let statsInterval = null;
|
||||
|
||||
// Use room from URL path or input field
|
||||
function getRoom() {
|
||||
const path = location.pathname.replace(/^\//, '').replace(/\/$/, '');
|
||||
if (path && path !== 'index.html') return path;
|
||||
const hash = location.hash.replace('#', '');
|
||||
if (hash) return hash;
|
||||
return document.getElementById('room').value.trim() || 'default';
|
||||
}
|
||||
|
||||
// Pre-fill room input from URL on page load
|
||||
// ---------------------------------------------------------------------------
|
||||
// Load the selected variant script dynamically
|
||||
// ---------------------------------------------------------------------------
|
||||
(function() {
|
||||
const path = location.pathname.replace(/^\//, '').replace(/\/$/, '');
|
||||
if (path && path !== 'index.html') {
|
||||
document.getElementById('room').value = path;
|
||||
}
|
||||
var variant = WZPCore.detectVariant();
|
||||
var scriptMap = {
|
||||
pure: 'js/wzp-pure.js',
|
||||
hybrid: 'js/wzp-hybrid.js',
|
||||
full: 'js/wzp-full.js',
|
||||
'ws': 'js/wzp-ws.js',
|
||||
'ws-fec': 'js/wzp-ws-fec.js',
|
||||
'ws-full': 'js/wzp-ws-full.js',
|
||||
};
|
||||
var src = scriptMap[variant] || scriptMap.pure;
|
||||
var s = document.createElement('script');
|
||||
s.src = src;
|
||||
s.onload = function() { wzpBoot(); };
|
||||
s.onerror = function() {
|
||||
WZPCore.updateStatus('Failed to load variant: ' + variant);
|
||||
};
|
||||
document.body.appendChild(s);
|
||||
})();
|
||||
|
||||
function setStatus(msg) { document.getElementById('status').textContent = msg; }
|
||||
function setStats(msg) { document.getElementById('stats').textContent = msg; }
|
||||
// ---------------------------------------------------------------------------
|
||||
// Boot: wire UI to the loaded client variant
|
||||
// ---------------------------------------------------------------------------
|
||||
function wzpBoot() {
|
||||
var client = null;
|
||||
var capture = null;
|
||||
var playback = null;
|
||||
var transmitting = true;
|
||||
|
||||
function toggleCall() {
|
||||
if (active) stopCall();
|
||||
else startCall();
|
||||
}
|
||||
var ui = WZPCore.initUI({
|
||||
onConnect: function(room) {
|
||||
doConnect(room);
|
||||
},
|
||||
onDisconnect: function() {
|
||||
doDisconnect();
|
||||
},
|
||||
onTransmit: function(tx) {
|
||||
transmitting = tx;
|
||||
},
|
||||
});
|
||||
|
||||
async function startCall() {
|
||||
const btn = document.getElementById('callBtn');
|
||||
const room = getRoom();
|
||||
if (!room) { setStatus('Enter a room name'); return; }
|
||||
async function doConnect(room) {
|
||||
WZPCore.updateStatus('Requesting microphone...');
|
||||
|
||||
btn.disabled = true;
|
||||
setStatus('Requesting microphone...');
|
||||
|
||||
try {
|
||||
mediaStream = await navigator.mediaDevices.getUserMedia({
|
||||
audio: { sampleRate: SAMPLE_RATE, channelCount: 1, echoCancellation: true, noiseSuppression: true }
|
||||
});
|
||||
} catch(e) {
|
||||
setStatus('Mic access denied: ' + e.message);
|
||||
btn.disabled = false;
|
||||
return;
|
||||
}
|
||||
|
||||
audioCtx = new AudioContext({ sampleRate: SAMPLE_RATE });
|
||||
|
||||
// Connect WebSocket with room name
|
||||
const proto = location.protocol === 'https:' ? 'wss:' : 'ws:';
|
||||
const wsUrl = proto + '//' + location.host + '/ws/' + encodeURIComponent(room);
|
||||
setStatus('Connecting to room: ' + room + '...');
|
||||
|
||||
ws = new WebSocket(wsUrl);
|
||||
ws.binaryType = 'arraybuffer';
|
||||
|
||||
ws.onopen = async () => {
|
||||
setStatus('Connected to room: ' + room);
|
||||
btn.textContent = 'Disconnect';
|
||||
btn.classList.add('active');
|
||||
btn.disabled = false;
|
||||
active = true;
|
||||
framesSent = 0;
|
||||
framesRecv = 0;
|
||||
startTime = Date.now();
|
||||
showControls(true);
|
||||
await startAudioCapture();
|
||||
await startAudioPlayback();
|
||||
startStatsUpdate();
|
||||
};
|
||||
|
||||
ws.onmessage = (event) => {
|
||||
const pcmData = new Int16Array(event.data);
|
||||
framesRecv++;
|
||||
playAudio(pcmData);
|
||||
};
|
||||
|
||||
ws.onclose = () => {
|
||||
if (active) {
|
||||
setStatus('Disconnected — reconnecting to ' + room + '...');
|
||||
setTimeout(() => { if (active) { cleanupAudio(); startCall(); } }, 1000);
|
||||
} else {
|
||||
setStatus('Disconnected');
|
||||
var audioCtx;
|
||||
try {
|
||||
audioCtx = await WZPCore.startAudioContext();
|
||||
} catch (e) {
|
||||
WZPCore.updateStatus('Audio init failed: ' + e.message);
|
||||
ui.setConnected(false);
|
||||
return;
|
||||
}
|
||||
};
|
||||
|
||||
ws.onerror = () => {
|
||||
if (active) {
|
||||
setStatus('Error — reconnecting...');
|
||||
setTimeout(() => { if (active) { cleanupAudio(); startCall(); } }, 1000);
|
||||
}
|
||||
};
|
||||
}
|
||||
// Build WebSocket URL
|
||||
var proto = location.protocol === 'https:' ? 'wss:' : 'ws:';
|
||||
var wsUrl = proto + '//' + location.host + '/ws/' + encodeURIComponent(room);
|
||||
|
||||
function stopCall() {
|
||||
active = false;
|
||||
const btn = document.getElementById('callBtn');
|
||||
btn.textContent = 'Connect';
|
||||
btn.classList.remove('active');
|
||||
btn.disabled = false;
|
||||
showControls(false);
|
||||
cleanupAudio();
|
||||
if (ws) { ws.close(); ws = null; }
|
||||
if (statsInterval) { clearInterval(statsInterval); statsInterval = null; }
|
||||
setStatus('');
|
||||
setStats('');
|
||||
}
|
||||
// Create client based on detected variant
|
||||
var variant = WZPCore.detectVariant();
|
||||
var ClientClass = {
|
||||
pure: window.WZPPureClient,
|
||||
hybrid: window.WZPHybridClient,
|
||||
full: window.WZPFullClient,
|
||||
'ws': window.WZPWsClient,
|
||||
'ws-fec': window.WZPWsFecClient,
|
||||
'ws-full': window.WZPWsFullClient,
|
||||
}[variant] || window.WZPPureClient;
|
||||
|
||||
function cleanupAudio() {
|
||||
if (captureNode) { captureNode.disconnect(); captureNode = null; }
|
||||
if (playbackNode) { playbackNode.disconnect(); playbackNode = null; }
|
||||
if (audioCtx) { audioCtx.close(); audioCtx = null; workletLoaded = false; }
|
||||
if (mediaStream) { mediaStream.getTracks().forEach(t => t.stop()); mediaStream = null; }
|
||||
}
|
||||
|
||||
let workletLoaded = false;
|
||||
|
||||
async function loadWorkletModule() {
|
||||
if (workletLoaded) return true;
|
||||
if (typeof AudioWorkletNode === 'undefined' || !audioCtx.audioWorklet) {
|
||||
console.warn('AudioWorklet API not supported in this browser — using ScriptProcessorNode fallback');
|
||||
return false;
|
||||
}
|
||||
try {
|
||||
await audioCtx.audioWorklet.addModule('audio-processor.js');
|
||||
workletLoaded = true;
|
||||
return true;
|
||||
} catch(e) {
|
||||
console.warn('AudioWorklet module failed to load — using ScriptProcessorNode fallback:', e);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
async function startAudioCapture() {
|
||||
const source = audioCtx.createMediaStreamSource(mediaStream);
|
||||
const hasWorklet = await loadWorkletModule();
|
||||
|
||||
if (hasWorklet) {
|
||||
captureNode = new AudioWorkletNode(audioCtx, 'wzp-capture-processor');
|
||||
captureNode.port.onmessage = (e) => {
|
||||
if (!active || !ws || ws.readyState !== WebSocket.OPEN || !transmitting) return;
|
||||
ws.send(e.data);
|
||||
framesSent++;
|
||||
|
||||
// Level meter from the PCM data
|
||||
const pcm = new Int16Array(e.data);
|
||||
let max = 0;
|
||||
for (let i = 0; i < pcm.length; i += 16) max = Math.max(max, Math.abs(pcm[i]));
|
||||
document.getElementById('levelBar').style.width = (max / 32768 * 100) + '%';
|
||||
var clientOpts = {
|
||||
wsUrl: wsUrl,
|
||||
room: room,
|
||||
onAudio: function(pcm) {
|
||||
if (playback) playback.play(pcm);
|
||||
},
|
||||
onStatus: function(msg) {
|
||||
WZPCore.updateStatus(msg);
|
||||
},
|
||||
onStats: function(stats) {
|
||||
WZPCore.updateStats(stats);
|
||||
},
|
||||
};
|
||||
source.connect(captureNode);
|
||||
captureNode.connect(audioCtx.destination); // needed to keep worklet alive
|
||||
} else {
|
||||
// Fallback to ScriptProcessorNode (deprecated but widely supported)
|
||||
console.warn('Capture: using ScriptProcessorNode fallback');
|
||||
captureNode = audioCtx.createScriptProcessor(4096, 1, 1);
|
||||
let acc = new Float32Array(0);
|
||||
captureNode.onaudioprocess = (ev) => {
|
||||
if (!active || !ws || ws.readyState !== WebSocket.OPEN || !transmitting) return;
|
||||
const input = ev.inputBuffer.getChannelData(0);
|
||||
const n = new Float32Array(acc.length + input.length);
|
||||
n.set(acc); n.set(input, acc.length); acc = n;
|
||||
while (acc.length >= FRAME_SIZE) {
|
||||
const frame = acc.slice(0, FRAME_SIZE); acc = acc.slice(FRAME_SIZE);
|
||||
const pcm = new Int16Array(FRAME_SIZE);
|
||||
for (let i = 0; i < FRAME_SIZE; i++) pcm[i] = Math.max(-32768, Math.min(32767, Math.round(frame[i] * 32767)));
|
||||
let max = 0;
|
||||
for (let i = 0; i < pcm.length; i += 16) max = Math.max(max, Math.abs(pcm[i]));
|
||||
document.getElementById('levelBar').style.width = (max / 32768 * 100) + '%';
|
||||
ws.send(pcm.buffer);
|
||||
framesSent++;
|
||||
|
||||
// Full variant: add WebTransport URL for direct relay connection
|
||||
if (variant === 'full') {
|
||||
clientOpts.url = location.origin.replace('http', 'https');
|
||||
}
|
||||
|
||||
client = new ClientClass(clientOpts);
|
||||
|
||||
// Load WASM for variants that need it
|
||||
if (client.loadWasm) {
|
||||
try {
|
||||
WZPCore.updateStatus('Loading WASM module...');
|
||||
await client.loadWasm();
|
||||
} catch (e) {
|
||||
WZPCore.updateStatus('WASM load failed: ' + e.message);
|
||||
ui.setConnected(false);
|
||||
return;
|
||||
}
|
||||
};
|
||||
source.connect(captureNode);
|
||||
captureNode.connect(audioCtx.destination);
|
||||
}
|
||||
}
|
||||
|
||||
async function startAudioPlayback() {
|
||||
const hasWorklet = await loadWorkletModule();
|
||||
|
||||
if (hasWorklet) {
|
||||
playbackNode = new AudioWorkletNode(audioCtx, 'wzp-playback-processor');
|
||||
playbackNode.connect(audioCtx.destination);
|
||||
} else {
|
||||
console.warn('Playback: using scheduled BufferSource fallback');
|
||||
playbackNode = null; // will use createBufferSource fallback in playAudio()
|
||||
}
|
||||
}
|
||||
|
||||
let nextPlayTime = 0;
|
||||
|
||||
function playAudio(pcmInt16) {
|
||||
if (!audioCtx) return;
|
||||
|
||||
if (playbackNode && playbackNode.port) {
|
||||
// AudioWorklet path — send Int16 PCM directly to the worklet for conversion
|
||||
playbackNode.port.postMessage(pcmInt16.buffer, [pcmInt16.buffer]);
|
||||
} else {
|
||||
// Fallback: scheduled BufferSource (convert Int16 -> Float32 on main thread)
|
||||
const floatData = new Float32Array(pcmInt16.length);
|
||||
for (let i = 0; i < pcmInt16.length; i++) {
|
||||
floatData[i] = pcmInt16[i] / 32768.0;
|
||||
}
|
||||
const buffer = audioCtx.createBuffer(1, floatData.length, SAMPLE_RATE);
|
||||
buffer.getChannelData(0).set(floatData);
|
||||
const source = audioCtx.createBufferSource();
|
||||
source.buffer = buffer;
|
||||
source.connect(audioCtx.destination);
|
||||
const now = audioCtx.currentTime;
|
||||
if (nextPlayTime < now || nextPlayTime > now + 1.0) {
|
||||
nextPlayTime = now + 0.02;
|
||||
|
||||
try {
|
||||
await client.connect();
|
||||
} catch (e) {
|
||||
WZPCore.updateStatus('Connection failed: ' + e.message);
|
||||
ui.setConnected(false);
|
||||
return;
|
||||
}
|
||||
source.start(nextPlayTime);
|
||||
nextPlayTime += buffer.duration;
|
||||
|
||||
// Start audio capture and playback
|
||||
try {
|
||||
capture = await WZPCore.connectCapture(audioCtx, function(pcmBuffer) {
|
||||
if (!transmitting) return;
|
||||
var pcm = new Int16Array(pcmBuffer);
|
||||
WZPCore.updateLevel(pcm);
|
||||
if (client) client.sendAudio(pcmBuffer);
|
||||
});
|
||||
|
||||
playback = await WZPCore.connectPlayback(audioCtx);
|
||||
} catch (e) {
|
||||
WZPCore.updateStatus('Audio error: ' + e.message);
|
||||
if (client) client.disconnect();
|
||||
client = null;
|
||||
ui.setConnected(false);
|
||||
return;
|
||||
}
|
||||
|
||||
ui.setConnected(true);
|
||||
}
|
||||
|
||||
function doDisconnect() {
|
||||
if (capture) { capture.stop(); capture = null; }
|
||||
if (playback) { playback.stop(); playback = null; }
|
||||
if (client) { client.disconnect(); client = null; }
|
||||
|
||||
var audioCtx = WZPCore.getAudioContext();
|
||||
if (audioCtx && audioCtx.state !== 'closed') {
|
||||
audioCtx.close();
|
||||
}
|
||||
|
||||
WZPCore.updateStatus('');
|
||||
WZPCore.updateStats('');
|
||||
document.getElementById('levelBar').style.width = '0%';
|
||||
|
||||
ui.setConnected(false);
|
||||
}
|
||||
}
|
||||
|
||||
function startStatsUpdate() {
|
||||
statsInterval = setInterval(() => {
|
||||
if (!active) { clearInterval(statsInterval); return; }
|
||||
const elapsed = ((Date.now() - startTime) / 1000).toFixed(1);
|
||||
setStats(elapsed + 's | sent: ' + framesSent + ' | recv: ' + framesRecv);
|
||||
}, 1000);
|
||||
}
|
||||
|
||||
// --- Push-to-talk ---
|
||||
|
||||
function togglePTT() {
|
||||
pttMode = document.getElementById('pttMode').checked;
|
||||
const btn = document.getElementById('pttBtn');
|
||||
if (pttMode) {
|
||||
transmitting = false;
|
||||
btn.style.display = 'block';
|
||||
} else {
|
||||
transmitting = true;
|
||||
btn.style.display = 'none';
|
||||
}
|
||||
}
|
||||
|
||||
// PTT button — hold to talk (mouse + touch)
|
||||
document.getElementById('pttBtn').addEventListener('mousedown', () => { startTransmit(); });
|
||||
document.getElementById('pttBtn').addEventListener('mouseup', () => { stopTransmit(); });
|
||||
document.getElementById('pttBtn').addEventListener('mouseleave', () => { stopTransmit(); });
|
||||
document.getElementById('pttBtn').addEventListener('touchstart', (e) => { e.preventDefault(); startTransmit(); });
|
||||
document.getElementById('pttBtn').addEventListener('touchend', (e) => { e.preventDefault(); stopTransmit(); });
|
||||
|
||||
// Spacebar PTT
|
||||
document.addEventListener('keydown', (e) => { if (pttMode && active && e.code === 'Space' && !e.repeat) { e.preventDefault(); startTransmit(); } });
|
||||
document.addEventListener('keyup', (e) => { if (pttMode && active && e.code === 'Space') { e.preventDefault(); stopTransmit(); } });
|
||||
|
||||
function startTransmit() {
|
||||
if (!pttMode || !active) return;
|
||||
transmitting = true;
|
||||
document.getElementById('pttBtn').classList.add('transmitting');
|
||||
document.getElementById('pttBtn').textContent = 'Transmitting...';
|
||||
}
|
||||
|
||||
function stopTransmit() {
|
||||
if (!pttMode) return;
|
||||
transmitting = false;
|
||||
document.getElementById('pttBtn').classList.remove('transmitting');
|
||||
document.getElementById('pttBtn').textContent = 'Hold to Talk';
|
||||
}
|
||||
|
||||
// Show controls when connected
|
||||
function showControls(show) {
|
||||
document.getElementById('controls').style.display = show ? 'flex' : 'none';
|
||||
if (!show) {
|
||||
document.getElementById('pttBtn').style.display = 'none';
|
||||
pttMode = false;
|
||||
transmitting = true;
|
||||
}
|
||||
}
|
||||
|
||||
// Set room from URL on load
|
||||
window.addEventListener('load', () => {
|
||||
const room = getRoom();
|
||||
if (room && room !== 'default') {
|
||||
document.getElementById('room').value = room;
|
||||
}
|
||||
});
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
||||
|
||||
379
crates/wzp-web/static/js/wzp-core.js
Normal file
379
crates/wzp-web/static/js/wzp-core.js
Normal file
@@ -0,0 +1,379 @@
|
||||
// WarzonePhone — Shared UI logic for all client variants.
|
||||
// Provides: audio context management, mic capture, playback, UI wiring.
|
||||
|
||||
'use strict';
|
||||
|
||||
const WZP_SAMPLE_RATE = 48000;
|
||||
const WZP_FRAME_SIZE = 960; // 20ms @ 48kHz
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Variant detection
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
function wzpDetectVariant() {
|
||||
const params = new URLSearchParams(location.search);
|
||||
const v = (params.get('variant') || 'pure').toLowerCase();
|
||||
const valid = ['pure', 'hybrid', 'full', 'ws', 'ws-fec', 'ws-full'];
|
||||
if (valid.includes(v)) return v;
|
||||
return 'pure';
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Room helpers
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
function wzpGetRoom() {
|
||||
const path = location.pathname.replace(/^\//, '').replace(/\/$/, '');
|
||||
if (path && path !== 'index.html') return path;
|
||||
const hash = location.hash.replace('#', '');
|
||||
if (hash) return hash;
|
||||
const el = document.getElementById('room');
|
||||
return (el && el.value.trim()) || 'default';
|
||||
}
|
||||
|
||||
function wzpPrefillRoom() {
|
||||
const path = location.pathname.replace(/^\//, '').replace(/\/$/, '');
|
||||
if (path && path !== 'index.html') {
|
||||
const el = document.getElementById('room');
|
||||
if (el) el.value = path;
|
||||
}
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Status / stats helpers
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
function wzpUpdateStatus(msg) {
|
||||
const el = document.getElementById('status');
|
||||
if (el) el.textContent = msg;
|
||||
}
|
||||
|
||||
function wzpUpdateStats(stats) {
|
||||
const el = document.getElementById('stats');
|
||||
if (!el) return;
|
||||
if (typeof stats === 'string') {
|
||||
el.textContent = stats;
|
||||
} else {
|
||||
const parts = [];
|
||||
if (stats.elapsed != null) parts.push(stats.elapsed.toFixed(1) + 's');
|
||||
if (stats.sent != null) parts.push('sent: ' + stats.sent);
|
||||
if (stats.recv != null) parts.push('recv: ' + stats.recv);
|
||||
if (stats.loss != null) parts.push('loss: ' + (stats.loss * 100).toFixed(1) + '%');
|
||||
if (stats.fecRecovered != null && stats.fecRecovered > 0) parts.push('fec: ' + stats.fecRecovered);
|
||||
if (stats.fecReady != null) parts.push(stats.fecReady ? 'FEC:on' : 'FEC:off');
|
||||
el.textContent = parts.join(' | ');
|
||||
}
|
||||
}
|
||||
|
||||
function wzpUpdateLevel(pcmInt16) {
|
||||
const bar = document.getElementById('levelBar');
|
||||
if (!bar) return;
|
||||
let max = 0;
|
||||
for (let i = 0; i < pcmInt16.length; i += 16) {
|
||||
const v = Math.abs(pcmInt16[i]);
|
||||
if (v > max) max = v;
|
||||
}
|
||||
bar.style.width = (max / 32768 * 100) + '%';
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Audio context + worklet
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
// Shared page-wide AudioContext; created lazily by wzpStartAudioContext().
let _wzpAudioCtx = null;
// Whether 'audio-processor.js' has been registered on the current context.
// Reset whenever a fresh AudioContext is created.
let _wzpWorkletLoaded = false;
|
||||
|
||||
// Return the shared AudioContext, creating it at WZP_SAMPLE_RATE on first
// use (or after the previous context was closed).
//
// Fix: when an existing context is reused it may be in the 'suspended'
// state (browser autoplay policy); previously it was returned as-is and
// produced no audio. Resume it best-effort — resume() is a no-op on a
// running context.
async function wzpStartAudioContext() {
  if (_wzpAudioCtx && _wzpAudioCtx.state !== 'closed') {
    if (_wzpAudioCtx.state === 'suspended') {
      try {
        await _wzpAudioCtx.resume();
      } catch (_) {
        // Best-effort: caller still gets the context back.
      }
    }
    return _wzpAudioCtx;
  }
  _wzpAudioCtx = new AudioContext({ sampleRate: WZP_SAMPLE_RATE });
  // New context — the worklet module must be registered again.
  _wzpWorkletLoaded = false;
  return _wzpAudioCtx;
}
|
||||
|
||||
// Accessor for the shared AudioContext. May return null (never started)
// or a closed context — callers should check state before using it.
function wzpGetAudioContext() {
  return _wzpAudioCtx;
}
|
||||
|
||||
// Register 'audio-processor.js' as an AudioWorklet module on the given
// context (idempotent). Returns true when the worklet path is usable;
// false signals callers to use the ScriptProcessorNode fallback.
async function _wzpLoadWorklet(audioCtx) {
  if (_wzpWorkletLoaded) return true;

  const supported =
    typeof AudioWorkletNode !== 'undefined' && !!audioCtx.audioWorklet;
  if (!supported) {
    console.warn('[wzp-core] AudioWorklet not supported, will use fallback');
    return false;
  }

  try {
    await audioCtx.audioWorklet.addModule('audio-processor.js');
  } catch (e) {
    console.warn('[wzp-core] AudioWorklet load failed:', e);
    return false;
  }
  _wzpWorkletLoaded = true;
  return true;
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Mic capture — returns { node, stop() }
|
||||
// onFrame(ArrayBuffer) called for each 960-sample Int16 PCM frame
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
// Capture microphone audio and deliver fixed-size Int16 PCM frames.
//
// @param {AudioContext} audioCtx  context the capture graph is attached to
// @param {Function} onFrame       called with an ArrayBuffer of Int16 PCM
//                                 per frame (WZP_FRAME_SIZE samples)
// @returns {Promise<{node, stop}>} stop() disconnects the node and stops
//                                  all mic tracks
// @throws {Error} when getUserMedia is denied or fails
async function wzpConnectCapture(audioCtx, onFrame) {
  let mediaStream;
  try {
    // Mono capture with browser echo-cancellation / noise-suppression.
    // NOTE(review): sampleRate here is a constraint hint; browsers may
    // ignore it — frames are produced at the context's actual rate.
    mediaStream = await navigator.mediaDevices.getUserMedia({
      audio: {
        sampleRate: WZP_SAMPLE_RATE,
        channelCount: 1,
        echoCancellation: true,
        noiseSuppression: true,
      },
    });
  } catch (e) {
    throw new Error('Mic access denied: ' + e.message);
  }

  const source = audioCtx.createMediaStreamSource(mediaStream);
  const hasWorklet = await _wzpLoadWorklet(audioCtx);
  let captureNode;

  if (hasWorklet) {
    // Worklet path: framing happens inside 'wzp-capture-processor';
    // each port message carries one frame.
    captureNode = new AudioWorkletNode(audioCtx, 'wzp-capture-processor');
    captureNode.port.onmessage = (e) => {
      onFrame(e.data); // ArrayBuffer of Int16 PCM
    };
    source.connect(captureNode);
    captureNode.connect(audioCtx.destination); // keep worklet alive
  } else {
    // ScriptProcessorNode fallback
    captureNode = audioCtx.createScriptProcessor(4096, 1, 1);
    // Accumulate float samples across callbacks, then cut exact frames.
    let acc = new Float32Array(0);
    captureNode.onaudioprocess = (ev) => {
      const input = ev.inputBuffer.getChannelData(0);
      // Append this callback's samples to the accumulator.
      const n = new Float32Array(acc.length + input.length);
      n.set(acc);
      n.set(input, acc.length);
      acc = n;
      // Emit every complete frame, converting float [-1,1] to Int16
      // with clamping to avoid overflow on full-scale samples.
      while (acc.length >= WZP_FRAME_SIZE) {
        const frame = acc.slice(0, WZP_FRAME_SIZE);
        acc = acc.slice(WZP_FRAME_SIZE);
        const pcm = new Int16Array(WZP_FRAME_SIZE);
        for (let i = 0; i < WZP_FRAME_SIZE; i++) {
          pcm[i] = Math.max(-32768, Math.min(32767, Math.round(frame[i] * 32767)));
        }
        onFrame(pcm.buffer);
      }
    };
    source.connect(captureNode);
    // ScriptProcessorNode only fires when connected to a destination.
    captureNode.connect(audioCtx.destination);
  }

  return {
    node: captureNode,
    // Tear down the capture graph and release the microphone.
    stop() {
      captureNode.disconnect();
      mediaStream.getTracks().forEach((t) => t.stop());
    },
  };
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Playback — returns { node, play(Int16Array), stop() }
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
// Create a playback sink on the given context.
//
// @param {AudioContext} audioCtx
// @returns {Promise<{node, play(Int16Array), stop()}>}
//
// Prefers the 'wzp-playback-processor' AudioWorklet; otherwise falls back
// to scheduling AudioBufferSourceNodes back-to-back.
async function wzpConnectPlayback(audioCtx) {
  const hasWorklet = await _wzpLoadWorklet(audioCtx);
  let playbackNode;
  // Next scheduled start time for the fallback path (context clock).
  let nextPlayTime = 0;

  if (hasWorklet) {
    playbackNode = new AudioWorkletNode(audioCtx, 'wzp-playback-processor');
    playbackNode.connect(audioCtx.destination);
    return {
      node: playbackNode,
      play(pcmInt16) {
        // Transfer Int16 buffer to worklet
        // (copy the exact view first — the Int16Array may be a window
        // into a larger buffer; the copy is then moved, not cloned).
        const buf = pcmInt16.buffer.slice(
          pcmInt16.byteOffset,
          pcmInt16.byteOffset + pcmInt16.byteLength
        );
        playbackNode.port.postMessage(buf, [buf]);
      },
      stop() {
        playbackNode.disconnect();
      },
    };
  }

  // Fallback: scheduled BufferSource
  return {
    node: null,
    play(pcmInt16) {
      if (!audioCtx || audioCtx.state === 'closed') return;
      // Convert Int16 PCM to float samples in [-1, 1).
      const floatData = new Float32Array(pcmInt16.length);
      for (let i = 0; i < pcmInt16.length; i++) {
        floatData[i] = pcmInt16[i] / 32768.0;
      }
      const buffer = audioCtx.createBuffer(1, floatData.length, WZP_SAMPLE_RATE);
      buffer.getChannelData(0).set(floatData);
      const source = audioCtx.createBufferSource();
      source.buffer = buffer;
      source.connect(audioCtx.destination);
      // Re-anchor the schedule if it drifted into the past or more than
      // one second ahead; 20 ms of slack avoids an immediate underrun.
      const now = audioCtx.currentTime;
      if (nextPlayTime < now || nextPlayTime > now + 1.0) {
        nextPlayTime = now + 0.02;
      }
      source.start(nextPlayTime);
      nextPlayTime += buffer.duration;
    },
    stop() {
      // nothing to disconnect for fallback
    },
  };
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// UI wiring — call after DOM ready
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
// Wire up the call UI: connect/disconnect button, push-to-talk (PTT)
// checkbox + button + spacebar, and the variant selector.
//
// @param {Object} callbacks { onConnect(room), onDisconnect(),
//                             onTransmit(active) } — all optional.
// @returns {Object} { setConnected(bool), isPTT() }
//
// Fix: `pttBtn` was dereferenced without a null check inside the PTT
// checkbox handler and in startTx/stopTx, while every other use of it in
// this function was guarded — a page with a #pttMode checkbox but no
// #pttBtn element would throw. Guards added; behavior is otherwise
// unchanged.
function wzpInitUI(callbacks) {
  // callbacks: { onConnect(room), onDisconnect() }
  const btn = document.getElementById('callBtn');
  const pttBtn = document.getElementById('pttBtn');
  const pttCheckbox = document.getElementById('pttMode');
  let connected = false;
  let pttMode = false;

  wzpPrefillRoom();

  // Variant badge
  const variant = wzpDetectVariant();
  const badge = document.getElementById('variantBadge');
  if (badge) badge.textContent = variant.toUpperCase();

  // Variant selector radio buttons
  document.querySelectorAll('input[name="variant"]').forEach((radio) => {
    if (radio.value === variant) radio.checked = true;
    radio.addEventListener('change', () => {
      if (radio.checked) {
        const params = new URLSearchParams(location.search);
        params.set('variant', radio.value);
        // Assigning location.search reloads the page with the new variant.
        location.search = params.toString();
      }
    });
  });

  btn.onclick = () => {
    if (connected) {
      connected = false;
      btn.textContent = 'Connect';
      btn.classList.remove('active');
      _showControls(false);
      if (callbacks.onDisconnect) callbacks.onDisconnect();
    } else {
      const room = wzpGetRoom();
      if (!room) {
        wzpUpdateStatus('Enter a room name');
        return;
      }
      connected = true;
      // Disabled until the caller confirms via setConnected().
      btn.disabled = true;
      if (callbacks.onConnect) callbacks.onConnect(room);
    }
  };

  // PTT toggle
  if (pttCheckbox) {
    pttCheckbox.onchange = () => {
      pttMode = pttCheckbox.checked;
      if (pttMode) {
        // Entering PTT mode: mic muted until the button/space is held.
        if (pttBtn) pttBtn.style.display = 'block';
        if (callbacks.onTransmit) callbacks.onTransmit(false);
      } else {
        // Leaving PTT mode: open mic.
        if (pttBtn) pttBtn.style.display = 'none';
        if (callbacks.onTransmit) callbacks.onTransmit(true);
      }
    };
  }

  // PTT button events
  function startTx() {
    if (!pttMode || !connected) return;
    if (pttBtn) {
      pttBtn.classList.add('transmitting');
      pttBtn.textContent = 'Transmitting...';
    }
    if (callbacks.onTransmit) callbacks.onTransmit(true);
  }
  function stopTx() {
    if (!pttMode) return;
    if (pttBtn) {
      pttBtn.classList.remove('transmitting');
      pttBtn.textContent = 'Hold to Talk';
    }
    if (callbacks.onTransmit) callbacks.onTransmit(false);
  }

  if (pttBtn) {
    pttBtn.addEventListener('mousedown', startTx);
    pttBtn.addEventListener('mouseup', stopTx);
    pttBtn.addEventListener('mouseleave', stopTx);
    pttBtn.addEventListener('touchstart', (e) => { e.preventDefault(); startTx(); });
    pttBtn.addEventListener('touchend', (e) => { e.preventDefault(); stopTx(); });
  }

  // Spacebar PTT
  document.addEventListener('keydown', (e) => {
    // !e.repeat: ignore auto-repeat while the key is held.
    if (pttMode && connected && e.code === 'Space' && !e.repeat) {
      e.preventDefault();
      startTx();
    }
  });
  document.addEventListener('keyup', (e) => {
    if (pttMode && connected && e.code === 'Space') {
      e.preventDefault();
      stopTx();
    }
  });

  // Show/hide in-call controls; hiding also resets PTT state.
  function _showControls(show) {
    const controls = document.getElementById('controls');
    if (controls) controls.style.display = show ? 'flex' : 'none';
    if (!show && pttBtn) {
      pttBtn.style.display = 'none';
      pttMode = false;
      if (pttCheckbox) pttCheckbox.checked = false;
    }
  }

  return {
    // Called by the client once the connection attempt resolves.
    setConnected(isConnected) {
      connected = isConnected;
      btn.disabled = false;
      if (isConnected) {
        btn.textContent = 'Disconnect';
        btn.classList.add('active');
        _showControls(true);
      } else {
        btn.textContent = 'Connect';
        btn.classList.remove('active');
        _showControls(false);
      }
    },
    isPTT() {
      return pttMode;
    },
  };
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Exports (global)
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
// Public API surface of wzp-core.js, exposed globally for the variant
// client scripts (pure / hybrid / full).
window.WZPCore = {
  SAMPLE_RATE: WZP_SAMPLE_RATE,
  FRAME_SIZE: WZP_FRAME_SIZE,
  detectVariant: wzpDetectVariant,
  getRoom: wzpGetRoom,
  updateStatus: wzpUpdateStatus,
  updateStats: wzpUpdateStats,
  updateLevel: wzpUpdateLevel,
  startAudioContext: wzpStartAudioContext,
  getAudioContext: wzpGetAudioContext,
  connectCapture: wzpConnectCapture,
  connectPlayback: wzpConnectPlayback,
  initUI: wzpInitUI,
};
|
||||
579
crates/wzp-web/static/js/wzp-full.js
Normal file
579
crates/wzp-web/static/js/wzp-full.js
Normal file
@@ -0,0 +1,579 @@
|
||||
// WarzonePhone — Full WASM + WebTransport client (Variant 3).
|
||||
//
|
||||
// Architecture:
|
||||
// - WebTransport for unreliable datagrams (UDP-like, no head-of-line blocking)
|
||||
// - ChaCha20-Poly1305 encryption via WASM (wzp-wasm WzpCryptoSession)
|
||||
// - RaptorQ FEC via WASM (wzp-wasm WzpFecEncoder/WzpFecDecoder)
|
||||
// - X25519 key exchange via WASM (wzp-wasm WzpKeyExchange)
|
||||
//
|
||||
// NOTE: WebTransport requires the relay to support HTTP/3 (h3-quinn).
|
||||
// The current wzp-relay uses raw QUIC. This variant demonstrates the full
|
||||
// architecture but will need relay-side HTTP/3 support to work end-to-end.
|
||||
// For development / testing, use the hybrid variant (WebSocket + WASM FEC).
|
||||
//
|
||||
// Relies on wzp-core.js for UI and audio helpers.
|
||||
|
||||
'use strict';

// Path to the wasm-bindgen loader module (served under /wasm/).
const WZP_WASM_PATH = (window.__WZP_BASE_URL || '') + '/wasm/wzp_wasm.js';

// 12-byte MediaHeader size (matches wzp-proto MediaHeader::WIRE_SIZE).
const MEDIA_HEADER_SIZE = 12;

// FEC wire header: block_id(1) + symbol_idx(1) + is_repair(1) = 3 bytes.
const FEC_HEADER_SIZE = 3;
|
||||
|
||||
// Full-variant client: WebTransport datagrams + WASM crypto (ChaCha20-
// Poly1305) + WASM FEC (RaptorQ), with a WebSocket raw-PCM fallback when
// WebTransport is unavailable or fails to connect.
class WZPFullClient {
  /**
   * @param {Object} options
   * @param {string} options.url WebTransport URL (https://host:port)
   * @param {string} options.room Room name
   * @param {Function} options.onAudio callback(Int16Array) for playback
   * @param {Function} options.onStatus callback(string) for UI status
   * @param {Function} options.onStats callback(Object) for UI stats
   */
  constructor(options) {
    this.url = options.url;
    this.wsUrl = options.wsUrl; // WS fallback URL
    this.room = options.room;
    this.onAudio = options.onAudio || null;
    this.onStatus = options.onStatus || null;
    this.onStats = options.onStats || null;

    this.wt = null; // WebTransport instance
    this.ws = null; // WebSocket fallback
    this.datagramWriter = null; // WritableStreamDefaultWriter
    this.datagramReader = null; // ReadableStreamDefaultReader
    this.cryptoSession = null; // WzpCryptoSession (WASM)
    this.fecEncoder = null; // WzpFecEncoder (WASM)
    this.fecDecoder = null; // WzpFecDecoder (WASM)
    this.sequence = 0; // u16 media sequence counter (wraps at 0xFFFF)
    this._wasmModule = null;
    this._connected = false;
    this._useWebTransport = false; // true if WT connected, false = WS fallback
    this._startTime = 0; // Date.now() at connect; timestamp base
    this._statsInterval = null;
    this._recvLoopRunning = false; // guards against double recv loops
    this.stats = { sent: 0, recv: 0, fecRecovered: 0, encrypted: 0, decrypted: 0 };
  }

  /**
   * Connect: load WASM, open WebTransport, perform key exchange,
   * initialise FEC, and start the receive loop.
   */
  async connect() {
    if (this._connected) return;

    this._status('Loading WASM module...');

    // 1. Load WASM (FEC + crypto)
    this._wasmModule = await import(WZP_WASM_PATH);
    await this._wasmModule.default();

    // 2. Try WebTransport first, fall back to WebSocket
    let wtSuccess = false;
    if (typeof WebTransport !== 'undefined' && this.url) {
      try {
        this._status('Trying WebTransport...');
        const wtUrl = this.url + '/' + encodeURIComponent(this.room);
        this.wt = new WebTransport(wtUrl);
        // Cap the handshake at 3 s so an unreachable relay doesn't hang.
        await Promise.race([
          this.wt.ready,
          new Promise((_, reject) => setTimeout(() => reject(new Error('timeout')), 3000)),
        ]);
        this.datagramWriter = this.wt.datagrams.writable.getWriter();
        this.datagramReader = this.wt.datagrams.readable.getReader();
        this._status('Performing key exchange...');
        await this._performKeyExchange();
        wtSuccess = true;
        this._useWebTransport = true;
      } catch (e) {
        console.warn('[wzp-full] WebTransport failed, falling back to WebSocket:', e.message);
        if (this.wt) { try { this.wt.close(); } catch (_) {} }
        this.wt = null;
        this.datagramWriter = null;
        this.datagramReader = null;
      }
    }

    if (!wtSuccess) {
      // WebSocket fallback (same as hybrid — WASM loaded but uses WS transport)
      this._useWebTransport = false;
      await this._connectWebSocket();
    }

    // 3. Initialise FEC (5 source symbols per block, 256-byte symbols)
    this.fecEncoder = new this._wasmModule.WzpFecEncoder(5, 256);
    this.fecDecoder = new this._wasmModule.WzpFecDecoder(5, 256);

    this._connected = true;
    this.sequence = 0;
    this.stats = { sent: 0, recv: 0, fecRecovered: 0, encrypted: 0, decrypted: 0 };
    this._startTime = Date.now();
    this._startStatsTimer();

    // 4. Start receive loop (WebTransport only — WS uses onmessage)
    if (this._useWebTransport) {
      this._recvLoop();
      this._status('Connected to room: ' + this.room + ' (WebTransport, encrypted, FEC active)');
    } else {
      this._status('Connected to room: ' + this.room + ' (WebSocket fallback, WASM FEC loaded)');
    }
  }

  /**
   * WebSocket fallback connection (used when WebTransport unavailable).
   * Resolves on open; rejects if the socket errors before connecting.
   */
  async _connectWebSocket() {
    return new Promise((resolve, reject) => {
      this._status('Connecting via WebSocket (fallback)...');
      this.ws = new WebSocket(this.wsUrl);
      this.ws.binaryType = 'arraybuffer';

      this.ws.onopen = () => {
        this._status('WebSocket connected to room: ' + this.room);
        resolve();
      };

      this.ws.onmessage = (event) => {
        // WS path carries raw Int16 PCM — no crypto/FEC framing.
        if (!(event.data instanceof ArrayBuffer)) return;
        const pcm = new Int16Array(event.data);
        this.stats.recv++;
        if (this.onAudio) this.onAudio(pcm);
      };

      this.ws.onclose = () => {
        if (this._connected) {
          this._cleanup();
          this._status('Disconnected');
        }
      };

      this.ws.onerror = () => {
        // Only reject pre-connect errors; post-connect errors are
        // followed by onclose.
        if (!this._connected) {
          this._cleanup();
          reject(new Error('WebSocket connection failed'));
        }
      };
    });
  }

  /**
   * Disconnect and clean up all resources.
   */
  disconnect() {
    this._connected = false;
    if (this.wt) {
      try { this.wt.close(); } catch (_) { /* ignore */ }
      this.wt = null;
    }
    if (this.ws) {
      try { this.ws.close(); } catch (_) { /* ignore */ }
      this.ws = null;
    }
    this._cleanup();
  }

  /**
   * Send a PCM audio frame.
   *
   * Pipeline: PCM -> FEC encode -> encrypt -> datagram send.
   *
   * @param {ArrayBuffer} pcmBuffer 960-sample Int16 PCM (1920 bytes)
   */
  async sendAudio(pcmBuffer) {
    if (!this._connected) return;

    // WebSocket fallback: send raw PCM like pure/hybrid
    if (!this._useWebTransport) {
      if (this.ws && this.ws.readyState === WebSocket.OPEN) {
        this.ws.send(pcmBuffer);
        this.sequence++;
        this.stats.sent++;
      }
      return;
    }

    if (!this.datagramWriter || !this.cryptoSession) return;

    const pcmBytes = new Uint8Array(pcmBuffer);

    // Build a minimal 12-byte MediaHeader for AAD.
    const header = this._buildMediaHeader(this.sequence);

    // FEC encode: feed the frame; when a block completes we get wire packets.
    const fecOutput = this.fecEncoder.add_symbol(pcmBytes);

    if (fecOutput) {
      // FEC block completed — send all packets (source + repair).
      const packetSize = FEC_HEADER_SIZE + 256; // header + symbol_size
      for (let offset = 0; offset + packetSize <= fecOutput.length; offset += packetSize) {
        const fecPacket = fecOutput.slice(offset, offset + packetSize);

        // Encrypt: header bytes as AAD, FEC packet as plaintext.
        const ciphertext = this.cryptoSession.encrypt(header, fecPacket);
        this.stats.encrypted++;

        // Build wire datagram: header (12) + ciphertext
        const datagram = new Uint8Array(MEDIA_HEADER_SIZE + ciphertext.length);
        datagram.set(header, 0);
        datagram.set(ciphertext, MEDIA_HEADER_SIZE);

        try {
          await this.datagramWriter.write(datagram);
        } catch (e) {
          // Datagram send can fail if the transport is closing.
          if (this._connected) {
            console.warn('[wzp-full] datagram write failed:', e);
          }
          return;
        }
        this.stats.sent++;
      }
    }
    // If FEC block not yet complete, accumulate (no packets sent yet).

    this.sequence = (this.sequence + 1) & 0xFFFF;
  }

  /**
   * Test crypto + FEC roundtrip entirely in WASM (no network).
   * Useful for verifying the WASM module works correctly in the browser.
   *
   * @returns {Object} test results
   */
  testCryptoFec() {
    if (!this._wasmModule) {
      return { success: false, error: 'WASM module not loaded' };
    }

    const t0 = performance.now();
    const wasm = this._wasmModule;

    // Key exchange
    const alice = new wasm.WzpKeyExchange();
    const bob = new wasm.WzpKeyExchange();
    const aliceSecret = alice.derive_shared_secret(bob.public_key());
    const bobSecret = bob.derive_shared_secret(alice.public_key());

    // Verify secrets match (byte-wise compare)
    let secretsMatch = aliceSecret.length === bobSecret.length;
    if (secretsMatch) {
      for (let i = 0; i < aliceSecret.length; i++) {
        if (aliceSecret[i] !== bobSecret[i]) { secretsMatch = false; break; }
      }
    }

    // Encrypt/decrypt
    const aliceSession = new wasm.WzpCryptoSession(aliceSecret);
    const bobSession = new wasm.WzpCryptoSession(bobSecret);

    const header = new Uint8Array([0xDE, 0xAD, 0xBE, 0xEF]);
    const plaintext = new TextEncoder().encode('hello warzone from full variant');

    const ciphertext = aliceSession.encrypt(header, plaintext);
    const decrypted = bobSession.decrypt(header, ciphertext);

    let cryptoOk = decrypted.length === plaintext.length;
    if (cryptoOk) {
      for (let i = 0; i < plaintext.length; i++) {
        if (decrypted[i] !== plaintext[i]) { cryptoOk = false; break; }
      }
    }

    // FEC test (same as hybrid testFec)
    const encoder = new wasm.WzpFecEncoder(5, 256);
    const decoder = new wasm.WzpFecDecoder(5, 256);

    // Deterministic pseudo-random test frames.
    const frames = [];
    for (let i = 0; i < 5; i++) {
      const frame = new Uint8Array(100);
      for (let j = 0; j < 100; j++) frame[j] = ((i * 37 + 7) + j) & 0xFF;
      frames.push(frame);
    }

    let wireData = null;
    for (const frame of frames) {
      const result = encoder.add_symbol(frame);
      if (result) wireData = result;
    }

    // Split the encoder output into individual wire packets.
    const PACKET_SIZE = FEC_HEADER_SIZE + 256;
    const packets = [];
    if (wireData) {
      for (let off = 0; off + PACKET_SIZE <= wireData.length; off += PACKET_SIZE) {
        packets.push({
          blockId: wireData[off],
          symbolIdx: wireData[off + 1],
          isRepair: wireData[off + 2] !== 0,
          data: wireData.slice(off + FEC_HEADER_SIZE, off + PACKET_SIZE),
        });
      }
    }

    // Drop 2 packets, try to recover
    let fecDecoded = null;
    for (let i = 0; i < packets.length; i++) {
      if (i === 1 || i === 3) continue; // simulate loss
      const pkt = packets[i];
      const result = decoder.add_symbol(pkt.blockId, pkt.symbolIdx, pkt.isRepair, pkt.data);
      if (result) { fecDecoded = result; break; }
    }

    // Compare decoded output against the concatenated source frames.
    let fecOk = false;
    if (fecDecoded) {
      const expected = new Uint8Array(5 * 100);
      let off = 0;
      for (const f of frames) { expected.set(f, off); off += f.length; }
      fecOk = fecDecoded.length === expected.length;
      if (fecOk) {
        for (let i = 0; i < expected.length; i++) {
          if (fecDecoded[i] !== expected[i]) { fecOk = false; break; }
        }
      }
    }

    // Cleanup WASM objects (wasm-bindgen memory is not GC'd automatically)
    alice.free();
    bob.free();
    aliceSession.free();
    bobSession.free();
    encoder.free();
    decoder.free();

    const elapsed = performance.now() - t0;

    return {
      success: secretsMatch && cryptoOk && fecOk,
      secretsMatch,
      cryptoOk,
      fecOk,
      fecPacketsTotal: packets.length,
      fecDropped: 2,
      elapsed: elapsed.toFixed(2) + 'ms',
    };
  }

  // =========================================================================
  // Internal
  // =========================================================================

  /**
   * Perform X25519 key exchange over a WebTransport bidirectional stream.
   *
   * Protocol (simplified DH, not the full SignalMessage handshake):
   * 1. Open a bidirectional stream.
   * 2. Send our 32-byte X25519 public key.
   * 3. Read the peer's 32-byte public key.
   * 4. Derive shared secret via HKDF.
   * 5. Create WzpCryptoSession from the shared secret.
   *
   * In production this would use the full SignalMessage protocol over the
   * bidirectional stream (offer/answer/encrypted-session). For now we do
   * a simple DH swap to prove the architecture.
   *
   * @throws {Error} if the stream closes before 32 peer-key bytes arrive
   */
  async _performKeyExchange() {
    const wasm = this._wasmModule;
    const kx = new wasm.WzpKeyExchange();
    const ourPub = kx.public_key(); // Uint8Array(32)

    // Open a bidirectional stream for signaling.
    const stream = await this.wt.createBidirectionalStream();
    const writer = stream.writable.getWriter();
    const reader = stream.readable.getReader();

    // Send our public key.
    await writer.write(new Uint8Array(ourPub));

    // Read peer's public key (exactly 32 bytes).
    // WebTransport streams are byte-oriented; we may get it in chunks.
    let peerPub = new Uint8Array(0);
    while (peerPub.length < 32) {
      const { value, done } = await reader.read();
      if (done) {
        throw new Error('Key exchange stream closed before receiving peer public key');
      }
      const combined = new Uint8Array(peerPub.length + value.length);
      combined.set(peerPub, 0);
      combined.set(value, peerPub.length);
      peerPub = combined;
    }
    peerPub = peerPub.slice(0, 32);

    // Derive shared secret and create crypto session.
    const secret = kx.derive_shared_secret(peerPub);
    this.cryptoSession = new wasm.WzpCryptoSession(secret);

    // Close the signaling stream (key exchange complete).
    try {
      writer.releaseLock();
      reader.releaseLock();
      await stream.writable.close();
    } catch (_) {
      // Best-effort close.
    }

    kx.free();
  }

  /**
   * Receive loop: read datagrams, decrypt, FEC decode, play audio.
   *
   * Runs until the transport closes or disconnect() is called.
   */
  async _recvLoop() {
    if (this._recvLoopRunning) return;
    this._recvLoopRunning = true;

    try {
      while (this._connected && this.datagramReader) {
        const { value, done } = await this.datagramReader.read();
        if (done) break;

        this.stats.recv++;

        // value is a Uint8Array datagram: header(12) + ciphertext
        if (value.length <= MEDIA_HEADER_SIZE) continue; // too short

        const headerAad = value.slice(0, MEDIA_HEADER_SIZE);
        const ciphertext = value.slice(MEDIA_HEADER_SIZE);

        // Decrypt
        let fecPacket;
        try {
          fecPacket = this.cryptoSession.decrypt(headerAad, ciphertext);
          this.stats.decrypted++;
        } catch (e) {
          // Decryption failure — corrupted or out-of-order packet.
          // In a real implementation we'd handle sequence number gaps.
          console.warn('[wzp-full] decrypt failed:', e);
          continue;
        }

        // FEC decode: parse the FEC wire header and feed to decoder.
        if (fecPacket.length < FEC_HEADER_SIZE) continue;
        const blockId = fecPacket[0];
        const symbolIdx = fecPacket[1];
        const isRepair = fecPacket[2] !== 0;
        const symbolData = fecPacket.slice(FEC_HEADER_SIZE);

        const decoded = this.fecDecoder.add_symbol(blockId, symbolIdx, isRepair, symbolData);
        if (decoded) {
          // NOTE(review): this counts every decoded block, not only
          // blocks that needed repair symbols — verify intent.
          this.stats.fecRecovered++;
          // decoded is concatenated original PCM frames.
          // Each frame is 1920 bytes (960 Int16 samples @ 48kHz mono).
          const FRAME_BYTES = 1920;
          for (let off = 0; off + FRAME_BYTES <= decoded.length; off += FRAME_BYTES) {
            const pcmSlice = decoded.slice(off, off + FRAME_BYTES);
            const pcm = new Int16Array(pcmSlice.buffer, pcmSlice.byteOffset, pcmSlice.byteLength / 2);
            if (this.onAudio) {
              this.onAudio(pcm);
            }
          }
        }
      }
    } catch (e) {
      if (this._connected) {
        console.warn('[wzp-full] recv loop error:', e);
      }
    } finally {
      this._recvLoopRunning = false;
    }
  }

  /**
   * Build a minimal 12-byte MediaHeader for use as AAD.
   *
   * Wire layout (from wzp-proto::packet::MediaHeader):
   * Byte 0: V(1)|T(1)|CodecID(4)|Q(1)|FecRatioHi(1)
   * Byte 1: FecRatioLo(6)|unused(2)
   * Bytes 2-3: Sequence number (BE u16)
   * Bytes 4-7: Timestamp ms (BE u32)
   * Byte 8: FEC block ID
   * Byte 9: FEC symbol index
   * Byte 10: Reserved
   * Byte 11: CSRC count
   *
   * @param {number} seq Sequence number (u16)
   * @returns {Uint8Array} 12-byte header
   */
  _buildMediaHeader(seq) {
    const buf = new Uint8Array(MEDIA_HEADER_SIZE);
    // Byte 0: version=0, is_repair=0, codec=0 (Opus), quality_report=0, fec_ratio_hi=0
    buf[0] = 0x00;
    // Byte 1: fec_ratio_lo=0
    buf[1] = 0x00;
    // Bytes 2-3: sequence (BE u16)
    buf[2] = (seq >> 8) & 0xFF;
    buf[3] = seq & 0xFF;
    // Bytes 4-7: timestamp (BE u32) — ms since session start
    const ts = Date.now() - this._startTime;
    buf[4] = (ts >> 24) & 0xFF;
    buf[5] = (ts >> 16) & 0xFF;
    buf[6] = (ts >> 8) & 0xFF;
    buf[7] = ts & 0xFF;
    // Bytes 8-11: FEC block/symbol/reserved/csrc — filled by FEC layer in production
    return buf;
  }

  // Push a stats snapshot to the onStats callback once per second.
  _startStatsTimer() {
    this._stopStatsTimer();
    this._statsInterval = setInterval(() => {
      if (!this._connected) {
        this._stopStatsTimer();
        return;
      }
      const elapsed = (Date.now() - this._startTime) / 1000;
      // Approximate loss from sent/recv asymmetry (clamped at 0).
      const loss = this.stats.sent > 0
        ? Math.max(0, 1 - this.stats.recv / this.stats.sent)
        : 0;
      if (this.onStats) {
        this.onStats({
          sent: this.stats.sent,
          recv: this.stats.recv,
          loss,
          elapsed,
          encrypted: this.stats.encrypted,
          decrypted: this.stats.decrypted,
          fecRecovered: this.stats.fecRecovered,
        });
      }
    }, 1000);
  }

  // Stop the periodic stats timer (idempotent).
  _stopStatsTimer() {
    if (this._statsInterval) {
      clearInterval(this._statsInterval);
      this._statsInterval = null;
    }
  }

  // Forward a status string to the UI callback, if any.
  _status(msg) {
    if (this.onStatus) this.onStatus(msg);
  }

  // Release timers, stream handles, and WASM-owned objects.
  _cleanup() {
    this._connected = false;
    this._stopStatsTimer();
    this.datagramWriter = null;
    this.datagramReader = null;
    if (this.cryptoSession) {
      try { this.cryptoSession.free(); } catch (_) { /* ignore */ }
      this.cryptoSession = null;
    }
    if (this.fecEncoder) {
      try { this.fecEncoder.free(); } catch (_) { /* ignore */ }
      this.fecEncoder = null;
    }
    if (this.fecDecoder) {
      try { this.fecDecoder.free(); } catch (_) { /* ignore */ }
      this.fecDecoder = null;
    }
  }
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Export
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
// Expose the full-variant client globally for page scripts.
window.WZPFullClient = WZPFullClient;
|
||||
345
crates/wzp-web/static/js/wzp-hybrid.js
Normal file
345
crates/wzp-web/static/js/wzp-hybrid.js
Normal file
@@ -0,0 +1,345 @@
|
||||
// WarzonePhone — Hybrid JS + WASM client (Variant 2).
|
||||
// WebSocket transport, raw PCM, WASM FEC (RaptorQ) ready for WebTransport.
|
||||
// Relies on wzp-core.js for UI and audio helpers.
|
||||
//
|
||||
// The WASM FEC module is loaded and exposed but not used on the wire yet,
|
||||
// because WebSocket is TCP (no packet loss). FEC will activate when
|
||||
// WebTransport (UDP) is added. A testFec() method demonstrates FEC
|
||||
// encode -> simulate loss -> decode in the browser.
|
||||
|
||||
'use strict';

// WASM module path (served from /wasm/ by the wzp-web bridge).
const WZP_WASM_PATH = (window.__WZP_BASE_URL || '') + '/wasm/wzp_wasm.js';
|
||||
|
||||
class WZPHybridClient {
|
||||
/**
|
||||
* @param {Object} options
|
||||
* @param {string} options.wsUrl WebSocket URL (ws://host/ws/room)
|
||||
* @param {string} options.room Room name
|
||||
* @param {Function} options.onAudio callback(Int16Array) for playback
|
||||
* @param {Function} options.onStatus callback(string) for UI status
|
||||
* @param {Function} options.onStats callback({sent, recv, loss, elapsed, fecRecovered}) for UI
|
||||
*/
|
||||
constructor(options) {
|
||||
this.wsUrl = options.wsUrl;
|
||||
this.room = options.room;
|
||||
this.onAudio = options.onAudio || null;
|
||||
this.onStatus = options.onStatus || null;
|
||||
this.onStats = options.onStats || null;
|
||||
|
||||
this.ws = null;
|
||||
this.sequence = 0;
|
||||
this.stats = { sent: 0, recv: 0, fecRecovered: 0 };
|
||||
this._startTime = 0;
|
||||
this._statsInterval = null;
|
||||
this._connected = false;
|
||||
|
||||
// WASM FEC instances (loaded in connect()).
|
||||
this._wasmModule = null;
|
||||
this.fecEncoder = null;
|
||||
this.fecDecoder = null;
|
||||
this._fecReady = false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Open WebSocket connection and load the WASM FEC module.
|
||||
* @returns {Promise<void>} resolves when connected
|
||||
*/
|
||||
async connect() {
|
||||
if (this._connected) return;
|
||||
|
||||
// Load WASM module in parallel with WebSocket connect.
|
||||
const wasmPromise = this._loadWasm();
|
||||
|
||||
const wsPromise = new Promise((resolve, reject) => {
|
||||
this._status('Connecting to room: ' + this.room + '...');
|
||||
|
||||
this.ws = new WebSocket(this.wsUrl);
|
||||
this.ws.binaryType = 'arraybuffer';
|
||||
|
||||
this.ws.onopen = () => {
|
||||
this._connected = true;
|
||||
this.sequence = 0;
|
||||
this.stats = { sent: 0, recv: 0, fecRecovered: 0 };
|
||||
this._startTime = Date.now();
|
||||
this._startStatsTimer();
|
||||
resolve();
|
||||
};
|
||||
|
||||
this.ws.onmessage = (event) => {
|
||||
this._handleMessage(event);
|
||||
};
|
||||
|
||||
this.ws.onclose = () => {
|
||||
const wasConnected = this._connected;
|
||||
this._cleanup();
|
||||
if (wasConnected) {
|
||||
this._status('Disconnected');
|
||||
}
|
||||
};
|
||||
|
||||
this.ws.onerror = () => {
|
||||
if (!this._connected) {
|
||||
this._cleanup();
|
||||
reject(new Error('WebSocket connection failed'));
|
||||
} else {
|
||||
this._status('Connection error');
|
||||
}
|
||||
};
|
||||
});
|
||||
|
||||
// Wait for both WASM load and WS connect.
|
||||
await Promise.all([wasmPromise, wsPromise]);
|
||||
|
||||
const fecStatus = this._fecReady ? 'FEC ready' : 'FEC unavailable';
|
||||
this._status('Connected to room: ' + this.room + ' (' + fecStatus + ')');
|
||||
}
|
||||
|
||||
/**
|
||||
* Close WebSocket and clean up.
|
||||
*/
|
||||
disconnect() {
|
||||
this._connected = false;
|
||||
if (this.ws) {
|
||||
this.ws.close();
|
||||
this.ws = null;
|
||||
}
|
||||
this._stopStatsTimer();
|
||||
// Keep WASM module loaded (reusable).
|
||||
this.fecEncoder = null;
|
||||
this.fecDecoder = null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Send a PCM audio frame over the WebSocket.
|
||||
* Currently sends raw PCM (same as pure client) since WebSocket is TCP.
|
||||
* When WebTransport is added, this will FEC-encode before sending.
|
||||
* @param {ArrayBuffer} pcmBuffer 960-sample Int16 PCM (1920 bytes)
|
||||
*/
|
||||
async sendAudio(pcmBuffer) {
|
||||
if (!this._connected || !this.ws || this.ws.readyState !== WebSocket.OPEN) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Over WebSocket (TCP): send raw PCM, no FEC needed.
|
||||
// Over WebTransport (UDP, future): would call this.fecEncoder.add_symbol()
|
||||
// and send the resulting FEC-protected packets.
|
||||
this.ws.send(pcmBuffer);
|
||||
this.sequence++;
|
||||
this.stats.sent++;
|
||||
}
|
||||
|
||||
/**
|
||||
* Test FEC encode -> simulate loss -> decode in the browser.
|
||||
* Demonstrates that the WASM RaptorQ module works correctly.
|
||||
*
|
||||
* @param {Object} [opts]
|
||||
* @param {number} [opts.blockSize=5] Source symbols per block
|
||||
* @param {number} [opts.symbolSize=256] Padded symbol size
|
||||
* @param {number} [opts.frameSize=100] Bytes per test frame
|
||||
* @param {number} [opts.dropCount=2] Number of packets to drop
|
||||
* @returns {Object} { success, sourcePackets, repairPackets, dropped, recovered, elapsed }
|
||||
*/
|
||||
testFec(opts) {
|
||||
if (!this._fecReady) {
|
||||
return { success: false, error: 'WASM FEC module not loaded' };
|
||||
}
|
||||
|
||||
const blockSize = (opts && opts.blockSize) || 5;
|
||||
const symbolSize = (opts && opts.symbolSize) || 256;
|
||||
const frameSize = (opts && opts.frameSize) || 100;
|
||||
const dropCount = (opts && opts.dropCount) || 2;
|
||||
|
||||
const HEADER_SIZE = 3; // block_id + symbol_idx + is_repair
|
||||
const packetSize = HEADER_SIZE + symbolSize;
|
||||
|
||||
const t0 = performance.now();
|
||||
|
||||
// Create fresh encoder/decoder for the test.
|
||||
const encoder = new this._wasmModule.WzpFecEncoder(blockSize, symbolSize);
|
||||
const decoder = new this._wasmModule.WzpFecDecoder(blockSize, symbolSize);
|
||||
|
||||
// Generate test frames with known data.
|
||||
const frames = [];
|
||||
for (let i = 0; i < blockSize; i++) {
|
||||
const frame = new Uint8Array(frameSize);
|
||||
for (let j = 0; j < frameSize; j++) {
|
||||
frame[j] = ((i * 37 + 7) + j) & 0xFF;
|
||||
}
|
||||
frames.push(frame);
|
||||
}
|
||||
|
||||
// Encode: feed frames to encoder; last one triggers block output.
|
||||
let wireData = null;
|
||||
for (const frame of frames) {
|
||||
const result = encoder.add_symbol(frame);
|
||||
if (result) {
|
||||
wireData = result;
|
||||
}
|
||||
}
|
||||
|
||||
if (!wireData) {
|
||||
// Flush if block didn't complete (shouldn't happen with exact blockSize).
|
||||
wireData = encoder.flush();
|
||||
}
|
||||
|
||||
// Parse wire packets.
|
||||
const packets = [];
|
||||
for (let offset = 0; offset + packetSize <= wireData.length; offset += packetSize) {
|
||||
packets.push({
|
||||
blockId: wireData[offset],
|
||||
symbolIdx: wireData[offset + 1],
|
||||
isRepair: wireData[offset + 2] !== 0,
|
||||
data: wireData.slice(offset + HEADER_SIZE, offset + packetSize),
|
||||
});
|
||||
}
|
||||
|
||||
const sourcePackets = packets.filter(p => !p.isRepair).length;
|
||||
const repairPackets = packets.filter(p => p.isRepair).length;
|
||||
|
||||
// Simulate packet loss: drop `dropCount` packets from the front (source symbols).
|
||||
const dropped = [];
|
||||
const surviving = [];
|
||||
for (let i = 0; i < packets.length; i++) {
|
||||
if (i < dropCount) {
|
||||
dropped.push(i);
|
||||
} else {
|
||||
surviving.push(packets[i]);
|
||||
}
|
||||
}
|
||||
|
||||
// Decode from surviving packets.
|
||||
let decoded = null;
|
||||
for (const pkt of surviving) {
|
||||
const result = decoder.add_symbol(pkt.blockId, pkt.symbolIdx, pkt.isRepair, pkt.data);
|
||||
if (result) {
|
||||
decoded = result;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
const elapsed = performance.now() - t0;
|
||||
|
||||
// Verify decoded data matches original frames.
|
||||
let success = false;
|
||||
if (decoded) {
|
||||
const expected = new Uint8Array(blockSize * frameSize);
|
||||
let off = 0;
|
||||
for (const frame of frames) {
|
||||
expected.set(frame, off);
|
||||
off += frame.length;
|
||||
}
|
||||
|
||||
success = decoded.length === expected.length;
|
||||
if (success) {
|
||||
for (let i = 0; i < decoded.length; i++) {
|
||||
if (decoded[i] !== expected[i]) {
|
||||
success = false;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Free WASM objects.
|
||||
encoder.free();
|
||||
decoder.free();
|
||||
|
||||
return {
|
||||
success,
|
||||
sourcePackets,
|
||||
repairPackets,
|
||||
totalPackets: packets.length,
|
||||
dropped: dropCount,
|
||||
recovered: success,
|
||||
decodedBytes: decoded ? decoded.length : 0,
|
||||
expectedBytes: blockSize * frameSize,
|
||||
elapsed: elapsed.toFixed(2) + 'ms',
|
||||
};
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// Internal
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
async _loadWasm() {
|
||||
try {
|
||||
// Dynamic import of the wasm-pack generated JS glue.
|
||||
this._wasmModule = await import(WZP_WASM_PATH);
|
||||
// Initialize the WASM module (calls __wbg_init).
|
||||
await this._wasmModule.default();
|
||||
|
||||
// Create FEC encoder/decoder instances.
|
||||
// 5 symbols per block, 256-byte symbols — matches native wzp-fec defaults.
|
||||
this.fecEncoder = new this._wasmModule.WzpFecEncoder(5, 256);
|
||||
this.fecDecoder = new this._wasmModule.WzpFecDecoder(5, 256);
|
||||
this._fecReady = true;
|
||||
|
||||
console.log('[wzp-hybrid] WASM FEC module loaded successfully');
|
||||
} catch (e) {
|
||||
console.warn('[wzp-hybrid] WASM FEC module failed to load:', e);
|
||||
this._fecReady = false;
|
||||
// Non-fatal: client still works without FEC (like pure variant).
|
||||
}
|
||||
}
|
||||
|
||||
_handleMessage(event) {
|
||||
if (!(event.data instanceof ArrayBuffer)) return;
|
||||
const pcm = new Int16Array(event.data);
|
||||
this.stats.recv++;
|
||||
if (this.onAudio) {
|
||||
this.onAudio(pcm);
|
||||
}
|
||||
}
|
||||
|
||||
_startStatsTimer() {
|
||||
this._stopStatsTimer();
|
||||
this._statsInterval = setInterval(() => {
|
||||
if (!this._connected) {
|
||||
this._stopStatsTimer();
|
||||
return;
|
||||
}
|
||||
const elapsed = (Date.now() - this._startTime) / 1000;
|
||||
const loss = this.stats.sent > 0
|
||||
? Math.max(0, 1 - this.stats.recv / this.stats.sent)
|
||||
: 0;
|
||||
if (this.onStats) {
|
||||
this.onStats({
|
||||
sent: this.stats.sent,
|
||||
recv: this.stats.recv,
|
||||
loss: loss,
|
||||
elapsed: elapsed,
|
||||
fecRecovered: this.stats.fecRecovered,
|
||||
fecReady: this._fecReady,
|
||||
});
|
||||
}
|
||||
}, 1000);
|
||||
}
|
||||
|
||||
_stopStatsTimer() {
|
||||
if (this._statsInterval) {
|
||||
clearInterval(this._statsInterval);
|
||||
this._statsInterval = null;
|
||||
}
|
||||
}
|
||||
|
||||
_status(msg) {
|
||||
if (this.onStatus) this.onStatus(msg);
|
||||
}
|
||||
|
||||
_cleanup() {
|
||||
this._connected = false;
|
||||
this._stopStatsTimer();
|
||||
if (this.ws) {
|
||||
try { this.ws.close(); } catch (_) { /* ignore */ }
|
||||
this.ws = null;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Export
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
window.WZPHybridClient = WZPHybridClient;
|
||||
168
crates/wzp-web/static/js/wzp-pure.js
Normal file
168
crates/wzp-web/static/js/wzp-pure.js
Normal file
@@ -0,0 +1,168 @@
|
||||
// WarzonePhone — Pure JS client (Variant 1).
|
||||
// WebSocket transport, raw PCM, no WASM, no FEC.
|
||||
// Relies on wzp-core.js for UI and audio helpers.
|
||||
|
||||
'use strict';
|
||||
|
||||
class WZPPureClient {
|
||||
/**
|
||||
* @param {Object} options
|
||||
* @param {string} options.wsUrl WebSocket URL (ws://host/ws/room)
|
||||
* @param {string} options.room Room name
|
||||
* @param {Function} options.onAudio callback(Int16Array) for playback
|
||||
* @param {Function} options.onStatus callback(string) for UI status
|
||||
* @param {Function} options.onStats callback({sent, recv, loss, elapsed}) for UI
|
||||
*/
|
||||
constructor(options) {
|
||||
this.wsUrl = options.wsUrl;
|
||||
this.room = options.room;
|
||||
this.onAudio = options.onAudio || null;
|
||||
this.onStatus = options.onStatus || null;
|
||||
this.onStats = options.onStats || null;
|
||||
|
||||
this.ws = null;
|
||||
this.sequence = 0;
|
||||
this.stats = { sent: 0, recv: 0 };
|
||||
this._startTime = 0;
|
||||
this._statsInterval = null;
|
||||
this._connected = false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Open WebSocket connection to the wzp-web bridge.
|
||||
* @returns {Promise<void>} resolves when connected
|
||||
*/
|
||||
async connect() {
|
||||
if (this._connected) return;
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
this._status('Connecting to room: ' + this.room + '...');
|
||||
|
||||
this.ws = new WebSocket(this.wsUrl);
|
||||
this.ws.binaryType = 'arraybuffer';
|
||||
|
||||
this.ws.onopen = () => {
|
||||
this._connected = true;
|
||||
this.sequence = 0;
|
||||
this.stats = { sent: 0, recv: 0 };
|
||||
this._startTime = Date.now();
|
||||
this._status('Connected to room: ' + this.room);
|
||||
this._startStatsTimer();
|
||||
resolve();
|
||||
};
|
||||
|
||||
this.ws.onmessage = (event) => {
|
||||
this._handleMessage(event);
|
||||
};
|
||||
|
||||
this.ws.onclose = () => {
|
||||
const wasConnected = this._connected;
|
||||
this._cleanup();
|
||||
if (wasConnected) {
|
||||
this._status('Disconnected');
|
||||
}
|
||||
};
|
||||
|
||||
this.ws.onerror = (err) => {
|
||||
if (!this._connected) {
|
||||
this._cleanup();
|
||||
reject(new Error('WebSocket connection failed'));
|
||||
} else {
|
||||
this._status('Connection error');
|
||||
}
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Close WebSocket and clean up.
|
||||
*/
|
||||
disconnect() {
|
||||
this._connected = false;
|
||||
if (this.ws) {
|
||||
this.ws.close();
|
||||
this.ws = null;
|
||||
}
|
||||
this._stopStatsTimer();
|
||||
}
|
||||
|
||||
/**
|
||||
* Send a PCM audio frame over the WebSocket.
|
||||
* @param {ArrayBuffer} pcmBuffer 960-sample Int16 PCM (1920 bytes)
|
||||
*/
|
||||
async sendAudio(pcmBuffer) {
|
||||
if (!this._connected || !this.ws || this.ws.readyState !== WebSocket.OPEN) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Pure JS variant: send raw PCM directly (no encryption, no header).
|
||||
// The wzp-web bridge handles QUIC-side encryption.
|
||||
this.ws.send(pcmBuffer);
|
||||
this.sequence++;
|
||||
this.stats.sent++;
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// Internal
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
_handleMessage(event) {
|
||||
if (!(event.data instanceof ArrayBuffer)) return;
|
||||
const pcm = new Int16Array(event.data);
|
||||
this.stats.recv++;
|
||||
if (this.onAudio) {
|
||||
this.onAudio(pcm);
|
||||
}
|
||||
}
|
||||
|
||||
_startStatsTimer() {
|
||||
this._stopStatsTimer();
|
||||
this._statsInterval = setInterval(() => {
|
||||
if (!this._connected) {
|
||||
this._stopStatsTimer();
|
||||
return;
|
||||
}
|
||||
const elapsed = (Date.now() - this._startTime) / 1000;
|
||||
// Simple loss estimate: if we sent frames, the other side should
|
||||
// receive roughly the same count. Since we only see our own recv,
|
||||
// we report raw counts and let the UI decide.
|
||||
const loss = this.stats.sent > 0
|
||||
? Math.max(0, 1 - this.stats.recv / this.stats.sent)
|
||||
: 0;
|
||||
if (this.onStats) {
|
||||
this.onStats({
|
||||
sent: this.stats.sent,
|
||||
recv: this.stats.recv,
|
||||
loss: loss,
|
||||
elapsed: elapsed,
|
||||
});
|
||||
}
|
||||
}, 1000);
|
||||
}
|
||||
|
||||
_stopStatsTimer() {
|
||||
if (this._statsInterval) {
|
||||
clearInterval(this._statsInterval);
|
||||
this._statsInterval = null;
|
||||
}
|
||||
}
|
||||
|
||||
_status(msg) {
|
||||
if (this.onStatus) this.onStatus(msg);
|
||||
}
|
||||
|
||||
_cleanup() {
|
||||
this._connected = false;
|
||||
this._stopStatsTimer();
|
||||
if (this.ws) {
|
||||
try { this.ws.close(); } catch (_) { /* ignore */ }
|
||||
this.ws = null;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Export
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
window.WZPPureClient = WZPPureClient;
|
||||
592
crates/wzp-web/static/js/wzp-ws-fec.js
Normal file
592
crates/wzp-web/static/js/wzp-ws-fec.js
Normal file
@@ -0,0 +1,592 @@
|
||||
// WarzonePhone — WZP-WS-FEC client (Variant 5).
|
||||
// WebSocket transport, WZP wire protocol, WASM RaptorQ FEC.
|
||||
// Application-layer redundancy even over TCP.
|
||||
// Sends MediaPacket-formatted frames with FEC encoding.
|
||||
// Ready for direct relay WS support (no bridge translation needed).
|
||||
|
||||
'use strict';
|
||||
|
||||
// WASM module path (served from /wasm/ by the wzp-web bridge).
|
||||
const WZP_WS_FEC_WASM_PATH = (window.__WZP_BASE_URL || '') + '/wasm/wzp_wasm.js';
|
||||
|
||||
// 12-byte MediaHeader size (matches wzp-proto MediaHeader::WIRE_SIZE).
|
||||
const WZP_WS_FEC_HEADER_SIZE = 12;
|
||||
|
||||
// FEC wire header: block_id(1) + symbol_idx(1) + is_repair(1) = 3 bytes.
|
||||
const WZP_WS_FEC_FEC_HEADER_SIZE = 3;
|
||||
|
||||
// FEC parameters.
|
||||
// A 960-sample Int16 PCM frame = 1920 bytes. We use symbol_size = 2048
|
||||
// (1920 payload + 2-byte length prefix + 126 bytes padding).
|
||||
const WZP_WS_FEC_BLOCK_SIZE = 5;
|
||||
const WZP_WS_FEC_SYMBOL_SIZE = 2048;
|
||||
|
||||
// Length prefix size within each FEC symbol.
|
||||
const WZP_WS_FEC_LENGTH_PREFIX = 2;
|
||||
|
||||
class WZPWsFecClient {
|
||||
/**
|
||||
* @param {Object} options
|
||||
* @param {string} options.wsUrl WebSocket URL (ws://host/ws/room)
|
||||
* @param {string} options.room Room name
|
||||
* @param {Function} options.onAudio callback(Int16Array) for playback
|
||||
* @param {Function} options.onStatus callback(string) for UI status
|
||||
* @param {Function} options.onStats callback(Object) for UI stats
|
||||
*/
|
||||
constructor(options) {
|
||||
this.wsUrl = options.wsUrl;
|
||||
this.room = options.room;
|
||||
this.authToken = options.authToken || null;
|
||||
this.onAudio = options.onAudio || null;
|
||||
this.onStatus = options.onStatus || null;
|
||||
this.onStats = options.onStats || null;
|
||||
|
||||
this.ws = null;
|
||||
this.seq = 0;
|
||||
this.startTimestamp = 0;
|
||||
this.stats = { sent: 0, recv: 0, fecRecovered: 0 };
|
||||
this._startTime = 0;
|
||||
this._statsInterval = null;
|
||||
this._connected = false;
|
||||
this._authenticated = false;
|
||||
|
||||
// WASM FEC instances (loaded in loadWasm() / connect()).
|
||||
this._wasmModule = null;
|
||||
this.fecEncoder = null;
|
||||
this.fecDecoder = null;
|
||||
this.wasmReady = false;
|
||||
|
||||
// Current FEC block counter for outgoing packets.
|
||||
this._fecBlockId = 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* Load the WASM FEC module.
|
||||
* Called automatically by connect(), or can be called early.
|
||||
*/
|
||||
async loadWasm() {
|
||||
if (this.wasmReady) return;
|
||||
try {
|
||||
this._wasmModule = await import(WZP_WS_FEC_WASM_PATH);
|
||||
await this._wasmModule.default();
|
||||
|
||||
this.fecEncoder = new this._wasmModule.WzpFecEncoder(
|
||||
WZP_WS_FEC_BLOCK_SIZE,
|
||||
WZP_WS_FEC_SYMBOL_SIZE
|
||||
);
|
||||
this.fecDecoder = new this._wasmModule.WzpFecDecoder(
|
||||
WZP_WS_FEC_BLOCK_SIZE,
|
||||
WZP_WS_FEC_SYMBOL_SIZE
|
||||
);
|
||||
this.wasmReady = true;
|
||||
console.log('[wzp-ws-fec] WASM FEC module loaded successfully');
|
||||
} catch (e) {
|
||||
console.error('[wzp-ws-fec] WASM FEC module failed to load:', e);
|
||||
this.wasmReady = false;
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Build a 12-byte WZP MediaHeader.
|
||||
*
|
||||
* @param {number} seq Sequence number (u16)
|
||||
* @param {number} timestampMs Milliseconds since session start
|
||||
* @param {boolean} isRepair True if this is a FEC repair symbol
|
||||
* @param {number} codecId Codec ID (0=RawPcm16, 1=Opus16k, 2=Opus48k)
|
||||
* @param {number} fecBlock FEC block ID (u8)
|
||||
* @param {number} fecSymbol FEC symbol index (u8)
|
||||
* @param {number} fecRatio FEC ratio (0.0 to ~2.0)
|
||||
* @param {boolean} hasQuality Whether a quality report is attached
|
||||
* @returns {Uint8Array} 12-byte header
|
||||
*/
|
||||
_buildHeader(seq, timestampMs, isRepair = false, codecId = 0, fecBlock = 0, fecSymbol = 0, fecRatio = 0, hasQuality = false) {
|
||||
const buf = new ArrayBuffer(WZP_WS_FEC_HEADER_SIZE);
|
||||
const view = new DataView(buf);
|
||||
|
||||
const fecRatioEncoded = Math.min(127, Math.round(fecRatio * 63.5));
|
||||
const byte0 = ((0 & 0x01) << 7) // version=0
|
||||
| ((isRepair ? 1 : 0) << 6) // T bit
|
||||
| ((codecId & 0x0F) << 2) // CodecID
|
||||
| ((hasQuality ? 1 : 0) << 1) // Q bit
|
||||
| ((fecRatioEncoded >> 6) & 0x01); // FecRatioHi
|
||||
view.setUint8(0, byte0);
|
||||
|
||||
const byte1 = (fecRatioEncoded & 0x3F) << 2;
|
||||
view.setUint8(1, byte1);
|
||||
|
||||
view.setUint16(2, seq & 0xFFFF); // big-endian (default for DataView)
|
||||
view.setUint32(4, timestampMs & 0xFFFFFFFF); // big-endian
|
||||
view.setUint8(8, fecBlock & 0xFF);
|
||||
view.setUint8(9, fecSymbol & 0xFF);
|
||||
view.setUint8(10, 0); // reserved
|
||||
view.setUint8(11, 0); // csrc_count
|
||||
return new Uint8Array(buf);
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse a 12-byte MediaHeader from received binary data.
|
||||
*
|
||||
* @param {Uint8Array} data At least 12 bytes
|
||||
* @returns {Object|null} Parsed header fields, or null if too short
|
||||
*/
|
||||
_parseHeader(data) {
|
||||
if (data.byteLength < WZP_WS_FEC_HEADER_SIZE) return null;
|
||||
const view = new DataView(data.buffer || data, data.byteOffset || 0, 12);
|
||||
const byte0 = view.getUint8(0);
|
||||
const byte1 = view.getUint8(1);
|
||||
const fecRatioEncoded = ((byte0 & 0x01) << 6) | ((byte1 >> 2) & 0x3F);
|
||||
return {
|
||||
version: (byte0 >> 7) & 1,
|
||||
isRepair: !!((byte0 >> 6) & 1),
|
||||
codecId: (byte0 >> 2) & 0x0F,
|
||||
hasQuality: !!((byte0 >> 1) & 1),
|
||||
fecRatio: fecRatioEncoded / 63.5,
|
||||
seq: view.getUint16(2),
|
||||
timestamp: view.getUint32(4),
|
||||
fecBlock: view.getUint8(8),
|
||||
fecSymbol: view.getUint8(9),
|
||||
reserved: view.getUint8(10),
|
||||
csrcCount: view.getUint8(11),
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Pad a PCM frame into a FEC symbol with a 2-byte length prefix.
|
||||
* Symbol layout: [len_hi, len_lo, ...pcm_bytes..., ...zero_padding...]
|
||||
*
|
||||
* @param {Uint8Array} pcmBytes Raw PCM bytes
|
||||
* @returns {Uint8Array} Padded symbol of WZP_WS_FEC_SYMBOL_SIZE bytes
|
||||
*/
|
||||
_padToSymbol(pcmBytes) {
|
||||
const symbol = new Uint8Array(WZP_WS_FEC_SYMBOL_SIZE);
|
||||
const len = pcmBytes.length;
|
||||
symbol[0] = (len >> 8) & 0xFF;
|
||||
symbol[1] = len & 0xFF;
|
||||
symbol.set(pcmBytes, WZP_WS_FEC_LENGTH_PREFIX);
|
||||
return symbol;
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract the original PCM payload from a FEC symbol (strip prefix + padding).
|
||||
*
|
||||
* @param {Uint8Array} symbol Symbol data (WZP_WS_FEC_SYMBOL_SIZE bytes)
|
||||
* @returns {Uint8Array} Original PCM bytes
|
||||
*/
|
||||
_unpadSymbol(symbol) {
|
||||
const len = (symbol[0] << 8) | symbol[1];
|
||||
if (len > WZP_WS_FEC_SYMBOL_SIZE - WZP_WS_FEC_LENGTH_PREFIX) {
|
||||
// Sanity check: if length is bogus, return empty.
|
||||
return new Uint8Array(0);
|
||||
}
|
||||
return symbol.slice(WZP_WS_FEC_LENGTH_PREFIX, WZP_WS_FEC_LENGTH_PREFIX + len);
|
||||
}
|
||||
|
||||
/**
|
||||
* Open WebSocket connection and load the WASM FEC module.
|
||||
* @returns {Promise<void>} resolves when connected
|
||||
*/
|
||||
async connect() {
|
||||
if (this._connected) return;
|
||||
|
||||
// Load WASM module in parallel with WebSocket connect.
|
||||
const wasmPromise = this.loadWasm();
|
||||
|
||||
const wsPromise = new Promise((resolve, reject) => {
|
||||
this._status('Connecting (WZP-WS-FEC) to room: ' + this.room + '...');
|
||||
|
||||
this.ws = new WebSocket(this.wsUrl);
|
||||
this.ws.binaryType = 'arraybuffer';
|
||||
|
||||
this.ws.onopen = () => {
|
||||
// Send auth if token provided.
|
||||
if (this.authToken) {
|
||||
this.ws.send(JSON.stringify({ type: 'auth', token: this.authToken }));
|
||||
}
|
||||
|
||||
this._connected = true;
|
||||
this._authenticated = !this.authToken;
|
||||
this.seq = 0;
|
||||
this.startTimestamp = Date.now();
|
||||
this.stats = { sent: 0, recv: 0, fecRecovered: 0 };
|
||||
this._startTime = Date.now();
|
||||
this._fecBlockId = 0;
|
||||
this._startStatsTimer();
|
||||
resolve();
|
||||
};
|
||||
|
||||
this.ws.onmessage = (event) => {
|
||||
// Handle text messages (auth responses).
|
||||
if (typeof event.data === 'string') {
|
||||
try {
|
||||
const msg = JSON.parse(event.data);
|
||||
if (msg.type === 'auth_ok') {
|
||||
this._authenticated = true;
|
||||
this._status('Authenticated (WZP-WS-FEC) to room: ' + this.room);
|
||||
}
|
||||
if (msg.type === 'auth_error') {
|
||||
this._status('Auth failed: ' + (msg.reason || 'unknown'));
|
||||
this.disconnect();
|
||||
}
|
||||
} catch(e) { /* ignore non-JSON text */ }
|
||||
return;
|
||||
}
|
||||
this._handleMessage(event);
|
||||
};
|
||||
|
||||
this.ws.onclose = () => {
|
||||
const was = this._connected;
|
||||
this._cleanup();
|
||||
if (was) this._status('Disconnected');
|
||||
};
|
||||
|
||||
this.ws.onerror = () => {
|
||||
if (!this._connected) {
|
||||
this._cleanup();
|
||||
reject(new Error('WebSocket connection failed'));
|
||||
} else {
|
||||
this._status('Connection error');
|
||||
}
|
||||
};
|
||||
});
|
||||
|
||||
await Promise.all([wasmPromise, wsPromise]);
|
||||
|
||||
const fecStatus = this.wasmReady ? 'FEC ready' : 'FEC unavailable';
|
||||
this._status('Connected (WZP-WS-FEC) to room: ' + this.room + ' (' + fecStatus + ')');
|
||||
}
|
||||
|
||||
/**
|
||||
* Close WebSocket and clean up.
|
||||
*/
|
||||
disconnect() {
|
||||
this._connected = false;
|
||||
if (this.ws) {
|
||||
this.ws.close();
|
||||
this.ws = null;
|
||||
}
|
||||
this._stopStatsTimer();
|
||||
// Keep WASM module loaded (reusable), but reset encoder/decoder.
|
||||
if (this.fecEncoder) {
|
||||
try { this.fecEncoder.free(); } catch (_) { /* ignore */ }
|
||||
this.fecEncoder = null;
|
||||
}
|
||||
if (this.fecDecoder) {
|
||||
try { this.fecDecoder.free(); } catch (_) { /* ignore */ }
|
||||
this.fecDecoder = null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Send a PCM audio frame with FEC encoding over the WebSocket.
|
||||
*
|
||||
* Each PCM frame is padded to a FEC symbol (2048 bytes with length prefix)
|
||||
* and fed to the FEC encoder. When a block of 5 symbols completes, the
|
||||
* encoder outputs source + repair symbols. Each is sent as an individual
|
||||
* WZP MediaPacket with the appropriate fecBlock, fecSymbol, and isRepair
|
||||
* fields in the 12-byte header.
|
||||
*
|
||||
* @param {ArrayBuffer} pcmBuffer 960-sample Int16 PCM (1920 bytes)
|
||||
*/
|
||||
async sendAudio(pcmBuffer) {
|
||||
if (!this._connected || !this.ws || this.ws.readyState !== WebSocket.OPEN) return;
|
||||
if (!this.wasmReady || !this.fecEncoder) return;
|
||||
|
||||
const pcmBytes = new Uint8Array(pcmBuffer);
|
||||
|
||||
// Pad PCM frame to FEC symbol size with length prefix.
|
||||
const symbol = this._padToSymbol(pcmBytes);
|
||||
|
||||
// Feed to FEC encoder. Returns wire data when block completes.
|
||||
const fecOutput = this.fecEncoder.add_symbol(symbol);
|
||||
|
||||
if (fecOutput) {
|
||||
// Block completed — send all packets (source + repair).
|
||||
const packetSize = WZP_WS_FEC_FEC_HEADER_SIZE + WZP_WS_FEC_SYMBOL_SIZE;
|
||||
const timestampMs = Date.now() - this.startTimestamp;
|
||||
|
||||
for (let offset = 0; offset + packetSize <= fecOutput.length; offset += packetSize) {
|
||||
const blockId = fecOutput[offset];
|
||||
const symbolIdx = fecOutput[offset + 1];
|
||||
const isRepair = fecOutput[offset + 2] !== 0;
|
||||
const symbolData = fecOutput.slice(
|
||||
offset + WZP_WS_FEC_FEC_HEADER_SIZE,
|
||||
offset + packetSize
|
||||
);
|
||||
|
||||
// Build WZP MediaHeader for this FEC symbol.
|
||||
// fecRatio ~0.5 for 50% repair overhead: encoded = round(0.5 * 63.5) = 32
|
||||
const header = this._buildHeader(
|
||||
this.seq,
|
||||
timestampMs,
|
||||
isRepair,
|
||||
0, // codecId = RawPcm16
|
||||
blockId,
|
||||
symbolIdx,
|
||||
0.5, // fecRatio
|
||||
false // hasQuality
|
||||
);
|
||||
|
||||
// Wire frame: header(12) + symbol_data(2048)
|
||||
const packet = new Uint8Array(WZP_WS_FEC_HEADER_SIZE + symbolData.length);
|
||||
packet.set(header, 0);
|
||||
packet.set(symbolData, WZP_WS_FEC_HEADER_SIZE);
|
||||
|
||||
this.ws.send(packet.buffer);
|
||||
this.seq = (this.seq + 1) & 0xFFFF;
|
||||
this.stats.sent++;
|
||||
}
|
||||
|
||||
this._fecBlockId++;
|
||||
}
|
||||
// If block not yet complete, accumulate (no packets sent yet).
|
||||
}
|
||||
|
||||
/**
|
||||
* Test FEC encode -> simulate loss -> decode in the browser.
|
||||
* Demonstrates that the WASM RaptorQ module works correctly
|
||||
* with the WZP wire protocol symbol format.
|
||||
*
|
||||
* @param {Object} [opts]
|
||||
* @param {number} [opts.blockSize=5] Source symbols per block
|
||||
* @param {number} [opts.symbolSize=2048] Padded symbol size
|
||||
* @param {number} [opts.frameSize=1920] PCM frame size in bytes
|
||||
* @param {number} [opts.dropCount=2] Number of packets to drop (simulated 30%+ loss)
|
||||
* @returns {Object} Test results
|
||||
*/
|
||||
testFec(opts) {
|
||||
if (!this.wasmReady || !this._wasmModule) {
|
||||
return { success: false, error: 'WASM FEC module not loaded' };
|
||||
}
|
||||
|
||||
const blockSize = (opts && opts.blockSize) || 5;
|
||||
const symbolSize = (opts && opts.symbolSize) || WZP_WS_FEC_SYMBOL_SIZE;
|
||||
const frameSize = (opts && opts.frameSize) || 1920;
|
||||
const dropCount = (opts && opts.dropCount) || 2;
|
||||
|
||||
const FEC_HDR = 3; // block_id + symbol_idx + is_repair
|
||||
const packetSize = FEC_HDR + symbolSize;
|
||||
|
||||
const t0 = performance.now();
|
||||
|
||||
// Create fresh encoder/decoder for the test.
|
||||
const encoder = new this._wasmModule.WzpFecEncoder(blockSize, symbolSize);
|
||||
const decoder = new this._wasmModule.WzpFecDecoder(blockSize, symbolSize);
|
||||
|
||||
// Generate test frames with known data, padded to symbol size with length prefix.
|
||||
const originalFrames = [];
|
||||
const paddedSymbols = [];
|
||||
for (let i = 0; i < blockSize; i++) {
|
||||
const frame = new Uint8Array(frameSize);
|
||||
for (let j = 0; j < frameSize; j++) {
|
||||
frame[j] = ((i * 37 + 7) + j) & 0xFF;
|
||||
}
|
||||
originalFrames.push(frame);
|
||||
|
||||
// Pad with length prefix (same as _padToSymbol).
|
||||
const sym = new Uint8Array(symbolSize);
|
||||
sym[0] = (frameSize >> 8) & 0xFF;
|
||||
sym[1] = frameSize & 0xFF;
|
||||
sym.set(frame, 2);
|
||||
paddedSymbols.push(sym);
|
||||
}
|
||||
|
||||
// Encode: feed padded symbols to encoder.
|
||||
let wireData = null;
|
||||
for (const sym of paddedSymbols) {
|
||||
const result = encoder.add_symbol(sym);
|
||||
if (result) wireData = result;
|
||||
}
|
||||
|
||||
if (!wireData) {
|
||||
wireData = encoder.flush();
|
||||
}
|
||||
|
||||
// Parse wire packets.
|
||||
const packets = [];
|
||||
if (wireData) {
|
||||
for (let offset = 0; offset + packetSize <= wireData.length; offset += packetSize) {
|
||||
packets.push({
|
||||
blockId: wireData[offset],
|
||||
symbolIdx: wireData[offset + 1],
|
||||
isRepair: wireData[offset + 2] !== 0,
|
||||
data: wireData.slice(offset + FEC_HDR, offset + packetSize),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
const sourcePackets = packets.filter(p => !p.isRepair).length;
|
||||
const repairPackets = packets.filter(p => p.isRepair).length;
|
||||
|
||||
// Simulate packet loss: drop `dropCount` source packets from the front.
|
||||
const dropped = [];
|
||||
const surviving = [];
|
||||
for (let i = 0; i < packets.length; i++) {
|
||||
if (i < dropCount) {
|
||||
dropped.push(i);
|
||||
} else {
|
||||
surviving.push(packets[i]);
|
||||
}
|
||||
}
|
||||
|
||||
// Decode from surviving packets.
|
||||
let decoded = null;
|
||||
for (const pkt of surviving) {
|
||||
const result = decoder.add_symbol(pkt.blockId, pkt.symbolIdx, pkt.isRepair, pkt.data);
|
||||
if (result) {
|
||||
decoded = result;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// Verify decoded data: extract original frames from decoded symbols.
|
||||
let success = false;
|
||||
if (decoded) {
|
||||
// decoded is the concatenated padded symbols. Extract original frames.
|
||||
const recoveredFrames = [];
|
||||
for (let i = 0; i < blockSize; i++) {
|
||||
const symOffset = i * symbolSize;
|
||||
if (symOffset + symbolSize <= decoded.length) {
|
||||
const sym = decoded.slice(symOffset, symOffset + symbolSize);
|
||||
const len = (sym[0] << 8) | sym[1];
|
||||
recoveredFrames.push(sym.slice(2, 2 + len));
|
||||
}
|
||||
}
|
||||
|
||||
success = recoveredFrames.length === blockSize;
|
||||
if (success) {
|
||||
for (let i = 0; i < blockSize && success; i++) {
|
||||
if (recoveredFrames[i].length !== originalFrames[i].length) {
|
||||
success = false;
|
||||
break;
|
||||
}
|
||||
for (let j = 0; j < originalFrames[i].length; j++) {
|
||||
if (recoveredFrames[i][j] !== originalFrames[i][j]) {
|
||||
success = false;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Free WASM objects.
|
||||
encoder.free();
|
||||
decoder.free();
|
||||
|
||||
const elapsed = performance.now() - t0;
|
||||
|
||||
return {
|
||||
success,
|
||||
sourcePackets,
|
||||
repairPackets,
|
||||
totalPackets: packets.length,
|
||||
dropped: dropCount,
|
||||
recovered: !!decoded,
|
||||
symbolSize: symbolSize,
|
||||
frameSize: frameSize,
|
||||
elapsed: elapsed.toFixed(2) + 'ms',
|
||||
};
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// Internal
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
  // Handle one binary WebSocket frame: parse the 12-byte MediaHeader, feed the
  // symbol payload into the WASM FEC decoder, and, when a block completes,
  // emit each recovered PCM frame via the onAudio callback.
  // NOTE(review): WZP_WS_FEC_HEADER_SIZE is used both as the minimum frame
  // length and as the payload offset — the inline comments say 12 bytes;
  // confirm against the constant's definition earlier in this file.
  _handleMessage(event) {
    if (!(event.data instanceof ArrayBuffer)) return;
    const data = new Uint8Array(event.data);
    if (data.length < WZP_WS_FEC_HEADER_SIZE) return;

    const header = this._parseHeader(data);
    if (!header) return;

    // Count every well-formed incoming frame, even ones we cannot decode.
    this.stats.recv++;

    if (!this.wasmReady || !this.fecDecoder) {
      // No FEC decoder — cannot process FEC-encoded data.
      return;
    }

    // Extract symbol data (everything after 12-byte MediaHeader).
    const symbolData = data.slice(WZP_WS_FEC_HEADER_SIZE);

    // Feed symbol to FEC decoder using header fields.
    // add_symbol returns a truthy buffer only when the whole block decodes.
    const decoded = this.fecDecoder.add_symbol(
      header.fecBlock,
      header.fecSymbol,
      header.isRepair,
      symbolData
    );

    if (decoded) {
      this.stats.fecRecovered++;

      // decoded is concatenated padded symbols.
      // Each symbol is WZP_WS_FEC_SYMBOL_SIZE bytes with a 2-byte length prefix.
      for (let off = 0; off + WZP_WS_FEC_SYMBOL_SIZE <= decoded.length; off += WZP_WS_FEC_SYMBOL_SIZE) {
        const symbol = decoded.slice(off, off + WZP_WS_FEC_SYMBOL_SIZE);
        const pcmBytes = this._unpadSymbol(symbol);

        // Int16Array construction requires an even byte count; an empty
        // result from _unpadSymbol indicates a corrupted length prefix.
        if (pcmBytes.length > 0 && pcmBytes.length % 2 === 0) {
          const pcm = new Int16Array(
            pcmBytes.buffer,
            pcmBytes.byteOffset,
            pcmBytes.byteLength / 2
          );
          if (this.onAudio) this.onAudio(pcm);
        }
      }
    }
  }
|
||||
|
||||
_startStatsTimer() {
|
||||
this._stopStatsTimer();
|
||||
this._statsInterval = setInterval(() => {
|
||||
if (!this._connected) {
|
||||
this._stopStatsTimer();
|
||||
return;
|
||||
}
|
||||
const elapsed = (Date.now() - this._startTime) / 1000;
|
||||
const loss = this.stats.sent > 0
|
||||
? Math.max(0, 1 - this.stats.recv / this.stats.sent)
|
||||
: 0;
|
||||
if (this.onStats) {
|
||||
this.onStats({
|
||||
sent: this.stats.sent,
|
||||
recv: this.stats.recv,
|
||||
loss: loss,
|
||||
elapsed: elapsed,
|
||||
fecRecovered: this.stats.fecRecovered,
|
||||
fecReady: this.wasmReady,
|
||||
});
|
||||
}
|
||||
}, 1000);
|
||||
}
|
||||
|
||||
_stopStatsTimer() {
|
||||
if (this._statsInterval) {
|
||||
clearInterval(this._statsInterval);
|
||||
this._statsInterval = null;
|
||||
}
|
||||
}
|
||||
|
||||
_status(msg) {
|
||||
if (this.onStatus) this.onStatus(msg);
|
||||
}
|
||||
|
||||
_cleanup() {
|
||||
this._connected = false;
|
||||
this._stopStatsTimer();
|
||||
if (this.ws) {
|
||||
try { this.ws.close(); } catch (_) { /* ignore */ }
|
||||
this.ws = null;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Export
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
// Attach the client to the global scope for plain <script> consumers.
window.WZPWsFecClient = WZPWsFecClient;
|
||||
749
crates/wzp-web/static/js/wzp-ws-full.js
Normal file
749
crates/wzp-web/static/js/wzp-ws-full.js
Normal file
@@ -0,0 +1,749 @@
|
||||
// WarzonePhone — WZP-WS-Full client (Variant 6).
|
||||
// WebSocket transport, WZP wire protocol, WASM FEC + ChaCha20-Poly1305 E2E.
|
||||
// Full encryption — relay sees only ciphertext.
|
||||
// Sends MediaPacket-formatted frames with FEC + encryption.
|
||||
// Ready for direct relay WS support (no bridge translation needed).
|
||||
|
||||
'use strict';

// WASM module path (served from /wasm/ by the wzp-web bridge).
const WZP_WS_FULL_WASM_PATH = (window.__WZP_BASE_URL || '') + '/wasm/wzp_wasm.js';

// 12-byte MediaHeader size (matches wzp-proto MediaHeader::WIRE_SIZE).
const WZP_WS_FULL_HEADER_SIZE = 12;

// FEC wire header: block_id(1) + symbol_idx(1) + is_repair(1) = 3 bytes.
const WZP_WS_FULL_FEC_HEADER_SIZE = 3;

// FEC parameters.
// A 960-sample Int16 PCM frame = 1920 bytes. Symbol size = 2048
// (1920 payload + 2-byte length prefix + 126 bytes padding).
const WZP_WS_FULL_BLOCK_SIZE = 5;
const WZP_WS_FULL_SYMBOL_SIZE = 2048;

// Length prefix size within each FEC symbol.
const WZP_WS_FULL_LENGTH_PREFIX = 2;

// ChaCha20-Poly1305 tag size (16 bytes). The ciphertext produced by
// WzpCryptoSession.encrypt is plaintext_len + this many bytes.
const WZP_WS_FULL_TAG_SIZE = 16;

// X25519 public key size (32 bytes) — a bare 32-byte binary WS message
// during setup is interpreted as the peer's public key.
const WZP_WS_FULL_PUBKEY_SIZE = 32;
|
||||
|
||||
class WZPWsFullClient {
|
||||
  /**
   * @param {Object} options
   * @param {string} options.wsUrl WebSocket URL (ws://host/ws/room)
   * @param {string} options.room Room name
   * @param {string} [options.authToken] Optional bearer token sent as a JSON
   *     auth message right after the socket opens
   * @param {Function} options.onAudio callback(Int16Array) for playback
   * @param {Function} options.onStatus callback(string) for UI status
   * @param {Function} options.onStats callback(Object) for UI stats
   */
  constructor(options) {
    this.wsUrl = options.wsUrl;
    this.room = options.room;
    this.authToken = options.authToken || null;
    this.onAudio = options.onAudio || null;
    this.onStatus = options.onStatus || null;
    this.onStats = options.onStats || null;

    // Transport / wire-protocol state.
    this.ws = null;
    this.seq = 0;
    this.startTimestamp = 0;
    this.stats = { sent: 0, recv: 0, fecRecovered: 0, encrypted: 0, decrypted: 0 };
    this._startTime = 0;
    this._statsInterval = null;
    this._connected = false;
    this._authenticated = false;

    // WASM instances.
    this._wasmModule = null;
    this.fecEncoder = null;
    this.fecDecoder = null;
    this.cryptoSession = null;
    this._keyExchange = null;
    this.wasmReady = false;

    // Key exchange state (resolve/reject belong to the connect() promise).
    this._keyExchangeComplete = false;
    this._keyExchangeResolve = null;
    this._keyExchangeReject = null;

    // Current FEC block counter for outgoing packets.
    this._fecBlockId = 0;
  }
|
||||
|
||||
/**
|
||||
* Load the WASM module (FEC + Crypto).
|
||||
* Called automatically by connect(), or can be called early.
|
||||
*/
|
||||
async loadWasm() {
|
||||
if (this.wasmReady) return;
|
||||
try {
|
||||
this._wasmModule = await import(WZP_WS_FULL_WASM_PATH);
|
||||
await this._wasmModule.default();
|
||||
this.wasmReady = true;
|
||||
console.log('[wzp-ws-full] WASM module loaded successfully');
|
||||
} catch (e) {
|
||||
console.error('[wzp-ws-full] WASM module failed to load:', e);
|
||||
this.wasmReady = false;
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Build a 12-byte WZP MediaHeader.
|
||||
*
|
||||
* @param {number} seq Sequence number (u16)
|
||||
* @param {number} timestampMs Milliseconds since session start
|
||||
* @param {boolean} isRepair True if this is a FEC repair symbol
|
||||
* @param {number} codecId Codec ID (0=RawPcm16, 1=Opus16k, 2=Opus48k)
|
||||
* @param {number} fecBlock FEC block ID (u8)
|
||||
* @param {number} fecSymbol FEC symbol index (u8)
|
||||
* @param {number} fecRatio FEC ratio (0.0 to ~2.0)
|
||||
* @param {boolean} hasQuality Whether a quality report is attached
|
||||
* @returns {Uint8Array} 12-byte header
|
||||
*/
|
||||
_buildHeader(seq, timestampMs, isRepair = false, codecId = 0, fecBlock = 0, fecSymbol = 0, fecRatio = 0, hasQuality = false) {
|
||||
const buf = new ArrayBuffer(WZP_WS_FULL_HEADER_SIZE);
|
||||
const view = new DataView(buf);
|
||||
|
||||
const fecRatioEncoded = Math.min(127, Math.round(fecRatio * 63.5));
|
||||
const byte0 = ((0 & 0x01) << 7) // version=0
|
||||
| ((isRepair ? 1 : 0) << 6) // T bit
|
||||
| ((codecId & 0x0F) << 2) // CodecID
|
||||
| ((hasQuality ? 1 : 0) << 1) // Q bit
|
||||
| ((fecRatioEncoded >> 6) & 0x01); // FecRatioHi
|
||||
view.setUint8(0, byte0);
|
||||
|
||||
const byte1 = (fecRatioEncoded & 0x3F) << 2;
|
||||
view.setUint8(1, byte1);
|
||||
|
||||
view.setUint16(2, seq & 0xFFFF); // big-endian (default for DataView)
|
||||
view.setUint32(4, timestampMs & 0xFFFFFFFF); // big-endian
|
||||
view.setUint8(8, fecBlock & 0xFF);
|
||||
view.setUint8(9, fecSymbol & 0xFF);
|
||||
view.setUint8(10, 0); // reserved
|
||||
view.setUint8(11, 0); // csrc_count
|
||||
return new Uint8Array(buf);
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse a 12-byte MediaHeader from received binary data.
|
||||
*
|
||||
* @param {Uint8Array} data At least 12 bytes
|
||||
* @returns {Object|null} Parsed header fields, or null if too short
|
||||
*/
|
||||
_parseHeader(data) {
|
||||
if (data.byteLength < WZP_WS_FULL_HEADER_SIZE) return null;
|
||||
const view = new DataView(data.buffer || data, data.byteOffset || 0, 12);
|
||||
const byte0 = view.getUint8(0);
|
||||
const byte1 = view.getUint8(1);
|
||||
const fecRatioEncoded = ((byte0 & 0x01) << 6) | ((byte1 >> 2) & 0x3F);
|
||||
return {
|
||||
version: (byte0 >> 7) & 1,
|
||||
isRepair: !!((byte0 >> 6) & 1),
|
||||
codecId: (byte0 >> 2) & 0x0F,
|
||||
hasQuality: !!((byte0 >> 1) & 1),
|
||||
fecRatio: fecRatioEncoded / 63.5,
|
||||
seq: view.getUint16(2),
|
||||
timestamp: view.getUint32(4),
|
||||
fecBlock: view.getUint8(8),
|
||||
fecSymbol: view.getUint8(9),
|
||||
reserved: view.getUint8(10),
|
||||
csrcCount: view.getUint8(11),
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Pad a PCM frame into a FEC symbol with a 2-byte length prefix.
|
||||
*
|
||||
* @param {Uint8Array} pcmBytes Raw PCM bytes
|
||||
* @returns {Uint8Array} Padded symbol of WZP_WS_FULL_SYMBOL_SIZE bytes
|
||||
*/
|
||||
_padToSymbol(pcmBytes) {
|
||||
const symbol = new Uint8Array(WZP_WS_FULL_SYMBOL_SIZE);
|
||||
const len = pcmBytes.length;
|
||||
symbol[0] = (len >> 8) & 0xFF;
|
||||
symbol[1] = len & 0xFF;
|
||||
symbol.set(pcmBytes, WZP_WS_FULL_LENGTH_PREFIX);
|
||||
return symbol;
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract the original PCM payload from a FEC symbol (strip prefix + padding).
|
||||
*
|
||||
* @param {Uint8Array} symbol Symbol data
|
||||
* @returns {Uint8Array} Original PCM bytes
|
||||
*/
|
||||
_unpadSymbol(symbol) {
|
||||
const len = (symbol[0] << 8) | symbol[1];
|
||||
if (len > WZP_WS_FULL_SYMBOL_SIZE - WZP_WS_FULL_LENGTH_PREFIX) {
|
||||
return new Uint8Array(0);
|
||||
}
|
||||
return symbol.slice(WZP_WS_FULL_LENGTH_PREFIX, WZP_WS_FULL_LENGTH_PREFIX + len);
|
||||
}
|
||||
|
||||
  /**
   * Open WebSocket connection, load WASM, and perform key exchange.
   *
   * Key exchange protocol over WebSocket:
   * 1. After WS open (and optional JSON auth), send our 32-byte X25519
   *    public key as the first binary message.
   * 2. First received binary message of exactly 32 bytes = peer's public key.
   * 3. Derive shared secret, create WzpCryptoSession.
   * 4. All subsequent binary messages are encrypted MediaPackets.
   *
   * The returned promise is settled exactly once: resolved by
   * _handleKeyExchange on success, rejected on auth failure, close, or
   * socket error during setup (the stored resolve/reject pair is nulled
   * after each use to enforce this).
   *
   * @returns {Promise<void>} resolves when connected and key exchange completes
   */
  async connect() {
    if (this._connected) return;

    // Load WASM first (needed for key exchange).
    await this.loadWasm();

    // Prepare key exchange.
    this._keyExchange = new this._wasmModule.WzpKeyExchange();
    this._keyExchangeComplete = false;

    return new Promise((resolve, reject) => {
      this._status('Connecting (WZP-WS-Full) to room: ' + this.room + '...');

      this.ws = new WebSocket(this.wsUrl);
      this.ws.binaryType = 'arraybuffer';

      this.ws.onopen = () => {
        // Reset per-session counters on every (re)connect.
        this.seq = 0;
        this.startTimestamp = Date.now();
        this.stats = { sent: 0, recv: 0, fecRecovered: 0, encrypted: 0, decrypted: 0 };
        this._startTime = Date.now();
        this._fecBlockId = 0;

        // Send auth if token provided.
        if (this.authToken) {
          this.ws.send(JSON.stringify({ type: 'auth', token: this.authToken }));
          this._authenticated = false;
        } else {
          this._authenticated = true;
          // No auth needed — proceed directly to key exchange.
          this._status('Performing key exchange...');
          const ourPub = this._keyExchange.public_key();
          this.ws.send(new Uint8Array(ourPub).buffer);
        }

        // Store resolve/reject for key exchange completion
        // (consumed by _handleKeyExchange / onclose / onerror).
        this._keyExchangeResolve = resolve;
        this._keyExchangeReject = reject;
      };

      this.ws.onmessage = (event) => {
        // Handle text messages (auth responses).
        if (typeof event.data === 'string') {
          try {
            const msg = JSON.parse(event.data);
            if (msg.type === 'auth_ok') {
              this._authenticated = true;
              this._status('Authenticated, performing key exchange...');
              // Auth succeeded — now send public key for key exchange.
              const ourPub = this._keyExchange.public_key();
              this.ws.send(new Uint8Array(ourPub).buffer);
            }
            if (msg.type === 'auth_error') {
              this._status('Auth failed: ' + (msg.reason || 'unknown'));
              if (this._keyExchangeReject) {
                this._keyExchangeReject(new Error('Auth failed: ' + (msg.reason || 'unknown')));
                this._keyExchangeResolve = null;
                this._keyExchangeReject = null;
              }
              this._cleanup();
            }
          } catch(e) { /* ignore non-JSON text */ }
          return;
        }
        // Binary messages: peer pubkey until key exchange completes,
        // encrypted MediaPackets afterwards.
        if (!this._keyExchangeComplete) {
          this._handleKeyExchange(event);
        } else {
          this._handleMessage(event);
        }
      };

      this.ws.onclose = () => {
        const was = this._connected;
        this._cleanup();
        if (was) {
          this._status('Disconnected');
        } else if (this._keyExchangeReject) {
          // Closed before setup finished — fail the connect() promise.
          this._keyExchangeReject(new Error('Connection closed during key exchange'));
          this._keyExchangeResolve = null;
          this._keyExchangeReject = null;
        }
      };

      this.ws.onerror = () => {
        if (!this._connected) {
          this._cleanup();
          if (this._keyExchangeReject) {
            this._keyExchangeReject(new Error('WebSocket connection failed'));
            this._keyExchangeResolve = null;
            this._keyExchangeReject = null;
          } else {
            // Error fired before onopen stored the pair — reject directly.
            reject(new Error('WebSocket connection failed'));
          }
        } else {
          this._status('Connection error');
        }
      };
    });
  }
|
||||
|
||||
  /**
   * Handle the key exchange: the first binary message of exactly 32 bytes is
   * taken as the peer's X25519 public key. On success this flips the client
   * into the connected state (crypto session + FEC codecs ready) and
   * resolves the pending connect() promise; on failure it rejects it and
   * tears the connection down. Any binary message of a different length is
   * silently ignored while the exchange is pending.
   */
  _handleKeyExchange(event) {
    if (!(event.data instanceof ArrayBuffer)) return;
    const data = new Uint8Array(event.data);

    if (data.length === WZP_WS_FULL_PUBKEY_SIZE) {
      // Received peer's public key — derive shared secret.
      try {
        const peerPub = data;
        const secret = this._keyExchange.derive_shared_secret(peerPub);
        this.cryptoSession = new this._wasmModule.WzpCryptoSession(secret);

        // Free key exchange object (no longer needed).
        this._keyExchange.free();
        this._keyExchange = null;

        // Initialize FEC encoder/decoder.
        this.fecEncoder = new this._wasmModule.WzpFecEncoder(
          WZP_WS_FULL_BLOCK_SIZE,
          WZP_WS_FULL_SYMBOL_SIZE
        );
        this.fecDecoder = new this._wasmModule.WzpFecDecoder(
          WZP_WS_FULL_BLOCK_SIZE,
          WZP_WS_FULL_SYMBOL_SIZE
        );

        // Flip into connected state only after every resource is ready.
        this._keyExchangeComplete = true;
        this._connected = true;
        this._startStatsTimer();
        this._status('Connected (WZP-WS-Full) to room: ' + this.room + ' (encrypted, FEC active)');

        if (this._keyExchangeResolve) {
          this._keyExchangeResolve();
          this._keyExchangeResolve = null;
          this._keyExchangeReject = null;
        }
      } catch (e) {
        console.error('[wzp-ws-full] Key exchange failed:', e);
        if (this._keyExchangeReject) {
          this._keyExchangeReject(new Error('Key exchange failed: ' + e.message));
          this._keyExchangeResolve = null;
          this._keyExchangeReject = null;
        }
        this._cleanup();
      }
    }
    // Ignore non-32-byte messages during key exchange.
  }
|
||||
|
||||
/**
|
||||
* Close WebSocket and clean up all resources.
|
||||
*/
|
||||
disconnect() {
|
||||
this._connected = false;
|
||||
if (this.ws) {
|
||||
this.ws.close();
|
||||
this.ws = null;
|
||||
}
|
||||
this._stopStatsTimer();
|
||||
if (this.cryptoSession) {
|
||||
try { this.cryptoSession.free(); } catch (_) { /* ignore */ }
|
||||
this.cryptoSession = null;
|
||||
}
|
||||
if (this.fecEncoder) {
|
||||
try { this.fecEncoder.free(); } catch (_) { /* ignore */ }
|
||||
this.fecEncoder = null;
|
||||
}
|
||||
if (this.fecDecoder) {
|
||||
try { this.fecDecoder.free(); } catch (_) { /* ignore */ }
|
||||
this.fecDecoder = null;
|
||||
}
|
||||
if (this._keyExchange) {
|
||||
try { this._keyExchange.free(); } catch (_) { /* ignore */ }
|
||||
this._keyExchange = null;
|
||||
}
|
||||
this._keyExchangeComplete = false;
|
||||
}
|
||||
|
||||
  /**
   * Send a PCM audio frame with FEC encoding + encryption over the WebSocket.
   *
   * Pipeline: PCM -> pad to FEC symbol -> FEC encode -> encrypt -> WS send.
   *
   * Each FEC symbol is encrypted individually with ChaCha20-Poly1305. The
   * 12-byte MediaHeader is used as AAD (authenticated but not encrypted),
   * so the relay can inspect routing fields without decrypting the payload.
   *
   * Wire format per packet:
   *   header(12) + ciphertext(symbol_size) + tag(16)
   *
   * Frames accumulate inside the FEC encoder; nothing hits the wire until
   * add_symbol reports a completed block, at which point all of that block's
   * source + repair packets are sent in one burst.
   *
   * @param {ArrayBuffer} pcmBuffer 960-sample Int16 PCM (1920 bytes)
   */
  async sendAudio(pcmBuffer) {
    // Drop silently when not fully connected or crypto/FEC not ready.
    if (!this._connected || !this.ws || this.ws.readyState !== WebSocket.OPEN) return;
    if (!this.cryptoSession || !this.fecEncoder) return;

    const pcmBytes = new Uint8Array(pcmBuffer);

    // Pad PCM frame to FEC symbol size with length prefix.
    const symbol = this._padToSymbol(pcmBytes);

    // Feed to FEC encoder. Returns wire data when block completes.
    const fecOutput = this.fecEncoder.add_symbol(symbol);

    if (fecOutput) {
      // Block completed — encrypt and send all packets (source + repair).
      const fecPacketSize = WZP_WS_FULL_FEC_HEADER_SIZE + WZP_WS_FULL_SYMBOL_SIZE;
      const timestampMs = Date.now() - this.startTimestamp;

      for (let offset = 0; offset + fecPacketSize <= fecOutput.length; offset += fecPacketSize) {
        // Per-packet 3-byte FEC header emitted by the encoder.
        const blockId = fecOutput[offset];
        const symbolIdx = fecOutput[offset + 1];
        const isRepair = fecOutput[offset + 2] !== 0;
        const symbolData = fecOutput.slice(
          offset + WZP_WS_FULL_FEC_HEADER_SIZE,
          offset + fecPacketSize
        );

        // Build WZP MediaHeader (used as AAD for encryption).
        // fecRatio ~0.5 for 50% repair overhead.
        const header = this._buildHeader(
          this.seq,
          timestampMs,
          isRepair,
          0, // codecId = RawPcm16
          blockId,
          symbolIdx,
          0.5, // fecRatio
          false // hasQuality
        );

        // Encrypt: header as AAD, FEC symbol data as plaintext.
        // Returns ciphertext + tag (symbol_size + 16 bytes).
        const ciphertext = this.cryptoSession.encrypt(header, symbolData);
        this.stats.encrypted++;

        // Wire frame: header(12) + ciphertext_with_tag
        const packet = new Uint8Array(WZP_WS_FULL_HEADER_SIZE + ciphertext.length);
        packet.set(header, 0);
        packet.set(ciphertext, WZP_WS_FULL_HEADER_SIZE);

        this.ws.send(packet.buffer);
        // Sequence number wraps at u16.
        this.seq = (this.seq + 1) & 0xFFFF;
        this.stats.sent++;
      }

      this._fecBlockId++;
    }
    // If block not yet complete, accumulate (no packets sent yet).
  }
|
||||
|
||||
/**
|
||||
* Test crypto + FEC roundtrip entirely in WASM (no network).
|
||||
* Simulates: key exchange -> encrypt -> FEC encode -> simulate loss ->
|
||||
* FEC decode -> decrypt -> verify.
|
||||
*
|
||||
* @returns {Object} Test results
|
||||
*/
|
||||
testCryptoFec() {
|
||||
if (!this.wasmReady || !this._wasmModule) {
|
||||
return { success: false, error: 'WASM module not loaded' };
|
||||
}
|
||||
|
||||
const t0 = performance.now();
|
||||
const wasm = this._wasmModule;
|
||||
|
||||
// --- Key exchange ---
|
||||
const alice = new wasm.WzpKeyExchange();
|
||||
const bob = new wasm.WzpKeyExchange();
|
||||
const aliceSecret = alice.derive_shared_secret(bob.public_key());
|
||||
const bobSecret = bob.derive_shared_secret(alice.public_key());
|
||||
|
||||
let secretsMatch = aliceSecret.length === bobSecret.length;
|
||||
if (secretsMatch) {
|
||||
for (let i = 0; i < aliceSecret.length; i++) {
|
||||
if (aliceSecret[i] !== bobSecret[i]) { secretsMatch = false; break; }
|
||||
}
|
||||
}
|
||||
|
||||
// --- Crypto sessions ---
|
||||
const aliceSession = new wasm.WzpCryptoSession(aliceSecret);
|
||||
const bobSession = new wasm.WzpCryptoSession(bobSecret);
|
||||
|
||||
// --- Encrypt + FEC encode ---
|
||||
const encoder = new wasm.WzpFecEncoder(WZP_WS_FULL_BLOCK_SIZE, WZP_WS_FULL_SYMBOL_SIZE);
|
||||
const decoder = new wasm.WzpFecDecoder(WZP_WS_FULL_BLOCK_SIZE, WZP_WS_FULL_SYMBOL_SIZE);
|
||||
|
||||
// Generate test PCM frames (known data).
|
||||
const originalFrames = [];
|
||||
for (let i = 0; i < WZP_WS_FULL_BLOCK_SIZE; i++) {
|
||||
const frame = new Uint8Array(1920);
|
||||
for (let j = 0; j < 1920; j++) {
|
||||
frame[j] = ((i * 37 + 7) + j) & 0xFF;
|
||||
}
|
||||
originalFrames.push(frame);
|
||||
}
|
||||
|
||||
// Pad and FEC-encode.
|
||||
const paddedSymbols = [];
|
||||
let wireData = null;
|
||||
for (const frame of originalFrames) {
|
||||
const sym = new Uint8Array(WZP_WS_FULL_SYMBOL_SIZE);
|
||||
sym[0] = (frame.length >> 8) & 0xFF;
|
||||
sym[1] = frame.length & 0xFF;
|
||||
sym.set(frame, 2);
|
||||
paddedSymbols.push(sym);
|
||||
|
||||
const result = encoder.add_symbol(sym);
|
||||
if (result) wireData = result;
|
||||
}
|
||||
|
||||
if (!wireData) wireData = encoder.flush();
|
||||
|
||||
// Parse FEC packets and encrypt each one.
|
||||
const FEC_HDR = WZP_WS_FULL_FEC_HEADER_SIZE;
|
||||
const fecPacketSize = FEC_HDR + WZP_WS_FULL_SYMBOL_SIZE;
|
||||
const encryptedPackets = [];
|
||||
|
||||
if (wireData) {
|
||||
for (let offset = 0; offset + fecPacketSize <= wireData.length; offset += fecPacketSize) {
|
||||
const blockId = wireData[offset];
|
||||
const symbolIdx = wireData[offset + 1];
|
||||
const isRepair = wireData[offset + 2] !== 0;
|
||||
const symbolData = wireData.slice(offset + FEC_HDR, offset + fecPacketSize);
|
||||
|
||||
// Build header for AAD (match wire protocol bit layout).
|
||||
const header = new Uint8Array(WZP_WS_FULL_HEADER_SIZE);
|
||||
const fecRatioEncoded = Math.min(127, Math.round(0.5 * 63.5)); // 50% FEC
|
||||
header[0] = ((isRepair ? 1 : 0) << 6)
|
||||
| ((0 & 0x0F) << 2) // codecId=0
|
||||
| ((fecRatioEncoded >> 6) & 0x01); // FecRatioHi
|
||||
header[1] = (fecRatioEncoded & 0x3F) << 2; // FecRatioLo
|
||||
header[8] = blockId;
|
||||
header[9] = symbolIdx;
|
||||
|
||||
// Encrypt with Alice's session.
|
||||
const ciphertext = aliceSession.encrypt(header, symbolData);
|
||||
|
||||
encryptedPackets.push({
|
||||
blockId, symbolIdx, isRepair, header, ciphertext,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
const sourcePackets = encryptedPackets.filter(p => !p.isRepair).length;
|
||||
const repairPackets = encryptedPackets.filter(p => p.isRepair).length;
|
||||
|
||||
// --- Simulate 30% loss (drop 2 of ~7 packets) ---
|
||||
const dropIndices = new Set([1, 3]);
|
||||
const surviving = encryptedPackets.filter((_, i) => !dropIndices.has(i));
|
||||
|
||||
// --- Decrypt + FEC decode on Bob's side ---
|
||||
let fecDecoded = null;
|
||||
let decryptOk = true;
|
||||
|
||||
for (const pkt of surviving) {
|
||||
let symbolData;
|
||||
try {
|
||||
symbolData = bobSession.decrypt(pkt.header, pkt.ciphertext);
|
||||
} catch (e) {
|
||||
decryptOk = false;
|
||||
break;
|
||||
}
|
||||
|
||||
const result = decoder.add_symbol(pkt.blockId, pkt.symbolIdx, pkt.isRepair, symbolData);
|
||||
if (result) {
|
||||
fecDecoded = result;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// --- Verify recovered frames ---
|
||||
let fecOk = false;
|
||||
if (fecDecoded) {
|
||||
fecOk = true;
|
||||
for (let i = 0; i < WZP_WS_FULL_BLOCK_SIZE && fecOk; i++) {
|
||||
const symOffset = i * WZP_WS_FULL_SYMBOL_SIZE;
|
||||
if (symOffset + WZP_WS_FULL_SYMBOL_SIZE > fecDecoded.length) {
|
||||
fecOk = false;
|
||||
break;
|
||||
}
|
||||
const sym = fecDecoded.slice(symOffset, symOffset + WZP_WS_FULL_SYMBOL_SIZE);
|
||||
const len = (sym[0] << 8) | sym[1];
|
||||
const recovered = sym.slice(2, 2 + len);
|
||||
|
||||
if (recovered.length !== originalFrames[i].length) {
|
||||
fecOk = false;
|
||||
break;
|
||||
}
|
||||
for (let j = 0; j < recovered.length; j++) {
|
||||
if (recovered[j] !== originalFrames[i][j]) {
|
||||
fecOk = false;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Cleanup WASM objects.
|
||||
alice.free();
|
||||
bob.free();
|
||||
aliceSession.free();
|
||||
bobSession.free();
|
||||
encoder.free();
|
||||
decoder.free();
|
||||
|
||||
const elapsed = performance.now() - t0;
|
||||
|
||||
return {
|
||||
success: secretsMatch && decryptOk && fecOk,
|
||||
secretsMatch,
|
||||
decryptOk,
|
||||
fecOk,
|
||||
sourcePackets,
|
||||
repairPackets,
|
||||
totalPackets: encryptedPackets.length,
|
||||
dropped: dropIndices.size,
|
||||
surviving: surviving.length,
|
||||
elapsed: elapsed.toFixed(2) + 'ms',
|
||||
};
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// Internal
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
  // Handle one encrypted binary frame: parse the MediaHeader (AAD), decrypt
  // the payload with the session key, feed the plaintext symbol into the FEC
  // decoder, and emit recovered PCM frames via onAudio when a block completes.
  _handleMessage(event) {
    if (!(event.data instanceof ArrayBuffer)) return;
    const data = new Uint8Array(event.data);
    if (data.length < WZP_WS_FULL_HEADER_SIZE) return;

    const header = this._parseHeader(data);
    if (!header) return;

    // Count every well-formed incoming frame, even ones we cannot decrypt.
    this.stats.recv++;

    if (!this.cryptoSession || !this.fecDecoder) return;

    // Extract header bytes (AAD) and ciphertext.
    const headerBytes = data.slice(0, WZP_WS_FULL_HEADER_SIZE);
    const ciphertext = data.slice(WZP_WS_FULL_HEADER_SIZE);

    // Decrypt.
    let symbolData;
    try {
      symbolData = this.cryptoSession.decrypt(headerBytes, ciphertext);
      this.stats.decrypted++;
    } catch (e) {
      // Decryption failure — corrupted or replayed packet.
      console.warn('[wzp-ws-full] decrypt failed:', e);
      return;
    }

    // Feed decrypted symbol to FEC decoder.
    // add_symbol returns a truthy buffer only when the whole block decodes.
    const decoded = this.fecDecoder.add_symbol(
      header.fecBlock,
      header.fecSymbol,
      header.isRepair,
      symbolData
    );

    if (decoded) {
      this.stats.fecRecovered++;

      // decoded is concatenated padded symbols.
      // Each symbol is WZP_WS_FULL_SYMBOL_SIZE bytes with a 2-byte length prefix.
      for (let off = 0; off + WZP_WS_FULL_SYMBOL_SIZE <= decoded.length; off += WZP_WS_FULL_SYMBOL_SIZE) {
        const symbol = decoded.slice(off, off + WZP_WS_FULL_SYMBOL_SIZE);
        const pcmBytes = this._unpadSymbol(symbol);

        // Int16Array construction requires an even byte count; an empty
        // result from _unpadSymbol indicates a corrupted length prefix.
        if (pcmBytes.length > 0 && pcmBytes.length % 2 === 0) {
          const pcm = new Int16Array(
            pcmBytes.buffer,
            pcmBytes.byteOffset,
            pcmBytes.byteLength / 2
          );
          if (this.onAudio) this.onAudio(pcm);
        }
      }
    }
  }
|
||||
|
||||
_startStatsTimer() {
|
||||
this._stopStatsTimer();
|
||||
this._statsInterval = setInterval(() => {
|
||||
if (!this._connected) {
|
||||
this._stopStatsTimer();
|
||||
return;
|
||||
}
|
||||
const elapsed = (Date.now() - this._startTime) / 1000;
|
||||
const loss = this.stats.sent > 0
|
||||
? Math.max(0, 1 - this.stats.recv / this.stats.sent)
|
||||
: 0;
|
||||
if (this.onStats) {
|
||||
this.onStats({
|
||||
sent: this.stats.sent,
|
||||
recv: this.stats.recv,
|
||||
loss: loss,
|
||||
elapsed: elapsed,
|
||||
encrypted: this.stats.encrypted,
|
||||
decrypted: this.stats.decrypted,
|
||||
fecRecovered: this.stats.fecRecovered,
|
||||
});
|
||||
}
|
||||
}, 1000);
|
||||
}
|
||||
|
||||
_stopStatsTimer() {
|
||||
if (this._statsInterval) {
|
||||
clearInterval(this._statsInterval);
|
||||
this._statsInterval = null;
|
||||
}
|
||||
}
|
||||
|
||||
_status(msg) {
|
||||
if (this.onStatus) this.onStatus(msg);
|
||||
}
|
||||
|
||||
_cleanup() {
|
||||
this._connected = false;
|
||||
this._keyExchangeComplete = false;
|
||||
this._stopStatsTimer();
|
||||
if (this.ws) {
|
||||
try { this.ws.close(); } catch (_) { /* ignore */ }
|
||||
this.ws = null;
|
||||
}
|
||||
if (this.cryptoSession) {
|
||||
try { this.cryptoSession.free(); } catch (_) { /* ignore */ }
|
||||
this.cryptoSession = null;
|
||||
}
|
||||
if (this.fecEncoder) {
|
||||
try { this.fecEncoder.free(); } catch (_) { /* ignore */ }
|
||||
this.fecEncoder = null;
|
||||
}
|
||||
if (this.fecDecoder) {
|
||||
try { this.fecDecoder.free(); } catch (_) { /* ignore */ }
|
||||
this.fecDecoder = null;
|
||||
}
|
||||
if (this._keyExchange) {
|
||||
try { this._keyExchange.free(); } catch (_) { /* ignore */ }
|
||||
this._keyExchange = null;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Export
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
// Attach the client to the global scope for plain <script> consumers.
window.WZPWsFullClient = WZPWsFullClient;
|
||||
289
crates/wzp-web/static/js/wzp-ws.js
Normal file
289
crates/wzp-web/static/js/wzp-ws.js
Normal file
@@ -0,0 +1,289 @@
|
||||
// WarzonePhone — WZP-WS client (Variant 4).
|
||||
// WebSocket transport, WZP wire protocol, no WASM.
|
||||
// Sends MediaPacket-formatted frames instead of raw PCM.
|
||||
// Ready for direct relay WS support (no bridge translation needed).
|
||||
|
||||
'use strict';

// 12-byte MediaHeader size (matches wzp-proto MediaHeader::WIRE_SIZE).
const WZP_WS_HEADER_SIZE = 12;
|
||||
|
||||
class WZPWsClient {
|
||||
  /**
   * @param {Object} options
   * @param {string} options.wsUrl WebSocket URL (ws://host/ws/room)
   * @param {string} options.room Room name
   * @param {string} [options.authToken] Optional bearer token for the bridge
   * @param {Function} options.onAudio callback(Int16Array) for playback
   * @param {Function} options.onStatus callback(string) for UI status
   * @param {Function} options.onStats callback({sent, recv, loss, elapsed}) for UI
   */
  constructor(options) {
    this.wsUrl = options.wsUrl;
    this.room = options.room;
    this.authToken = options.authToken || null;
    this.onAudio = options.onAudio || null;
    this.onStatus = options.onStatus || null;
    this.onStats = options.onStats || null;

    // Transport / wire-protocol state.
    this.ws = null;
    this.seq = 0;
    this.startTimestamp = 0;
    this.stats = { sent: 0, recv: 0 };
    this._startTime = 0;
    this._statsInterval = null;
    this._connected = false;
    this._authenticated = false;
  }
|
||||
|
||||
/**
|
||||
* Build a 12-byte WZP MediaHeader.
|
||||
*
|
||||
* Wire layout (from wzp-proto::packet::MediaHeader):
|
||||
* Byte 0: V(1)|T(1)|CodecID(4)|Q(1)|FecRatioHi(1)
|
||||
* Byte 1: FecRatioLo(6)|Reserved(2)
|
||||
* Bytes 2-3: Sequence number (BE u16)
|
||||
* Bytes 4-7: Timestamp ms (BE u32)
|
||||
* Byte 8: FEC block ID
|
||||
* Byte 9: FEC symbol index
|
||||
* Byte 10: Reserved
|
||||
* Byte 11: CSRC count
|
||||
*
|
||||
* @param {number} seq Sequence number (u16)
|
||||
* @param {number} timestampMs Milliseconds since session start
|
||||
* @param {boolean} isRepair True if this is a FEC repair symbol
|
||||
* @param {number} codecId Codec ID (0=RawPcm16, 1=Opus16k, 2=Opus48k)
|
||||
* @param {number} fecBlock FEC block ID (u8)
|
||||
* @param {number} fecSymbol FEC symbol index (u8)
|
||||
* @param {number} fecRatio FEC ratio (0.0 to ~2.0)
|
||||
* @param {boolean} hasQuality Whether a quality report is attached
|
||||
* @returns {Uint8Array} 12-byte header
|
||||
*/
|
||||
_buildHeader(seq, timestampMs, isRepair = false, codecId = 0, fecBlock = 0, fecSymbol = 0, fecRatio = 0, hasQuality = false) {
|
||||
const buf = new ArrayBuffer(WZP_WS_HEADER_SIZE);
|
||||
const view = new DataView(buf);
|
||||
|
||||
const fecRatioEncoded = Math.min(127, Math.round(fecRatio * 63.5));
|
||||
const byte0 = ((0 & 0x01) << 7) // version=0
|
||||
| ((isRepair ? 1 : 0) << 6) // T bit
|
||||
| ((codecId & 0x0F) << 2) // CodecID
|
||||
| ((hasQuality ? 1 : 0) << 1) // Q bit
|
||||
| ((fecRatioEncoded >> 6) & 0x01); // FecRatioHi
|
||||
view.setUint8(0, byte0);
|
||||
|
||||
const byte1 = (fecRatioEncoded & 0x3F) << 2;
|
||||
view.setUint8(1, byte1);
|
||||
|
||||
view.setUint16(2, seq & 0xFFFF); // big-endian (default for DataView)
|
||||
view.setUint32(4, timestampMs & 0xFFFFFFFF); // big-endian
|
||||
view.setUint8(8, fecBlock & 0xFF);
|
||||
view.setUint8(9, fecSymbol & 0xFF);
|
||||
view.setUint8(10, 0); // reserved
|
||||
view.setUint8(11, 0); // csrc_count
|
||||
return new Uint8Array(buf);
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse a 12-byte MediaHeader from received binary data.
|
||||
*
|
||||
* @param {Uint8Array} data At least 12 bytes
|
||||
* @returns {Object|null} Parsed header fields, or null if too short
|
||||
*/
|
||||
_parseHeader(data) {
|
||||
if (data.byteLength < WZP_WS_HEADER_SIZE) return null;
|
||||
const view = new DataView(data.buffer || data, data.byteOffset || 0, 12);
|
||||
const byte0 = view.getUint8(0);
|
||||
const byte1 = view.getUint8(1);
|
||||
const fecRatioEncoded = ((byte0 & 0x01) << 6) | ((byte1 >> 2) & 0x3F);
|
||||
return {
|
||||
version: (byte0 >> 7) & 1,
|
||||
isRepair: !!((byte0 >> 6) & 1),
|
||||
codecId: (byte0 >> 2) & 0x0F,
|
||||
hasQuality: !!((byte0 >> 1) & 1),
|
||||
fecRatio: fecRatioEncoded / 63.5,
|
||||
seq: view.getUint16(2),
|
||||
timestamp: view.getUint32(4),
|
||||
fecBlock: view.getUint8(8),
|
||||
fecSymbol: view.getUint8(9),
|
||||
reserved: view.getUint8(10),
|
||||
csrcCount: view.getUint8(11),
|
||||
};
|
||||
}
|
||||
|
||||
/**
 * Open WebSocket connection to the wzp-web bridge.
 *
 * Installs all four socket handlers, sends the optional auth message on
 * open, resets the per-session counters, and starts the stats timer.
 *
 * NOTE(review): the promise resolves on socket *open*, before any auth_ok
 * response arrives — callers that require an authenticated session should
 * also observe `_authenticated`. Confirm this is the intended contract.
 *
 * @returns {Promise<void>} resolves when connected
 */
async connect() {
  // Idempotent: a second call while connected is a no-op.
  if (this._connected) return;

  return new Promise((resolve, reject) => {
    this._status('Connecting (WZP-WS) to room: ' + this.room + '...');

    this.ws = new WebSocket(this.wsUrl);
    this.ws.binaryType = 'arraybuffer';

    this.ws.onopen = () => {
      // Send auth if token provided.
      if (this.authToken) {
        this.ws.send(JSON.stringify({ type: 'auth', token: this.authToken }));
      }

      // Reset per-session state so a reconnect starts a fresh session.
      this._connected = true;
      this._authenticated = !this.authToken; // authenticated immediately if no token needed
      this.seq = 0;
      this.startTimestamp = Date.now();
      this.stats = { sent: 0, recv: 0 };
      this._startTime = Date.now();
      this._status('Connected (WZP-WS) to room: ' + this.room);
      this._startStatsTimer();
      resolve();
    };

    this.ws.onmessage = (event) => {
      // Handle text messages (auth responses). Binary frames fall through
      // to _handleMessage below.
      if (typeof event.data === 'string') {
        try {
          const msg = JSON.parse(event.data);
          if (msg.type === 'auth_ok') {
            this._authenticated = true;
            this._status('Authenticated (WZP-WS) to room: ' + this.room);
          }
          if (msg.type === 'auth_error') {
            this._status('Auth failed: ' + (msg.reason || 'unknown'));
            this.disconnect();
          }
        } catch(e) { /* ignore non-JSON text */ }
        return;
      }
      this._handleMessage(event);
    };

    this.ws.onclose = () => {
      // Only report "Disconnected" if we had actually reached the
      // connected state; a failed connect is reported via onerror.
      const was = this._connected;
      this._cleanup();
      if (was) this._status('Disconnected');
    };

    this.ws.onerror = () => {
      if (!this._connected) {
        // Error before open: fail the pending connect() promise.
        this._cleanup();
        reject(new Error('WebSocket connection failed'));
      } else {
        // Error mid-session: surface it; onclose will do the teardown.
        this._status('Connection error');
      }
    };
  });
}
|
||||
|
||||
/**
|
||||
* Close WebSocket and clean up.
|
||||
*/
|
||||
disconnect() {
|
||||
this._connected = false;
|
||||
if (this.ws) {
|
||||
this.ws.close();
|
||||
this.ws = null;
|
||||
}
|
||||
this._stopStatsTimer();
|
||||
}
|
||||
|
||||
/**
|
||||
* Send a PCM audio frame wrapped in a WZP MediaPacket over the WebSocket.
|
||||
*
|
||||
* Wire format: 12-byte MediaHeader + raw PCM payload.
|
||||
* The relay can parse this natively without bridge translation.
|
||||
*
|
||||
* @param {ArrayBuffer} pcmBuffer 960-sample Int16 PCM (1920 bytes)
|
||||
*/
|
||||
async sendAudio(pcmBuffer) {
|
||||
if (!this._connected || !this.ws || this.ws.readyState !== WebSocket.OPEN) return;
|
||||
|
||||
const header = this._buildHeader(
|
||||
this.seq,
|
||||
Date.now() - this.startTimestamp,
|
||||
false, 0, 0, 0, 0, false
|
||||
);
|
||||
|
||||
// Combine header + payload into single binary frame.
|
||||
const pcmBytes = new Uint8Array(pcmBuffer);
|
||||
const packet = new Uint8Array(WZP_WS_HEADER_SIZE + pcmBytes.length);
|
||||
packet.set(header, 0);
|
||||
packet.set(pcmBytes, WZP_WS_HEADER_SIZE);
|
||||
|
||||
this.ws.send(packet.buffer);
|
||||
this.seq = (this.seq + 1) & 0xFFFF;
|
||||
this.stats.sent++;
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// Internal
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
_handleMessage(event) {
|
||||
if (!(event.data instanceof ArrayBuffer)) return;
|
||||
const data = new Uint8Array(event.data);
|
||||
if (data.length < WZP_WS_HEADER_SIZE) return; // too small for header
|
||||
|
||||
const header = this._parseHeader(data);
|
||||
if (!header) return;
|
||||
|
||||
// Extract payload (everything after 12-byte header).
|
||||
// Payload is raw PCM Int16 samples.
|
||||
const payloadBytes = data.slice(WZP_WS_HEADER_SIZE);
|
||||
const pcm = new Int16Array(
|
||||
payloadBytes.buffer,
|
||||
payloadBytes.byteOffset,
|
||||
payloadBytes.byteLength / 2
|
||||
);
|
||||
this.stats.recv++;
|
||||
if (this.onAudio) this.onAudio(pcm);
|
||||
}
|
||||
|
||||
_startStatsTimer() {
|
||||
this._stopStatsTimer();
|
||||
this._statsInterval = setInterval(() => {
|
||||
if (!this._connected) {
|
||||
this._stopStatsTimer();
|
||||
return;
|
||||
}
|
||||
const elapsed = (Date.now() - this._startTime) / 1000;
|
||||
const loss = this.stats.sent > 0
|
||||
? Math.max(0, 1 - this.stats.recv / this.stats.sent)
|
||||
: 0;
|
||||
if (this.onStats) {
|
||||
this.onStats({
|
||||
sent: this.stats.sent,
|
||||
recv: this.stats.recv,
|
||||
loss: loss,
|
||||
elapsed: elapsed,
|
||||
});
|
||||
}
|
||||
}, 1000);
|
||||
}
|
||||
|
||||
_stopStatsTimer() {
|
||||
if (this._statsInterval) {
|
||||
clearInterval(this._statsInterval);
|
||||
this._statsInterval = null;
|
||||
}
|
||||
}
|
||||
|
||||
_status(msg) {
|
||||
if (this.onStatus) this.onStatus(msg);
|
||||
}
|
||||
|
||||
_cleanup() {
|
||||
this._connected = false;
|
||||
this._stopStatsTimer();
|
||||
if (this.ws) {
|
||||
try { this.ws.close(); } catch (_) { /* ignore */ }
|
||||
this.ws = null;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Export
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
window.WZPWsClient = WZPWsClient;
|
||||
2
crates/wzp-web/static/wasm/.gitignore
vendored
Normal file
2
crates/wzp-web/static/wasm/.gitignore
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
package.json
|
||||
*.d.ts
|
||||
556
crates/wzp-web/static/wasm/wzp_wasm.js
Normal file
556
crates/wzp-web/static/wasm/wzp_wasm.js
Normal file
@@ -0,0 +1,556 @@
|
||||
/* @ts-self-types="./wzp_wasm.d.ts" */
|
||||
|
||||
/**
|
||||
* Symmetric encryption session using ChaCha20-Poly1305.
|
||||
*
|
||||
* Mirrors `wzp-crypto::session::ChaChaSession` for WASM. Nonce derivation
|
||||
* and key setup are identical so WASM and native peers interoperate.
|
||||
*/
|
||||
export class WzpCryptoSession {
|
||||
__destroy_into_raw() {
|
||||
const ptr = this.__wbg_ptr;
|
||||
this.__wbg_ptr = 0;
|
||||
WzpCryptoSessionFinalization.unregister(this);
|
||||
return ptr;
|
||||
}
|
||||
free() {
|
||||
const ptr = this.__destroy_into_raw();
|
||||
wasm.__wbg_wzpcryptosession_free(ptr, 0);
|
||||
}
|
||||
/**
|
||||
* Decrypt a media payload with AAD.
|
||||
*
|
||||
* Returns plaintext on success, or throws on auth failure.
|
||||
* @param {Uint8Array} header_aad
|
||||
* @param {Uint8Array} ciphertext
|
||||
* @returns {Uint8Array}
|
||||
*/
|
||||
decrypt(header_aad, ciphertext) {
|
||||
const ptr0 = passArray8ToWasm0(header_aad, wasm.__wbindgen_malloc);
|
||||
const len0 = WASM_VECTOR_LEN;
|
||||
const ptr1 = passArray8ToWasm0(ciphertext, wasm.__wbindgen_malloc);
|
||||
const len1 = WASM_VECTOR_LEN;
|
||||
const ret = wasm.wzpcryptosession_decrypt(this.__wbg_ptr, ptr0, len0, ptr1, len1);
|
||||
if (ret[3]) {
|
||||
throw takeFromExternrefTable0(ret[2]);
|
||||
}
|
||||
var v3 = getArrayU8FromWasm0(ret[0], ret[1]).slice();
|
||||
wasm.__wbindgen_free(ret[0], ret[1] * 1, 1);
|
||||
return v3;
|
||||
}
|
||||
/**
|
||||
* Encrypt a media payload with AAD (typically the 12-byte MediaHeader).
|
||||
*
|
||||
* Returns `ciphertext || poly1305_tag` (plaintext.len() + 16 bytes).
|
||||
* @param {Uint8Array} header_aad
|
||||
* @param {Uint8Array} plaintext
|
||||
* @returns {Uint8Array}
|
||||
*/
|
||||
encrypt(header_aad, plaintext) {
|
||||
const ptr0 = passArray8ToWasm0(header_aad, wasm.__wbindgen_malloc);
|
||||
const len0 = WASM_VECTOR_LEN;
|
||||
const ptr1 = passArray8ToWasm0(plaintext, wasm.__wbindgen_malloc);
|
||||
const len1 = WASM_VECTOR_LEN;
|
||||
const ret = wasm.wzpcryptosession_encrypt(this.__wbg_ptr, ptr0, len0, ptr1, len1);
|
||||
if (ret[3]) {
|
||||
throw takeFromExternrefTable0(ret[2]);
|
||||
}
|
||||
var v3 = getArrayU8FromWasm0(ret[0], ret[1]).slice();
|
||||
wasm.__wbindgen_free(ret[0], ret[1] * 1, 1);
|
||||
return v3;
|
||||
}
|
||||
/**
|
||||
* Create from a 32-byte shared secret (output of `WzpKeyExchange.derive_shared_secret`).
|
||||
* @param {Uint8Array} shared_secret
|
||||
*/
|
||||
constructor(shared_secret) {
|
||||
const ptr0 = passArray8ToWasm0(shared_secret, wasm.__wbindgen_malloc);
|
||||
const len0 = WASM_VECTOR_LEN;
|
||||
const ret = wasm.wzpcryptosession_new(ptr0, len0);
|
||||
if (ret[2]) {
|
||||
throw takeFromExternrefTable0(ret[1]);
|
||||
}
|
||||
this.__wbg_ptr = ret[0] >>> 0;
|
||||
WzpCryptoSessionFinalization.register(this, this.__wbg_ptr, this);
|
||||
return this;
|
||||
}
|
||||
/**
|
||||
* Current receive sequence number (for diagnostics / UI stats).
|
||||
* @returns {number}
|
||||
*/
|
||||
recv_seq() {
|
||||
const ret = wasm.wzpcryptosession_recv_seq(this.__wbg_ptr);
|
||||
return ret >>> 0;
|
||||
}
|
||||
/**
|
||||
* Current send sequence number (for diagnostics / UI stats).
|
||||
* @returns {number}
|
||||
*/
|
||||
send_seq() {
|
||||
const ret = wasm.wzpcryptosession_send_seq(this.__wbg_ptr);
|
||||
return ret >>> 0;
|
||||
}
|
||||
}
|
||||
if (Symbol.dispose) WzpCryptoSession.prototype[Symbol.dispose] = WzpCryptoSession.prototype.free;
|
||||
|
||||
export class WzpFecDecoder {
|
||||
__destroy_into_raw() {
|
||||
const ptr = this.__wbg_ptr;
|
||||
this.__wbg_ptr = 0;
|
||||
WzpFecDecoderFinalization.unregister(this);
|
||||
return ptr;
|
||||
}
|
||||
free() {
|
||||
const ptr = this.__destroy_into_raw();
|
||||
wasm.__wbg_wzpfecdecoder_free(ptr, 0);
|
||||
}
|
||||
/**
|
||||
* Feed a received symbol.
|
||||
*
|
||||
* Returns the decoded block (concatenated original frames, unpadded) if
|
||||
* enough symbols have been received to recover the block, or `undefined`.
|
||||
* @param {number} block_id
|
||||
* @param {number} symbol_idx
|
||||
* @param {boolean} _is_repair
|
||||
* @param {Uint8Array} data
|
||||
* @returns {Uint8Array | undefined}
|
||||
*/
|
||||
add_symbol(block_id, symbol_idx, _is_repair, data) {
|
||||
const ptr0 = passArray8ToWasm0(data, wasm.__wbindgen_malloc);
|
||||
const len0 = WASM_VECTOR_LEN;
|
||||
const ret = wasm.wzpfecdecoder_add_symbol(this.__wbg_ptr, block_id, symbol_idx, _is_repair, ptr0, len0);
|
||||
let v2;
|
||||
if (ret[0] !== 0) {
|
||||
v2 = getArrayU8FromWasm0(ret[0], ret[1]).slice();
|
||||
wasm.__wbindgen_free(ret[0], ret[1] * 1, 1);
|
||||
}
|
||||
return v2;
|
||||
}
|
||||
/**
|
||||
* Create a new FEC decoder.
|
||||
*
|
||||
* * `block_size` — expected number of source symbols per block.
|
||||
* * `symbol_size` — padded byte size of each symbol (must match encoder).
|
||||
* @param {number} block_size
|
||||
* @param {number} symbol_size
|
||||
*/
|
||||
constructor(block_size, symbol_size) {
|
||||
const ret = wasm.wzpfecdecoder_new(block_size, symbol_size);
|
||||
this.__wbg_ptr = ret >>> 0;
|
||||
WzpFecDecoderFinalization.register(this, this.__wbg_ptr, this);
|
||||
return this;
|
||||
}
|
||||
}
|
||||
if (Symbol.dispose) WzpFecDecoder.prototype[Symbol.dispose] = WzpFecDecoder.prototype.free;
|
||||
|
||||
export class WzpFecEncoder {
|
||||
__destroy_into_raw() {
|
||||
const ptr = this.__wbg_ptr;
|
||||
this.__wbg_ptr = 0;
|
||||
WzpFecEncoderFinalization.unregister(this);
|
||||
return ptr;
|
||||
}
|
||||
free() {
|
||||
const ptr = this.__destroy_into_raw();
|
||||
wasm.__wbg_wzpfecencoder_free(ptr, 0);
|
||||
}
|
||||
/**
|
||||
* Add a source symbol (audio frame).
|
||||
*
|
||||
* Returns encoded packets (all source + repair) when the block is complete,
|
||||
* or `undefined` if the block is still accumulating.
|
||||
*
|
||||
* Each returned packet carries the 3-byte header:
|
||||
* `[block_id][symbol_idx][is_repair]` followed by `symbol_size` bytes.
|
||||
* @param {Uint8Array} data
|
||||
* @returns {Uint8Array | undefined}
|
||||
*/
|
||||
add_symbol(data) {
|
||||
const ptr0 = passArray8ToWasm0(data, wasm.__wbindgen_malloc);
|
||||
const len0 = WASM_VECTOR_LEN;
|
||||
const ret = wasm.wzpfecencoder_add_symbol(this.__wbg_ptr, ptr0, len0);
|
||||
let v2;
|
||||
if (ret[0] !== 0) {
|
||||
v2 = getArrayU8FromWasm0(ret[0], ret[1]).slice();
|
||||
wasm.__wbindgen_free(ret[0], ret[1] * 1, 1);
|
||||
}
|
||||
return v2;
|
||||
}
|
||||
/**
|
||||
* Force-flush the current (possibly partial) block.
|
||||
*
|
||||
* Returns all source + repair symbols with headers, or empty vec if no
|
||||
* symbols have been accumulated.
|
||||
* @returns {Uint8Array}
|
||||
*/
|
||||
flush() {
|
||||
const ret = wasm.wzpfecencoder_flush(this.__wbg_ptr);
|
||||
var v1 = getArrayU8FromWasm0(ret[0], ret[1]).slice();
|
||||
wasm.__wbindgen_free(ret[0], ret[1] * 1, 1);
|
||||
return v1;
|
||||
}
|
||||
/**
|
||||
* Create a new FEC encoder.
|
||||
*
|
||||
* * `block_size` — number of source symbols (audio frames) per FEC block.
|
||||
* * `symbol_size` — padded byte size of each symbol (default 256).
|
||||
* @param {number} block_size
|
||||
* @param {number} symbol_size
|
||||
*/
|
||||
constructor(block_size, symbol_size) {
|
||||
const ret = wasm.wzpfecencoder_new(block_size, symbol_size);
|
||||
this.__wbg_ptr = ret >>> 0;
|
||||
WzpFecEncoderFinalization.register(this, this.__wbg_ptr, this);
|
||||
return this;
|
||||
}
|
||||
}
|
||||
if (Symbol.dispose) WzpFecEncoder.prototype[Symbol.dispose] = WzpFecEncoder.prototype.free;
|
||||
|
||||
/**
|
||||
* X25519 key exchange: generate ephemeral keypair and derive shared secret.
|
||||
*
|
||||
* Usage from JS:
|
||||
* ```js
|
||||
* const kx = new WzpKeyExchange();
|
||||
* const ourPub = kx.public_key(); // Uint8Array(32)
|
||||
* // ... send ourPub to peer, receive peerPub ...
|
||||
* const secret = kx.derive_shared_secret(peerPub); // Uint8Array(32)
|
||||
* const session = new WzpCryptoSession(secret);
|
||||
* ```
|
||||
*/
|
||||
export class WzpKeyExchange {
|
||||
__destroy_into_raw() {
|
||||
const ptr = this.__wbg_ptr;
|
||||
this.__wbg_ptr = 0;
|
||||
WzpKeyExchangeFinalization.unregister(this);
|
||||
return ptr;
|
||||
}
|
||||
free() {
|
||||
const ptr = this.__destroy_into_raw();
|
||||
wasm.__wbg_wzpkeyexchange_free(ptr, 0);
|
||||
}
|
||||
/**
|
||||
* Derive a 32-byte session key from the peer's public key.
|
||||
*
|
||||
* Raw DH output is expanded via HKDF-SHA256 with info="warzone-session-key",
|
||||
* matching `wzp-crypto::handshake::WarzoneKeyExchange::derive_session`.
|
||||
* @param {Uint8Array} peer_public
|
||||
* @returns {Uint8Array}
|
||||
*/
|
||||
derive_shared_secret(peer_public) {
|
||||
const ptr0 = passArray8ToWasm0(peer_public, wasm.__wbindgen_malloc);
|
||||
const len0 = WASM_VECTOR_LEN;
|
||||
const ret = wasm.wzpkeyexchange_derive_shared_secret(this.__wbg_ptr, ptr0, len0);
|
||||
if (ret[3]) {
|
||||
throw takeFromExternrefTable0(ret[2]);
|
||||
}
|
||||
var v2 = getArrayU8FromWasm0(ret[0], ret[1]).slice();
|
||||
wasm.__wbindgen_free(ret[0], ret[1] * 1, 1);
|
||||
return v2;
|
||||
}
|
||||
/**
|
||||
* Generate a new random X25519 keypair.
|
||||
*/
|
||||
constructor() {
|
||||
const ret = wasm.wzpkeyexchange_new();
|
||||
this.__wbg_ptr = ret >>> 0;
|
||||
WzpKeyExchangeFinalization.register(this, this.__wbg_ptr, this);
|
||||
return this;
|
||||
}
|
||||
/**
|
||||
* Our public key (32 bytes).
|
||||
* @returns {Uint8Array}
|
||||
*/
|
||||
public_key() {
|
||||
const ret = wasm.wzpkeyexchange_public_key(this.__wbg_ptr);
|
||||
var v1 = getArrayU8FromWasm0(ret[0], ret[1]).slice();
|
||||
wasm.__wbindgen_free(ret[0], ret[1] * 1, 1);
|
||||
return v1;
|
||||
}
|
||||
}
|
||||
if (Symbol.dispose) WzpKeyExchange.prototype[Symbol.dispose] = WzpKeyExchange.prototype.free;
|
||||
|
||||
function __wbg_get_imports() {
|
||||
const import0 = {
|
||||
__proto__: null,
|
||||
__wbg___wbindgen_is_function_3c846841762788c1: function(arg0) {
|
||||
const ret = typeof(arg0) === 'function';
|
||||
return ret;
|
||||
},
|
||||
__wbg___wbindgen_is_object_781bc9f159099513: function(arg0) {
|
||||
const val = arg0;
|
||||
const ret = typeof(val) === 'object' && val !== null;
|
||||
return ret;
|
||||
},
|
||||
__wbg___wbindgen_is_string_7ef6b97b02428fae: function(arg0) {
|
||||
const ret = typeof(arg0) === 'string';
|
||||
return ret;
|
||||
},
|
||||
__wbg___wbindgen_is_undefined_52709e72fb9f179c: function(arg0) {
|
||||
const ret = arg0 === undefined;
|
||||
return ret;
|
||||
},
|
||||
__wbg___wbindgen_throw_6ddd609b62940d55: function(arg0, arg1) {
|
||||
throw new Error(getStringFromWasm0(arg0, arg1));
|
||||
},
|
||||
__wbg_call_2d781c1f4d5c0ef8: function() { return handleError(function (arg0, arg1, arg2) {
|
||||
const ret = arg0.call(arg1, arg2);
|
||||
return ret;
|
||||
}, arguments); },
|
||||
__wbg_crypto_38df2bab126b63dc: function(arg0) {
|
||||
const ret = arg0.crypto;
|
||||
return ret;
|
||||
},
|
||||
__wbg_getRandomValues_c44a50d8cfdaebeb: function() { return handleError(function (arg0, arg1) {
|
||||
arg0.getRandomValues(arg1);
|
||||
}, arguments); },
|
||||
__wbg_length_ea16607d7b61445b: function(arg0) {
|
||||
const ret = arg0.length;
|
||||
return ret;
|
||||
},
|
||||
__wbg_msCrypto_bd5a034af96bcba6: function(arg0) {
|
||||
const ret = arg0.msCrypto;
|
||||
return ret;
|
||||
},
|
||||
__wbg_new_with_length_825018a1616e9e55: function(arg0) {
|
||||
const ret = new Uint8Array(arg0 >>> 0);
|
||||
return ret;
|
||||
},
|
||||
__wbg_node_84ea875411254db1: function(arg0) {
|
||||
const ret = arg0.node;
|
||||
return ret;
|
||||
},
|
||||
__wbg_process_44c7a14e11e9f69e: function(arg0) {
|
||||
const ret = arg0.process;
|
||||
return ret;
|
||||
},
|
||||
__wbg_prototypesetcall_d62e5099504357e6: function(arg0, arg1, arg2) {
|
||||
Uint8Array.prototype.set.call(getArrayU8FromWasm0(arg0, arg1), arg2);
|
||||
},
|
||||
__wbg_randomFillSync_6c25eac9869eb53c: function() { return handleError(function (arg0, arg1) {
|
||||
arg0.randomFillSync(arg1);
|
||||
}, arguments); },
|
||||
__wbg_require_b4edbdcf3e2a1ef0: function() { return handleError(function () {
|
||||
const ret = module.require;
|
||||
return ret;
|
||||
}, arguments); },
|
||||
__wbg_static_accessor_GLOBAL_8adb955bd33fac2f: function() {
|
||||
const ret = typeof global === 'undefined' ? null : global;
|
||||
return isLikeNone(ret) ? 0 : addToExternrefTable0(ret);
|
||||
},
|
||||
__wbg_static_accessor_GLOBAL_THIS_ad356e0db91c7913: function() {
|
||||
const ret = typeof globalThis === 'undefined' ? null : globalThis;
|
||||
return isLikeNone(ret) ? 0 : addToExternrefTable0(ret);
|
||||
},
|
||||
__wbg_static_accessor_SELF_f207c857566db248: function() {
|
||||
const ret = typeof self === 'undefined' ? null : self;
|
||||
return isLikeNone(ret) ? 0 : addToExternrefTable0(ret);
|
||||
},
|
||||
__wbg_static_accessor_WINDOW_bb9f1ba69d61b386: function() {
|
||||
const ret = typeof window === 'undefined' ? null : window;
|
||||
return isLikeNone(ret) ? 0 : addToExternrefTable0(ret);
|
||||
},
|
||||
__wbg_subarray_a068d24e39478a8a: function(arg0, arg1, arg2) {
|
||||
const ret = arg0.subarray(arg1 >>> 0, arg2 >>> 0);
|
||||
return ret;
|
||||
},
|
||||
__wbg_versions_276b2795b1c6a219: function(arg0) {
|
||||
const ret = arg0.versions;
|
||||
return ret;
|
||||
},
|
||||
__wbindgen_cast_0000000000000001: function(arg0, arg1) {
|
||||
// Cast intrinsic for `Ref(Slice(U8)) -> NamedExternref("Uint8Array")`.
|
||||
const ret = getArrayU8FromWasm0(arg0, arg1);
|
||||
return ret;
|
||||
},
|
||||
__wbindgen_cast_0000000000000002: function(arg0, arg1) {
|
||||
// Cast intrinsic for `Ref(String) -> Externref`.
|
||||
const ret = getStringFromWasm0(arg0, arg1);
|
||||
return ret;
|
||||
},
|
||||
__wbindgen_init_externref_table: function() {
|
||||
const table = wasm.__wbindgen_externrefs;
|
||||
const offset = table.grow(4);
|
||||
table.set(0, undefined);
|
||||
table.set(offset + 0, undefined);
|
||||
table.set(offset + 1, null);
|
||||
table.set(offset + 2, true);
|
||||
table.set(offset + 3, false);
|
||||
},
|
||||
};
|
||||
return {
|
||||
__proto__: null,
|
||||
"./wzp_wasm_bg.js": import0,
|
||||
};
|
||||
}
|
||||
|
||||
const WzpCryptoSessionFinalization = (typeof FinalizationRegistry === 'undefined')
|
||||
? { register: () => {}, unregister: () => {} }
|
||||
: new FinalizationRegistry(ptr => wasm.__wbg_wzpcryptosession_free(ptr >>> 0, 1));
|
||||
const WzpFecDecoderFinalization = (typeof FinalizationRegistry === 'undefined')
|
||||
? { register: () => {}, unregister: () => {} }
|
||||
: new FinalizationRegistry(ptr => wasm.__wbg_wzpfecdecoder_free(ptr >>> 0, 1));
|
||||
const WzpFecEncoderFinalization = (typeof FinalizationRegistry === 'undefined')
|
||||
? { register: () => {}, unregister: () => {} }
|
||||
: new FinalizationRegistry(ptr => wasm.__wbg_wzpfecencoder_free(ptr >>> 0, 1));
|
||||
const WzpKeyExchangeFinalization = (typeof FinalizationRegistry === 'undefined')
|
||||
? { register: () => {}, unregister: () => {} }
|
||||
: new FinalizationRegistry(ptr => wasm.__wbg_wzpkeyexchange_free(ptr >>> 0, 1));
|
||||
|
||||
function addToExternrefTable0(obj) {
|
||||
const idx = wasm.__externref_table_alloc();
|
||||
wasm.__wbindgen_externrefs.set(idx, obj);
|
||||
return idx;
|
||||
}
|
||||
|
||||
function getArrayU8FromWasm0(ptr, len) {
|
||||
ptr = ptr >>> 0;
|
||||
return getUint8ArrayMemory0().subarray(ptr / 1, ptr / 1 + len);
|
||||
}
|
||||
|
||||
function getStringFromWasm0(ptr, len) {
|
||||
ptr = ptr >>> 0;
|
||||
return decodeText(ptr, len);
|
||||
}
|
||||
|
||||
let cachedUint8ArrayMemory0 = null;
|
||||
function getUint8ArrayMemory0() {
|
||||
if (cachedUint8ArrayMemory0 === null || cachedUint8ArrayMemory0.byteLength === 0) {
|
||||
cachedUint8ArrayMemory0 = new Uint8Array(wasm.memory.buffer);
|
||||
}
|
||||
return cachedUint8ArrayMemory0;
|
||||
}
|
||||
|
||||
function handleError(f, args) {
|
||||
try {
|
||||
return f.apply(this, args);
|
||||
} catch (e) {
|
||||
const idx = addToExternrefTable0(e);
|
||||
wasm.__wbindgen_exn_store(idx);
|
||||
}
|
||||
}
|
||||
|
||||
function isLikeNone(x) {
|
||||
return x === undefined || x === null;
|
||||
}
|
||||
|
||||
function passArray8ToWasm0(arg, malloc) {
|
||||
const ptr = malloc(arg.length * 1, 1) >>> 0;
|
||||
getUint8ArrayMemory0().set(arg, ptr / 1);
|
||||
WASM_VECTOR_LEN = arg.length;
|
||||
return ptr;
|
||||
}
|
||||
|
||||
function takeFromExternrefTable0(idx) {
|
||||
const value = wasm.__wbindgen_externrefs.get(idx);
|
||||
wasm.__externref_table_dealloc(idx);
|
||||
return value;
|
||||
}
|
||||
|
||||
let cachedTextDecoder = new TextDecoder('utf-8', { ignoreBOM: true, fatal: true });
|
||||
cachedTextDecoder.decode();
|
||||
const MAX_SAFARI_DECODE_BYTES = 2146435072;
|
||||
let numBytesDecoded = 0;
|
||||
function decodeText(ptr, len) {
|
||||
numBytesDecoded += len;
|
||||
if (numBytesDecoded >= MAX_SAFARI_DECODE_BYTES) {
|
||||
cachedTextDecoder = new TextDecoder('utf-8', { ignoreBOM: true, fatal: true });
|
||||
cachedTextDecoder.decode();
|
||||
numBytesDecoded = len;
|
||||
}
|
||||
return cachedTextDecoder.decode(getUint8ArrayMemory0().subarray(ptr, ptr + len));
|
||||
}
|
||||
|
||||
let WASM_VECTOR_LEN = 0;
|
||||
|
||||
let wasmModule, wasm;
|
||||
function __wbg_finalize_init(instance, module) {
|
||||
wasm = instance.exports;
|
||||
wasmModule = module;
|
||||
cachedUint8ArrayMemory0 = null;
|
||||
wasm.__wbindgen_start();
|
||||
return wasm;
|
||||
}
|
||||
|
||||
async function __wbg_load(module, imports) {
|
||||
if (typeof Response === 'function' && module instanceof Response) {
|
||||
if (typeof WebAssembly.instantiateStreaming === 'function') {
|
||||
try {
|
||||
return await WebAssembly.instantiateStreaming(module, imports);
|
||||
} catch (e) {
|
||||
const validResponse = module.ok && expectedResponseType(module.type);
|
||||
|
||||
if (validResponse && module.headers.get('Content-Type') !== 'application/wasm') {
|
||||
console.warn("`WebAssembly.instantiateStreaming` failed because your server does not serve Wasm with `application/wasm` MIME type. Falling back to `WebAssembly.instantiate` which is slower. Original error:\n", e);
|
||||
|
||||
} else { throw e; }
|
||||
}
|
||||
}
|
||||
|
||||
const bytes = await module.arrayBuffer();
|
||||
return await WebAssembly.instantiate(bytes, imports);
|
||||
} else {
|
||||
const instance = await WebAssembly.instantiate(module, imports);
|
||||
|
||||
if (instance instanceof WebAssembly.Instance) {
|
||||
return { instance, module };
|
||||
} else {
|
||||
return instance;
|
||||
}
|
||||
}
|
||||
|
||||
function expectedResponseType(type) {
|
||||
switch (type) {
|
||||
case 'basic': case 'cors': case 'default': return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
function initSync(module) {
|
||||
if (wasm !== undefined) return wasm;
|
||||
|
||||
|
||||
if (module !== undefined) {
|
||||
if (Object.getPrototypeOf(module) === Object.prototype) {
|
||||
({module} = module)
|
||||
} else {
|
||||
console.warn('using deprecated parameters for `initSync()`; pass a single object instead')
|
||||
}
|
||||
}
|
||||
|
||||
const imports = __wbg_get_imports();
|
||||
if (!(module instanceof WebAssembly.Module)) {
|
||||
module = new WebAssembly.Module(module);
|
||||
}
|
||||
const instance = new WebAssembly.Instance(module, imports);
|
||||
return __wbg_finalize_init(instance, module);
|
||||
}
|
||||
|
||||
async function __wbg_init(module_or_path) {
|
||||
if (wasm !== undefined) return wasm;
|
||||
|
||||
|
||||
if (module_or_path !== undefined) {
|
||||
if (Object.getPrototypeOf(module_or_path) === Object.prototype) {
|
||||
({module_or_path} = module_or_path)
|
||||
} else {
|
||||
console.warn('using deprecated parameters for the initialization function; pass a single object instead')
|
||||
}
|
||||
}
|
||||
|
||||
if (module_or_path === undefined) {
|
||||
module_or_path = new URL('wzp_wasm_bg.wasm', import.meta.url);
|
||||
}
|
||||
const imports = __wbg_get_imports();
|
||||
|
||||
if (typeof module_or_path === 'string' || (typeof Request === 'function' && module_or_path instanceof Request) || (typeof URL === 'function' && module_or_path instanceof URL)) {
|
||||
module_or_path = fetch(module_or_path);
|
||||
}
|
||||
|
||||
const { instance, module } = await __wbg_load(await module_or_path, imports);
|
||||
|
||||
return __wbg_finalize_init(instance, module);
|
||||
}
|
||||
|
||||
export { initSync, __wbg_init as default };
|
||||
BIN
crates/wzp-web/static/wasm/wzp_wasm_bg.wasm
Normal file
BIN
crates/wzp-web/static/wasm/wzp_wasm_bg.wasm
Normal file
Binary file not shown.
2
desktop/.gitignore
vendored
2
desktop/.gitignore
vendored
@@ -1,2 +0,0 @@
|
||||
node_modules/
|
||||
dist/
|
||||
@@ -1,143 +0,0 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8" />
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
||||
<title>WarzonePhone</title>
|
||||
<link rel="stylesheet" href="/src/style.css" />
|
||||
</head>
|
||||
<body>
|
||||
<div id="app">
|
||||
<!-- Connect screen -->
|
||||
<div id="connect-screen">
|
||||
<h1>WarzonePhone</h1>
|
||||
<p class="subtitle">Encrypted Voice</p>
|
||||
<div class="form">
|
||||
<label>Relay
|
||||
<button id="relay-selected" class="relay-selected" type="button">
|
||||
<span id="relay-dot" class="dot"></span>
|
||||
<span id="relay-label">Select relay...</span>
|
||||
<span class="arrow">⚙</span>
|
||||
</button>
|
||||
</label>
|
||||
<label>Room
|
||||
<input id="room" type="text" value="android" />
|
||||
</label>
|
||||
<label>Alias
|
||||
<input id="alias" type="text" placeholder="your name" />
|
||||
</label>
|
||||
<div class="form-row">
|
||||
<label class="checkbox">
|
||||
<input id="os-aec" type="checkbox" checked />
|
||||
OS Echo Cancel
|
||||
</label>
|
||||
<button id="settings-btn-home" class="icon-btn" title="Settings (Cmd+,)">⚙</button>
|
||||
</div>
|
||||
<button id="connect-btn" class="primary">Connect</button>
|
||||
<p id="connect-error" class="error"></p>
|
||||
</div>
|
||||
<div class="identity-info">
|
||||
<span id="my-identicon"></span>
|
||||
<span id="my-fingerprint" class="fp-display"></span>
|
||||
</div>
|
||||
<div class="recent-rooms" id="recent-rooms"></div>
|
||||
</div>
|
||||
|
||||
<!-- In-call screen -->
|
||||
<div id="call-screen" class="hidden">
|
||||
<div class="call-header">
|
||||
<div class="call-header-row">
|
||||
<div id="room-name" class="room-name"></div>
|
||||
<button id="settings-btn-call" class="icon-btn small" title="Settings (Cmd+,)">⚙</button>
|
||||
</div>
|
||||
<div class="call-meta">
|
||||
<span id="call-status" class="status-dot"></span>
|
||||
<span id="call-timer" class="call-timer">0:00</span>
|
||||
</div>
|
||||
</div>
|
||||
<div class="level-meter">
|
||||
<div id="level-bar" class="level-bar-fill"></div>
|
||||
</div>
|
||||
<div id="participants" class="participants"></div>
|
||||
<div class="controls">
|
||||
<button id="mic-btn" class="control-btn" title="Toggle Mic (m)">
|
||||
<span class="icon" id="mic-icon">Mic</span>
|
||||
</button>
|
||||
<button id="hangup-btn" class="control-btn hangup" title="Hang Up (q)">
|
||||
<span class="icon">End</span>
|
||||
</button>
|
||||
<button id="spk-btn" class="control-btn" title="Toggle Speaker (s)">
|
||||
<span class="icon" id="spk-icon">Spk</span>
|
||||
</button>
|
||||
</div>
|
||||
<div id="stats" class="stats"></div>
|
||||
</div>
|
||||
|
||||
<!-- Settings panel -->
|
||||
<div id="settings-panel" class="hidden">
|
||||
<div class="settings-card">
|
||||
<div class="settings-header">
|
||||
<h2>Settings</h2>
|
||||
<button id="settings-close" class="icon-btn">×</button>
|
||||
</div>
|
||||
<div class="settings-section">
|
||||
<h3>Connection</h3>
|
||||
<label>Default Room
|
||||
<input id="s-room" type="text" />
|
||||
</label>
|
||||
<label>Alias
|
||||
<input id="s-alias" type="text" />
|
||||
</label>
|
||||
</div>
|
||||
<div class="settings-section">
|
||||
<h3>Audio</h3>
|
||||
<label class="checkbox">
|
||||
<input id="s-os-aec" type="checkbox" />
|
||||
OS Echo Cancellation (macOS VoiceProcessingIO)
|
||||
</label>
|
||||
<label class="checkbox">
|
||||
<input id="s-agc" type="checkbox" checked />
|
||||
Automatic Gain Control
|
||||
</label>
|
||||
</div>
|
||||
<div class="settings-section">
|
||||
<h3>Identity</h3>
|
||||
<div class="setting-row">
|
||||
<span class="setting-label">Fingerprint</span>
|
||||
<span id="s-fingerprint" class="fp-display-large"></span>
|
||||
</div>
|
||||
<div class="setting-row">
|
||||
<span class="setting-label">Identity file</span>
|
||||
<span class="fp-display">~/.wzp/identity</span>
|
||||
</div>
|
||||
</div>
|
||||
<div class="settings-section">
|
||||
<h3>Recent Rooms</h3>
|
||||
<div id="s-recent-rooms" class="recent-rooms-list"></div>
|
||||
<button id="s-clear-recent" class="secondary-btn">Clear History</button>
|
||||
</div>
|
||||
<button id="settings-save" class="primary">Save</button>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Manage Relays dialog -->
|
||||
<div id="relay-dialog" class="hidden">
|
||||
<div class="settings-card relay-dialog-card">
|
||||
<div class="settings-header">
|
||||
<h2>Manage Relays</h2>
|
||||
<button id="relay-dialog-close" class="icon-btn">×</button>
|
||||
</div>
|
||||
<div id="relay-dialog-list" class="relay-dialog-list"></div>
|
||||
<div class="relay-add-row">
|
||||
<div class="relay-add-inputs">
|
||||
<input id="relay-add-name" type="text" placeholder="Name" />
|
||||
<input id="relay-add-addr" type="text" placeholder="host:port" />
|
||||
</div>
|
||||
<button id="relay-add-btn" class="primary">Add Relay</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<script type="module" src="/src/main.ts"></script>
|
||||
</body>
|
||||
</html>
|
||||
1350
desktop/package-lock.json
generated
1350
desktop/package-lock.json
generated
File diff suppressed because it is too large
Load Diff
@@ -1,19 +0,0 @@
|
||||
{
|
||||
"name": "wzp-desktop",
|
||||
"private": true,
|
||||
"version": "0.1.0",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"dev": "vite",
|
||||
"build": "vite build",
|
||||
"tauri": "tauri"
|
||||
},
|
||||
"dependencies": {
|
||||
"@tauri-apps/api": "^2"
|
||||
},
|
||||
"devDependencies": {
|
||||
"typescript": "^5",
|
||||
"vite": "^6",
|
||||
"@tauri-apps/cli": "^2"
|
||||
}
|
||||
}
|
||||
@@ -1,36 +0,0 @@
|
||||
[package]
|
||||
name = "wzp-desktop"
|
||||
version = "0.1.0"
|
||||
edition = "2024"
|
||||
description = "WarzonePhone Desktop — encrypted VoIP client"
|
||||
default-run = "wzp-desktop"
|
||||
|
||||
[build-dependencies]
|
||||
tauri-build = { version = "2", features = [] }
|
||||
|
||||
[dependencies]
|
||||
tauri = { version = "2", features = [] }
|
||||
tauri-plugin-shell = "2"
|
||||
serde = { version = "1", features = ["derive"] }
|
||||
serde_json = "1"
|
||||
tokio = { version = "1", features = ["full"] }
|
||||
tracing = "0.1"
|
||||
tracing-subscriber = "0.3"
|
||||
anyhow = "1"
|
||||
rustls = { version = "0.23", default-features = false, features = ["ring", "std"] }
|
||||
|
||||
# WarzonePhone crates
|
||||
wzp-proto = { path = "../../crates/wzp-proto" }
|
||||
wzp-codec = { path = "../../crates/wzp-codec" }
|
||||
wzp-fec = { path = "../../crates/wzp-fec" }
|
||||
wzp-crypto = { path = "../../crates/wzp-crypto" }
|
||||
wzp-transport = { path = "../../crates/wzp-transport" }
|
||||
wzp-client = { path = "../../crates/wzp-client", features = ["audio", "vpio"] }
|
||||
|
||||
# Platform-specific
|
||||
[target.'cfg(target_os = "macos")'.dependencies]
|
||||
coreaudio-rs = "0.11"
|
||||
|
||||
[features]
|
||||
default = ["custom-protocol"]
|
||||
custom-protocol = ["tauri/custom-protocol"]
|
||||
@@ -1,3 +0,0 @@
|
||||
fn main() {
|
||||
tauri_build::build()
|
||||
}
|
||||
File diff suppressed because one or more lines are too long
@@ -1 +0,0 @@
|
||||
{}
|
||||
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
Binary file not shown.
|
Before Width: | Height: | Size: 104 B |
@@ -1,365 +0,0 @@
|
||||
//! Call engine for the desktop app — wraps wzp-client audio + transport
|
||||
//! into a clean async interface for Tauri commands.
|
||||
|
||||
use std::net::SocketAddr;
|
||||
use std::sync::atomic::{AtomicBool, AtomicU32, AtomicU64, Ordering};
|
||||
use std::sync::Arc;
|
||||
use std::time::Instant;
|
||||
|
||||
use tokio::sync::Mutex;
|
||||
use tracing::{error, info};
|
||||
|
||||
use wzp_client::audio_io::{AudioCapture, AudioPlayback};
|
||||
use wzp_client::call::{CallConfig, CallEncoder};
|
||||
use wzp_proto::MediaTransport;
|
||||
|
||||
const FRAME_SAMPLES: usize = 960;
|
||||
|
||||
/// Wrapper to make non-Sync audio handles safe to store in shared state.
|
||||
/// The audio handle is only accessed from the thread that created it (drop),
|
||||
/// never shared across threads — Sync is safe.
|
||||
#[allow(dead_code)]
|
||||
struct SyncWrapper(Box<dyn std::any::Any + Send>);
|
||||
unsafe impl Sync for SyncWrapper {}
|
||||
|
||||
pub struct ParticipantInfo {
|
||||
pub fingerprint: String,
|
||||
pub alias: Option<String>,
|
||||
}
|
||||
|
||||
pub struct EngineStatus {
|
||||
pub mic_muted: bool,
|
||||
pub spk_muted: bool,
|
||||
pub participants: Vec<ParticipantInfo>,
|
||||
pub frames_sent: u64,
|
||||
pub frames_received: u64,
|
||||
pub audio_level: u32,
|
||||
pub call_duration_secs: f64,
|
||||
pub fingerprint: String,
|
||||
}
|
||||
|
||||
pub struct CallEngine {
|
||||
running: Arc<AtomicBool>,
|
||||
mic_muted: Arc<AtomicBool>,
|
||||
spk_muted: Arc<AtomicBool>,
|
||||
participants: Arc<Mutex<Vec<ParticipantInfo>>>,
|
||||
frames_sent: Arc<AtomicU64>,
|
||||
frames_received: Arc<AtomicU64>,
|
||||
audio_level: Arc<AtomicU32>,
|
||||
transport: Arc<wzp_transport::QuinnTransport>,
|
||||
start_time: Instant,
|
||||
fingerprint: String,
|
||||
/// Keep audio handles alive for the duration of the call.
|
||||
/// Wrapped in SyncWrapper because AudioUnit isn't Sync.
|
||||
_audio_handle: SyncWrapper,
|
||||
}
|
||||
|
||||
impl CallEngine {
|
||||
pub async fn start<F>(
|
||||
relay: String,
|
||||
room: String,
|
||||
alias: String,
|
||||
_os_aec: bool,
|
||||
event_cb: F,
|
||||
) -> Result<Self, anyhow::Error>
|
||||
where
|
||||
F: Fn(&str, &str) + Send + Sync + 'static,
|
||||
{
|
||||
let _ = rustls::crypto::ring::default_provider().install_default();
|
||||
|
||||
let relay_addr: SocketAddr = relay.parse()?;
|
||||
|
||||
// Load or generate identity
|
||||
let seed = {
|
||||
let path = {
|
||||
let home = std::env::var("HOME").unwrap_or_else(|_| ".".into());
|
||||
std::path::PathBuf::from(home).join(".wzp").join("identity")
|
||||
};
|
||||
if path.exists() {
|
||||
if let Ok(hex) = std::fs::read_to_string(&path) {
|
||||
if let Ok(s) = wzp_crypto::Seed::from_hex(hex.trim()) {
|
||||
s
|
||||
} else {
|
||||
wzp_crypto::Seed::generate()
|
||||
}
|
||||
} else {
|
||||
wzp_crypto::Seed::generate()
|
||||
}
|
||||
} else {
|
||||
let s = wzp_crypto::Seed::generate();
|
||||
if let Some(p) = path.parent() {
|
||||
std::fs::create_dir_all(p).ok();
|
||||
}
|
||||
let hex: String = s.0.iter().map(|b| format!("{b:02x}")).collect();
|
||||
std::fs::write(&path, hex).ok();
|
||||
s
|
||||
}
|
||||
};
|
||||
|
||||
let fp = seed.derive_identity().public_identity().fingerprint;
|
||||
let fingerprint = fp.to_string();
|
||||
info!(%fp, "identity loaded");
|
||||
|
||||
// Connect
|
||||
let bind_addr: SocketAddr = "0.0.0.0:0".parse().unwrap();
|
||||
let endpoint = wzp_transport::create_endpoint(bind_addr, None)?;
|
||||
let client_config = wzp_transport::client_config();
|
||||
let conn = wzp_transport::connect(&endpoint, relay_addr, &room, client_config).await?;
|
||||
let transport = Arc::new(wzp_transport::QuinnTransport::new(conn));
|
||||
|
||||
// Handshake
|
||||
let _session = wzp_client::handshake::perform_handshake(
|
||||
&*transport,
|
||||
&seed.0,
|
||||
Some(&alias),
|
||||
)
|
||||
.await?;
|
||||
|
||||
info!("connected to relay, handshake complete");
|
||||
event_cb("connected", &format!("joined room {room}"));
|
||||
|
||||
// Audio I/O — VPIO (OS AEC) on macOS, plain CPAL otherwise.
|
||||
// The audio handle must be stored in CallEngine to keep streams alive.
|
||||
let (capture_ring, playout_ring, audio_handle): (_, _, Box<dyn std::any::Any + Send>) =
|
||||
if _os_aec {
|
||||
#[cfg(target_os = "macos")]
|
||||
{
|
||||
match wzp_client::audio_vpio::VpioAudio::start() {
|
||||
Ok(v) => {
|
||||
let cr = v.capture_ring().clone();
|
||||
let pr = v.playout_ring().clone();
|
||||
info!("using VoiceProcessingIO (OS AEC)");
|
||||
(cr, pr, Box::new(v))
|
||||
}
|
||||
Err(e) => {
|
||||
info!("VPIO failed ({e}), falling back to CPAL");
|
||||
let capture = AudioCapture::start()?;
|
||||
let playback = AudioPlayback::start()?;
|
||||
let cr = capture.ring().clone();
|
||||
let pr = playback.ring().clone();
|
||||
(cr, pr, Box::new((capture, playback)))
|
||||
}
|
||||
}
|
||||
}
|
||||
#[cfg(not(target_os = "macos"))]
|
||||
{
|
||||
info!("OS AEC not available on this platform, using CPAL");
|
||||
let capture = AudioCapture::start()?;
|
||||
let playback = AudioPlayback::start()?;
|
||||
let cr = capture.ring().clone();
|
||||
let pr = playback.ring().clone();
|
||||
(cr, pr, Box::new((capture, playback)))
|
||||
}
|
||||
} else {
|
||||
let capture = AudioCapture::start()?;
|
||||
let playback = AudioPlayback::start()?;
|
||||
let cr = capture.ring().clone();
|
||||
let pr = playback.ring().clone();
|
||||
(cr, pr, Box::new((capture, playback)))
|
||||
};
|
||||
|
||||
let running = Arc::new(AtomicBool::new(true));
|
||||
let mic_muted = Arc::new(AtomicBool::new(false));
|
||||
let spk_muted = Arc::new(AtomicBool::new(false));
|
||||
let participants: Arc<Mutex<Vec<ParticipantInfo>>> = Arc::new(Mutex::new(vec![]));
|
||||
let frames_sent = Arc::new(AtomicU64::new(0));
|
||||
let frames_received = Arc::new(AtomicU64::new(0));
|
||||
let audio_level = Arc::new(AtomicU32::new(0));
|
||||
|
||||
// Send task
|
||||
let send_t = transport.clone();
|
||||
let send_r = running.clone();
|
||||
let send_mic = mic_muted.clone();
|
||||
let send_fs = frames_sent.clone();
|
||||
let send_level = audio_level.clone();
|
||||
let send_drops = Arc::new(AtomicU64::new(0));
|
||||
tokio::spawn(async move {
|
||||
let config = CallConfig {
|
||||
noise_suppression: false,
|
||||
suppression_enabled: false,
|
||||
..CallConfig::default()
|
||||
};
|
||||
let mut encoder = CallEncoder::new(&config);
|
||||
encoder.set_aec_enabled(false); // OS AEC or none
|
||||
let mut buf = vec![0i16; FRAME_SAMPLES];
|
||||
|
||||
loop {
|
||||
if !send_r.load(Ordering::Relaxed) {
|
||||
break;
|
||||
}
|
||||
if capture_ring.available() < FRAME_SAMPLES {
|
||||
tokio::time::sleep(std::time::Duration::from_millis(5)).await;
|
||||
continue;
|
||||
}
|
||||
capture_ring.read(&mut buf);
|
||||
|
||||
// Compute RMS audio level for UI meter
|
||||
if !buf.is_empty() {
|
||||
let sum_sq: f64 = buf.iter().map(|&s| (s as f64) * (s as f64)).sum();
|
||||
let rms = (sum_sq / buf.len() as f64).sqrt() as u32;
|
||||
send_level.store(rms, Ordering::Relaxed);
|
||||
}
|
||||
|
||||
if send_mic.load(Ordering::Relaxed) {
|
||||
buf.fill(0);
|
||||
}
|
||||
match encoder.encode_frame(&buf) {
|
||||
Ok(pkts) => {
|
||||
for pkt in &pkts {
|
||||
if let Err(e) = send_t.send_media(pkt).await {
|
||||
// Transient congestion (Blocked) — drop packet, keep going
|
||||
send_drops.fetch_add(1, Ordering::Relaxed);
|
||||
if send_drops.load(Ordering::Relaxed) <= 3 {
|
||||
tracing::warn!("send_media error (dropping packet): {e}");
|
||||
}
|
||||
}
|
||||
}
|
||||
send_fs.fetch_add(1, Ordering::Relaxed);
|
||||
}
|
||||
Err(e) => error!("encode: {e}"),
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// Recv task (direct playout)
|
||||
let recv_t = transport.clone();
|
||||
let recv_r = running.clone();
|
||||
let recv_spk = spk_muted.clone();
|
||||
let recv_fr = frames_received.clone();
|
||||
tokio::spawn(async move {
|
||||
let mut opus_dec = wzp_codec::create_decoder(wzp_proto::QualityProfile::GOOD);
|
||||
let mut agc = wzp_codec::AutoGainControl::new();
|
||||
let mut pcm = vec![0i16; FRAME_SAMPLES];
|
||||
|
||||
loop {
|
||||
if !recv_r.load(Ordering::Relaxed) {
|
||||
break;
|
||||
}
|
||||
match tokio::time::timeout(
|
||||
std::time::Duration::from_millis(100),
|
||||
recv_t.recv_media(),
|
||||
)
|
||||
.await
|
||||
{
|
||||
Ok(Ok(Some(pkt))) => {
|
||||
if !pkt.header.is_repair {
|
||||
if let Ok(n) = opus_dec.decode(&pkt.payload, &mut pcm) {
|
||||
agc.process_frame(&mut pcm[..n]);
|
||||
if !recv_spk.load(Ordering::Relaxed) {
|
||||
playout_ring.write(&pcm[..n]);
|
||||
}
|
||||
}
|
||||
}
|
||||
recv_fr.fetch_add(1, Ordering::Relaxed);
|
||||
}
|
||||
Ok(Ok(None)) => break,
|
||||
Ok(Err(e)) => {
|
||||
let msg = e.to_string();
|
||||
if msg.contains("closed") || msg.contains("reset") {
|
||||
error!("recv fatal: {e}");
|
||||
break;
|
||||
}
|
||||
// Transient error — continue
|
||||
}
|
||||
Err(_) => {}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// Signal task (presence)
|
||||
let sig_t = transport.clone();
|
||||
let sig_r = running.clone();
|
||||
let sig_p = participants.clone();
|
||||
let event_cb = Arc::new(event_cb);
|
||||
let sig_cb = event_cb.clone();
|
||||
tokio::spawn(async move {
|
||||
loop {
|
||||
if !sig_r.load(Ordering::Relaxed) {
|
||||
break;
|
||||
}
|
||||
match tokio::time::timeout(
|
||||
std::time::Duration::from_millis(200),
|
||||
sig_t.recv_signal(),
|
||||
)
|
||||
.await
|
||||
{
|
||||
Ok(Ok(Some(wzp_proto::SignalMessage::RoomUpdate {
|
||||
participants: parts,
|
||||
..
|
||||
}))) => {
|
||||
let mut seen = std::collections::HashSet::new();
|
||||
let unique: Vec<ParticipantInfo> = parts
|
||||
.into_iter()
|
||||
.filter(|p| seen.insert((p.fingerprint.clone(), p.alias.clone())))
|
||||
.map(|p| ParticipantInfo {
|
||||
fingerprint: p.fingerprint,
|
||||
alias: p.alias,
|
||||
})
|
||||
.collect();
|
||||
let count = unique.len();
|
||||
*sig_p.lock().await = unique;
|
||||
sig_cb("room-update", &format!("{count} participants"));
|
||||
}
|
||||
Ok(Ok(Some(_))) => {}
|
||||
Ok(Ok(None)) => break,
|
||||
Ok(Err(_)) => break,
|
||||
Err(_) => {}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
Ok(Self {
|
||||
running,
|
||||
mic_muted,
|
||||
spk_muted,
|
||||
participants,
|
||||
frames_sent,
|
||||
frames_received,
|
||||
audio_level,
|
||||
transport,
|
||||
start_time: Instant::now(),
|
||||
fingerprint,
|
||||
_audio_handle: SyncWrapper(audio_handle),
|
||||
})
|
||||
}
|
||||
|
||||
pub fn toggle_mic(&self) -> bool {
|
||||
let was = self.mic_muted.load(Ordering::Relaxed);
|
||||
self.mic_muted.store(!was, Ordering::Relaxed);
|
||||
!was
|
||||
}
|
||||
|
||||
pub fn toggle_speaker(&self) -> bool {
|
||||
let was = self.spk_muted.load(Ordering::Relaxed);
|
||||
self.spk_muted.store(!was, Ordering::Relaxed);
|
||||
!was
|
||||
}
|
||||
|
||||
pub async fn status(&self) -> EngineStatus {
|
||||
let participants = {
|
||||
let parts = self.participants.lock().await;
|
||||
parts
|
||||
.iter()
|
||||
.map(|p| ParticipantInfo {
|
||||
fingerprint: p.fingerprint.clone(),
|
||||
alias: p.alias.clone(),
|
||||
})
|
||||
.collect()
|
||||
}; // lock dropped here
|
||||
EngineStatus {
|
||||
mic_muted: self.mic_muted.load(Ordering::Relaxed),
|
||||
spk_muted: self.spk_muted.load(Ordering::Relaxed),
|
||||
participants,
|
||||
frames_sent: self.frames_sent.load(Ordering::Relaxed),
|
||||
frames_received: self.frames_received.load(Ordering::Relaxed),
|
||||
audio_level: self.audio_level.load(Ordering::Relaxed),
|
||||
call_duration_secs: self.start_time.elapsed().as_secs_f64(),
|
||||
fingerprint: self.fingerprint.clone(),
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn stop(self) {
|
||||
self.running.store(false, Ordering::SeqCst);
|
||||
self.transport.close().await.ok();
|
||||
}
|
||||
}
|
||||
@@ -1,239 +0,0 @@
|
||||
#![cfg_attr(not(debug_assertions), windows_subsystem = "windows")]
|
||||
|
||||
mod engine;
|
||||
|
||||
use engine::CallEngine;
|
||||
use serde::Serialize;
|
||||
use std::sync::Arc;
|
||||
use tauri::Emitter;
|
||||
use tokio::sync::Mutex;
|
||||
|
||||
#[derive(Clone, Serialize)]
|
||||
struct CallEvent {
|
||||
kind: String,
|
||||
message: String,
|
||||
}
|
||||
|
||||
#[derive(Clone, Serialize)]
|
||||
struct Participant {
|
||||
fingerprint: String,
|
||||
alias: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Serialize)]
|
||||
struct CallStatus {
|
||||
active: bool,
|
||||
mic_muted: bool,
|
||||
spk_muted: bool,
|
||||
participants: Vec<Participant>,
|
||||
encode_fps: u64,
|
||||
recv_fps: u64,
|
||||
audio_level: u32,
|
||||
call_duration_secs: f64,
|
||||
fingerprint: String,
|
||||
}
|
||||
|
||||
struct AppState {
|
||||
engine: Mutex<Option<CallEngine>>,
|
||||
}
|
||||
|
||||
/// Ping result with RTT and server identity hash.
|
||||
#[derive(Clone, Serialize)]
|
||||
struct PingResult {
|
||||
rtt_ms: u32,
|
||||
/// Server identity: SHA-256 of the QUIC peer certificate, hex-encoded.
|
||||
server_fingerprint: String,
|
||||
}
|
||||
|
||||
/// Ping a relay to check if it's online, measure RTT, and get server identity.
|
||||
#[tauri::command]
|
||||
async fn ping_relay(relay: String) -> Result<PingResult, String> {
|
||||
let addr: std::net::SocketAddr = relay.parse().map_err(|e| format!("bad address: {e}"))?;
|
||||
let _ = rustls::crypto::ring::default_provider().install_default();
|
||||
let bind: std::net::SocketAddr = "0.0.0.0:0".parse().unwrap();
|
||||
let endpoint = wzp_transport::create_endpoint(bind, None).map_err(|e| format!("{e}"))?;
|
||||
let client_cfg = wzp_transport::client_config();
|
||||
|
||||
let start = std::time::Instant::now();
|
||||
match tokio::time::timeout(
|
||||
std::time::Duration::from_secs(3),
|
||||
wzp_transport::connect(&endpoint, addr, "ping", client_cfg),
|
||||
)
|
||||
.await
|
||||
{
|
||||
Ok(Ok(conn)) => {
|
||||
let rtt_ms = start.elapsed().as_millis() as u32;
|
||||
|
||||
// Extract server fingerprint from peer certificate
|
||||
let server_fingerprint = conn
|
||||
.peer_identity()
|
||||
.and_then(|id| id.downcast::<Vec<rustls::pki_types::CertificateDer>>().ok())
|
||||
.and_then(|certs| certs.first().map(|c| {
|
||||
use std::hash::{Hash, Hasher};
|
||||
let mut hasher = std::collections::hash_map::DefaultHasher::new();
|
||||
c.as_ref().hash(&mut hasher);
|
||||
let h = hasher.finish();
|
||||
format!("{h:016x}")
|
||||
}))
|
||||
.unwrap_or_else(|| {
|
||||
// Fallback: hash the remote address as identifier
|
||||
format!("{:x}", addr.ip().to_string().len() as u64 * 0x9e3779b97f4a7c15 + addr.port() as u64)
|
||||
});
|
||||
|
||||
conn.close(0u32.into(), b"ping");
|
||||
Ok(PingResult { rtt_ms, server_fingerprint })
|
||||
}
|
||||
Ok(Err(e)) => Err(format!("{e}")),
|
||||
Err(_) => Err("timeout (3s)".into()),
|
||||
}
|
||||
}
|
||||
|
||||
/// Read fingerprint from ~/.wzp/identity without connecting.
|
||||
#[tauri::command]
|
||||
fn get_identity() -> Result<String, String> {
|
||||
let home = std::env::var("HOME").unwrap_or_else(|_| ".".into());
|
||||
let path = std::path::PathBuf::from(home).join(".wzp").join("identity");
|
||||
if path.exists() {
|
||||
if let Ok(hex) = std::fs::read_to_string(&path) {
|
||||
if let Ok(seed) = wzp_crypto::Seed::from_hex(hex.trim()) {
|
||||
let fp = seed.derive_identity().public_identity().fingerprint;
|
||||
return Ok(fp.to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
// No identity yet — generate one so we can show the fingerprint
|
||||
let seed = wzp_crypto::Seed::generate();
|
||||
let fp = seed.derive_identity().public_identity().fingerprint;
|
||||
if let Some(parent) = path.parent() {
|
||||
std::fs::create_dir_all(parent).ok();
|
||||
}
|
||||
let hex: String = seed.0.iter().map(|b| format!("{b:02x}")).collect();
|
||||
std::fs::write(&path, hex).ok();
|
||||
Ok(fp.to_string())
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
async fn connect(
|
||||
state: tauri::State<'_, Arc<AppState>>,
|
||||
app: tauri::AppHandle,
|
||||
relay: String,
|
||||
room: String,
|
||||
alias: String,
|
||||
os_aec: bool,
|
||||
) -> Result<String, String> {
|
||||
let mut engine_lock = state.engine.lock().await;
|
||||
if engine_lock.is_some() {
|
||||
return Err("already connected".into());
|
||||
}
|
||||
|
||||
let app_clone = app.clone();
|
||||
match CallEngine::start(relay, room, alias, os_aec, move |event_kind, message| {
|
||||
let _ = app_clone.emit(
|
||||
"call-event",
|
||||
CallEvent {
|
||||
kind: event_kind.to_string(),
|
||||
message: message.to_string(),
|
||||
},
|
||||
);
|
||||
})
|
||||
.await
|
||||
{
|
||||
Ok(eng) => {
|
||||
*engine_lock = Some(eng);
|
||||
Ok("connected".into())
|
||||
}
|
||||
Err(e) => Err(format!("{e}")),
|
||||
}
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
async fn disconnect(state: tauri::State<'_, Arc<AppState>>) -> Result<String, String> {
|
||||
let mut engine_lock = state.engine.lock().await;
|
||||
if let Some(engine) = engine_lock.take() {
|
||||
engine.stop().await;
|
||||
Ok("disconnected".into())
|
||||
} else {
|
||||
Err("not connected".into())
|
||||
}
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
async fn toggle_mic(state: tauri::State<'_, Arc<AppState>>) -> Result<bool, String> {
|
||||
let engine_lock = state.engine.lock().await;
|
||||
if let Some(ref engine) = *engine_lock {
|
||||
Ok(engine.toggle_mic())
|
||||
} else {
|
||||
Err("not connected".into())
|
||||
}
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
async fn toggle_speaker(state: tauri::State<'_, Arc<AppState>>) -> Result<bool, String> {
|
||||
let engine_lock = state.engine.lock().await;
|
||||
if let Some(ref engine) = *engine_lock {
|
||||
Ok(engine.toggle_speaker())
|
||||
} else {
|
||||
Err("not connected".into())
|
||||
}
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
async fn get_status(state: tauri::State<'_, Arc<AppState>>) -> Result<CallStatus, String> {
|
||||
let engine_lock = state.engine.lock().await;
|
||||
if let Some(ref engine) = *engine_lock {
|
||||
let status = engine.status().await;
|
||||
Ok(CallStatus {
|
||||
active: true,
|
||||
mic_muted: status.mic_muted,
|
||||
spk_muted: status.spk_muted,
|
||||
participants: status
|
||||
.participants
|
||||
.into_iter()
|
||||
.map(|p| Participant {
|
||||
fingerprint: p.fingerprint,
|
||||
alias: p.alias,
|
||||
})
|
||||
.collect(),
|
||||
encode_fps: status.frames_sent,
|
||||
recv_fps: status.frames_received,
|
||||
audio_level: status.audio_level,
|
||||
call_duration_secs: status.call_duration_secs,
|
||||
fingerprint: status.fingerprint,
|
||||
})
|
||||
} else {
|
||||
Ok(CallStatus {
|
||||
active: false,
|
||||
mic_muted: false,
|
||||
spk_muted: false,
|
||||
participants: vec![],
|
||||
encode_fps: 0,
|
||||
recv_fps: 0,
|
||||
audio_level: 0,
|
||||
call_duration_secs: 0.0,
|
||||
fingerprint: String::new(),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
fn main() {
|
||||
tracing_subscriber::fmt().init();
|
||||
|
||||
let state = Arc::new(AppState {
|
||||
engine: Mutex::new(None),
|
||||
});
|
||||
|
||||
tauri::Builder::default()
|
||||
.plugin(tauri_plugin_shell::init())
|
||||
.manage(state)
|
||||
.invoke_handler(tauri::generate_handler![
|
||||
ping_relay,
|
||||
get_identity,
|
||||
connect,
|
||||
disconnect,
|
||||
toggle_mic,
|
||||
toggle_speaker,
|
||||
get_status,
|
||||
])
|
||||
.run(tauri::generate_context!())
|
||||
.expect("error while running WarzonePhone Desktop");
|
||||
}
|
||||
@@ -1,33 +0,0 @@
|
||||
{
|
||||
"productName": "WarzonePhone",
|
||||
"version": "0.1.0",
|
||||
"identifier": "com.wzp.desktop",
|
||||
"build": {
|
||||
"frontendDist": "../dist",
|
||||
"devUrl": "http://localhost:1420",
|
||||
"beforeDevCommand": "npm run dev",
|
||||
"beforeBuildCommand": "npm run build"
|
||||
},
|
||||
"app": {
|
||||
"windows": [
|
||||
{
|
||||
"title": "WarzonePhone",
|
||||
"width": 400,
|
||||
"height": 640,
|
||||
"resizable": true,
|
||||
"minWidth": 360,
|
||||
"minHeight": 500
|
||||
}
|
||||
],
|
||||
"security": {
|
||||
"csp": null
|
||||
}
|
||||
},
|
||||
"bundle": {
|
||||
"active": true,
|
||||
"targets": "all",
|
||||
"icon": [
|
||||
"icons/icon.png"
|
||||
]
|
||||
}
|
||||
}
|
||||
@@ -1,110 +0,0 @@
|
||||
/**
|
||||
* Deterministic identicon generator — creates a unique symmetric pattern
|
||||
* from a hex fingerprint string, similar to MetaMask's Jazzicon / Ethereum blockies.
|
||||
*
|
||||
* Returns an SVG data URL that can be used as an <img> src.
|
||||
*/
|
||||
|
||||
function hashBytes(hex: string): number[] {
|
||||
const clean = hex.replace(/[^0-9a-fA-F]/g, "");
|
||||
const bytes: number[] = [];
|
||||
for (let i = 0; i < clean.length; i += 2) {
|
||||
bytes.push(parseInt(clean.substring(i, i + 2), 16));
|
||||
}
|
||||
// Pad to at least 16 bytes
|
||||
while (bytes.length < 16) bytes.push(0);
|
||||
return bytes;
|
||||
}
|
||||
|
||||
function hslToRgb(h: number, s: number, l: number): [number, number, number] {
|
||||
s /= 100;
|
||||
l /= 100;
|
||||
const k = (n: number) => (n + h / 30) % 12;
|
||||
const a = s * Math.min(l, 1 - l);
|
||||
const f = (n: number) =>
|
||||
l - a * Math.max(-1, Math.min(k(n) - 3, Math.min(9 - k(n), 1)));
|
||||
return [
|
||||
Math.round(f(0) * 255),
|
||||
Math.round(f(8) * 255),
|
||||
Math.round(f(4) * 255),
|
||||
];
|
||||
}
|
||||
|
||||
export function generateIdenticon(
|
||||
fingerprint: string,
|
||||
size: number = 36
|
||||
): string {
|
||||
const bytes = hashBytes(fingerprint);
|
||||
|
||||
// Derive colors from first bytes
|
||||
const hue1 = (bytes[0] * 360) / 256;
|
||||
const hue2 = ((bytes[1] * 360) / 256 + 120) % 360;
|
||||
const [r1, g1, b1] = hslToRgb(hue1, 65, 35); // dark bg
|
||||
const [r2, g2, b2] = hslToRgb(hue2, 70, 55); // bright fg
|
||||
|
||||
const bg = `rgb(${r1},${g1},${b1})`;
|
||||
const fg = `rgb(${r2},${g2},${b2})`;
|
||||
|
||||
// 5x5 grid, left-right symmetric (only need 3 columns)
|
||||
const grid: boolean[][] = [];
|
||||
for (let y = 0; y < 5; y++) {
|
||||
const row: boolean[] = [];
|
||||
for (let x = 0; x < 3; x++) {
|
||||
const byteIdx = 2 + y * 3 + x;
|
||||
row.push(bytes[byteIdx % bytes.length] > 128);
|
||||
}
|
||||
// Mirror: col 3 = col 1, col 4 = col 0
|
||||
grid.push([row[0], row[1], row[2], row[1], row[0]]);
|
||||
}
|
||||
|
||||
// Render SVG
|
||||
const cellSize = size / 5;
|
||||
const r = size * 0.12; // border radius
|
||||
let rects = "";
|
||||
for (let y = 0; y < 5; y++) {
|
||||
for (let x = 0; x < 5; x++) {
|
||||
if (grid[y][x]) {
|
||||
rects += `<rect x="${x * cellSize}" y="${y * cellSize}" width="${cellSize}" height="${cellSize}" fill="${fg}"/>`;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const svg = `<svg xmlns="http://www.w3.org/2000/svg" width="${size}" height="${size}" viewBox="0 0 ${size} ${size}">
|
||||
<rect width="${size}" height="${size}" rx="${r}" fill="${bg}"/>
|
||||
${rects}
|
||||
</svg>`;
|
||||
|
||||
return `data:image/svg+xml,${encodeURIComponent(svg)}`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create an <img> element with the identicon.
|
||||
* Click copies the fingerprint to clipboard.
|
||||
*/
|
||||
export function createIdenticonEl(
|
||||
fingerprint: string,
|
||||
size: number = 36,
|
||||
clickToCopy: boolean = true
|
||||
): HTMLImageElement {
|
||||
const img = document.createElement("img");
|
||||
img.src = generateIdenticon(fingerprint, size);
|
||||
img.width = size;
|
||||
img.height = size;
|
||||
img.style.borderRadius = `${size * 0.12}px`;
|
||||
img.style.cursor = clickToCopy ? "pointer" : "default";
|
||||
img.title = fingerprint;
|
||||
|
||||
if (clickToCopy && fingerprint) {
|
||||
img.addEventListener("click", (e) => {
|
||||
e.stopPropagation();
|
||||
navigator.clipboard.writeText(fingerprint).then(() => {
|
||||
img.style.outline = "2px solid #4ade80";
|
||||
setTimeout(() => {
|
||||
img.style.outline = "";
|
||||
}, 600);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
return img;
|
||||
}
|
||||
@@ -1,591 +0,0 @@
|
||||
import { invoke } from "@tauri-apps/api/core";
|
||||
import { listen } from "@tauri-apps/api/event";
|
||||
import { generateIdenticon, createIdenticonEl } from "./identicon";
|
||||
|
||||
// ── Elements ──
|
||||
const connectScreen = document.getElementById("connect-screen")!;
|
||||
const callScreen = document.getElementById("call-screen")!;
|
||||
const roomInput = document.getElementById("room") as HTMLInputElement;
|
||||
const aliasInput = document.getElementById("alias") as HTMLInputElement;
|
||||
const osAecCheckbox = document.getElementById("os-aec") as HTMLInputElement;
|
||||
const connectBtn = document.getElementById("connect-btn") as HTMLButtonElement;
|
||||
const connectError = document.getElementById("connect-error")!;
|
||||
const roomName = document.getElementById("room-name")!;
|
||||
const callTimer = document.getElementById("call-timer")!;
|
||||
const callStatus = document.getElementById("call-status")!;
|
||||
const levelBar = document.getElementById("level-bar")!;
|
||||
const participantsDiv = document.getElementById("participants")!;
|
||||
const micBtn = document.getElementById("mic-btn")!;
|
||||
const micIcon = document.getElementById("mic-icon")!;
|
||||
const spkBtn = document.getElementById("spk-btn")!;
|
||||
const spkIcon = document.getElementById("spk-icon")!;
|
||||
const hangupBtn = document.getElementById("hangup-btn")!;
|
||||
const statsDiv = document.getElementById("stats")!;
|
||||
const myFingerprintEl = document.getElementById("my-fingerprint")!;
|
||||
const myIdenticonEl = document.getElementById("my-identicon")!;
|
||||
const recentRoomsDiv = document.getElementById("recent-rooms")!;
|
||||
|
||||
// Relay button
|
||||
const relaySelected = document.getElementById("relay-selected")!;
|
||||
const relayDot = document.getElementById("relay-dot")!;
|
||||
const relayLabel = document.getElementById("relay-label")!;
|
||||
|
||||
// Relay dialog
|
||||
const relayDialog = document.getElementById("relay-dialog")!;
|
||||
const relayDialogClose = document.getElementById("relay-dialog-close")!;
|
||||
const relayDialogList = document.getElementById("relay-dialog-list")!;
|
||||
const relayAddName = document.getElementById("relay-add-name") as HTMLInputElement;
|
||||
const relayAddAddr = document.getElementById("relay-add-addr") as HTMLInputElement;
|
||||
const relayAddBtn = document.getElementById("relay-add-btn")!;
|
||||
|
||||
// Settings
|
||||
const settingsPanel = document.getElementById("settings-panel")!;
|
||||
const settingsClose = document.getElementById("settings-close")!;
|
||||
const settingsSave = document.getElementById("settings-save")!;
|
||||
const settingsBtnHome = document.getElementById("settings-btn-home")!;
|
||||
const settingsBtnCall = document.getElementById("settings-btn-call")!;
|
||||
const sRoom = document.getElementById("s-room") as HTMLInputElement;
|
||||
const sAlias = document.getElementById("s-alias") as HTMLInputElement;
|
||||
const sOsAec = document.getElementById("s-os-aec") as HTMLInputElement;
|
||||
const sAgc = document.getElementById("s-agc") as HTMLInputElement;
|
||||
const sFingerprint = document.getElementById("s-fingerprint")!;
|
||||
const sRecentRooms = document.getElementById("s-recent-rooms")!;
|
||||
const sClearRecent = document.getElementById("s-clear-recent")!;
|
||||
|
||||
let statusInterval: number | null = null;
|
||||
let myFingerprint = "";
|
||||
let userDisconnected = false;
|
||||
|
||||
// ── Data types ──
|
||||
interface RelayServer {
|
||||
name: string;
|
||||
address: string;
|
||||
rtt?: number | null;
|
||||
serverFingerprint?: string | null; // from ping
|
||||
knownFingerprint?: string | null; // saved TOFU fingerprint
|
||||
}
|
||||
|
||||
interface RecentRoom { relay: string; room: string; }
|
||||
|
||||
interface Settings {
|
||||
relays: RelayServer[];
|
||||
selectedRelay: number;
|
||||
room: string;
|
||||
alias: string;
|
||||
osAec: boolean;
|
||||
agc: boolean;
|
||||
recentRooms: RecentRoom[];
|
||||
}
|
||||
|
||||
function loadSettings(): Settings {
|
||||
const defaults: Settings = {
|
||||
relays: [{ name: "Default", address: "193.180.213.68:4433" }],
|
||||
selectedRelay: 0, room: "android", alias: "",
|
||||
osAec: true, agc: true, recentRooms: [],
|
||||
};
|
||||
try {
|
||||
const raw = localStorage.getItem("wzp-settings");
|
||||
if (raw) {
|
||||
const parsed = JSON.parse(raw);
|
||||
if (parsed.relay && !parsed.relays) {
|
||||
parsed.relays = [{ name: "Default", address: parsed.relay }];
|
||||
parsed.selectedRelay = 0;
|
||||
delete parsed.relay;
|
||||
}
|
||||
if (parsed.recentRooms?.length > 0 && typeof parsed.recentRooms[0] === "string") {
|
||||
const addr = parsed.relays?.[0]?.address || defaults.relays[0].address;
|
||||
parsed.recentRooms = parsed.recentRooms.map((r: string) => ({ relay: addr, room: r }));
|
||||
}
|
||||
return { ...defaults, ...parsed };
|
||||
}
|
||||
} catch {}
|
||||
return defaults;
|
||||
}
|
||||
|
||||
function saveSettingsObj(s: Settings) {
|
||||
localStorage.setItem("wzp-settings", JSON.stringify(s));
|
||||
}
|
||||
|
||||
function getSelectedRelay(): RelayServer | undefined {
|
||||
const s = loadSettings();
|
||||
return s.relays[s.selectedRelay];
|
||||
}
|
||||
|
||||
// ── Helpers ──
|
||||
function escapeHtml(s: string): string {
|
||||
const d = document.createElement("div");
|
||||
d.textContent = s;
|
||||
return d.innerHTML;
|
||||
}
|
||||
|
||||
// ── Lock status ──
|
||||
type LockStatus = "verified" | "new" | "changed" | "offline" | "unknown";
|
||||
|
||||
function lockStatus(relay: RelayServer): LockStatus {
|
||||
if (relay.rtt === undefined || relay.rtt === null) return "unknown";
|
||||
if (relay.rtt < 0) return "offline";
|
||||
if (!relay.serverFingerprint) return "new";
|
||||
if (!relay.knownFingerprint) return "new"; // first time
|
||||
if (relay.serverFingerprint === relay.knownFingerprint) return "verified";
|
||||
return "changed";
|
||||
}
|
||||
|
||||
function lockIcon(status: LockStatus): string {
|
||||
switch (status) {
|
||||
case "verified": return "🔒";
|
||||
case "new": return "🔓";
|
||||
case "changed": return "⚠️";
|
||||
case "offline": return "🔴";
|
||||
case "unknown": return "⚪";
|
||||
}
|
||||
}
|
||||
|
||||
function lockColor(status: LockStatus): string {
|
||||
switch (status) {
|
||||
case "verified": return "var(--green)";
|
||||
case "new": return "var(--yellow)";
|
||||
case "changed": return "var(--red)";
|
||||
case "offline": return "var(--red)";
|
||||
case "unknown": return "var(--text-dim)";
|
||||
}
|
||||
}
|
||||
|
||||
// ── Apply settings ──
|
||||
function applySettings() {
|
||||
const s = loadSettings();
|
||||
roomInput.value = s.room;
|
||||
aliasInput.value = s.alias;
|
||||
osAecCheckbox.checked = s.osAec;
|
||||
renderRecentRooms(s.recentRooms);
|
||||
renderRelayButton();
|
||||
}
|
||||
|
||||
// ── Relay button ──
|
||||
function renderRelayButton() {
|
||||
const s = loadSettings();
|
||||
const sel = s.relays[s.selectedRelay];
|
||||
if (sel) {
|
||||
const ls = lockStatus(sel);
|
||||
relayDot.textContent = lockIcon(ls);
|
||||
relayDot.className = "relay-lock";
|
||||
relayLabel.textContent = `${sel.name} (${sel.address})`;
|
||||
} else {
|
||||
relayDot.textContent = "⚪";
|
||||
relayDot.className = "relay-lock";
|
||||
relayLabel.textContent = "No relay configured";
|
||||
}
|
||||
}
|
||||
|
||||
relaySelected.addEventListener("click", () => openRelayDialog());
|
||||
|
||||
// ── Relay dialog ──
|
||||
function openRelayDialog() {
|
||||
renderRelayDialogList();
|
||||
relayAddName.value = "";
|
||||
relayAddAddr.value = "";
|
||||
relayDialog.classList.remove("hidden");
|
||||
}
|
||||
|
||||
function closeRelayDialog() {
|
||||
relayDialog.classList.add("hidden");
|
||||
renderRelayButton();
|
||||
}
|
||||
|
||||
function renderRelayDialogList() {
|
||||
const s = loadSettings();
|
||||
relayDialogList.innerHTML = "";
|
||||
s.relays.forEach((r, i) => {
|
||||
const item = document.createElement("div");
|
||||
item.className = `relay-dialog-item ${i === s.selectedRelay ? "selected" : ""}`;
|
||||
|
||||
const ls = lockStatus(r);
|
||||
const fp = r.serverFingerprint || r.address;
|
||||
|
||||
// Identicon
|
||||
const icon = createIdenticonEl(fp, 32, true);
|
||||
icon.title = r.serverFingerprint
|
||||
? `Server: ${r.serverFingerprint}\nClick to copy`
|
||||
: `No fingerprint yet`;
|
||||
item.appendChild(icon);
|
||||
|
||||
// Info
|
||||
const info = document.createElement("div");
|
||||
info.className = "relay-info";
|
||||
info.innerHTML = `
|
||||
<div class="relay-name">${escapeHtml(r.name)}</div>
|
||||
<div class="relay-addr">${escapeHtml(r.address)}</div>
|
||||
`;
|
||||
item.appendChild(info);
|
||||
|
||||
// Lock + RTT
|
||||
const meta = document.createElement("div");
|
||||
meta.className = "relay-meta";
|
||||
const rttStr = r.rtt !== undefined && r.rtt !== null
|
||||
? (r.rtt < 0 ? "offline" : `${r.rtt}ms`)
|
||||
: "";
|
||||
meta.innerHTML = `
|
||||
<span class="relay-lock-icon" style="color:${lockColor(ls)}">${lockIcon(ls)}</span>
|
||||
<span class="relay-rtt">${rttStr}</span>
|
||||
`;
|
||||
item.appendChild(meta);
|
||||
|
||||
// Delete button
|
||||
const del = document.createElement("button");
|
||||
del.className = "remove";
|
||||
del.textContent = "×";
|
||||
del.addEventListener("click", (e) => {
|
||||
e.stopPropagation();
|
||||
const s = loadSettings();
|
||||
s.relays.splice(i, 1);
|
||||
if (s.selectedRelay >= s.relays.length) s.selectedRelay = Math.max(0, s.relays.length - 1);
|
||||
saveSettingsObj(s);
|
||||
renderRelayDialogList();
|
||||
renderRelayButton();
|
||||
});
|
||||
item.appendChild(del);
|
||||
|
||||
// Click to select
|
||||
item.addEventListener("click", () => {
|
||||
const s = loadSettings();
|
||||
s.selectedRelay = i;
|
||||
|
||||
// TOFU: if first time seeing this server, trust its fingerprint
|
||||
if (r.serverFingerprint && !r.knownFingerprint) {
|
||||
s.relays[i].knownFingerprint = r.serverFingerprint;
|
||||
}
|
||||
|
||||
saveSettingsObj(s);
|
||||
renderRelayDialogList();
|
||||
renderRelayButton();
|
||||
});
|
||||
|
||||
relayDialogList.appendChild(item);
|
||||
});
|
||||
}
|
||||
|
||||
relayAddBtn.addEventListener("click", () => {
|
||||
const name = relayAddName.value.trim();
|
||||
const addr = relayAddAddr.value.trim();
|
||||
if (!addr) return;
|
||||
const s = loadSettings();
|
||||
s.relays.push({ name: name || addr, address: addr });
|
||||
saveSettingsObj(s);
|
||||
relayAddName.value = "";
|
||||
relayAddAddr.value = "";
|
||||
renderRelayDialogList();
|
||||
pingAllRelays();
|
||||
});
|
||||
|
||||
relayDialogClose.addEventListener("click", closeRelayDialog);
|
||||
relayDialog.addEventListener("click", (e) => { if (e.target === relayDialog) closeRelayDialog(); });
|
||||
|
||||
// ── Ping ──
|
||||
interface PingResult { rtt_ms: number; server_fingerprint: string; }
|
||||
|
||||
async function pingAllRelays() {
|
||||
const s = loadSettings();
|
||||
for (let i = 0; i < s.relays.length; i++) {
|
||||
const r = s.relays[i];
|
||||
try {
|
||||
const result: PingResult = await invoke("ping_relay", { relay: r.address });
|
||||
r.rtt = result.rtt_ms;
|
||||
r.serverFingerprint = result.server_fingerprint;
|
||||
|
||||
// TOFU: auto-save fingerprint on first contact
|
||||
if (!r.knownFingerprint) {
|
||||
r.knownFingerprint = result.server_fingerprint;
|
||||
}
|
||||
} catch {
|
||||
r.rtt = -1;
|
||||
}
|
||||
}
|
||||
saveSettingsObj(s);
|
||||
renderRelayButton();
|
||||
if (!relayDialog.classList.contains("hidden")) renderRelayDialogList();
|
||||
}
|
||||
|
||||
// ── Recent rooms ──
|
||||
function renderRecentRooms(rooms: RecentRoom[]) {
|
||||
recentRoomsDiv.innerHTML = rooms
|
||||
.map((r) => `<span class="recent-room" data-relay="${escapeHtml(r.relay)}" data-room="${escapeHtml(r.room)}">${escapeHtml(r.room)}</span>`)
|
||||
.join("");
|
||||
recentRoomsDiv.querySelectorAll(".recent-room").forEach((el) => {
|
||||
el.addEventListener("click", () => {
|
||||
const ds = (el as HTMLElement).dataset;
|
||||
roomInput.value = ds.room || "";
|
||||
const s = loadSettings();
|
||||
const idx = s.relays.findIndex((r) => r.address === ds.relay);
|
||||
if (idx >= 0) { s.selectedRelay = idx; saveSettingsObj(s); renderRelayButton(); }
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
// ── Init ──
|
||||
applySettings();
|
||||
setTimeout(pingAllRelays, 300);
|
||||
|
||||
// Load fingerprint + render identicon
|
||||
(async () => {
|
||||
try {
|
||||
const fp: string = await invoke("get_identity");
|
||||
myFingerprint = fp;
|
||||
myFingerprintEl.textContent = fp;
|
||||
myFingerprintEl.style.cursor = "pointer";
|
||||
myFingerprintEl.addEventListener("click", () => {
|
||||
navigator.clipboard.writeText(fp).then(() => {
|
||||
const orig = myFingerprintEl.textContent;
|
||||
myFingerprintEl.textContent = "Copied!";
|
||||
setTimeout(() => { myFingerprintEl.textContent = orig; }, 1000);
|
||||
});
|
||||
});
|
||||
|
||||
// Identicon next to fingerprint
|
||||
const icon = createIdenticonEl(fp, 28, true);
|
||||
myIdenticonEl.innerHTML = "";
|
||||
myIdenticonEl.appendChild(icon);
|
||||
} catch {}
|
||||
})();
|
||||
|
||||
// ── Connect ──
|
||||
connectBtn.addEventListener("click", doConnect);
|
||||
[roomInput, aliasInput].forEach((el) =>
|
||||
el.addEventListener("keydown", (e) => { if (e.key === "Enter") doConnect(); })
|
||||
);
|
||||
|
||||
async function doConnect() {
|
||||
const relay = getSelectedRelay();
|
||||
if (!relay) { connectError.textContent = "No relay selected"; return; }
|
||||
|
||||
// Warn on fingerprint mismatch
|
||||
const ls = lockStatus(relay);
|
||||
if (ls === "changed") {
|
||||
if (!confirm(`Server fingerprint has changed!\n\nKnown: ${relay.knownFingerprint}\nNew: ${relay.serverFingerprint}\n\nThis could indicate a man-in-the-middle attack. Continue?`)) {
|
||||
return;
|
||||
}
|
||||
// User accepted — update known fingerprint
|
||||
const s = loadSettings();
|
||||
s.relays[s.selectedRelay].knownFingerprint = relay.serverFingerprint;
|
||||
saveSettingsObj(s);
|
||||
}
|
||||
|
||||
if (ls === "offline") { connectError.textContent = "Relay is offline"; return; }
|
||||
|
||||
connectError.textContent = "";
|
||||
connectBtn.disabled = true;
|
||||
connectBtn.textContent = "Connecting...";
|
||||
userDisconnected = false;
|
||||
|
||||
const s = loadSettings();
|
||||
s.room = roomInput.value; s.alias = aliasInput.value; s.osAec = osAecCheckbox.checked;
|
||||
const room = roomInput.value.trim();
|
||||
if (room) {
|
||||
const entry: RecentRoom = { relay: relay.address, room };
|
||||
s.recentRooms = [entry, ...s.recentRooms.filter((r) => !(r.relay === relay.address && r.room === room))].slice(0, 5);
|
||||
}
|
||||
saveSettingsObj(s);
|
||||
|
||||
try {
|
||||
await invoke("connect", {
|
||||
relay: relay.address, room: roomInput.value,
|
||||
alias: aliasInput.value, osAec: osAecCheckbox.checked,
|
||||
});
|
||||
showCallScreen();
|
||||
} catch (e: any) {
|
||||
connectError.textContent = String(e);
|
||||
connectBtn.disabled = false;
|
||||
connectBtn.textContent = "Connect";
|
||||
}
|
||||
}
|
||||
|
||||
function showCallScreen() {
|
||||
connectScreen.classList.add("hidden");
|
||||
callScreen.classList.remove("hidden");
|
||||
roomName.textContent = roomInput.value;
|
||||
callStatus.className = "status-dot";
|
||||
statusInterval = window.setInterval(pollStatus, 250);
|
||||
}
|
||||
|
||||
function showConnectScreen() {
|
||||
callScreen.classList.add("hidden");
|
||||
connectScreen.classList.remove("hidden");
|
||||
connectBtn.disabled = false;
|
||||
connectBtn.textContent = "Connect";
|
||||
levelBar.style.width = "0%";
|
||||
if (statusInterval) { clearInterval(statusInterval); statusInterval = null; }
|
||||
}
|
||||
|
||||
// ── Mute / hangup ──
|
||||
micBtn.addEventListener("click", async () => {
|
||||
try { const m: boolean = await invoke("toggle_mic"); micBtn.classList.toggle("muted", m); micIcon.textContent = m ? "Mic Off" : "Mic"; } catch {}
|
||||
});
|
||||
spkBtn.addEventListener("click", async () => {
|
||||
try { const m: boolean = await invoke("toggle_speaker"); spkBtn.classList.toggle("muted", m); spkIcon.textContent = m ? "Spk Off" : "Spk"; } catch {}
|
||||
});
|
||||
hangupBtn.addEventListener("click", async () => {
|
||||
userDisconnected = true;
|
||||
try { await invoke("disconnect"); } catch {}
|
||||
showConnectScreen();
|
||||
});
|
||||
|
||||
document.addEventListener("keydown", (e) => {
|
||||
if (callScreen.classList.contains("hidden")) return;
|
||||
if ((e.target as HTMLElement).tagName === "INPUT") return;
|
||||
if (e.key === "m") micBtn.click();
|
||||
if (e.key === "s") spkBtn.click();
|
||||
if (e.key === "q") hangupBtn.click();
|
||||
});
|
||||
|
||||
// ── Status polling ──
|
||||
interface CallStatusI {
|
||||
active: boolean; mic_muted: boolean; spk_muted: boolean;
|
||||
participants: { fingerprint: string; alias: string | null }[];
|
||||
encode_fps: number; recv_fps: number; audio_level: number;
|
||||
call_duration_secs: number; fingerprint: string;
|
||||
}
|
||||
|
||||
function formatDuration(secs: number): string {
|
||||
const m = Math.floor(secs / 60);
|
||||
const s = Math.floor(secs % 60);
|
||||
return `${m}:${s.toString().padStart(2, "0")}`;
|
||||
}
|
||||
|
||||
let reconnectAttempts = 0;
|
||||
|
||||
async function pollStatus() {
|
||||
try {
|
||||
const st: CallStatusI = await invoke("get_status");
|
||||
if (!st.active) {
|
||||
if (!userDisconnected && reconnectAttempts < 5) {
|
||||
reconnectAttempts++;
|
||||
callStatus.className = "status-dot reconnecting";
|
||||
statsDiv.textContent = `Reconnecting (${reconnectAttempts}/5)...`;
|
||||
const relay = getSelectedRelay();
|
||||
if (relay) {
|
||||
const delay = Math.min(1000 * Math.pow(2, reconnectAttempts - 1), 10000);
|
||||
setTimeout(async () => {
|
||||
try {
|
||||
await invoke("connect", { relay: relay.address, room: roomInput.value, alias: aliasInput.value, osAec: osAecCheckbox.checked });
|
||||
reconnectAttempts = 0; callStatus.className = "status-dot";
|
||||
} catch {}
|
||||
}, delay);
|
||||
}
|
||||
return;
|
||||
}
|
||||
reconnectAttempts = 0; showConnectScreen(); return;
|
||||
}
|
||||
|
||||
reconnectAttempts = 0;
|
||||
if (st.fingerprint) myFingerprint = st.fingerprint;
|
||||
|
||||
micBtn.classList.toggle("muted", st.mic_muted);
|
||||
micIcon.textContent = st.mic_muted ? "Mic Off" : "Mic";
|
||||
spkBtn.classList.toggle("muted", st.spk_muted);
|
||||
spkIcon.textContent = st.spk_muted ? "Spk Off" : "Spk";
|
||||
callTimer.textContent = formatDuration(st.call_duration_secs);
|
||||
|
||||
const rms = st.audio_level;
|
||||
const pct = rms > 0 ? Math.min(100, (Math.log(rms) / Math.log(32767)) * 100) : 0;
|
||||
levelBar.style.width = `${pct}%`;
|
||||
|
||||
// Participants with identicons
|
||||
if (st.participants.length === 0) {
|
||||
participantsDiv.innerHTML = '<div class="participants-empty">Waiting for participants...</div>';
|
||||
} else {
|
||||
participantsDiv.innerHTML = "";
|
||||
st.participants.forEach((p) => {
|
||||
const name = p.alias || "Anonymous";
|
||||
const fp = p.fingerprint || "";
|
||||
const isMe = fp && myFingerprint.includes(fp);
|
||||
|
||||
const row = document.createElement("div");
|
||||
row.className = "participant";
|
||||
|
||||
// Identicon avatar
|
||||
const icon = createIdenticonEl(fp || name, 36, true);
|
||||
if (isMe) icon.style.outline = "2px solid var(--accent)";
|
||||
row.appendChild(icon);
|
||||
|
||||
const info = document.createElement("div");
|
||||
info.className = "info";
|
||||
info.innerHTML = `
|
||||
<div class="name">${escapeHtml(name)} ${isMe ? '<span class="you-badge">you</span>' : ""}</div>
|
||||
<div class="fp">${escapeHtml(fp ? fp.substring(0, 16) : "")}</div>
|
||||
`;
|
||||
row.appendChild(info);
|
||||
participantsDiv.appendChild(row);
|
||||
});
|
||||
}
|
||||
|
||||
statsDiv.textContent = `TX: ${st.encode_fps} | RX: ${st.recv_fps}`;
|
||||
} catch {}
|
||||
}
|
||||
|
||||
listen("call-event", (event: any) => {
|
||||
const { kind } = event.payload;
|
||||
if (kind === "room-update") pollStatus();
|
||||
if (kind === "disconnected" && !userDisconnected) pollStatus();
|
||||
});
|
||||
|
||||
// ── Settings ──
|
||||
function openSettings() {
|
||||
const s = loadSettings();
|
||||
sRoom.value = s.room; sAlias.value = s.alias; sOsAec.checked = s.osAec;
|
||||
sFingerprint.textContent = myFingerprint || "(loading...)";
|
||||
renderSettingsRecentRooms(s.recentRooms);
|
||||
settingsPanel.classList.remove("hidden");
|
||||
}
|
||||
function closeSettings() { settingsPanel.classList.add("hidden"); }
|
||||
|
||||
function renderSettingsRecentRooms(rooms: RecentRoom[]) {
|
||||
if (rooms.length === 0) {
|
||||
sRecentRooms.innerHTML = '<span style="color:var(--text-dim);font-size:12px">No recent rooms</span>';
|
||||
return;
|
||||
}
|
||||
sRecentRooms.innerHTML = rooms.map((r, i) => `
|
||||
<div class="recent-room-item">
|
||||
<span>${escapeHtml(r.room)} <small style="color:var(--text-dim)">${escapeHtml(r.relay)}</small></span>
|
||||
<button class="remove" data-idx="${i}">×</button>
|
||||
</div>`).join("");
|
||||
sRecentRooms.querySelectorAll(".remove").forEach((btn) => {
|
||||
btn.addEventListener("click", () => {
|
||||
const idx = parseInt((btn as HTMLElement).dataset.idx || "0");
|
||||
const s = loadSettings();
|
||||
s.recentRooms.splice(idx, 1);
|
||||
saveSettingsObj(s);
|
||||
renderSettingsRecentRooms(s.recentRooms);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
settingsBtnHome.addEventListener("click", openSettings);
|
||||
settingsBtnCall.addEventListener("click", openSettings);
|
||||
settingsClose.addEventListener("click", closeSettings);
|
||||
settingsPanel.addEventListener("click", (e) => { if (e.target === settingsPanel) closeSettings(); });
|
||||
|
||||
settingsSave.addEventListener("click", () => {
|
||||
const s = loadSettings();
|
||||
s.room = sRoom.value; s.alias = sAlias.value; s.osAec = sOsAec.checked;
|
||||
saveSettingsObj(s);
|
||||
roomInput.value = s.room; aliasInput.value = s.alias; osAecCheckbox.checked = s.osAec;
|
||||
renderRecentRooms(s.recentRooms);
|
||||
closeSettings();
|
||||
});
|
||||
|
||||
sClearRecent.addEventListener("click", () => {
|
||||
const s = loadSettings();
|
||||
s.recentRooms = [];
|
||||
saveSettingsObj(s);
|
||||
renderSettingsRecentRooms([]);
|
||||
renderRecentRooms([]);
|
||||
});
|
||||
|
||||
document.addEventListener("keydown", (e) => {
|
||||
if ((e.metaKey || e.ctrlKey) && e.key === ",") {
|
||||
e.preventDefault();
|
||||
settingsPanel.classList.contains("hidden") ? openSettings() : closeSettings();
|
||||
}
|
||||
if (e.key === "Escape") {
|
||||
if (!relayDialog.classList.contains("hidden")) closeRelayDialog();
|
||||
else if (!settingsPanel.classList.contains("hidden")) closeSettings();
|
||||
}
|
||||
});
|
||||
@@ -1,653 +0,0 @@
|
||||
:root {
|
||||
--bg: #0f0f1a;
|
||||
--surface: #1a1a2e;
|
||||
--surface2: #222244;
|
||||
--primary: #0f3460;
|
||||
--accent: #e94560;
|
||||
--text: #eee;
|
||||
--text-dim: #777;
|
||||
--green: #4ade80;
|
||||
--red: #ef4444;
|
||||
--yellow: #facc15;
|
||||
--radius: 12px;
|
||||
}
|
||||
|
||||
* { margin: 0; padding: 0; box-sizing: border-box; }
|
||||
|
||||
body {
|
||||
font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, sans-serif;
|
||||
background: var(--bg);
|
||||
color: var(--text);
|
||||
min-height: 100vh;
|
||||
user-select: none;
|
||||
-webkit-user-select: none;
|
||||
}
|
||||
|
||||
#app {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
min-height: 100vh;
|
||||
padding: 20px;
|
||||
}
|
||||
|
||||
.hidden { display: none !important; }
|
||||
|
||||
/* ── Connect screen ── */
|
||||
#connect-screen {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
flex: 1;
|
||||
gap: 20px;
|
||||
}
|
||||
|
||||
#connect-screen h1 {
|
||||
font-size: 26px;
|
||||
font-weight: 700;
|
||||
letter-spacing: 1px;
|
||||
}
|
||||
|
||||
.subtitle {
|
||||
font-size: 13px;
|
||||
color: var(--text-dim);
|
||||
margin-top: -12px;
|
||||
letter-spacing: 2px;
|
||||
text-transform: uppercase;
|
||||
}
|
||||
|
||||
.form {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 12px;
|
||||
width: 100%;
|
||||
max-width: 320px;
|
||||
}
|
||||
|
||||
.form label {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 4px;
|
||||
font-size: 11px;
|
||||
color: var(--text-dim);
|
||||
text-transform: uppercase;
|
||||
letter-spacing: 0.5px;
|
||||
}
|
||||
|
||||
.form input[type="text"] {
|
||||
background: var(--surface);
|
||||
border: 1px solid #333;
|
||||
border-radius: 8px;
|
||||
padding: 10px 12px;
|
||||
color: var(--text);
|
||||
font-size: 15px;
|
||||
outline: none;
|
||||
transition: border-color 0.2s;
|
||||
}
|
||||
|
||||
.form input[type="text"]:focus {
|
||||
border-color: var(--accent);
|
||||
}
|
||||
|
||||
/* ── Relay button ── */
|
||||
.relay-selected {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 8px;
|
||||
width: 100%;
|
||||
background: var(--surface);
|
||||
border: 1px solid #333;
|
||||
border-radius: 8px;
|
||||
padding: 10px 12px;
|
||||
color: var(--text);
|
||||
font-size: 14px;
|
||||
cursor: pointer;
|
||||
text-align: left;
|
||||
transition: border-color 0.2s;
|
||||
}
|
||||
|
||||
.relay-selected:hover { border-color: var(--accent); }
|
||||
|
||||
.relay-lock {
|
||||
font-size: 14px;
|
||||
flex-shrink: 0;
|
||||
}
|
||||
|
||||
.relay-selected .arrow {
|
||||
margin-left: auto;
|
||||
font-size: 10px;
|
||||
color: var(--text-dim);
|
||||
}
|
||||
|
||||
.dot.green { background: var(--green); }
|
||||
.dot.yellow { background: var(--yellow); }
|
||||
.dot.red { background: var(--red); }
|
||||
.dot.gray { background: #555; }
|
||||
|
||||
/* ── Relay dialog ── */
|
||||
#relay-dialog {
|
||||
position: fixed;
|
||||
inset: 0;
|
||||
background: rgba(0,0,0,0.6);
|
||||
backdrop-filter: blur(4px);
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
z-index: 200;
|
||||
padding: 20px;
|
||||
}
|
||||
|
||||
.relay-dialog-card {
|
||||
max-width: 360px;
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
.relay-dialog-list {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 6px;
|
||||
max-height: 300px;
|
||||
overflow-y: auto;
|
||||
}
|
||||
|
||||
.relay-dialog-item {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 8px;
|
||||
background: var(--surface);
|
||||
border-radius: 8px;
|
||||
padding: 8px 12px;
|
||||
}
|
||||
|
||||
.relay-dialog-item .dot { width: 8px; height: 8px; border-radius: 50%; flex-shrink: 0; }
|
||||
.relay-dialog-item { cursor: pointer; transition: background 0.1s; }
|
||||
.relay-dialog-item:hover { background: var(--surface2); }
|
||||
.relay-dialog-item.selected { background: var(--primary); border: 1px solid var(--accent); }
|
||||
|
||||
.relay-dialog-item .relay-info { flex: 1; min-width: 0; overflow: hidden; }
|
||||
.relay-dialog-item .relay-name { font-size: 13px; font-weight: 500; overflow: hidden; text-overflow: ellipsis; white-space: nowrap; }
|
||||
.relay-dialog-item .relay-addr { font-size: 11px; color: var(--text-dim); font-family: monospace; overflow: hidden; text-overflow: ellipsis; }
|
||||
.relay-dialog-item .relay-rtt { font-size: 11px; color: var(--text-dim); margin-right: 4px; }
|
||||
|
||||
.relay-meta {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
align-items: center;
|
||||
gap: 2px;
|
||||
flex-shrink: 0;
|
||||
}
|
||||
|
||||
.relay-lock-icon { font-size: 16px; }
|
||||
.relay-meta .relay-rtt { font-size: 10px; color: var(--text-dim); }
|
||||
|
||||
.relay-dialog-item .remove {
|
||||
background: none;
|
||||
border: none;
|
||||
color: var(--text-dim);
|
||||
cursor: pointer;
|
||||
font-size: 16px;
|
||||
padding: 0 4px;
|
||||
}
|
||||
|
||||
.relay-dialog-item .remove:hover { color: var(--red); }
|
||||
|
||||
.relay-add-row {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 8px;
|
||||
margin-top: 12px;
|
||||
border-top: 1px solid #333;
|
||||
padding-top: 12px;
|
||||
}
|
||||
|
||||
.relay-add-inputs {
|
||||
display: flex;
|
||||
gap: 6px;
|
||||
}
|
||||
|
||||
.relay-add-row input {
|
||||
background: var(--surface);
|
||||
border: 1px solid #333;
|
||||
border-radius: 8px;
|
||||
padding: 8px 10px;
|
||||
color: var(--text);
|
||||
font-size: 13px;
|
||||
outline: none;
|
||||
flex: 1;
|
||||
min-width: 0;
|
||||
}
|
||||
|
||||
.relay-add-row input:focus { border-color: var(--accent); }
|
||||
|
||||
.relay-add-row .primary {
|
||||
padding: 10px;
|
||||
font-size: 14px;
|
||||
}
|
||||
|
||||
.form-row {
|
||||
display: flex;
|
||||
gap: 16px;
|
||||
align-items: center;
|
||||
}
|
||||
|
||||
.checkbox {
|
||||
flex-direction: row !important;
|
||||
align-items: center;
|
||||
gap: 8px !important;
|
||||
cursor: pointer;
|
||||
font-size: 13px !important;
|
||||
}
|
||||
|
||||
.checkbox input { width: 16px; height: 16px; }
|
||||
|
||||
button.primary {
|
||||
background: var(--accent);
|
||||
color: white;
|
||||
border: none;
|
||||
border-radius: 8px;
|
||||
padding: 12px;
|
||||
font-size: 16px;
|
||||
font-weight: 600;
|
||||
cursor: pointer;
|
||||
transition: opacity 0.2s;
|
||||
margin-top: 4px;
|
||||
}
|
||||
|
||||
button.primary:hover { opacity: 0.9; }
|
||||
button.primary:disabled { opacity: 0.5; cursor: not-allowed; }
|
||||
|
||||
.error {
|
||||
color: var(--red);
|
||||
font-size: 13px;
|
||||
min-height: 18px;
|
||||
}
|
||||
|
||||
.identity-info {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
gap: 8px;
|
||||
}
|
||||
|
||||
.fp-display {
|
||||
font-family: monospace;
|
||||
font-size: 11px;
|
||||
color: var(--text-dim);
|
||||
}
|
||||
|
||||
.recent-rooms {
|
||||
display: flex;
|
||||
flex-wrap: wrap;
|
||||
gap: 8px;
|
||||
justify-content: center;
|
||||
max-width: 320px;
|
||||
}
|
||||
|
||||
.recent-room {
|
||||
background: var(--surface);
|
||||
border: 1px solid #333;
|
||||
border-radius: 16px;
|
||||
padding: 4px 12px;
|
||||
font-size: 12px;
|
||||
color: var(--text-dim);
|
||||
cursor: pointer;
|
||||
transition: all 0.2s;
|
||||
}
|
||||
|
||||
.recent-room:hover {
|
||||
border-color: var(--accent);
|
||||
color: var(--text);
|
||||
}
|
||||
|
||||
/* ── Call screen ── */
|
||||
#call-screen {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
flex: 1;
|
||||
gap: 16px;
|
||||
}
|
||||
|
||||
.call-header {
|
||||
text-align: center;
|
||||
padding: 8px;
|
||||
}
|
||||
|
||||
.room-name {
|
||||
font-size: 20px;
|
||||
font-weight: 600;
|
||||
}
|
||||
|
||||
.call-meta {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
gap: 8px;
|
||||
margin-top: 4px;
|
||||
}
|
||||
|
||||
.status-dot {
|
||||
width: 8px;
|
||||
height: 8px;
|
||||
border-radius: 50%;
|
||||
background: var(--green);
|
||||
display: inline-block;
|
||||
animation: pulse 2s infinite;
|
||||
}
|
||||
|
||||
@keyframes pulse {
|
||||
0%, 100% { opacity: 1; }
|
||||
50% { opacity: 0.4; }
|
||||
}
|
||||
|
||||
.status-dot.reconnecting {
|
||||
background: var(--yellow);
|
||||
animation: blink 0.5s infinite;
|
||||
}
|
||||
|
||||
@keyframes blink {
|
||||
0%, 100% { opacity: 1; }
|
||||
50% { opacity: 0.1; }
|
||||
}
|
||||
|
||||
.call-timer {
|
||||
font-size: 14px;
|
||||
color: var(--text-dim);
|
||||
font-variant-numeric: tabular-nums;
|
||||
}
|
||||
|
||||
/* ── Audio level meter ── */
|
||||
.level-meter {
|
||||
height: 4px;
|
||||
background: var(--surface);
|
||||
border-radius: 2px;
|
||||
overflow: hidden;
|
||||
}
|
||||
|
||||
.level-bar-fill {
|
||||
height: 100%;
|
||||
width: 0%;
|
||||
background: linear-gradient(90deg, var(--green) 0%, var(--yellow) 60%, var(--red) 100%);
|
||||
border-radius: 2px;
|
||||
transition: width 0.1s ease-out;
|
||||
}
|
||||
|
||||
/* ── Participants ── */
|
||||
.participants {
|
||||
background: var(--surface);
|
||||
border-radius: var(--radius);
|
||||
padding: 12px 16px;
|
||||
flex: 1;
|
||||
overflow-y: auto;
|
||||
min-height: 80px;
|
||||
}
|
||||
|
||||
.participants-empty {
|
||||
color: var(--text-dim);
|
||||
font-size: 13px;
|
||||
text-align: center;
|
||||
padding: 20px 0;
|
||||
}
|
||||
|
||||
.participant {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 10px;
|
||||
padding: 8px 0;
|
||||
border-bottom: 1px solid #ffffff08;
|
||||
}
|
||||
|
||||
.participant:last-child { border-bottom: none; }
|
||||
|
||||
.participant .avatar {
|
||||
width: 36px;
|
||||
height: 36px;
|
||||
border-radius: 50%;
|
||||
background: var(--primary);
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
font-size: 14px;
|
||||
font-weight: 600;
|
||||
flex-shrink: 0;
|
||||
}
|
||||
|
||||
.participant .avatar.me {
|
||||
background: var(--accent);
|
||||
}
|
||||
|
||||
.participant .info { flex: 1; min-width: 0; }
|
||||
|
||||
.participant .name {
|
||||
font-size: 14px;
|
||||
font-weight: 500;
|
||||
overflow: hidden;
|
||||
text-overflow: ellipsis;
|
||||
white-space: nowrap;
|
||||
}
|
||||
|
||||
.participant .fp {
|
||||
font-size: 10px;
|
||||
color: var(--text-dim);
|
||||
font-family: monospace;
|
||||
overflow: hidden;
|
||||
text-overflow: ellipsis;
|
||||
}
|
||||
|
||||
.participant .you-badge {
|
||||
font-size: 10px;
|
||||
color: var(--accent);
|
||||
background: #e9456020;
|
||||
padding: 1px 6px;
|
||||
border-radius: 8px;
|
||||
}
|
||||
|
||||
/* ── Controls ── */
|
||||
.controls {
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
gap: 24px;
|
||||
padding: 12px;
|
||||
}
|
||||
|
||||
.control-btn {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
background: var(--surface2);
|
||||
color: var(--text);
|
||||
border: none;
|
||||
border-radius: 50%;
|
||||
width: 56px;
|
||||
height: 56px;
|
||||
cursor: pointer;
|
||||
transition: all 0.15s;
|
||||
font-size: 13px;
|
||||
font-weight: 600;
|
||||
}
|
||||
|
||||
.control-btn:hover { background: var(--primary); }
|
||||
|
||||
.control-btn.muted {
|
||||
background: var(--red);
|
||||
color: white;
|
||||
}
|
||||
|
||||
.control-btn.hangup {
|
||||
background: var(--red);
|
||||
color: white;
|
||||
width: 64px;
|
||||
height: 64px;
|
||||
font-size: 14px;
|
||||
}
|
||||
|
||||
.control-btn.hangup:hover { opacity: 0.85; }
|
||||
|
||||
/* ── Stats ── */
|
||||
.stats {
|
||||
text-align: center;
|
||||
font-size: 10px;
|
||||
color: var(--text-dim);
|
||||
font-family: monospace;
|
||||
padding: 4px;
|
||||
}
|
||||
|
||||
/* ── Icon button ── */
|
||||
.icon-btn {
|
||||
background: none;
|
||||
border: 1px solid #444;
|
||||
border-radius: 8px;
|
||||
color: var(--text-dim);
|
||||
font-size: 18px;
|
||||
width: 36px;
|
||||
height: 36px;
|
||||
cursor: pointer;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
transition: all 0.15s;
|
||||
}
|
||||
|
||||
.icon-btn:hover { border-color: var(--accent); color: var(--text); }
|
||||
.icon-btn.small { width: 28px; height: 28px; font-size: 14px; }
|
||||
|
||||
.call-header-row {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
gap: 8px;
|
||||
}
|
||||
|
||||
/* ── Settings panel ── */
|
||||
#settings-panel {
|
||||
position: fixed;
|
||||
inset: 0;
|
||||
background: rgba(0, 0, 0, 0.6);
|
||||
backdrop-filter: blur(4px);
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
z-index: 100;
|
||||
padding: 20px;
|
||||
}
|
||||
|
||||
.settings-card {
|
||||
background: var(--bg);
|
||||
border: 1px solid #333;
|
||||
border-radius: 16px;
|
||||
padding: 24px;
|
||||
width: 100%;
|
||||
max-width: 380px;
|
||||
max-height: 90vh;
|
||||
overflow-y: auto;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 20px;
|
||||
}
|
||||
|
||||
.settings-header {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: space-between;
|
||||
}
|
||||
|
||||
.settings-header h2 {
|
||||
font-size: 18px;
|
||||
font-weight: 600;
|
||||
}
|
||||
|
||||
.settings-section {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 10px;
|
||||
}
|
||||
|
||||
.settings-section h3 {
|
||||
font-size: 12px;
|
||||
text-transform: uppercase;
|
||||
letter-spacing: 1px;
|
||||
color: var(--text-dim);
|
||||
border-bottom: 1px solid #333;
|
||||
padding-bottom: 4px;
|
||||
}
|
||||
|
||||
.settings-section label {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 4px;
|
||||
font-size: 11px;
|
||||
color: var(--text-dim);
|
||||
text-transform: uppercase;
|
||||
letter-spacing: 0.5px;
|
||||
}
|
||||
|
||||
.settings-section input[type="text"] {
|
||||
background: var(--surface);
|
||||
border: 1px solid #333;
|
||||
border-radius: 8px;
|
||||
padding: 8px 10px;
|
||||
color: var(--text);
|
||||
font-size: 14px;
|
||||
outline: none;
|
||||
}
|
||||
|
||||
.settings-section input[type="text"]:focus {
|
||||
border-color: var(--accent);
|
||||
}
|
||||
|
||||
.setting-row {
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
align-items: center;
|
||||
padding: 4px 0;
|
||||
}
|
||||
|
||||
.setting-label {
|
||||
font-size: 12px;
|
||||
color: var(--text-dim);
|
||||
}
|
||||
|
||||
.fp-display-large {
|
||||
font-family: monospace;
|
||||
font-size: 12px;
|
||||
color: var(--text);
|
||||
word-break: break-all;
|
||||
}
|
||||
|
||||
.recent-rooms-list {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 4px;
|
||||
}
|
||||
|
||||
.recent-room-item {
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
align-items: center;
|
||||
background: var(--surface);
|
||||
border-radius: 8px;
|
||||
padding: 6px 10px;
|
||||
font-size: 13px;
|
||||
}
|
||||
|
||||
.recent-room-item .remove {
|
||||
background: none;
|
||||
border: none;
|
||||
color: var(--text-dim);
|
||||
cursor: pointer;
|
||||
font-size: 16px;
|
||||
}
|
||||
|
||||
.recent-room-item .remove:hover { color: var(--red); }
|
||||
|
||||
.secondary-btn {
|
||||
background: var(--surface);
|
||||
border: 1px solid #444;
|
||||
border-radius: 8px;
|
||||
padding: 8px;
|
||||
color: var(--text-dim);
|
||||
font-size: 13px;
|
||||
cursor: pointer;
|
||||
transition: all 0.15s;
|
||||
}
|
||||
|
||||
.secondary-btn:hover { border-color: var(--accent); color: var(--text); }
|
||||
@@ -1,15 +0,0 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"target": "ESNext",
|
||||
"module": "ESNext",
|
||||
"moduleResolution": "bundler",
|
||||
"strict": true,
|
||||
"esModuleInterop": true,
|
||||
"skipLibCheck": true,
|
||||
"forceConsistentCasingInFileNames": true,
|
||||
"resolveJsonModule": true,
|
||||
"allowImportingTsExtensions": true,
|
||||
"noEmit": true
|
||||
},
|
||||
"include": ["src"]
|
||||
}
|
||||
@@ -1,15 +0,0 @@
|
||||
import { defineConfig } from "vite";
|
||||
|
||||
export default defineConfig({
|
||||
clearScreen: false,
|
||||
server: {
|
||||
port: 1420,
|
||||
strictPort: true,
|
||||
},
|
||||
envPrefix: ["VITE_", "TAURI_"],
|
||||
build: {
|
||||
target: "esnext",
|
||||
minify: !process.env.TAURI_DEBUG ? "esbuild" : false,
|
||||
sourcemap: !!process.env.TAURI_DEBUG,
|
||||
},
|
||||
});
|
||||
473
docs/WEB_VARIANTS.md
Normal file
473
docs/WEB_VARIANTS.md
Normal file
@@ -0,0 +1,473 @@
|
||||
# WZP Web Client Variants
|
||||
|
||||
Three browser-based client implementations with different trade-offs between simplicity, features, and performance.
|
||||
|
||||
## Variant Comparison
|
||||
|
||||
```mermaid
|
||||
graph LR
|
||||
subgraph "Variant 1: Pure JS"
|
||||
P_MIC[Mic] --> P_WRK[AudioWorklet<br/>48kHz PCM]
|
||||
P_WRK --> P_WS[WebSocket<br/>TCP]
|
||||
P_WS --> P_BRIDGE[wzp-web Bridge<br/>Opus + FEC + Crypto]
|
||||
P_BRIDGE --> P_QUIC[QUIC Datagram<br/>to Relay]
|
||||
end
|
||||
|
||||
style P_BRIDGE fill:#ff9f43
|
||||
style P_WS fill:#74b9ff
|
||||
```
|
||||
|
||||
```mermaid
|
||||
graph LR
|
||||
subgraph "Variant 2: Hybrid"
|
||||
H_MIC[Mic] --> H_WRK[AudioWorklet<br/>48kHz PCM]
|
||||
H_WRK --> H_FEC[WASM RaptorQ<br/>FEC Encode]
|
||||
H_FEC --> H_WS[WebSocket<br/>TCP]
|
||||
H_WS --> H_BRIDGE[wzp-web Bridge<br/>Opus + Crypto]
|
||||
H_BRIDGE --> H_QUIC[QUIC Datagram<br/>to Relay]
|
||||
end
|
||||
|
||||
style H_FEC fill:#a29bfe
|
||||
style H_BRIDGE fill:#ff9f43
|
||||
style H_WS fill:#74b9ff
|
||||
```
|
||||
|
||||
```mermaid
|
||||
graph LR
|
||||
subgraph "Variant 3: Full WASM"
|
||||
F_MIC[Mic] --> F_WRK[AudioWorklet<br/>48kHz PCM]
|
||||
F_WRK --> F_FEC[WASM RaptorQ<br/>FEC Encode]
|
||||
F_FEC --> F_ENC[WASM ChaCha20<br/>Encrypt]
|
||||
F_ENC --> F_WT[WebTransport<br/>UDP Datagrams]
|
||||
F_WT --> F_RELAY[Direct to Relay<br/>No Bridge]
|
||||
end
|
||||
|
||||
style F_FEC fill:#a29bfe
|
||||
style F_ENC fill:#ee5a24
|
||||
style F_WT fill:#00b894
|
||||
```
|
||||
|
||||
## Summary Table
|
||||
|
||||
| | Pure JS | Hybrid | Full WASM |
|
||||
|--|---------|--------|-----------|
|
||||
| **Bundle** | ~20KB JS | ~120KB (JS + 337KB WASM) | ~20KB JS + 337KB WASM |
|
||||
| **Transport** | WebSocket (TCP) | WebSocket (TCP) | WebTransport (UDP) |
|
||||
| **Encryption** | Bridge-side (ChaCha20 on QUIC) | Bridge-side | Browser-side ChaCha20-Poly1305 WASM |
|
||||
| **FEC** | None | RaptorQ WASM (ready, not active over TCP) | RaptorQ WASM (active over UDP) |
|
||||
| **Codec** | Bridge Opus (server-side) | Bridge Opus | Browser Opus (future) / Bridge Opus |
|
||||
| **E2E Encrypted** | No (bridge sees plaintext PCM) | No (bridge sees plaintext PCM) | Yes (bridge eliminated) |
|
||||
| **Latency** | ~50-80ms (TCP overhead) | ~50-80ms (TCP) | ~20-40ms (UDP datagrams) |
|
||||
| **Loss Recovery** | TCP retransmit (adds latency) | TCP retransmit | RaptorQ FEC (no retransmit) |
|
||||
| **Browser Support** | All browsers | All browsers | Chrome 97+, Edge 97+, Firefox 114+, Safari 17.4+ |
|
||||
| **Relay Changes** | None | None | Needs HTTP/3 (h3-quinn) |
|
||||
| **Status** | Ready | Ready (FEC testable in console) | Architecture complete, needs relay HTTP/3 |
|
||||
|
||||
## Variant 1: Pure JS
|
||||
|
||||
The lightest implementation. No WASM, no FEC, no browser-side encryption. The `wzp-web` Rust bridge handles everything on the server side.
|
||||
|
||||
### Architecture
|
||||
|
||||
```mermaid
|
||||
sequenceDiagram
|
||||
participant B as Browser
|
||||
participant W as wzp-web Bridge
|
||||
participant R as wzp-relay
|
||||
|
||||
B->>B: getUserMedia() mic access
|
||||
B->>B: AudioWorklet captures 960 samples / 20ms
|
||||
|
||||
B->>W: WebSocket connect /ws/room-name
|
||||
W->>R: QUIC connect (SNI = hashed room)
|
||||
W->>R: Crypto handshake (X25519 + ChaCha20)
|
||||
|
||||
loop Every 20ms
|
||||
B->>W: WS Binary: Int16[960] raw PCM
|
||||
W->>W: Opus encode + FEC + Encrypt
|
||||
W->>R: QUIC Datagram
|
||||
end
|
||||
|
||||
loop Incoming
|
||||
R->>W: QUIC Datagram
|
||||
W->>W: Decrypt + FEC decode + Opus decode
|
||||
W->>B: WS Binary: Int16[960] raw PCM
|
||||
end
|
||||
|
||||
B->>B: AudioWorklet plays received PCM
|
||||
```
|
||||
|
||||
### Data Flow
|
||||
|
||||
```
|
||||
Browser (Pure JS)
|
||||
├── Capture: getUserMedia → AudioWorklet (WZPCaptureProcessor)
|
||||
│ └── 128-sample blocks accumulated → 960-sample frame
|
||||
│ └── Float32 → Int16 conversion
|
||||
│ └── postMessage(ArrayBuffer) to main thread
|
||||
├── Send: onmessage → ws.send(pcmBuffer)
|
||||
│ └── Binary WebSocket frame (1920 bytes = 960 × 2)
|
||||
├── Receive: ws.onmessage → ArrayBuffer
|
||||
│ └── Int16Array(960) → playback port
|
||||
└── Playback: AudioWorklet (WZPPlaybackProcessor)
|
||||
└── Ring buffer (max 120ms)
|
||||
└── Int16 → Float32 → output blocks
|
||||
```
|
||||
|
||||
### Files
|
||||
- `js/wzp-pure.js` — `WZPPureClient` class (~100 lines)
|
||||
- `js/wzp-core.js` — Shared UI + audio (used by all variants)
|
||||
- `audio-processor.js` — AudioWorklet (unchanged)
|
||||
|
||||
### Limitations
|
||||
- No packet loss recovery (TCP retransmit adds latency spikes)
|
||||
- Bridge sees plaintext audio (not E2E encrypted)
|
||||
- Full audio processing pipeline runs on server (Opus, FEC, crypto)
|
||||
- Each browser connection = one QUIC session on the bridge
|
||||
|
||||
---
|
||||
|
||||
## Variant 2: Hybrid (JS + WASM FEC)
|
||||
|
||||
Adds RaptorQ forward error correction via a small WASM module. Same WebSocket transport as Pure — the FEC module is loaded and functional but doesn't add value over TCP (no packet loss). It's ready to activate when WebTransport replaces WebSocket.
|
||||
|
||||
### Architecture
|
||||
|
||||
```mermaid
|
||||
sequenceDiagram
|
||||
participant B as Browser
|
||||
participant WASM as WASM Module
|
||||
participant W as wzp-web Bridge
|
||||
participant R as wzp-relay
|
||||
|
||||
B->>WASM: Load wzp_wasm.js (337KB)
|
||||
WASM-->>B: WzpFecEncoder + WzpFecDecoder ready
|
||||
|
||||
B->>W: WebSocket connect /ws/room-name
|
||||
W->>R: QUIC connect + handshake
|
||||
|
||||
loop Every 20ms
|
||||
B->>B: AudioWorklet captures PCM
|
||||
B->>WASM: fecEncoder.add_symbol(pcm_bytes)
|
||||
WASM-->>B: FEC packets (source + repair) when block complete
|
||||
B->>W: WS Binary: raw PCM (FEC not on wire over TCP)
|
||||
end
|
||||
|
||||
Note over B,WASM: FEC encode/decode proven via testFec()
|
||||
```
|
||||
|
||||
### WASM Module (wzp-wasm)
|
||||
|
||||
```mermaid
|
||||
graph TD
|
||||
subgraph "wzp-wasm (337KB)"
|
||||
FE[WzpFecEncoder<br/>RaptorQ source block accumulator]
|
||||
FD[WzpFecDecoder<br/>RaptorQ reconstruction]
|
||||
KX[WzpKeyExchange<br/>X25519 ephemeral DH]
|
||||
CS[WzpCryptoSession<br/>ChaCha20-Poly1305]
|
||||
end
|
||||
|
||||
subgraph "Hybrid uses"
|
||||
FE
|
||||
FD
|
||||
end
|
||||
|
||||
subgraph "Full uses"
|
||||
FE
|
||||
FD
|
||||
KX
|
||||
CS
|
||||
end
|
||||
|
||||
style FE fill:#a29bfe
|
||||
style FD fill:#a29bfe
|
||||
style KX fill:#ee5a24
|
||||
style CS fill:#ee5a24
|
||||
```
|
||||
|
||||
### FEC Wire Format
|
||||
|
||||
```
|
||||
Per symbol (encoded by WASM, 259 bytes):
|
||||
┌──────────┬───────────┬──────────┬──────────────────┐
|
||||
│ block_id │ symbol_idx│ is_repair│ symbol_data │
|
||||
│ (1 byte) │ (1 byte) │ (1 byte) │ (256 bytes) │
|
||||
└──────────┴───────────┴──────────┴──────────────────┘
|
||||
|
||||
Symbol data internals (256 bytes):
|
||||
┌────────────┬──────────────────┬─────────┐
|
||||
│ length │ audio frame data │ padding │
|
||||
│ (2B LE) │ (variable) │ (zeros) │
|
||||
└────────────┴──────────────────┴─────────┘
|
||||
|
||||
Block = 5 source symbols + ceil(5 × 0.5) = 3 repair symbols = 8 total
|
||||
Any 5 of 8 received → full block recoverable (RaptorQ fountain code)
|
||||
```
|
||||
|
||||
### Testing FEC in Browser Console
|
||||
|
||||
```javascript
|
||||
// On any hybrid variant page, open console:
|
||||
client.testFec({ lossRate: 0.3, blockSize: 5, symbolSize: 256 })
|
||||
// Output: "FEC test passed — recovered from 30% loss"
|
||||
|
||||
client.testFec({ lossRate: 0.5 })
|
||||
// Output: "FEC test passed — recovered from 50% loss"
|
||||
```
|
||||
|
||||
### Files
|
||||
- `js/wzp-hybrid.js` — `WZPHybridClient` class (~150 lines)
|
||||
- `js/wzp-core.js` — Shared UI + audio
|
||||
- `wasm/wzp_wasm.js` + `wasm/wzp_wasm_bg.wasm` — WASM module (337KB)
|
||||
|
||||
### Limitations
|
||||
- FEC doesn't help over TCP WebSocket (no packet loss to recover from)
|
||||
- Bridge still sees plaintext audio
|
||||
- WebTransport activation is the unlock for FEC value
|
||||
|
||||
---
|
||||
|
||||
## Variant 3: Full WASM + WebTransport
|
||||
|
||||
The complete WZP client in the browser. No bridge server needed — the browser connects directly to the relay via WebTransport unreliable datagrams. All encryption and FEC happens in WASM.
|
||||
|
||||
### Architecture
|
||||
|
||||
```mermaid
|
||||
sequenceDiagram
|
||||
participant B as Browser
|
||||
participant WASM as WASM Module
|
||||
participant R as wzp-relay
|
||||
|
||||
B->>WASM: Load wzp_wasm.js
|
||||
WASM-->>B: FEC + Crypto + KeyExchange ready
|
||||
|
||||
B->>R: WebTransport connect (HTTPS/HTTP3)
|
||||
B->>R: Bidirectional stream open
|
||||
|
||||
Note over B,R: Key Exchange
|
||||
B->>WASM: kx = new WzpKeyExchange()
|
||||
B->>R: Stream: our X25519 public key (32 bytes)
|
||||
R->>B: Stream: relay X25519 public key (32 bytes)
|
||||
B->>WASM: secret = kx.derive_shared_secret(peer_pub)
|
||||
B->>WASM: session = new WzpCryptoSession(secret)
|
||||
|
||||
Note over B,R: Media Flow (Unreliable Datagrams)
|
||||
loop Every 20ms
|
||||
B->>B: AudioWorklet captures PCM
|
||||
B->>WASM: fecEncoder.add_symbol(pcm_bytes)
|
||||
WASM-->>B: FEC symbols when block complete
|
||||
B->>WASM: encrypted = session.encrypt(header, symbol)
|
||||
B->>R: WebTransport datagram (encrypted)
|
||||
end
|
||||
|
||||
loop Incoming
|
||||
R->>B: WebTransport datagram (encrypted)
|
||||
B->>WASM: plaintext = session.decrypt(header, ciphertext)
|
||||
B->>WASM: frames = fecDecoder.add_symbol(...)
|
||||
WASM-->>B: Decoded audio frames
|
||||
B->>B: AudioWorklet plays PCM
|
||||
end
|
||||
```
|
||||
|
||||
### Encryption Flow
|
||||
|
||||
```mermaid
|
||||
graph TD
|
||||
subgraph "Key Exchange (once per session)"
|
||||
KX_A[Browser: WzpKeyExchange.new<br/>Generate X25519 keypair] --> PUB_A[Send public key<br/>32 bytes over stream]
|
||||
PUB_B[Receive relay public key<br/>32 bytes] --> DH[derive_shared_secret<br/>X25519 DH + HKDF-SHA256]
|
||||
DH --> SESSION[WzpCryptoSession<br/>ChaCha20-Poly1305 256-bit key]
|
||||
end
|
||||
|
||||
subgraph "Per-Packet Encryption"
|
||||
HDR[Build MediaHeader<br/>12 bytes AAD] --> ENC[session.encrypt<br/>header=AAD plaintext=audio]
|
||||
ENC --> NONCE[Nonce 12 bytes<br/>session_id 4 + seq 4 + dir 1 + pad 3]
|
||||
ENC --> CT[Ciphertext + 16-byte Poly1305 tag]
|
||||
CT --> DG[WebTransport datagram send]
|
||||
end
|
||||
|
||||
style SESSION fill:#ee5a24
|
||||
style NONCE fill:#fdcb6e
|
||||
```
|
||||
|
||||
### Nonce Construction (matches native wzp-crypto)
|
||||
|
||||
```
|
||||
Bytes 0-3: session_id (SHA-256(session_key)[:4])
|
||||
Bytes 4-7: sequence_number (u32 BE, incrementing)
|
||||
Byte 8: direction (0x00 = send, 0x01 = recv)
|
||||
Bytes 9-11: 0x000000 (padding)
|
||||
|
||||
Total: 12 bytes — deterministic, never reused (seq increments)
|
||||
```
|
||||
|
||||
### Send Pipeline Detail
|
||||
|
||||
```mermaid
|
||||
graph TD
|
||||
MIC[Mic PCM Int16 x 960] --> PAD[Pad to 256 bytes<br/>2-byte LE length + data + zeros]
|
||||
PAD --> FEC[WzpFecEncoder.add_symbol<br/>Accumulate 5 frames per block]
|
||||
FEC -->|Block complete| SYMBOLS[5 source + 3 repair symbols]
|
||||
SYMBOLS --> HDR[Build 12-byte MediaHeader<br/>seq, timestamp, codec, fec_block, symbol_idx]
|
||||
HDR --> ENCRYPT[WzpCryptoSession.encrypt<br/>AAD=header, payload=symbol]
|
||||
ENCRYPT --> DG[WebTransport datagram<br/>header 12B + ciphertext + tag 16B]
|
||||
|
||||
style FEC fill:#a29bfe
|
||||
style ENCRYPT fill:#ee5a24
|
||||
style DG fill:#00b894
|
||||
```
|
||||
|
||||
### Receive Pipeline Detail
|
||||
|
||||
```mermaid
|
||||
graph TD
|
||||
DG[WebTransport datagram] --> PARSE[Parse 12-byte MediaHeader]
|
||||
PARSE --> DECRYPT[WzpCryptoSession.decrypt<br/>AAD=header, ciphertext=rest]
|
||||
DECRYPT --> FEC_HDR[Parse 3-byte FEC header<br/>block_id + symbol_idx + is_repair]
|
||||
FEC_HDR --> FEC_D[WzpFecDecoder.add_symbol]
|
||||
FEC_D -->|Block decoded| FRAMES[Original audio frames]
|
||||
FRAMES --> UNPAD[Strip 2-byte length prefix + padding]
|
||||
UNPAD --> PLAY[AudioWorklet playback<br/>Int16 PCM x 960]
|
||||
|
||||
style DECRYPT fill:#ee5a24
|
||||
style FEC_D fill:#a29bfe
|
||||
style PLAY fill:#4a9eff
|
||||
```
|
||||
|
||||
### Testing Crypto + FEC in Browser Console
|
||||
|
||||
```javascript
|
||||
// On any full variant page, open console:
|
||||
client.testCryptoFec()
|
||||
// Tests: key exchange → encrypt → FEC encode → simulate 30% loss → FEC decode → decrypt
|
||||
// Output: "Crypto+FEC test passed — key exchange, encrypt, FEC(30% loss), decrypt all OK"
|
||||
```
|
||||
|
||||
### Files
|
||||
- `js/wzp-full.js` — `WZPFullClient` class (~250 lines)
|
||||
- `js/wzp-core.js` — Shared UI + audio
|
||||
- `wasm/wzp_wasm.js` + `wasm/wzp_wasm_bg.wasm` — WASM module (337KB, shared with hybrid)
|
||||
|
||||
### Requirements (not yet met)
|
||||
- Relay must support HTTP/3 WebTransport (h3-quinn integration)
|
||||
- Real TLS certificate (WebTransport requires valid HTTPS)
|
||||
- Browser with WebTransport support (Chrome 97+, Edge 97+, Firefox 114+, Safari 17.4+)
|
||||
|
||||
### Limitations
|
||||
- No Opus encoding in browser yet (sends raw PCM, relay/peer decodes)
|
||||
- Key exchange is simplified (no Ed25519 signature verification in WASM yet)
|
||||
- No adaptive quality switching in browser (server-side only)
|
||||
|
||||
---
|
||||
|
||||
## Shared Infrastructure
|
||||
|
||||
### wzp-core.js
|
||||
|
||||
Common code used by all three variants:
|
||||
|
||||
```mermaid
|
||||
graph TD
|
||||
CORE[wzp-core.js] --> DETECT[detectVariant<br/>URL ?variant= param]
|
||||
CORE --> ROOM[getRoom<br/>URL path / input field]
|
||||
CORE --> AUDIO[startAudioContext<br/>48kHz AudioContext]
|
||||
CORE --> CAP[connectCapture<br/>Mic to AudioWorklet]
|
||||
CORE --> PLAY[connectPlayback<br/>AudioWorklet to speaker]
|
||||
CORE --> UI[initUI<br/>Buttons, PTT, level meter]
|
||||
CORE --> STATUS[updateStatus / updateStats<br/>DOM updates]
|
||||
|
||||
CAP --> WORKLET[AudioWorklet<br/>or ScriptProcessor fallback]
|
||||
PLAY --> WORKLET
|
||||
|
||||
style CORE fill:#6c5ce7
|
||||
style WORKLET fill:#00b894
|
||||
```
|
||||
|
||||
### AudioWorklet Processors (audio-processor.js)
|
||||
|
||||
```
|
||||
WZPCaptureProcessor:
|
||||
AudioWorklet process() → 128 samples per call
|
||||
Buffer internally until 960 samples (20ms frame)
|
||||
Convert Float32 → Int16
|
||||
postMessage(ArrayBuffer) to main thread
|
||||
|
||||
WZPPlaybackProcessor:
|
||||
Receive Int16 PCM via port.onmessage
|
||||
Convert Int16 → Float32
|
||||
Write to ring buffer (max ~120ms / 6 frames)
|
||||
process() reads from ring buffer → output
|
||||
```
|
||||
|
||||
### index.html Boot Sequence
|
||||
|
||||
```mermaid
|
||||
sequenceDiagram
|
||||
participant PAGE as index.html
|
||||
participant CORE as wzp-core.js
|
||||
participant VAR as Variant JS
|
||||
|
||||
PAGE->>CORE: Load (static script tag)
|
||||
CORE->>CORE: detectVariant() from URL
|
||||
PAGE->>VAR: Dynamic script load (wzp-pure/hybrid/full.js)
|
||||
VAR-->>PAGE: wzpBoot() called on load
|
||||
|
||||
PAGE->>CORE: initUI(callbacks)
|
||||
Note over PAGE: User clicks Connect
|
||||
|
||||
PAGE->>CORE: startAudioContext()
|
||||
PAGE->>VAR: new WZP*Client(options)
|
||||
PAGE->>VAR: client.connect()
|
||||
PAGE->>CORE: connectCapture(audioCtx, onFrame)
|
||||
PAGE->>CORE: connectPlayback(audioCtx)
|
||||
|
||||
loop Audio flowing
|
||||
CORE->>VAR: client.sendAudio(pcmBuffer)
|
||||
VAR->>CORE: onAudio(Int16Array) callback
|
||||
end
|
||||
```
|
||||
|
||||
## Deployment
|
||||
|
||||
### Behind Caddy (recommended)
|
||||
|
||||
```
|
||||
# Caddyfile
|
||||
wzp.example.com {
|
||||
reverse_proxy 127.0.0.1:8080
|
||||
}
|
||||
```
|
||||
|
||||
```bash
|
||||
# Relay
|
||||
./wzp-relay --listen 0.0.0.0:4433
|
||||
|
||||
# Web bridge (no --tls, Caddy handles SSL)
|
||||
./wzp-web --port 8080 --relay 127.0.0.1:4433
|
||||
```
|
||||
|
||||
### Direct TLS
|
||||
|
||||
```bash
|
||||
./wzp-web --port 443 --relay 127.0.0.1:4433 --tls \
|
||||
--cert /etc/letsencrypt/live/domain/fullchain.pem \
|
||||
--key /etc/letsencrypt/live/domain/privkey.pem
|
||||
```
|
||||
|
||||
### URL Patterns
|
||||
|
||||
```
|
||||
https://domain/room-name → Pure (default)
|
||||
https://domain/room-name?variant=pure → Pure JS
|
||||
https://domain/room-name?variant=hybrid → Hybrid (JS + WASM FEC)
|
||||
https://domain/room-name?variant=full → Full WASM (needs HTTP/3 relay)
|
||||
```
|
||||
|
||||
## Future Work
|
||||
|
||||
1. **Relay HTTP/3 support** (h3-quinn) — unlocks Full variant for production
|
||||
2. **Browser Opus encoding** — AudioEncoder API or Opus WASM, removes bridge dependency for Hybrid
|
||||
3. **Ed25519 signatures in WASM** — full identity verification in Full variant
|
||||
4. **Adaptive quality in browser** — monitor RTT/loss, switch profiles
|
||||
5. **WebTransport fallback to WebSocket** — Full variant auto-degrades if WebTransport unavailable
|
||||
@@ -1,41 +0,0 @@
|
||||
# WarzonePhone Android Client
|
||||
|
||||
The WZP Android client is a native VoIP application built with Kotlin/Jetpack Compose on top of a Rust audio engine. It connects to WZP relay servers over QUIC, providing encrypted voice calls with adaptive quality, forward error correction, and acoustic echo cancellation.
|
||||
|
||||
## Quick Start
|
||||
|
||||
1. **Build**: `cd android && ./gradlew assembleRelease` (requires NDK 26.1, cargo-ndk)
|
||||
2. **Install**: `adb install app/build/outputs/apk/release/app-release.apk`
|
||||
3. **Run**: Open "WZ Phone", tap **CALL** to connect to the hardcoded relay
|
||||
4. **Relay**: Must be running at the configured address (default `172.16.81.125:4433`)
|
||||
|
||||
## Current State (April 2025)
|
||||
|
||||
| Feature | Status |
|
||||
|---------|--------|
|
||||
| QUIC transport to relay | Working |
|
||||
| Crypto handshake (X25519 + Ed25519) | Working |
|
||||
| Opus 24k encoding/decoding | Working |
|
||||
| Oboe audio I/O (48kHz mono) | Working |
|
||||
| AEC / AGC signal processing | Working |
|
||||
| RaptorQ FEC | Wired (repair symbols not sent yet) |
|
||||
| Jitter buffer | Working |
|
||||
| Adaptive quality switching | Codec-ready, not network-driven yet |
|
||||
| Authentication (featherChat) | Skipped (relay has no --auth-url) |
|
||||
| Media encryption (ChaCha20-Poly1305) | Session derived but not applied to packets |
|
||||
| Foreground service / wake locks | Implemented, not started from UI |
|
||||
|
||||
## Documentation Index
|
||||
|
||||
- [Architecture](architecture.md) - System design, data flow diagrams, thread model
|
||||
- [Build Guide](build-guide.md) - Build environment setup, dependencies, signing
|
||||
- [Debugging](debugging.md) - Crash diagnosis, logcat filters, common issues
|
||||
- [Maintenance](maintenance.md) - Code map, dependency management, upgrade paths
|
||||
- [Roadmap](roadmap.md) - Planned work and known gaps
|
||||
|
||||
## Key Design Decisions
|
||||
|
||||
- **Rust native engine**: All audio processing, codecs, FEC, crypto, and networking run in Rust. Kotlin is UI-only.
|
||||
- **Lock-free audio**: SPSC ring buffers with atomic ordering between Oboe C++ callbacks and the Rust codec thread. No mutexes in the audio path.
|
||||
- **cargo-ndk**: The native library (`libwzp_android.so`) is cross-compiled for `arm64-v8a` using cargo-ndk, invoked automatically by Gradle's `cargoNdkBuild` task.
|
||||
- **Single-activity Compose**: One `CallActivity` hosts all UI via Jetpack Compose with `CallViewModel` as the state holder.
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user