android(audio): Speaker button toggles earpiece↔speaker via JNI (WIP, untested)
Some checks failed
Mirror to GitHub / mirror (push) Failing after 39s
Build Release Binaries / build-amd64 (push) Has been cancelled

Build 9e37201 confirmed on-device that Usage::VoiceCommunication +
MODE_IN_COMMUNICATION + speakerphoneOn=false routes Oboe playout to the
handset earpiece and the callback drains the ring correctly. Next step:
let the user flip speakerphoneOn at runtime so the existing Speaker
button actually switches audio routing instead of just gating writes.

- Cargo.toml (android target): pull in `jni = 0.21` and
  `ndk-context = 0.1`. Both are already transitively in the lockfile
  via Tauri/Wry, so this just promotes them to direct deps.
- desktop/src-tauri/src/android_audio.rs: new module. Grabs the JavaVM +
  current Activity from `ndk_context::android_context()`, attaches a
  JNI thread, calls `activity.getSystemService("audio")` to get the
  AudioManager, and exposes `set_speakerphone(bool)` +
  `is_speakerphone_on()` helpers that call the AudioManager method of
  the same name. All gated behind `#[cfg(target_os = "android")]`.
- lib.rs: adds `mod android_audio;` (android only), two new Tauri
  commands `set_speakerphone(on)` and `is_speakerphone_on()` — desktop
  gets no-op stubs so the same frontend invoke() works everywhere.
  Both registered in the invoke_handler.
- desktop/src/main.ts: the Speaker button (previously toggled the
  playout-write gate via `toggle_speaker`) now calls `set_speakerphone`
  and reads back the new routing state. Labels switched from
  "Spk" / "Spk Off" to "Earpiece" / "Speaker" so users can't be
  confused into thinking clicking turns audio off. pollStatus no longer
  clobbers the spkBtn label based on engine spk_muted, since the two
  concepts are now decoupled.

WIP because this has NOT been built or tested yet — committing at night
to save the work. Tomorrow: build #50 with this change, smoke-test the
Handset↔Speaker toggle, then move on to call history + last-contacts UI
and the Speaker-button mute bug on the other phone.
This commit is contained in:
Siavash Sameni
2026-04-09 22:00:34 +04:00
parent 9e37201198
commit 0178cbd91d
4 changed files with 161 additions and 3 deletions

View File

@@ -68,6 +68,12 @@ wzp-client = { path = "../../crates/wzp-client", default-features = false }
# any C/C++ static archives that would otherwise leak bionic's internal
# pthread_create into our cdylib and trigger the __init_tcb crash.
libloading = "0.8"
# jni + ndk-context: called from android_audio.rs to invoke
# AudioManager.setSpeakerphoneOn on the JVM side at runtime, so the
# Oboe playout stream (opened with Usage::VoiceCommunication) can route
# between earpiece and loud speaker without restarting.
jni = "0.21"
ndk-context = "0.1"
# Platform-specific
[target.'cfg(target_os = "macos")'.dependencies]

View File

@@ -0,0 +1,98 @@
//! Runtime bridge to Android's `AudioManager` for in-call audio routing.
//!
//! We own a quinn+Oboe VoIP pipeline entirely from Rust, but routing the
//! playout stream between earpiece / loudspeaker / Bluetooth headset has to
//! happen at the JVM level because those toggles are AudioManager-only.
//! This module uses the global JavaVM handle that `ndk_context` exposes
//! (populated by Tauri's mobile runtime) + the `jni` crate to reach into
//! the Android framework without needing a Tauri plugin.
//!
//! All callers must be inside an Android target (`#[cfg(target_os = "android")]`).
#![cfg(target_os = "android")]
use jni::objects::{JObject, JString, JValue};
use jni::JavaVM;
/// Resolve the process-wide JavaVM and the current Activity from the
/// `ndk_context` globals that Tauri's mobile runtime populates at process
/// startup. Fails with a descriptive message if either pointer is null.
fn jvm_and_activity() -> Result<(JavaVM, JObject<'static>), String> {
    let ctx = ndk_context::android_context();

    let raw_vm = ctx.vm().cast();
    if raw_vm.is_null() {
        return Err("ndk_context: JavaVM pointer is null".into());
    }
    // SAFETY: raw_vm was checked non-null above and originates from
    // ndk_context, which holds a valid JavaVM for the process lifetime.
    let vm = match unsafe { JavaVM::from_raw(raw_vm) } {
        Ok(vm) => vm,
        Err(e) => return Err(format!("JavaVM::from_raw: {e}")),
    };

    let raw_activity = ctx.context().cast();
    if raw_activity.is_null() {
        return Err("ndk_context: activity pointer is null".into());
    }
    // SAFETY: ndk_context keeps the Activity reference alive for the whole
    // process, so wrapping it as a `'static` JObject is sound here.
    let activity: JObject<'static> = unsafe { JObject::from_raw(raw_activity) };

    Ok((vm, activity))
}
/// Look up the framework `AudioManager` by invoking
/// `activity.getSystemService("audio")` through JNI.
fn audio_manager<'local>(
    env: &mut jni::AttachGuard<'local>,
    activity: &JObject<'local>,
) -> Result<JObject<'local>, String> {
    // The service name has to cross the JNI boundary as a Java String.
    let svc_name: JString<'local> = env
        .new_string("audio")
        .map_err(|e| format!("new_string(audio): {e}"))?;

    let call = env.call_method(
        activity,
        "getSystemService",
        "(Ljava/lang/String;)Ljava/lang/Object;",
        &[JValue::Object(&svc_name)],
    );
    let am = call
        .and_then(|v| v.l())
        .map_err(|e| format!("getSystemService(audio): {e}"))?;

    // getSystemService returns null for unknown services; surface that
    // as an error rather than handing back a null JObject.
    if am.is_null() {
        Err("getSystemService returned null".into())
    } else {
        Ok(am)
    }
}
/// Switch between loud speaker (`true`) and earpiece/handset (`false`).
///
/// Calls `AudioManager.setSpeakerphoneOn(on)` on the JVM. Requires that
/// the audio mode is already `MODE_IN_COMMUNICATION` — MainActivity.kt
/// sets this at startup, so by the time a call is up this is always true.
///
/// # Errors
/// Returns a descriptive `String` if the JavaVM/Activity cannot be
/// resolved, the thread cannot attach, or the JNI call itself fails.
pub fn set_speakerphone(on: bool) -> Result<(), String> {
    let (vm, activity) = jvm_and_activity()?;
    // Attach this Rust thread to the JVM for the duration of the call;
    // the AttachGuard detaches on drop.
    let mut env = vm
        .attach_current_thread()
        .map_err(|e| format!("attach_current_thread: {e}"))?;
    let am = audio_manager(&mut env, &activity)?;
    env.call_method(
        &am,
        "setSpeakerphoneOn",
        "(Z)V",
        // JNI booleans are u8 (JNI_TRUE = 1, JNI_FALSE = 0); u8::from is
        // the idiomatic bool→jboolean conversion (clippy: bool_to_int_with_if).
        &[JValue::Bool(u8::from(on))],
    )
    .map_err(|e| format!("setSpeakerphoneOn({on}): {e}"))?;
    tracing::info!(on, "AudioManager.setSpeakerphoneOn");
    Ok(())
}
/// Report whether playout is currently routed to the loud speaker.
/// `false` covers every other route (earpiece, BT headset, wired headset).
pub fn is_speakerphone_on() -> Result<bool, String> {
    let (vm, activity) = jvm_and_activity()?;
    let mut env = vm
        .attach_current_thread()
        .map_err(|e| format!("attach_current_thread: {e}"))?;
    let am = audio_manager(&mut env, &activity)?;
    // isSpeakerphoneOn(): Z — unwrap the JNI return as a Rust bool.
    env.call_method(&am, "isSpeakerphoneOn", "()Z", &[])
        .and_then(|v| v.z())
        .map_err(|e| format!("isSpeakerphoneOn: {e}"))
}

View File

@@ -17,6 +17,10 @@ mod engine;
#[cfg(target_os = "android")]
mod wzp_native;
// Android AudioManager bridge (routing earpiece / speaker / BT).
#[cfg(target_os = "android")]
mod android_audio;
// CallEngine has a unified impl on both targets now — the Android branch of
// CallEngine::start() routes audio through the standalone wzp-native cdylib
// (loaded via the wzp_native module below), the desktop branch uses CPAL.
@@ -346,6 +350,37 @@ async fn get_status(state: tauri::State<'_, Arc<AppState>>) -> Result<CallStatus
}
}
// ─── Audio routing (Android-specific, no-op on desktop) ─────────────────────
/// Switch the call audio between earpiece (`on=false`) and loudspeaker
/// (`on=true`). On Android this calls AudioManager.setSpeakerphoneOn via
/// JNI; on desktop it's a no-op that always succeeds.
#[tauri::command]
#[allow(unused_variables)] // `on` is unused in the desktop build
async fn set_speakerphone(on: bool) -> Result<(), String> {
    #[cfg(target_os = "android")]
    let outcome = android_audio::set_speakerphone(on);
    #[cfg(not(target_os = "android"))]
    let outcome = Ok(());
    outcome
}
/// Query whether the call is currently routed to the loudspeaker.
/// Always reports `false` on desktop, where there is no routing to toggle.
#[tauri::command]
async fn is_speakerphone_on() -> Result<bool, String> {
    #[cfg(target_os = "android")]
    return android_audio::is_speakerphone_on();
    #[cfg(not(target_os = "android"))]
    Ok(false)
}
// ─── Signaling commands — platform independent ───────────────────────────────
struct SignalState {
@@ -548,6 +583,7 @@ pub fn run() {
ping_relay, get_identity, get_app_info,
connect, disconnect, toggle_mic, toggle_speaker, get_status,
register_signal, place_call, answer_call, get_signal_status,
set_speakerphone, is_speakerphone_on,
])
.run(tauri::generate_context!())
.expect("error while running WarzonePhone");

View File

@@ -510,8 +510,25 @@ function showConnectScreen() {
micBtn.addEventListener("click", async () => {
try { const m: boolean = await invoke("toggle_mic"); micBtn.classList.toggle("muted", m); micIcon.textContent = m ? "Mic Off" : "Mic"; } catch {}
});
// Speaker routing (Android) — toggles AudioManager.setSpeakerphoneOn so the
// same Oboe VoiceCommunication stream swaps between earpiece and
// loudspeaker without restarting. Desktop callers get a no-op command so
// the same UI works everywhere.
let speakerphoneOn = false;
function updateSpkLabel() {
spkBtn.classList.toggle("muted", !speakerphoneOn);
spkIcon.textContent = speakerphoneOn ? "Speaker" : "Earpiece";
}
spkBtn.addEventListener("click", async () => {
try { const m: boolean = await invoke("toggle_speaker"); spkBtn.classList.toggle("muted", m); spkIcon.textContent = m ? "Spk Off" : "Spk"; } catch {}
const next = !speakerphoneOn;
try {
await invoke("set_speakerphone", { on: next });
speakerphoneOn = next;
updateSpkLabel();
} catch (e) {
console.error("set_speakerphone failed:", e);
}
});
hangupBtn.addEventListener("click", async () => {
userDisconnected = true;
@@ -571,8 +588,9 @@ async function pollStatus() {
micBtn.classList.toggle("muted", st.mic_muted);
micIcon.textContent = st.mic_muted ? "Mic Off" : "Mic";
spkBtn.classList.toggle("muted", st.spk_muted);
spkIcon.textContent = st.spk_muted ? "Spk Off" : "Spk";
// NB: spkBtn label is driven by the Android audio routing state
// (speakerphoneOn / updateSpkLabel), not by the engine's spk_muted.
// Skip that here so pollStatus doesn't clobber the routing UI.
callTimer.textContent = formatDuration(st.call_duration_secs);
const rms = st.audio_level;