;
rotationQuarterTurns: number;
screenAspect: string;
+ screenClassName?: string;
selectedSimulator: SimulatorMetadata | null;
shellStyle: CSSProperties | null;
streamBackend: string;
@@ -112,6 +113,7 @@ export function SimulatorViewport({
outerCanvasRef,
rotationQuarterTurns,
screenAspect,
+ screenClassName,
selectedSimulator,
shellStyle,
streamBackend,
@@ -197,6 +199,7 @@ export function SimulatorViewport({
onStartPanning={onStartPanning}
rotationQuarterTurns={rotationQuarterTurns}
screenAspect={screenAspect}
+ screenClassName={screenClassName}
shellStyle={shellStyle}
simulatorName={selectedSimulator.name}
streamBackend={streamBackend}
diff --git a/client/src/styles/components.css b/client/src/styles/components.css
index 9e69f76b..2bfc5b15 100644
--- a/client/src/styles/components.css
+++ b/client/src/styles/components.css
@@ -636,6 +636,12 @@
color: color-mix(in srgb, #d7ba7d 82%, var(--text));
}
+.hierarchy-source-pill.source-android-uiautomator {
+ border-color: color-mix(in srgb, #7fd97f 55%, var(--border));
+ background: color-mix(in srgb, #7fd97f 13%, transparent);
+ color: color-mix(in srgb, #7fd97f 82%, var(--text));
+}
+
.hierarchy-source-pill.active {
gap: 5px;
padding-inline: 7px 8px;
@@ -1491,6 +1497,11 @@
border-radius: 0;
}
+.device-screen.android-screen {
+ background: transparent;
+ border-radius: 10px;
+}
+
.stream-canvas {
position: absolute;
inset: 0;
diff --git a/server/src/android.rs b/server/src/android.rs
index 3e6d66e5..c4687476 100644
--- a/server/src/android.rs
+++ b/server/src/android.rs
@@ -1086,6 +1086,8 @@ fn android_node_value(node: roxmltree::Node<'_, '_>, depth: usize, max_depth: us
let text = node.attribute("text").unwrap_or("");
let content_desc = node.attribute("content-desc").unwrap_or("");
let label = if !text.is_empty() { text } else { content_desc };
+ let resource_id = node.attribute("resource-id").unwrap_or("");
+ let role = android_role(node, short_class);
let mut children = Vec::new();
if depth < max_depth {
for child in node.children().filter(|child| child.has_tag_name("node")) {
@@ -1094,15 +1096,27 @@ fn android_node_value(node: roxmltree::Node<'_, '_>, depth: usize, max_depth: us
}
json!({
"source": "android-uiautomator",
- "type": map_android_class(short_class),
- "role": map_android_class(short_class),
+ "type": android_type(short_class, class_name),
+ "role": role,
"className": class_name,
- "AXIdentifier": node.attribute("resource-id").unwrap_or(""),
+ "AXIdentifier": resource_id,
"AXLabel": label,
"AXValue": text,
+ "androidClass": class_name,
+ "androidPackage": node.attribute("package").unwrap_or(""),
+ "androidResourceId": resource_id,
+ "checkable": bool_attr(node, "checkable"),
+ "checked": bool_attr(node, "checked"),
+ "clickable": bool_attr(node, "clickable"),
+ "focusable": bool_attr(node, "focusable"),
+ "focused": bool_attr(node, "focused"),
+ "longClickable": bool_attr(node, "long-clickable"),
+ "password": bool_attr(node, "password"),
+ "scrollable": bool_attr(node, "scrollable"),
+ "selected": bool_attr(node, "selected"),
"text": text,
"title": label,
- "enabled": node.attribute("enabled") == Some("true"),
+ "enabled": bool_attr(node, "enabled"),
"isHidden": node.attribute("visible-to-user") == Some("false"),
"frame": frame_value(bounds.0, bounds.1, bounds.2, bounds.3),
"frameInScreen": frame_value(bounds.0, bounds.1, bounds.2, bounds.3),
@@ -1132,7 +1146,26 @@ fn frame_value(x: f64, y: f64, width: f64, height: f64) -> Value {
json!({ "x": x, "y": y, "width": width, "height": height })
}
-fn map_android_class(class_name: &str) -> &'static str {
+fn bool_attr(node: roxmltree::Node<'_, '_>, name: &str) -> bool {
+ node.attribute(name) == Some("true")
+}
+
+fn android_type(short_class: &str, class_name: &str) -> String {
+ let fallback = if short_class.is_empty() {
+ class_name
+ } else {
+ short_class
+ };
+ if fallback.is_empty() {
+ "View".to_owned()
+ } else {
+ fallback.to_owned()
+ }
+}
+
+fn android_role(node: roxmltree::Node<'_, '_>, class_name: &str) -> &'static str {
+ let clickable = bool_attr(node, "clickable");
+ let scrollable = bool_attr(node, "scrollable");
match class_name {
"Button" | "ImageButton" | "FloatingActionButton" => "button",
"EditText" => "textField",
@@ -1142,10 +1175,18 @@ fn map_android_class(class_name: &str) -> &'static str {
"RadioButton" => "radioButton",
"Switch" | "ToggleButton" => "switch",
"SeekBar" => "slider",
- "RecyclerView" | "ListView" => "table",
- "ScrollView" | "HorizontalScrollView" | "NestedScrollView" => "scrollView",
+ "RecyclerView" | "ListView" | "GridView" => "collection",
+ "ScrollView" | "HorizontalScrollView" | "NestedScrollView" | "ViewPager" => "scrollView",
"WebView" => "webView",
- _ => "other",
+ "ProgressBar" => "progressIndicator",
+ "Spinner" => "popUpButton",
+ "TabWidget" | "TabLayout" => "tabGroup",
+ "Toolbar" | "ActionBar" => "toolbar",
+ "ViewGroup" | "FrameLayout" | "LinearLayout" | "RelativeLayout" | "ConstraintLayout"
+ | "CoordinatorLayout" | "DrawerLayout" => "container",
+ _ if scrollable => "scrollView",
+ _ if clickable => "button",
+ _ => "view",
}
}
@@ -1180,6 +1221,41 @@ fn shell_quote(value: &str) -> String {
format!("'{}'", value.replace('\'', "'\\''"))
}
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn android_nodes_keep_class_type_and_semantic_role() {
+ let document = roxmltree::Document::parse(
+            r#"<node class="android.view.ViewGroup" package="com.example" resource-id="com.example:id/hotseat" text="" content-desc="" enabled="true" visible-to-user="true" bounds="[0,0][1080,1920]"/>"#,
+ )
+ .unwrap();
+
+ let value = android_node_value(document.root_element(), 0, 10);
+
+ assert_eq!(value["type"], "ViewGroup");
+ assert_eq!(value["role"], "container");
+ assert_eq!(value["AXIdentifier"], "com.example:id/hotseat");
+ assert_eq!(value["androidClass"], "android.view.ViewGroup");
+ assert_eq!(value["androidResourceId"], "com.example:id/hotseat");
+ assert_eq!(value["enabled"], true);
+ }
+
+ #[test]
+ fn clickable_unknown_android_nodes_are_buttons() {
+ let document = roxmltree::Document::parse(
+            r#"<node class="com.example.ui.CustomTile" clickable="true" enabled="true" visible-to-user="true" bounds="[0,64][540,256]"/>"#,
+ )
+ .unwrap();
+
+ let value = android_node_value(document.root_element(), 0, 10);
+
+ assert_eq!(value["type"], "CustomTile");
+ assert_eq!(value["role"], "button");
+ }
+}
+
#[allow(dead_code)]
fn _dedupe(values: impl IntoIterator
- ) -> Vec {
let mut seen = HashSet::new();
diff --git a/server/src/transport/webrtc.rs b/server/src/transport/webrtc.rs
index 7a6ce51a..f1394fc9 100644
--- a/server/src/transport/webrtc.rs
+++ b/server/src/transport/webrtc.rs
@@ -58,7 +58,7 @@ const WEBRTC_FULL_ICE_GATHER_TIMEOUT: Duration = Duration::from_secs(3);
const WEBRTC_RTP_OUTBOUND_MTU: usize = 1200;
const WEBRTC_PEER_DISCONNECTED_TIMEOUT: Duration = Duration::from_secs(12);
const ANDROID_WEBRTC_FRAME_BROADCAST_CAPACITY: usize = 128;
-const DEFAULT_ANDROID_WEBRTC_MAX_EDGE: u32 = 1280;
+const DEFAULT_ANDROID_WEBRTC_MAX_EDGE: u32 = 960;
const DEFAULT_ANDROID_WEBRTC_FPS: u64 = 60;
const MAX_ANDROID_WEBRTC_FPS: u64 = 120;
static WEBRTC_MEDIA_STREAMS: OnceLock>>> =
@@ -1101,7 +1101,7 @@ impl AndroidWebRtcSource {
) -> Result {
let mut frame_stream = bridge.grpc_frame_stream(&udid, Some(max_edge)).await?;
let (sender, _) = broadcast::channel(ANDROID_WEBRTC_FRAME_BROADCAST_CAPACITY);
- let (shutdown_tx, mut shutdown_rx) = broadcast::channel(1);
+ let (shutdown_tx, _) = broadcast::channel(1);
let inner = Arc::new(AndroidWebRtcSourceInner {
udid: udid.clone(),
encoder_handle: AtomicUsize::new(0),
@@ -1135,34 +1135,52 @@ impl AndroidWebRtcSource {
.store(user_data as usize, Ordering::Release);
let source = Self { inner };
- let task_inner = Arc::downgrade(&source.inner);
+ let latest_frame = Arc::new(Mutex::new(None::));
+ let reader_inner = Arc::downgrade(&source.inner);
+ let reader_latest_frame = latest_frame.clone();
+ let mut reader_shutdown_rx = source.inner.shutdown_tx.subscribe();
tokio::spawn(async move {
- let min_frame_gap = android_webrtc_frame_interval();
- let mut last_encoded_at = Instant::now() - min_frame_gap;
loop {
tokio::select! {
- _ = shutdown_rx.recv() => break,
+ _ = reader_shutdown_rx.recv() => break,
frame = frame_stream.next_frame() => {
- let frame = match frame {
- Ok(Some(frame)) => frame,
+ match frame {
+ Ok(Some(frame)) => {
+ *reader_latest_frame.lock().unwrap() = Some(frame);
+ }
Ok(None) => break,
Err(error) => {
- let udid = task_inner
+ let udid = reader_inner
.upgrade()
.map(|inner| inner.udid.clone())
.unwrap_or_else(|| "android".to_owned());
warn!("Android WebRTC raw frame stream failed for {udid}: {error}");
break;
}
- };
- let Some(inner) = task_inner.upgrade() else {
+ }
+ }
+ }
+ }
+ });
+
+ let encoder_inner = Arc::downgrade(&source.inner);
+ let encoder_latest_frame = latest_frame;
+ let mut encoder_shutdown_rx = source.inner.shutdown_tx.subscribe();
+ tokio::spawn(async move {
+ let min_frame_gap = android_webrtc_frame_interval();
+ let mut ticker = time::interval(min_frame_gap);
+ ticker.set_missed_tick_behavior(time::MissedTickBehavior::Skip);
+ loop {
+ tokio::select! {
+ _ = encoder_shutdown_rx.recv() => break,
+ _ = ticker.tick() => {
+ let Some(inner) = encoder_inner.upgrade() else {
break;
};
- let now = Instant::now();
- if now.duration_since(last_encoded_at) < min_frame_gap {
+ let frame = encoder_latest_frame.lock().unwrap().take();
+ let Some(frame) = frame else {
continue;
- }
- last_encoded_at = now;
+ };
let handle = inner.encoder_handle.load(Ordering::Acquire);
let udid = inner.udid.clone();
let encode_result = task::spawn_blocking(move || {
@@ -1363,11 +1381,16 @@ unsafe fn take_native_error(raw: *mut i8) -> Option {
}
fn android_webrtc_max_edge() -> u32 {
- std::env::var("SIMDECK_ANDROID_WEBRTC_MAX_EDGE")
+ let android_cap = std::env::var("SIMDECK_ANDROID_WEBRTC_MAX_EDGE")
.ok()
.and_then(|value| value.parse::<u32>().ok())
.unwrap_or(DEFAULT_ANDROID_WEBRTC_MAX_EDGE)
- .clamp(360, 2400)
+ .clamp(360, 2400);
+ std::env::var("SIMDECK_REALTIME_MAX_EDGE")
+ .ok()
.and_then(|value| value.parse::<u32>().ok())
+ .map(|value| value.clamp(360, 2400).min(android_cap))
+ .unwrap_or(android_cap)
}
fn android_webrtc_frame_interval() -> Duration {
From 6055435478415e998f71815c0fa6f296225eb4c0 Mon Sep 17 00:00:00 2001
From: DjDeveloperr
Date: Sat, 9 May 2026 15:36:08 -0400
Subject: [PATCH 6/8] Fix Android WebRTC stream startup
---
client/src/features/stream/streamTypes.ts | 1 +
.../stream/streamWorkerClient.test.ts | 28 +++++++++++++++++++
.../src/features/stream/streamWorkerClient.ts | 21 ++++++++++++--
client/src/features/stream/useLiveStream.ts | 3 ++
server/src/transport/webrtc.rs | 26 +++++++++++------
5 files changed, 68 insertions(+), 11 deletions(-)
create mode 100644 client/src/features/stream/streamWorkerClient.test.ts
diff --git a/client/src/features/stream/streamTypes.ts b/client/src/features/stream/streamTypes.ts
index f55eb58c..4e9b6099 100644
--- a/client/src/features/stream/streamTypes.ts
+++ b/client/src/features/stream/streamTypes.ts
@@ -2,6 +2,7 @@ import type { Size } from "../viewport/types";
export interface StreamConnectTarget {
clientId?: string;
+ platform?: string;
remote?: boolean;
streamConfig?: StreamConfig;
transport?: StreamTransport;
diff --git a/client/src/features/stream/streamWorkerClient.test.ts b/client/src/features/stream/streamWorkerClient.test.ts
new file mode 100644
index 00000000..dc079f4b
--- /dev/null
+++ b/client/src/features/stream/streamWorkerClient.test.ts
@@ -0,0 +1,28 @@
+import { describe, expect, it } from "vitest";
+
+import {
+ buildStreamTarget,
+ initialStreamBackend,
+ preferredStreamBackend,
+} from "./streamWorkerClient";
+
+describe("streamWorkerClient", () => {
+ it("forces Android emulator streams onto WebRTC even when H264 is requested", () => {
+ const target = buildStreamTarget("android:emulator-5554", {
+ platform: "android-emulator",
+ transport: "h264",
+ });
+
+ expect(preferredStreamBackend(target)).toBe("webrtc");
+ expect(initialStreamBackend(target)).toBe("webrtc");
+ });
+
+ it("treats Android UDID prefixes as WebRTC-only stream targets", () => {
+ const target = buildStreamTarget("android:Pixel_8", {
+ transport: "h264",
+ });
+
+ expect(preferredStreamBackend(target)).toBe("webrtc");
+ expect(initialStreamBackend(target)).toBe("webrtc");
+ });
+});
diff --git a/client/src/features/stream/streamWorkerClient.ts b/client/src/features/stream/streamWorkerClient.ts
index e5397753..2212fdf2 100644
--- a/client/src/features/stream/streamWorkerClient.ts
+++ b/client/src/features/stream/streamWorkerClient.ts
@@ -224,6 +224,7 @@ export function buildStreamTarget(
udid: string,
options: {
clientId?: string;
+ platform?: string;
remote?: boolean;
streamConfig?: StreamConfig;
transport?: StreamTransport;
@@ -231,6 +232,7 @@ export function buildStreamTarget(
): StreamConnectTarget {
return {
clientId: options.clientId,
+ platform: options.platform,
remote: options.remote,
streamConfig: options.streamConfig,
transport: options.transport,
@@ -2725,9 +2727,12 @@ export class StreamWorkerClient {
};
}
-function preferredStreamBackend(
+export function preferredStreamBackend(
target?: StreamConnectTarget | null,
): "auto" | StreamBackend {
+ if (isAndroidStreamTarget(target)) {
+ return "webrtc";
+ }
const value =
target?.transport ??
new URLSearchParams(window.location.search).get("stream");
@@ -2737,7 +2742,12 @@ function preferredStreamBackend(
return value === "webrtc" ? "webrtc" : "auto";
}
-function initialStreamBackend(target: StreamConnectTarget): StreamBackend {
+export function initialStreamBackend(
+ target: StreamConnectTarget,
+): StreamBackend {
+ if (isAndroidStreamTarget(target)) {
+ return "webrtc";
+ }
const preferredBackend = preferredStreamBackend(target);
if (preferredBackend === "h264-ws") {
return canUseH264WebSocket() ? "h264-ws" : "webrtc";
@@ -2756,3 +2766,10 @@ function nextAutoFallbackBackend(
}
return null;
}
+
+function isAndroidStreamTarget(target?: StreamConnectTarget | null): boolean {
+ return (
+ target?.platform === "android-emulator" ||
+ Boolean(target?.udid.startsWith("android:"))
+ );
+}
diff --git a/client/src/features/stream/useLiveStream.ts b/client/src/features/stream/useLiveStream.ts
index 55f40118..60b980a2 100644
--- a/client/src/features/stream/useLiveStream.ts
+++ b/client/src/features/stream/useLiveStream.ts
@@ -301,6 +301,7 @@ export function useLiveStream({
const targetKey = [
simulator.udid,
+ simulator.platform ?? "",
remote ? "remote" : "local",
streamTransport,
].join("|");
@@ -316,6 +317,7 @@ export function useLiveStream({
workerClient.connect(
buildStreamTarget(simulator.udid, {
clientId: clientTelemetryIdRef.current,
+ platform: simulator.platform,
remote,
streamConfig,
transport: streamTransport,
@@ -324,6 +326,7 @@ export function useLiveStream({
}, [
canvasElement,
simulator?.isBooted,
+ simulator?.platform,
simulator?.udid,
paused,
remote,
diff --git a/server/src/transport/webrtc.rs b/server/src/transport/webrtc.rs
index f1394fc9..1ec7e608 100644
--- a/server/src/transport/webrtc.rs
+++ b/server/src/transport/webrtc.rs
@@ -1181,6 +1181,9 @@ impl AndroidWebRtcSource {
let Some(frame) = frame else {
continue;
};
+ if inner.latest_keyframe.read().unwrap().is_none() {
+ inner.request_keyframe();
+ }
let handle = inner.encoder_handle.load(Ordering::Acquire);
let udid = inner.udid.clone();
let encode_result = task::spawn_blocking(move || {
@@ -1238,15 +1241,7 @@ impl AndroidWebRtcSource {
fn request_refresh(&self) {}
fn request_keyframe(&self) {
- self.inner
- .metrics
- .keyframe_requests
- .fetch_add(1, Ordering::Relaxed);
- unsafe {
- ffi::xcw_native_h264_encoder_request_keyframe(
- self.inner.encoder_handle.load(Ordering::Acquire) as *mut c_void,
- );
- }
+ self.inner.request_keyframe();
}
}
@@ -1284,6 +1279,19 @@ unsafe extern "C" fn android_h264_encoder_frame_callback(
}
impl AndroidWebRtcSourceInner {
+ fn request_keyframe(&self) {
+ self.metrics
+ .keyframe_requests
+ .fetch_add(1, Ordering::Relaxed);
+ let encoder_handle = self.encoder_handle.load(Ordering::Acquire);
+ if encoder_handle == 0 {
+ return;
+ }
+ unsafe {
+ ffi::xcw_native_h264_encoder_request_keyframe(encoder_handle as *mut c_void);
+ }
+ }
+
fn handle_encoded_frame(&self, frame: &ffi::xcw_native_frame) {
let description = unsafe { copy_native_shared_bytes(frame.description) };
let Some(data) = (unsafe { copy_native_shared_bytes(frame.data) }) else {
From 475d423ff383b611406f65c3d6fe2550c985de11 Mon Sep 17 00:00:00 2001
From: DjDeveloperr
Date: Sat, 9 May 2026 15:52:19 -0400
Subject: [PATCH 7/8] Stabilize Android WebRTC readiness
---
.../src/features/stream/streamWorkerClient.ts | 30 +++++++++++++++++--
server/src/transport/webrtc.rs | 14 +++++++++
2 files changed, 41 insertions(+), 3 deletions(-)
diff --git a/client/src/features/stream/streamWorkerClient.ts b/client/src/features/stream/streamWorkerClient.ts
index 2212fdf2..4af8c856 100644
--- a/client/src/features/stream/streamWorkerClient.ts
+++ b/client/src/features/stream/streamWorkerClient.ts
@@ -353,6 +353,13 @@ interface WebCodecsVideoDecoderConstructor {
}>;
}
+interface WebRtcAnswerPayload extends RTCSessionDescriptionInit {
+ video?: {
+ height?: number;
+ width?: number;
+ };
+}
+
interface PendingVideoFrame {
frame: WebCodecsVideoFrame;
sequence: number | null;
@@ -1532,7 +1539,7 @@ class WebRtcStreamClient implements StreamClientBackend {
target,
localDescription,
);
- const answer = (await response.json()) as RTCSessionDescriptionInit;
+ const answer = (await response.json()) as WebRtcAnswerPayload;
if (generation !== this.connectGeneration) {
return;
}
@@ -1541,6 +1548,15 @@ class WebRtcStreamClient implements StreamClientBackend {
);
this.postDiagnostics(target, `${options.detailPrefix}-answer`);
await peerConnection.setRemoteDescription(answer);
+ if (
+ typeof answer.video?.width === "number" &&
+ typeof answer.video?.height === "number" &&
+ answer.video.width > 0 &&
+ answer.video.height > 0
+ ) {
+ this.syncCanvasSize(answer.video.width, answer.video.height);
+ this.reportVideoConfig(answer.video.width, answer.video.height);
+ }
}
destroy() {
@@ -1687,10 +1703,14 @@ class WebRtcStreamClient implements StreamClientBackend {
return;
}
const now = performance.now();
- const hasRenderedFrame = this.stats.renderedFrames > 0;
+ const hasMediaProgress =
+ this.hasRenderedFrame ||
+ this.stats.renderedFrames > 0 ||
+ this.stats.decodedFrames > 0 ||
+ this.stats.receivedPackets > 0;
const frameAgeMs =
this.lastVideoFrameAt > 0 ? now - this.lastVideoFrameAt : Infinity;
- if (!hasRenderedFrame) {
+ if (!hasMediaProgress) {
this.handleConnectionError(
target,
generation,
@@ -1699,6 +1719,10 @@ class WebRtcStreamClient implements StreamClientBackend {
);
return;
}
+ if (!this.hasRenderedFrame) {
+ this.scheduleFrameWatchdog(target, generation);
+ return;
+ }
if (frameAgeMs > WEBRTC_STALLED_FRAME_TIMEOUT_MS) {
this.sendControl({ snapshot: true, type: "streamControl" });
this.scheduleFrameWatchdog(target, generation);
diff --git a/server/src/transport/webrtc.rs b/server/src/transport/webrtc.rs
index 1ec7e608..f384557f 100644
--- a/server/src/transport/webrtc.rs
+++ b/server/src/transport/webrtc.rs
@@ -89,6 +89,14 @@ pub struct WebRtcAnswerPayload {
pub sdp: String,
#[serde(rename = "type")]
pub kind: String,
+ pub video: WebRtcVideoMetadata,
+}
+
+#[derive(Debug, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct WebRtcVideoMetadata {
+ pub width: u32,
+ pub height: u32,
}
#[derive(Debug, Clone, Serialize)]
@@ -289,6 +297,8 @@ pub async fn create_answer(
summarize_sdp_candidate_types(&local_description.sdp)
);
+ let first_frame_width = first_frame.width;
+ let first_frame_height = first_frame.height;
let (cancellation_token, cancellation) =
register_webrtc_media_stream(&udid, payload.client_id.as_deref(), true);
tokio::spawn(
@@ -309,6 +319,10 @@ pub async fn create_answer(
Ok(WebRtcAnswerPayload {
sdp: local_description.sdp,
kind: "answer".to_owned(),
+ video: WebRtcVideoMetadata {
+ width: first_frame_width,
+ height: first_frame_height,
+ },
})
}
From e67d1ff8b49646e66f4594aeaed1a86d401aa13a Mon Sep 17 00:00:00 2001
From: DjDeveloperr
Date: Sat, 9 May 2026 16:06:58 -0400
Subject: [PATCH 8/8] Use raw Android frame streaming
---
.../stream/streamWorkerClient.test.ts | 12 +-
.../src/features/stream/streamWorkerClient.ts | 408 +++++++++++++++++-
client/src/features/stream/useLiveStream.ts | 7 +-
server/src/android.rs | 11 +-
server/src/api/routes.rs | 6 +-
server/src/transport/webrtc.rs | 48 +--
6 files changed, 435 insertions(+), 57 deletions(-)
diff --git a/client/src/features/stream/streamWorkerClient.test.ts b/client/src/features/stream/streamWorkerClient.test.ts
index dc079f4b..9aac980a 100644
--- a/client/src/features/stream/streamWorkerClient.test.ts
+++ b/client/src/features/stream/streamWorkerClient.test.ts
@@ -7,22 +7,22 @@ import {
} from "./streamWorkerClient";
describe("streamWorkerClient", () => {
- it("forces Android emulator streams onto WebRTC even when H264 is requested", () => {
+ it("forces Android emulator streams onto the raw frame socket even when H264 is requested", () => {
const target = buildStreamTarget("android:emulator-5554", {
platform: "android-emulator",
transport: "h264",
});
- expect(preferredStreamBackend(target)).toBe("webrtc");
- expect(initialStreamBackend(target)).toBe("webrtc");
+ expect(preferredStreamBackend(target)).toBe("android-raw");
+ expect(initialStreamBackend(target)).toBe("android-raw");
});
- it("treats Android UDID prefixes as WebRTC-only stream targets", () => {
+ it("treats Android UDID prefixes as raw frame stream targets", () => {
const target = buildStreamTarget("android:Pixel_8", {
transport: "h264",
});
- expect(preferredStreamBackend(target)).toBe("webrtc");
- expect(initialStreamBackend(target)).toBe("webrtc");
+ expect(preferredStreamBackend(target)).toBe("android-raw");
+ expect(initialStreamBackend(target)).toBe("android-raw");
});
});
diff --git a/client/src/features/stream/streamWorkerClient.ts b/client/src/features/stream/streamWorkerClient.ts
index 4af8c856..05a6d3e0 100644
--- a/client/src/features/stream/streamWorkerClient.ts
+++ b/client/src/features/stream/streamWorkerClient.ts
@@ -33,6 +33,9 @@ const H264_WS_HEADER_BYTES = 40;
const H264_WS_MAGIC = 0x53444831;
const H264_WS_FLAG_KEYFRAME = 1 << 0;
const H264_WS_FLAG_CONFIG = 1 << 1;
+const ANDROID_RAW_HEADER_BYTES = 32;
+const ANDROID_RAW_MAGIC = 0x53444146;
+const ANDROID_RAW_FPS = 30;
const H264_WS_LOCAL_AUTO_PROFILES: StreamQualityPreset[] = [
"low",
"economy",
@@ -58,9 +61,10 @@ let activeWebRtcControlChannel: RTCDataChannel | null = null;
let activeWebRtcTelemetryChannel: RTCDataChannel | null = null;
let activeInputSocket: WebSocket | null = null;
let activeH264StreamSocket: WebSocket | null = null;
+let activeAndroidFrameSocket: WebSocket | null = null;
let activeStreamClient: StreamWorkerClient | null = null;
-export type StreamBackend = "h264-ws" | "webrtc";
+export type StreamBackend = "android-raw" | "h264-ws" | "webrtc";
export function sendWebRtcControlMessage(
encoded: string,
@@ -76,7 +80,8 @@ export function sendStreamClientStats(stats: unknown): boolean {
const encoded = JSON.stringify({ stats, type: "clientStats" });
return (
sendDataChannelMessage(activeWebRtcTelemetryChannel, encoded) ||
- sendWebSocketMessage(activeH264StreamSocket, encoded)
+ sendWebSocketMessage(activeH264StreamSocket, encoded) ||
+ sendWebSocketMessage(activeAndroidFrameSocket, encoded)
);
}
@@ -365,6 +370,14 @@ interface PendingVideoFrame {
sequence: number | null;
}
+interface AndroidRawFrame {
+ height: number;
+ pixels: Uint8ClampedArray;
+ sequence: number;
+ timestampUs: number;
+ width: number;
+}
+
function webCodecsConstructors(): {
EncodedVideoChunk?: WebCodecsEncodedVideoChunkConstructor;
VideoDecoder?: WebCodecsVideoDecoderConstructor;
@@ -1114,6 +1127,382 @@ class H264WebSocketStreamClient implements StreamClientBackend {
}
}
+class AndroidRawFrameStreamClient implements StreamClientBackend {
+ private canvas: HTMLCanvasElement | null = null;
+ private canvasContext: CanvasRenderingContext2D | null = null;
+ private connectGeneration = 0;
+ private frameWatchdogTimeout = 0;
+ private inputSocket: WebSocket | null = null;
+ private lastFrameAt = 0;
+ private reportedVideoHeight = 0;
+ private reportedVideoWidth = 0;
+ private shouldReconnect = false;
+ private stats: StreamStats = createEmptyStreamStats();
+ private streamSocket: WebSocket | null = null;
+ private streamTarget: StreamConnectTarget | null = null;
+ private stalledFrameWatchdogCount = 0;
+ private streamingReported = false;
+
+ constructor(
+ private readonly onMessage: (message: WorkerToMainMessage) => void,
+ ) {}
+
+ attachCanvas(canvasElement: HTMLCanvasElement) {
+ this.canvas = canvasElement;
+ this.canvasContext = canvasElement.getContext("2d", {
+ alpha: false,
+ desynchronized: true,
+ });
+ }
+
+ connect(target: StreamConnectTarget) {
+ this.disconnect();
+ if (!this.canvas) {
+ return;
+ }
+ const generation = ++this.connectGeneration;
+ this.shouldReconnect = true;
+ this.streamTarget = target;
+ this.streamingReported = false;
+ this.lastFrameAt = 0;
+ this.reportedVideoHeight = 0;
+ this.reportedVideoWidth = 0;
+ this.stalledFrameWatchdogCount = 0;
+ this.stats = createEmptyStreamStats();
+ this.stats.codec = "android-raw";
+ this.onMessage({ type: "stats", stats: { ...this.stats } });
+ this.onMessage({
+ type: "status",
+ status: {
+ detail: "Opening Android raw frame stream",
+ state: "connecting",
+ },
+ });
+
+ const socket = new WebSocket(
+ webSocketApiUrl(
+ `/api/simulators/${encodeURIComponent(target.udid)}/android/frames?max_fps=${ANDROID_RAW_FPS}`,
+ ),
+ );
+ socket.binaryType = "arraybuffer";
+ this.streamSocket = socket;
+ socket.addEventListener("open", () => {
+ if (socket === this.streamSocket) {
+ socket.binaryType = "arraybuffer";
+ activeAndroidFrameSocket = socket;
+ }
+ });
+ socket.addEventListener("message", (event) => {
+ if (socket !== this.streamSocket) {
+ return;
+ }
+ if (typeof event.data === "string") {
+ this.handleTextMessage(event.data);
+ return;
+ }
+ if (hasArrayBufferMethod(event.data)) {
+ void event.data.arrayBuffer().then((buffer) => {
+ if (socket === this.streamSocket) {
+ this.handleFrameMessage(buffer);
+ }
+ });
+ return;
+ }
+ this.handleFrameMessage(event.data);
+ });
+ socket.addEventListener("close", () => {
+ if (activeAndroidFrameSocket === socket) {
+ activeAndroidFrameSocket = null;
+ }
+ if (socket === this.streamSocket && this.shouldReconnect) {
+ this.handleError("Android raw frame stream closed.");
+ }
+ });
+ socket.addEventListener("error", () => {
+ if (socket === this.streamSocket) {
+ this.handleError("Android raw frame stream failed.");
+ }
+ });
+
+ this.connectInputSocket(target, generation);
+ this.scheduleFrameWatchdog(generation);
+ }
+
+ disconnect() {
+ this.shouldReconnect = false;
+ this.connectGeneration += 1;
+ this.clearFrameWatchdog();
+ this.streamSocket?.close();
+ if (activeAndroidFrameSocket === this.streamSocket) {
+ activeAndroidFrameSocket = null;
+ }
+ this.streamSocket = null;
+ this.inputSocket?.close();
+ if (activeInputSocket === this.inputSocket) {
+ activeInputSocket = null;
+ }
+ this.inputSocket = null;
+ this.streamTarget = null;
+ this.streamingReported = false;
+ this.lastFrameAt = 0;
+ this.reportedVideoHeight = 0;
+ this.reportedVideoWidth = 0;
+ this.stalledFrameWatchdogCount = 0;
+ }
+
+ destroy() {
+ this.disconnect();
+ }
+
+ clear() {
+ if (!this.canvas) {
+ return;
+ }
+ this.ensureCanvasContext()?.clearRect(
+ 0,
+ 0,
+ this.canvas.width,
+ this.canvas.height,
+ );
+ }
+
+ sendControl(payload: unknown): boolean {
+ if (
+ payload &&
+ typeof payload === "object" &&
+ "type" in payload &&
+ payload.type === "streamControl"
+ ) {
+ return true;
+ }
+ return sendWebSocketMessage(this.inputSocket, JSON.stringify(payload));
+ }
+
+ private connectInputSocket(target: StreamConnectTarget, generation: number) {
+ const socket = new WebSocket(
+ webSocketApiUrl(
+ `/api/simulators/${encodeURIComponent(target.udid)}/input`,
+ ),
+ );
+ this.inputSocket = socket;
+ activeInputSocket = socket;
+ socket.addEventListener("open", () => {
+ if (generation === this.connectGeneration) {
+ activeInputSocket = socket;
+ }
+ });
+ socket.addEventListener("close", () => {
+ if (activeInputSocket === socket) {
+ activeInputSocket = null;
+ }
+ });
+ socket.addEventListener("error", () => {
+ if (generation === this.connectGeneration) {
+ console.warn("Android input WebSocket failed.");
+ }
+ });
+ }
+
+ private handleTextMessage(text: string) {
+ try {
+ const message = JSON.parse(text) as { error?: string; type?: string };
+ if (message.error) {
+ this.handleError(message.error);
+ }
+ } catch {
+ // Text frames are diagnostics; binary frames carry pixels.
+ }
+ }
+
+ private handleFrameMessage(data: unknown) {
+ const frame = parseAndroidRawFrame(data);
+ if (!frame) {
+ this.stats.h264ParseFailures += 1;
+ this.onMessage({ type: "stats", stats: { ...this.stats } });
+ return;
+ }
+ this.paintFrame(frame);
+ }
+
+ private paintFrame(frame: AndroidRawFrame) {
+ const canvas = this.canvas;
+ if (!canvas) {
+ return;
+ }
+ this.syncCanvasSize(frame.width, frame.height);
+ const startedAt = performance.now();
+ const image = new ImageData(frame.pixels, frame.width, frame.height);
+ this.ensureCanvasContext()?.putImageData(image, 0, 0);
+ const finishedAt = performance.now();
+ const previousFrameAt = this.lastFrameAt;
+ this.lastFrameAt = finishedAt;
+ this.stalledFrameWatchdogCount = 0;
+ this.reportVideoConfig(frame.width, frame.height);
+ this.stats.codec = "android-raw";
+ this.stats.decodedFrames += 1;
+ this.stats.renderedFrames += 1;
+ this.stats.receivedPackets += 1;
+ this.stats.frameSequence = frame.sequence;
+ this.stats.width = frame.width;
+ this.stats.height = frame.height;
+ this.stats.latestRenderMs = finishedAt - startedAt;
+ this.stats.maxRenderMs = Math.max(
+ this.stats.maxRenderMs,
+ this.stats.latestRenderMs,
+ );
+ this.stats.averageRenderMs =
+ this.stats.averageRenderMs <= 0
+ ? this.stats.latestRenderMs
+ : this.stats.averageRenderMs * 0.9 + this.stats.latestRenderMs * 0.1;
+ this.stats.latestFrameGapMs =
+ previousFrameAt > 0 ? finishedAt - previousFrameAt : 0;
+ this.onMessage({ type: "stats", stats: { ...this.stats } });
+ if (!this.streamingReported) {
+ this.streamingReported = true;
+ this.onMessage({
+ type: "status",
+ status: {
+ detail: "Android raw frame stream connected",
+ state: "streaming",
+ },
+ });
+ }
+ }
+
+ private ensureCanvasContext(): CanvasRenderingContext2D | null {
+ const canvas = this.canvas;
+ if (!canvas) {
+ this.canvasContext = null;
+ return null;
+ }
+ if (this.canvasContext?.canvas === canvas) {
+ return this.canvasContext;
+ }
+ this.canvasContext = canvas.getContext("2d", {
+ alpha: false,
+ desynchronized: true,
+ });
+ return this.canvasContext;
+ }
+
+ private syncCanvasSize(width: number, height: number) {
+ if (!this.canvas) {
+ return;
+ }
+ const nextWidth = Math.max(1, Math.round(width));
+ const nextHeight = Math.max(1, Math.round(height));
+ if (this.canvas.width !== nextWidth) {
+ this.canvas.width = nextWidth;
+ }
+ if (this.canvas.height !== nextHeight) {
+ this.canvas.height = nextHeight;
+ }
+ }
+
+ private reportVideoConfig(width: number, height: number) {
+ if (
+ this.reportedVideoWidth === width &&
+ this.reportedVideoHeight === height
+ ) {
+ return;
+ }
+ this.reportedVideoWidth = width;
+ this.reportedVideoHeight = height;
+ this.onMessage({ type: "video-config", size: { height, width } });
+ }
+
+ private scheduleFrameWatchdog(generation: number) {
+ this.clearFrameWatchdog();
+ this.frameWatchdogTimeout = window.setTimeout(
+ () => {
+ this.frameWatchdogTimeout = 0;
+ if (generation !== this.connectGeneration || !this.shouldReconnect) {
+ return;
+ }
+ if (this.lastFrameAt <= 0) {
+ this.handleError("Android raw frame stream did not render a frame.");
+ return;
+ }
+ const now = performance.now();
+ if (now - this.lastFrameAt > H264_WS_STALLED_FRAME_TIMEOUT_MS) {
+ this.stalledFrameWatchdogCount += 1;
+ if (this.stalledFrameWatchdogCount >= 2 && this.streamTarget) {
+ const target = this.streamTarget;
+ this.onMessage({
+ type: "status",
+ status: {
+ detail: "Reconnecting stalled Android raw frame stream",
+ state: "connecting",
+ },
+ });
+ this.connect(target);
+ return;
+ }
+ } else {
+ this.stalledFrameWatchdogCount = 0;
+ }
+ this.scheduleFrameWatchdog(generation);
+ },
+ this.lastFrameAt > 0
+ ? H264_WS_STALLED_FRAME_TIMEOUT_MS
+ : H264_WS_FIRST_FRAME_TIMEOUT_MS,
+ );
+ }
+
+ private clearFrameWatchdog() {
+ if (!this.frameWatchdogTimeout) {
+ return;
+ }
+ window.clearTimeout(this.frameWatchdogTimeout);
+ this.frameWatchdogTimeout = 0;
+ }
+
+ private handleError(message: string) {
+ this.onMessage({
+ type: "status",
+ status: { error: message.replace(/\.$/, ""), state: "error" },
+ });
+ }
+}
+
+function parseAndroidRawFrame(data: unknown): AndroidRawFrame | null {
+ const bytes = bytesFromBinaryMessage(data);
+ if (!bytes || bytes.byteLength < ANDROID_RAW_HEADER_BYTES) {
+ return null;
+ }
+ const view = new DataView(bytes.buffer, bytes.byteOffset, bytes.byteLength);
+ if (
+ view.getUint32(0, false) !== ANDROID_RAW_MAGIC ||
+ view.getUint8(4) !== 1
+ ) {
+ return null;
+ }
+ const width = view.getUint32(8, true);
+ const height = view.getUint32(12, true);
+ const sequence = view.getUint32(16, true);
+ const timestampUs =
+ view.getUint32(24, true) + view.getUint32(28, true) * 4294967296;
+ const pixelBytes = width * height * 4;
+ if (
+ width <= 0 ||
+ height <= 0 ||
+ bytes.byteLength < ANDROID_RAW_HEADER_BYTES + pixelBytes
+ ) {
+ return null;
+ }
+ return {
+ height,
+ pixels: new Uint8ClampedArray(
+ bytes.buffer as ArrayBuffer,
+ bytes.byteOffset + ANDROID_RAW_HEADER_BYTES,
+ pixelBytes,
+ ),
+ sequence,
+ timestampUs,
+ width,
+ };
+}
+
function parseH264WebSocketFrame(data: unknown): H264WebSocketFrame | null {
const bytes = bytesFromBinaryMessage(data);
if (!bytes || bytes.byteLength < H264_WS_HEADER_BYTES) {
@@ -2713,10 +3102,13 @@ export class StreamWorkerClient {
return;
}
this.backend?.destroy();
- this.backend =
- kind === "h264-ws"
- ? new H264WebSocketStreamClient(this.handleBackendMessage)
- : new WebRtcStreamClient(this.handleBackendMessage);
+ if (kind === "android-raw") {
+ this.backend = new AndroidRawFrameStreamClient(this.handleBackendMessage);
+ } else if (kind === "h264-ws") {
+ this.backend = new H264WebSocketStreamClient(this.handleBackendMessage);
+ } else {
+ this.backend = new WebRtcStreamClient(this.handleBackendMessage);
+ }
this.backendKind = kind;
if (this.canvasElement) {
this.backend.attachCanvas(this.canvasElement);
@@ -2755,7 +3147,7 @@ export function preferredStreamBackend(
target?: StreamConnectTarget | null,
): "auto" | StreamBackend {
if (isAndroidStreamTarget(target)) {
- return "webrtc";
+ return "android-raw";
}
const value =
target?.transport ??
@@ -2770,7 +3162,7 @@ export function initialStreamBackend(
target: StreamConnectTarget,
): StreamBackend {
if (isAndroidStreamTarget(target)) {
- return "webrtc";
+ return "android-raw";
}
const preferredBackend = preferredStreamBackend(target);
if (preferredBackend === "h264-ws") {
diff --git a/client/src/features/stream/useLiveStream.ts b/client/src/features/stream/useLiveStream.ts
index 60b980a2..26d1e9a1 100644
--- a/client/src/features/stream/useLiveStream.ts
+++ b/client/src/features/stream/useLiveStream.ts
@@ -437,7 +437,12 @@ export function useLiveStream({
runtimeInfo,
stats,
status,
- streamBackend: stats.codec === "h264-ws" ? "h264-ws" : "webrtc",
+ streamBackend:
+ stats.codec === "android-raw"
+ ? "android-raw"
+ : stats.codec === "h264-ws"
+ ? "h264-ws"
+ : "webrtc",
streamCanvasKey: `stream-${streamCanvasRevision}`,
};
}
diff --git a/server/src/android.rs b/server/src/android.rs
index c4687476..e83289a9 100644
--- a/server/src/android.rs
+++ b/server/src/android.rs
@@ -16,7 +16,7 @@ use tonic::transport::{Channel, Endpoint};
const ANDROID_ID_PREFIX: &str = "android:";
const DEFAULT_GRPC_PORT_BASE: u16 = 8554;
-const DEFAULT_ANDROID_STREAM_MAX_EDGE: u32 = 960;
+const ANDROID_GRPC_FRAME_MESSAGE_LIMIT: usize = 64 * 1024 * 1024;
const ANDROID_TOUCH_IDENTIFIER: i32 = 1;
const RUNNING_EMULATOR_CACHE_TTL: Duration = Duration::from_secs(2);
const AVD_GRPC_PORT_CACHE_TTL: Duration = Duration::from_secs(60);
@@ -460,11 +460,9 @@ impl AndroidBridge {
display: 0,
transport: None,
};
- if let Ok(serial) = self.resolve_serial(&avd_name) {
+ if let (Some(max_edge), Ok(serial)) = (max_edge, self.resolve_serial(&avd_name)) {
if let Ok((width, height)) = self.screen_size_for_serial(&serial) {
- let max_edge = max_edge
- .unwrap_or(DEFAULT_ANDROID_STREAM_MAX_EDGE)
- .clamp(240, 2400) as f64;
+ let max_edge = max_edge.clamp(240, 2400) as f64;
let largest = width.max(height);
if largest > max_edge {
let scale = max_edge / largest;
@@ -483,7 +481,8 @@ impl AndroidBridge {
"Unable to connect to Android emulator gRPC: {error}"
))
})?;
- let mut grpc = tonic::client::Grpc::new(endpoint);
+ let mut grpc = tonic::client::Grpc::new(endpoint)
+ .max_decoding_message_size(ANDROID_GRPC_FRAME_MESSAGE_LIMIT);
grpc.ready().await.map_err(|error| {
AppError::native(format!("Android emulator gRPC is not ready: {error}"))
})?;
diff --git a/server/src/api/routes.rs b/server/src/api/routes.rs
index d12d6561..caa8196d 100644
--- a/server/src/api/routes.rs
+++ b/server/src/api/routes.rs
@@ -1846,10 +1846,8 @@ async fn handle_android_frame_socket(
))
.await;
- let min_frame_gap = max_fps
- .filter(|fps| *fps > 0)
- .map(|fps| Duration::from_millis(1000 / u64::from(fps.min(60))))
- .unwrap_or_else(|| Duration::from_millis(83));
+ let fps = max_fps.unwrap_or(30).clamp(1, 30);
+ let min_frame_gap = Duration::from_micros(1_000_000 / u64::from(fps));
let mut last_sent_at = Instant::now() - min_frame_gap;
loop {
diff --git a/server/src/transport/webrtc.rs b/server/src/transport/webrtc.rs
index f384557f..f4c52a00 100644
--- a/server/src/transport/webrtc.rs
+++ b/server/src/transport/webrtc.rs
@@ -58,9 +58,7 @@ const WEBRTC_FULL_ICE_GATHER_TIMEOUT: Duration = Duration::from_secs(3);
const WEBRTC_RTP_OUTBOUND_MTU: usize = 1200;
const WEBRTC_PEER_DISCONNECTED_TIMEOUT: Duration = Duration::from_secs(12);
const ANDROID_WEBRTC_FRAME_BROADCAST_CAPACITY: usize = 128;
-const DEFAULT_ANDROID_WEBRTC_MAX_EDGE: u32 = 960;
-const DEFAULT_ANDROID_WEBRTC_FPS: u64 = 60;
-const MAX_ANDROID_WEBRTC_FPS: u64 = 120;
+const ANDROID_WEBRTC_FPS: u64 = 30;
static WEBRTC_MEDIA_STREAMS: OnceLock>>> =
OnceLock::new();
const MAX_WEBRTC_MEDIA_STREAMS_PER_UDID: usize = 16;
@@ -124,11 +122,14 @@ pub async fn create_answer(
"WebRTC preview supports media tracks only.",
));
}
- if let Some(stream_config) = payload.stream_config.as_ref() {
- apply_stream_quality_payload(&state, stream_config)?;
+ let is_android = android::is_android_id(&udid);
+ if !is_android {
+ if let Some(stream_config) = payload.stream_config.as_ref() {
+ apply_stream_quality_payload(&state, stream_config)?;
+ }
}
- let source = if android::is_android_id(&udid) {
+ let source = if is_android {
WebRtcVideoSource::Android(
AndroidWebRtcSource::start(
state.android.clone(),
@@ -593,13 +594,8 @@ fn attach_android_data_channel(
let _ = stream_control_tx.send(command);
}
WebRtcDataChannelMessage::StreamQuality { config } => {
- if let Err(error) = apply_stream_quality_payload(&state, &config) {
- warn!(
- "Android WebRTC stream quality update failed for {udid}: {error}"
- );
- } else {
- source.request_keyframe();
- }
+ let _ = config;
+ source.request_keyframe();
}
}
return;
@@ -1111,9 +1107,9 @@ impl AndroidWebRtcSource {
bridge: android::AndroidBridge,
metrics: Arc,
udid: String,
- max_edge: u32,
+    max_edge: Option<u32>,
) -> Result {
- let mut frame_stream = bridge.grpc_frame_stream(&udid, Some(max_edge)).await?;
+ let mut frame_stream = bridge.grpc_frame_stream(&udid, max_edge).await?;
let (sender, _) = broadcast::channel(ANDROID_WEBRTC_FRAME_BROADCAST_CAPACITY);
let (shutdown_tx, _) = broadcast::channel(1);
let inner = Arc::new(AndroidWebRtcSourceInner {
@@ -1402,28 +1398,16 @@ unsafe fn take_native_error(raw: *mut i8) -> Option<AppError> {
Some(AppError::native(message))
}
-fn android_webrtc_max_edge() -> u32 {
- let android_cap = std::env::var("SIMDECK_ANDROID_WEBRTC_MAX_EDGE")
+fn android_webrtc_max_edge() -> Option<u32> {
+ std::env::var("SIMDECK_ANDROID_WEBRTC_MAX_EDGE")
.ok()
    .and_then(|value| value.parse::<u32>().ok())
- .unwrap_or(DEFAULT_ANDROID_WEBRTC_MAX_EDGE)
- .clamp(360, 2400);
- std::env::var("SIMDECK_REALTIME_MAX_EDGE")
- .ok()
-    .and_then(|value| value.parse::<u32>().ok())
- .map(|value| value.clamp(360, 2400).min(android_cap))
- .unwrap_or(android_cap)
+ .filter(|value| *value > 0)
+ .map(|value| value.clamp(360, 4096))
}
fn android_webrtc_frame_interval() -> Duration {
- let fps = std::env::var("SIMDECK_REALTIME_FPS")
- .or_else(|_| std::env::var("SIMDECK_LOCAL_STREAM_FPS"))
- .or_else(|_| std::env::var("SIMDECK_ANDROID_WEBRTC_FPS"))
- .ok()
- .and_then(|value| value.parse::