From 9c59c8ffe9a1d96743a3cb269333ce7602fae84c Mon Sep 17 00:00:00 2001 From: IlFlacco Date: Tue, 16 Sep 2025 23:39:29 +0200 Subject: [PATCH 01/12] Revert "ensure stable frame release under low-latency mode" This reverts commit d4ef0870e214be9a5ccb9e60be7f0a13ca7ea63f. --- app/src/main/java/com/limelight/Game.java | 65 ++-- .../video/MediaCodecDecoderRenderer.java | 312 ++++++++---------- 2 files changed, 183 insertions(+), 194 deletions(-) diff --git a/app/src/main/java/com/limelight/Game.java b/app/src/main/java/com/limelight/Game.java index 992fc00a71..cc92067c15 100755 --- a/app/src/main/java/com/limelight/Game.java +++ b/app/src/main/java/com/limelight/Game.java @@ -660,7 +660,7 @@ public void notifyCrash(Exception e) { // We must use commit because the app will crash when we return from this function tombstonePrefs.edit().putInt("CrashCount", tombstonePrefs.getInt("CrashCount", 0) + 1).commit(); - reportedCrash = true; + reportedCrash = true; } }, tombstonePrefs.getInt("CrashCount", 0), @@ -670,7 +670,7 @@ public void notifyCrash(Exception e) { glPrefs.glRenderer, this); -// --- Force tight thresholds (prefConfig.forceTightThresholds) --- +// --- Force tight thresholds (opzionale, via prefConfig.forceTightThresholds) --- try { boolean forceTight = false; if (prefConfig != null) { @@ -681,30 +681,32 @@ public void notifyCrash(Exception e) { if (v instanceof Boolean) forceTight = (Boolean) v; } catch (Throwable ignored) {} } - try { decoderRenderer.setForceTightThresholds(forceTight); } catch (Throwable ignored) {} + try { decoderRenderer.setForceTightThresholds(forceTight); + applyLatencyPolicy(decoderRenderer, prefConfig);} catch (Throwable ignored) {} if (forceTight) { LimeLog.info("ForceTightThresholds enabled: using vsync-based thresholds on all devices"); } } catch (Throwable ignored) {} -// --- latency profile selection --- +// --- Selezione profilo latenza --- +// Semantica: TRUE = gestito (usa timeout); FALSE = 0µs latest-only try { - if (prefConfig != 
null && prefConfig.preferLowerDelays) { - // Intermediate: more responsive than Balanced but not 0 µs - decoderRenderer.setPreferLowerDelays(true); + if (prefConfig != null && prefConfig.preferLowerDelays) { + // Intermedio: più reattivo di Balanced ma non 0 µs + decoderRenderer.setPreferLowerDelays(true); // GESTITO decoderRenderer.setPreferLowerDelaysTimeoutUs(500); // 0.5 ms prefConfig.framePacing = PreferenceConfiguration.FRAME_PACING_BALANCED; LimeLog.info("PreferLowerDelays: preferLowerDelays=true, timeout=500us, pacing=BALANCED"); } else { // Balanced default - decoderRenderer.setPreferLowerDelays(false); + decoderRenderer.setPreferLowerDelays(true); // GESTITO decoderRenderer.setPreferLowerDelaysTimeoutUs(2000); // 2 ms prefConfig.framePacing = PreferenceConfiguration.FRAME_PACING_BALANCED; - LimeLog.info("Balanced: preferLowerDelays=false, timeout=2000us, pacing=BALANCED"); + LimeLog.info("Balanced: preferLowerDelays=true, timeout=2000us, pacing=BALANCED"); } } catch (Throwable ignored) {} -// Don't stream HDR if the decoder can't support it + // Don't stream HDR if the decoder can't support it if (willStreamHdr && !decoderRenderer.isHevcMain10Hdr10Supported() && !decoderRenderer.isAv1Main10Supported()) { willStreamHdr = false; Toast.makeText(this, "Decoder does not support HDR10 profile", Toast.LENGTH_LONG).show(); @@ -886,7 +888,7 @@ public void notifyCrash(Exception e) { overlayToggleButton = findViewById(R.id.overlayToggleZoomButton); setupOverlayToggleButton(); - + //fixed size + pacing without back-pressure on MTK try { View root = findViewById(android.R.id.content); @@ -894,7 +896,7 @@ public void notifyCrash(Exception e) { SurfaceView streamSurfaceView = findFirstSurfaceViewFrom(root); if (streamSurfaceView != null) { - // Avoid resizes/glitches that break the compositor + // 1) Evita resize/glitch che mandano in crisi il compositor int vw = (prefConfig != null && prefConfig.width > 0) ? 
prefConfig.width : displayWidth; int vh = (prefConfig != null && prefConfig.height > 0) ? prefConfig.height : displayHeight; try { streamSurfaceView.getHolder().setFixedSize(vw, vh); } catch (Throwable ignored) {} @@ -932,7 +934,7 @@ public void notifyCrash(Exception e) { } } } catch (Throwable ignored) {} - } +} @SuppressLint("ClickableViewAccessibility") private void setupOverlayToggleButton() { @@ -1002,7 +1004,7 @@ private void updateZoomButtonAppearance() { if (overlayToggleButton != null) { // Change background based on pan/zoom mode state overlayToggleButton.setBackgroundResource(isPanZoomMode ? - R.drawable.floating_menu_button_active : R.drawable.floating_menu_button); + R.drawable.floating_menu_button_active : R.drawable.floating_menu_button); // No need for alpha changes since the color indicates the state overlayToggleButton.setAlpha(1.0f); } @@ -1629,7 +1631,7 @@ else if (!isRefreshRateGoodMatch(candidate.getRefreshRate())) { if (getPackageManager().hasSystemFeature(PackageManager.FEATURE_TELEVISION) || getPackageManager().hasSystemFeature(PackageManager.FEATURE_LEANBACK) - || isOnExternalDisplay()) {// TVs may take a few moments to switch refresh rates, and we can probably assume + || isOnExternalDisplay()) {// TVs may take a few moments to switch refresh rates, and we can probably assume // it will be eventually activated. 
// external displays cant be compared with displaymanager currents display refreshrate // TODO: Improve this @@ -2888,11 +2890,11 @@ else if (view != null) { // Press & Hold / Double-Tap & Hold for Selection or Drag & Drop double positionDelta = Math.sqrt( Math.pow(event.getX() - lastTouchDownX, 2) + - Math.pow(event.getY() - lastTouchDownY, 2) + Math.pow(event.getY() - lastTouchDownY, 2) ); if (synthClickPending && - event.getEventTime() - synthTouchDownTime >= prefConfig.trackpadDragDropThreshold) { + event.getEventTime() - synthTouchDownTime >= prefConfig.trackpadDragDropThreshold) { if (positionDelta > 50) { pendingDrag = false; } else if (pendingDrag) { @@ -4153,9 +4155,9 @@ private void toggleMouseLocalCursor(){ private void applyMouseMode(int mode) { switch (mode) { case 0: // Multi-touch - prefConfig.enableMultiTouchScreen = true; - prefConfig.touchscreenTrackpad = false; - break; + prefConfig.enableMultiTouchScreen = true; + prefConfig.touchscreenTrackpad = false; + break; case 1: // Normal mouse case 5: // Normal mouse with swapped buttons prefConfig.enableMultiTouchScreen = false; @@ -4346,4 +4348,27 @@ private SurfaceView findFirstSurfaceViewFrom(View v) { return null; } + + // Apply low-latency vs smooth policy to the decoder renderer + // Notes (EN): + // - In low-latency modes we enforce non-blocking dequeue (0 µs) and tight VSYNC pacing. + // - In smooth/balanced modes we allow a small timeout to stabilize pacing. 
+ private void applyLatencyPolicy(com.limelight.binding.video.MediaCodecDecoderRenderer decoderRenderer, + com.limelight.preferences.PreferenceConfiguration prefConfig) { + try { + boolean isLowLatency = true; + if (prefConfig != null) { + // Consider Ultra/Reactive/ULL as low-latency, Balanced/Smooth as non-low-latency + int pacing = prefConfig.framePacing; + // Heuristic: if user selected Balanced/Smooth keep some timeout + isLowLatency = (pacing != com.limelight.preferences.PreferenceConfiguration.FRAME_PACING_BALANCED); + } + decoderRenderer.setPreferLowerDelays(isLowLatency); + decoderRenderer.setPreferLowerDelaysTimeoutUs(2000); + // Tighten thresholds to VSYNC when low-latency is requested + decoderRenderer.setForceTightThresholds(isLowLatency); + } catch (Throwable ignored) { + } + } + } \ No newline at end of file diff --git a/app/src/main/java/com/limelight/binding/video/MediaCodecDecoderRenderer.java b/app/src/main/java/com/limelight/binding/video/MediaCodecDecoderRenderer.java index b131684e2b..1690e2c29b 100755 --- a/app/src/main/java/com/limelight/binding/video/MediaCodecDecoderRenderer.java +++ b/app/src/main/java/com/limelight/binding/video/MediaCodecDecoderRenderer.java @@ -48,12 +48,12 @@ public class MediaCodecDecoderRenderer extends VideoDecoderRenderer implements C // Set true to enable a 'latest-only' fast path in the render loop. private boolean preferLowerDelays = false; - - // Force tight thresholds regardless of device refresh (use vsyncPeriodNs always) - private volatile boolean forceTightThresholds = false; - /** Toggle tight frame pacing thresholds globally. */ - public void setForceTightThresholds(boolean v) { this.forceTightThresholds = v; } - // Toggle at runtime if needed + +// Force tight thresholds regardless of device refresh (use vsyncPeriodNs always) +private volatile boolean forceTightThresholds = false; +/** Toggle tight frame pacing thresholds globally. 
*/ +public void setForceTightThresholds(boolean v) { this.forceTightThresholds = v; } +// Toggle at runtime if needed // Decode latency tracking: map PTS(us) -> enqueue time (ns) private final LongSparseArray enqueueNsByPtsUs = new LongSparseArray<>(); @@ -62,25 +62,7 @@ public class MediaCodecDecoderRenderer extends VideoDecoderRenderer implements C private volatile int preferLowerDelaysTimeoutUs = 2000; public void setPreferLowerDelaysTimeoutUs(int us) { this.preferLowerDelaysTimeoutUs = Math.max(0, us); } - - // Helper: release with low-latency policy (immediate only when very near to now) - private void releaseWithPolicy(int bufferIndex, long frameTimeNanos) { - try { - long now = System.nanoTime(); - boolean immediate = preferLowerDelays && (frameTimeNanos <= now + 300_000L); - if (immediate) { - videoDecoder.releaseOutputBuffer(bufferIndex, true); - } else { - videoDecoder.releaseOutputBuffer(bufferIndex, frameTimeNanos); - } - } catch (Throwable t) { - try { - // Fallback to immediate if timestamped release fails for any reason - videoDecoder.releaseOutputBuffer(bufferIndex, true); - } catch (Throwable ignored) {} - } - } - private int getOutputDequeueTimeoutUs(){ return preferLowerDelays ? Math.max(250, preferLowerDelaysTimeoutUs) : preferLowerDelaysTimeoutUs; } + private int getOutputDequeueTimeoutUs(){ return preferLowerDelays ? preferLowerDelaysTimeoutUs : 0; } // Update stats using real decode time: enqueue->dequeue, instead of uptime - PTS private void updateDecodeLatencyStats(long presentationTimeUs) { @@ -571,7 +553,7 @@ private MediaFormat createBaseMediaFormat(String mimeType) { } } - return videoFormat; +return videoFormat; } private void configureAndStartDecoder(MediaFormat format) { @@ -610,13 +592,13 @@ else if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) { try { applySurfaceFrameRate(renderTarget, targetFps); } catch (Throwable ignored) {} - try { - MediaCodecInfo __info = (android.os.Build.VERSION.SDK_INT >= 21) ? 
videoDecoder.getCodecInfo() : null; - String __name = (__info != null) ? __info.getName() : ""; - LimeLog.info("Decoder name: " + __name); - } catch (Throwable t) { - LimeLog.info("Decoder name: "); - } +try { + MediaCodecInfo __info = (android.os.Build.VERSION.SDK_INT >= 21) ? videoDecoder.getCodecInfo() : null; + String __name = (__info != null) ? __info.getName() : ""; + LimeLog.info("Decoder name: " + __name); +} catch (Throwable t) { + LimeLog.info("Decoder name: "); +} configuredFormat = format; @@ -639,14 +621,14 @@ else if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) { videoDecoder.start(); // Diagnostics: dump negotiated input/output formats and check vendor keys acceptance - try { - MediaFormat __inF = videoDecoder.getInputFormat(); - MediaFormat __outF = videoDecoder.getOutputFormat(); - LimeLog.info("Decoder input format: " + (__inF != null ? __inF.toString() : "")); - LimeLog.info("Decoder output format: " + (__outF != null ? __outF.toString() : "")); - } catch (Throwable t) { - LimeLog.info("Decoder formats unavailable after start"); - } +try { + MediaFormat __inF = videoDecoder.getInputFormat(); + MediaFormat __outF = videoDecoder.getOutputFormat(); + LimeLog.info("Decoder input format: " + (__inF != null ? __inF.toString() : "")); + LimeLog.info("Decoder output format: " + (__outF != null ? 
__outF.toString() : "")); +} catch (Throwable t) { + LimeLog.info("Decoder formats unavailable after start"); +} if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP) { @@ -1090,23 +1072,20 @@ public void doFrame(long frameTimeNanos) { if (nextOutputBuffer != null) { try { if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { - if (preferLowerDelays) { - // ULL: present at next VSYNC (no scheduling) - releaseWithPolicy(nextOutputBuffer, System.nanoTime());} else { - // Smooth/Balanced: keep timestamp scheduling - videoDecoder.releaseOutputBuffer(nextOutputBuffer, frameTimeNanos); - } - + videoDecoder.releaseOutputBuffer(nextOutputBuffer, frameTimeNanos); } else { if (android.os.Build.VERSION.SDK_INT >= 21) { - long __ts = System.nanoTime(); - releaseWithPolicy(nextOutputBuffer, System.nanoTime());} else { - if (android.os.Build.VERSION.SDK_INT >= 21) { - long __ts = System.nanoTime(); - releaseWithPolicy(nextOutputBuffer, frameTimeNanos);} else { - releaseWithPolicy(nextOutputBuffer, frameTimeNanos);} - } + long __ts = System.nanoTime(); + videoDecoder.releaseOutputBuffer(nextOutputBuffer, __ts); + } else { + if (android.os.Build.VERSION.SDK_INT >= 21) { + long __ts = System.nanoTime(); + videoDecoder.releaseOutputBuffer(nextOutputBuffer, __ts); +} else { + videoDecoder.releaseOutputBuffer(nextOutputBuffer, true); +} + } } lastRenderedFrameTimeNanos = frameTimeNanos; @@ -1176,13 +1155,15 @@ public void run() { final int tfps = (targetFps > 0 ? targetFps : 60); final long streamPeriodNs = (long) (1_000_000_000L / Math.max(1, tfps)); - + // Adaptive period selection to avoid added latency on high-refresh devices final boolean highRefresh = displayHz >= 90f; final boolean managedMode = (prefs != null && prefs.framePacing == PreferenceConfiguration.FRAME_PACING_BALANCED); // Use stream-aligned thresholds only on lower-refresh screens while in Balanced. - final long periodNs = (preferLowerDelays ? 
vsyncPeriodNs : Math.max(vsyncPeriodNs, streamPeriodNs)); - boolean isC2Decoder = false; + final long periodNs = forceTightThresholds + ? vsyncPeriodNs + : ((managedMode && !highRefresh) ? Math.max(vsyncPeriodNs, streamPeriodNs) : vsyncPeriodNs); +boolean isC2Decoder = false; try { String decName = videoDecoder.getName(); if (decName != null) { @@ -1209,42 +1190,44 @@ public void run() { BufferInfo info = new BufferInfo(); long lastOutputNs = System.nanoTime(); while (!stopping) { - /* LATEST_ONLY_LOW_LATENCY */ - if (!preferLowerDelays) { - try { - android.media.MediaCodec.BufferInfo __tmpInfo = new android.media.MediaCodec.BufferInfo(); - int __idx = videoDecoder.dequeueOutputBuffer(__tmpInfo, 0); - int __last = -1; - long __lastPtsUs = -1L; - - // Drain non-blocking; keep only the newest buffer - while (__idx >= 0) { - if (__last >= 0) { - try { videoDecoder.releaseOutputBuffer(__last, false); } catch (Throwable ignored) {} - } - __last = __idx; - __lastPtsUs = __tmpInfo.presentationTimeUs; - __idx = videoDecoder.dequeueOutputBuffer(__tmpInfo, 0); - } + /* LATEST_ONLY_LOW_LATENCY */ + if (!preferLowerDelays) { + try { + android.media.MediaCodec.BufferInfo __tmpInfo = new android.media.MediaCodec.BufferInfo(); + int __idx = videoDecoder.dequeueOutputBuffer(__tmpInfo, 0); + int __last = -1; + long __lastPtsUs = -1L; + + // Drain non-blocking; keep only the newest buffer + while (__idx >= 0) { + if (__last >= 0) { + try { videoDecoder.releaseOutputBuffer(__last, false); } catch (Throwable ignored) {} + } + __last = __idx; + __lastPtsUs = __tmpInfo.presentationTimeUs; + __idx = videoDecoder.dequeueOutputBuffer(__tmpInfo, 0); + } + + if (__last >= 0) { + long __nowNs = System.nanoTime(); + if (android.os.Build.VERSION.SDK_INT >= 21) { + videoDecoder.releaseOutputBuffer(__last, __nowNs); + } else { + videoDecoder.releaseOutputBuffer(__last, true); + } - if (__last >= 0) { - long __nowNs = System.nanoTime(); - if (android.os.Build.VERSION.SDK_INT >= 21) { - 
releaseWithPolicy(__last, System.nanoTime());} else { - releaseWithPolicy(__last, System.nanoTime());} - - // Update decode->present EWMA and decode stats if we have a valid PTS - if (__lastPtsUs >= 0) { - long __d2pNs = __nowNs - (__lastPtsUs * 1000L); - ewmaDecodeToPresentNs += EWMA_ALPHA * (__d2pNs - ewmaDecodeToPresentNs); - try { updateDecodeLatencyStats(__lastPtsUs); } catch (Throwable ignored) {} - } + // Update decode->present EWMA and decode stats if we have a valid PTS + if (__lastPtsUs >= 0) { + long __d2pNs = __nowNs - (__lastPtsUs * 1000L); + ewmaDecodeToPresentNs += EWMA_ALPHA * (__d2pNs - ewmaDecodeToPresentNs); + try { updateDecodeLatencyStats(__lastPtsUs); } catch (Throwable ignored) {} + } - continue; // handled this iteration - } - } catch (Throwable ignored) {} - } - /* /LATEST_ONLY_LOW_LATENCY */ + continue; // handled this iteration + } + } catch (Throwable ignored) {} +} +/* /LATEST_ONLY_LOW_LATENCY */ try { @@ -1252,9 +1235,9 @@ public void run() { int outIndex = videoDecoder.dequeueOutputBuffer(info, getOutputDequeueTimeoutUs()); if (outIndex == MediaCodec.INFO_TRY_AGAIN_LATER) { - // reduced backoff 0–500 µs + // backoff ridotto 0–500 µs tryAgainStreak++; - int backoffUs = (tryAgainStreak <= 2) ? 250 : 500; + int backoffUs = Math.min(getOutputDequeueTimeoutUs(), (tryAgainStreak <= 2) ? 
250 : 500); outIndex = videoDecoder.dequeueOutputBuffer(info, backoffUs); } else { tryAgainStreak = 0; @@ -1285,7 +1268,7 @@ public void run() { // Get the last output buffer in the queue while ((outIndex = videoDecoder.dequeueOutputBuffer(info, getOutputDequeueTimeoutUs())) >= 0) { videoDecoder.releaseOutputBuffer(lastIndex, false); - frameDropped = true; // we're discarding the oldest one + frameDropped = true; // stiamo scartando il più vecchio numFramesOut++; lastIndex = outIndex; @@ -1299,7 +1282,7 @@ public void run() { final long nowNs = System.nanoTime(); final long frameAgeNs = nowNs - (presentationTimeUs * 1000L); - // Smoothness: tighter threshold 1.05..1.2× + // Smoothness: soglia più stretta 1.05..1.2× double pressure = Math.min(1.0, (ewmaJitterNs / vsyncPeriodNs) + (recentDrops * 0.1)); double factorSmooth = 1.2 - 0.15 * (1.0 - pressure); factorSmooth = Math.max(1.05, Math.min(1.2, factorSmooth)); @@ -1307,26 +1290,14 @@ public void run() { long dropThresholdSmoothNs = (long)(periodNs * factorSmooth); if (frameAgeNs >= dropThresholdSmoothNs) { - if (preferLowerDelays) { - // ULL: present at next VSYNC (no scheduling) - releaseWithPolicy(lastIndex, System.nanoTime());} else { - // Smooth/Balanced: keep timestamp scheduling - videoDecoder.releaseOutputBuffer(lastIndex, /* render */ false); - } - + videoDecoder.releaseOutputBuffer(lastIndex, /* render */ false); frameDropped = true; lastDropNs = nowNs; recentDrops = Math.min(10, recentDrops + 1); continue; } - if (preferLowerDelays) { - // ULL: present at next VSYNC (no scheduling) - releaseWithPolicy(lastIndex, System.nanoTime());} else { - // Smooth/Balanced: keep timestamp scheduling - videoDecoder.releaseOutputBuffer(lastIndex, nowNs); - } - + videoDecoder.releaseOutputBuffer(lastIndex, nowNs); lastPresentNs = nowNs; recentDrops = Math.max(0, recentDrops - 1); @@ -1336,14 +1307,16 @@ public void run() { } else { if (android.os.Build.VERSION.SDK_INT >= 21) { - long __ts = System.nanoTime(); - 
releaseWithPolicy(lastIndex, System.nanoTime());} else { - if (android.os.Build.VERSION.SDK_INT >= 21) { - long __ts = System.nanoTime(); - releaseWithPolicy(lastIndex, System.nanoTime());} else { - videoDecoder.releaseOutputBuffer(lastIndex, false); - } - } + long __ts = System.nanoTime(); + videoDecoder.releaseOutputBuffer(lastIndex, __ts); + } else { + if (android.os.Build.VERSION.SDK_INT >= 21) { + long __ts = System.nanoTime(); + videoDecoder.releaseOutputBuffer(lastIndex, __ts); +} else { + videoDecoder.releaseOutputBuffer(lastIndex, false); +} + } // [STATS] anche su pre-Lollipop, dopo presentazione updateDecodeLatencyStats(presentationTimeUs); @@ -1380,26 +1353,14 @@ public void run() { dropCooldownOk; if (shouldDrop) { - if (preferLowerDelays) { - // ULL: present at next VSYNC (no scheduling) - releaseWithPolicy(lastIndex, System.nanoTime());} else { - // Smooth/Balanced: keep timestamp scheduling - videoDecoder.releaseOutputBuffer(lastIndex, /* render */ false); - } - + videoDecoder.releaseOutputBuffer(lastIndex, /* render */ false); frameDropped = true; lastDropNs = nowNs; recentDrops = Math.min(10, recentDrops + 1); continue; // niente stats sui frame droppati } - if (preferLowerDelays) { - // ULL: present at next VSYNC (no scheduling) - releaseWithPolicy(lastIndex, System.nanoTime());} else { - // Smooth/Balanced: keep timestamp scheduling - videoDecoder.releaseOutputBuffer(lastIndex, nowNs); - } - + videoDecoder.releaseOutputBuffer(lastIndex, nowNs); lastPresentNs = nowNs; if (!isLate) lateStreak = 0; recentDrops = Math.max(0, recentDrops - 1); @@ -1410,14 +1371,16 @@ public void run() { } else { if (android.os.Build.VERSION.SDK_INT >= 21) { - long __ts = System.nanoTime(); - releaseWithPolicy(lastIndex, System.nanoTime());} else { - if (android.os.Build.VERSION.SDK_INT >= 21) { - long __ts = System.nanoTime(); - releaseWithPolicy(lastIndex, System.nanoTime());} else { - videoDecoder.releaseOutputBuffer(lastIndex, false); - } - } + long __ts = 
System.nanoTime(); + videoDecoder.releaseOutputBuffer(lastIndex, __ts); + } else { + if (android.os.Build.VERSION.SDK_INT >= 21) { + long __ts = System.nanoTime(); + videoDecoder.releaseOutputBuffer(lastIndex, __ts); +} else { + videoDecoder.releaseOutputBuffer(lastIndex, false); +} + } // [STATS] anche su pre-Lollipop, dopo presentazione updateDecodeLatencyStats(presentationTimeUs); @@ -1451,7 +1414,8 @@ public void run() { } // --- Fallback stats update --- - // If we didn't update the stats in-branch and the frame wasn't dropped, + // Se non abbiamo aggiornato le stats in-branch e il frame non è stato droppato, + // aggiorniamo ora (ripristina il comportamento classico, utile per BALANCED). if (!statsUpdated && !frameDropped) { updateDecodeLatencyStats(presentationTimeUs); } @@ -1476,22 +1440,22 @@ public void run() { } } - /* WATCHDOG_C2_SLEEP */ - try { - final long __nowNs = System.nanoTime(); - if (__nowNs - lastOutputNs > 1_200_000_000L) { // ~1.2s without output → likely C2 sleep - LimeLog.warning("Decoder watchdog: no output >1.2s, flushing codec to recover..."); - try { - videoDecoder.flush(); - } catch (Throwable ignored) {} - try { - android.os.Bundle __poke = new android.os.Bundle(); - __poke.putInt("priority", 0); - videoDecoder.setParameters(__poke); - } catch (Throwable ignored) {} - lastOutputNs = __nowNs; - } - } catch (Throwable ignored) {} + /* WATCHDOG_C2_SLEEP */ + try { + final long __nowNs = System.nanoTime(); + if (__nowNs - lastOutputNs > 1_200_000_000L) { // ~1.2s senza output → probabile C2 sleep + LimeLog.warning("Decoder watchdog: no output >1.2s, flushing codec to recover..."); + try { + videoDecoder.flush(); + } catch (Throwable ignored) {} + try { + android.os.Bundle __poke = new android.os.Bundle(); + __poke.putInt("priority", 0); + videoDecoder.setParameters(__poke); + } catch (Throwable ignored) {} + lastOutputNs = __nowNs; + } + } catch (Throwable ignored) {} } }; rendererThread.setName("Video - Renderer (MediaCodec)"); @@ 
-2409,26 +2373,26 @@ else if (renderer.numFramesOut <= renderer.refreshRate * 30) { } - private void applySurfaceFrameRate(android.view.Surface surface, int targetFps) { - if (surface == null) return; - try { - // API 30+ supports Surface.setFrameRate; for older, attempt View-based call elsewhere. - if (android.os.Build.VERSION.SDK_INT >= 30) { - surface.setFrameRate((float) targetFps, - android.view.Surface.FRAME_RATE_COMPATIBILITY_DEFAULT); - LimeLog.info("Applied Surface frame rate: " + targetFps + " Hz"); - } - } catch (Throwable t) { - // best-effort +private void applySurfaceFrameRate(android.view.Surface surface, int targetFps) { + if (surface == null) return; + try { + // API 30+ supports Surface.setFrameRate; for older, attempt View-based call elsewhere. + if (android.os.Build.VERSION.SDK_INT >= 30) { + surface.setFrameRate((float) targetFps, + android.view.Surface.FRAME_RATE_COMPATIBILITY_DEFAULT); + LimeLog.info("Applied Surface frame rate: " + targetFps + " Hz"); } + } catch (Throwable t) { + // best-effort } +} - private boolean isMTKDecoderName(String name) { - if (name == null) return false; - String n = name.toLowerCase(); - return n.startsWith("c2.mtk") || n.startsWith("omx.mtk"); - } +private boolean isMTKDecoderName(String name) { + if (name == null) return false; + String n = name.toLowerCase(); + return n.startsWith("c2.mtk") || n.startsWith("omx.mtk"); +} -} \ No newline at end of file +} From a5ec41886608d8f37eeb90a8dece4e55ecd29be8 Mon Sep 17 00:00:00 2001 From: IlFlacco Date: Mon, 15 Sep 2025 11:38:19 +0200 Subject: [PATCH 02/12] revet the decoder watchdog - seems to be too aggressive in logs --- .../video/MediaCodecDecoderRenderer.java | 23 +++---------------- 1 file changed, 3 insertions(+), 20 deletions(-) diff --git a/app/src/main/java/com/limelight/binding/video/MediaCodecDecoderRenderer.java b/app/src/main/java/com/limelight/binding/video/MediaCodecDecoderRenderer.java index 1690e2c29b..196ff665e2 100755 --- 
a/app/src/main/java/com/limelight/binding/video/MediaCodecDecoderRenderer.java +++ b/app/src/main/java/com/limelight/binding/video/MediaCodecDecoderRenderer.java @@ -48,7 +48,7 @@ public class MediaCodecDecoderRenderer extends VideoDecoderRenderer implements C // Set true to enable a 'latest-only' fast path in the render loop. private boolean preferLowerDelays = false; - + // Force tight thresholds regardless of device refresh (use vsyncPeriodNs always) private volatile boolean forceTightThresholds = false; /** Toggle tight frame pacing thresholds globally. */ @@ -1155,7 +1155,7 @@ public void run() { final int tfps = (targetFps > 0 ? targetFps : 60); final long streamPeriodNs = (long) (1_000_000_000L / Math.max(1, tfps)); - + // Adaptive period selection to avoid added latency on high-refresh devices final boolean highRefresh = displayHz >= 90f; final boolean managedMode = (prefs != null && prefs.framePacing == PreferenceConfiguration.FRAME_PACING_BALANCED); @@ -1439,24 +1439,7 @@ public void run() { doCodecRecoveryIfRequired(CR_FLAG_RENDER_THREAD); } } - - /* WATCHDOG_C2_SLEEP */ - try { - final long __nowNs = System.nanoTime(); - if (__nowNs - lastOutputNs > 1_200_000_000L) { // ~1.2s senza output → probabile C2 sleep - LimeLog.warning("Decoder watchdog: no output >1.2s, flushing codec to recover..."); - try { - videoDecoder.flush(); - } catch (Throwable ignored) {} - try { - android.os.Bundle __poke = new android.os.Bundle(); - __poke.putInt("priority", 0); - videoDecoder.setParameters(__poke); - } catch (Throwable ignored) {} - lastOutputNs = __nowNs; - } - } catch (Throwable ignored) {} - } + } }; rendererThread.setName("Video - Renderer (MediaCodec)"); rendererThread.setPriority(Thread.NORM_PRIORITY + 2); From 5c0d3981aee27d29b7a8334aa4727a50ded432a8 Mon Sep 17 00:00:00 2001 From: IlFlacco Date: Mon, 15 Sep 2025 17:31:11 +0200 Subject: [PATCH 03/12] revert applySurfaceFrameRate --- .../video/MediaCodecDecoderRenderer.java | 27 +++---------------- 1 file 
changed, 4 insertions(+), 23 deletions(-) diff --git a/app/src/main/java/com/limelight/binding/video/MediaCodecDecoderRenderer.java b/app/src/main/java/com/limelight/binding/video/MediaCodecDecoderRenderer.java index 196ff665e2..a16a8dbd53 100755 --- a/app/src/main/java/com/limelight/binding/video/MediaCodecDecoderRenderer.java +++ b/app/src/main/java/com/limelight/binding/video/MediaCodecDecoderRenderer.java @@ -590,8 +590,6 @@ else if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) { videoDecoder.configure(format, renderTarget, null, 0); - try { applySurfaceFrameRate(renderTarget, targetFps); } catch (Throwable ignored) {} - try { MediaCodecInfo __info = (android.os.Build.VERSION.SDK_INT >= 21) ? videoDecoder.getCodecInfo() : null; String __name = (__info != null) ? __info.getName() : ""; @@ -1235,7 +1233,7 @@ public void run() { int outIndex = videoDecoder.dequeueOutputBuffer(info, getOutputDequeueTimeoutUs()); if (outIndex == MediaCodec.INFO_TRY_AGAIN_LATER) { - // backoff ridotto 0–500 µs + // reduced backoff 0–500 µs tryAgainStreak++; int backoffUs = Math.min(getOutputDequeueTimeoutUs(), (tryAgainStreak <= 2) ? 
250 : 500); outIndex = videoDecoder.dequeueOutputBuffer(info, backoffUs); @@ -1268,7 +1266,7 @@ public void run() { // Get the last output buffer in the queue while ((outIndex = videoDecoder.dequeueOutputBuffer(info, getOutputDequeueTimeoutUs())) >= 0) { videoDecoder.releaseOutputBuffer(lastIndex, false); - frameDropped = true; // stiamo scartando il più vecchio + frameDropped = true; // we're discarding the oldest one numFramesOut++; lastIndex = outIndex; @@ -1282,7 +1280,7 @@ public void run() { final long nowNs = System.nanoTime(); final long frameAgeNs = nowNs - (presentationTimeUs * 1000L); - // Smoothness: soglia più stretta 1.05..1.2× + // Smoothness: tighter threshold 1.05..1.2× double pressure = Math.min(1.0, (ewmaJitterNs / vsyncPeriodNs) + (recentDrops * 0.1)); double factorSmooth = 1.2 - 0.15 * (1.0 - pressure); factorSmooth = Math.max(1.05, Math.min(1.2, factorSmooth)); @@ -1414,8 +1412,7 @@ public void run() { } // --- Fallback stats update --- - // Se non abbiamo aggiornato le stats in-branch e il frame non è stato droppato, - // aggiorniamo ora (ripristina il comportamento classico, utile per BALANCED). + // If we didn't update the stats in-branch and the frame wasn't dropped, if (!statsUpdated && !frameDropped) { updateDecodeLatencyStats(presentationTimeUs); } @@ -2356,22 +2353,6 @@ else if (renderer.numFramesOut <= renderer.refreshRate * 30) { } -private void applySurfaceFrameRate(android.view.Surface surface, int targetFps) { - if (surface == null) return; - try { - // API 30+ supports Surface.setFrameRate; for older, attempt View-based call elsewhere. 
- if (android.os.Build.VERSION.SDK_INT >= 30) { - surface.setFrameRate((float) targetFps, - android.view.Surface.FRAME_RATE_COMPATIBILITY_DEFAULT); - LimeLog.info("Applied Surface frame rate: " + targetFps + " Hz"); - } - } catch (Throwable t) { - // best-effort - } -} - - - private boolean isMTKDecoderName(String name) { if (name == null) return false; String n = name.toLowerCase(); From b8aa452283af4a90d367957bb0af71774422e446 Mon Sep 17 00:00:00 2001 From: IlFlacco Date: Tue, 14 Oct 2025 23:26:37 +0200 Subject: [PATCH 04/12] Fix latency policy \ Prefer Lower Delays (LFR) - Early in my experiments, I had accidentally forced balanced pacing and inverted LFR logic . I've since fixed these issues in my experimental branch, and it's now time to port those fixes here. --- app/src/main/java/com/limelight/Game.java | 60 +++++++++---------- .../video/MediaCodecDecoderRenderer.java | 8 +-- 2 files changed, 31 insertions(+), 37 deletions(-) diff --git a/app/src/main/java/com/limelight/Game.java b/app/src/main/java/com/limelight/Game.java index cc92067c15..981b083030 100755 --- a/app/src/main/java/com/limelight/Game.java +++ b/app/src/main/java/com/limelight/Game.java @@ -688,23 +688,6 @@ public void notifyCrash(Exception e) { } } catch (Throwable ignored) {} -// --- Selezione profilo latenza --- -// Semantica: TRUE = gestito (usa timeout); FALSE = 0µs latest-only - try { - if (prefConfig != null && prefConfig.preferLowerDelays) { - // Intermedio: più reattivo di Balanced ma non 0 µs - decoderRenderer.setPreferLowerDelays(true); // GESTITO - decoderRenderer.setPreferLowerDelaysTimeoutUs(500); // 0.5 ms - prefConfig.framePacing = PreferenceConfiguration.FRAME_PACING_BALANCED; - LimeLog.info("PreferLowerDelays: preferLowerDelays=true, timeout=500us, pacing=BALANCED"); - } else { - // Balanced default - decoderRenderer.setPreferLowerDelays(true); // GESTITO - decoderRenderer.setPreferLowerDelaysTimeoutUs(2000); // 2 ms - prefConfig.framePacing = 
PreferenceConfiguration.FRAME_PACING_BALANCED; - LimeLog.info("Balanced: preferLowerDelays=true, timeout=2000us, pacing=BALANCED"); - } - } catch (Throwable ignored) {} // Don't stream HDR if the decoder can't support it if (willStreamHdr && !decoderRenderer.isHevcMain10Hdr10Supported() && !decoderRenderer.isAv1Main10Supported()) { @@ -931,6 +914,9 @@ public void notifyCrash(Exception e) { java.lang.reflect.Method m = SurfaceView.class.getMethod("setFrameRate", float.class, int.class); m.invoke(streamSurfaceView, Math.min(targetFps, displayHz), compat); } catch (Throwable ignored) {} + // Apply latency policy (LFR/ULL vs managed) + try { applyLatencyPolicy(decoderRenderer, prefConfig); } catch (Throwable ignored) {} + } } } catch (Throwable ignored) {} @@ -4353,22 +4339,30 @@ private SurfaceView findFirstSurfaceViewFrom(View v) { // Notes (EN): // - In low-latency modes we enforce non-blocking dequeue (0 µs) and tight VSYNC pacing. // - In smooth/balanced modes we allow a small timeout to stabilize pacing. 
- private void applyLatencyPolicy(com.limelight.binding.video.MediaCodecDecoderRenderer decoderRenderer, - com.limelight.preferences.PreferenceConfiguration prefConfig) { + private void applyLatencyPolicy( + com.limelight.binding.video.MediaCodecDecoderRenderer decoderRenderer, + com.limelight.preferences.PreferenceConfiguration prefConfig) { + if (decoderRenderer == null) return; try { - boolean isLowLatency = true; - if (prefConfig != null) { - // Consider Ultra/Reactive/ULL as low-latency, Balanced/Smooth as non-low-latency - int pacing = prefConfig.framePacing; - // Heuristic: if user selected Balanced/Smooth keep some timeout - isLowLatency = (pacing != com.limelight.preferences.PreferenceConfiguration.FRAME_PACING_BALANCED); - } - decoderRenderer.setPreferLowerDelays(isLowLatency); - decoderRenderer.setPreferLowerDelaysTimeoutUs(2000); - // Tighten thresholds to VSYNC when low-latency is requested - decoderRenderer.setForceTightThresholds(isLowLatency); - } catch (Throwable ignored) { - } + // UI semantics: + // preferLowerDelays = TRUE → latest-only (0 µs dequeue) [ULL] + // preferLowerDelays = FALSE → managed (+ small timeout) [Balanced] + final boolean latestOnly = (prefConfig != null) && prefConfig.preferLowerDelays; + + // Balanced: 500 µs, ULL: 0 µs + final int timeoutUs = latestOnly ? 0 : 500; + + // Renderer API: TRUE=latest-only (LFR), FALSE=managed (Balanced) + decoderRenderer.setPreferLowerDelays(latestOnly); + decoderRenderer.setPreferLowerDelaysTimeoutUs(timeoutUs); + + // Tight thresholds ON se: ULL oppure toggle "Tight VSync" attivo in UI + final boolean tightFromUi = (prefConfig != null) && prefConfig.forceTightThresholds; + decoderRenderer.setForceTightThresholds(tightFromUi); + + LimeLog.info("Latency policy → " + + (latestOnly ? 
"latest-only, timeout=0us" : ("managed, timeout=" + timeoutUs + "us")) + + " | forceTight=" + (tightFromUi)); + } catch (Throwable ignored) { } } - } \ No newline at end of file diff --git a/app/src/main/java/com/limelight/binding/video/MediaCodecDecoderRenderer.java b/app/src/main/java/com/limelight/binding/video/MediaCodecDecoderRenderer.java index a16a8dbd53..25f5879a82 100755 --- a/app/src/main/java/com/limelight/binding/video/MediaCodecDecoderRenderer.java +++ b/app/src/main/java/com/limelight/binding/video/MediaCodecDecoderRenderer.java @@ -57,9 +57,9 @@ public class MediaCodecDecoderRenderer extends VideoDecoderRenderer implements C // Decode latency tracking: map PTS(us) -> enqueue time (ns) private final LongSparseArray enqueueNsByPtsUs = new LongSparseArray<>(); - // When preferLowerDelays=true we use this configurable timeout (µs) for output dequeue. -// When preferLowerDelays=false we force 0µs (non-blocking, latest-frame rendering). - private volatile int preferLowerDelaysTimeoutUs = 2000; + // When preferLowerDelays = true (LFR/ULL): force non-blocking by default. + // When preferLowerDelays = false (Balanced/managed): use a small timeout for smoothing. + private volatile int preferLowerDelaysTimeoutUs = 0; // default 0 for ULL; policy may override if needed public void setPreferLowerDelaysTimeoutUs(int us) { this.preferLowerDelaysTimeoutUs = Math.max(0, us); } private int getOutputDequeueTimeoutUs(){ return preferLowerDelays ? 
Subject: [PATCH 05/12] Refactor latency policy
private void applyLatencyPolicy( com.limelight.binding.video.MediaCodecDecoderRenderer decoderRenderer, com.limelight.preferences.PreferenceConfiguration prefConfig) { - if (decoderRenderer == null) return; + if (decoderRenderer == null || prefConfig == null) return; try { - // UI semantics: - // preferLowerDelays = TRUE → latest-only (0 µs dequeue) [ULL] - // preferLowerDelays = FALSE → managed (+ small timeout) [Balanced] - final boolean latestOnly = (prefConfig != null) && prefConfig.preferLowerDelays; + final boolean lfrActive = prefConfig.preferLowerDelays; - // Balanced: 500 µs, ULL: 0 µs - final int timeoutUs = latestOnly ? 0 : 500; + // Apply LFR for all pacing profiles + decoderRenderer.setPreferLowerDelays(lfrActive); - // Renderer API: TRUE=latest-only (LFR), FALSE=managed (Balanced) - decoderRenderer.setPreferLowerDelays(latestOnly); - decoderRenderer.setPreferLowerDelaysTimeoutUs(timeoutUs); - - // Tight thresholds ON se: ULL oppure toggle "Tight VSync" attivo in UI - final boolean tightFromUi = (prefConfig != null) && prefConfig.forceTightThresholds; - decoderRenderer.setForceTightThresholds(tightFromUi); + // LFR uses 0 us dequeue timeout; otherwise leave renderer defaults untouched + if (lfrActive) { + decoderRenderer.setPreferLowerDelaysTimeoutUs(0); + } - LimeLog.info("Latency policy → " + - (latestOnly ? "latest-only, timeout=0us" : ("managed, timeout=" + timeoutUs + "us")) + - " | forceTight=" + (tightFromUi)); + // Tight thresholds follow the UI toggle + decoderRenderer.setForceTightThresholds(prefConfig.forceTightThresholds); + + // Minimal logging + try { + final String mode = lfrActive + ? 
Subject: [PATCH 06/12] Fix: gate SemWindowManager behind Samsung check
+ if (!isSamsungDevice()) return; // non-Samsung device → skip reflection entirely
app/src/main/java/com/limelight/Game.java | 51 +---------------------- 1 file changed, 1 insertion(+), 50 deletions(-) diff --git a/app/src/main/java/com/limelight/Game.java b/app/src/main/java/com/limelight/Game.java index b05b1350fb..9f26060dd9 100755 --- a/app/src/main/java/com/limelight/Game.java +++ b/app/src/main/java/com/limelight/Game.java @@ -878,56 +878,7 @@ public void notifyCrash(Exception e) { overlayToggleButton = findViewById(R.id.overlayToggleZoomButton); setupOverlayToggleButton(); - - //fixed size + pacing without back-pressure on MTK - try { - View root = findViewById(android.R.id.content); - // Niente getIdentifier: troviamo la prima SurfaceView nel layout - SurfaceView streamSurfaceView = findFirstSurfaceViewFrom(root); - - if (streamSurfaceView != null) { - // 1) Evita resize/glitch che mandano in crisi il compositor - int vw = (prefConfig != null && prefConfig.width > 0) ? prefConfig.width : displayWidth; - int vh = (prefConfig != null && prefConfig.height > 0) ? prefConfig.height : displayHeight; - try { streamSurfaceView.getHolder().setFixedSize(vw, vh); } catch (Throwable ignored) {} - try { streamSurfaceView.setZOrderOnTop(false); } catch (Throwable ignored) {} - try { streamSurfaceView.setZOrderMediaOverlay(false); } catch (Throwable ignored) {} - - // 2) setFrameRate via reflection (compat < 30) - if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.R) { - float displayHz = 60f; - try { - if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.M) { - displayHz = currentDisplay.getMode().getRefreshRate(); - } else { - displayHz = currentDisplay.getRefreshRate(); - } - } catch (Throwable ignored) {} - - float targetFps = (prefConfig != null && prefConfig.fps > 0) ? 
Subject: [PATCH 08/12] Render: fix the decode-latency mismatch bug between, e.g., a 60 Hz display and a 120 FPS stream; LFR fixes
--- .../video/MediaCodecDecoderRenderer.java | 189 +++++++++--------- 1 file changed, 99 insertions(+), 90 deletions(-) diff --git a/app/src/main/java/com/limelight/binding/video/MediaCodecDecoderRenderer.java b/app/src/main/java/com/limelight/binding/video/MediaCodecDecoderRenderer.java index 25f5879a82..b8c236597e 100755 --- a/app/src/main/java/com/limelight/binding/video/MediaCodecDecoderRenderer.java +++ b/app/src/main/java/com/limelight/binding/video/MediaCodecDecoderRenderer.java @@ -1153,7 +1153,6 @@ public void run() { final int tfps = (targetFps > 0 ? targetFps : 60); final long streamPeriodNs = (long) (1_000_000_000L / Math.max(1, tfps)); - // Adaptive period selection to avoid added latency on high-refresh devices final boolean highRefresh = displayHz >= 90f; final boolean managedMode = (prefs != null && prefs.framePacing == PreferenceConfiguration.FRAME_PACING_BALANCED); @@ -1185,73 +1184,103 @@ public void run() { double ewmaDecodeToPresentNs = periodNs * 0.7; double ewmaJitterNs = periodNs * 0.1; - BufferInfo info = new BufferInfo(); - long lastOutputNs = System.nanoTime(); + final android.media.MediaCodec.BufferInfo info = new android.media.MediaCodec.BufferInfo(); while (!stopping) { - /* LATEST_ONLY_LOW_LATENCY */ - if (preferLowerDelays) { - try { - android.media.MediaCodec.BufferInfo __tmpInfo = new android.media.MediaCodec.BufferInfo(); - int __idx = videoDecoder.dequeueOutputBuffer(__tmpInfo, 0); - int __last = -1; - long __lastPtsUs = -1L; - - // Drain non-blocking; keep only the newest buffer - while (__idx >= 0) { - if (__last >= 0) { - try { videoDecoder.releaseOutputBuffer(__last, false); } catch (Throwable ignored) {} - } - __last = __idx; - __lastPtsUs = __tmpInfo.presentationTimeUs; - __idx = videoDecoder.dequeueOutputBuffer(__tmpInfo, 0); - } - - if (__last >= 0) { - long __nowNs = System.nanoTime(); - if (android.os.Build.VERSION.SDK_INT >= 21) { - videoDecoder.releaseOutputBuffer(__last, __nowNs); - } else { - 
videoDecoder.releaseOutputBuffer(__last, true); - } - // Update decode->present EWMA and decode stats if we have a valid PTS - if (__lastPtsUs >= 0) { - long __d2pNs = __nowNs - (__lastPtsUs * 1000L); - ewmaDecodeToPresentNs += EWMA_ALPHA * (__d2pNs - ewmaDecodeToPresentNs); - try { updateDecodeLatencyStats(__lastPtsUs); } catch (Throwable ignored) {} - } + /* LATEST_ONLY_LOW_LATENCY */ + if (preferLowerDelays) { + try { + final android.media.MediaCodec.BufferInfo __tmpInfo = new android.media.MediaCodec.BufferInfo(); + int __idx = videoDecoder.dequeueOutputBuffer(__tmpInfo, 0); + int __last = -1; + long __lastPtsUs = -1L; - continue; // handled this iteration - } - } catch (Throwable ignored) {} -} -/* /LATEST_ONLY_LOW_LATENCY */ + // Drain non-blocking; keep only the newest buffer + while (__idx >= 0) { + final long ptsUs = __tmpInfo.presentationTimeUs; + + // Measure pure decode time at dequeue (for ALL frames, shown or discarded) + try { updateDecodeLatencyStats(ptsUs); } catch (Throwable ignored) {} + + if (__last >= 0) { + // Drop older buffer without rendering + try { videoDecoder.releaseOutputBuffer(__last, false); } catch (Throwable ignored) {} + } + + __last = __idx; + __lastPtsUs = ptsUs; + __idx = videoDecoder.dequeueOutputBuffer(__tmpInfo, 0); + } + + if (__last >= 0) { + final long __nowNs = System.nanoTime(); + // Present the newest buffer ASAP (timestamped) + if (android.os.Build.VERSION.SDK_INT >= 21) { + videoDecoder.releaseOutputBuffer(__last, __nowNs); + } else { + videoDecoder.releaseOutputBuffer(__last, true); + } + + try { + activeWindowVideoStats.totalFramesRendered++; + numFramesOut++; + lastDecoderPtsUs = __lastPtsUs; + } catch (Throwable ignored) {} + + // EWMA decode->present: + if (__lastPtsUs >= 0) { + final long __d2pNs = __nowNs - (__lastPtsUs * 1000L); + ewmaDecodeToPresentNs += EWMA_ALPHA * (__d2pNs - ewmaDecodeToPresentNs); + } + + continue; + } + } catch (Throwable ignored) {} + } + /* /LATEST_ONLY_LOW_LATENCY */ try { - // Try 
to output a frame - int outIndex = videoDecoder.dequeueOutputBuffer(info, getOutputDequeueTimeoutUs()); + // Try to output a frame (respect policy and do quick retry within budget) + final int policyUs = getOutputDequeueTimeoutUs(); + + final long t0 = System.nanoTime(); + int outIndex = videoDecoder.dequeueOutputBuffer(info, policyUs); + final long elapsedUs = (System.nanoTime() - t0) / 1000L; if (outIndex == MediaCodec.INFO_TRY_AGAIN_LATER) { - // reduced backoff 0–500 µs tryAgainStreak++; - int backoffUs = Math.min(getOutputDequeueTimeoutUs(), (tryAgainStreak <= 2) ? 250 : 500); - outIndex = videoDecoder.dequeueOutputBuffer(info, backoffUs); + final int quickBackoffUs = (tryAgainStreak <= 2) ? 250 : 500; + + final int remainingUs = (policyUs > 0) ? Math.max(0, policyUs - (int) elapsedUs) : 0; + final int backoffUs = Math.min(remainingUs, quickBackoffUs); + + if (backoffUs > 0) { + outIndex = videoDecoder.dequeueOutputBuffer(info, backoffUs); + } + if (outIndex >= 0) { + tryAgainStreak = 0; + } } else { tryAgainStreak = 0; } if (outIndex >= 0) { - // --- flags per gestire le statistiche in modo robusto --- + // --- flags to manage statistics in a robust way --- boolean statsUpdated = false; boolean frameDropped = false; long presentationTimeUs = info.presentationTimeUs; int lastIndex = outIndex; + long lastPtsUs = presentationTimeUs; numFramesOut++; - // aggiorna inter-arrival + // Measure decode latency AT DEQUEUE + try { updateDecodeLatencyStats(presentationTimeUs); } catch (Throwable ignored) {} + statsUpdated = true; + + // update inter-arrival if (lastDecoderPtsUs != 0L) { long interUs = presentationTimeUs - lastDecoderPtsUs; if (interUs > 0) { @@ -1261,21 +1290,26 @@ public void run() { } lastDecoderPtsUs = presentationTimeUs; + final PreferenceConfiguration p = prefs; // snapshot for null safety + // Render the latest frame now if frame pacing isn't in balanced mode - if (prefs.framePacing != PreferenceConfiguration.FRAME_PACING_BALANCED) { - // Get the 
last output buffer in the queue + if (p == null || p.framePacing != PreferenceConfiguration.FRAME_PACING_BALANCED) { + // Keep only the newest: measure decode for each new frame at DEQUEUE while ((outIndex = videoDecoder.dequeueOutputBuffer(info, getOutputDequeueTimeoutUs())) >= 0) { + final long newPtsUs = info.presentationTimeUs; + try { updateDecodeLatencyStats(newPtsUs); } catch (Throwable ignored) {} videoDecoder.releaseOutputBuffer(lastIndex, false); frameDropped = true; // we're discarding the oldest one numFramesOut++; lastIndex = outIndex; - presentationTimeUs = info.presentationTimeUs; + presentationTimeUs = newPtsUs; + lastPtsUs = newPtsUs; } - if (prefs.framePacing == PreferenceConfiguration.FRAME_PACING_MAX_SMOOTHNESS || - prefs.framePacing == PreferenceConfiguration.FRAME_PACING_CAP_FPS) { - // In max smoothness or cap FPS mode, we want to never drop frames + if (p != null && (p.framePacing == PreferenceConfiguration.FRAME_PACING_MAX_SMOOTHNESS || + p.framePacing == PreferenceConfiguration.FRAME_PACING_CAP_FPS)) { + // Smoothness/Cap: avoid drop, present ASAP if not beyond threshold if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { final long nowNs = System.nanoTime(); final long frameAgeNs = nowNs - (presentationTimeUs * 1000L); @@ -1299,29 +1333,17 @@ public void run() { lastPresentNs = nowNs; recentDrops = Math.max(0, recentDrops - 1); - // [STATS] update subito dopo il present - updateDecodeLatencyStats(presentationTimeUs); - statsUpdated = true; - } else { if (android.os.Build.VERSION.SDK_INT >= 21) { - long __ts = System.nanoTime(); - videoDecoder.releaseOutputBuffer(lastIndex, __ts); - } else { - if (android.os.Build.VERSION.SDK_INT >= 21) { - long __ts = System.nanoTime(); - videoDecoder.releaseOutputBuffer(lastIndex, __ts); -} else { - videoDecoder.releaseOutputBuffer(lastIndex, false); -} - } - - // [STATS] anche su pre-Lollipop, dopo presentazione - updateDecodeLatencyStats(presentationTimeUs); - statsUpdated = true; + long __ts 
= System.nanoTime(); + videoDecoder.releaseOutputBuffer(lastIndex, __ts); + } else { + videoDecoder.releaseOutputBuffer(lastIndex, true); + } } } else { + // Latency mode if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { final long nowNs = System.nanoTime(); final long frameAgeNs = nowNs - (presentationTimeUs * 1000L); @@ -1355,7 +1377,7 @@ public void run() { frameDropped = true; lastDropNs = nowNs; recentDrops = Math.min(10, recentDrops + 1); - continue; // niente stats sui frame droppati + continue; // stats already recorded at dequeue for this PTS } videoDecoder.releaseOutputBuffer(lastIndex, nowNs); @@ -1363,26 +1385,13 @@ public void run() { if (!isLate) lateStreak = 0; recentDrops = Math.max(0, recentDrops - 1); - // [STATS] update subito dopo il present - updateDecodeLatencyStats(presentationTimeUs); - statsUpdated = true; - } else { if (android.os.Build.VERSION.SDK_INT >= 21) { - long __ts = System.nanoTime(); - videoDecoder.releaseOutputBuffer(lastIndex, __ts); - } else { - if (android.os.Build.VERSION.SDK_INT >= 21) { - long __ts = System.nanoTime(); - videoDecoder.releaseOutputBuffer(lastIndex, __ts); -} else { - videoDecoder.releaseOutputBuffer(lastIndex, false); -} - } - - // [STATS] anche su pre-Lollipop, dopo presentazione - updateDecodeLatencyStats(presentationTimeUs); - statsUpdated = true; + long __ts = System.nanoTime(); + videoDecoder.releaseOutputBuffer(lastIndex, __ts); + } else { + videoDecoder.releaseOutputBuffer(lastIndex, true); + } } } @@ -1408,7 +1417,7 @@ public void run() { // Add this buffer outputBufferQueue.add(lastIndex); - // NB: in BALANCED non presentiamo qui; lasciamo il fallback stats sotto + // NB: in BALANCED we don't present here; stats already updated at dequeue } // --- Fallback stats update --- From aea3d679b01ee6d262de72eee1e45b037328504e Mon Sep 17 00:00:00 2001 From: IlFlacco Date: Tue, 21 Oct 2025 23:29:42 +0200 Subject: [PATCH 09/12] LFR: play nice with pacings MIME-Version: 1.0 Content-Type: text/plain; 
+// - LFR ON only for Max Smoothness (tolerant). Bypass on Balanced/CapFPS.
private void applyLatencyPolicy( com.limelight.binding.video.MediaCodecDecoderRenderer decoderRenderer, com.limelight.preferences.PreferenceConfiguration prefConfig) { if (decoderRenderer == null || prefConfig == null) return; + try { - final boolean lfrActive = prefConfig.preferLowerDelays; + final boolean userLfr = prefConfig.preferLowerDelays; + final int fp = prefConfig.framePacing; - // Apply LFR for all pacing profiles - decoderRenderer.setPreferLowerDelays(lfrActive); + final boolean lfrEffective = isLfrEffective(fp, userLfr); - // LFR uses 0 us dequeue timeout; otherwise leave renderer defaults untouched - if (lfrActive) { - decoderRenderer.setPreferLowerDelaysTimeoutUs(0); - } + decoderRenderer.setPreferLowerDelays(lfrEffective); + // LFR path uses 0 µs by default; managed path uses per-profile timeouts inside the renderer + decoderRenderer.setPreferLowerDelaysTimeoutUs(0); - // Tight thresholds follow the UI toggle decoderRenderer.setForceTightThresholds(prefConfig.forceTightThresholds); - // Minimal logging try { - final String mode = lfrActive - ? 
"LFR on, timeout=0us" - : "managed, timeout=renderer-default"; - com.limelight.LimeLog.info("Latency policy -> " + mode + - " | forceTight=" + prefConfig.forceTightThresholds); - } catch (Throwable ignored) { } - } catch (Throwable ignored) { } + com.limelight.LimeLog.info("Latency policy -> LFR(eff)=" + lfrEffective + + " (user=" + userLfr + ", fp=" + fp + "), tight=" + prefConfig.forceTightThresholds); + } catch (Throwable ignored) {} + } catch (Throwable ignored) {} + } + + private static boolean isLfrEffective(int fp, boolean userLfr) { + final boolean isBalanced = + (fp == PreferenceConfiguration.FRAME_PACING_BALANCED); + final boolean isCapFps = + (fp == PreferenceConfiguration.FRAME_PACING_CAP_FPS); + final boolean isMaxSmooth = + (fp == PreferenceConfiguration.FRAME_PACING_MAX_SMOOTHNESS); + + // Bypass LFR on Balanced/CapFPS, allow it on Max Smoothness + return userLfr && !isBalanced && !isCapFps; } } \ No newline at end of file diff --git a/app/src/main/java/com/limelight/binding/video/MediaCodecDecoderRenderer.java b/app/src/main/java/com/limelight/binding/video/MediaCodecDecoderRenderer.java index b8c236597e..451b876e95 100755 --- a/app/src/main/java/com/limelight/binding/video/MediaCodecDecoderRenderer.java +++ b/app/src/main/java/com/limelight/binding/video/MediaCodecDecoderRenderer.java @@ -1,2371 +1,2391 @@ -package com.limelight.binding.video; - -import java.io.IOException; -import java.nio.ByteBuffer; -import java.nio.ByteOrder; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.concurrent.LinkedBlockingQueue; -import java.util.concurrent.atomic.AtomicInteger; - -import org.jcodec.codecs.h264.H264Utils; -import org.jcodec.codecs.h264.io.model.SeqParameterSet; -import org.jcodec.codecs.h264.io.model.VUIParameters; - -import com.limelight.BuildConfig; -import com.limelight.LimeLog; -import com.limelight.R; -import com.limelight.nvstream.av.video.VideoDecoderRenderer; -import 
com.limelight.nvstream.jni.MoonBridge; -import com.limelight.preferences.PreferenceConfiguration; -import com.limelight.utils.Stereo3DRenderer; -import com.limelight.utils.TrafficStatsHelper; - -import android.annotation.SuppressLint; -import android.util.LongSparseArray; -import android.annotation.TargetApi; -import android.app.Activity; -import android.content.Context; -import android.media.MediaCodec; -import android.os.Bundle; -import android.media.MediaCodecInfo; -import android.media.MediaFormat; -import android.media.MediaCodec.BufferInfo; -import android.media.MediaCodec.CodecException; -import android.net.TrafficStats; -import android.os.Build; -import android.os.Handler; -import android.os.HandlerThread; -import android.os.Process; -import android.os.SystemClock; -import android.util.Range; -import android.view.Choreographer; -import android.view.Surface; - -public class MediaCodecDecoderRenderer extends VideoDecoderRenderer implements Choreographer.FrameCallback { - // Latency profile: favor minimal end-to-end delay over absolute smoothness. - // Set true to enable a 'latest-only' fast path in the render loop. - private boolean preferLowerDelays = false; - - -// Force tight thresholds regardless of device refresh (use vsyncPeriodNs always) -private volatile boolean forceTightThresholds = false; -/** Toggle tight frame pacing thresholds globally. */ -public void setForceTightThresholds(boolean v) { this.forceTightThresholds = v; } -// Toggle at runtime if needed - // Decode latency tracking: map PTS(us) -> enqueue time (ns) - private final LongSparseArray enqueueNsByPtsUs = new LongSparseArray<>(); - - // When preferLowerDelays = true (LFR/ULL): force non-blocking by default. - // When preferLowerDelays = false (Balanced/managed): use a small timeout for smoothing. 
    // Output-dequeue timeout used when preferLowerDelays is active.
    // Default 0 for ULL (pure non-blocking); policy may override if needed.
    private volatile int preferLowerDelaysTimeoutUs = 0;

    /** Sets the dequeue timeout (microseconds) for the low-delay path; clamped to >= 0. */
    public void setPreferLowerDelaysTimeoutUs(int us) { this.preferLowerDelaysTimeoutUs = Math.max(0, us); }

    // Effective output dequeue timeout: the configurable value in low-delay mode,
    // otherwise 0 (non-blocking) for the managed path.
    private int getOutputDequeueTimeoutUs(){ return preferLowerDelays ? preferLowerDelaysTimeoutUs : 0; }

    // Update stats using real decode time: enqueue->dequeue, instead of uptime - PTS.
    // Entries are removed from the map as they are consumed; samples >= 1000 ms are
    // treated as outliers and discarded.
    private void updateDecodeLatencyStats(long presentationTimeUs) {
        Long enqNs = enqueueNsByPtsUs.get(presentationTimeUs);
        if (enqNs != null) {
            enqueueNsByPtsUs.delete(presentationTimeUs);
            long decMs = (System.nanoTime() - enqNs) / 1_000_000L;
            if (decMs >= 0 && decMs < 1000) {
                activeWindowVideoStats.decoderTimeMs += decMs;
                if (!USE_FRAME_RENDER_TIME) {
                    activeWindowVideoStats.totalTimeMs += decMs;
                }
            }
        }
    }

    /** Selects the latency profile: true = latest-only fast path, false = managed pacing. */
    public void setPreferLowerDelays(boolean v) { this.preferLowerDelays = v; }


    private static final boolean USE_FRAME_RENDER_TIME = false;
    private static final boolean FRAME_RENDER_TIME_ONLY = USE_FRAME_RENDER_TIME && false;

    // Used on versions < 5.0
    private ByteBuffer[] legacyInputBuffers;

    private MediaCodecInfo avcDecoder;
    private MediaCodecInfo hevcDecoder;
    private MediaCodecInfo av1Decoder;

    // Codec-specific data (CSD) NALUs captured from the stream, resubmitted after
    // (re)configuration. NOTE(review): declared raw — presumably ArrayList<byte[]>;
    // generic parameters appear lost in transit.
    private final ArrayList vpsBuffers = new ArrayList<>();
    private final ArrayList spsBuffers = new ArrayList<>();
    private final ArrayList ppsBuffers = new ArrayList<>();
    private boolean submittedCsd;
    private byte[] currentHdrMetadata;

    // Input buffer currently dequeued but not yet submitted (-1 / null = none)
    private int nextInputBufferIndex = -1;
    private ByteBuffer nextInputBuffer;

    private Context context;
    private Activity activity;
    private MediaCodec videoDecoder;
    private Thread rendererThread;
    private boolean needsSpsBitstreamFixup, isExynos4;
    private boolean adaptivePlayback, directSubmit, fusedIdrFrame;
    private boolean constrainedHighProfile;
    private boolean refFrameInvalidationAvc, refFrameInvalidationHevc, refFrameInvalidationAv1;
    private byte optimalSlicesPerFrame;
    private boolean refFrameInvalidationActive;
    private int initialWidth, initialHeight;
    private boolean invertResolution;
    private int videoFormat;
    private Surface renderTarget;
    private volatile boolean stopping;
    private CrashListener crashListener;
    private boolean reportedCrash;
    private int consecutiveCrashCount;
    private String glRenderer;
    private boolean foreground = true;
    private PerfOverlayListener perfListener;

    // Codec recovery state machine: escalating recovery strategies applied after
    // CodecException/IllegalStateException, capped at CR_MAX_TRIES attempts.
    private static final int CR_MAX_TRIES = 10;
    private static final int CR_RECOVERY_TYPE_NONE = 0;
    private static final int CR_RECOVERY_TYPE_FLUSH = 1;
    private static final int CR_RECOVERY_TYPE_RESTART = 2;
    private static final int CR_RECOVERY_TYPE_RESET = 3;
    private AtomicInteger codecRecoveryType = new AtomicInteger(CR_RECOVERY_TYPE_NONE);
    private final Object codecRecoveryMonitor = new Object();

    // Each thread that touches the MediaCodec object or any associated buffers must have a flag
    // here and must call doCodecRecoveryIfRequired() on a regular basis.
    // Quiescence flags: one bit per thread that must park before recovery runs.
    private static final int CR_FLAG_INPUT_THREAD = 0x1;
    private static final int CR_FLAG_RENDER_THREAD = 0x2;
    private static final int CR_FLAG_CHOREOGRAPHER = 0x4;
    private static final int CR_FLAG_ALL = CR_FLAG_INPUT_THREAD | CR_FLAG_RENDER_THREAD | CR_FLAG_CHOREOGRAPHER;
    private int codecRecoveryThreadQuiescedFlags = 0;
    private int codecRecoveryAttempts = 0;

    private MediaFormat inputFormat;
    private MediaFormat outputFormat;
    private MediaFormat configuredFormat;

    private boolean needsBaselineSpsHack;
    private SeqParameterSet savedSps;

    // First decoder exception seen; rethrown if exceptions persist past the delay window.
    private RendererException initialException;
    private long initialExceptionTimestamp;
    private static final int EXCEPTION_REPORT_DELAY_MS = 3000;

    private VideoStats activeWindowVideoStats;
    private VideoStats lastWindowVideoStats;
    private VideoStats globalVideoStats;

    private long lastTimestampUs;
    private int lastFrameNumber;
    private int refreshRate;
    private PreferenceConfiguration prefs;

    private float minDecodeTime = Float.MAX_VALUE;
    private String minDecodeTimeFullLog = "";

    private long lastNetDataNum;
    // NOTE(review): declared raw — generic parameter appears lost in transit.
    private LinkedBlockingQueue outputBufferQueue = new LinkedBlockingQueue<>();
    private static final int OUTPUT_BUFFER_QUEUE_LIMIT = 2;
    private long lastRenderedFrameTimeNanos;
    private HandlerThread choreographerHandlerThread;
    private Handler choreographerHandler;

    private int numSpsIn;
    private int numPpsIn;
    private int numVpsIn;
    private int numFramesIn;
    private int numFramesOut;

    private int targetFps = 0;

    /** Finds an AVC decoder: a probably-safe High-profile one, else the first available. */
    private MediaCodecInfo findAvcDecoder() {
        MediaCodecInfo decoder = MediaCodecHelper.findProbableSafeDecoder("video/avc", MediaCodecInfo.CodecProfileLevel.AVCProfileHigh);
        if (decoder == null) {
            decoder = MediaCodecHelper.findFirstDecoder("video/avc");
        }
        return decoder;
    }

    /**
     * Returns true if the given decoder capabilities can sustain the configured
     * resolution and frame rate. Tries the Q performance-point API first, then the
     * M achievable-frame-rate API, then falls back to areSizeAndRateSupported().
     */
    @TargetApi(Build.VERSION_CODES.LOLLIPOP)
    private boolean decoderCanMeetPerformancePoint(MediaCodecInfo.VideoCapabilities caps, PreferenceConfiguration prefs) {
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
            MediaCodecInfo.VideoCapabilities.PerformancePoint targetPerfPoint = new MediaCodecInfo.VideoCapabilities.PerformancePoint(initialWidth, initialHeight, Math.round(prefs.fps));
            // NOTE(review): raw List — presumably List<PerformancePoint>; confirm upstream.
            List perfPoints = caps.getSupportedPerformancePoints();
            if (perfPoints != null) {
                for (MediaCodecInfo.VideoCapabilities.PerformancePoint perfPoint : perfPoints) {
                    // If we find a performance point that covers our target, we're good to go
                    if (perfPoint.covers(targetPerfPoint)) {
                        return true;
                    }
                }

                // We had performance point data but none met the specified streaming settings
                return false;
            }

            // Fall-through to try the Android M API if there's no performance point data
        }

        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
            try {
                // We'll ask the decoder what it can do for us at this resolution and see if our
                // requested frame rate falls below or inside the range of achievable frame rates.
                // NOTE(review): raw Range — presumably Range<Double>; confirm upstream.
                Range fpsRange = caps.getAchievableFrameRatesFor(initialWidth, initialHeight);
                if (fpsRange != null) {
                    return prefs.fps <= fpsRange.getUpper();
                }

                // Fall-through to try the Android L API if there's no performance point data
            } catch (IllegalArgumentException e) {
                // Video size not supported at any frame rate
                return false;
            }
        }

        // As a last resort, we will use areSizeAndRateSupported() which is explicitly NOT a
        // performance metric, but it can work at least for the purpose of determining if
        // the codec is going to die when given a stream with the specified settings.
        return caps.areSizeAndRateSupported(initialWidth, initialHeight, prefs.fps);
    }

    /** True when HEVC meets the performance point but AVC does not. */
    private boolean decoderCanMeetPerformancePointWithHevcAndNotAvc(MediaCodecInfo hevcDecoderInfo, MediaCodecInfo avcDecoderInfo, PreferenceConfiguration prefs) {
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
            MediaCodecInfo.VideoCapabilities avcCaps = avcDecoderInfo.getCapabilitiesForType("video/avc").getVideoCapabilities();
            MediaCodecInfo.VideoCapabilities hevcCaps = hevcDecoderInfo.getCapabilitiesForType("video/hevc").getVideoCapabilities();

            return !decoderCanMeetPerformancePoint(avcCaps, prefs) && decoderCanMeetPerformancePoint(hevcCaps, prefs);
        }
        else {
            // No performance data
            return false;
        }
    }

    /** True when AV1 meets the performance point but HEVC does not. */
    private boolean decoderCanMeetPerformancePointWithAv1AndNotHevc(MediaCodecInfo av1DecoderInfo, MediaCodecInfo hevcDecoderInfo, PreferenceConfiguration prefs) {
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
            MediaCodecInfo.VideoCapabilities av1Caps = av1DecoderInfo.getCapabilitiesForType("video/av01").getVideoCapabilities();
            MediaCodecInfo.VideoCapabilities hevcCaps = hevcDecoderInfo.getCapabilitiesForType("video/hevc").getVideoCapabilities();

            return !decoderCanMeetPerformancePoint(hevcCaps, prefs) && decoderCanMeetPerformancePoint(av1Caps, prefs);
        }
        else {
            // No performance data
            return false;
        }
    }

    /** True when AV1 meets the performance point but AVC does not. */
    private boolean decoderCanMeetPerformancePointWithAv1AndNotAvc(MediaCodecInfo av1DecoderInfo, MediaCodecInfo avcDecoderInfo, PreferenceConfiguration prefs) {
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
            MediaCodecInfo.VideoCapabilities avcCaps = avcDecoderInfo.getCapabilitiesForType("video/avc").getVideoCapabilities();
            MediaCodecInfo.VideoCapabilities av1Caps = av1DecoderInfo.getCapabilitiesForType("video/av01").getVideoCapabilities();

            return !decoderCanMeetPerformancePoint(avcCaps, prefs) && decoderCanMeetPerformancePoint(av1Caps, prefs);
        }
        else {
            // No performance data
            return false;
        }
    }

    /**
     * Selects an HEVC decoder, or null to fall back to AVC. Non-whitelisted decoders
     * are used only when forced by preference, HDR, >4K resolution, or when AVC
     * cannot meet the performance point.
     */
    private MediaCodecInfo findHevcDecoder(PreferenceConfiguration prefs, boolean meteredNetwork, boolean requestedHdr) {
        // Don't return anything if H.264 is forced
        if (prefs.videoFormat == PreferenceConfiguration.FormatOption.FORCE_H264) {
            return null;
        }

        // We don't try the first HEVC decoder. We'd rather fall back to hardware accelerated AVC instead
        //
        // We need HEVC Main profile, so we could pass that constant to findProbableSafeDecoder, however
        // some decoders (at least Qualcomm's Snapdragon 805) don't properly report support
        // for even required levels of HEVC.
        MediaCodecInfo hevcDecoderInfo = MediaCodecHelper.findProbableSafeDecoder("video/hevc", -1);
        if (hevcDecoderInfo != null) {
            if (!MediaCodecHelper.decoderIsWhitelistedForHevc(hevcDecoderInfo)) {
                LimeLog.info("Found HEVC decoder, but it's not whitelisted - "+hevcDecoderInfo.getName());

                // Force HEVC enabled if the user asked for it
                if (prefs.videoFormat == PreferenceConfiguration.FormatOption.FORCE_HEVC) {
                    LimeLog.info("Forcing HEVC enabled despite non-whitelisted decoder");
                }
                // HDR implies HEVC forced on, since HEVCMain10HDR10 is required for HDR.
                else if (requestedHdr) {
                    LimeLog.info("Forcing HEVC enabled for HDR streaming");
                }
                // > 4K streaming also requires HEVC, so force it on there too.
                else if (initialWidth > 4096 || initialHeight > 4096) {
                    LimeLog.info("Forcing HEVC enabled for over 4K streaming");
                }
                // Use HEVC if the H.264 decoder is unable to meet the performance point
                else if (avcDecoder != null && decoderCanMeetPerformancePointWithHevcAndNotAvc(hevcDecoderInfo, avcDecoder, prefs)) {
                    LimeLog.info("Using non-whitelisted HEVC decoder to meet performance point");
                }
                else {
                    return null;
                }
            }
        }

        return hevcDecoderInfo;
    }

    /**
     * Selects an AV1 decoder. AV1 is only considered when explicitly forced; a
     * non-whitelisted decoder is additionally accepted if it meets the performance
     * point that HEVC (or AVC, when no HEVC decoder exists) cannot.
     */
    private MediaCodecInfo findAv1Decoder(PreferenceConfiguration prefs) {
        // For now, don't use AV1 unless explicitly requested
        if (prefs.videoFormat != PreferenceConfiguration.FormatOption.FORCE_AV1) {
            return null;
        }

        MediaCodecInfo decoderInfo = MediaCodecHelper.findProbableSafeDecoder("video/av01", -1);
        if (decoderInfo != null) {
            if (!MediaCodecHelper.isDecoderWhitelistedForAv1(decoderInfo)) {
                LimeLog.info("Found AV1 decoder, but it's not whitelisted - "+decoderInfo.getName());

                // Force AV1 enabled if the user asked for it
                if (prefs.videoFormat == PreferenceConfiguration.FormatOption.FORCE_AV1) {
                    LimeLog.info("Forcing AV1 enabled despite non-whitelisted decoder");
                }
                // Use AV1 if the HEVC decoder is unable to meet the performance point
                else if (hevcDecoder != null && decoderCanMeetPerformancePointWithAv1AndNotHevc(decoderInfo, hevcDecoder, prefs)) {
                    LimeLog.info("Using non-whitelisted AV1 decoder to meet performance point");
                }
                // Use AV1 if the H.264 decoder is unable to meet the performance point and we have no HEVC decoder
                else if (hevcDecoder == null && decoderCanMeetPerformancePointWithAv1AndNotAvc(decoderInfo, avcDecoder, prefs)) {
                    LimeLog.info("Using non-whitelisted AV1 decoder to meet performance point");
                }
                else {
                    return null;
                }
            }
        }

        return decoderInfo;
    }

    /** Sets the Surface the decoder will render into. */
    public void setRenderTarget(Surface renderTarget) {
        this.renderTarget = renderTarget;
    }

    /**
     * Constructs the renderer, probes for AVC/HEVC/AV1 decoders, and caches
     * per-decoder quirks queried later by getCapabilities().
     */
    public MediaCodecDecoderRenderer(Activity activity, PreferenceConfiguration prefs,
                                     CrashListener crashListener, int consecutiveCrashCount,
                                     boolean meteredData, boolean requestedHdr, boolean invertResolution,
                                     String glRenderer, PerfOverlayListener perfListener) {
        //dumpDecoders();

        this.context = activity;
        this.activity = activity;
        this.prefs = prefs;
        this.crashListener = crashListener;
        this.consecutiveCrashCount = consecutiveCrashCount;
        this.glRenderer = glRenderer;
        this.perfListener = perfListener;
        this.invertResolution = invertResolution;

        this.activeWindowVideoStats = new VideoStats();
        this.lastWindowVideoStats = new VideoStats();
        this.globalVideoStats = new VideoStats();

        avcDecoder = findAvcDecoder();
        if (avcDecoder != null) {
            LimeLog.info("Selected AVC decoder: "+avcDecoder.getName());
        }
        else {
            LimeLog.warning("No AVC decoder found");
        }

        hevcDecoder = findHevcDecoder(prefs, meteredData, requestedHdr);
        if (hevcDecoder != null) {
            LimeLog.info("Selected HEVC decoder: "+hevcDecoder.getName());
        }
        else {
            LimeLog.info("No HEVC decoder found");
        }

        av1Decoder = findAv1Decoder(prefs);
        if (av1Decoder != null) {
            LimeLog.info("Selected AV1 decoder: "+av1Decoder.getName());
        }
        else {
            LimeLog.info("No AV1 decoder found");
        }

        // Set attributes that are queried in getCapabilities(). This must be done here
        // because getCapabilities() may be called before setup() in current versions of the common
        // library. The limitation of this is that we don't know whether we're using HEVC or AVC.
        int avcOptimalSlicesPerFrame = 0;
        int hevcOptimalSlicesPerFrame = 0;
        if (avcDecoder != null) {
            directSubmit = MediaCodecHelper.decoderCanDirectSubmit(avcDecoder.getName());
            refFrameInvalidationAvc = MediaCodecHelper.decoderSupportsRefFrameInvalidationAvc(avcDecoder.getName(), initialHeight);
            avcOptimalSlicesPerFrame = MediaCodecHelper.getDecoderOptimalSlicesPerFrame(avcDecoder.getName());

            if (directSubmit) {
                LimeLog.info("Decoder "+avcDecoder.getName()+" will use direct submit");
            }
            if (refFrameInvalidationAvc) {
                LimeLog.info("Decoder "+avcDecoder.getName()+" will use reference frame invalidation for AVC");
            }
            LimeLog.info("Decoder "+avcDecoder.getName()+" wants "+avcOptimalSlicesPerFrame+" slices per frame");
        }

        if (hevcDecoder != null) {
            refFrameInvalidationHevc = MediaCodecHelper.decoderSupportsRefFrameInvalidationHevc(hevcDecoder);
            hevcOptimalSlicesPerFrame = MediaCodecHelper.getDecoderOptimalSlicesPerFrame(hevcDecoder.getName());

            if (refFrameInvalidationHevc) {
                LimeLog.info("Decoder "+hevcDecoder.getName()+" will use reference frame invalidation for HEVC");
            }

            LimeLog.info("Decoder "+hevcDecoder.getName()+" wants "+hevcOptimalSlicesPerFrame+" slices per frame");
        }

        if (av1Decoder != null) {
            refFrameInvalidationAv1 = MediaCodecHelper.decoderSupportsRefFrameInvalidationAv1(av1Decoder);

            if (refFrameInvalidationAv1) {
                LimeLog.info("Decoder "+av1Decoder.getName()+" will use reference frame invalidation for AV1");
            }
        }

        // Use the larger of the two slices per frame preferences
        optimalSlicesPerFrame = (byte)Math.max(avcOptimalSlicesPerFrame, hevcOptimalSlicesPerFrame);
        LimeLog.info("Requesting "+optimalSlicesPerFrame+" slices per frame");

        // Odd crash counts alternate RFI off as a crash-mitigation heuristic
        if (consecutiveCrashCount % 2 == 1) {
            refFrameInvalidationAvc = refFrameInvalidationHevc = false;
            LimeLog.warning("Disabling RFI due to previous crash");
        }
    }

    /** True when an HEVC decoder was selected. */
    public boolean isHevcSupported() {
        return hevcDecoder != null;
    }

    /** True when an AVC decoder was selected. */
    public boolean isAvcSupported() {
        return avcDecoder != null;
    }

    /** True when the selected HEVC decoder advertises the Main10 HDR10 profile. */
    public boolean isHevcMain10Hdr10Supported() {
        if (hevcDecoder == null) {
            return false;
        }

        for (MediaCodecInfo.CodecProfileLevel profileLevel : hevcDecoder.getCapabilitiesForType("video/hevc").profileLevels) {
            if (profileLevel.profile == MediaCodecInfo.CodecProfileLevel.HEVCProfileMain10HDR10) {
                LimeLog.info("HEVC decoder "+hevcDecoder.getName()+" supports HEVC Main10 HDR10");
                return true;
            }
        }

        return false;
    }

    /** True when an AV1 decoder was selected. */
    public boolean isAv1Supported() {
        return av1Decoder != null;
    }

    /** True when the selected AV1 decoder advertises the Main 10 HDR10 profile. */
    public boolean isAv1Main10Supported() {
        if (av1Decoder == null) {
            return false;
        }

        for (MediaCodecInfo.CodecProfileLevel profileLevel : av1Decoder.getCapabilitiesForType("video/av01").profileLevels) {
            if (profileLevel.profile == MediaCodecInfo.CodecProfileLevel.AV1ProfileMain10HDR10) {
                LimeLog.info("AV1 decoder "+av1Decoder.getName()+" supports AV1 Main 10 HDR10");
                return true;
            }
        }

        return false;
    }

    /** Chooses Rec 709 on modern devices, Rec 601 on legacy hardware. */
    public int getPreferredColorSpace() {
        // Default to Rec 709 which is probably better supported on modern devices.
        //
        // We are sticking to Rec 601 on older devices unless the device has an HEVC decoder
        // to avoid possible regressions (and they are < 5% of installed devices). If we have
        // an HEVC decoder, we will use Rec 709 (even for H.264) since we can't choose a
        // colorspace by codec (and it's probably safe to say a SoC with HEVC decoding is
        // plenty modern enough to handle H.264 VUI colorspace info).
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O || hevcDecoder != null || av1Decoder != null) {
            return MoonBridge.COLORSPACE_REC_709;
        }
        else {
            return MoonBridge.COLORSPACE_REC_601;
        }
    }

    /** Returns full or limited color range per user preference. */
    public int getPreferredColorRange() {
        if (prefs.fullRange) {
            return MoonBridge.COLOR_RANGE_FULL;
        }
        else {
            return MoonBridge.COLOR_RANGE_LIMITED;
        }
    }

    /** Called when the stream surface becomes visible. */
    public void notifyVideoForeground() {
        foreground = true;
    }

    /** Called when the stream surface is backgrounded. */
    public void notifyVideoBackground() {
        foreground = false;
    }

    /** Returns the active MoonBridge VIDEO_FORMAT_* bitmask. */
    public int getActiveVideoFormat() {
        return this.videoFormat;
    }

    /**
     * Builds the base MediaFormat (size, frame rate, adaptive playback, color keys)
     * shared by all codec configurations.
     */
    private MediaFormat createBaseMediaFormat(String mimeType) {
        MediaFormat videoFormat = MediaFormat.createVideoFormat(mimeType, initialWidth, initialHeight);

        // Avoid setting KEY_FRAME_RATE on Lollipop and earlier to reduce compatibility risk
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
            videoFormat.setInteger(MediaFormat.KEY_FRAME_RATE, refreshRate);
        }

        // Populate keys for adaptive playback
        if (adaptivePlayback) {
            videoFormat.setInteger(MediaFormat.KEY_MAX_WIDTH, initialWidth);
            videoFormat.setInteger(MediaFormat.KEY_MAX_HEIGHT, initialHeight);
        }

        // Android 7.0 adds color options to the MediaFormat
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) {
            videoFormat.setInteger(MediaFormat.KEY_COLOR_RANGE,
                    getPreferredColorRange() == MoonBridge.COLOR_RANGE_FULL ?
                            MediaFormat.COLOR_RANGE_FULL : MediaFormat.COLOR_RANGE_LIMITED);

            // If the stream is HDR-capable, the decoder will detect transitions in color standards
            // rather than us hardcoding them into the MediaFormat.
            if ((getActiveVideoFormat() & MoonBridge.VIDEO_FORMAT_MASK_10BIT) == 0) {
                // Set color format keys when not in HDR mode, since we know they won't change
                videoFormat.setInteger(MediaFormat.KEY_COLOR_TRANSFER, MediaFormat.COLOR_TRANSFER_SDR_VIDEO);
                switch (getPreferredColorSpace()) {
                    case MoonBridge.COLORSPACE_REC_601:
                        videoFormat.setInteger(MediaFormat.KEY_COLOR_STANDARD, MediaFormat.COLOR_STANDARD_BT601_NTSC);
                        break;
                    case MoonBridge.COLORSPACE_REC_709:
                        videoFormat.setInteger(MediaFormat.KEY_COLOR_STANDARD, MediaFormat.COLOR_STANDARD_BT709);
                        break;
                    case MoonBridge.COLORSPACE_REC_2020:
                        videoFormat.setInteger(MediaFormat.KEY_COLOR_STANDARD, MediaFormat.COLOR_STANDARD_BT2020);
                        break;
                }
            }
        }

        return videoFormat;
    }

    /**
     * Configures the codec with the given format (attaching HDR static metadata when
     * present), starts it, and resets CSD submission state. Also used during codec
     * recovery, so it must leave all reconfiguration bookkeeping consistent.
     */
    private void configureAndStartDecoder(MediaFormat format) {
        // Set HDR metadata if present
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) {
            if (currentHdrMetadata != null) {
                ByteBuffer hdrStaticInfo = ByteBuffer.allocate(25).order(ByteOrder.LITTLE_ENDIAN);
                ByteBuffer hdrMetadata = ByteBuffer.wrap(currentHdrMetadata).order(ByteOrder.LITTLE_ENDIAN);

                // Create a HDMI Dynamic Range and Mastering InfoFrame as defined by CTA-861.3
                hdrStaticInfo.put((byte) 0); // Metadata type
                hdrStaticInfo.putShort(hdrMetadata.getShort()); // RX
                hdrStaticInfo.putShort(hdrMetadata.getShort()); // RY
                hdrStaticInfo.putShort(hdrMetadata.getShort()); // GX
                hdrStaticInfo.putShort(hdrMetadata.getShort()); // GY
                hdrStaticInfo.putShort(hdrMetadata.getShort()); // BX
                hdrStaticInfo.putShort(hdrMetadata.getShort()); // BY
                hdrStaticInfo.putShort(hdrMetadata.getShort()); // White X
                hdrStaticInfo.putShort(hdrMetadata.getShort()); // White Y
                hdrStaticInfo.putShort(hdrMetadata.getShort()); // Max mastering luminance
                hdrStaticInfo.putShort(hdrMetadata.getShort()); // Min mastering luminance
                hdrStaticInfo.putShort(hdrMetadata.getShort()); // Max content luminance
                hdrStaticInfo.putShort(hdrMetadata.getShort()); // Max frame average luminance

                hdrStaticInfo.rewind();
                format.setByteBuffer(MediaFormat.KEY_HDR_STATIC_INFO, hdrStaticInfo);
            }
            else if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
                format.removeKey(MediaFormat.KEY_HDR_STATIC_INFO);
            }
        }

        LimeLog.info("Configuring with format: "+format);

        videoDecoder.configure(format, renderTarget, null, 0);

        // Best-effort diagnostic: log the resolved codec name (API 21+ only)
        try {
            MediaCodecInfo __info = (android.os.Build.VERSION.SDK_INT >= 21) ? videoDecoder.getCodecInfo() : null;
            String __name = (__info != null) ? __info.getName() : "";
            LimeLog.info("Decoder name: " + __name);
        } catch (Throwable t) {
            LimeLog.info("Decoder name: ");
        }


        configuredFormat = format;

        // After reconfiguration, we must resubmit CSD buffers
        submittedCsd = false;
        vpsBuffers.clear();
        spsBuffers.clear();
        ppsBuffers.clear();

        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
            // This will contain the actual accepted input format attributes
            inputFormat = videoDecoder.getInputFormat();
            LimeLog.info("Input format: "+inputFormat);
        }

        videoDecoder.setVideoScalingMode(MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT);

        // Start the decoder
        videoDecoder.start();

        // Diagnostics: dump negotiated input/output formats and check vendor keys acceptance
        try {
            MediaFormat __inF = videoDecoder.getInputFormat();
            MediaFormat __outF = videoDecoder.getOutputFormat();
            LimeLog.info("Decoder input format: " + (__inF != null ? __inF.toString() : ""));
            LimeLog.info("Decoder output format: " + (__outF != null ? __outF.toString() : ""));
        } catch (Throwable t) {
            LimeLog.info("Decoder formats unavailable after start");
        }


        if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP) {
            legacyInputBuffers = videoDecoder.getInputBuffers();
        }
    }

    /**
     * Creates and configures the named decoder with the given format.
     * Returns true on success; on failure releases the half-created codec and
     * either swallows or rethrows per throwOnCodecError.
     */
    private boolean tryConfigureDecoder(MediaCodecInfo selectedDecoderInfo, MediaFormat format, boolean throwOnCodecError) {
        boolean configured = false;
        try {
            videoDecoder = MediaCodec.createByCodecName(selectedDecoderInfo.getName());
            configureAndStartDecoder(format);
            LimeLog.info("Using codec " + selectedDecoderInfo.getName() + " for hardware decoding " + format.getString(MediaFormat.KEY_MIME));
            configured = true;
        } catch (IllegalArgumentException e) {
            e.printStackTrace();
            if (throwOnCodecError) {
                throw e;
            }
        } catch (IllegalStateException e) {
            e.printStackTrace();
            if (throwOnCodecError) {
                throw e;
            }
        } catch (IOException e) {
            e.printStackTrace();
            if (throwOnCodecError) {
                throw new RuntimeException(e);
            }
        } finally {
            if (!configured && videoDecoder != null) {
                videoDecoder.release();
                videoDecoder = null;
            }
        }
        return configured;
    }

    /**
     * Picks the decoder for the active video format, applies per-decoder quirks,
     * and configures it — retrying with progressively fewer low-latency options
     * until one configuration succeeds.
     *
     * @return 0 on success; negative error code on failure
     */
    public int initializeDecoder(boolean throwOnCodecError) {
        String mimeType;
        MediaCodecInfo selectedDecoderInfo;

        if ((videoFormat & MoonBridge.VIDEO_FORMAT_MASK_H264) != 0) {
            mimeType = "video/avc";
            selectedDecoderInfo = avcDecoder;

            if (avcDecoder == null) {
                LimeLog.severe("No available AVC decoder!");
                return -1;
            }

            if (initialWidth > 4096 || initialHeight > 4096) {
                LimeLog.severe("> 4K streaming only supported on HEVC");
                return -1;
            }

            // These fixups only apply to H264 decoders
            needsSpsBitstreamFixup = MediaCodecHelper.decoderNeedsSpsBitstreamRestrictions(selectedDecoderInfo.getName());
            needsBaselineSpsHack = MediaCodecHelper.decoderNeedsBaselineSpsHack(selectedDecoderInfo.getName());
            constrainedHighProfile = MediaCodecHelper.decoderNeedsConstrainedHighProfile(selectedDecoderInfo.getName());
            isExynos4 = MediaCodecHelper.isExynos4Device();
            if (needsSpsBitstreamFixup) {
                LimeLog.info("Decoder "+selectedDecoderInfo.getName()+" needs SPS bitstream restrictions fixup");
            }
            if (needsBaselineSpsHack) {
                LimeLog.info("Decoder "+selectedDecoderInfo.getName()+" needs baseline SPS hack");
            }
            if (constrainedHighProfile) {
                LimeLog.info("Decoder "+selectedDecoderInfo.getName()+" needs constrained high profile");
            }
            if (isExynos4) {
                LimeLog.info("Decoder "+selectedDecoderInfo.getName()+" is on Exynos 4");
            }

            refFrameInvalidationActive = refFrameInvalidationAvc;
        }
        else if ((videoFormat & MoonBridge.VIDEO_FORMAT_MASK_H265) != 0) {
            mimeType = "video/hevc";
            selectedDecoderInfo = hevcDecoder;

            if (hevcDecoder == null) {
                LimeLog.severe("No available HEVC decoder!");
                return -2;
            }

            refFrameInvalidationActive = refFrameInvalidationHevc;
        }
        else if ((videoFormat & MoonBridge.VIDEO_FORMAT_MASK_AV1) != 0) {
            mimeType = "video/av01";
            selectedDecoderInfo = av1Decoder;

            if (av1Decoder == null) {
                LimeLog.severe("No available AV1 decoder!");
                return -2;
            }

            refFrameInvalidationActive = refFrameInvalidationAv1;
        }
        else {
            // Unknown format
            LimeLog.severe("Unknown format");
            return -3;
        }
        adaptivePlayback = MediaCodecHelper.decoderSupportsAdaptivePlayback(selectedDecoderInfo, mimeType);
        fusedIdrFrame = MediaCodecHelper.decoderSupportsFusedIdrFrame(selectedDecoderInfo, mimeType);

        for (int tryNumber = 0;; tryNumber++) {
            LimeLog.info("Decoder configuration try: "+tryNumber);

            MediaFormat mediaFormat = createBaseMediaFormat(mimeType);
            // This will try low latency options until we find one that works (or we give up).
            boolean newFormat = MediaCodecHelper.setDecoderLowLatencyOptions(mediaFormat, selectedDecoderInfo, prefs.enableUltraLowLatency, tryNumber);
            // TODO: color format selection (translated from original comment)
//            MediaCodecInfo.CodecCapabilities codecCapabilities = selectedDecoderInfo.getCapabilitiesForType(mimeType);
//            int[] colorFormats=codecCapabilities.colorFormats;
//            for (int colorFormat : colorFormats) {
//                LimeLog.info("Decoder configuration colorFormats: "+colorFormat);
//            }
            // Throw the underlying codec exception on the last attempt if the caller requested it
            if (tryConfigureDecoder(selectedDecoderInfo, mediaFormat, !newFormat && throwOnCodecError)) {
                // Success!
                break;
            }

            if (!newFormat) {
                // We couldn't even configure a decoder without any low latency options
                return -5;
            }
        }

        if (USE_FRAME_RENDER_TIME && Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
            videoDecoder.setOnFrameRenderedListener(new MediaCodec.OnFrameRenderedListener() {
                @Override
                public void onFrameRendered(MediaCodec mediaCodec, long presentationTimeUs, long renderTimeNanos) {
                    long delta = (renderTimeNanos / 1000000L) - (presentationTimeUs / 1000);
                    if (delta >= 0 && delta < 1000) {
                        if (USE_FRAME_RENDER_TIME) {
                            activeWindowVideoStats.totalTimeMs += delta;
                        }
                    }
                }
            }, null);
        }

        return 0;
    }

    /**
     * VideoDecoderRenderer entry point: records stream parameters (swapping
     * width/height when invertResolution is set) and initializes the decoder.
     */
    @Override
    public int setup(int format, int width, int height, int redrawRate) {
        this.targetFps = (redrawRate > 0 ? redrawRate : 60);
        this.initialWidth = invertResolution ? height : width;
        this.initialHeight = invertResolution ? width : height;
        this.videoFormat = format;
        this.refreshRate = redrawRate;

        return initializeDecoder(false);
    }

    /**
     * All threads that interact with the MediaCodec instance must call this function regularly!
     * Quiesces all codec threads, then performs the pending recovery (flush,
     * restart, reset, or full recreate), escalating on failure.
     *
     * @return false when no recovery was pending, true otherwise
     */
    private boolean doCodecRecoveryIfRequired(int quiescenceFlag) {
        // NB: We cannot check 'stopping' here because we could end up bailing in a partially
        // quiesced state that will cause the quiesced threads to never wake up.
        if (codecRecoveryType.get() == CR_RECOVERY_TYPE_NONE) {
            // Common case
            return false;
        }

        // We need some sort of recovery, so quiesce all threads before starting that
        synchronized (codecRecoveryMonitor) {
            if (choreographerHandlerThread == null) {
                // If we have no choreographer thread, we can just mark that as quiesced right now.
                codecRecoveryThreadQuiescedFlags |= CR_FLAG_CHOREOGRAPHER;
            }

            codecRecoveryThreadQuiescedFlags |= quiescenceFlag;

            // This is the final thread to quiesce, so let's perform the codec recovery now.
            if (codecRecoveryThreadQuiescedFlags == CR_FLAG_ALL) {
                // Input and output buffers are invalidated by stop() and reset().
                nextInputBuffer = null;
                nextInputBufferIndex = -1;
                outputBufferQueue.clear();

                // If we just need a flush, do so now with all threads quiesced.
                if (codecRecoveryType.get() == CR_RECOVERY_TYPE_FLUSH) {
                    LimeLog.warning("Flushing decoder");
                    try {
                        videoDecoder.flush();
                        codecRecoveryType.set(CR_RECOVERY_TYPE_NONE);
                    } catch (IllegalStateException e) {
                        e.printStackTrace();

                        // Something went wrong during the restart, let's use a bigger hammer
                        // and try a reset instead.
                        codecRecoveryType.set(CR_RECOVERY_TYPE_RESTART);
                    }
                }

                // We don't count flushes as codec recovery attempts
                if (codecRecoveryType.get() != CR_RECOVERY_TYPE_NONE) {
                    codecRecoveryAttempts++;
                    LimeLog.info("Codec recovery attempt: "+codecRecoveryAttempts);
                }

                // For "recoverable" exceptions, we can just stop, reconfigure, and restart.
                if (codecRecoveryType.get() == CR_RECOVERY_TYPE_RESTART) {
                    LimeLog.warning("Trying to restart decoder after CodecException");
                    try {
                        videoDecoder.stop();
                        configureAndStartDecoder(configuredFormat);
                        codecRecoveryType.set(CR_RECOVERY_TYPE_NONE);
                    } catch (IllegalArgumentException e) {
                        e.printStackTrace();

                        // Our Surface is probably invalid, so just stop
                        stopping = true;
                        codecRecoveryType.set(CR_RECOVERY_TYPE_NONE);
                    } catch (IllegalStateException e) {
                        e.printStackTrace();

                        // Something went wrong during the restart, let's use a bigger hammer
                        // and try a reset instead.
                        codecRecoveryType.set(CR_RECOVERY_TYPE_RESET);
                    }
                }

                // For "non-recoverable" exceptions on L+, we can call reset() to recover
                // without having to recreate the entire decoder again.
                if (codecRecoveryType.get() == CR_RECOVERY_TYPE_RESET && Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
                    LimeLog.warning("Trying to reset decoder after CodecException");
                    try {
                        videoDecoder.reset();
                        configureAndStartDecoder(configuredFormat);
                        codecRecoveryType.set(CR_RECOVERY_TYPE_NONE);
                    } catch (IllegalArgumentException e) {
                        e.printStackTrace();

                        // Our Surface is probably invalid, so just stop
                        stopping = true;
                        codecRecoveryType.set(CR_RECOVERY_TYPE_NONE);
                    } catch (IllegalStateException e) {
                        e.printStackTrace();

                        // Something went wrong during the reset, we'll have to resort to
                        // releasing and recreating the decoder now.
                    }
                }

                // If we _still_ haven't managed to recover, go for the nuclear option and just
                // throw away the old decoder and reinitialize a new one from scratch.
                if (codecRecoveryType.get() == CR_RECOVERY_TYPE_RESET) {
                    LimeLog.warning("Trying to recreate decoder after CodecException");
                    videoDecoder.release();

                    try {
                        int err = initializeDecoder(true);
                        if (err != 0) {
                            throw new IllegalStateException("Decoder reset failed: " + err);
                        }
                        codecRecoveryType.set(CR_RECOVERY_TYPE_NONE);
                    } catch (IllegalArgumentException e) {
                        e.printStackTrace();

                        // Our Surface is probably invalid, so just stop
                        stopping = true;
                        codecRecoveryType.set(CR_RECOVERY_TYPE_NONE);
                    } catch (IllegalStateException e) {
                        // If we failed to recover after all of these attempts, just crash
                        if (!reportedCrash) {
                            reportedCrash = true;
                            crashListener.notifyCrash(e);
                        }
                        throw new RendererException(this, e);
                    }
                }

                // Wake all quiesced threads and allow them to begin work again
                codecRecoveryThreadQuiescedFlags = 0;
                codecRecoveryMonitor.notifyAll();
            }
            else {
                // If we haven't quiesced all threads yet, wait to be signalled after recovery.
                // The final thread to be quiesced will handle the codec recovery.
                while (codecRecoveryType.get() != CR_RECOVERY_TYPE_NONE) {
                    try {
                        LimeLog.info("Waiting to quiesce decoder threads: "+codecRecoveryThreadQuiescedFlags);
                        codecRecoveryMonitor.wait(1000);
                    } catch (InterruptedException e) {
                        e.printStackTrace();

                        // InterruptedException clears the thread's interrupt status. Since we can't
                        // handle that here, we will re-interrupt the thread to set the interrupt
                        // status back to true.
                        Thread.currentThread().interrupt();

                        break;
                    }
                }
            }
        }

        return true;
    }

    /**
     * Classifies a decoder exception and schedules the appropriate recovery
     * (flush/restart/reset promotion ladder). Returns true only when the
     * exception is transient and may be ignored.
     */
    private boolean handleDecoderException(IllegalStateException e) {
        // Eat decoder exceptions if we're in the process of stopping
        if (stopping) {
            return false;
        }

        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP && e instanceof CodecException) {
            CodecException codecExc = (CodecException) e;

            if (codecExc.isTransient()) {
                // We'll let transient exceptions go
                LimeLog.warning(codecExc.getDiagnosticInfo());
                return true;
            }

            LimeLog.severe(codecExc.getDiagnosticInfo());

            // We can attempt a recovery or reset at this stage to try to start decoding again
            if (codecRecoveryAttempts < CR_MAX_TRIES) {
                // If the exception is non-recoverable or we already require a reset, perform a reset.
                // If we have no prior unrecoverable failure, we will try a restart instead.
                if (codecExc.isRecoverable()) {
                    if (codecRecoveryType.compareAndSet(CR_RECOVERY_TYPE_NONE, CR_RECOVERY_TYPE_RESTART)) {
                        LimeLog.info("Decoder requires restart for recoverable CodecException");
                        e.printStackTrace();
                    }
                    else if (codecRecoveryType.compareAndSet(CR_RECOVERY_TYPE_FLUSH, CR_RECOVERY_TYPE_RESTART)) {
                        LimeLog.info("Decoder flush promoted to restart for recoverable CodecException");
                        e.printStackTrace();
                    }
                    else if (codecRecoveryType.get() != CR_RECOVERY_TYPE_RESET && codecRecoveryType.get() != CR_RECOVERY_TYPE_RESTART) {
                        throw new IllegalStateException("Unexpected codec recovery type: " + codecRecoveryType.get());
                    }
                }
                else if (!codecExc.isRecoverable()) {
                    if (codecRecoveryType.compareAndSet(CR_RECOVERY_TYPE_NONE, CR_RECOVERY_TYPE_RESET)) {
                        LimeLog.info("Decoder requires reset for non-recoverable CodecException");
                        e.printStackTrace();
                    }
                    else if (codecRecoveryType.compareAndSet(CR_RECOVERY_TYPE_FLUSH, CR_RECOVERY_TYPE_RESET)) {
                        LimeLog.info("Decoder flush promoted to reset for non-recoverable CodecException");
                        e.printStackTrace();
                    }
                    else if (codecRecoveryType.compareAndSet(CR_RECOVERY_TYPE_RESTART, CR_RECOVERY_TYPE_RESET)) {
                        LimeLog.info("Decoder restart promoted to reset for non-recoverable CodecException");
                        e.printStackTrace();
                    }
                    else if (codecRecoveryType.get() != CR_RECOVERY_TYPE_RESET) {
                        throw new IllegalStateException("Unexpected codec recovery type: " + codecRecoveryType.get());
                    }
                }

                // The recovery will take place when all threads reach doCodecRecoveryIfRequired().
                return false;
            }
        }
        else {
            // IllegalStateException was primarily used prior to the introduction of CodecException.
            // Recovery from this requires a full decoder reset.
            //
            // NB: CodecException is an IllegalStateException, so we must check for it first.
            if (codecRecoveryAttempts < CR_MAX_TRIES) {
                if (codecRecoveryType.compareAndSet(CR_RECOVERY_TYPE_NONE, CR_RECOVERY_TYPE_RESET)) {
                    LimeLog.info("Decoder requires reset for IllegalStateException");
                    e.printStackTrace();
                }
                else if (codecRecoveryType.compareAndSet(CR_RECOVERY_TYPE_FLUSH, CR_RECOVERY_TYPE_RESET)) {
                    LimeLog.info("Decoder flush promoted to reset for IllegalStateException");
                    e.printStackTrace();
                }
                else if (codecRecoveryType.compareAndSet(CR_RECOVERY_TYPE_RESTART, CR_RECOVERY_TYPE_RESET)) {
                    LimeLog.info("Decoder restart promoted to reset for IllegalStateException");
                    e.printStackTrace();
                }
                else if (codecRecoveryType.get() != CR_RECOVERY_TYPE_RESET) {
                    throw new IllegalStateException("Unexpected codec recovery type: " + codecRecoveryType.get());
                }

                return false;
            }
        }

        // Only throw if we're not in the middle of codec recovery
        if (codecRecoveryType.get() == CR_RECOVERY_TYPE_NONE) {
            //
            // There seems to be a race condition with decoder/surface teardown causing some
            // decoders to to throw IllegalStateExceptions even before 'stopping' is set.
            // To workaround this while allowing real exceptions to propagate, we will eat the
            // first exception. If we are still receiving exceptions 3 seconds later, we will
            // throw the original exception again.
            //
            if (initialException != null) {
                // This isn't the first time we've had an exception processing video
                if (SystemClock.uptimeMillis() - initialExceptionTimestamp >= EXCEPTION_REPORT_DELAY_MS) {
                    // It's been over 3 seconds and we're still getting exceptions. Throw the original now.
                    if (!reportedCrash) {
                        reportedCrash = true;
                        crashListener.notifyCrash(initialException);
                    }
                    throw initialException;
                }
            }
            else {
                // This is the first exception we've hit
                initialException = new RendererException(this, e);
                initialExceptionTimestamp = SystemClock.uptimeMillis();
            }
        }

        // Not transient
        return false;
    }

    /**
     * Choreographer vsync callback: gates frame release so we only render when a
     * new frame is actually due. (Method continues beyond this view.)
     */
    @Override
    public void doFrame(long frameTimeNanos) {
        // Do nothing if we're stopping
        if (stopping) {
            return;
        }

        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
            frameTimeNanos -= activity.getWindowManager().getDefaultDisplay().getAppVsyncOffsetNanos();
        }

        // Don't render unless a new frame is due. This prevents microstutter when streaming
        // at a frame rate that doesn't match the display (such as 60 FPS on 120 Hz).
        long actualFrameTimeDeltaNs = frameTimeNanos - lastRenderedFrameTimeNanos;
        long expectedFrameTimeDeltaNs = 800000000 / refreshRate; // within 80% of the next frame
        if (actualFrameTimeDeltaNs >= expectedFrameTimeDeltaNs) {
            // Render up to one frame when in frame pacing mode.
            //
            // NB: Since the queue limit is 2, we won't starve the decoder of output buffers
            // by holding onto them for too long. This also ensures we will have that 1 extra
            // frame of buffer to smooth over network/rendering jitter.
- Integer nextOutputBuffer = outputBufferQueue.poll(); - if (nextOutputBuffer != null) { - try { - if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { - videoDecoder.releaseOutputBuffer(nextOutputBuffer, frameTimeNanos); - } - else { - if (android.os.Build.VERSION.SDK_INT >= 21) { - long __ts = System.nanoTime(); - videoDecoder.releaseOutputBuffer(nextOutputBuffer, __ts); - } else { - if (android.os.Build.VERSION.SDK_INT >= 21) { - long __ts = System.nanoTime(); - videoDecoder.releaseOutputBuffer(nextOutputBuffer, __ts); -} else { - videoDecoder.releaseOutputBuffer(nextOutputBuffer, true); -} - } - } - - lastRenderedFrameTimeNanos = frameTimeNanos; - activeWindowVideoStats.totalFramesRendered++; - } catch (IllegalStateException ignored) { - try { - // Try to avoid leaking the output buffer by releasing it without rendering - videoDecoder.releaseOutputBuffer(nextOutputBuffer, false); - } catch (IllegalStateException e) { - // This will leak nextOutputBuffer, but there's really nothing else we can do - e.printStackTrace(); - handleDecoderException(e); - } - } - } - } - - // Attempt codec recovery even if we have nothing to render right now. Recovery can still - // be required even if the codec died before giving any output. 
- doCodecRecoveryIfRequired(CR_FLAG_CHOREOGRAPHER); - - // Request another callback for next frame - Choreographer.getInstance().postFrameCallback(this); - } - - private void startChoreographerThread() { - if (prefs.framePacing != PreferenceConfiguration.FRAME_PACING_BALANCED) { - // Not using Choreographer in this pacing mode - return; - } - - // We use a separate thread to avoid any main thread delays from delaying rendering - choreographerHandlerThread = new HandlerThread("Video - Choreographer", Process.THREAD_PRIORITY_URGENT_DISPLAY); - choreographerHandlerThread.start(); - - // Start the frame callbacks - choreographerHandler = new Handler(choreographerHandlerThread.getLooper()); - choreographerHandler.post(new Runnable() { - @Override - public void run() { - Choreographer.getInstance().postFrameCallback(MediaCodecDecoderRenderer.this); - } - }); - } - - private void startRendererThread() - { - rendererThread = new Thread() { - @Override - public void run() { - // Boost thread priority to reduce decoding latency - android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_DISPLAY); - - // Compute display refresh and vsync period once (fallback 60 Hz if unavailable) - long vsyncPeriodNs; - float displayHz = 60f; - try { - if (Build.VERSION.SDK_INT >= 17 && context != null) { - android.view.Display d = ((android.view.WindowManager) context.getSystemService(android.content.Context.WINDOW_SERVICE)).getDefaultDisplay(); - if (d != null) displayHz = d.getRefreshRate(); - } - } catch (Throwable ignored) {} - if (displayHz <= 0f) displayHz = 60f; - vsyncPeriodNs = (long) (1_000_000_000L / displayHz); - - // Stream cadence (targetFps set in setup(...)) - final int tfps = (targetFps > 0 ? 
targetFps : 60); - final long streamPeriodNs = (long) (1_000_000_000L / Math.max(1, tfps)); - - // Adaptive period selection to avoid added latency on high-refresh devices - final boolean highRefresh = displayHz >= 90f; - final boolean managedMode = (prefs != null && prefs.framePacing == PreferenceConfiguration.FRAME_PACING_BALANCED); - // Use stream-aligned thresholds only on lower-refresh screens while in Balanced. - final long periodNs = forceTightThresholds - ? vsyncPeriodNs - : ((managedMode && !highRefresh) ? Math.max(vsyncPeriodNs, streamPeriodNs) : vsyncPeriodNs); -boolean isC2Decoder = false; - try { - String decName = videoDecoder.getName(); - if (decName != null) { - isC2Decoder = decName.toLowerCase(java.util.Locale.US).startsWith("c2."); - } - } catch (Throwable ignored) {} - - // Aggressive/adaptive state - final double EWMA_ALPHA = 0.25; - final double MIN_FACTOR = 1.00; - final double MAX_FACTOR = 1.20; - - long lastDecoderPtsUs = 0L; - long lastPresentNs = 0L; - long lastDropNs = 0L; - int lateStreak = 0; - int tryAgainStreak = 0; - int recentDrops = 0; - - double ewmaInterArrivalNs = (1_000_000_000.0 / Math.max(1, tfps)); - double ewmaDecodeToPresentNs = periodNs * 0.7; - double ewmaJitterNs = periodNs * 0.1; - - final android.media.MediaCodec.BufferInfo info = new android.media.MediaCodec.BufferInfo(); - while (!stopping) { - - /* LATEST_ONLY_LOW_LATENCY */ - if (preferLowerDelays) { - try { - final android.media.MediaCodec.BufferInfo __tmpInfo = new android.media.MediaCodec.BufferInfo(); - int __idx = videoDecoder.dequeueOutputBuffer(__tmpInfo, 0); - int __last = -1; - long __lastPtsUs = -1L; - - // Drain non-blocking; keep only the newest buffer - while (__idx >= 0) { - final long ptsUs = __tmpInfo.presentationTimeUs; - - // Measure pure decode time at dequeue (for ALL frames, shown or discarded) - try { updateDecodeLatencyStats(ptsUs); } catch (Throwable ignored) {} - - if (__last >= 0) { - // Drop older buffer without rendering - try { 
videoDecoder.releaseOutputBuffer(__last, false); } catch (Throwable ignored) {} - } - - __last = __idx; - __lastPtsUs = ptsUs; - __idx = videoDecoder.dequeueOutputBuffer(__tmpInfo, 0); - } - - if (__last >= 0) { - final long __nowNs = System.nanoTime(); - - // Present the newest buffer ASAP (timestamped) - if (android.os.Build.VERSION.SDK_INT >= 21) { - videoDecoder.releaseOutputBuffer(__last, __nowNs); - } else { - videoDecoder.releaseOutputBuffer(__last, true); - } - - try { - activeWindowVideoStats.totalFramesRendered++; - numFramesOut++; - lastDecoderPtsUs = __lastPtsUs; - } catch (Throwable ignored) {} - - // EWMA decode->present: - if (__lastPtsUs >= 0) { - final long __d2pNs = __nowNs - (__lastPtsUs * 1000L); - ewmaDecodeToPresentNs += EWMA_ALPHA * (__d2pNs - ewmaDecodeToPresentNs); - } - - continue; - } - } catch (Throwable ignored) {} - } - /* /LATEST_ONLY_LOW_LATENCY */ - - try { - // Try to output a frame (respect policy and do quick retry within budget) - final int policyUs = getOutputDequeueTimeoutUs(); - - final long t0 = System.nanoTime(); - int outIndex = videoDecoder.dequeueOutputBuffer(info, policyUs); - final long elapsedUs = (System.nanoTime() - t0) / 1000L; - - if (outIndex == MediaCodec.INFO_TRY_AGAIN_LATER) { - tryAgainStreak++; - final int quickBackoffUs = (tryAgainStreak <= 2) ? 250 : 500; - - final int remainingUs = (policyUs > 0) ? 
Math.max(0, policyUs - (int) elapsedUs) : 0; - final int backoffUs = Math.min(remainingUs, quickBackoffUs); - - if (backoffUs > 0) { - outIndex = videoDecoder.dequeueOutputBuffer(info, backoffUs); - } - if (outIndex >= 0) { - tryAgainStreak = 0; - } - } else { - tryAgainStreak = 0; - } - - if (outIndex >= 0) { - // --- flags to manage statistics in a robust way --- - boolean statsUpdated = false; - boolean frameDropped = false; - - long presentationTimeUs = info.presentationTimeUs; - int lastIndex = outIndex; - long lastPtsUs = presentationTimeUs; - - numFramesOut++; - - // Measure decode latency AT DEQUEUE - try { updateDecodeLatencyStats(presentationTimeUs); } catch (Throwable ignored) {} - statsUpdated = true; - - // update inter-arrival - if (lastDecoderPtsUs != 0L) { - long interUs = presentationTimeUs - lastDecoderPtsUs; - if (interUs > 0) { - double sample = interUs * 1000.0; - ewmaInterArrivalNs += EWMA_ALPHA * (sample - ewmaInterArrivalNs); - } - } - lastDecoderPtsUs = presentationTimeUs; - - final PreferenceConfiguration p = prefs; // snapshot for null safety - - // Render the latest frame now if frame pacing isn't in balanced mode - if (p == null || p.framePacing != PreferenceConfiguration.FRAME_PACING_BALANCED) { - // Keep only the newest: measure decode for each new frame at DEQUEUE - while ((outIndex = videoDecoder.dequeueOutputBuffer(info, getOutputDequeueTimeoutUs())) >= 0) { - final long newPtsUs = info.presentationTimeUs; - try { updateDecodeLatencyStats(newPtsUs); } catch (Throwable ignored) {} - videoDecoder.releaseOutputBuffer(lastIndex, false); - frameDropped = true; // we're discarding the oldest one - - numFramesOut++; - lastIndex = outIndex; - presentationTimeUs = newPtsUs; - lastPtsUs = newPtsUs; - } - - if (p != null && (p.framePacing == PreferenceConfiguration.FRAME_PACING_MAX_SMOOTHNESS || - p.framePacing == PreferenceConfiguration.FRAME_PACING_CAP_FPS)) { - // Smoothness/Cap: avoid drop, present ASAP if not beyond threshold - if 
(Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { - final long nowNs = System.nanoTime(); - final long frameAgeNs = nowNs - (presentationTimeUs * 1000L); - - // Smoothness: tighter threshold 1.05..1.2× - double pressure = Math.min(1.0, (ewmaJitterNs / vsyncPeriodNs) + (recentDrops * 0.1)); - double factorSmooth = 1.2 - 0.15 * (1.0 - pressure); - factorSmooth = Math.max(1.05, Math.min(1.2, factorSmooth)); - - long dropThresholdSmoothNs = (long)(periodNs * factorSmooth); - - if (frameAgeNs >= dropThresholdSmoothNs) { - videoDecoder.releaseOutputBuffer(lastIndex, /* render */ false); - frameDropped = true; - lastDropNs = nowNs; - recentDrops = Math.min(10, recentDrops + 1); - continue; - } - - videoDecoder.releaseOutputBuffer(lastIndex, nowNs); - lastPresentNs = nowNs; - recentDrops = Math.max(0, recentDrops - 1); - - } else { - if (android.os.Build.VERSION.SDK_INT >= 21) { - long __ts = System.nanoTime(); - videoDecoder.releaseOutputBuffer(lastIndex, __ts); - } else { - videoDecoder.releaseOutputBuffer(lastIndex, true); - } - } - } - else { - // Latency mode - if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { - final long nowNs = System.nanoTime(); - final long frameAgeNs = nowNs - (presentationTimeUs * 1000L); - - // Latency: 1.0..1.15×, debounce = 1, cooldown = 0.5× - double backPressure = Math.min(1.0, (double)tryAgainStreak / 6.0); - double streamHz = Math.max(1.0, (double)tfps); - double mismatch = Math.abs((1_000_000_000.0 / streamHz) - (1_000_000_000.0 / Math.max(1.0, displayHz))) / vsyncPeriodNs; - mismatch = Math.min(2.0, mismatch); - - double factorLatency = 1.02 + 0.13 * (0.5 * (ewmaJitterNs / vsyncPeriodNs) - + 0.3 * backPressure - + 0.2 * mismatch); - factorLatency = Math.max(MIN_FACTOR, Math.min(1.15, factorLatency)); - - long dropThresholdNs = (long)(periodNs * factorLatency); - - final long sinceLastPresent = (lastPresentNs == 0L) ? 
Long.MAX_VALUE : (nowNs - lastPresentNs); - final boolean dropCooldownOk = (nowNs - lastDropNs) >= (periodNs / 2); - final boolean isLate = frameAgeNs > dropThresholdNs; - lateStreak = isLate ? (lateStreak + 1) : 0; - - final boolean shouldDrop = - isLate && - (lateStreak >= 1) && - (sinceLastPresent < (long)(periodNs * 0.5)) && - dropCooldownOk; - - if (shouldDrop) { - videoDecoder.releaseOutputBuffer(lastIndex, /* render */ false); - frameDropped = true; - lastDropNs = nowNs; - recentDrops = Math.min(10, recentDrops + 1); - continue; // stats already recorded at dequeue for this PTS - } - - videoDecoder.releaseOutputBuffer(lastIndex, nowNs); - lastPresentNs = nowNs; - if (!isLate) lateStreak = 0; - recentDrops = Math.max(0, recentDrops - 1); - - } else { - if (android.os.Build.VERSION.SDK_INT >= 21) { - long __ts = System.nanoTime(); - videoDecoder.releaseOutputBuffer(lastIndex, __ts); - } else { - videoDecoder.releaseOutputBuffer(lastIndex, true); - } - } - } - - activeWindowVideoStats.totalFramesRendered++; - } - else { - // For balanced frame pacing case, the Choreographer callback will handle rendering. - // We just put all frames into the output buffer queue and let it handle things. - - // Discard the oldest buffer if we've exceeded our limit. - // - // NB: We have to do this on the producer side because the consumer may not - // run for a while (if there is a huge mismatch between stream FPS and display - // refresh rate). 
- if (outputBufferQueue.size() == OUTPUT_BUFFER_QUEUE_LIMIT) { - try { - videoDecoder.releaseOutputBuffer(outputBufferQueue.take(), false); - frameDropped = true; - } catch (InterruptedException e) { - return; - } - } - - // Add this buffer - outputBufferQueue.add(lastIndex); - // NB: in BALANCED we don't present here; stats already updated at dequeue - } - - // --- Fallback stats update --- - // If we didn't update the stats in-branch and the frame wasn't dropped, - if (!statsUpdated && !frameDropped) { - updateDecodeLatencyStats(presentationTimeUs); - } - - } else { - switch (outIndex) { - case MediaCodec.INFO_TRY_AGAIN_LATER: - break; - case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED: - LimeLog.info("Output format changed"); - outputFormat = videoDecoder.getOutputFormat(); - LimeLog.info("New output format: " + outputFormat); - break; - default: - break; - } - } - } catch (IllegalStateException e) { - handleDecoderException(e); - } finally { - doCodecRecoveryIfRequired(CR_FLAG_RENDER_THREAD); - } - } - } - }; - rendererThread.setName("Video - Renderer (MediaCodec)"); - rendererThread.setPriority(Thread.NORM_PRIORITY + 2); - rendererThread.start(); - } - private boolean fetchNextInputBuffer() { - long startTime; - boolean codecRecovered; - - if (nextInputBuffer != null) { - // We already have an input buffer - return true; - } - - startTime = SystemClock.uptimeMillis(); - - try { - // If we don't have an input buffer index yet, fetch one now - while (nextInputBufferIndex < 0 && !stopping) { - nextInputBufferIndex = videoDecoder.dequeueInputBuffer(10000); - } - - // Get the backing ByteBuffer for the input buffer index - if (nextInputBufferIndex >= 0) { - // Using the new getInputBuffer() API on Lollipop allows - // the framework to do some performance optimizations for us - if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { - nextInputBuffer = videoDecoder.getInputBuffer(nextInputBufferIndex); - if (nextInputBuffer == null) { - // According to the Android 
docs, getInputBuffer() can return null "if the - // index is not a dequeued input buffer". I don't think this ever should - // happen but if it does, let's try to get a new input buffer next time. - nextInputBufferIndex = -1; - } - } - else { - nextInputBuffer = legacyInputBuffers[nextInputBufferIndex]; - - // Clear old input data pre-Lollipop - nextInputBuffer.clear(); - } - } - } catch (IllegalStateException e) { - handleDecoderException(e); - return false; - } finally { - codecRecovered = doCodecRecoveryIfRequired(CR_FLAG_INPUT_THREAD); - } - - // If codec recovery is required, always return false to ensure the caller will request - // an IDR frame to complete the codec recovery. - if (codecRecovered) { - return false; - } - - int deltaMs = (int)(SystemClock.uptimeMillis() - startTime); - - if (deltaMs >= 20) { - LimeLog.warning("Dequeue input buffer ran long: " + deltaMs + " ms"); - } - - if (nextInputBuffer == null) { - // We've been hung for 5 seconds and no other exception was reported, - // so generate a decoder hung exception - if (deltaMs >= 5000 && initialException == null) { - DecoderHungException decoderHungException = new DecoderHungException(deltaMs); - if (!reportedCrash) { - reportedCrash = true; - crashListener.notifyCrash(decoderHungException); - } - throw new RendererException(this, decoderHungException); - } - - return false; - } - - return true; - } - - @Override - public void start() { - startRendererThread(); - startChoreographerThread(); - } - - // !!! May be called even if setup()/start() fails !!! 
- public void prepareForStop() { - // Let the decoding code know to ignore codec exceptions now - stopping = true; - - // Halt the rendering thread - if (rendererThread != null) { - rendererThread.interrupt(); - } - - // Stop any active codec recovery operations - synchronized (codecRecoveryMonitor) { - codecRecoveryType.set(CR_RECOVERY_TYPE_NONE); - codecRecoveryMonitor.notifyAll(); - } - - // Post a quit message to the Choreographer looper (if we have one) - if (choreographerHandler != null) { - choreographerHandler.post(new Runnable() { - @Override - public void run() { - // Don't allow any further messages to be queued - choreographerHandlerThread.quit(); - - // Deregister the frame callback (if registered) - Choreographer.getInstance().removeFrameCallback(MediaCodecDecoderRenderer.this); - } - }); - } - } - - @Override - public void stop() { - // May be called already, but we'll call it now to be safe - prepareForStop(); - - // Wait for the Choreographer looper to shut down (if we have one) - if (choreographerHandlerThread != null) { - try { - choreographerHandlerThread.join(); - } catch (InterruptedException e) { - e.printStackTrace(); - - // InterruptedException clears the thread's interrupt status. Since we can't - // handle that here, we will re-interrupt the thread to set the interrupt - // status back to true. - Thread.currentThread().interrupt(); - } - } - - // Wait for the renderer thread to shut down - try { - rendererThread.join(); - } catch (InterruptedException e) { - e.printStackTrace(); - - // InterruptedException clears the thread's interrupt status. Since we can't - // handle that here, we will re-interrupt the thread to set the interrupt - // status back to true. 
- Thread.currentThread().interrupt(); - } - } - - @Override - public void cleanup() { - videoDecoder.release(); - } - - @Override - public void setHdrMode(boolean enabled, byte[] hdrMetadata) { - // HDR metadata is only supported in Android 7.0 and later, so don't bother - // restarting the codec on anything earlier than that. - if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) { - if (currentHdrMetadata != null && (!enabled || hdrMetadata == null)) { - currentHdrMetadata = null; - } - else if (enabled && hdrMetadata != null && !Arrays.equals(currentHdrMetadata, hdrMetadata)) { - currentHdrMetadata = hdrMetadata; - } - else { - // Nothing to do - return; - } - - // If we reach this point, we need to restart the MediaCodec instance to - // pick up the HDR metadata change. This will happen on the next input - // or output buffer. - - // HACK: Reset codec recovery attempt counter, since this is an expected "recovery" - codecRecoveryAttempts = 0; - - // Promote None/Flush to Restart and leave Reset alone - if (!codecRecoveryType.compareAndSet(CR_RECOVERY_TYPE_NONE, CR_RECOVERY_TYPE_RESTART)) { - codecRecoveryType.compareAndSet(CR_RECOVERY_TYPE_FLUSH, CR_RECOVERY_TYPE_RESTART); - } - } - } - - private boolean queueNextInputBuffer(long timestampUs, int codecFlags) { - boolean codecRecovered; - - try { - videoDecoder.queueInputBuffer(nextInputBufferIndex, - 0, nextInputBuffer.position(), - timestampUs, codecFlags); - - // Track enqueue time for this PTS - try { enqueueNsByPtsUs.put(timestampUs, System.nanoTime()); } catch (Throwable ignored) {} - - // We need a new buffer now - nextInputBufferIndex = -1; - nextInputBuffer = null; - } catch (IllegalStateException e) { - if (handleDecoderException(e)) { - // We encountered a transient error. In this case, just hold onto the buffer - // (to avoid leaking it), clear it, and keep it for the next frame. We'll return - // false to trigger an IDR frame to recover. 
- nextInputBuffer.clear(); - } - else { - // We encountered a non-transient error. In this case, we will simply leak the - // buffer because we cannot be sure we will ever succeed in queuing it. - nextInputBufferIndex = -1; - nextInputBuffer = null; - } - return false; - } finally { - codecRecovered = doCodecRecoveryIfRequired(CR_FLAG_INPUT_THREAD); - } - - // If codec recovery is required, always return false to ensure the caller will request - // an IDR frame to complete the codec recovery. - if (codecRecovered) { - return false; - } - - // Fetch a new input buffer now while we have some time between frames - // to have it ready immediately when the next frame arrives. - // - // We must propagate the return value here in order to properly handle - // codec recovery happening in fetchNextInputBuffer(). If we don't, we'll - // never get an IDR frame to complete the recovery process. - return fetchNextInputBuffer(); - } - - private void doProfileSpecificSpsPatching(SeqParameterSet sps) { - // Some devices benefit from setting constraint flags 4 & 5 to make this Constrained - // High Profile which allows the decoder to assume there will be no B-frames and - // reduce delay and buffering accordingly. Some devices (Marvell, Exynos 4) don't - // like it so we only set them on devices that are confirmed to benefit from it. 
- if (sps.profileIdc == 100 && constrainedHighProfile) { - LimeLog.info("Setting constraint set flags for constrained high profile"); - sps.constraintSet4Flag = true; - sps.constraintSet5Flag = true; - } - else { - // Force the constraints unset otherwise (some may be set by default) - sps.constraintSet4Flag = false; - sps.constraintSet5Flag = false; - } - } - - @SuppressWarnings("deprecation") - @Override - public int submitDecodeUnit(byte[] decodeUnitData, int decodeUnitLength, int decodeUnitType, - int frameNumber, int frameType, char frameHostProcessingLatency, - long receiveTimeMs, long enqueueTimeMs) { - if (stopping) { - // Don't bother if we're stopping - return MoonBridge.DR_OK; - } - - if (lastFrameNumber == 0) { - activeWindowVideoStats.measurementStartTimestamp = SystemClock.uptimeMillis(); - } else if (frameNumber != lastFrameNumber && frameNumber != lastFrameNumber + 1) { - // We can receive the same "frame" multiple times if it's an IDR frame. - // In that case, each frame start NALU is submitted independently. 
- activeWindowVideoStats.framesLost += frameNumber - lastFrameNumber - 1; - activeWindowVideoStats.totalFrames += frameNumber - lastFrameNumber - 1; - activeWindowVideoStats.frameLossEvents++; - } - - // Reset CSD data for each IDR frame - if (lastFrameNumber != frameNumber && frameType == MoonBridge.FRAME_TYPE_IDR) { - vpsBuffers.clear(); - spsBuffers.clear(); - ppsBuffers.clear(); - } - - lastFrameNumber = frameNumber; - - // Flip stats windows roughly every second - if (SystemClock.uptimeMillis() >= activeWindowVideoStats.measurementStartTimestamp + 1000) { - if (prefs.enablePerfOverlay || prefs.enablePerfLogging) { - VideoStats lastTwo = new VideoStats(); - lastTwo.add(lastWindowVideoStats); - lastTwo.add(activeWindowVideoStats); - VideoStatsFps fps = lastTwo.getFps(); - String decoder; - - if ((videoFormat & MoonBridge.VIDEO_FORMAT_MASK_H264) != 0) { - decoder = avcDecoder.getName(); - } else if ((videoFormat & MoonBridge.VIDEO_FORMAT_MASK_H265) != 0) { - decoder = hevcDecoder.getName(); - } else if ((videoFormat & MoonBridge.VIDEO_FORMAT_MASK_AV1) != 0) { - decoder = av1Decoder.getName(); - } else { - decoder = "(unknown)"; - } - - float decodeTimeMs = (float)lastTwo.decoderTimeMs / lastTwo.totalFramesReceived; - long rttInfo = MoonBridge.getEstimatedRttInfo(); - StringBuilder sb = new StringBuilder(); - if(prefs.enablePerfOverlayLite){ - if(TrafficStatsHelper.getPackageRxBytes(Process.myUid()) != TrafficStats.UNSUPPORTED){ - long netData=TrafficStatsHelper.getPackageRxBytes(Process.myUid())+TrafficStatsHelper.getPackageTxBytes(Process.myUid()); - if(lastNetDataNum!=0){ - sb.append(context.getString(R.string.perf_overlay_lite_bandwidth) + ": "); - float realtimeNetData=(netData-lastNetDataNum)/1024f; - if(realtimeNetData>=1000){ - sb.append(String.format("%.2f", realtimeNetData/1024f) +"M/s\t "); - }else{ - sb.append(String.format("%.2f", realtimeNetData) +"K/s\t "); - } - } - lastNetDataNum=netData; - } -// sb.append("分辨率:"); -// sb.append(initialWidth + "x" 
+ initialHeight); - sb.append(context.getString(R.string.perf_overlay_lite_network_decoding_delay) + ": "); - sb.append(context.getString(R.string.perf_overlay_lite_net,(int)(rttInfo >> 32))); - sb.append(" / "); - sb.append(context.getString(R.string.perf_overlay_lite_dectime,decodeTimeMs)); - sb.append("\t"); - sb.append(context.getString(R.string.perf_overlay_lite_packet_loss) + ": "); - sb.append(context.getString(R.string.perf_overlay_lite_netdrops,(float)lastTwo.framesLost / lastTwo.totalFrames * 100)); - sb.append("\t FPS:"); - sb.append(context.getString(R.string.perf_overlay_lite_fps, fps.totalFps)); - if(Stereo3DRenderer.isActive) { - sb.append(" "); - sb.append(context.getString(R.string.perf_overlay_ai_fps)); - sb.append(" "); - sb.append(Stereo3DRenderer.threeDFps); - sb.append(" "); - sb.append(context.getString(R.string.perf_overlay_ai_delegate)); - sb.append(" "); - sb.append(Stereo3DRenderer.renderer); - sb.append(" "); - sb.append(context.getString(R.string.perf_overlay_drawdelay, Stereo3DRenderer.drawDelay)); - } - }else{ - if(Stereo3DRenderer.isActive) { - sb.append(context.getString(R.string.perf_overlay_streamdetails, initialWidth + "x" + initialHeight, fps.totalFps)); - sb.append('\n'); - sb.append(" "); - sb.append(context.getString(R.string.perf_overlay_ai_fps)); - sb.append(" "); - sb.append(Stereo3DRenderer.threeDFps); - sb.append(" "); - sb.append(context.getString(R.string.perf_overlay_ai_delegate)); - sb.append(" "); - sb.append(Stereo3DRenderer.renderer); - sb.append(" "); - sb.append(context.getString(R.string.perf_overlay_drawdelay, Stereo3DRenderer.drawDelay)); - } else { - // If GPU renders the frames, the render FPS is the actual drawn and visible fps for the user - sb.append(context.getString(R.string.perf_overlay_streamdetails, initialWidth + "x" + initialHeight, fps.totalFps)); - } - sb.append('\n'); - sb.append(context.getString(R.string.perf_overlay_decoder, decoder)).append('\n'); - 
sb.append(context.getString(R.string.perf_overlay_incomingfps, fps.receivedFps)).append('\n'); - sb.append(context.getString(R.string.perf_overlay_renderingfps, fps.renderedFps)).append('\n'); - sb.append(context.getString(R.string.perf_overlay_netdrops, - (float)lastTwo.framesLost / lastTwo.totalFrames * 100)).append('\n'); - if(TrafficStatsHelper.getPackageRxBytes(Process.myUid()) != TrafficStats.UNSUPPORTED){ - long netData=TrafficStatsHelper.getPackageRxBytes(Process.myUid())+TrafficStatsHelper.getPackageTxBytes(Process.myUid()); - if(lastNetDataNum!=0){ - sb.append(context.getString(R.string.perf_overlay_lite_bandwidth) + ": "); - float realtimeNetData=(netData-lastNetDataNum)/1024f; - if(realtimeNetData>=1000){ - sb.append(String.format("%.2f", realtimeNetData/1024f) +"M/s\n"); - }else{ - sb.append(String.format("%.2f", realtimeNetData) +"K/s\n"); - } - } - lastNetDataNum=netData; - } - sb.append(context.getString(R.string.perf_overlay_netlatency, - (int)(rttInfo >> 32), (int)rttInfo)).append('\n'); - if (lastTwo.framesWithHostProcessingLatency > 0) { - sb.append(context.getString(R.string.perf_overlay_hostprocessinglatency, - (float)lastTwo.minHostProcessingLatency / 10, - (float)lastTwo.maxHostProcessingLatency / 10, - (float)lastTwo.totalHostProcessingLatency / 10 / lastTwo.framesWithHostProcessingLatency)).append('\n'); - } - sb.append(context.getString(R.string.perf_overlay_dectime, decodeTimeMs)); - } - String fullLog = sb.toString(); - if(prefs.enablePerfOverlay) { - perfListener.onPerfUpdate(fullLog); - } - // Best latency is only met at requested highest fps, rest can be ignored - Boolean targetFpsMatched = ((int) fps.totalFps == (int) prefs.fps); - if(minDecodeTime > decodeTimeMs && targetFpsMatched) { - minDecodeTime = decodeTimeMs; - minDecodeTimeFullLog = fullLog; - } - } - globalVideoStats.add(activeWindowVideoStats); - lastWindowVideoStats.copy(activeWindowVideoStats); - activeWindowVideoStats.clear(); - 
activeWindowVideoStats.measurementStartTimestamp = SystemClock.uptimeMillis(); - } - - boolean csdSubmittedForThisFrame = false; - - // IDR frames require special handling for CSD buffer submission - if (frameType == MoonBridge.FRAME_TYPE_IDR) { - // H264 SPS - if (decodeUnitType == MoonBridge.BUFFER_TYPE_SPS && (videoFormat & MoonBridge.VIDEO_FORMAT_MASK_H264) != 0) { - numSpsIn++; - - ByteBuffer spsBuf = ByteBuffer.wrap(decodeUnitData); - int startSeqLen = decodeUnitData[2] == 0x01 ? 3 : 4; - - // Skip to the start of the NALU data - spsBuf.position(startSeqLen + 1); - - // The H264Utils.readSPS function safely handles - // Annex B NALUs (including NALUs with escape sequences) - SeqParameterSet sps = H264Utils.readSPS(spsBuf); - - // Some decoders rely on H264 level to decide how many buffers are needed - // Since we only need one frame buffered, we'll set the level as low as we can - // for known resolution combinations. Reference frame invalidation may need - // these, so leave them be for those decoders. - if (!refFrameInvalidationActive) { - if (initialWidth <= 720 && initialHeight <= 480 && refreshRate <= 60) { - // Max 5 buffered frames at 720x480x60 - LimeLog.info("Patching level_idc to 31"); - sps.levelIdc = 31; - } - else if (initialWidth <= 1280 && initialHeight <= 720 && refreshRate <= 60) { - // Max 5 buffered frames at 1280x720x60 - LimeLog.info("Patching level_idc to 32"); - sps.levelIdc = 32; - } - else if (initialWidth <= 1920 && initialHeight <= 1080 && refreshRate <= 60) { - // Max 4 buffered frames at 1920x1080x64 - LimeLog.info("Patching level_idc to 42"); - sps.levelIdc = 42; - } - else { - // Leave the profile alone (currently 5.0) - } - } - - // TI OMAP4 requires a reference frame count of 1 to decode successfully. Exynos 4 - // also requires this fixup. - // - // I'm doing this fixup for all devices because I haven't seen any devices that - // this causes issues for. 
At worst, it seems to do nothing and at best it fixes - // issues with video lag, hangs, and crashes. - // - // It does break reference frame invalidation, so we will not do that for decoders - // where we've enabled reference frame invalidation. - if (!refFrameInvalidationActive) { - LimeLog.info("Patching num_ref_frames in SPS"); - sps.numRefFrames = 1; - } - - // GFE 2.5.11 changed the SPS to add additional extensions. Some devices don't like these - // so we remove them here on old devices unless these devices also support HEVC. - // See getPreferredColorSpace() for further information. - if (Build.VERSION.SDK_INT < Build.VERSION_CODES.O && - sps.vuiParams != null && - hevcDecoder == null && - av1Decoder == null) { - sps.vuiParams.videoSignalTypePresentFlag = false; - sps.vuiParams.colourDescriptionPresentFlag = false; - sps.vuiParams.chromaLocInfoPresentFlag = false; - } - - // Some older devices used to choke on a bitstream restrictions, so we won't provide them - // unless explicitly whitelisted. For newer devices, leave the bitstream restrictions present. - if (needsSpsBitstreamFixup || isExynos4 || Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) { - // The SPS that comes in the current H264 bytestream doesn't set bitstream_restriction_flag - // or max_dec_frame_buffering which increases decoding latency on Tegra. 
- - // If the encoder didn't include VUI parameters in the SPS, add them now - if (sps.vuiParams == null) { - LimeLog.info("Adding VUI parameters"); - sps.vuiParams = new VUIParameters(); - } - - // GFE 2.5.11 started sending bitstream restrictions - if (sps.vuiParams.bitstreamRestriction == null) { - LimeLog.info("Adding bitstream restrictions"); - sps.vuiParams.bitstreamRestriction = new VUIParameters.BitstreamRestriction(); - sps.vuiParams.bitstreamRestriction.motionVectorsOverPicBoundariesFlag = true; - sps.vuiParams.bitstreamRestriction.maxBytesPerPicDenom = 2; - sps.vuiParams.bitstreamRestriction.maxBitsPerMbDenom = 1; - sps.vuiParams.bitstreamRestriction.log2MaxMvLengthHorizontal = 16; - sps.vuiParams.bitstreamRestriction.log2MaxMvLengthVertical = 16; - sps.vuiParams.bitstreamRestriction.numReorderFrames = 0; - } - else { - LimeLog.info("Patching bitstream restrictions"); - } - - // Some devices throw errors if maxDecFrameBuffering < numRefFrames - sps.vuiParams.bitstreamRestriction.maxDecFrameBuffering = sps.numRefFrames; - - // These values are the defaults for the fields, but they are more aggressive - // than what GFE sends in 2.5.11, but it doesn't seem to cause picture problems. - // We'll leave these alone for "modern" devices just in case they care. - if (Build.VERSION.SDK_INT < Build.VERSION_CODES.O) { - sps.vuiParams.bitstreamRestriction.maxBytesPerPicDenom = 2; - sps.vuiParams.bitstreamRestriction.maxBitsPerMbDenom = 1; - } - - // log2_max_mv_length_horizontal and log2_max_mv_length_vertical are set to more - // conservative values by GFE 2.5.11. We'll let those values stand. 
- } - else if (sps.vuiParams != null) { - // Devices that didn't/couldn't get bitstream restrictions before GFE 2.5.11 - // will continue to not receive them now - sps.vuiParams.bitstreamRestriction = null; - } - - // If we need to hack this SPS to say we're baseline, do so now - if (needsBaselineSpsHack) { - LimeLog.info("Hacking SPS to baseline"); - sps.profileIdc = 66; - savedSps = sps; - } - - // Patch the SPS constraint flags - doProfileSpecificSpsPatching(sps); - - // The H264Utils.writeSPS function safely handles - // Annex B NALUs (including NALUs with escape sequences) - ByteBuffer escapedNalu = H264Utils.writeSPS(sps, decodeUnitLength); - - // Construct the patched SPS - byte[] naluBuffer = new byte[startSeqLen + 1 + escapedNalu.limit()]; - System.arraycopy(decodeUnitData, 0, naluBuffer, 0, startSeqLen + 1); - escapedNalu.get(naluBuffer, startSeqLen + 1, escapedNalu.limit()); - - // Batch this to submit together with other CSD per AOSP docs - spsBuffers.add(naluBuffer); - return MoonBridge.DR_OK; - } - else if (decodeUnitType == MoonBridge.BUFFER_TYPE_VPS) { - numVpsIn++; - - // Batch this to submit together with other CSD per AOSP docs - byte[] naluBuffer = new byte[decodeUnitLength]; - System.arraycopy(decodeUnitData, 0, naluBuffer, 0, decodeUnitLength); - vpsBuffers.add(naluBuffer); - return MoonBridge.DR_OK; - } - // Only the HEVC SPS hits this path (H.264 is handled above) - else if (decodeUnitType == MoonBridge.BUFFER_TYPE_SPS) { - numSpsIn++; - - // Batch this to submit together with other CSD per AOSP docs - byte[] naluBuffer = new byte[decodeUnitLength]; - System.arraycopy(decodeUnitData, 0, naluBuffer, 0, decodeUnitLength); - spsBuffers.add(naluBuffer); - return MoonBridge.DR_OK; - } - else if (decodeUnitType == MoonBridge.BUFFER_TYPE_PPS) { - numPpsIn++; - - // Batch this to submit together with other CSD per AOSP docs - byte[] naluBuffer = new byte[decodeUnitLength]; - System.arraycopy(decodeUnitData, 0, naluBuffer, 0, decodeUnitLength); - 
ppsBuffers.add(naluBuffer); - return MoonBridge.DR_OK; - } - else if ((videoFormat & (MoonBridge.VIDEO_FORMAT_MASK_H264 | MoonBridge.VIDEO_FORMAT_MASK_H265)) != 0) { - // If this is the first CSD blob or we aren't supporting fused IDR frames, we will - // submit the CSD blob in a separate input buffer for each IDR frame. - if (!submittedCsd || !fusedIdrFrame) { - if (!fetchNextInputBuffer()) { - return MoonBridge.DR_NEED_IDR; - } - - // Submit all CSD when we receive the first non-CSD blob in an IDR frame - for (byte[] vpsBuffer : vpsBuffers) { - nextInputBuffer.put(vpsBuffer); - } - for (byte[] spsBuffer : spsBuffers) { - nextInputBuffer.put(spsBuffer); - } - for (byte[] ppsBuffer : ppsBuffers) { - nextInputBuffer.put(ppsBuffer); - } - - if (!queueNextInputBuffer(0, MediaCodec.BUFFER_FLAG_CODEC_CONFIG)) { - return MoonBridge.DR_NEED_IDR; - } - - // Remember that we already submitted CSD for this frame, so we don't do it - // again in the fused IDR case below. - csdSubmittedForThisFrame = true; - - // Remember that we submitted CSD globally for this MediaCodec instance - submittedCsd = true; - - if (needsBaselineSpsHack) { - needsBaselineSpsHack = false; - - if (!replaySps()) { - return MoonBridge.DR_NEED_IDR; - } - - LimeLog.info("SPS replay complete"); - } - } - } - } - - if (frameHostProcessingLatency != 0) { - if (activeWindowVideoStats.minHostProcessingLatency != 0) { - activeWindowVideoStats.minHostProcessingLatency = (char) Math.min(activeWindowVideoStats.minHostProcessingLatency, frameHostProcessingLatency); - } else { - activeWindowVideoStats.minHostProcessingLatency = frameHostProcessingLatency; - } - activeWindowVideoStats.framesWithHostProcessingLatency += 1; - } - activeWindowVideoStats.maxHostProcessingLatency = (char) Math.max(activeWindowVideoStats.maxHostProcessingLatency, frameHostProcessingLatency); - activeWindowVideoStats.totalHostProcessingLatency += frameHostProcessingLatency; - - activeWindowVideoStats.totalFramesReceived++; - 
activeWindowVideoStats.totalFrames++; - - if (!FRAME_RENDER_TIME_ONLY) { - // Count time from first packet received to enqueue time as receive time - // We will count DU queue time as part of decoding, because it is directly - // caused by a slow decoder. - activeWindowVideoStats.totalTimeMs += enqueueTimeMs - receiveTimeMs; - } - - if (!fetchNextInputBuffer()) { - return MoonBridge.DR_NEED_IDR; - } - - int codecFlags = 0; - - if (frameType == MoonBridge.FRAME_TYPE_IDR) { - codecFlags |= MediaCodec.BUFFER_FLAG_SYNC_FRAME; - - // If we are using fused IDR frames, submit the CSD with each IDR frame - if (fusedIdrFrame && !csdSubmittedForThisFrame) { - for (byte[] vpsBuffer : vpsBuffers) { - nextInputBuffer.put(vpsBuffer); - } - for (byte[] spsBuffer : spsBuffers) { - nextInputBuffer.put(spsBuffer); - } - for (byte[] ppsBuffer : ppsBuffers) { - nextInputBuffer.put(ppsBuffer); - } - } - } - - long timestampUs = enqueueTimeMs * 1000; - if (timestampUs <= lastTimestampUs) { - // We can't submit multiple buffers with the same timestamp - // so bump it up by one before queuing - timestampUs = lastTimestampUs + 1; - } - lastTimestampUs = timestampUs; - - numFramesIn++; - - if (decodeUnitLength > nextInputBuffer.limit() - nextInputBuffer.position()) { - IllegalArgumentException exception = new IllegalArgumentException( - "Decode unit length "+decodeUnitLength+" too large for input buffer "+nextInputBuffer.limit()); - if (!reportedCrash) { - reportedCrash = true; - crashListener.notifyCrash(exception); - } - throw new RendererException(this, exception); - } - - // Copy data from our buffer list into the input buffer - nextInputBuffer.put(decodeUnitData, 0, decodeUnitLength); - - if (!queueNextInputBuffer(timestampUs, codecFlags)) { - return MoonBridge.DR_NEED_IDR; - } - - return MoonBridge.DR_OK; - } - - private boolean replaySps() { - if (!fetchNextInputBuffer()) { - return false; - } - - // Write the Annex B header - nextInputBuffer.put(new byte[]{0x00, 0x00, 0x00, 0x01, 
0x67}); - - // Switch the H264 profile back to high - savedSps.profileIdc = 100; - - // Patch the SPS constraint flags - doProfileSpecificSpsPatching(savedSps); - - // The H264Utils.writeSPS function safely handles - // Annex B NALUs (including NALUs with escape sequences) - ByteBuffer escapedNalu = H264Utils.writeSPS(savedSps, 128); - nextInputBuffer.put(escapedNalu); - - // No need for the SPS anymore - savedSps = null; - - // Queue the new SPS - return queueNextInputBuffer(0, MediaCodec.BUFFER_FLAG_CODEC_CONFIG); - } - - @Override - public int getCapabilities() { - int capabilities = 0; - - // Request the optimal number of slices per frame for this decoder - capabilities |= MoonBridge.CAPABILITY_SLICES_PER_FRAME(optimalSlicesPerFrame); - - // Enable reference frame invalidation on supported hardware - if (refFrameInvalidationAvc) { - capabilities |= MoonBridge.CAPABILITY_REFERENCE_FRAME_INVALIDATION_AVC; - } - if (refFrameInvalidationHevc) { - capabilities |= MoonBridge.CAPABILITY_REFERENCE_FRAME_INVALIDATION_HEVC; - } - if (refFrameInvalidationAv1) { - capabilities |= MoonBridge.CAPABILITY_REFERENCE_FRAME_INVALIDATION_AV1; - } - - // Enable direct submit on supported hardware - if (directSubmit) { - capabilities |= MoonBridge.CAPABILITY_DIRECT_SUBMIT; - } - - return capabilities; - } - - public int getAverageEndToEndLatency() { - if (globalVideoStats.totalFramesReceived == 0) { - return 0; - } - return (int)(globalVideoStats.totalTimeMs / globalVideoStats.totalFramesReceived); - } - - public int getAverageDecoderLatency() { - if (globalVideoStats.totalFramesReceived == 0) { - return 0; - } - return (int)(globalVideoStats.decoderTimeMs / globalVideoStats.totalFramesReceived); - } - - public Boolean performanceWasTracked() { - return minDecodeTime < Float.MAX_VALUE; - } - - @SuppressLint("DefaultLocale") - public String getMinDecoderLatency() { - return String.format("%1$.2f", minDecodeTime); - } - - public String getMinDecoderLatencyFullLog() { - return 
minDecodeTimeFullLog; - } - - static class DecoderHungException extends RuntimeException { - private int hangTimeMs; - - DecoderHungException(int hangTimeMs) { - this.hangTimeMs = hangTimeMs; - } - - public String toString() { - String str = ""; - - str += "Hang time: "+hangTimeMs+" ms"+ RendererException.DELIMITER; - str += super.toString(); - - return str; - } - } - - static class RendererException extends RuntimeException { - private static final long serialVersionUID = 8985937536997012406L; - protected static final String DELIMITER = BuildConfig.DEBUG ? "\n" : " | "; - - private String text; - - RendererException(MediaCodecDecoderRenderer renderer, Exception e) { - this.text = generateText(renderer, e); - } - - public String toString() { - return text; - } - - private String generateText(MediaCodecDecoderRenderer renderer, Exception originalException) { - String str; - - if (renderer.numVpsIn == 0 && renderer.numSpsIn == 0 && renderer.numPpsIn == 0) { - str = "PreSPSError"; - } - else if (renderer.numSpsIn > 0 && renderer.numPpsIn == 0) { - str = "PrePPSError"; - } - else if (renderer.numPpsIn > 0 && renderer.numFramesIn == 0) { - str = "PreIFrameError"; - } - else if (renderer.numFramesIn > 0 && renderer.outputFormat == null) { - str = "PreOutputConfigError"; - } - else if (renderer.outputFormat != null && renderer.numFramesOut == 0) { - str = "PreOutputError"; - } - else if (renderer.numFramesOut <= renderer.refreshRate * 30) { - str = "EarlyOutputError"; - } - else { - str = "ErrorWhileStreaming"; - } - - str += "Format: "+String.format("%x", renderer.videoFormat)+DELIMITER; - str += "AVC Decoder: "+((renderer.avcDecoder != null) ? renderer.avcDecoder.getName():"(none)")+DELIMITER; - str += "HEVC Decoder: "+((renderer.hevcDecoder != null) ? renderer.hevcDecoder.getName():"(none)")+DELIMITER; - str += "AV1 Decoder: "+((renderer.av1Decoder != null) ? 
renderer.av1Decoder.getName():"(none)")+DELIMITER; - if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP && renderer.avcDecoder != null) { - Range avcWidthRange = renderer.avcDecoder.getCapabilitiesForType("video/avc").getVideoCapabilities().getSupportedWidths(); - str += "AVC supported width range: "+avcWidthRange+DELIMITER; - if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { - try { - Range avcFpsRange = renderer.avcDecoder.getCapabilitiesForType("video/avc").getVideoCapabilities().getAchievableFrameRatesFor(renderer.initialWidth, renderer.initialHeight); - str += "AVC achievable FPS range: "+avcFpsRange+DELIMITER; - } catch (IllegalArgumentException e) { - str += "AVC achievable FPS range: UNSUPPORTED!"+DELIMITER; - } - } - } - if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP && renderer.hevcDecoder != null) { - Range hevcWidthRange = renderer.hevcDecoder.getCapabilitiesForType("video/hevc").getVideoCapabilities().getSupportedWidths(); - str += "HEVC supported width range: "+hevcWidthRange+DELIMITER; - if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { - try { - Range hevcFpsRange = renderer.hevcDecoder.getCapabilitiesForType("video/hevc").getVideoCapabilities().getAchievableFrameRatesFor(renderer.initialWidth, renderer.initialHeight); - str += "HEVC achievable FPS range: " + hevcFpsRange + DELIMITER; - } catch (IllegalArgumentException e) { - str += "HEVC achievable FPS range: UNSUPPORTED!"+DELIMITER; - } - } - } - if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP && renderer.av1Decoder != null) { - Range av1WidthRange = renderer.av1Decoder.getCapabilitiesForType("video/av01").getVideoCapabilities().getSupportedWidths(); - str += "AV1 supported width range: "+av1WidthRange+DELIMITER; - if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { - try { - Range av1FpsRange = renderer.av1Decoder.getCapabilitiesForType("video/av01").getVideoCapabilities().getAchievableFrameRatesFor(renderer.initialWidth, renderer.initialHeight); - str += 
"AV1 achievable FPS range: " + av1FpsRange + DELIMITER; - } catch (IllegalArgumentException e) { - str += "AV1 achievable FPS range: UNSUPPORTED!"+DELIMITER; - } - } - } - str += "Configured format: "+renderer.configuredFormat+DELIMITER; - str += "Input format: "+renderer.inputFormat+DELIMITER; - str += "Output format: "+renderer.outputFormat+DELIMITER; - str += "Adaptive playback: "+renderer.adaptivePlayback+DELIMITER; - str += "GL Renderer: "+renderer.glRenderer+DELIMITER; - //str += "Build fingerprint: "+Build.FINGERPRINT+DELIMITER; - if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.S) { - str += "SOC: "+Build.SOC_MANUFACTURER+" - "+Build.SOC_MODEL+DELIMITER; - str += "Performance class: "+Build.VERSION.MEDIA_PERFORMANCE_CLASS+DELIMITER; - /*str += "Vendor params: "; - List params = renderer.videoDecoder.getSupportedVendorParameters(); - if (params.isEmpty()) { - str += "NONE"; - } - else { - for (String param : params) { - str += param + " "; - } - } - str += DELIMITER;*/ - } - str += "Consecutive crashes: "+renderer.consecutiveCrashCount+DELIMITER; - str += "RFI active: "+renderer.refFrameInvalidationActive+DELIMITER; - str += "Using modern SPS patching: "+(Build.VERSION.SDK_INT >= Build.VERSION_CODES.O)+DELIMITER; - str += "Fused IDR frames: "+renderer.fusedIdrFrame+DELIMITER; - str += "Video dimensions: "+renderer.initialWidth+"x"+renderer.initialHeight+DELIMITER; - str += "FPS target: "+renderer.refreshRate+DELIMITER; - str += "Bitrate: "+renderer.prefs.bitrate+" Kbps"+DELIMITER; - str += "CSD stats: "+renderer.numVpsIn+", "+renderer.numSpsIn+", "+renderer.numPpsIn+DELIMITER; - str += "Frames in-out: "+renderer.numFramesIn+", "+renderer.numFramesOut+DELIMITER; - str += "Total frames received: "+renderer.globalVideoStats.totalFramesReceived+DELIMITER; - str += "Total frames rendered: "+renderer.globalVideoStats.totalFramesRendered+DELIMITER; - str += "Frame losses: "+renderer.globalVideoStats.framesLost+" in "+renderer.globalVideoStats.frameLossEvents+" loss 
events"+DELIMITER; - str += "Average end-to-end client latency: "+renderer.getAverageEndToEndLatency()+"ms"+DELIMITER; - str += "Average hardware decoder latency: "+renderer.getAverageDecoderLatency()+"ms"+DELIMITER; - str += "Frame pacing mode: "+renderer.prefs.framePacing+DELIMITER; - - if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { - if (originalException instanceof CodecException) { - CodecException ce = (CodecException) originalException; - - str += "Diagnostic Info: "+ce.getDiagnosticInfo()+DELIMITER; - str += "Recoverable: "+ce.isRecoverable()+DELIMITER; - str += "Transient: "+ce.isTransient()+DELIMITER; - - if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { - str += "Codec Error Code: "+ce.getErrorCode()+DELIMITER; - } - } - } - - str += originalException.toString(); - - return str; - } - } - - -private boolean isMTKDecoderName(String name) { - if (name == null) return false; - String n = name.toLowerCase(); - return n.startsWith("c2.mtk") || n.startsWith("omx.mtk"); -} - -} +package com.limelight.binding.video; + +import java.io.IOException; +import java.nio.ByteBuffer; +import java.nio.ByteOrder; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.concurrent.LinkedBlockingQueue; +import java.util.concurrent.atomic.AtomicInteger; + +import org.jcodec.codecs.h264.H264Utils; +import org.jcodec.codecs.h264.io.model.SeqParameterSet; +import org.jcodec.codecs.h264.io.model.VUIParameters; + +import com.limelight.BuildConfig; +import com.limelight.LimeLog; +import com.limelight.R; +import com.limelight.nvstream.av.video.VideoDecoderRenderer; +import com.limelight.nvstream.jni.MoonBridge; +import com.limelight.preferences.PreferenceConfiguration; +import com.limelight.utils.Stereo3DRenderer; +import com.limelight.utils.TrafficStatsHelper; + +import android.annotation.SuppressLint; +import android.util.LongSparseArray; +import android.annotation.TargetApi; +import android.app.Activity; +import 
android.content.Context; +import android.media.MediaCodec; +import android.os.Bundle; +import android.media.MediaCodecInfo; +import android.media.MediaFormat; +import android.media.MediaCodec.BufferInfo; +import android.media.MediaCodec.CodecException; +import android.net.TrafficStats; +import android.os.Build; +import android.os.Handler; +import android.os.HandlerThread; +import android.os.Process; +import android.os.SystemClock; +import android.util.Range; +import android.view.Choreographer; +import android.view.Surface; + +public class MediaCodecDecoderRenderer extends VideoDecoderRenderer implements Choreographer.FrameCallback { + // Latency profile: favor minimal end-to-end delay over absolute smoothness. + // Set true to enable a 'latest-only' fast path in the render loop. + private boolean preferLowerDelays = false; + + +// Force tight thresholds regardless of device refresh (use vsyncPeriodNs always) +private volatile boolean forceTightThresholds = false; +/** Toggle tight frame pacing thresholds globally. */ +public void setForceTightThresholds(boolean v) { this.forceTightThresholds = v; } +// Toggle at runtime if needed + // Decode latency tracking: map PTS(us) -> enqueue time (ns) + private final LongSparseArray enqueueNsByPtsUs = new LongSparseArray<>(); + + // When preferLowerDelays = true (PURE LFR/ULL): force non-blocking (0 µs). +// When preferLowerDelays = false (managed): small timeout per profile to stabilize pacing. 
+ private volatile int preferLowerDelaysTimeoutUs = 0; // default 0 for LFR; policy may override if needed + + public void setPreferLowerDelaysTimeoutUs(int us) { + this.preferLowerDelaysTimeoutUs = Math.max(0, us); // 0 allowed for LFR + } + + private int getOutputDequeueTimeoutUs() { + // PURE LFR (latest-only): use configured timeout (0 µs) + if (preferLowerDelays) return preferLowerDelaysTimeoutUs; + + if (prefs != null) { + switch (prefs.framePacing) { + case PreferenceConfiguration.FRAME_PACING_BALANCED: + return 1000; + case PreferenceConfiguration.FRAME_PACING_MAX_SMOOTHNESS: + return 2000; + case PreferenceConfiguration.FRAME_PACING_CAP_FPS: + return 1500; + default: + break; + } + } + // Default: small wait to avoid spin on buggy codecs + return 500; + } + // Update stats using real decode time: enqueue->dequeue, instead of uptime - PTS + private void updateDecodeLatencyStats(long presentationTimeUs) { + Long enqNs = enqueueNsByPtsUs.get(presentationTimeUs); + if (enqNs != null) { + enqueueNsByPtsUs.delete(presentationTimeUs); + long decMs = (System.nanoTime() - enqNs) / 1_000_000L; + if (decMs >= 0 && decMs < 1000) { + activeWindowVideoStats.decoderTimeMs += decMs; + if (!USE_FRAME_RENDER_TIME) { + activeWindowVideoStats.totalTimeMs += decMs; + } + } + } + } + + public void setPreferLowerDelays(boolean v) { this.preferLowerDelays = v; } + + + private static final boolean USE_FRAME_RENDER_TIME = false; + private static final boolean FRAME_RENDER_TIME_ONLY = USE_FRAME_RENDER_TIME && false; + + // Used on versions < 5.0 + private ByteBuffer[] legacyInputBuffers; + + private MediaCodecInfo avcDecoder; + private MediaCodecInfo hevcDecoder; + private MediaCodecInfo av1Decoder; + + private final ArrayList vpsBuffers = new ArrayList<>(); + private final ArrayList spsBuffers = new ArrayList<>(); + private final ArrayList ppsBuffers = new ArrayList<>(); + private boolean submittedCsd; + private byte[] currentHdrMetadata; + + private int nextInputBufferIndex = -1; 
+ private ByteBuffer nextInputBuffer; + + private Context context; + private Activity activity; + private MediaCodec videoDecoder; + private Thread rendererThread; + private boolean needsSpsBitstreamFixup, isExynos4; + private boolean adaptivePlayback, directSubmit, fusedIdrFrame; + private boolean constrainedHighProfile; + private boolean refFrameInvalidationAvc, refFrameInvalidationHevc, refFrameInvalidationAv1; + private byte optimalSlicesPerFrame; + private boolean refFrameInvalidationActive; + private int initialWidth, initialHeight; + private boolean invertResolution; + private int videoFormat; + private Surface renderTarget; + private volatile boolean stopping; + private CrashListener crashListener; + private boolean reportedCrash; + private int consecutiveCrashCount; + private String glRenderer; + private boolean foreground = true; + private PerfOverlayListener perfListener; + + private static final int CR_MAX_TRIES = 10; + private static final int CR_RECOVERY_TYPE_NONE = 0; + private static final int CR_RECOVERY_TYPE_FLUSH = 1; + private static final int CR_RECOVERY_TYPE_RESTART = 2; + private static final int CR_RECOVERY_TYPE_RESET = 3; + private AtomicInteger codecRecoveryType = new AtomicInteger(CR_RECOVERY_TYPE_NONE); + private final Object codecRecoveryMonitor = new Object(); + + // Each thread that touches the MediaCodec object or any associated buffers must have a flag + // here and must call doCodecRecoveryIfRequired() on a regular basis. 
+ private static final int CR_FLAG_INPUT_THREAD = 0x1; + private static final int CR_FLAG_RENDER_THREAD = 0x2; + private static final int CR_FLAG_CHOREOGRAPHER = 0x4; + private static final int CR_FLAG_ALL = CR_FLAG_INPUT_THREAD | CR_FLAG_RENDER_THREAD | CR_FLAG_CHOREOGRAPHER; + private int codecRecoveryThreadQuiescedFlags = 0; + private int codecRecoveryAttempts = 0; + + private MediaFormat inputFormat; + private MediaFormat outputFormat; + private MediaFormat configuredFormat; + + private boolean needsBaselineSpsHack; + private SeqParameterSet savedSps; + + private RendererException initialException; + private long initialExceptionTimestamp; + private static final int EXCEPTION_REPORT_DELAY_MS = 3000; + + private VideoStats activeWindowVideoStats; + private VideoStats lastWindowVideoStats; + private VideoStats globalVideoStats; + + private long lastTimestampUs; + private int lastFrameNumber; + private int refreshRate; + private PreferenceConfiguration prefs; + + private float minDecodeTime = Float.MAX_VALUE; + private String minDecodeTimeFullLog = ""; + + private long lastNetDataNum; + private LinkedBlockingQueue outputBufferQueue = new LinkedBlockingQueue<>(); + private static final int OUTPUT_BUFFER_QUEUE_LIMIT = 2; + private long lastRenderedFrameTimeNanos; + private HandlerThread choreographerHandlerThread; + private Handler choreographerHandler; + + private int numSpsIn; + private int numPpsIn; + private int numVpsIn; + private int numFramesIn; + private int numFramesOut; + + private int targetFps = 0; + + private MediaCodecInfo findAvcDecoder() { + MediaCodecInfo decoder = MediaCodecHelper.findProbableSafeDecoder("video/avc", MediaCodecInfo.CodecProfileLevel.AVCProfileHigh); + if (decoder == null) { + decoder = MediaCodecHelper.findFirstDecoder("video/avc"); + } + return decoder; + } + + @TargetApi(Build.VERSION_CODES.LOLLIPOP) + private boolean decoderCanMeetPerformancePoint(MediaCodecInfo.VideoCapabilities caps, PreferenceConfiguration prefs) { + if 
(Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) { + MediaCodecInfo.VideoCapabilities.PerformancePoint targetPerfPoint = new MediaCodecInfo.VideoCapabilities.PerformancePoint(initialWidth, initialHeight, Math.round(prefs.fps)); + List perfPoints = caps.getSupportedPerformancePoints(); + if (perfPoints != null) { + for (MediaCodecInfo.VideoCapabilities.PerformancePoint perfPoint : perfPoints) { + // If we find a performance point that covers our target, we're good to go + if (perfPoint.covers(targetPerfPoint)) { + return true; + } + } + + // We had performance point data but none met the specified streaming settings + return false; + } + + // Fall-through to try the Android M API if there's no performance point data + } + + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { + try { + // We'll ask the decoder what it can do for us at this resolution and see if our + // requested frame rate falls below or inside the range of achievable frame rates. + Range fpsRange = caps.getAchievableFrameRatesFor(initialWidth, initialHeight); + if (fpsRange != null) { + return prefs.fps <= fpsRange.getUpper(); + } + + // Fall-through to try the Android L API if there's no performance point data + } catch (IllegalArgumentException e) { + // Video size not supported at any frame rate + return false; + } + } + + // As a last resort, we will use areSizeAndRateSupported() which is explicitly NOT a + // performance metric, but it can work at least for the purpose of determining if + // the codec is going to die when given a stream with the specified settings. 
+ return caps.areSizeAndRateSupported(initialWidth, initialHeight, prefs.fps); + } + + private boolean decoderCanMeetPerformancePointWithHevcAndNotAvc(MediaCodecInfo hevcDecoderInfo, MediaCodecInfo avcDecoderInfo, PreferenceConfiguration prefs) { + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { + MediaCodecInfo.VideoCapabilities avcCaps = avcDecoderInfo.getCapabilitiesForType("video/avc").getVideoCapabilities(); + MediaCodecInfo.VideoCapabilities hevcCaps = hevcDecoderInfo.getCapabilitiesForType("video/hevc").getVideoCapabilities(); + + return !decoderCanMeetPerformancePoint(avcCaps, prefs) && decoderCanMeetPerformancePoint(hevcCaps, prefs); + } + else { + // No performance data + return false; + } + } + + private boolean decoderCanMeetPerformancePointWithAv1AndNotHevc(MediaCodecInfo av1DecoderInfo, MediaCodecInfo hevcDecoderInfo, PreferenceConfiguration prefs) { + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { + MediaCodecInfo.VideoCapabilities av1Caps = av1DecoderInfo.getCapabilitiesForType("video/av01").getVideoCapabilities(); + MediaCodecInfo.VideoCapabilities hevcCaps = hevcDecoderInfo.getCapabilitiesForType("video/hevc").getVideoCapabilities(); + + return !decoderCanMeetPerformancePoint(hevcCaps, prefs) && decoderCanMeetPerformancePoint(av1Caps, prefs); + } + else { + // No performance data + return false; + } + } + + private boolean decoderCanMeetPerformancePointWithAv1AndNotAvc(MediaCodecInfo av1DecoderInfo, MediaCodecInfo avcDecoderInfo, PreferenceConfiguration prefs) { + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { + MediaCodecInfo.VideoCapabilities avcCaps = avcDecoderInfo.getCapabilitiesForType("video/avc").getVideoCapabilities(); + MediaCodecInfo.VideoCapabilities av1Caps = av1DecoderInfo.getCapabilitiesForType("video/av01").getVideoCapabilities(); + + return !decoderCanMeetPerformancePoint(avcCaps, prefs) && decoderCanMeetPerformancePoint(av1Caps, prefs); + } + else { + // No performance data + return false; 
+ } + } + + private MediaCodecInfo findHevcDecoder(PreferenceConfiguration prefs, boolean meteredNetwork, boolean requestedHdr) { + // Don't return anything if H.264 is forced + if (prefs.videoFormat == PreferenceConfiguration.FormatOption.FORCE_H264) { + return null; + } + + // We don't try the first HEVC decoder. We'd rather fall back to hardware accelerated AVC instead + // + // We need HEVC Main profile, so we could pass that constant to findProbableSafeDecoder, however + // some decoders (at least Qualcomm's Snapdragon 805) don't properly report support + // for even required levels of HEVC. + MediaCodecInfo hevcDecoderInfo = MediaCodecHelper.findProbableSafeDecoder("video/hevc", -1); + if (hevcDecoderInfo != null) { + if (!MediaCodecHelper.decoderIsWhitelistedForHevc(hevcDecoderInfo)) { + LimeLog.info("Found HEVC decoder, but it's not whitelisted - "+hevcDecoderInfo.getName()); + + // Force HEVC enabled if the user asked for it + if (prefs.videoFormat == PreferenceConfiguration.FormatOption.FORCE_HEVC) { + LimeLog.info("Forcing HEVC enabled despite non-whitelisted decoder"); + } + // HDR implies HEVC forced on, since HEVCMain10HDR10 is required for HDR. + else if (requestedHdr) { + LimeLog.info("Forcing HEVC enabled for HDR streaming"); + } + // > 4K streaming also requires HEVC, so force it on there too. 
+ else if (initialWidth > 4096 || initialHeight > 4096) { + LimeLog.info("Forcing HEVC enabled for over 4K streaming"); + } + // Use HEVC if the H.264 decoder is unable to meet the performance point + else if (avcDecoder != null && decoderCanMeetPerformancePointWithHevcAndNotAvc(hevcDecoderInfo, avcDecoder, prefs)) { + LimeLog.info("Using non-whitelisted HEVC decoder to meet performance point"); + } + else { + return null; + } + } + } + + return hevcDecoderInfo; + } + + private MediaCodecInfo findAv1Decoder(PreferenceConfiguration prefs) { + // For now, don't use AV1 unless explicitly requested + if (prefs.videoFormat != PreferenceConfiguration.FormatOption.FORCE_AV1) { + return null; + } + + MediaCodecInfo decoderInfo = MediaCodecHelper.findProbableSafeDecoder("video/av01", -1); + if (decoderInfo != null) { + if (!MediaCodecHelper.isDecoderWhitelistedForAv1(decoderInfo)) { + LimeLog.info("Found AV1 decoder, but it's not whitelisted - "+decoderInfo.getName()); + + // Force HEVC enabled if the user asked for it + if (prefs.videoFormat == PreferenceConfiguration.FormatOption.FORCE_AV1) { + LimeLog.info("Forcing AV1 enabled despite non-whitelisted decoder"); + } + // Use AV1 if the HEVC decoder is unable to meet the performance point + else if (hevcDecoder != null && decoderCanMeetPerformancePointWithAv1AndNotHevc(decoderInfo, hevcDecoder, prefs)) { + LimeLog.info("Using non-whitelisted AV1 decoder to meet performance point"); + } + // Use AV1 if the H.264 decoder is unable to meet the performance point and we have no HEVC decoder + else if (hevcDecoder == null && decoderCanMeetPerformancePointWithAv1AndNotAvc(decoderInfo, avcDecoder, prefs)) { + LimeLog.info("Using non-whitelisted AV1 decoder to meet performance point"); + } + else { + return null; + } + } + } + + return decoderInfo; + } + + public void setRenderTarget(Surface renderTarget) { + this.renderTarget = renderTarget; + } + + public MediaCodecDecoderRenderer(Activity activity, PreferenceConfiguration prefs, 
+ CrashListener crashListener, int consecutiveCrashCount, + boolean meteredData, boolean requestedHdr, boolean invertResolution, + String glRenderer, PerfOverlayListener perfListener) { + //dumpDecoders(); + + this.context = activity; + this.activity = activity; + this.prefs = prefs; + this.crashListener = crashListener; + this.consecutiveCrashCount = consecutiveCrashCount; + this.glRenderer = glRenderer; + this.perfListener = perfListener; + this.invertResolution = invertResolution; + + this.activeWindowVideoStats = new VideoStats(); + this.lastWindowVideoStats = new VideoStats(); + this.globalVideoStats = new VideoStats(); + + avcDecoder = findAvcDecoder(); + if (avcDecoder != null) { + LimeLog.info("Selected AVC decoder: "+avcDecoder.getName()); + } + else { + LimeLog.warning("No AVC decoder found"); + } + + hevcDecoder = findHevcDecoder(prefs, meteredData, requestedHdr); + if (hevcDecoder != null) { + LimeLog.info("Selected HEVC decoder: "+hevcDecoder.getName()); + } + else { + LimeLog.info("No HEVC decoder found"); + } + + av1Decoder = findAv1Decoder(prefs); + if (av1Decoder != null) { + LimeLog.info("Selected AV1 decoder: "+av1Decoder.getName()); + } + else { + LimeLog.info("No AV1 decoder found"); + } + + // Set attributes that are queried in getCapabilities(). This must be done here + // because getCapabilities() may be called before setup() in current versions of the common + // library. The limitation of this is that we don't know whether we're using HEVC or AVC. 
+ int avcOptimalSlicesPerFrame = 0; + int hevcOptimalSlicesPerFrame = 0; + if (avcDecoder != null) { + directSubmit = MediaCodecHelper.decoderCanDirectSubmit(avcDecoder.getName()); + refFrameInvalidationAvc = MediaCodecHelper.decoderSupportsRefFrameInvalidationAvc(avcDecoder.getName(), initialHeight); + avcOptimalSlicesPerFrame = MediaCodecHelper.getDecoderOptimalSlicesPerFrame(avcDecoder.getName()); + + if (directSubmit) { + LimeLog.info("Decoder "+avcDecoder.getName()+" will use direct submit"); + } + if (refFrameInvalidationAvc) { + LimeLog.info("Decoder "+avcDecoder.getName()+" will use reference frame invalidation for AVC"); + } + LimeLog.info("Decoder "+avcDecoder.getName()+" wants "+avcOptimalSlicesPerFrame+" slices per frame"); + } + + if (hevcDecoder != null) { + refFrameInvalidationHevc = MediaCodecHelper.decoderSupportsRefFrameInvalidationHevc(hevcDecoder); + hevcOptimalSlicesPerFrame = MediaCodecHelper.getDecoderOptimalSlicesPerFrame(hevcDecoder.getName()); + + if (refFrameInvalidationHevc) { + LimeLog.info("Decoder "+hevcDecoder.getName()+" will use reference frame invalidation for HEVC"); + } + + LimeLog.info("Decoder "+hevcDecoder.getName()+" wants "+hevcOptimalSlicesPerFrame+" slices per frame"); + } + + if (av1Decoder != null) { + refFrameInvalidationAv1 = MediaCodecHelper.decoderSupportsRefFrameInvalidationAv1(av1Decoder); + + if (refFrameInvalidationAv1) { + LimeLog.info("Decoder "+av1Decoder.getName()+" will use reference frame invalidation for AV1"); + } + } + + // Use the larger of the two slices per frame preferences + optimalSlicesPerFrame = (byte)Math.max(avcOptimalSlicesPerFrame, hevcOptimalSlicesPerFrame); + LimeLog.info("Requesting "+optimalSlicesPerFrame+" slices per frame"); + + if (consecutiveCrashCount % 2 == 1) { + refFrameInvalidationAvc = refFrameInvalidationHevc = false; + LimeLog.warning("Disabling RFI due to previous crash"); + } + } + + public boolean isHevcSupported() { + return hevcDecoder != null; + } + + public boolean 
isAvcSupported() { + return avcDecoder != null; + } + + public boolean isHevcMain10Hdr10Supported() { + if (hevcDecoder == null) { + return false; + } + + for (MediaCodecInfo.CodecProfileLevel profileLevel : hevcDecoder.getCapabilitiesForType("video/hevc").profileLevels) { + if (profileLevel.profile == MediaCodecInfo.CodecProfileLevel.HEVCProfileMain10HDR10) { + LimeLog.info("HEVC decoder "+hevcDecoder.getName()+" supports HEVC Main10 HDR10"); + return true; + } + } + + return false; + } + + public boolean isAv1Supported() { + return av1Decoder != null; + } + + public boolean isAv1Main10Supported() { + if (av1Decoder == null) { + return false; + } + + for (MediaCodecInfo.CodecProfileLevel profileLevel : av1Decoder.getCapabilitiesForType("video/av01").profileLevels) { + if (profileLevel.profile == MediaCodecInfo.CodecProfileLevel.AV1ProfileMain10HDR10) { + LimeLog.info("AV1 decoder "+av1Decoder.getName()+" supports AV1 Main 10 HDR10"); + return true; + } + } + + return false; + } + + public int getPreferredColorSpace() { + // Default to Rec 709 which is probably better supported on modern devices. + // + // We are sticking to Rec 601 on older devices unless the device has an HEVC decoder + // to avoid possible regressions (and they are < 5% of installed devices). If we have + // an HEVC decoder, we will use Rec 709 (even for H.264) since we can't choose a + // colorspace by codec (and it's probably safe to say a SoC with HEVC decoding is + // plenty modern enough to handle H.264 VUI colorspace info). 
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O || hevcDecoder != null || av1Decoder != null) { + return MoonBridge.COLORSPACE_REC_709; + } + else { + return MoonBridge.COLORSPACE_REC_601; + } + } + + public int getPreferredColorRange() { + if (prefs.fullRange) { + return MoonBridge.COLOR_RANGE_FULL; + } + else { + return MoonBridge.COLOR_RANGE_LIMITED; + } + } + + public void notifyVideoForeground() { + foreground = true; + } + + public void notifyVideoBackground() { + foreground = false; + } + + public int getActiveVideoFormat() { + return this.videoFormat; + } + + private MediaFormat createBaseMediaFormat(String mimeType) { + MediaFormat videoFormat = MediaFormat.createVideoFormat(mimeType, initialWidth, initialHeight); + + // Avoid setting KEY_FRAME_RATE on Lollipop and earlier to reduce compatibility risk + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { + videoFormat.setInteger(MediaFormat.KEY_FRAME_RATE, refreshRate); + } + + // Populate keys for adaptive playback + if (adaptivePlayback) { + videoFormat.setInteger(MediaFormat.KEY_MAX_WIDTH, initialWidth); + videoFormat.setInteger(MediaFormat.KEY_MAX_HEIGHT, initialHeight); + } + + // Android 7.0 adds color options to the MediaFormat + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) { + videoFormat.setInteger(MediaFormat.KEY_COLOR_RANGE, + getPreferredColorRange() == MoonBridge.COLOR_RANGE_FULL ? + MediaFormat.COLOR_RANGE_FULL : MediaFormat.COLOR_RANGE_LIMITED); + + // If the stream is HDR-capable, the decoder will detect transitions in color standards + // rather than us hardcoding them into the MediaFormat. 
+ if ((getActiveVideoFormat() & MoonBridge.VIDEO_FORMAT_MASK_10BIT) == 0) { + // Set color format keys when not in HDR mode, since we know they won't change + videoFormat.setInteger(MediaFormat.KEY_COLOR_TRANSFER, MediaFormat.COLOR_TRANSFER_SDR_VIDEO); + switch (getPreferredColorSpace()) { + case MoonBridge.COLORSPACE_REC_601: + videoFormat.setInteger(MediaFormat.KEY_COLOR_STANDARD, MediaFormat.COLOR_STANDARD_BT601_NTSC); + break; + case MoonBridge.COLORSPACE_REC_709: + videoFormat.setInteger(MediaFormat.KEY_COLOR_STANDARD, MediaFormat.COLOR_STANDARD_BT709); + break; + case MoonBridge.COLORSPACE_REC_2020: + videoFormat.setInteger(MediaFormat.KEY_COLOR_STANDARD, MediaFormat.COLOR_STANDARD_BT2020); + break; + } + } + } + +return videoFormat; + } + + private void configureAndStartDecoder(MediaFormat format) { + // Set HDR metadata if present + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) { + if (currentHdrMetadata != null) { + ByteBuffer hdrStaticInfo = ByteBuffer.allocate(25).order(ByteOrder.LITTLE_ENDIAN); + ByteBuffer hdrMetadata = ByteBuffer.wrap(currentHdrMetadata).order(ByteOrder.LITTLE_ENDIAN); + + // Create a HDMI Dynamic Range and Mastering InfoFrame as defined by CTA-861.3 + hdrStaticInfo.put((byte) 0); // Metadata type + hdrStaticInfo.putShort(hdrMetadata.getShort()); // RX + hdrStaticInfo.putShort(hdrMetadata.getShort()); // RY + hdrStaticInfo.putShort(hdrMetadata.getShort()); // GX + hdrStaticInfo.putShort(hdrMetadata.getShort()); // GY + hdrStaticInfo.putShort(hdrMetadata.getShort()); // BX + hdrStaticInfo.putShort(hdrMetadata.getShort()); // BY + hdrStaticInfo.putShort(hdrMetadata.getShort()); // White X + hdrStaticInfo.putShort(hdrMetadata.getShort()); // White Y + hdrStaticInfo.putShort(hdrMetadata.getShort()); // Max mastering luminance + hdrStaticInfo.putShort(hdrMetadata.getShort()); // Min mastering luminance + hdrStaticInfo.putShort(hdrMetadata.getShort()); // Max content luminance + hdrStaticInfo.putShort(hdrMetadata.getShort()); // Max 
frame average luminance + + hdrStaticInfo.rewind(); + format.setByteBuffer(MediaFormat.KEY_HDR_STATIC_INFO, hdrStaticInfo); + } + else if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) { + format.removeKey(MediaFormat.KEY_HDR_STATIC_INFO); + } + } + + LimeLog.info("Configuring with format: "+format); + + videoDecoder.configure(format, renderTarget, null, 0); + +try { + MediaCodecInfo __info = (android.os.Build.VERSION.SDK_INT >= 21) ? videoDecoder.getCodecInfo() : null; + String __name = (__info != null) ? __info.getName() : ""; + LimeLog.info("Decoder name: " + __name); +} catch (Throwable t) { + LimeLog.info("Decoder name: "); +} + + + configuredFormat = format; + + // After reconfiguration, we must resubmit CSD buffers + submittedCsd = false; + vpsBuffers.clear(); + spsBuffers.clear(); + ppsBuffers.clear(); + + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { + // This will contain the actual accepted input format attributes + inputFormat = videoDecoder.getInputFormat(); + LimeLog.info("Input format: "+inputFormat); + } + + videoDecoder.setVideoScalingMode(MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT); + + // Start the decoder + videoDecoder.start(); + +// Diagnostics: dump negotiated input/output formats and check vendor keys acceptance +try { + MediaFormat __inF = videoDecoder.getInputFormat(); + MediaFormat __outF = videoDecoder.getOutputFormat(); + LimeLog.info("Decoder input format: " + (__inF != null ? __inF.toString() : "")); + LimeLog.info("Decoder output format: " + (__outF != null ? 
__outF.toString() : "")); +} catch (Throwable t) { + LimeLog.info("Decoder formats unavailable after start"); +} + + + if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP) { + legacyInputBuffers = videoDecoder.getInputBuffers(); + } + } + + private boolean tryConfigureDecoder(MediaCodecInfo selectedDecoderInfo, MediaFormat format, boolean throwOnCodecError) { + boolean configured = false; + try { + videoDecoder = MediaCodec.createByCodecName(selectedDecoderInfo.getName()); + configureAndStartDecoder(format); + LimeLog.info("Using codec " + selectedDecoderInfo.getName() + " for hardware decoding " + format.getString(MediaFormat.KEY_MIME)); + configured = true; + } catch (IllegalArgumentException e) { + e.printStackTrace(); + if (throwOnCodecError) { + throw e; + } + } catch (IllegalStateException e) { + e.printStackTrace(); + if (throwOnCodecError) { + throw e; + } + } catch (IOException e) { + e.printStackTrace(); + if (throwOnCodecError) { + throw new RuntimeException(e); + } + } finally { + if (!configured && videoDecoder != null) { + videoDecoder.release(); + videoDecoder = null; + } + } + return configured; + } + + public int initializeDecoder(boolean throwOnCodecError) { + String mimeType; + MediaCodecInfo selectedDecoderInfo; + + if ((videoFormat & MoonBridge.VIDEO_FORMAT_MASK_H264) != 0) { + mimeType = "video/avc"; + selectedDecoderInfo = avcDecoder; + + if (avcDecoder == null) { + LimeLog.severe("No available AVC decoder!"); + return -1; + } + + if (initialWidth > 4096 || initialHeight > 4096) { + LimeLog.severe("> 4K streaming only supported on HEVC"); + return -1; + } + + // These fixups only apply to H264 decoders + needsSpsBitstreamFixup = MediaCodecHelper.decoderNeedsSpsBitstreamRestrictions(selectedDecoderInfo.getName()); + needsBaselineSpsHack = MediaCodecHelper.decoderNeedsBaselineSpsHack(selectedDecoderInfo.getName()); + constrainedHighProfile = MediaCodecHelper.decoderNeedsConstrainedHighProfile(selectedDecoderInfo.getName()); + isExynos4 = 
MediaCodecHelper.isExynos4Device(); + if (needsSpsBitstreamFixup) { + LimeLog.info("Decoder "+selectedDecoderInfo.getName()+" needs SPS bitstream restrictions fixup"); + } + if (needsBaselineSpsHack) { + LimeLog.info("Decoder "+selectedDecoderInfo.getName()+" needs baseline SPS hack"); + } + if (constrainedHighProfile) { + LimeLog.info("Decoder "+selectedDecoderInfo.getName()+" needs constrained high profile"); + } + if (isExynos4) { + LimeLog.info("Decoder "+selectedDecoderInfo.getName()+" is on Exynos 4"); + } + + refFrameInvalidationActive = refFrameInvalidationAvc; + } + else if ((videoFormat & MoonBridge.VIDEO_FORMAT_MASK_H265) != 0) { + mimeType = "video/hevc"; + selectedDecoderInfo = hevcDecoder; + + if (hevcDecoder == null) { + LimeLog.severe("No available HEVC decoder!"); + return -2; + } + + refFrameInvalidationActive = refFrameInvalidationHevc; + } + else if ((videoFormat & MoonBridge.VIDEO_FORMAT_MASK_AV1) != 0) { + mimeType = "video/av01"; + selectedDecoderInfo = av1Decoder; + + if (av1Decoder == null) { + LimeLog.severe("No available AV1 decoder!"); + return -2; + } + + refFrameInvalidationActive = refFrameInvalidationAv1; + } + else { + // Unknown format + LimeLog.severe("Unknown format"); + return -3; + } + adaptivePlayback = MediaCodecHelper.decoderSupportsAdaptivePlayback(selectedDecoderInfo, mimeType); + fusedIdrFrame = MediaCodecHelper.decoderSupportsFusedIdrFrame(selectedDecoderInfo, mimeType); + + for (int tryNumber = 0;; tryNumber++) { + LimeLog.info("Decoder configuration try: "+tryNumber); + + MediaFormat mediaFormat = createBaseMediaFormat(mimeType); + // This will try low latency options until we find one that works (or we give up). 
+ boolean newFormat = MediaCodecHelper.setDecoderLowLatencyOptions(mediaFormat, selectedDecoderInfo, prefs.enableUltraLowLatency, tryNumber); + //todo 色彩格式 +// MediaCodecInfo.CodecCapabilities codecCapabilities = selectedDecoderInfo.getCapabilitiesForType(mimeType); +// int[] colorFormats=codecCapabilities.colorFormats; +// for (int colorFormat : colorFormats) { +// LimeLog.info("Decoder configuration colorFormats: "+colorFormat); +// } + // Throw the underlying codec exception on the last attempt if the caller requested it + if (tryConfigureDecoder(selectedDecoderInfo, mediaFormat, !newFormat && throwOnCodecError)) { + // Success! + break; + } + + if (!newFormat) { + // We couldn't even configure a decoder without any low latency options + return -5; + } + } + + if (USE_FRAME_RENDER_TIME && Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { + videoDecoder.setOnFrameRenderedListener(new MediaCodec.OnFrameRenderedListener() { + @Override + public void onFrameRendered(MediaCodec mediaCodec, long presentationTimeUs, long renderTimeNanos) { + long delta = (renderTimeNanos / 1000000L) - (presentationTimeUs / 1000); + if (delta >= 0 && delta < 1000) { + if (USE_FRAME_RENDER_TIME) { + activeWindowVideoStats.totalTimeMs += delta; + } + } + } + }, null); + } + + return 0; + } + + @Override + public int setup(int format, int width, int height, int redrawRate) { + this.targetFps = (redrawRate > 0 ? redrawRate : 60); + this.initialWidth = invertResolution ? height : width; + this.initialHeight = invertResolution ? width : height; + this.videoFormat = format; + this.refreshRate = redrawRate; + + return initializeDecoder(false); + } + + // All threads that interact with the MediaCodec instance must call this function regularly! + private boolean doCodecRecoveryIfRequired(int quiescenceFlag) { + // NB: We cannot check 'stopping' here because we could end up bailing in a partially + // quiesced state that will cause the quiesced threads to never wake up. 
+ if (codecRecoveryType.get() == CR_RECOVERY_TYPE_NONE) { + // Common case + return false; + } + + // We need some sort of recovery, so quiesce all threads before starting that + synchronized (codecRecoveryMonitor) { + if (choreographerHandlerThread == null) { + // If we have no choreographer thread, we can just mark that as quiesced right now. + codecRecoveryThreadQuiescedFlags |= CR_FLAG_CHOREOGRAPHER; + } + + codecRecoveryThreadQuiescedFlags |= quiescenceFlag; + + // This is the final thread to quiesce, so let's perform the codec recovery now. + if (codecRecoveryThreadQuiescedFlags == CR_FLAG_ALL) { + // Input and output buffers are invalidated by stop() and reset(). + nextInputBuffer = null; + nextInputBufferIndex = -1; + outputBufferQueue.clear(); + + // If we just need a flush, do so now with all threads quiesced. + if (codecRecoveryType.get() == CR_RECOVERY_TYPE_FLUSH) { + LimeLog.warning("Flushing decoder"); + try { + videoDecoder.flush(); + codecRecoveryType.set(CR_RECOVERY_TYPE_NONE); + } catch (IllegalStateException e) { + e.printStackTrace(); + + // Something went wrong during the restart, let's use a bigger hammer + // and try a reset instead. + codecRecoveryType.set(CR_RECOVERY_TYPE_RESTART); + } + } + + // We don't count flushes as codec recovery attempts + if (codecRecoveryType.get() != CR_RECOVERY_TYPE_NONE) { + codecRecoveryAttempts++; + LimeLog.info("Codec recovery attempt: "+codecRecoveryAttempts); + } + + // For "recoverable" exceptions, we can just stop, reconfigure, and restart. 
+ if (codecRecoveryType.get() == CR_RECOVERY_TYPE_RESTART) { + LimeLog.warning("Trying to restart decoder after CodecException"); + try { + videoDecoder.stop(); + configureAndStartDecoder(configuredFormat); + codecRecoveryType.set(CR_RECOVERY_TYPE_NONE); + } catch (IllegalArgumentException e) { + e.printStackTrace(); + + // Our Surface is probably invalid, so just stop + stopping = true; + codecRecoveryType.set(CR_RECOVERY_TYPE_NONE); + } catch (IllegalStateException e) { + e.printStackTrace(); + + // Something went wrong during the restart, let's use a bigger hammer + // and try a reset instead. + codecRecoveryType.set(CR_RECOVERY_TYPE_RESET); + } + } + + // For "non-recoverable" exceptions on L+, we can call reset() to recover + // without having to recreate the entire decoder again. + if (codecRecoveryType.get() == CR_RECOVERY_TYPE_RESET && Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { + LimeLog.warning("Trying to reset decoder after CodecException"); + try { + videoDecoder.reset(); + configureAndStartDecoder(configuredFormat); + codecRecoveryType.set(CR_RECOVERY_TYPE_NONE); + } catch (IllegalArgumentException e) { + e.printStackTrace(); + + // Our Surface is probably invalid, so just stop + stopping = true; + codecRecoveryType.set(CR_RECOVERY_TYPE_NONE); + } catch (IllegalStateException e) { + e.printStackTrace(); + + // Something went wrong during the reset, we'll have to resort to + // releasing and recreating the decoder now. + } + } + + // If we _still_ haven't managed to recover, go for the nuclear option and just + // throw away the old decoder and reinitialize a new one from scratch. 
+ if (codecRecoveryType.get() == CR_RECOVERY_TYPE_RESET) { + LimeLog.warning("Trying to recreate decoder after CodecException"); + videoDecoder.release(); + + try { + int err = initializeDecoder(true); + if (err != 0) { + throw new IllegalStateException("Decoder reset failed: " + err); + } + codecRecoveryType.set(CR_RECOVERY_TYPE_NONE); + } catch (IllegalArgumentException e) { + e.printStackTrace(); + + // Our Surface is probably invalid, so just stop + stopping = true; + codecRecoveryType.set(CR_RECOVERY_TYPE_NONE); + } catch (IllegalStateException e) { + // If we failed to recover after all of these attempts, just crash + if (!reportedCrash) { + reportedCrash = true; + crashListener.notifyCrash(e); + } + throw new RendererException(this, e); + } + } + + // Wake all quiesced threads and allow them to begin work again + codecRecoveryThreadQuiescedFlags = 0; + codecRecoveryMonitor.notifyAll(); + } + else { + // If we haven't quiesced all threads yet, wait to be signalled after recovery. + // The final thread to be quiesced will handle the codec recovery. + while (codecRecoveryType.get() != CR_RECOVERY_TYPE_NONE) { + try { + LimeLog.info("Waiting to quiesce decoder threads: "+codecRecoveryThreadQuiescedFlags); + codecRecoveryMonitor.wait(1000); + } catch (InterruptedException e) { + e.printStackTrace(); + + // InterruptedException clears the thread's interrupt status. Since we can't + // handle that here, we will re-interrupt the thread to set the interrupt + // status back to true. 
+ Thread.currentThread().interrupt(); + + break; + } + } + } + } + + return true; + } + + // Returns true if the exception is transient + private boolean handleDecoderException(IllegalStateException e) { + // Eat decoder exceptions if we're in the process of stopping + if (stopping) { + return false; + } + + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP && e instanceof CodecException) { + CodecException codecExc = (CodecException) e; + + if (codecExc.isTransient()) { + // We'll let transient exceptions go + LimeLog.warning(codecExc.getDiagnosticInfo()); + return true; + } + + LimeLog.severe(codecExc.getDiagnosticInfo()); + + // We can attempt a recovery or reset at this stage to try to start decoding again + if (codecRecoveryAttempts < CR_MAX_TRIES) { + // If the exception is non-recoverable or we already require a reset, perform a reset. + // If we have no prior unrecoverable failure, we will try a restart instead. + if (codecExc.isRecoverable()) { + if (codecRecoveryType.compareAndSet(CR_RECOVERY_TYPE_NONE, CR_RECOVERY_TYPE_RESTART)) { + LimeLog.info("Decoder requires restart for recoverable CodecException"); + e.printStackTrace(); + } + else if (codecRecoveryType.compareAndSet(CR_RECOVERY_TYPE_FLUSH, CR_RECOVERY_TYPE_RESTART)) { + LimeLog.info("Decoder flush promoted to restart for recoverable CodecException"); + e.printStackTrace(); + } + else if (codecRecoveryType.get() != CR_RECOVERY_TYPE_RESET && codecRecoveryType.get() != CR_RECOVERY_TYPE_RESTART) { + throw new IllegalStateException("Unexpected codec recovery type: " + codecRecoveryType.get()); + } + } + else if (!codecExc.isRecoverable()) { + if (codecRecoveryType.compareAndSet(CR_RECOVERY_TYPE_NONE, CR_RECOVERY_TYPE_RESET)) { + LimeLog.info("Decoder requires reset for non-recoverable CodecException"); + e.printStackTrace(); + } + else if (codecRecoveryType.compareAndSet(CR_RECOVERY_TYPE_FLUSH, CR_RECOVERY_TYPE_RESET)) { + LimeLog.info("Decoder flush promoted to reset for non-recoverable 
CodecException"); + e.printStackTrace(); + } + else if (codecRecoveryType.compareAndSet(CR_RECOVERY_TYPE_RESTART, CR_RECOVERY_TYPE_RESET)) { + LimeLog.info("Decoder restart promoted to reset for non-recoverable CodecException"); + e.printStackTrace(); + } + else if (codecRecoveryType.get() != CR_RECOVERY_TYPE_RESET) { + throw new IllegalStateException("Unexpected codec recovery type: " + codecRecoveryType.get()); + } + } + + // The recovery will take place when all threads reach doCodecRecoveryIfRequired(). + return false; + } + } + else { + // IllegalStateException was primarily used prior to the introduction of CodecException. + // Recovery from this requires a full decoder reset. + // + // NB: CodecException is an IllegalStateException, so we must check for it first. + if (codecRecoveryAttempts < CR_MAX_TRIES) { + if (codecRecoveryType.compareAndSet(CR_RECOVERY_TYPE_NONE, CR_RECOVERY_TYPE_RESET)) { + LimeLog.info("Decoder requires reset for IllegalStateException"); + e.printStackTrace(); + } + else if (codecRecoveryType.compareAndSet(CR_RECOVERY_TYPE_FLUSH, CR_RECOVERY_TYPE_RESET)) { + LimeLog.info("Decoder flush promoted to reset for IllegalStateException"); + e.printStackTrace(); + } + else if (codecRecoveryType.compareAndSet(CR_RECOVERY_TYPE_RESTART, CR_RECOVERY_TYPE_RESET)) { + LimeLog.info("Decoder restart promoted to reset for IllegalStateException"); + e.printStackTrace(); + } + else if (codecRecoveryType.get() != CR_RECOVERY_TYPE_RESET) { + throw new IllegalStateException("Unexpected codec recovery type: " + codecRecoveryType.get()); + } + + return false; + } + } + + // Only throw if we're not in the middle of codec recovery + if (codecRecoveryType.get() == CR_RECOVERY_TYPE_NONE) { + // + // There seems to be a race condition with decoder/surface teardown causing some + // decoders to to throw IllegalStateExceptions even before 'stopping' is set. + // To workaround this while allowing real exceptions to propagate, we will eat the + // first exception. 
If we are still receiving exceptions 3 seconds later, we will + // throw the original exception again. + // + if (initialException != null) { + // This isn't the first time we've had an exception processing video + if (SystemClock.uptimeMillis() - initialExceptionTimestamp >= EXCEPTION_REPORT_DELAY_MS) { + // It's been over 3 seconds and we're still getting exceptions. Throw the original now. + if (!reportedCrash) { + reportedCrash = true; + crashListener.notifyCrash(initialException); + } + throw initialException; + } + } + else { + // This is the first exception we've hit + initialException = new RendererException(this, e); + initialExceptionTimestamp = SystemClock.uptimeMillis(); + } + } + + // Not transient + return false; + } + + @Override + public void doFrame(long frameTimeNanos) { + // Do nothing if we're stopping + if (stopping) { + return; + } + + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { + frameTimeNanos -= activity.getWindowManager().getDefaultDisplay().getAppVsyncOffsetNanos(); + } + + // Don't render unless a new frame is due. This prevents microstutter when streaming + // at a frame rate that doesn't match the display (such as 60 FPS on 120 Hz). + long actualFrameTimeDeltaNs = frameTimeNanos - lastRenderedFrameTimeNanos; + long expectedFrameTimeDeltaNs = 800000000 / refreshRate; // within 80% of the next frame + if (actualFrameTimeDeltaNs >= expectedFrameTimeDeltaNs) { + // Render up to one frame when in frame pacing mode. + // + // NB: Since the queue limit is 2, we won't starve the decoder of output buffers + // by holding onto them for too long. This also ensures we will have that 1 extra + // frame of buffer to smooth over network/rendering jitter. 
+ Integer nextOutputBuffer = outputBufferQueue.poll(); + if (nextOutputBuffer != null) { + try { + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { + videoDecoder.releaseOutputBuffer(nextOutputBuffer, frameTimeNanos); + } + else { + if (android.os.Build.VERSION.SDK_INT >= 21) { + long __ts = System.nanoTime(); + videoDecoder.releaseOutputBuffer(nextOutputBuffer, __ts); + } else { + if (android.os.Build.VERSION.SDK_INT >= 21) { + long __ts = System.nanoTime(); + videoDecoder.releaseOutputBuffer(nextOutputBuffer, __ts); +} else { + videoDecoder.releaseOutputBuffer(nextOutputBuffer, true); +} + } + } + + lastRenderedFrameTimeNanos = frameTimeNanos; + activeWindowVideoStats.totalFramesRendered++; + } catch (IllegalStateException ignored) { + try { + // Try to avoid leaking the output buffer by releasing it without rendering + videoDecoder.releaseOutputBuffer(nextOutputBuffer, false); + } catch (IllegalStateException e) { + // This will leak nextOutputBuffer, but there's really nothing else we can do + e.printStackTrace(); + handleDecoderException(e); + } + } + } + } + + // Attempt codec recovery even if we have nothing to render right now. Recovery can still + // be required even if the codec died before giving any output. 
+ doCodecRecoveryIfRequired(CR_FLAG_CHOREOGRAPHER); + + // Request another callback for next frame + Choreographer.getInstance().postFrameCallback(this); + } + + private void startChoreographerThread() { + if (prefs.framePacing != PreferenceConfiguration.FRAME_PACING_BALANCED) { + // Not using Choreographer in this pacing mode + return; + } + + // We use a separate thread to avoid any main thread delays from delaying rendering + choreographerHandlerThread = new HandlerThread("Video - Choreographer", Process.THREAD_PRIORITY_URGENT_DISPLAY); + choreographerHandlerThread.start(); + + // Start the frame callbacks + choreographerHandler = new Handler(choreographerHandlerThread.getLooper()); + choreographerHandler.post(new Runnable() { + @Override + public void run() { + Choreographer.getInstance().postFrameCallback(MediaCodecDecoderRenderer.this); + } + }); + } + + private void startRendererThread() + { + rendererThread = new Thread() { + @Override + public void run() { + // Boost thread priority to reduce decoding latency + android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_DISPLAY); + + // Compute display refresh and vsync period once (fallback 60 Hz if unavailable) + long vsyncPeriodNs; + float displayHz = 60f; + try { + if (Build.VERSION.SDK_INT >= 17 && context != null) { + android.view.Display d = ((android.view.WindowManager) context.getSystemService(android.content.Context.WINDOW_SERVICE)).getDefaultDisplay(); + if (d != null) displayHz = d.getRefreshRate(); + } + } catch (Throwable ignored) {} + if (displayHz <= 0f) displayHz = 60f; + vsyncPeriodNs = (long) (1_000_000_000L / displayHz); + + // Stream cadence (targetFps set in setup(...)) + final int tfps = (targetFps > 0 ? 
targetFps : 60); + final long streamPeriodNs = (long) (1_000_000_000L / Math.max(1, tfps)); + + // Adaptive period selection to avoid added latency on high-refresh devices + final boolean highRefresh = displayHz >= 90f; + final boolean managedMode = (prefs != null && prefs.framePacing == PreferenceConfiguration.FRAME_PACING_BALANCED); + // Use stream-aligned thresholds only on lower-refresh screens while in Balanced. + final long periodNs = forceTightThresholds + ? vsyncPeriodNs + : ((managedMode && !highRefresh) ? Math.max(vsyncPeriodNs, streamPeriodNs) : vsyncPeriodNs); +boolean isC2Decoder = false; + try { + String decName = videoDecoder.getName(); + if (decName != null) { + isC2Decoder = decName.toLowerCase(java.util.Locale.US).startsWith("c2."); + } + } catch (Throwable ignored) {} + + // Aggressive/adaptive state + final double EWMA_ALPHA = 0.25; + final double MIN_FACTOR = 1.00; + final double MAX_FACTOR = 1.20; + + long lastDecoderPtsUs = 0L; + long lastPresentNs = 0L; + long lastDropNs = 0L; + int lateStreak = 0; + int tryAgainStreak = 0; + int recentDrops = 0; + + double ewmaInterArrivalNs = (1_000_000_000.0 / Math.max(1, tfps)); + double ewmaDecodeToPresentNs = periodNs * 0.7; + double ewmaJitterNs = periodNs * 0.1; + + final android.media.MediaCodec.BufferInfo info = new android.media.MediaCodec.BufferInfo(); + while (!stopping) { + + /* LATEST_ONLY_LOW_LATENCY */ + if (preferLowerDelays) { + try { + final android.media.MediaCodec.BufferInfo __tmpInfo = new android.media.MediaCodec.BufferInfo(); + int __idx = videoDecoder.dequeueOutputBuffer(__tmpInfo, 0); + int __last = -1; + long __lastPtsUs = -1L; + + // Drain non-blocking; keep only the newest buffer + while (__idx >= 0) { + final long ptsUs = __tmpInfo.presentationTimeUs; + + // Measure pure decode time at dequeue (for ALL frames, shown or discarded) + try { updateDecodeLatencyStats(ptsUs); } catch (Throwable ignored) {} + + if (__last >= 0) { + // Drop older buffer without rendering + try { 
videoDecoder.releaseOutputBuffer(__last, false); } catch (Throwable ignored) {} + } + + __last = __idx; + __lastPtsUs = ptsUs; + __idx = videoDecoder.dequeueOutputBuffer(__tmpInfo, 0); + } + + if (__last >= 0) { + final long __nowNs = System.nanoTime(); + + // Present the newest buffer ASAP (timestamped) + if (android.os.Build.VERSION.SDK_INT >= 21) { + videoDecoder.releaseOutputBuffer(__last, __nowNs); + } else { + videoDecoder.releaseOutputBuffer(__last, true); + } + + try { + activeWindowVideoStats.totalFramesRendered++; + numFramesOut++; + lastDecoderPtsUs = __lastPtsUs; + } catch (Throwable ignored) {} + + // EWMA decode->present: + if (__lastPtsUs >= 0) { + final long __d2pNs = __nowNs - (__lastPtsUs * 1000L); + ewmaDecodeToPresentNs += EWMA_ALPHA * (__d2pNs - ewmaDecodeToPresentNs); + } + + continue; + } + } catch (Throwable ignored) {} + } + /* /LATEST_ONLY_LOW_LATENCY */ + + try { + // Try to output a frame (respect policy and do quick retry within budget) + final int policyUs = getOutputDequeueTimeoutUs(); + + final long t0 = System.nanoTime(); + int outIndex = videoDecoder.dequeueOutputBuffer(info, policyUs); + final long elapsedUs = (System.nanoTime() - t0) / 1000L; + + if (outIndex == MediaCodec.INFO_TRY_AGAIN_LATER) { + tryAgainStreak++; + final int quickBackoffUs = (tryAgainStreak <= 2) ? 250 : 500; + + final int remainingUs = (policyUs > 0) ? 
Math.max(0, policyUs - (int) elapsedUs) : 0; + final int backoffUs = Math.min(remainingUs, quickBackoffUs); + + if (backoffUs > 0) { + outIndex = videoDecoder.dequeueOutputBuffer(info, backoffUs); + } + if (outIndex >= 0) { + tryAgainStreak = 0; + } + } else { + tryAgainStreak = 0; + } + + if (outIndex >= 0) { + // --- flags to manage statistics in a robust way --- + boolean statsUpdated = false; + boolean frameDropped = false; + + long presentationTimeUs = info.presentationTimeUs; + int lastIndex = outIndex; + long lastPtsUs = presentationTimeUs; + + numFramesOut++; + + // Measure decode latency AT DEQUEUE + try { updateDecodeLatencyStats(presentationTimeUs); } catch (Throwable ignored) {} + statsUpdated = true; + + // update inter-arrival + if (lastDecoderPtsUs != 0L) { + long interUs = presentationTimeUs - lastDecoderPtsUs; + if (interUs > 0) { + double sample = interUs * 1000.0; + ewmaInterArrivalNs += EWMA_ALPHA * (sample - ewmaInterArrivalNs); + } + } + lastDecoderPtsUs = presentationTimeUs; + + final PreferenceConfiguration p = prefs; // snapshot for null safety + + // Render the latest frame now if frame pacing isn't in balanced mode + if (p == null || p.framePacing != PreferenceConfiguration.FRAME_PACING_BALANCED) { + // Keep only the newest: measure decode for each new frame at DEQUEUE + while ((outIndex = videoDecoder.dequeueOutputBuffer(info, getOutputDequeueTimeoutUs())) >= 0) { + final long newPtsUs = info.presentationTimeUs; + try { updateDecodeLatencyStats(newPtsUs); } catch (Throwable ignored) {} + videoDecoder.releaseOutputBuffer(lastIndex, false); + frameDropped = true; // we're discarding the oldest one + + numFramesOut++; + lastIndex = outIndex; + presentationTimeUs = newPtsUs; + lastPtsUs = newPtsUs; + } + + if (p != null && (p.framePacing == PreferenceConfiguration.FRAME_PACING_MAX_SMOOTHNESS || + p.framePacing == PreferenceConfiguration.FRAME_PACING_CAP_FPS)) { + // Smoothness/Cap: avoid drop, present ASAP if not beyond threshold + if 
(Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { + final long nowNs = System.nanoTime(); + final long frameAgeNs = nowNs - (presentationTimeUs * 1000L); + + // Smoothness: tighter threshold 1.05..1.2× + double pressure = Math.min(1.0, (ewmaJitterNs / vsyncPeriodNs) + (recentDrops * 0.1)); + double factorSmooth = 1.2 - 0.15 * (1.0 - pressure); + factorSmooth = Math.max(1.05, Math.min(1.2, factorSmooth)); + + long dropThresholdSmoothNs = (long)(periodNs * factorSmooth); + + if (frameAgeNs >= dropThresholdSmoothNs) { + videoDecoder.releaseOutputBuffer(lastIndex, /* render */ false); + frameDropped = true; + lastDropNs = nowNs; + recentDrops = Math.min(10, recentDrops + 1); + continue; + } + + videoDecoder.releaseOutputBuffer(lastIndex, nowNs); + lastPresentNs = nowNs; + recentDrops = Math.max(0, recentDrops - 1); + + } else { + if (android.os.Build.VERSION.SDK_INT >= 21) { + long __ts = System.nanoTime(); + videoDecoder.releaseOutputBuffer(lastIndex, __ts); + } else { + videoDecoder.releaseOutputBuffer(lastIndex, true); + } + } + } + else { + // Latency mode + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { + final long nowNs = System.nanoTime(); + final long frameAgeNs = nowNs - (presentationTimeUs * 1000L); + + // Latency: 1.0..1.15×, debounce = 1, cooldown = 0.5× + double backPressure = Math.min(1.0, (double)tryAgainStreak / 6.0); + double streamHz = Math.max(1.0, (double)tfps); + double mismatch = Math.abs((1_000_000_000.0 / streamHz) - (1_000_000_000.0 / Math.max(1.0, displayHz))) / vsyncPeriodNs; + mismatch = Math.min(2.0, mismatch); + + double factorLatency = 1.02 + 0.13 * (0.5 * (ewmaJitterNs / vsyncPeriodNs) + + 0.3 * backPressure + + 0.2 * mismatch); + factorLatency = Math.max(MIN_FACTOR, Math.min(1.15, factorLatency)); + + long dropThresholdNs = (long)(periodNs * factorLatency); + + final long sinceLastPresent = (lastPresentNs == 0L) ? 
Long.MAX_VALUE : (nowNs - lastPresentNs); + final boolean dropCooldownOk = (nowNs - lastDropNs) >= (periodNs / 2); + final boolean isLate = frameAgeNs > dropThresholdNs; + lateStreak = isLate ? (lateStreak + 1) : 0; + + final boolean shouldDrop = + isLate && + (lateStreak >= 1) && + (sinceLastPresent < (long)(periodNs * 0.5)) && + dropCooldownOk; + + if (shouldDrop) { + videoDecoder.releaseOutputBuffer(lastIndex, /* render */ false); + frameDropped = true; + lastDropNs = nowNs; + recentDrops = Math.min(10, recentDrops + 1); + continue; // stats already recorded at dequeue for this PTS + } + + videoDecoder.releaseOutputBuffer(lastIndex, nowNs); + lastPresentNs = nowNs; + if (!isLate) lateStreak = 0; + recentDrops = Math.max(0, recentDrops - 1); + + } else { + if (android.os.Build.VERSION.SDK_INT >= 21) { + long __ts = System.nanoTime(); + videoDecoder.releaseOutputBuffer(lastIndex, __ts); + } else { + videoDecoder.releaseOutputBuffer(lastIndex, true); + } + } + } + + activeWindowVideoStats.totalFramesRendered++; + } + else { + // For balanced frame pacing case, the Choreographer callback will handle rendering. + // We just put all frames into the output buffer queue and let it handle things. + + // Discard the oldest buffer if we've exceeded our limit. + // + // NB: We have to do this on the producer side because the consumer may not + // run for a while (if there is a huge mismatch between stream FPS and display + // refresh rate). 
+ if (outputBufferQueue.size() == OUTPUT_BUFFER_QUEUE_LIMIT) { + try { + videoDecoder.releaseOutputBuffer(outputBufferQueue.take(), false); + frameDropped = true; + } catch (InterruptedException e) { + return; + } + } + + // Add this buffer + outputBufferQueue.add(lastIndex); + // NB: in BALANCED we don't present here; stats already updated at dequeue + } + + // --- Fallback stats update --- + // If we didn't update the stats in-branch and the frame wasn't dropped, + if (!statsUpdated && !frameDropped) { + updateDecodeLatencyStats(presentationTimeUs); + } + + } else { + switch (outIndex) { + case MediaCodec.INFO_TRY_AGAIN_LATER: + break; + case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED: + LimeLog.info("Output format changed"); + outputFormat = videoDecoder.getOutputFormat(); + LimeLog.info("New output format: " + outputFormat); + break; + default: + break; + } + } + } catch (IllegalStateException e) { + handleDecoderException(e); + } finally { + doCodecRecoveryIfRequired(CR_FLAG_RENDER_THREAD); + } + } + } + }; + rendererThread.setName("Video - Renderer (MediaCodec)"); + rendererThread.setPriority(Thread.NORM_PRIORITY + 2); + rendererThread.start(); + } + private boolean fetchNextInputBuffer() { + long startTime; + boolean codecRecovered; + + if (nextInputBuffer != null) { + // We already have an input buffer + return true; + } + + startTime = SystemClock.uptimeMillis(); + + try { + // If we don't have an input buffer index yet, fetch one now + while (nextInputBufferIndex < 0 && !stopping) { + nextInputBufferIndex = videoDecoder.dequeueInputBuffer(10000); + } + + // Get the backing ByteBuffer for the input buffer index + if (nextInputBufferIndex >= 0) { + // Using the new getInputBuffer() API on Lollipop allows + // the framework to do some performance optimizations for us + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { + nextInputBuffer = videoDecoder.getInputBuffer(nextInputBufferIndex); + if (nextInputBuffer == null) { + // According to the Android 
docs, getInputBuffer() can return null "if the + // index is not a dequeued input buffer". I don't think this ever should + // happen but if it does, let's try to get a new input buffer next time. + nextInputBufferIndex = -1; + } + } + else { + nextInputBuffer = legacyInputBuffers[nextInputBufferIndex]; + + // Clear old input data pre-Lollipop + nextInputBuffer.clear(); + } + } + } catch (IllegalStateException e) { + handleDecoderException(e); + return false; + } finally { + codecRecovered = doCodecRecoveryIfRequired(CR_FLAG_INPUT_THREAD); + } + + // If codec recovery is required, always return false to ensure the caller will request + // an IDR frame to complete the codec recovery. + if (codecRecovered) { + return false; + } + + int deltaMs = (int)(SystemClock.uptimeMillis() - startTime); + + if (deltaMs >= 20) { + LimeLog.warning("Dequeue input buffer ran long: " + deltaMs + " ms"); + } + + if (nextInputBuffer == null) { + // We've been hung for 5 seconds and no other exception was reported, + // so generate a decoder hung exception + if (deltaMs >= 5000 && initialException == null) { + DecoderHungException decoderHungException = new DecoderHungException(deltaMs); + if (!reportedCrash) { + reportedCrash = true; + crashListener.notifyCrash(decoderHungException); + } + throw new RendererException(this, decoderHungException); + } + + return false; + } + + return true; + } + + @Override + public void start() { + startRendererThread(); + startChoreographerThread(); + } + + // !!! May be called even if setup()/start() fails !!! 
+ public void prepareForStop() { + // Let the decoding code know to ignore codec exceptions now + stopping = true; + + // Halt the rendering thread + if (rendererThread != null) { + rendererThread.interrupt(); + } + + // Stop any active codec recovery operations + synchronized (codecRecoveryMonitor) { + codecRecoveryType.set(CR_RECOVERY_TYPE_NONE); + codecRecoveryMonitor.notifyAll(); + } + + // Post a quit message to the Choreographer looper (if we have one) + if (choreographerHandler != null) { + choreographerHandler.post(new Runnable() { + @Override + public void run() { + // Don't allow any further messages to be queued + choreographerHandlerThread.quit(); + + // Deregister the frame callback (if registered) + Choreographer.getInstance().removeFrameCallback(MediaCodecDecoderRenderer.this); + } + }); + } + } + + @Override + public void stop() { + // May be called already, but we'll call it now to be safe + prepareForStop(); + + // Wait for the Choreographer looper to shut down (if we have one) + if (choreographerHandlerThread != null) { + try { + choreographerHandlerThread.join(); + } catch (InterruptedException e) { + e.printStackTrace(); + + // InterruptedException clears the thread's interrupt status. Since we can't + // handle that here, we will re-interrupt the thread to set the interrupt + // status back to true. + Thread.currentThread().interrupt(); + } + } + + // Wait for the renderer thread to shut down + try { + rendererThread.join(); + } catch (InterruptedException e) { + e.printStackTrace(); + + // InterruptedException clears the thread's interrupt status. Since we can't + // handle that here, we will re-interrupt the thread to set the interrupt + // status back to true. 
+ Thread.currentThread().interrupt(); + } + } + + @Override + public void cleanup() { + videoDecoder.release(); + } + + @Override + public void setHdrMode(boolean enabled, byte[] hdrMetadata) { + // HDR metadata is only supported in Android 7.0 and later, so don't bother + // restarting the codec on anything earlier than that. + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) { + if (currentHdrMetadata != null && (!enabled || hdrMetadata == null)) { + currentHdrMetadata = null; + } + else if (enabled && hdrMetadata != null && !Arrays.equals(currentHdrMetadata, hdrMetadata)) { + currentHdrMetadata = hdrMetadata; + } + else { + // Nothing to do + return; + } + + // If we reach this point, we need to restart the MediaCodec instance to + // pick up the HDR metadata change. This will happen on the next input + // or output buffer. + + // HACK: Reset codec recovery attempt counter, since this is an expected "recovery" + codecRecoveryAttempts = 0; + + // Promote None/Flush to Restart and leave Reset alone + if (!codecRecoveryType.compareAndSet(CR_RECOVERY_TYPE_NONE, CR_RECOVERY_TYPE_RESTART)) { + codecRecoveryType.compareAndSet(CR_RECOVERY_TYPE_FLUSH, CR_RECOVERY_TYPE_RESTART); + } + } + } + + private boolean queueNextInputBuffer(long timestampUs, int codecFlags) { + boolean codecRecovered; + + try { + videoDecoder.queueInputBuffer(nextInputBufferIndex, + 0, nextInputBuffer.position(), + timestampUs, codecFlags); + + // Track enqueue time for this PTS + try { enqueueNsByPtsUs.put(timestampUs, System.nanoTime()); } catch (Throwable ignored) {} + + // We need a new buffer now + nextInputBufferIndex = -1; + nextInputBuffer = null; + } catch (IllegalStateException e) { + if (handleDecoderException(e)) { + // We encountered a transient error. In this case, just hold onto the buffer + // (to avoid leaking it), clear it, and keep it for the next frame. We'll return + // false to trigger an IDR frame to recover. 
+ nextInputBuffer.clear(); + } + else { + // We encountered a non-transient error. In this case, we will simply leak the + // buffer because we cannot be sure we will ever succeed in queuing it. + nextInputBufferIndex = -1; + nextInputBuffer = null; + } + return false; + } finally { + codecRecovered = doCodecRecoveryIfRequired(CR_FLAG_INPUT_THREAD); + } + + // If codec recovery is required, always return false to ensure the caller will request + // an IDR frame to complete the codec recovery. + if (codecRecovered) { + return false; + } + + // Fetch a new input buffer now while we have some time between frames + // to have it ready immediately when the next frame arrives. + // + // We must propagate the return value here in order to properly handle + // codec recovery happening in fetchNextInputBuffer(). If we don't, we'll + // never get an IDR frame to complete the recovery process. + return fetchNextInputBuffer(); + } + + private void doProfileSpecificSpsPatching(SeqParameterSet sps) { + // Some devices benefit from setting constraint flags 4 & 5 to make this Constrained + // High Profile which allows the decoder to assume there will be no B-frames and + // reduce delay and buffering accordingly. Some devices (Marvell, Exynos 4) don't + // like it so we only set them on devices that are confirmed to benefit from it. 
+ if (sps.profileIdc == 100 && constrainedHighProfile) { + LimeLog.info("Setting constraint set flags for constrained high profile"); + sps.constraintSet4Flag = true; + sps.constraintSet5Flag = true; + } + else { + // Force the constraints unset otherwise (some may be set by default) + sps.constraintSet4Flag = false; + sps.constraintSet5Flag = false; + } + } + + @SuppressWarnings("deprecation") + @Override + public int submitDecodeUnit(byte[] decodeUnitData, int decodeUnitLength, int decodeUnitType, + int frameNumber, int frameType, char frameHostProcessingLatency, + long receiveTimeMs, long enqueueTimeMs) { + if (stopping) { + // Don't bother if we're stopping + return MoonBridge.DR_OK; + } + + if (lastFrameNumber == 0) { + activeWindowVideoStats.measurementStartTimestamp = SystemClock.uptimeMillis(); + } else if (frameNumber != lastFrameNumber && frameNumber != lastFrameNumber + 1) { + // We can receive the same "frame" multiple times if it's an IDR frame. + // In that case, each frame start NALU is submitted independently. 
+ activeWindowVideoStats.framesLost += frameNumber - lastFrameNumber - 1; + activeWindowVideoStats.totalFrames += frameNumber - lastFrameNumber - 1; + activeWindowVideoStats.frameLossEvents++; + } + + // Reset CSD data for each IDR frame + if (lastFrameNumber != frameNumber && frameType == MoonBridge.FRAME_TYPE_IDR) { + vpsBuffers.clear(); + spsBuffers.clear(); + ppsBuffers.clear(); + } + + lastFrameNumber = frameNumber; + + // Flip stats windows roughly every second + if (SystemClock.uptimeMillis() >= activeWindowVideoStats.measurementStartTimestamp + 1000) { + if (prefs.enablePerfOverlay || prefs.enablePerfLogging) { + VideoStats lastTwo = new VideoStats(); + lastTwo.add(lastWindowVideoStats); + lastTwo.add(activeWindowVideoStats); + VideoStatsFps fps = lastTwo.getFps(); + String decoder; + + if ((videoFormat & MoonBridge.VIDEO_FORMAT_MASK_H264) != 0) { + decoder = avcDecoder.getName(); + } else if ((videoFormat & MoonBridge.VIDEO_FORMAT_MASK_H265) != 0) { + decoder = hevcDecoder.getName(); + } else if ((videoFormat & MoonBridge.VIDEO_FORMAT_MASK_AV1) != 0) { + decoder = av1Decoder.getName(); + } else { + decoder = "(unknown)"; + } + + float decodeTimeMs = (float)lastTwo.decoderTimeMs / lastTwo.totalFramesReceived; + long rttInfo = MoonBridge.getEstimatedRttInfo(); + StringBuilder sb = new StringBuilder(); + if(prefs.enablePerfOverlayLite){ + if(TrafficStatsHelper.getPackageRxBytes(Process.myUid()) != TrafficStats.UNSUPPORTED){ + long netData=TrafficStatsHelper.getPackageRxBytes(Process.myUid())+TrafficStatsHelper.getPackageTxBytes(Process.myUid()); + if(lastNetDataNum!=0){ + sb.append(context.getString(R.string.perf_overlay_lite_bandwidth) + ": "); + float realtimeNetData=(netData-lastNetDataNum)/1024f; + if(realtimeNetData>=1000){ + sb.append(String.format("%.2f", realtimeNetData/1024f) +"M/s\t "); + }else{ + sb.append(String.format("%.2f", realtimeNetData) +"K/s\t "); + } + } + lastNetDataNum=netData; + } +// sb.append("分辨率:"); +// sb.append(initialWidth + "x" 
+ initialHeight); + sb.append(context.getString(R.string.perf_overlay_lite_network_decoding_delay) + ": "); + sb.append(context.getString(R.string.perf_overlay_lite_net,(int)(rttInfo >> 32))); + sb.append(" / "); + sb.append(context.getString(R.string.perf_overlay_lite_dectime,decodeTimeMs)); + sb.append("\t"); + sb.append(context.getString(R.string.perf_overlay_lite_packet_loss) + ": "); + sb.append(context.getString(R.string.perf_overlay_lite_netdrops,(float)lastTwo.framesLost / lastTwo.totalFrames * 100)); + sb.append("\t FPS:"); + sb.append(context.getString(R.string.perf_overlay_lite_fps, fps.totalFps)); + if(Stereo3DRenderer.isActive) { + sb.append(" "); + sb.append(context.getString(R.string.perf_overlay_ai_fps)); + sb.append(" "); + sb.append(Stereo3DRenderer.threeDFps); + sb.append(" "); + sb.append(context.getString(R.string.perf_overlay_ai_delegate)); + sb.append(" "); + sb.append(Stereo3DRenderer.renderer); + sb.append(" "); + sb.append(context.getString(R.string.perf_overlay_drawdelay, Stereo3DRenderer.drawDelay)); + } + }else{ + if(Stereo3DRenderer.isActive) { + sb.append(context.getString(R.string.perf_overlay_streamdetails, initialWidth + "x" + initialHeight, fps.totalFps)); + sb.append('\n'); + sb.append(" "); + sb.append(context.getString(R.string.perf_overlay_ai_fps)); + sb.append(" "); + sb.append(Stereo3DRenderer.threeDFps); + sb.append(" "); + sb.append(context.getString(R.string.perf_overlay_ai_delegate)); + sb.append(" "); + sb.append(Stereo3DRenderer.renderer); + sb.append(" "); + sb.append(context.getString(R.string.perf_overlay_drawdelay, Stereo3DRenderer.drawDelay)); + } else { + // If GPU renders the frames, the render FPS is the actual drawn and visible fps for the user + sb.append(context.getString(R.string.perf_overlay_streamdetails, initialWidth + "x" + initialHeight, fps.totalFps)); + } + sb.append('\n'); + sb.append(context.getString(R.string.perf_overlay_decoder, decoder)).append('\n'); + 
sb.append(context.getString(R.string.perf_overlay_incomingfps, fps.receivedFps)).append('\n'); + sb.append(context.getString(R.string.perf_overlay_renderingfps, fps.renderedFps)).append('\n'); + sb.append(context.getString(R.string.perf_overlay_netdrops, + (float)lastTwo.framesLost / lastTwo.totalFrames * 100)).append('\n'); + if(TrafficStatsHelper.getPackageRxBytes(Process.myUid()) != TrafficStats.UNSUPPORTED){ + long netData=TrafficStatsHelper.getPackageRxBytes(Process.myUid())+TrafficStatsHelper.getPackageTxBytes(Process.myUid()); + if(lastNetDataNum!=0){ + sb.append(context.getString(R.string.perf_overlay_lite_bandwidth) + ": "); + float realtimeNetData=(netData-lastNetDataNum)/1024f; + if(realtimeNetData>=1000){ + sb.append(String.format("%.2f", realtimeNetData/1024f) +"M/s\n"); + }else{ + sb.append(String.format("%.2f", realtimeNetData) +"K/s\n"); + } + } + lastNetDataNum=netData; + } + sb.append(context.getString(R.string.perf_overlay_netlatency, + (int)(rttInfo >> 32), (int)rttInfo)).append('\n'); + if (lastTwo.framesWithHostProcessingLatency > 0) { + sb.append(context.getString(R.string.perf_overlay_hostprocessinglatency, + (float)lastTwo.minHostProcessingLatency / 10, + (float)lastTwo.maxHostProcessingLatency / 10, + (float)lastTwo.totalHostProcessingLatency / 10 / lastTwo.framesWithHostProcessingLatency)).append('\n'); + } + sb.append(context.getString(R.string.perf_overlay_dectime, decodeTimeMs)); + } + String fullLog = sb.toString(); + if(prefs.enablePerfOverlay) { + perfListener.onPerfUpdate(fullLog); + } + // Best latency is only met at requested highest fps, rest can be ignored + Boolean targetFpsMatched = ((int) fps.totalFps == (int) prefs.fps); + if(minDecodeTime > decodeTimeMs && targetFpsMatched) { + minDecodeTime = decodeTimeMs; + minDecodeTimeFullLog = fullLog; + } + } + globalVideoStats.add(activeWindowVideoStats); + lastWindowVideoStats.copy(activeWindowVideoStats); + activeWindowVideoStats.clear(); + 
activeWindowVideoStats.measurementStartTimestamp = SystemClock.uptimeMillis(); + } + + boolean csdSubmittedForThisFrame = false; + + // IDR frames require special handling for CSD buffer submission + if (frameType == MoonBridge.FRAME_TYPE_IDR) { + // H264 SPS + if (decodeUnitType == MoonBridge.BUFFER_TYPE_SPS && (videoFormat & MoonBridge.VIDEO_FORMAT_MASK_H264) != 0) { + numSpsIn++; + + ByteBuffer spsBuf = ByteBuffer.wrap(decodeUnitData); + int startSeqLen = decodeUnitData[2] == 0x01 ? 3 : 4; + + // Skip to the start of the NALU data + spsBuf.position(startSeqLen + 1); + + // The H264Utils.readSPS function safely handles + // Annex B NALUs (including NALUs with escape sequences) + SeqParameterSet sps = H264Utils.readSPS(spsBuf); + + // Some decoders rely on H264 level to decide how many buffers are needed + // Since we only need one frame buffered, we'll set the level as low as we can + // for known resolution combinations. Reference frame invalidation may need + // these, so leave them be for those decoders. + if (!refFrameInvalidationActive) { + if (initialWidth <= 720 && initialHeight <= 480 && refreshRate <= 60) { + // Max 5 buffered frames at 720x480x60 + LimeLog.info("Patching level_idc to 31"); + sps.levelIdc = 31; + } + else if (initialWidth <= 1280 && initialHeight <= 720 && refreshRate <= 60) { + // Max 5 buffered frames at 1280x720x60 + LimeLog.info("Patching level_idc to 32"); + sps.levelIdc = 32; + } + else if (initialWidth <= 1920 && initialHeight <= 1080 && refreshRate <= 60) { + // Max 4 buffered frames at 1920x1080x64 + LimeLog.info("Patching level_idc to 42"); + sps.levelIdc = 42; + } + else { + // Leave the profile alone (currently 5.0) + } + } + + // TI OMAP4 requires a reference frame count of 1 to decode successfully. Exynos 4 + // also requires this fixup. + // + // I'm doing this fixup for all devices because I haven't seen any devices that + // this causes issues for. 
At worst, it seems to do nothing and at best it fixes + // issues with video lag, hangs, and crashes. + // + // It does break reference frame invalidation, so we will not do that for decoders + // where we've enabled reference frame invalidation. + if (!refFrameInvalidationActive) { + LimeLog.info("Patching num_ref_frames in SPS"); + sps.numRefFrames = 1; + } + + // GFE 2.5.11 changed the SPS to add additional extensions. Some devices don't like these + // so we remove them here on old devices unless these devices also support HEVC. + // See getPreferredColorSpace() for further information. + if (Build.VERSION.SDK_INT < Build.VERSION_CODES.O && + sps.vuiParams != null && + hevcDecoder == null && + av1Decoder == null) { + sps.vuiParams.videoSignalTypePresentFlag = false; + sps.vuiParams.colourDescriptionPresentFlag = false; + sps.vuiParams.chromaLocInfoPresentFlag = false; + } + + // Some older devices used to choke on a bitstream restrictions, so we won't provide them + // unless explicitly whitelisted. For newer devices, leave the bitstream restrictions present. + if (needsSpsBitstreamFixup || isExynos4 || Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) { + // The SPS that comes in the current H264 bytestream doesn't set bitstream_restriction_flag + // or max_dec_frame_buffering which increases decoding latency on Tegra. 
+ + // If the encoder didn't include VUI parameters in the SPS, add them now + if (sps.vuiParams == null) { + LimeLog.info("Adding VUI parameters"); + sps.vuiParams = new VUIParameters(); + } + + // GFE 2.5.11 started sending bitstream restrictions + if (sps.vuiParams.bitstreamRestriction == null) { + LimeLog.info("Adding bitstream restrictions"); + sps.vuiParams.bitstreamRestriction = new VUIParameters.BitstreamRestriction(); + sps.vuiParams.bitstreamRestriction.motionVectorsOverPicBoundariesFlag = true; + sps.vuiParams.bitstreamRestriction.maxBytesPerPicDenom = 2; + sps.vuiParams.bitstreamRestriction.maxBitsPerMbDenom = 1; + sps.vuiParams.bitstreamRestriction.log2MaxMvLengthHorizontal = 16; + sps.vuiParams.bitstreamRestriction.log2MaxMvLengthVertical = 16; + sps.vuiParams.bitstreamRestriction.numReorderFrames = 0; + } + else { + LimeLog.info("Patching bitstream restrictions"); + } + + // Some devices throw errors if maxDecFrameBuffering < numRefFrames + sps.vuiParams.bitstreamRestriction.maxDecFrameBuffering = sps.numRefFrames; + + // These values are the defaults for the fields, but they are more aggressive + // than what GFE sends in 2.5.11, but it doesn't seem to cause picture problems. + // We'll leave these alone for "modern" devices just in case they care. + if (Build.VERSION.SDK_INT < Build.VERSION_CODES.O) { + sps.vuiParams.bitstreamRestriction.maxBytesPerPicDenom = 2; + sps.vuiParams.bitstreamRestriction.maxBitsPerMbDenom = 1; + } + + // log2_max_mv_length_horizontal and log2_max_mv_length_vertical are set to more + // conservative values by GFE 2.5.11. We'll let those values stand. 
+ } + else if (sps.vuiParams != null) { + // Devices that didn't/couldn't get bitstream restrictions before GFE 2.5.11 + // will continue to not receive them now + sps.vuiParams.bitstreamRestriction = null; + } + + // If we need to hack this SPS to say we're baseline, do so now + if (needsBaselineSpsHack) { + LimeLog.info("Hacking SPS to baseline"); + sps.profileIdc = 66; + savedSps = sps; + } + + // Patch the SPS constraint flags + doProfileSpecificSpsPatching(sps); + + // The H264Utils.writeSPS function safely handles + // Annex B NALUs (including NALUs with escape sequences) + ByteBuffer escapedNalu = H264Utils.writeSPS(sps, decodeUnitLength); + + // Construct the patched SPS + byte[] naluBuffer = new byte[startSeqLen + 1 + escapedNalu.limit()]; + System.arraycopy(decodeUnitData, 0, naluBuffer, 0, startSeqLen + 1); + escapedNalu.get(naluBuffer, startSeqLen + 1, escapedNalu.limit()); + + // Batch this to submit together with other CSD per AOSP docs + spsBuffers.add(naluBuffer); + return MoonBridge.DR_OK; + } + else if (decodeUnitType == MoonBridge.BUFFER_TYPE_VPS) { + numVpsIn++; + + // Batch this to submit together with other CSD per AOSP docs + byte[] naluBuffer = new byte[decodeUnitLength]; + System.arraycopy(decodeUnitData, 0, naluBuffer, 0, decodeUnitLength); + vpsBuffers.add(naluBuffer); + return MoonBridge.DR_OK; + } + // Only the HEVC SPS hits this path (H.264 is handled above) + else if (decodeUnitType == MoonBridge.BUFFER_TYPE_SPS) { + numSpsIn++; + + // Batch this to submit together with other CSD per AOSP docs + byte[] naluBuffer = new byte[decodeUnitLength]; + System.arraycopy(decodeUnitData, 0, naluBuffer, 0, decodeUnitLength); + spsBuffers.add(naluBuffer); + return MoonBridge.DR_OK; + } + else if (decodeUnitType == MoonBridge.BUFFER_TYPE_PPS) { + numPpsIn++; + + // Batch this to submit together with other CSD per AOSP docs + byte[] naluBuffer = new byte[decodeUnitLength]; + System.arraycopy(decodeUnitData, 0, naluBuffer, 0, decodeUnitLength); + 
ppsBuffers.add(naluBuffer); + return MoonBridge.DR_OK; + } + else if ((videoFormat & (MoonBridge.VIDEO_FORMAT_MASK_H264 | MoonBridge.VIDEO_FORMAT_MASK_H265)) != 0) { + // If this is the first CSD blob or we aren't supporting fused IDR frames, we will + // submit the CSD blob in a separate input buffer for each IDR frame. + if (!submittedCsd || !fusedIdrFrame) { + if (!fetchNextInputBuffer()) { + return MoonBridge.DR_NEED_IDR; + } + + // Submit all CSD when we receive the first non-CSD blob in an IDR frame + for (byte[] vpsBuffer : vpsBuffers) { + nextInputBuffer.put(vpsBuffer); + } + for (byte[] spsBuffer : spsBuffers) { + nextInputBuffer.put(spsBuffer); + } + for (byte[] ppsBuffer : ppsBuffers) { + nextInputBuffer.put(ppsBuffer); + } + + if (!queueNextInputBuffer(0, MediaCodec.BUFFER_FLAG_CODEC_CONFIG)) { + return MoonBridge.DR_NEED_IDR; + } + + // Remember that we already submitted CSD for this frame, so we don't do it + // again in the fused IDR case below. + csdSubmittedForThisFrame = true; + + // Remember that we submitted CSD globally for this MediaCodec instance + submittedCsd = true; + + if (needsBaselineSpsHack) { + needsBaselineSpsHack = false; + + if (!replaySps()) { + return MoonBridge.DR_NEED_IDR; + } + + LimeLog.info("SPS replay complete"); + } + } + } + } + + if (frameHostProcessingLatency != 0) { + if (activeWindowVideoStats.minHostProcessingLatency != 0) { + activeWindowVideoStats.minHostProcessingLatency = (char) Math.min(activeWindowVideoStats.minHostProcessingLatency, frameHostProcessingLatency); + } else { + activeWindowVideoStats.minHostProcessingLatency = frameHostProcessingLatency; + } + activeWindowVideoStats.framesWithHostProcessingLatency += 1; + } + activeWindowVideoStats.maxHostProcessingLatency = (char) Math.max(activeWindowVideoStats.maxHostProcessingLatency, frameHostProcessingLatency); + activeWindowVideoStats.totalHostProcessingLatency += frameHostProcessingLatency; + + activeWindowVideoStats.totalFramesReceived++; + 
activeWindowVideoStats.totalFrames++; + + if (!FRAME_RENDER_TIME_ONLY) { + // Count time from first packet received to enqueue time as receive time + // We will count DU queue time as part of decoding, because it is directly + // caused by a slow decoder. + activeWindowVideoStats.totalTimeMs += enqueueTimeMs - receiveTimeMs; + } + + if (!fetchNextInputBuffer()) { + return MoonBridge.DR_NEED_IDR; + } + + int codecFlags = 0; + + if (frameType == MoonBridge.FRAME_TYPE_IDR) { + codecFlags |= MediaCodec.BUFFER_FLAG_SYNC_FRAME; + + // If we are using fused IDR frames, submit the CSD with each IDR frame + if (fusedIdrFrame && !csdSubmittedForThisFrame) { + for (byte[] vpsBuffer : vpsBuffers) { + nextInputBuffer.put(vpsBuffer); + } + for (byte[] spsBuffer : spsBuffers) { + nextInputBuffer.put(spsBuffer); + } + for (byte[] ppsBuffer : ppsBuffers) { + nextInputBuffer.put(ppsBuffer); + } + } + } + + long timestampUs = enqueueTimeMs * 1000; + if (timestampUs <= lastTimestampUs) { + // We can't submit multiple buffers with the same timestamp + // so bump it up by one before queuing + timestampUs = lastTimestampUs + 1; + } + lastTimestampUs = timestampUs; + + numFramesIn++; + + if (decodeUnitLength > nextInputBuffer.limit() - nextInputBuffer.position()) { + IllegalArgumentException exception = new IllegalArgumentException( + "Decode unit length "+decodeUnitLength+" too large for input buffer "+nextInputBuffer.limit()); + if (!reportedCrash) { + reportedCrash = true; + crashListener.notifyCrash(exception); + } + throw new RendererException(this, exception); + } + + // Copy data from our buffer list into the input buffer + nextInputBuffer.put(decodeUnitData, 0, decodeUnitLength); + + if (!queueNextInputBuffer(timestampUs, codecFlags)) { + return MoonBridge.DR_NEED_IDR; + } + + return MoonBridge.DR_OK; + } + + private boolean replaySps() { + if (!fetchNextInputBuffer()) { + return false; + } + + // Write the Annex B header + nextInputBuffer.put(new byte[]{0x00, 0x00, 0x00, 0x01, 
0x67}); + + // Switch the H264 profile back to high + savedSps.profileIdc = 100; + + // Patch the SPS constraint flags + doProfileSpecificSpsPatching(savedSps); + + // The H264Utils.writeSPS function safely handles + // Annex B NALUs (including NALUs with escape sequences) + ByteBuffer escapedNalu = H264Utils.writeSPS(savedSps, 128); + nextInputBuffer.put(escapedNalu); + + // No need for the SPS anymore + savedSps = null; + + // Queue the new SPS + return queueNextInputBuffer(0, MediaCodec.BUFFER_FLAG_CODEC_CONFIG); + } + + @Override + public int getCapabilities() { + int capabilities = 0; + + // Request the optimal number of slices per frame for this decoder + capabilities |= MoonBridge.CAPABILITY_SLICES_PER_FRAME(optimalSlicesPerFrame); + + // Enable reference frame invalidation on supported hardware + if (refFrameInvalidationAvc) { + capabilities |= MoonBridge.CAPABILITY_REFERENCE_FRAME_INVALIDATION_AVC; + } + if (refFrameInvalidationHevc) { + capabilities |= MoonBridge.CAPABILITY_REFERENCE_FRAME_INVALIDATION_HEVC; + } + if (refFrameInvalidationAv1) { + capabilities |= MoonBridge.CAPABILITY_REFERENCE_FRAME_INVALIDATION_AV1; + } + + // Enable direct submit on supported hardware + if (directSubmit) { + capabilities |= MoonBridge.CAPABILITY_DIRECT_SUBMIT; + } + + return capabilities; + } + + public int getAverageEndToEndLatency() { + if (globalVideoStats.totalFramesReceived == 0) { + return 0; + } + return (int)(globalVideoStats.totalTimeMs / globalVideoStats.totalFramesReceived); + } + + public int getAverageDecoderLatency() { + if (globalVideoStats.totalFramesReceived == 0) { + return 0; + } + return (int)(globalVideoStats.decoderTimeMs / globalVideoStats.totalFramesReceived); + } + + public Boolean performanceWasTracked() { + return minDecodeTime < Float.MAX_VALUE; + } + + @SuppressLint("DefaultLocale") + public String getMinDecoderLatency() { + return String.format("%1$.2f", minDecodeTime); + } + + public String getMinDecoderLatencyFullLog() { + return 
minDecodeTimeFullLog; + } + + static class DecoderHungException extends RuntimeException { + private int hangTimeMs; + + DecoderHungException(int hangTimeMs) { + this.hangTimeMs = hangTimeMs; + } + + public String toString() { + String str = ""; + + str += "Hang time: "+hangTimeMs+" ms"+ RendererException.DELIMITER; + str += super.toString(); + + return str; + } + } + + static class RendererException extends RuntimeException { + private static final long serialVersionUID = 8985937536997012406L; + protected static final String DELIMITER = BuildConfig.DEBUG ? "\n" : " | "; + + private String text; + + RendererException(MediaCodecDecoderRenderer renderer, Exception e) { + this.text = generateText(renderer, e); + } + + public String toString() { + return text; + } + + private String generateText(MediaCodecDecoderRenderer renderer, Exception originalException) { + String str; + + if (renderer.numVpsIn == 0 && renderer.numSpsIn == 0 && renderer.numPpsIn == 0) { + str = "PreSPSError"; + } + else if (renderer.numSpsIn > 0 && renderer.numPpsIn == 0) { + str = "PrePPSError"; + } + else if (renderer.numPpsIn > 0 && renderer.numFramesIn == 0) { + str = "PreIFrameError"; + } + else if (renderer.numFramesIn > 0 && renderer.outputFormat == null) { + str = "PreOutputConfigError"; + } + else if (renderer.outputFormat != null && renderer.numFramesOut == 0) { + str = "PreOutputError"; + } + else if (renderer.numFramesOut <= renderer.refreshRate * 30) { + str = "EarlyOutputError"; + } + else { + str = "ErrorWhileStreaming"; + } + + str += "Format: "+String.format("%x", renderer.videoFormat)+DELIMITER; + str += "AVC Decoder: "+((renderer.avcDecoder != null) ? renderer.avcDecoder.getName():"(none)")+DELIMITER; + str += "HEVC Decoder: "+((renderer.hevcDecoder != null) ? renderer.hevcDecoder.getName():"(none)")+DELIMITER; + str += "AV1 Decoder: "+((renderer.av1Decoder != null) ? 
renderer.av1Decoder.getName():"(none)")+DELIMITER; + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP && renderer.avcDecoder != null) { + Range avcWidthRange = renderer.avcDecoder.getCapabilitiesForType("video/avc").getVideoCapabilities().getSupportedWidths(); + str += "AVC supported width range: "+avcWidthRange+DELIMITER; + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { + try { + Range avcFpsRange = renderer.avcDecoder.getCapabilitiesForType("video/avc").getVideoCapabilities().getAchievableFrameRatesFor(renderer.initialWidth, renderer.initialHeight); + str += "AVC achievable FPS range: "+avcFpsRange+DELIMITER; + } catch (IllegalArgumentException e) { + str += "AVC achievable FPS range: UNSUPPORTED!"+DELIMITER; + } + } + } + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP && renderer.hevcDecoder != null) { + Range hevcWidthRange = renderer.hevcDecoder.getCapabilitiesForType("video/hevc").getVideoCapabilities().getSupportedWidths(); + str += "HEVC supported width range: "+hevcWidthRange+DELIMITER; + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { + try { + Range hevcFpsRange = renderer.hevcDecoder.getCapabilitiesForType("video/hevc").getVideoCapabilities().getAchievableFrameRatesFor(renderer.initialWidth, renderer.initialHeight); + str += "HEVC achievable FPS range: " + hevcFpsRange + DELIMITER; + } catch (IllegalArgumentException e) { + str += "HEVC achievable FPS range: UNSUPPORTED!"+DELIMITER; + } + } + } + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP && renderer.av1Decoder != null) { + Range av1WidthRange = renderer.av1Decoder.getCapabilitiesForType("video/av01").getVideoCapabilities().getSupportedWidths(); + str += "AV1 supported width range: "+av1WidthRange+DELIMITER; + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { + try { + Range av1FpsRange = renderer.av1Decoder.getCapabilitiesForType("video/av01").getVideoCapabilities().getAchievableFrameRatesFor(renderer.initialWidth, renderer.initialHeight); + str += 
"AV1 achievable FPS range: " + av1FpsRange + DELIMITER; + } catch (IllegalArgumentException e) { + str += "AV1 achievable FPS range: UNSUPPORTED!"+DELIMITER; + } + } + } + str += "Configured format: "+renderer.configuredFormat+DELIMITER; + str += "Input format: "+renderer.inputFormat+DELIMITER; + str += "Output format: "+renderer.outputFormat+DELIMITER; + str += "Adaptive playback: "+renderer.adaptivePlayback+DELIMITER; + str += "GL Renderer: "+renderer.glRenderer+DELIMITER; + //str += "Build fingerprint: "+Build.FINGERPRINT+DELIMITER; + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.S) { + str += "SOC: "+Build.SOC_MANUFACTURER+" - "+Build.SOC_MODEL+DELIMITER; + str += "Performance class: "+Build.VERSION.MEDIA_PERFORMANCE_CLASS+DELIMITER; + /*str += "Vendor params: "; + List params = renderer.videoDecoder.getSupportedVendorParameters(); + if (params.isEmpty()) { + str += "NONE"; + } + else { + for (String param : params) { + str += param + " "; + } + } + str += DELIMITER;*/ + } + str += "Consecutive crashes: "+renderer.consecutiveCrashCount+DELIMITER; + str += "RFI active: "+renderer.refFrameInvalidationActive+DELIMITER; + str += "Using modern SPS patching: "+(Build.VERSION.SDK_INT >= Build.VERSION_CODES.O)+DELIMITER; + str += "Fused IDR frames: "+renderer.fusedIdrFrame+DELIMITER; + str += "Video dimensions: "+renderer.initialWidth+"x"+renderer.initialHeight+DELIMITER; + str += "FPS target: "+renderer.refreshRate+DELIMITER; + str += "Bitrate: "+renderer.prefs.bitrate+" Kbps"+DELIMITER; + str += "CSD stats: "+renderer.numVpsIn+", "+renderer.numSpsIn+", "+renderer.numPpsIn+DELIMITER; + str += "Frames in-out: "+renderer.numFramesIn+", "+renderer.numFramesOut+DELIMITER; + str += "Total frames received: "+renderer.globalVideoStats.totalFramesReceived+DELIMITER; + str += "Total frames rendered: "+renderer.globalVideoStats.totalFramesRendered+DELIMITER; + str += "Frame losses: "+renderer.globalVideoStats.framesLost+" in "+renderer.globalVideoStats.frameLossEvents+" loss 
events"+DELIMITER; + str += "Average end-to-end client latency: "+renderer.getAverageEndToEndLatency()+"ms"+DELIMITER; + str += "Average hardware decoder latency: "+renderer.getAverageDecoderLatency()+"ms"+DELIMITER; + str += "Frame pacing mode: "+renderer.prefs.framePacing+DELIMITER; + + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { + if (originalException instanceof CodecException) { + CodecException ce = (CodecException) originalException; + + str += "Diagnostic Info: "+ce.getDiagnosticInfo()+DELIMITER; + str += "Recoverable: "+ce.isRecoverable()+DELIMITER; + str += "Transient: "+ce.isTransient()+DELIMITER; + + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { + str += "Codec Error Code: "+ce.getErrorCode()+DELIMITER; + } + } + } + + str += originalException.toString(); + + return str; + } + } + + +private boolean isMTKDecoderName(String name) { + if (name == null) return false; + String n = name.toLowerCase(); + return n.startsWith("c2.mtk") || n.startsWith("omx.mtk"); +} + +} From b20adba738804bfc07e9d679b31fb95c9b4d888a Mon Sep 17 00:00:00 2001 From: IlFlacco Date: Fri, 17 Oct 2025 17:10:59 +0200 Subject: [PATCH 10/12] video: HDR10 detect by renderer, update window mode; --- app/src/main/java/com/limelight/Game.java | 20 +++++++++++++++++++ .../video/MediaCodecDecoderRenderer.java | 14 +++++++++++++ 2 files changed, 34 insertions(+) diff --git a/app/src/main/java/com/limelight/Game.java b/app/src/main/java/com/limelight/Game.java index 51a48a92f2..091bba7484 100755 --- a/app/src/main/java/com/limelight/Game.java +++ b/app/src/main/java/com/limelight/Game.java @@ -143,6 +143,26 @@ public class Game extends AppCompatActivity implements SurfaceHolder.Callback, PerfOverlayListener, UsbDriverService.UsbDriverStateListener, View.OnKeyListener { public static Game instance; + // === HDR window color mode control === + public static void updateHdrWindowMode(final boolean enable) { + try { + final Game inst = instance; + if (inst == null) return; + 
if (android.os.Build.VERSION.SDK_INT >= 26) { + inst.runOnUiThread(() -> { + try { + inst.getWindow().setColorMode(enable + ? ActivityInfo.COLOR_MODE_HDR + : ActivityInfo.COLOR_MODE_DEFAULT); + LimeLog.info("Display HDR mode: " + (enable ? "enabled" : "disabled")); + } catch (Throwable t) { + LimeLog.warning("HDR window mode switch failed: " + t); + } + }); + } + } catch (Throwable ignored) {} + } + private int lastButtonState = 0; // Only 2 touches are supported diff --git a/app/src/main/java/com/limelight/binding/video/MediaCodecDecoderRenderer.java b/app/src/main/java/com/limelight/binding/video/MediaCodecDecoderRenderer.java index 451b876e95..2b027e5cc9 100755 --- a/app/src/main/java/com/limelight/binding/video/MediaCodecDecoderRenderer.java +++ b/app/src/main/java/com/limelight/binding/video/MediaCodecDecoderRenderer.java @@ -1453,6 +1453,20 @@ public void run() { case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED: LimeLog.info("Output format changed"); outputFormat = videoDecoder.getOutputFormat(); + try { + android.media.MediaFormat __fmt = outputFormat; + int __std = -1, __tr = -1, __rng = -1; + try { __std = __fmt.getInteger("color-standard"); } catch (Throwable ignored) {} + try { __tr = __fmt.getInteger("color-transfer"); } catch (Throwable ignored) {} + try { __rng = __fmt.getInteger("color-range"); } catch (Throwable ignored) {} + boolean __isHdr = (__std == 6) && (__tr == 6 || __tr == 7); + // Notify window color mode (no-op <26) + try { com.limelight.Game.updateHdrWindowMode(__isHdr); } catch (Throwable ignored) {} + // Pass HDR static info to GL upscaler if available + java.nio.ByteBuffer __hdr = null; + try { __hdr = __fmt.getByteBuffer("hdr-static-info"); } catch (Throwable ignored) {} + byte[] __hdrArr = null; if (__hdr != null && __hdr.remaining() > 0) { __hdrArr = new byte[__hdr.remaining()]; __hdr.get(__hdrArr); } + } catch (Throwable ignored) {} LimeLog.info("New output format: " + outputFormat); break; default: From 
da3837e4a6b2c440e79c6dfcd87e5717321efbf4 Mon Sep 17 00:00:00 2001 From: IlFlacco Date: Thu, 2 Oct 2025 21:40:25 +0200 Subject: [PATCH 11/12] perf(overlay-lite): show SDR/HDR tag - Add hdrActive flag on decoder renderer - Detect HDR via BT.2020 + (PQ|HLG) on output format change - Append "HDR"/"SDR" in Perf Lite next to rendered FPSPS --- .../limelight/binding/video/MediaCodecDecoderRenderer.java | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/app/src/main/java/com/limelight/binding/video/MediaCodecDecoderRenderer.java b/app/src/main/java/com/limelight/binding/video/MediaCodecDecoderRenderer.java index 2b027e5cc9..6e6f47d6c6 100755 --- a/app/src/main/java/com/limelight/binding/video/MediaCodecDecoderRenderer.java +++ b/app/src/main/java/com/limelight/binding/video/MediaCodecDecoderRenderer.java @@ -47,7 +47,9 @@ public class MediaCodecDecoderRenderer extends VideoDecoderRenderer implements C // Latency profile: favor minimal end-to-end delay over absolute smoothness. // Set true to enable a 'latest-only' fast path in the render loop. private boolean preferLowerDelays = false; - + // --- HDR state for overlays --- + private volatile boolean hdrActive = false; + public boolean isHdrActive() { return hdrActive; } // Force tight thresholds regardless of device refresh (use vsyncPeriodNs always) private volatile boolean forceTightThresholds = false; @@ -1803,6 +1805,8 @@ public int submitDecodeUnit(byte[] decodeUnitData, int decodeUnitLength, int dec sb.append(context.getString(R.string.perf_overlay_lite_netdrops,(float)lastTwo.framesLost / lastTwo.totalFrames * 100)); sb.append("\t FPS:"); sb.append(context.getString(R.string.perf_overlay_lite_fps, fps.totalFps)); + // Show SDR/HDR mode in Perf Lite + sb.append(" ").append(hdrActive ? 
"HDR" : "SDR"); if(Stereo3DRenderer.isActive) { sb.append(" "); sb.append(context.getString(R.string.perf_overlay_ai_fps)); From ebc26ac88cbb72f9576f063ad44f30215b1921c9 Mon Sep 17 00:00:00 2001 From: IlFlacco Date: Thu, 2 Oct 2025 21:40:25 +0200 Subject: [PATCH 12/12] perf(overlay-lite): Fix HDR detection logic - Use standard Android color constants and proper BT.2020 + PQ/HLG criteria for HDR detection --- .../binding/video/MediaCodecDecoderRenderer.java | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/app/src/main/java/com/limelight/binding/video/MediaCodecDecoderRenderer.java b/app/src/main/java/com/limelight/binding/video/MediaCodecDecoderRenderer.java index 6e6f47d6c6..857882a81c 100755 --- a/app/src/main/java/com/limelight/binding/video/MediaCodecDecoderRenderer.java +++ b/app/src/main/java/com/limelight/binding/video/MediaCodecDecoderRenderer.java @@ -1461,13 +1461,23 @@ public void run() { try { __std = __fmt.getInteger("color-standard"); } catch (Throwable ignored) {} try { __tr = __fmt.getInteger("color-transfer"); } catch (Throwable ignored) {} try { __rng = __fmt.getInteger("color-range"); } catch (Throwable ignored) {} - boolean __isHdr = (__std == 6) && (__tr == 6 || __tr == 7); + // BT.2020 + (PQ o HLG) => HDR + boolean __isHdr = + (__std == android.media.MediaFormat.COLOR_STANDARD_BT2020) && + (__tr == android.media.MediaFormat.COLOR_TRANSFER_ST2084 + || __tr == android.media.MediaFormat.COLOR_TRANSFER_HLG); + // Update shared flag so overlays/renderer can see it + hdrActive = __isHdr; // Notify window color mode (no-op <26) try { com.limelight.Game.updateHdrWindowMode(__isHdr); } catch (Throwable ignored) {} // Pass HDR static info to GL upscaler if available java.nio.ByteBuffer __hdr = null; try { __hdr = __fmt.getByteBuffer("hdr-static-info"); } catch (Throwable ignored) {} - byte[] __hdrArr = null; if (__hdr != null && __hdr.remaining() > 0) { __hdrArr = new byte[__hdr.remaining()]; __hdr.get(__hdrArr); } + 
byte[] __hdrArr = null; + if (__hdr != null && __hdr.remaining() > 0) { + __hdrArr = new byte[__hdr.remaining()]; + __hdr.get(__hdrArr); + } } catch (Throwable ignored) {} LimeLog.info("New output format: " + outputFormat); break;