diff --git a/server/src/main/java/com/genymobile/scrcpy/control/PositionMapper.java b/server/src/main/java/com/genymobile/scrcpy/control/PositionMapper.java
index cf9b25ab..4d3b8875 100644
--- a/server/src/main/java/com/genymobile/scrcpy/control/PositionMapper.java
+++ b/server/src/main/java/com/genymobile/scrcpy/control/PositionMapper.java
@@ -15,6 +15,18 @@ public final class PositionMapper {
         this.videoToDeviceMatrix = videoToDeviceMatrix;
     }
 
+    public static PositionMapper create(Size videoSize, AffineMatrix filterTransform, Size targetSize) {
+        boolean convertToPixels = !videoSize.equals(targetSize) || filterTransform != null;
+        AffineMatrix transform = filterTransform;
+        if (convertToPixels) {
+            AffineMatrix inputTransform = AffineMatrix.ndcFromPixels(videoSize);
+            AffineMatrix outputTransform = AffineMatrix.ndcToPixels(targetSize);
+            transform = outputTransform.multiply(transform).multiply(inputTransform);
+        }
+
+        return new PositionMapper(videoSize, transform);
+    }
+
     public Point map(Position position) {
         Size clientVideoSize = position.getScreenSize();
         if (!videoSize.equals(clientVideoSize)) {
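Note on `PositionMapper.create()`: the filter transform produced by `VideoFilter` is expressed in normalized coordinates, so `create()` wraps it with pixel-to-normalized and normalized-to-pixel conversions (`ndcFromPixels` on the video size, `ndcToPixels` on the target size) whenever a conversion is needed. When there is no filter and only the sizes differ, the composition reduces to the plain scale that the removed `AffineMatrix.scale(videoSize, deviceSize)` computed explicitly, assuming (as the call chain suggests) that `multiply()` treats a null factor as the identity. The sketch below is an editor's illustration of that no-filter case, assuming a [0,1]×[0,1] normalized space and ignoring any axis-flip convention that `ndcFromPixels`/`ndcToPixels` may apply; the class and variable names are hypothetical, not scrcpy code.

```java
// Standalone illustration (not part of the patch): the no-filter case of the
// composition done in PositionMapper.create(), assuming normalized [0,1] coordinates.
public final class NoFilterMappingSketch {
    public static void main(String[] args) {
        int videoWidth = 960, videoHeight = 540;     // size of the received video frames
        int deviceWidth = 1920, deviceHeight = 1080; // size of the target (device) display

        double xVideo = 480, yVideo = 135;           // click position in video pixels

        // pixels -> normalized, on the video size (role of ndcFromPixels)
        double xNorm = xVideo / videoWidth;
        double yNorm = yVideo / videoHeight;

        // no filter transform, so nothing to apply in between

        // normalized -> pixels, on the target size (role of ndcToPixels)
        double xDevice = xNorm * deviceWidth;
        double yDevice = yNorm * deviceHeight;

        // Prints 960.0, 270.0: a plain 2x scale, equivalent to the removed
        // AffineMatrix.scale(videoSize, deviceSize) path.
        System.out.println(xDevice + ", " + yDevice);
    }
}
```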
diff --git a/server/src/main/java/com/genymobile/scrcpy/video/ScreenCapture.java b/server/src/main/java/com/genymobile/scrcpy/video/ScreenCapture.java
index 8873cb6d..79d4974d 100644
--- a/server/src/main/java/com/genymobile/scrcpy/video/ScreenCapture.java
+++ b/server/src/main/java/com/genymobile/scrcpy/video/ScreenCapture.java
@@ -7,6 +7,9 @@ import com.genymobile.scrcpy.device.ConfigurationException;
 import com.genymobile.scrcpy.device.Device;
 import com.genymobile.scrcpy.device.DisplayInfo;
 import com.genymobile.scrcpy.device.Size;
+import com.genymobile.scrcpy.opengl.AffineOpenGLFilter;
+import com.genymobile.scrcpy.opengl.OpenGLFilter;
+import com.genymobile.scrcpy.opengl.OpenGLRunner;
 import com.genymobile.scrcpy.util.AffineMatrix;
 import com.genymobile.scrcpy.util.Ln;
 import com.genymobile.scrcpy.util.LogUtils;
@@ -24,11 +27,14 @@ import android.view.IDisplayFoldListener;
 import android.view.IRotationWatcher;
 import android.view.Surface;
 
+import java.io.IOException;
+
 public class ScreenCapture extends SurfaceCapture {
 
     private final VirtualDisplayListener vdListener;
     private final int displayId;
     private int maxSize;
+    private final Rect crop;
 
     private DisplayInfo displayInfo;
     private Size videoSize;
@@ -39,6 +45,9 @@ public class ScreenCapture extends SurfaceCapture {
     private IBinder display;
     private VirtualDisplay virtualDisplay;
 
+    private AffineMatrix transform;
+    private OpenGLRunner glRunner;
+
     private DisplayManager.DisplayListenerHandle displayListenerHandle;
     private HandlerThread handlerThread;
 
@@ -54,6 +63,7 @@ public class ScreenCapture extends SurfaceCapture {
         this.displayId = options.getDisplayId();
         assert displayId != Device.DISPLAY_ID_NONE;
         this.maxSize = options.getMaxSize();
+        this.crop = options.getCrop();
     }
 
     @Override
@@ -125,11 +135,20 @@ public class ScreenCapture extends SurfaceCapture {
 
         Size displaySize = displayInfo.getSize();
         setSessionDisplaySize(displaySize);
-        videoSize = displaySize.limit(maxSize).round8();
+
+        VideoFilter filter = new VideoFilter(displaySize);
+
+        if (crop != null) {
+            boolean transposed = (displayInfo.getRotation() % 2) != 0;
+            filter.addCrop(crop, transposed);
+        }
+
+        transform = filter.getInverseTransform();
+        videoSize = filter.getOutputSize().limit(maxSize).round8();
     }
 
     @Override
-    public void start(Surface surface) {
+    public void start(Surface surface) throws IOException {
         if (display != null) {
             SurfaceControl.destroyDisplay(display);
             display = null;
@@ -139,14 +158,28 @@ public class ScreenCapture extends SurfaceCapture {
             virtualDisplay = null;
         }
 
+        Size inputSize;
+        if (transform != null) {
+            // If there is a filter, it must receive the full display content
+            inputSize = displayInfo.getSize();
+            assert glRunner == null;
+            OpenGLFilter glFilter = new AffineOpenGLFilter(transform);
+            glRunner = new OpenGLRunner(glFilter);
+            surface = glRunner.start(inputSize, videoSize, surface);
+        } else {
+            // If there is no filter, the display must be rendered at target video size directly
+            inputSize = videoSize;
+        }
+
         int virtualDisplayId;
         PositionMapper positionMapper;
         try {
             virtualDisplay = ServiceManager.getDisplayManager()
-                    .createVirtualDisplay("scrcpy", videoSize.getWidth(), videoSize.getHeight(), displayId, surface);
+                    .createVirtualDisplay("scrcpy", inputSize.getWidth(), inputSize.getHeight(), displayId, surface);
             virtualDisplayId = virtualDisplay.getDisplay().getDisplayId();
-            // The position are relative to the virtual display, not the original display
-            positionMapper = new PositionMapper(videoSize, null);
+
+            // The positions are relative to the virtual display, not the original display (so use inputSize, not deviceSize!)
+            positionMapper = PositionMapper.create(videoSize, transform, inputSize);
             Ln.d("Display: using DisplayManager API");
         } catch (Exception displayManagerException) {
             try {
@@ -155,11 +188,10 @@ public class ScreenCapture extends SurfaceCapture {
                 Size deviceSize = displayInfo.getSize();
                 int layerStack = displayInfo.getLayerStack();
 
-                setDisplaySurface(display, surface, deviceSize.toRect(), videoSize.toRect(), layerStack);
+                setDisplaySurface(display, surface, deviceSize.toRect(), inputSize.toRect(), layerStack);
                 virtualDisplayId = displayId;
 
-                AffineMatrix videoToDeviceMatrix = videoSize.equals(deviceSize) ? null : AffineMatrix.scale(videoSize, deviceSize);
-                positionMapper = new PositionMapper(videoSize, videoToDeviceMatrix);
+                positionMapper = PositionMapper.create(videoSize, transform, deviceSize);
                 Ln.d("Display: using SurfaceControl API");
             } catch (Exception surfaceControlException) {
                 Ln.e("Could not create display using DisplayManager", displayManagerException);
@@ -173,6 +205,14 @@ public class ScreenCapture extends SurfaceCapture {
         }
     }
 
+    @Override
+    public void stop() {
+        if (glRunner != null) {
+            glRunner.stopAndRelease();
+            glRunner = null;
+        }
+    }
+
     @Override
     public void release() {
         if (Build.VERSION.SDK_INT == AndroidVersions.API_34_ANDROID_14) {
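Note on the crop handling in `ScreenCapture`: `prepare()` builds a `VideoFilter` from the full display size, applies the crop, keeps the inverse transform, and only then limits and rounds the filter's output size to obtain `videoSize`. In `start()`, when a transform exists the display is mirrored at its full size into an `OpenGLRunner` that renders the filtered result into the encoder surface at `videoSize`; otherwise the display is mirrored at `videoSize` directly. The crop rectangle is transposed when `displayInfo.getRotation()` is 90° or 270°. The sketch below is an editor's illustration of that transposition only, under the assumption that the crop rectangle and the captured frame are expressed in different orientations when the display is rotated; the class and method names are hypothetical, not scrcpy code.

```java
import android.graphics.Rect;

// Standalone illustration (not part of the patch) of the crop transposition
// performed when (displayInfo.getRotation() % 2) != 0.
final class TransposedCropSketch {

    // Same swap as VideoFilter.transposeRect(): (left, top, right, bottom) -> (top, left, bottom, right)
    static Rect transpose(Rect rect) {
        return new Rect(rect.top, rect.left, rect.bottom, rect.right);
    }

    public static void main(String[] args) {
        Rect crop = new Rect(100, 200, 700, 1000); // a 600x800 rectangle
        int rotation = 1;                          // Surface.ROTATION_90
        boolean transposed = (rotation % 2) != 0;

        Rect effective = transposed ? transpose(crop) : crop;
        // Prints Rect(200, 100 - 1000, 700): an 800x600 area in the rotated frame
        System.out.println(effective);
    }
}
```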
diff --git a/server/src/main/java/com/genymobile/scrcpy/video/SurfaceCapture.java b/server/src/main/java/com/genymobile/scrcpy/video/SurfaceCapture.java
index d0d93f54..39d3bdb8 100644
--- a/server/src/main/java/com/genymobile/scrcpy/video/SurfaceCapture.java
+++ b/server/src/main/java/com/genymobile/scrcpy/video/SurfaceCapture.java
@@ -57,6 +57,13 @@ public abstract class SurfaceCapture {
      */
     public abstract void start(Surface surface) throws IOException;
 
+    /**
+     * Stop the capture.
+     */
+    public void stop() {
+        // Do nothing by default
+    }
+
     /**
      * Return the video size
      *
diff --git a/server/src/main/java/com/genymobile/scrcpy/video/SurfaceEncoder.java b/server/src/main/java/com/genymobile/scrcpy/video/SurfaceEncoder.java
index dcb5d648..bc120107 100644
--- a/server/src/main/java/com/genymobile/scrcpy/video/SurfaceEncoder.java
+++ b/server/src/main/java/com/genymobile/scrcpy/video/SurfaceEncoder.java
@@ -87,11 +87,13 @@
 
         Surface surface = null;
         boolean mediaCodecStarted = false;
+        boolean captureStarted = false;
         try {
             mediaCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
             surface = mediaCodec.createInputSurface();
 
             capture.start(surface);
+            captureStarted = true;
 
             mediaCodec.start();
             mediaCodecStarted = true;
@@ -118,6 +120,9 @@
             alive = true;
         } finally {
             reset.setRunningMediaCodec(null);
+            if (captureStarted) {
+                capture.stop();
+            }
             if (mediaCodecStarted) {
                 try {
                     mediaCodec.stop();
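Note on the capture lifecycle: `SurfaceCapture.stop()` is a new per-session hook (a no-op by default) that `ScreenCapture` overrides to release the OpenGL runner, and `SurfaceEncoder` only calls it when `capture.start()` actually succeeded, tracked by the `captureStarted` flag. Below is a minimal sketch of that guard pattern with hypothetical names (`Capture`, `runSession`) and none of the MediaCodec details; it is an editor's illustration, not scrcpy code.

```java
import java.io.IOException;

// Minimal sketch (editor's illustration): the started-flag guard used in
// SurfaceEncoder so that stop() only runs after a successful start().
final class CaptureLifecycleSketch {

    interface Capture {
        void start() throws IOException;

        default void stop() {
            // Do nothing by default, like SurfaceCapture.stop()
        }
    }

    static void runSession(Capture capture) throws IOException {
        boolean captureStarted = false;
        try {
            capture.start();
            captureStarted = true;
            // ... configure, start and drain the encoder here ...
        } finally {
            if (captureStarted) {
                // Release per-session resources (e.g. the OpenGL runner in ScreenCapture)
                capture.stop();
            }
        }
    }
}
```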

diff --git a/server/src/main/java/com/genymobile/scrcpy/video/VideoFilter.java b/server/src/main/java/com/genymobile/scrcpy/video/VideoFilter.java
new file mode 100644
index 00000000..5a52231f
--- /dev/null
+++ b/server/src/main/java/com/genymobile/scrcpy/video/VideoFilter.java
@@ -0,0 +1,69 @@
+package com.genymobile.scrcpy.video;
+
+import com.genymobile.scrcpy.device.Size;
+import com.genymobile.scrcpy.util.AffineMatrix;
+
+import android.graphics.Rect;
+
+public class VideoFilter {
+
+    private Size size;
+    private AffineMatrix transform;
+
+    public VideoFilter(Size inputSize) {
+        this.size = inputSize;
+    }
+
+    public Size getOutputSize() {
+        return size;
+    }
+
+    public AffineMatrix getTransform() {
+        return transform;
+    }
+
+    /**
+     * Return the inverse transform.
+     * <p>
+     * The direct affine transform describes how the input image is transformed.
+     * <p>
+     * It is often useful to retrieve the inverse transform instead:
+     * <ul>
+     *     <li>the OpenGL filter needs the matrix mapping the output image coordinates back to the input image coordinates;</li>
+     *     <li>the click positions received in video coordinates must be mapped back to the device coordinates.</li>
+     * </ul>
+     *
+     * @return the inverse transform
+     */
+    public AffineMatrix getInverseTransform() {
+        if (transform == null) {
+            return null;
+        }
+        return transform.invert();
+    }
+
+    private static Rect transposeRect(Rect rect) {
+        return new Rect(rect.top, rect.left, rect.bottom, rect.right);
+    }
+
+    public void addCrop(Rect crop, boolean transposed) {
+        if (transposed) {
+            crop = transposeRect(crop);
+        }
+
+        double inputWidth = size.getWidth();
+        double inputHeight = size.getHeight();
+
+        if (crop.left < 0 || crop.top < 0 || crop.right > inputWidth || crop.bottom > inputHeight) {
+            throw new IllegalArgumentException("Crop " + crop + " exceeds the input area (" + size + ")");
+        }
+
+        double x = crop.left / inputWidth;
+        double y = 1 - (crop.bottom / inputHeight); // OpenGL origin is bottom-left
+        double w = crop.width() / inputWidth;
+        double h = crop.height() / inputHeight;
+
+        transform = AffineMatrix.reframe(x, y, w, h).multiply(transform);
+        size = new Size(crop.width(), crop.height());
+    }
+}
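Note on `addCrop()`: the crop rectangle is converted into a normalized reframing of the input, with the vertical coordinate flipped because pixel coordinates use a top-left origin while OpenGL uses a bottom-left origin. The snippet below is an editor's worked example of that arithmetic for a centered half-size crop of a 1920×1080 input, assuming a [0,1]×[0,1] normalized space; the class name is hypothetical, not scrcpy code.

```java
// Standalone worked example of the arithmetic in VideoFilter.addCrop(),
// assuming a [0,1]x[0,1] normalized space with a bottom-left origin.
public final class CropReframeSketch {
    public static void main(String[] args) {
        double inputWidth = 1920, inputHeight = 1080;

        // Centered half-size crop: left=480, top=270, right=1440, bottom=810
        double left = 480, top = 270, right = 1440, bottom = 810;

        double x = left / inputWidth;            // 0.25
        double y = 1 - (bottom / inputHeight);   // 0.25 (flip: pixel origin top-left, GL origin bottom-left)
        double w = (right - left) / inputWidth;  // 0.50
        double h = (bottom - top) / inputHeight; // 0.50

        // The filter would call AffineMatrix.reframe(0.25, 0.25, 0.50, 0.50) and the
        // output size would become 960x540; the inverse of this transform is what
        // ScreenCapture hands to the OpenGL stage and to PositionMapper.create().
        System.out.printf("x=%.2f y=%.2f w=%.2f h=%.2f%n", x, y, w, h);
    }
}
```

Composing with `multiply(transform)` rather than overwriting the field lets a crop accumulate on top of any previously configured transform; since `transform` is null before the first call, this again suggests that `AffineMatrix.multiply()` treats a null operand as the identity.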