Reimplement crop using transforms

Reimplement the --crop feature using affine transforms.

Fixes #4162 <https://github.com/Genymobile/scrcpy/issues/4162>
PR #5455 <https://github.com/Genymobile/scrcpy/pull/5455>

parent 23960ca11a
commit 9fb0a3dac1
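
Note (illustration, not part of the commit): a crop selects a sub-rectangle of the display image. Instead of asking the display layer to crop, the capture renders the full frame, applies an affine transform in an OpenGL filter to keep only the cropped region, and uses the inverse of that transform to map client click positions back to device coordinates. A minimal round-trip sketch using the standard java.awt.geom.AffineTransform (scrcpy uses its own AffineMatrix class; the display and crop sizes below are assumptions):

import java.awt.geom.AffineTransform;
import java.awt.geom.NoninvertibleTransformException;
import java.awt.geom.Point2D;

public class CropRoundTripDemo {
    public static void main(String[] args) throws NoninvertibleTransformException {
        // Hypothetical 1080x2400 display cropped to a 1080x1080 square starting at y=600.
        // Forward transform: device pixel -> cropped video pixel (move the crop origin to 0,0).
        AffineTransform deviceToVideo = AffineTransform.getTranslateInstance(0, -600);

        // The top-left corner of the crop area becomes the video origin.
        Point2D videoPoint = deviceToVideo.transform(new Point2D.Double(0, 600), null);
        System.out.println(videoPoint); // Point2D.Double[0.0, 0.0]

        // The inverse transform maps a click on the video back to the device:
        // the center of the 1080x1080 video corresponds to (540, 1140) on the device.
        Point2D devicePoint = deviceToVideo.createInverse().transform(new Point2D.Double(540, 540), null);
        System.out.println(devicePoint); // Point2D.Double[540.0, 1140.0]
    }
}
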
@@ -15,6 +15,18 @@ public final class PositionMapper {
         this.videoToDeviceMatrix = videoToDeviceMatrix;
     }
 
+    public static PositionMapper create(Size videoSize, AffineMatrix filterTransform, Size targetSize) {
+        boolean convertToPixels = !videoSize.equals(targetSize) || filterTransform != null;
+        AffineMatrix transform = filterTransform;
+        if (convertToPixels) {
+            AffineMatrix inputTransform = AffineMatrix.ndcFromPixels(videoSize);
+            AffineMatrix outputTransform = AffineMatrix.ndcToPixels(targetSize);
+            transform = outputTransform.multiply(transform).multiply(inputTransform);
+        }
+
+        return new PositionMapper(videoSize, transform);
+    }
+
     public Point map(Position position) {
         Size clientVideoSize = position.getScreenSize();
         if (!videoSize.equals(clientVideoSize)) {
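
Note (illustration, not part of the commit): create() converts video pixel coordinates to normalized device coordinates, applies the filter transform, then converts back to target pixel coordinates. A self-contained sketch of that composition using java.awt.geom.AffineTransform, with an assumed NDC convention mapping [0, w] x [0, h] to [-1, 1] x [-1, 1] (scrcpy's AffineMatrix.ndcFromPixels()/ndcToPixels() may orient axes differently); the filter transform is treated as identity so the composition reduces to a plain rescale:

import java.awt.geom.AffineTransform;
import java.awt.geom.Point2D;

public class NdcCompositionDemo {
    // Assumed convention: pixels [0, w] x [0, h] map linearly to NDC [-1, 1] x [-1, 1].
    static AffineTransform ndcFromPixels(double w, double h) {
        AffineTransform t = new AffineTransform();
        t.translate(-1, -1);   // applied last: shift [0, 2] to [-1, 1]
        t.scale(2 / w, 2 / h); // applied first: scale pixels to [0, 2]
        return t;
    }

    static AffineTransform ndcToPixels(double w, double h) {
        AffineTransform t = new AffineTransform();
        t.scale(w / 2, h / 2); // applied last: scale [0, 2] to pixels
        t.translate(1, 1);     // applied first: shift [-1, 1] to [0, 2]
        return t;
    }

    public static void main(String[] args) {
        // With an identity filter transform, outputTransform * inputTransform is a plain rescale:
        // a 640x480 video mapped onto a 1280x960 target is exactly a 2x scale.
        AffineTransform composed = ndcToPixels(1280, 960);
        composed.concatenate(ndcFromPixels(640, 480)); // right-multiply: pixels -> NDC applied first
        Point2D p = composed.transform(new Point2D.Double(320, 240), null);
        System.out.println(p); // Point2D.Double[640.0, 480.0] -- the video center maps to the target center
    }
}
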
@@ -7,6 +7,9 @@ import com.genymobile.scrcpy.device.ConfigurationException;
 import com.genymobile.scrcpy.device.Device;
 import com.genymobile.scrcpy.device.DisplayInfo;
 import com.genymobile.scrcpy.device.Size;
+import com.genymobile.scrcpy.opengl.AffineOpenGLFilter;
+import com.genymobile.scrcpy.opengl.OpenGLFilter;
+import com.genymobile.scrcpy.opengl.OpenGLRunner;
 import com.genymobile.scrcpy.util.AffineMatrix;
 import com.genymobile.scrcpy.util.Ln;
 import com.genymobile.scrcpy.util.LogUtils;
@@ -24,11 +27,14 @@ import android.view.IDisplayFoldListener;
 import android.view.IRotationWatcher;
 import android.view.Surface;
 
+import java.io.IOException;
+
 public class ScreenCapture extends SurfaceCapture {
 
     private final VirtualDisplayListener vdListener;
     private final int displayId;
     private int maxSize;
+    private final Rect crop;
 
     private DisplayInfo displayInfo;
     private Size videoSize;
@@ -39,6 +45,9 @@ public class ScreenCapture extends SurfaceCapture {
     private IBinder display;
     private VirtualDisplay virtualDisplay;
 
+    private AffineMatrix transform;
+    private OpenGLRunner glRunner;
+
     private DisplayManager.DisplayListenerHandle displayListenerHandle;
     private HandlerThread handlerThread;
 
@@ -54,6 +63,7 @@ public class ScreenCapture extends SurfaceCapture {
         this.displayId = options.getDisplayId();
         assert displayId != Device.DISPLAY_ID_NONE;
         this.maxSize = options.getMaxSize();
+        this.crop = options.getCrop();
     }
 
     @Override
@@ -125,11 +135,20 @@ public class ScreenCapture extends SurfaceCapture {
 
         Size displaySize = displayInfo.getSize();
         setSessionDisplaySize(displaySize);
-        videoSize = displaySize.limit(maxSize).round8();
+
+        VideoFilter filter = new VideoFilter(displaySize);
+
+        if (crop != null) {
+            boolean transposed = (displayInfo.getRotation() % 2) != 0;
+            filter.addCrop(crop, transposed);
+        }
+
+        transform = filter.getInverseTransform();
+        videoSize = filter.getOutputSize().limit(maxSize).round8();
     }
 
     @Override
-    public void start(Surface surface) {
+    public void start(Surface surface) throws IOException {
         if (display != null) {
             SurfaceControl.destroyDisplay(display);
             display = null;
@@ -139,14 +158,28 @@ public class ScreenCapture extends SurfaceCapture {
             virtualDisplay = null;
         }
 
+        Size inputSize;
+        if (transform != null) {
+            // If there is a filter, it must receive the full display content
+            inputSize = displayInfo.getSize();
+
+            assert glRunner == null;
+            OpenGLFilter glFilter = new AffineOpenGLFilter(transform);
+            glRunner = new OpenGLRunner(glFilter);
+            surface = glRunner.start(inputSize, videoSize, surface);
+        } else {
+            // If there is no filter, the display must be rendered at target video size directly
+            inputSize = videoSize;
+        }
+
         int virtualDisplayId;
         PositionMapper positionMapper;
         try {
             virtualDisplay = ServiceManager.getDisplayManager()
-                    .createVirtualDisplay("scrcpy", videoSize.getWidth(), videoSize.getHeight(), displayId, surface);
+                    .createVirtualDisplay("scrcpy", inputSize.getWidth(), inputSize.getHeight(), displayId, surface);
             virtualDisplayId = virtualDisplay.getDisplay().getDisplayId();
-            // The position are relative to the virtual display, not the original display
-            positionMapper = new PositionMapper(videoSize, null);
+            // The positions are relative to the virtual display, not the original display (so use inputSize, not deviceSize!)
+            positionMapper = PositionMapper.create(videoSize, transform, inputSize);
             Ln.d("Display: using DisplayManager API");
         } catch (Exception displayManagerException) {
             try {
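
Note (assumed values, not from the commit): the inputSize/videoSize distinction above matters because, with a filter, the virtual display still renders the full display resolution into the OpenGL runner's input surface and only the filtered output reaches the encoder; without a filter, the display renders directly at video size. A small numeric sketch:

public class SurfaceSizesDemo {
    public static void main(String[] args) {
        // Assumed scenario: 1080x2400 display, --crop 1080:1080:0:600, no --max-size.
        int displayWidth = 1080, displayHeight = 2400;
        int cropWidth = 1080, cropHeight = 1080;

        // With a crop filter, the virtual display must produce the full frame...
        int inputWidth = displayWidth, inputHeight = displayHeight;
        // ...while the encoder receives only the filter output, rounded down to a
        // multiple of 8 (approximating what limit(maxSize).round8() would yield here).
        int videoWidth = cropWidth & ~7, videoHeight = cropHeight & ~7;

        System.out.printf("virtual display: %dx%d, encoder surface: %dx%d%n",
                inputWidth, inputHeight, videoWidth, videoHeight);
        // virtual display: 1080x2400, encoder surface: 1080x1080
    }
}
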
@@ -155,11 +188,10 @@ public class ScreenCapture extends SurfaceCapture {
                 Size deviceSize = displayInfo.getSize();
                 int layerStack = displayInfo.getLayerStack();
 
-                setDisplaySurface(display, surface, deviceSize.toRect(), videoSize.toRect(), layerStack);
+                setDisplaySurface(display, surface, deviceSize.toRect(), inputSize.toRect(), layerStack);
                 virtualDisplayId = displayId;
 
-                AffineMatrix videoToDeviceMatrix = videoSize.equals(deviceSize) ? null : AffineMatrix.scale(videoSize, deviceSize);
-                positionMapper = new PositionMapper(videoSize, videoToDeviceMatrix);
+                positionMapper = PositionMapper.create(videoSize, transform, deviceSize);
                 Ln.d("Display: using SurfaceControl API");
             } catch (Exception surfaceControlException) {
                 Ln.e("Could not create display using DisplayManager", displayManagerException);
@@ -173,6 +205,14 @@ public class ScreenCapture extends SurfaceCapture {
         }
     }
 
+    @Override
+    public void stop() {
+        if (glRunner != null) {
+            glRunner.stopAndRelease();
+            glRunner = null;
+        }
+    }
+
     @Override
     public void release() {
         if (Build.VERSION.SDK_INT == AndroidVersions.API_34_ANDROID_14) {
@@ -57,6 +57,13 @@ public abstract class SurfaceCapture {
      */
     public abstract void start(Surface surface) throws IOException;
 
+    /**
+     * Stop the capture.
+     */
+    public void stop() {
+        // Do nothing by default
+    }
+
     /**
      * Return the video size
      *
@@ -87,11 +87,13 @@ public class SurfaceEncoder implements AsyncProcessor {
 
            Surface surface = null;
            boolean mediaCodecStarted = false;
+           boolean captureStarted = false;
            try {
                mediaCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
                surface = mediaCodec.createInputSurface();
 
                capture.start(surface);
+               captureStarted = true;
 
                mediaCodec.start();
                mediaCodecStarted = true;
@@ -118,6 +120,9 @@ public class SurfaceEncoder implements AsyncProcessor {
                alive = true;
            } finally {
                reset.setRunningMediaCodec(null);
+               if (captureStarted) {
+                   capture.stop();
+               }
                if (mediaCodecStarted) {
                    try {
                        mediaCodec.stop();
@@ -0,0 +1,69 @@
+package com.genymobile.scrcpy.video;
+
+import com.genymobile.scrcpy.device.Size;
+import com.genymobile.scrcpy.util.AffineMatrix;
+
+import android.graphics.Rect;
+
+public class VideoFilter {
+
+    private Size size;
+    private AffineMatrix transform;
+
+    public VideoFilter(Size inputSize) {
+        this.size = inputSize;
+    }
+
+    public Size getOutputSize() {
+        return size;
+    }
+
+    public AffineMatrix getTransform() {
+        return transform;
+    }
+
+    /**
+     * Return the inverse transform.
+     * <p/>
+     * The direct affine transform describes how the input image is transformed.
+     * <p/>
+     * It is often useful to retrieve the inverse transform instead:
+     * <ul>
+     *     <li>The OpenGL filter expects the matrix to transform the image <em>coordinates</em>, which is the inverse transform;</li>
+     *     <li>The click positions must be transformed back to the device positions, using the inverse transform too.</li>
+     * </ul>
+     *
+     * @return the inverse transform
+     */
+    public AffineMatrix getInverseTransform() {
+        if (transform == null) {
+            return null;
+        }
+        return transform.invert();
+    }
+
+    private static Rect transposeRect(Rect rect) {
+        return new Rect(rect.top, rect.left, rect.bottom, rect.right);
+    }
+
+    public void addCrop(Rect crop, boolean transposed) {
+        if (transposed) {
+            crop = transposeRect(crop);
+        }
+
+        double inputWidth = size.getWidth();
+        double inputHeight = size.getHeight();
+
+        if (crop.left < 0 || crop.top < 0 || crop.right > inputWidth || crop.bottom > inputHeight) {
+            throw new IllegalArgumentException("Crop " + crop + " exceeds the input area (" + size + ")");
+        }
+
+        double x = crop.left / inputWidth;
+        double y = 1 - (crop.bottom / inputHeight); // OpenGL origin is bottom-left
+        double w = crop.width() / inputWidth;
+        double h = crop.height() / inputHeight;
+
+        transform = AffineMatrix.reframe(x, y, w, h).multiply(transform);
+        size = new Size(crop.width(), crop.height());
+    }
+}
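
Note (illustration, not part of the commit): to make the addCrop() arithmetic concrete, cropping a 1080x1920 frame to the 1080x1080 square whose top edge is at y=420 yields the normalized reframe parameters below. The values are assumed, and AffineMatrix.reframe() is scrcpy-internal, so only the fraction computation is reproduced here:

public class CropMathDemo {
    public static void main(String[] args) {
        // Assumed input frame and crop rectangle (left, top, right, bottom).
        double inputWidth = 1080, inputHeight = 1920;
        int left = 0, top = 420, right = 1080, bottom = 1500;

        // Same arithmetic as VideoFilter.addCrop(), expressed in [0, 1] coordinates:
        double x = left / inputWidth;            // 0.0
        double y = 1 - (bottom / inputHeight);   // 0.21875 -- OpenGL origin is bottom-left
        double w = (right - left) / inputWidth;  // 1.0
        double h = (bottom - top) / inputHeight; // 0.5625
        System.out.printf("x=%.5f y=%.5f w=%.5f h=%.5f%n", x, y, w, h);

        // AffineMatrix.reframe(x, y, w, h) then maps this sub-rectangle onto the full
        // output frame, whose size becomes the crop size (1080x1080).
    }
}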