Compare commits


1 commit

SHA1          Message                                     Date
4397dfba89    OpenGL filter prototype (does not work)     2024-11-03 15:57:48 +01:00
100 changed files with 1386 additions and 3698 deletions

View File

@ -6,15 +6,11 @@ on:
name:
description: 'Version name (default is ref name)'
env:
# $VERSION is used by release scripts
VERSION: ${{ github.event.inputs.name || github.ref_name }}
jobs:
test-scrcpy-server:
build-scrcpy-server:
runs-on: ubuntu-latest
env:
GRADLE: gradle # use native gradle instead of ./gradlew in scripts
GRADLE: gradle # use native gradle instead of ./gradlew in release.mk
steps:
- name: Checkout code
uses: actions/checkout@v4
@ -26,45 +22,16 @@ jobs:
java-version: '17'
- name: Test scrcpy-server
run: release/test_server.sh
build-scrcpy-server:
runs-on: ubuntu-latest
env:
GRADLE: gradle # use native gradle instead of ./gradlew in scripts
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Setup JDK
uses: actions/setup-java@v4
with:
distribution: 'zulu'
java-version: '17'
run: make -f release.mk test-server
- name: Build scrcpy-server
run: release/build_server.sh
run: make -f release.mk build-server
- name: Upload scrcpy-server artifact
uses: actions/upload-artifact@v4
with:
name: scrcpy-server
path: release/work/build-server/server/scrcpy-server
test-build-scrcpy-server-without-gradle:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Setup JDK
uses: actions/setup-java@v4
with:
distribution: 'zulu'
java-version: '17'
- name: Build scrcpy-server without gradle
run: server/build_without_gradle.sh
path: build-server/server/scrcpy-server
test-client:
runs-on: ubuntu-latest
@ -77,42 +44,15 @@ jobs:
sudo apt update
sudo apt install -y meson ninja-build nasm ffmpeg libsdl2-2.0-0 \
libsdl2-dev libavcodec-dev libavdevice-dev libavformat-dev \
libavutil-dev libswresample-dev libusb-1.0-0 libusb-1.0-0-dev \
libv4l-dev
libavutil-dev libswresample-dev libusb-1.0-0 libusb-1.0-0-dev
- name: Build
run: |
meson setup d -Db_sanitize=address,undefined
- name: Test
run: release/test_client.sh
build-linux:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Install dependencies
run: |
sudo apt update
sudo apt install -y meson ninja-build nasm ffmpeg libsdl2-2.0-0 \
libsdl2-dev libavcodec-dev libavdevice-dev libavformat-dev \
libavutil-dev libswresample-dev libusb-1.0-0 libusb-1.0-0-dev \
libv4l-dev
- name: Build linux
run: release/build_linux.sh
# upload-artifact does not preserve permissions
- name: Tar
run: |
cd release/work/build-linux
mkdir dist-tar
cd dist-tar
tar -C .. -cvf dist.tar.gz dist/
- name: Upload build-linux artifact
uses: actions/upload-artifact@v4
with:
name: build-linux-intermediate
path: release/work/build-linux/dist-tar/
meson test -Cd
build-win32:
runs-on: ubuntu-latest
@ -131,22 +71,14 @@ jobs:
- name: Workaround for old meson version run by Github Actions
run: sed -i 's/^pkg-config/pkgconfig/' cross_win32.txt
- name: Build win32
run: release/build_windows.sh 32
# upload-artifact does not preserve permissions
- name: Tar
run: |
cd release/work/build-win32
mkdir dist-tar
cd dist-tar
tar -C .. -cvf dist.tar.gz dist/
- name: Build scrcpy win32
run: make -f release.mk build-win32
- name: Upload build-win32 artifact
uses: actions/upload-artifact@v4
with:
name: build-win32-intermediate
path: release/work/build-win32/dist-tar/
path: build-win32/dist/
build-win64:
runs-on: ubuntu-latest
@ -165,92 +97,24 @@ jobs:
- name: Workaround for old meson version run by Github Actions
run: sed -i 's/^pkg-config/pkgconfig/' cross_win64.txt
- name: Build win64
run: release/build_windows.sh 64
# upload-artifact does not preserve permissions
- name: Tar
run: |
cd release/work/build-win64
mkdir dist-tar
cd dist-tar
tar -C .. -cvf dist.tar.gz dist/
- name: Build scrcpy win64
run: make -f release.mk build-win64
- name: Upload build-win64 artifact
uses: actions/upload-artifact@v4
with:
name: build-win64-intermediate
path: release/work/build-win64/dist-tar/
path: build-win64/dist/
build-macos:
runs-on: macos-latest
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Install dependencies
run: |
brew install meson ninja nasm libiconv zlib automake autoconf \
libtool
- name: Build macOS
run: release/build_macos.sh
# upload-artifact does not preserve permissions
- name: Tar
run: |
cd release/work/build-macos
mkdir dist-tar
cd dist-tar
tar -C .. -cvf dist.tar.gz dist/
- name: Upload build-macos artifact
uses: actions/upload-artifact@v4
with:
name: build-macos-intermediate
path: release/work/build-macos/dist-tar/
package-linux:
needs:
- build-scrcpy-server
- build-linux
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Download scrcpy-server
uses: actions/download-artifact@v4
with:
name: scrcpy-server
path: release/work/build-server/server/
- name: Download build-linux
uses: actions/download-artifact@v4
with:
name: build-linux-intermediate
path: release/work/build-linux/dist-tar/
# upload-artifact does not preserve permissions
- name: Detar
run: |
cd release/work/build-linux
tar xf dist-tar/dist.tar.gz
- name: Package linux
run: release/package_client.sh linux tar.gz
- name: Upload linux release
uses: actions/upload-artifact@v4
with:
name: release-linux
path: release/output/
package-win32:
package:
needs:
- build-scrcpy-server
- build-win32
- build-win64
runs-on: ubuntu-latest
env:
# $VERSION is used by release.mk
VERSION: ${{ github.event.inputs.name || github.ref_name }}
steps:
- name: Checkout code
uses: actions/checkout@v4
@ -259,151 +123,25 @@ jobs:
uses: actions/download-artifact@v4
with:
name: scrcpy-server
path: release/work/build-server/server/
path: build-server/server/
- name: Download build-win32
uses: actions/download-artifact@v4
with:
name: build-win32-intermediate
path: release/work/build-win32/dist-tar/
# upload-artifact does not preserve permissions
- name: Detar
run: |
cd release/work/build-win32
tar xf dist-tar/dist.tar.gz
- name: Package win32
run: release/package_client.sh win32 zip
- name: Upload win32 release
uses: actions/upload-artifact@v4
with:
name: release-win32
path: release/output/
package-win64:
needs:
- build-scrcpy-server
- build-win64
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Download scrcpy-server
uses: actions/download-artifact@v4
with:
name: scrcpy-server
path: release/work/build-server/server/
path: build-win32/dist/
- name: Download build-win64
uses: actions/download-artifact@v4
with:
name: build-win64-intermediate
path: release/work/build-win64/dist-tar/
path: build-win64/dist/
# upload-artifact does not preserve permissions
- name: Detar
run: |
cd release/work/build-win64
tar xf dist-tar/dist.tar.gz
- name: Package win64
run: release/package_client.sh win64 zip
- name: Upload win64 release
uses: actions/upload-artifact@v4
with:
name: release-win64
path: release/output
package-macos:
needs:
- build-scrcpy-server
- build-macos
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Download scrcpy-server
uses: actions/download-artifact@v4
with:
name: scrcpy-server
path: release/work/build-server/server/
- name: Download build-macos
uses: actions/download-artifact@v4
with:
name: build-macos-intermediate
path: release/work/build-macos/dist-tar/
# upload-artifact does not preserve permissions
- name: Detar
run: |
cd release/work/build-macos
tar xf dist-tar/dist.tar.gz
- name: Package macos
run: release/package_client.sh macos tar.gz
- name: Upload macos release
uses: actions/upload-artifact@v4
with:
name: release-macos
path: release/output/
release:
needs:
- build-scrcpy-server
- package-linux
- package-win32
- package-win64
- package-macos
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Download scrcpy-server
uses: actions/download-artifact@v4
with:
name: scrcpy-server
path: release/work/build-server/server/
- name: Download release-linux
uses: actions/download-artifact@v4
with:
name: release-linux
path: release/output/
- name: Download release-win32
uses: actions/download-artifact@v4
with:
name: release-win32
path: release/output/
- name: Download release-win64
uses: actions/download-artifact@v4
with:
name: release-win64
path: release/output/
- name: Download release-macos
uses: actions/download-artifact@v4
with:
name: release-macos
path: release/output/
- name: Package server
run: release/package_server.sh
- name: Generate checksums
run: release/generate_checksums.sh
- name: Package
run: make -f release.mk package
- name: Upload release artifact
uses: actions/upload-artifact@v4
with:
name: scrcpy-release-${{ env.VERSION }}
path: release/output
path: release-${{ env.VERSION }}
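
A minimal sketch of the tar round-trip used by the jobs above, since upload-artifact does not preserve file permissions (paths as in the build-linux and package-linux steps; the win32, win64 and macOS jobs follow the same pattern):

    # in the build job, before actions/upload-artifact
    cd release/work/build-linux
    mkdir dist-tar && cd dist-tar
    tar -C .. -cvf dist.tar.gz dist/

    # in the packaging job, after actions/download-artifact
    cd release/work/build-linux
    tar xf dist-tar/dist.tar.gz   # restores dist/ with executable bits intact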

View File

@ -2,7 +2,7 @@
source for the project. Do not download releases from random websites, even if
their name contains `scrcpy`.**
# scrcpy (v3.0)
# scrcpy (v2.7)
<img src="app/data/icon.svg" width="128" height="128" alt="scrcpy" align="right" />
@ -74,7 +74,7 @@ Note that USB debugging is not required to run scrcpy in [OTG mode](doc/otg.md).
## Get the app
- [Linux](doc/linux.md)
- [Windows](doc/windows.md) (read [how to run](doc/windows.md#run))
- [Windows](doc/windows.md)
- [macOS](doc/macos.md)
@ -141,7 +141,7 @@ documented in the following pages:
- [Device](doc/device.md)
- [Window](doc/window.md)
- [Recording](doc/recording.md)
- [Virtual display](doc/virtual_display.md)
- [Virtual display](doc/virtual_displays.md)
- [Tunnels](doc/tunnels.md)
- [OTG](doc/otg.md)
- [Camera](doc/camera.md)
@ -181,7 +181,6 @@ to your problem immediately.
You can also use:
- Reddit: [`r/scrcpy`](https://www.reddit.com/r/scrcpy)
- BlueSky: [`@scrcpy.bsky.social`](https://bsky.app/profile/scrcpy.bsky.social)
- Twitter: [`@scrcpy_app`](https://twitter.com/scrcpy_app)

View File

@ -2,7 +2,6 @@ _scrcpy() {
local cur prev words cword
local opts="
--always-on-top
--angle
--audio-bit-rate=
--audio-buffer=
--audio-codec=
@ -18,7 +17,6 @@ _scrcpy() {
--camera-fps=
--camera-high-speed
--camera-size=
--capture-orientation=
--crop=
-d --select-usb
--disable-screensaver
@ -39,6 +37,8 @@ _scrcpy() {
--list-cameras
--list-displays
--list-encoders
--lock-video-orientation
--lock-video-orientation=
-m --max-size=
-M
--max-fps=
@ -57,7 +57,6 @@ _scrcpy() {
--no-mipmaps
--no-mouse-hover
--no-power-on
--no-vd-system-decorations
--no-video
--no-video-playback
--orientation=
@ -78,7 +77,6 @@ _scrcpy() {
--rotation=
-s --serial=
-S --turn-screen-off
--screen-off-timeout=
--shortcut-mod=
--start-app=
-t --show-touches
@ -139,10 +137,6 @@ _scrcpy() {
COMPREPLY=($(compgen -W 'disabled uhid aoa' -- "$cur"))
return
;;
--capture-orientation)
COMPREPLY=($(compgen -W '0 90 180 270 flip0 flip90 flip180 flip270 @0 @90 @180 @270 @flip0 @flip90 @flip180 @flip270' -- "$cur"))
return
;;
--orientation|--display-orientation)
COMPREPLY=($(compgen -W '0 90 180 270 flip0 flip90 flip180 flip270' -- "$cur"))
return
@ -151,6 +145,10 @@ _scrcpy() {
COMPREPLY=($(compgen -W '0 90 180 270' -- "$cur"))
return
;;
--lock-video-orientation)
COMPREPLY=($(compgen -W 'unlocked initial 0 90 180 270' -- "$cur"))
return
;;
--pause-on-exit)
COMPREPLY=($(compgen -W 'true false if-error' -- "$cur"))
return
@ -195,7 +193,6 @@ _scrcpy() {
|--display-id \
|--max-fps \
|-m|--max-size \
|--new-display \
|-p|--port \
|--push-target \
|--rotation \

View File

@ -1,6 +0,0 @@
#!/bin/bash
cd "$(dirname ${BASH_SOURCE[0]})"
export ADB="${ADB:-./adb}"
export SCRCPY_SERVER_PATH="${SCRCPY_SERVER_PATH:-./scrcpy-server}"
export SCRCPY_ICON_PATH="${SCRCPY_ICON_PATH:-./icon.png}"
./scrcpy_bin "$@"
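
The deleted wrapper above resolves adb, the server and the icon relative to its own directory, but each path can be overridden through the environment. A hypothetical invocation (variable names as in the script; the wrapper being installed as ./scrcpy next to scrcpy_bin is an assumption):

    # use the system adb and an out-of-tree server build
    ADB=/usr/bin/adb SCRCPY_SERVER_PATH=/path/to/scrcpy-server ./scrcpy --max-size 1920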

View File

@ -9,7 +9,6 @@ local arguments
arguments=(
'--always-on-top[Make scrcpy window always on top \(above other windows\)]'
'--angle=[Rotate the video content by a custom angle, in degrees]'
'--audio-bit-rate=[Encode the audio at the given bit-rate]'
'--audio-buffer=[Configure the audio buffering delay (in milliseconds)]'
'--audio-codec=[Select the audio codec]:codec:(opus aac flac raw)'
@ -25,7 +24,6 @@ arguments=(
'--camera-facing=[Select the device camera by its facing direction]:facing:(front back external)'
'--camera-fps=[Specify the camera capture frame rate]'
'--camera-size=[Specify an explicit camera capture size]'
'--capture-orientation=[Set the capture video orientation]:orientation:(0 90 180 270 flip0 flip90 flip180 flip270 @0 @90 @180 @270 @flip0 @flip90 @flip180 @flip270)'
'--crop=[\[width\:height\:x\:y\] Crop the device screen on the server]'
{-d,--select-usb}'[Use USB device]'
'--disable-screensaver[Disable screensaver while scrcpy is running]'
@ -46,6 +44,7 @@ arguments=(
'--list-cameras[List cameras available on the device]'
'--list-displays[List displays available on the device]'
'--list-encoders[List video and audio encoders available on the device]'
'--lock-video-orientation=[Lock video orientation]:orientation:(unlocked initial 0 90 180 270)'
{-m,--max-size=}'[Limit both the width and height of the video to value]'
'-M[Use UHID/AOA mouse (same as --mouse=uhid or --mouse=aoa, depending on OTG mode)]'
'--max-fps=[Limit the frame rate of screen capture]'
@ -63,7 +62,6 @@ arguments=(
'--no-mipmaps[Disable the generation of mipmaps]'
'--no-mouse-hover[Do not forward mouse hover events]'
'--no-power-on[Do not power on the device on start]'
'--no-vd-system-decorations[Disable virtual display system decorations flag]'
'--no-video[Disable video forwarding]'
'--no-video-playback[Disable video playback]'
'--orientation=[Set the video orientation]:orientation values:(0 90 180 270 flip0 flip90 flip180 flip270)'
@ -82,7 +80,6 @@ arguments=(
'--require-audio=[Make scrcpy fail if audio is enabled but does not work]'
{-s,--serial=}'[The device serial number \(mandatory for multiple devices only\)]:serial:($("${ADB-adb}" devices | awk '\''$2 == "device" {print $1}'\''))'
{-S,--turn-screen-off}'[Turn the device screen off immediately]'
'--screen-off-timeout=[Set the screen off timeout in seconds]'
'--shortcut-mod=[\[key1,key2+key3,...\] Specify the modifiers to use for scrcpy shortcuts]:shortcut mod:(lctrl rctrl lalt ralt lsuper rsuper)'
'--start-app=[Start an Android app]'
{-t,--show-touches}'[Show physical touches]'

View File

@ -4,10 +4,10 @@ DEPS_DIR=$(dirname ${BASH_SOURCE[0]})
cd "$DEPS_DIR"
. common
VERSION=35.0.2
FILENAME=platform-tools_r$VERSION-windows.zip
PROJECT_DIR=platform-tools-$VERSION-windows
SHA256SUM=2975a3eac0b19182748d64195375ad056986561d994fffbdc64332a516300bb9
VERSION=35.0.0
FILENAME=platform-tools_r$VERSION-windows.zip
PROJECT_DIR=platform-tools-$VERSION
SHA256SUM=7ab78a8f8b305ae4d0de647d99c43599744de61a0838d3a47bda0cdffefee87e
cd "$SOURCES_DIR"
@ -27,6 +27,6 @@ else
rmdir "$ZIP_PREFIX"
fi
mkdir -p "$INSTALL_DIR/adb-windows"
cd "$INSTALL_DIR/adb-windows"
cp -r "$SOURCES_DIR/$PROJECT_DIR"/. "$INSTALL_DIR/adb-windows/"
mkdir -p "$INSTALL_DIR/$HOST/bin"
cd "$INSTALL_DIR/$HOST/bin"
cp -r "$SOURCES_DIR/$PROJECT_DIR"/. "$INSTALL_DIR/$HOST/bin/"

View File

@ -1,29 +0,0 @@
#!/usr/bin/env bash
set -ex
DEPS_DIR=$(dirname ${BASH_SOURCE[0]})
cd "$DEPS_DIR"
. common
VERSION=35.0.2
FILENAME=platform-tools_r$VERSION-linux.zip
PROJECT_DIR=platform-tools-$VERSION-linux
SHA256SUM=acfdcccb123a8718c46c46c059b2f621140194e5ec1ac9d81715be3d6ab6cd0a
cd "$SOURCES_DIR"
if [[ -d "$PROJECT_DIR" ]]
then
echo "$PWD/$PROJECT_DIR" found
else
get_file "https://dl.google.com/android/repository/$FILENAME" "$FILENAME" "$SHA256SUM"
mkdir -p "$PROJECT_DIR"
cd "$PROJECT_DIR"
ZIP_PREFIX=platform-tools
unzip "../$FILENAME" "$ZIP_PREFIX"/adb
mv "$ZIP_PREFIX"/* .
rmdir "$ZIP_PREFIX"
fi
mkdir -p "$INSTALL_DIR/adb-linux"
cd "$INSTALL_DIR/adb-linux"
cp -r "$SOURCES_DIR/$PROJECT_DIR"/. "$INSTALL_DIR/adb-linux/"
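
A manual equivalent of the deleted helper above, with the URL, checksum and archive member exactly as listed in the script (curl is used here for illustration in place of the project's get_file):

    curl -fLO https://dl.google.com/android/repository/platform-tools_r35.0.2-linux.zip
    echo "acfdcccb123a8718c46c46c059b2f621140194e5ec1ac9d81715be3d6ab6cd0a  platform-tools_r35.0.2-linux.zip" | sha256sum -c
    unzip platform-tools_r35.0.2-linux.zip platform-tools/adb   # extract only adb, like the script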

View File

@ -1,29 +0,0 @@
#!/usr/bin/env bash
set -ex
DEPS_DIR=$(dirname ${BASH_SOURCE[0]})
cd "$DEPS_DIR"
. common
VERSION=35.0.2
FILENAME=platform-tools_r$VERSION-darwin.zip
PROJECT_DIR=platform-tools-$VERSION-darwin
SHA256SUM=1820078db90bf21628d257ff052528af1c61bb48f754b3555648f5652fa35d78
cd "$SOURCES_DIR"
if [[ -d "$PROJECT_DIR" ]]
then
echo "$PWD/$PROJECT_DIR" found
else
get_file "https://dl.google.com/android/repository/$FILENAME" "$FILENAME" "$SHA256SUM"
mkdir -p "$PROJECT_DIR"
cd "$PROJECT_DIR"
ZIP_PREFIX=platform-tools
unzip "../$FILENAME" "$ZIP_PREFIX"/adb
mv "$ZIP_PREFIX"/* .
rmdir "$ZIP_PREFIX"
fi
mkdir -p "$INSTALL_DIR/adb-macos"
cd "$INSTALL_DIR/adb-macos"
cp -r "$SOURCES_DIR/$PROJECT_DIR"/. "$INSTALL_DIR/adb-macos/"

View File

@ -1,47 +1,25 @@
#!/usr/bin/env bash
# This file is intended to be sourced by other scripts, not executed
process_args() {
if [[ $# != 3 ]]
then
# <host>: win32 or win64
# <build_type>: native or cross
# <link_type>: static or shared
echo "Syntax: $0 <host> <build_type> <link_type>" >&2
exit 1
fi
if [[ $# != 1 ]]
then
# <host>: win32 or win64
echo "Syntax: $0 <host>" >&2
exit 1
fi
HOST="$1"
BUILD_TYPE="$2" # native or cross
LINK_TYPE="$3" # static or shared
DIRNAME="$HOST-$BUILD_TYPE-$LINK_TYPE"
HOST="$1"
if [[ "$BUILD_TYPE" != native && "$BUILD_TYPE" != cross ]]
then
echo "Unsupported build type (expected native or cross): $BUILD_TYPE" >&2
exit 1
fi
if [[ "$LINK_TYPE" != static && "$LINK_TYPE" != shared ]]
then
echo "Unsupported link type (expected static or shared): $LINK_TYPE" >&2
exit 1
fi
if [[ "$BUILD_TYPE" == cross ]]
then
if [[ "$HOST" = win32 ]]
then
HOST_TRIPLET=i686-w64-mingw32
elif [[ "$HOST" = win64 ]]
then
HOST_TRIPLET=x86_64-w64-mingw32
else
echo "Unsupported cross-build to host: $HOST" >&2
exit 1
fi
fi
}
if [[ "$HOST" = win32 ]]
then
HOST_TRIPLET=i686-w64-mingw32
elif [[ "$HOST" = win64 ]]
then
HOST_TRIPLET=x86_64-w64-mingw32
else
echo "Unsupported host: $HOST" >&2
exit 1
fi
DEPS_DIR=$(dirname ${BASH_SOURCE[0]})
cd "$DEPS_DIR"
@ -59,7 +37,7 @@ checksum() {
local file="$1"
local sum="$2"
echo "$file: verifying checksum..."
echo "$sum $file" | shasum -a256 -c
echo "$sum $file" | sha256sum -c
}
get_file() {

View File

@ -3,12 +3,11 @@ set -ex
DEPS_DIR=$(dirname ${BASH_SOURCE[0]})
cd "$DEPS_DIR"
. common
process_args "$@"
VERSION=7.1
VERSION=7.0.2
FILENAME=ffmpeg-$VERSION.tar.xz
PROJECT_DIR=ffmpeg-$VERSION
SHA256SUM=40973D44970DBC83EF302B0609F2E74982BE2D85916DD2EE7472D30678A7ABE6
SHA256SUM=8646515b638a3ad303e23af6a3587734447cb8fc0a0c064ecdb8e95c4fd8b389
cd "$SOURCES_DIR"
@ -23,121 +22,68 @@ fi
mkdir -p "$BUILD_DIR/$PROJECT_DIR"
cd "$BUILD_DIR/$PROJECT_DIR"
if [[ -d "$DIRNAME" ]]
if [[ "$HOST" = win32 ]]
then
echo "'$PWD/$DIRNAME' already exists, not reconfigured"
cd "$DIRNAME"
ARCH=x86
elif [[ "$HOST" = win64 ]]
then
ARCH=x86_64
else
mkdir "$DIRNAME"
cd "$DIRNAME"
echo "Unsupported host: $HOST" >&2
exit 1
fi
if [[ "$HOST" == win* ]]
then
# -static-libgcc to avoid missing libgcc_s_dw2-1.dll
# -static to avoid dynamic dependency to zlib
export CFLAGS='-static-libgcc -static'
export CXXFLAGS="$CFLAGS"
export LDFLAGS='-static-libgcc -static'
elif [[ "$HOST" == "macos" ]]
then
export LDFLAGS="$LDFLAGS -L/opt/homebrew/opt/zlib/lib"
export CPPFLAGS="$CPPFLAGS -I/opt/homebrew/opt/zlib/include"
# -static-libgcc to avoid missing libgcc_s_dw2-1.dll
# -static to avoid dynamic dependency to zlib
export CFLAGS='-static-libgcc -static'
export CXXFLAGS="$CFLAGS"
export LDFLAGS='-static-libgcc -static'
export LDFLAGS="$LDFLAGS -L/opt/homebrew/opt/libiconv/lib"
export CPPFLAGS="$CPPFLAGS -I/opt/homebrew/opt/libiconv/include"
export PKG_CONFIG_PATH="/opt/homebrew/opt/zlib/lib/pkgconfig"
fi
if [[ -d "$HOST" ]]
then
echo "'$PWD/$HOST' already exists, not reconfigured"
cd "$HOST"
else
mkdir "$HOST"
cd "$HOST"
conf=(
--prefix="$INSTALL_DIR/$DIRNAME"
--extra-cflags="-O2 -fPIC"
--disable-programs
--disable-doc
--disable-swscale
--disable-postproc
--disable-avfilter
--disable-network
--disable-everything
"$SOURCES_DIR/$PROJECT_DIR"/configure \
--prefix="$INSTALL_DIR/$HOST" \
--enable-cross-compile \
--target-os=mingw32 \
--arch="$ARCH" \
--cross-prefix="${HOST_TRIPLET}-" \
--cc="${HOST_TRIPLET}-gcc" \
--extra-cflags="-O2 -fPIC" \
--enable-shared \
--disable-static \
--disable-programs \
--disable-doc \
--disable-swscale \
--disable-postproc \
--disable-avfilter \
--disable-avdevice \
--disable-network \
--disable-everything \
--enable-swresample \
--enable-decoder=h264 \
--enable-decoder=hevc \
--enable-decoder=av1 \
--enable-decoder=pcm_s16le \
--enable-decoder=opus \
--enable-decoder=aac \
--enable-decoder=flac \
--enable-decoder=png \
--enable-protocol=file \
--enable-demuxer=image2 \
--enable-parser=png \
--enable-zlib \
--enable-muxer=matroska \
--enable-muxer=mp4 \
--enable-muxer=opus \
--enable-muxer=flac \
--enable-muxer=wav \
--disable-vulkan
--disable-vaapi
--disable-vdpau
--enable-swresample
--enable-decoder=h264
--enable-decoder=hevc
--enable-decoder=av1
--enable-decoder=pcm_s16le
--enable-decoder=opus
--enable-decoder=aac
--enable-decoder=flac
--enable-decoder=png
--enable-protocol=file
--enable-demuxer=image2
--enable-parser=png
--enable-zlib
--enable-muxer=matroska
--enable-muxer=mp4
--enable-muxer=opus
--enable-muxer=flac
--enable-muxer=wav
)
if [[ "$HOST" == linux ]]
then
conf+=(
--enable-libv4l2
--enable-outdev=v4l2
--enable-encoder=rawvideo
)
else
# libavdevice is only used for V4L2 on Linux
conf+=(
--disable-avdevice
)
fi
if [[ "$LINK_TYPE" == static ]]
then
conf+=(
--enable-static
--disable-shared
)
else
conf+=(
--disable-static
--enable-shared
)
fi
if [[ "$BUILD_TYPE" == cross ]]
then
conf+=(
--enable-cross-compile
--cross-prefix="${HOST_TRIPLET}-"
--cc="${HOST_TRIPLET}-gcc"
)
case "$HOST" in
win32)
conf+=(
--target-os=mingw32
--arch=x86
)
;;
win64)
conf+=(
--target-os=mingw32
--arch=x86_64
)
;;
*)
echo "Unsupported host: $HOST" >&2
exit 1
esac
fi
"$SOURCES_DIR/$PROJECT_DIR"/configure "${conf[@]}"
fi
make -j
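
Hypothetical invocations of the dependency script above (its path is not shown in this comparison), matching the two process_args() forms defined in the common script, three arguments on one side of the diff and host only on the other:

    ./ffmpeg_dep_script win64 cross shared   # HOST=win64 BUILD_TYPE=cross LINK_TYPE=shared, builds in win64-cross-shared
    ./ffmpeg_dep_script win32 native static  # native toolchain, static libraries
    ./ffmpeg_dep_script win64                # single-argument form: host only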

View File

@ -3,7 +3,6 @@ set -ex
DEPS_DIR=$(dirname ${BASH_SOURCE[0]})
cd "$DEPS_DIR"
. common
process_args "$@"
VERSION=1.0.27
FILENAME=libusb-$VERSION.tar.gz
@ -26,40 +25,20 @@ cd "$BUILD_DIR/$PROJECT_DIR"
export CFLAGS='-O2'
export CXXFLAGS="$CFLAGS"
if [[ -d "$DIRNAME" ]]
if [[ -d "$HOST" ]]
then
echo "'$PWD/$DIRNAME' already exists, not reconfigured"
cd "$DIRNAME"
echo "'$PWD/$HOST' already exists, not reconfigured"
cd "$HOST"
else
mkdir "$DIRNAME"
cd "$DIRNAME"
conf=(
--prefix="$INSTALL_DIR/$DIRNAME"
)
if [[ "$LINK_TYPE" == static ]]
then
conf+=(
--enable-static
--disable-shared
)
else
conf+=(
--disable-static
--enable-shared
)
fi
if [[ "$BUILD_TYPE" == cross ]]
then
conf+=(
--host="$HOST_TRIPLET"
)
fi
mkdir "$HOST"
cd "$HOST"
"$SOURCES_DIR/$PROJECT_DIR"/bootstrap.sh
"$SOURCES_DIR/$PROJECT_DIR"/configure "${conf[@]}"
"$SOURCES_DIR/$PROJECT_DIR"/configure \
--prefix="$INSTALL_DIR/$HOST" \
--host="$HOST_TRIPLET" \
--enable-shared \
--disable-static
fi
make -j

View File

@ -3,12 +3,11 @@ set -ex
DEPS_DIR=$(dirname ${BASH_SOURCE[0]})
cd "$DEPS_DIR"
. common
process_args "$@"
VERSION=2.30.9
VERSION=2.30.7
FILENAME=SDL-$VERSION.tar.gz
PROJECT_DIR=SDL-release-$VERSION
SHA256SUM=682a055004081e37d81a7d4ce546c3ee3ef2e0e6a675ed2651e430ccd14eb407
SHA256SUM=1578c96f62c9ae36b64e431b2aa0e0b0fd07c275dedbc694afc38e19056688f5
cd "$SOURCES_DIR"
@ -26,54 +25,23 @@ cd "$BUILD_DIR/$PROJECT_DIR"
export CFLAGS='-O2'
export CXXFLAGS="$CFLAGS"
if [[ -d "$DIRNAME" ]]
if [[ -d "$HOST" ]]
then
echo "'$PWD/$DIRNAME' already exists, not reconfigured"
cd "$DIRNAME"
echo "'$PWD/$HOST' already exists, not reconfigured"
cd "$HOST"
else
mkdir "$DIRNAME"
cd "$DIRNAME"
mkdir "$HOST"
cd "$HOST"
conf=(
--prefix="$INSTALL_DIR/$DIRNAME"
)
if [[ "$HOST" == linux ]]
then
conf+=(
--enable-video-wayland
--enable-video-x11
)
fi
if [[ "$LINK_TYPE" == static ]]
then
conf+=(
--enable-static
--disable-shared
)
else
conf+=(
--disable-static
--enable-shared
)
fi
if [[ "$BUILD_TYPE" == cross ]]
then
conf+=(
--host="$HOST_TRIPLET"
)
fi
"$SOURCES_DIR/$PROJECT_DIR"/configure "${conf[@]}"
"$SOURCES_DIR/$PROJECT_DIR"/configure \
--prefix="$INSTALL_DIR/$HOST" \
--host="$HOST_TRIPLET" \
--enable-shared \
--disable-static
fi
make -j
# There is no "make install-strip"
make install
# Strip manually
if [[ "$LINK_TYPE" == shared && "$HOST" == win* ]]
then
${HOST_TRIPLET}-strip "$INSTALL_DIR/$DIRNAME/bin/SDL2.dll"
fi
${HOST_TRIPLET}-strip "$INSTALL_DIR/$HOST/bin/SDL2.dll"

View File

@ -109,22 +109,20 @@ endif
cc = meson.get_compiler('c')
static = get_option('static')
dependencies = [
dependency('libavformat', version: '>= 57.33', static: static),
dependency('libavcodec', version: '>= 57.37', static: static),
dependency('libavutil', static: static),
dependency('libswresample', static: static),
dependency('sdl2', version: '>= 2.0.5', static: static),
dependency('libavformat', version: '>= 57.33'),
dependency('libavcodec', version: '>= 57.37'),
dependency('libavutil'),
dependency('libswresample'),
dependency('sdl2', version: '>= 2.0.5'),
]
if v4l2_support
dependencies += dependency('libavdevice', static: static)
dependencies += dependency('libavdevice')
endif
if usb_support
dependencies += dependency('libusb-1.0', static: static)
dependencies += dependency('libusb-1.0')
endif
if host_machine.system() == 'windows'
@ -169,6 +167,9 @@ conf.set('DEFAULT_LOCAL_PORT_RANGE_LAST', '27199')
# run a server debugger and wait for a client to be attached
conf.set('SERVER_DEBUGGER', get_option('server_debugger'))
# select the debugger method ('old' for Android < 9, 'new' for Android >= 9)
conf.set('SERVER_DEBUGGER_METHOD_NEW', get_option('server_debugger_method') == 'new')
# enable V4L2 support (linux only)
conf.set('HAVE_V4L2', v4l2_support)
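
Hypothetical meson setup lines matching the get_option() calls above (option names are taken from this file; their declarations in meson_options.txt are not part of this comparison):

    meson setup d-debug -Dserver_debugger=true -Dserver_debugger_method=new   # 'old' targets Android < 9
    meson setup d-static -Dstatic=true   # resolve FFmpeg/SDL2/libusb as static dependencies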

View File

@ -13,7 +13,7 @@ BEGIN
VALUE "LegalCopyright", "Romain Vimont, Genymobile"
VALUE "OriginalFilename", "scrcpy.exe"
VALUE "ProductName", "scrcpy"
VALUE "ProductVersion", "3.0"
VALUE "ProductVersion", "2.7"
END
END
BLOCK "VarFileInfo"

View File

@ -19,10 +19,6 @@ provides display and control of Android devices connected on USB (or over TCP/IP
.B \-\-always\-on\-top
Make scrcpy window always on top (above other windows).
.TP
.BI "\-\-angle " degrees
Rotate the video content by a custom angle, in degrees (clockwise).
.TP
.BI "\-\-audio\-bit\-rate " value
Encode the audio at the given bit rate, expressed in bits/s. Unit suffixes are supported: '\fBK\fR' (x1000) and '\fBM\fR' (x1000000).
@ -97,18 +93,6 @@ Select the camera size by its aspect ratio (+/- 10%).
Possible values are "sensor" (use the camera sensor aspect ratio), "\fInum\fR:\fIden\fR" (e.g. "4:3") and "\fIvalue\fR" (e.g. "1.6").
.TP
.BI "\-\-camera\-facing " facing
Select the device camera by its facing direction.
Possible values are "front", "back" and "external".
.TP
.BI "\-\-camera\-fps " fps
Specify the camera capture frame rate.
If not specified, Android's default frame rate (30 fps) is used.
.TP
.B \-\-camera\-high\-speed
Enable high-speed camera capture mode.
@ -122,26 +106,28 @@ Specify the device camera id to mirror.
The available camera ids can be listed by \fB\-\-list\-cameras\fR.
.TP
.BI "\-\-camera\-size " width\fRx\fIheight
Specify an explicit camera capture size.
.BI "\-\-camera\-facing " facing
Select the device camera by its facing direction.
Possible values are "front", "back" and "external".
.TP
.BI "\-\-capture\-orientation " value
Possible values are 0, 90, 180, 270, flip0, flip90, flip180 and flip270, possibly prefixed by '@'.
.BI "\-\-camera\-fps " fps
Specify the camera capture frame rate.
The number represents the clockwise rotation in degrees; the "flip" keyword applies a horizontal flip before the rotation.
If not specified, Android's default frame rate (30 fps) is used.
If a leading '@' is passed (@90) for display capture, then the rotation is locked, and is relative to the natural device orientation.
If '@' is passed alone, then the rotation is locked to the initial device orientation.
Default is 0.
.TP
.BI "\-\-camera\-size " width\fRx\fIheight
Specify an explicit camera capture size.
.TP
.BI "\-\-crop " width\fR:\fIheight\fR:\fIx\fR:\fIy
Crop the device screen on the server.
The values are expressed in the device natural orientation (typically, portrait for a phone, landscape for a tablet).
The values are expressed in the device natural orientation (typically, portrait for a phone, landscape for a tablet). Any
.B \-\-max\-size
value is computed on the cropped size.
.TP
.B \-d, \-\-select\-usb
@ -255,6 +241,16 @@ List video and audio encoders available on the device.
.B \-\-list\-displays
List displays available on the device.
.TP
\fB\-\-lock\-video\-orientation\fR[=\fIvalue\fR]
Lock capture video orientation to \fIvalue\fR.
Possible values are "unlocked", "initial" (locked to the initial orientation), 0, 90, 180, and 270. The values represent the clockwise rotation from the natural device orientation, in degrees.
Default is "unlocked".
Passing the option without argument is equivalent to passing "initial".
.TP
.BI "\-m, \-\-max\-size " value
Limit both the width and height of the video to \fIvalue\fR. The other dimension is computed so that the device aspect\-ratio is preserved.
@ -318,13 +314,14 @@ Disable video and audio playback on the computer (equivalent to \fB\-\-no\-video
.TP
\fB\-\-new\-display\fR[=[\fIwidth\fRx\fIheight\fR][/\fIdpi\fR]]
Create a new display with the specified resolution and density. If not provided, they default to the main display dimensions and DPI.
Create a new display with the specified resolution and density. If not provided, they default to the main display dimensions and DPI, and \fB\-\-max\-size\fR is considered.
Examples:
\-\-new\-display=1920x1080
\-\-new\-display=1920x1080/420
\-\-new\-display # main display size and density
\-\-new\-display -m1920 # scaled to fit a max size of 1920
\-\-new\-display=/240 # main display size and 240 dpi
.TP
@ -369,10 +366,6 @@ Do not forward mouse hover (mouse motion without any clicks) events.
.B \-\-no\-power\-on
Do not power on the device on start.
.TP
.B \-\-no\-vd\-system\-decorations
Disable virtual display system decorations flag.
.TP
.B \-\-no\-video
Disable video forwarding.
@ -555,6 +548,8 @@ Default is "info" for release builds, "debug" for debug builds.
.BI "\-\-v4l2-sink " /dev/videoN
Output to v4l2loopback device.
It requires to lock the video orientation (see \fB\-\-lock\-video\-orientation\fR).
.TP
.BI "\-\-v4l2-buffer " ms
Add a buffering delay (in milliseconds) before pushing frames. This increases latency to compensate for jitter.
@ -676,10 +671,6 @@ Pause or re-pause display
.B MOD+Shift+z
Unpause display
.TP
.B MOD+Shift+r
Reset video capture/encoding
.TP
.B MOD+g
Resize window to 1:1 (pixel\-perfect)
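
The two orientation mechanisms documented above, one per side of this comparison, expressed as command lines (the angle and the v4l2 device are example values):

    scrcpy --capture-orientation=@90          # rotate 90° clockwise and lock the capture orientation
    scrcpy --lock-video-orientation=initial   # older form: lock to the initial device orientation
    scrcpy --v4l2-sink=/dev/video2 --lock-video-orientation   # per the --v4l2-sink note above (Linux only)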

View File

@ -739,21 +739,3 @@ sc_adb_get_device_ip(struct sc_intr *intr, const char *serial, unsigned flags) {
return sc_adb_parse_device_ip(buf);
}
uint16_t
sc_adb_get_device_sdk_version(struct sc_intr *intr, const char *serial) {
char *sdk_version =
sc_adb_getprop(intr, serial, "ro.build.version.sdk", SC_ADB_SILENT);
if (!sdk_version) {
return 0;
}
long value;
bool ok = sc_str_parse_integer(sdk_version, &value);
free(sdk_version);
if (!ok || value < 0 || value > 0xFFFF) {
return 0;
}
return value;
}
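
The removed helper above is a thin wrapper around getprop; its shell equivalent is handy for checking a device by hand:

    adb shell getprop ro.build.version.sdk   # prints the SDK level, e.g. 34 on Android 14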

View File

@ -114,10 +114,4 @@ sc_adb_getprop(struct sc_intr *intr, const char *serial, const char *prop,
char *
sc_adb_get_device_ip(struct sc_intr *intr, const char *serial, unsigned flags);
/**
* Return the device SDK version.
*/
uint16_t
sc_adb_get_device_sdk_version(struct sc_intr *intr, const char *serial);
#endif

View File

@ -288,7 +288,7 @@ sc_audio_regulator_push(struct sc_audio_regulator *ar, const AVFrame *frame) {
// Enable compensation when the difference exceeds +/- 4ms.
// Disable compensation when the difference is lower than +/- 1ms.
int threshold = ar->compensation_active
int threshold = ar->compensation != 0
? ar->sample_rate / 1000 /* 1ms */
: ar->sample_rate * 4 / 1000; /* 4ms */
@ -309,12 +309,14 @@ sc_audio_regulator_push(struct sc_audio_regulator *ar, const AVFrame *frame) {
LOGV("[Audio] Buffering: target=%" PRIu32 " avg=%f cur=%" PRIu32
" compensation=%d", ar->target_buffering, avg, can_read, diff);
int ret = swr_set_compensation(swr_ctx, diff, distance);
if (ret < 0) {
LOGW("Resampling compensation failed: %d", ret);
// not fatal
} else {
ar->compensation_active = diff != 0;
if (diff != ar->compensation) {
int ret = swr_set_compensation(swr_ctx, diff, distance);
if (ret < 0) {
LOGW("Resampling compensation failed: %d", ret);
// not fatal
} else {
ar->compensation = diff;
}
}
}
@ -390,7 +392,7 @@ sc_audio_regulator_init(struct sc_audio_regulator *ar, size_t sample_size,
atomic_init(&ar->played, false);
atomic_init(&ar->received, false);
atomic_init(&ar->underflow, 0);
ar->compensation_active = false;
ar->compensation = 0;
return true;

View File

@ -44,8 +44,8 @@ struct sc_audio_regulator {
// Number of silence samples inserted since the last received packet
atomic_uint_least32_t underflow;
// Non-zero compensation applied (only used by the receiver thread)
bool compensation_active;
// Current applied compensation value (only used by the receiver thread)
int compensation;
// Set to true the first time a sample is received
atomic_bool received;

View File

@ -106,10 +106,6 @@ enum {
OPT_NEW_DISPLAY,
OPT_LIST_APPS,
OPT_START_APP,
OPT_SCREEN_OFF_TIMEOUT,
OPT_CAPTURE_ORIENTATION,
OPT_ANGLE,
OPT_NO_VD_SYSTEM_DECORATIONS,
};
struct sc_option {
@ -151,13 +147,6 @@ static const struct sc_option options[] = {
.longopt = "always-on-top",
.text = "Make scrcpy window always on top (above other windows).",
},
{
.longopt_id = OPT_ANGLE,
.longopt = "angle",
.argdesc = "degrees",
.text = "Rotate the video content by a custom angle, in degrees "
"(clockwise).",
},
{
.longopt_id = OPT_AUDIO_BIT_RATE,
.longopt = "audio-bit-rate",
@ -255,6 +244,14 @@ static const struct sc_option options[] = {
"ratio), \"<num>:<den>\" (e.g. \"4:3\") or \"<value>\" (e.g. "
"\"1.6\")."
},
{
.longopt_id = OPT_CAMERA_ID,
.longopt = "camera-id",
.argdesc = "id",
.text = "Specify the device camera id to mirror.\n"
"The available camera ids can be listed by:\n"
" scrcpy --list-cameras",
},
{
.longopt_id = OPT_CAMERA_FACING,
.longopt = "camera-facing",
@ -262,14 +259,6 @@ static const struct sc_option options[] = {
.text = "Select the device camera by its facing direction.\n"
"Possible values are \"front\", \"back\" and \"external\".",
},
{
.longopt_id = OPT_CAMERA_FPS,
.longopt = "camera-fps",
.argdesc = "value",
.text = "Specify the camera capture frame rate.\n"
"If not specified, Android's default frame rate (30 fps) is "
"used.",
},
{
.longopt_id = OPT_CAMERA_HIGH_SPEED,
.longopt = "camera-high-speed",
@ -277,14 +266,6 @@ static const struct sc_option options[] = {
"This mode is restricted to specific resolutions and frame "
"rates, listed by --list-camera-sizes.",
},
{
.longopt_id = OPT_CAMERA_ID,
.longopt = "camera-id",
.argdesc = "id",
.text = "Specify the device camera id to mirror.\n"
"The available camera ids can be listed by:\n"
" scrcpy --list-cameras",
},
{
.longopt_id = OPT_CAMERA_SIZE,
.longopt = "camera-size",
@ -292,21 +273,12 @@ static const struct sc_option options[] = {
.text = "Specify an explicit camera capture size.",
},
{
.longopt_id = OPT_CAPTURE_ORIENTATION,
.longopt = "capture-orientation",
.longopt_id = OPT_CAMERA_FPS,
.longopt = "camera-fps",
.argdesc = "value",
.text = "Set the capture video orientation.\n"
"Possible values are 0, 90, 180, 270, flip0, flip90, flip180 "
"and flip270, possibly prefixed by '@'.\n"
"The number represents the clockwise rotation in degrees; the "
"\"flip\" keyword applies a horizontal flip before the "
"rotation.\n"
"If a leading '@' is passed (@90) for display capture, then "
"the rotation is locked, and is relative to the natural device "
"orientation.\n"
"If '@' is passed alone, then the rotation is locked to the "
"initial device orientation.\n"
"Default is 0.",
.text = "Specify the camera capture frame rate.\n"
"If not specified, Android's default frame rate (30 fps) is "
"used.",
},
{
// Not really deprecated (--codec has never been released), but without
@ -329,7 +301,8 @@ static const struct sc_option options[] = {
.argdesc = "width:height:x:y",
.text = "Crop the device screen on the server.\n"
"The values are expressed in the device natural orientation "
"(typically, portrait for a phone, landscape for a tablet).",
"(typically, portrait for a phone, landscape for a tablet). "
"Any --max-size value is computed on the cropped size.",
},
{
.shortopt = 'd',
@ -497,10 +470,18 @@ static const struct sc_option options[] = {
.text = "List video and audio encoders available on the device.",
},
{
// deprecated
.longopt_id = OPT_LOCK_VIDEO_ORIENTATION,
.longopt = "lock-video-orientation",
.argdesc = "value",
.optional_arg = true,
.text = "Lock capture video orientation to value.\n"
"Possible values are \"unlocked\", \"initial\" (locked to the "
"initial orientation), 0, 90, 180 and 270. The values "
"represent the clockwise rotation from the natural device "
"orientation, in degrees.\n"
"Default is \"unlocked\".\n"
"Passing the option without argument is equivalent to passing "
"\"initial\".",
},
{
.shortopt = 'm',
@ -590,11 +571,12 @@ static const struct sc_option options[] = {
.optional_arg = true,
.text = "Create a new display with the specified resolution and "
"density. If not provided, they default to the main display "
"dimensions and DPI.\n"
"dimensions and DPI, and --max-size is considered.\n"
"Examples:\n"
" --new-display=1920x1080\n"
" --new-display=1920x1080/420 # force 420 dpi\n"
" --new-display # main display size and density\n"
" --new-display -m1920 # scaled to fit a max size of 1920\n"
" --new-display=/240 # main display size and 240 dpi",
},
{
@ -659,11 +641,6 @@ static const struct sc_option options[] = {
.longopt = "no-power-on",
.text = "Do not power on the device on start.",
},
{
.longopt_id = OPT_NO_VD_SYSTEM_DECORATIONS,
.longopt = "no-vd-system-decorations",
.text = "Disable virtual display system decorations flag.",
},
{
.longopt_id = OPT_NO_VIDEO,
.longopt = "no-video",
@ -816,13 +793,6 @@ static const struct sc_option options[] = {
.longopt = "turn-screen-off",
.text = "Turn the device screen off immediately.",
},
{
.longopt_id = OPT_SCREEN_OFF_TIMEOUT,
.longopt = "screen-off-timeout",
.argdesc = "seconds",
.text = "Set the screen off timeout while scrcpy is running (restore "
"the initial value on exit).",
},
{
.longopt_id = OPT_SHORTCUT_MOD,
.longopt = "shortcut-mod",
@ -917,6 +887,8 @@ static const struct sc_option options[] = {
.longopt = "v4l2-sink",
.argdesc = "/dev/videoN",
.text = "Output to v4l2loopback device.\n"
"It requires to lock the video orientation (see "
"--lock-video-orientation).\n"
"This feature is only available on Linux.",
},
{
@ -1050,10 +1022,6 @@ static const struct sc_shortcut shortcuts[] = {
.shortcuts = { "MOD+Shift+z" },
.text = "Unpause display",
},
{
.shortcuts = { "MOD+Shift+r" },
.text = "Reset video capture/encoding",
},
{
.shortcuts = { "MOD+g" },
.text = "Resize window to 1:1 (pixel-perfect)",
@ -1602,6 +1570,78 @@ parse_audio_output_buffer(const char *s, sc_tick *tick) {
return true;
}
static bool
parse_lock_video_orientation(const char *s,
enum sc_lock_video_orientation *lock_mode) {
if (!s || !strcmp(s, "initial")) {
// Without argument, lock the initial orientation
*lock_mode = SC_LOCK_VIDEO_ORIENTATION_INITIAL;
return true;
}
if (!strcmp(s, "unlocked")) {
*lock_mode = SC_LOCK_VIDEO_ORIENTATION_UNLOCKED;
return true;
}
if (!strcmp(s, "0")) {
*lock_mode = SC_LOCK_VIDEO_ORIENTATION_0;
return true;
}
if (!strcmp(s, "90")) {
*lock_mode = SC_LOCK_VIDEO_ORIENTATION_90;
return true;
}
if (!strcmp(s, "180")) {
*lock_mode = SC_LOCK_VIDEO_ORIENTATION_180;
return true;
}
if (!strcmp(s, "270")) {
*lock_mode = SC_LOCK_VIDEO_ORIENTATION_270;
return true;
}
if (!strcmp(s, "1")) {
LOGW("--lock-video-orientation=1 is deprecated, use "
"--lock-video-orientation=270 instead.");
*lock_mode = SC_LOCK_VIDEO_ORIENTATION_270;
return true;
}
if (!strcmp(s, "2")) {
LOGW("--lock-video-orientation=2 is deprecated, use "
"--lock-video-orientation=180 instead.");
*lock_mode = SC_LOCK_VIDEO_ORIENTATION_180;
return true;
}
if (!strcmp(s, "3")) {
LOGW("--lock-video-orientation=3 is deprecated, use "
"--lock-video-orientation=90 instead.");
*lock_mode = SC_LOCK_VIDEO_ORIENTATION_90;
return true;
}
LOGE("Unsupported --lock-video-orientation value: %s (expected initial, "
"unlocked, 0, 90, 180 or 270).", s);
return false;
}
static bool
parse_rotation(const char *s, uint8_t *rotation) {
long value;
bool ok = parse_integer_arg(s, &value, false, 0, 3, "rotation");
if (!ok) {
return false;
}
*rotation = (uint8_t) value;
return true;
}
static bool
parse_orientation(const char *s, enum sc_orientation *orientation) {
if (!strcmp(s, "0")) {
@ -1641,32 +1681,6 @@ parse_orientation(const char *s, enum sc_orientation *orientation) {
return false;
}
static bool
parse_capture_orientation(const char *s, enum sc_orientation *orientation,
enum sc_orientation_lock *lock) {
if (*s == '\0') {
LOGE("Capture orientation may not be empty (expected 0, 90, 180, 270, "
"flip0, flip90, flip180 or flip270, possibly prefixed by '@')");
return false;
}
// Lock the orientation by a leading '@'
if (s[0] == '@') {
// Consume '@'
++s;
if (*s == '\0') {
// Only '@': lock to the initial orientation (orientation is unused)
*lock = SC_ORIENTATION_LOCKED_INITIAL;
return true;
}
*lock = SC_ORIENTATION_LOCKED_VALUE;
} else {
*lock = SC_ORIENTATION_UNLOCKED;
}
return parse_orientation(s, orientation);
}
static bool
parse_window_position(const char *s, int16_t *position) {
// special value for "auto"
@ -2137,20 +2151,6 @@ parse_time_limit(const char *s, sc_tick *tick) {
return true;
}
static bool
parse_screen_off_timeout(const char *s, sc_tick *tick) {
long value;
// value in seconds, but must fit in 31 bits in milliseconds
bool ok = parse_integer_arg(s, &value, false, 0, 0x7FFFFFFF / 1000,
"screen off timeout");
if (!ok) {
return false;
}
*tick = SC_TICK_FROM_SEC(value);
return true;
}
static bool
parse_pause_on_exit(const char *s, enum sc_pause_on_exit *pause_on_exit) {
if (!s || !strcmp(s, "true")) {
@ -2276,8 +2276,8 @@ parse_args_with_getopt(struct scrcpy_cli_args *args, int argc, char *argv[],
opts->crop = optarg;
break;
case OPT_DISPLAY:
LOGE("--display has been removed, use --display-id instead.");
return false;
LOGW("--display is deprecated, use --display-id instead.");
// fall through
case OPT_DISPLAY_ID:
if (!parse_display_id(optarg, &opts->display_id)) {
return false;
@ -2341,13 +2341,8 @@ parse_args_with_getopt(struct scrcpy_cli_args *args, int argc, char *argv[],
"--mouse=uhid instead.");
return false;
case OPT_LOCK_VIDEO_ORIENTATION:
LOGE("--lock-video-orientation has been removed, use "
"--capture-orientation instead.");
return false;
case OPT_CAPTURE_ORIENTATION:
if (!parse_capture_orientation(optarg,
&opts->capture_orientation,
&opts->capture_orientation_lock)) {
if (!parse_lock_video_orientation(optarg,
&opts->lock_video_orientation)) {
return false;
}
break;
@ -2365,9 +2360,8 @@ parse_args_with_getopt(struct scrcpy_cli_args *args, int argc, char *argv[],
opts->control = false;
break;
case OPT_NO_DISPLAY:
LOGE("--no-display has been removed, use --no-playback "
"instead.");
return false;
LOGW("--no-display is deprecated, use --no-playback instead.");
// fall through
case 'N':
opts->video_playback = false;
opts->audio_playback = false;
@ -2453,9 +2447,32 @@ parse_args_with_getopt(struct scrcpy_cli_args *args, int argc, char *argv[],
opts->key_inject_mode = SC_KEY_INJECT_MODE_RAW;
break;
case OPT_ROTATION:
LOGE("--rotation has been removed, use --orientation or "
"--capture-orientation instead.");
return false;
LOGW("--rotation is deprecated, use --display-orientation "
"instead.");
uint8_t rotation;
if (!parse_rotation(optarg, &rotation)) {
return false;
}
assert(rotation <= 3);
switch (rotation) {
case 0:
opts->display_orientation = SC_ORIENTATION_0;
break;
case 1:
// rotation 1 was 90° counterclockwise, but orientation
// is expressed clockwise
opts->display_orientation = SC_ORIENTATION_270;
break;
case 2:
opts->display_orientation = SC_ORIENTATION_180;
break;
case 3:
// rotation 3 was 270° counterclockwise, but orientation
// is expressed clockwise
opts->display_orientation = SC_ORIENTATION_90;
break;
}
break;
case OPT_DISPLAY_ORIENTATION:
if (!parse_orientation(optarg, &opts->display_orientation)) {
return false;
@ -2516,9 +2533,23 @@ parse_args_with_getopt(struct scrcpy_cli_args *args, int argc, char *argv[],
}
break;
case OPT_FORWARD_ALL_CLICKS:
LOGE("--forward-all-clicks has been removed, "
LOGW("--forward-all-clicks is deprecated, "
"use --mouse-bind=++++ instead.");
return false;
opts->mouse_bindings = (struct sc_mouse_bindings) {
.pri = {
.right_click = SC_MOUSE_BINDING_CLICK,
.middle_click = SC_MOUSE_BINDING_CLICK,
.click4 = SC_MOUSE_BINDING_CLICK,
.click5 = SC_MOUSE_BINDING_CLICK,
},
.sec = {
.right_click = SC_MOUSE_BINDING_CLICK,
.middle_click = SC_MOUSE_BINDING_CLICK,
.click4 = SC_MOUSE_BINDING_CLICK,
.click5 = SC_MOUSE_BINDING_CLICK,
},
};
break;
case OPT_LEGACY_PASTE:
opts->legacy_paste = true;
break;
@ -2526,9 +2557,9 @@ parse_args_with_getopt(struct scrcpy_cli_args *args, int argc, char *argv[],
opts->power_off_on_close = true;
break;
case OPT_DISPLAY_BUFFER:
LOGE("--display-buffer has been removed, use --video-buffer "
LOGW("--display-buffer is deprecated, use --video-buffer "
"instead.");
return false;
// fall through
case OPT_VIDEO_BUFFER:
if (!parse_buffering_time(optarg, &opts->video_buffer)) {
return false;
@ -2695,18 +2726,6 @@ parse_args_with_getopt(struct scrcpy_cli_args *args, int argc, char *argv[],
case OPT_START_APP:
opts->start_app = optarg;
break;
case OPT_SCREEN_OFF_TIMEOUT:
if (!parse_screen_off_timeout(optarg,
&opts->screen_off_timeout)) {
return false;
}
break;
case OPT_ANGLE:
opts->angle = optarg;
break;
case OPT_NO_VD_SYSTEM_DECORATIONS:
opts->vd_system_decorations = optarg;
break;
default:
// getopt prints the error message on stderr
return false;
@ -2801,6 +2820,14 @@ parse_args_with_getopt(struct scrcpy_cli_args *args, int argc, char *argv[],
return false;
}
if (opts->lock_video_orientation ==
SC_LOCK_VIDEO_ORIENTATION_UNLOCKED) {
LOGI("Video orientation is locked for v4l2 sink. "
"See --lock-video-orientation.");
opts->lock_video_orientation =
SC_LOCK_VIDEO_ORIENTATION_INITIAL_AUTO;
}
// V4L2 could not handle size change.
// Do not log because downsizing on error is the default behavior,
// not an explicit request from the user.
@ -2890,6 +2917,13 @@ parse_args_with_getopt(struct scrcpy_cli_args *args, int argc, char *argv[],
LOGE("--new-display is incompatible with --no-video");
return false;
}
if (opts->max_size && opts->new_display[0] != '\0'
&& opts->new_display[0] != '/') {
// An explicit size is defined (not "" nor "/<dpi>")
LOGE("Cannot specify both --new-display size and -m/--max-size");
return false;
}
}
if (otg) {

View File

@ -181,7 +181,6 @@ sc_control_msg_serialize(const struct sc_control_msg *msg, uint8_t *buf) {
case SC_CONTROL_MSG_TYPE_COLLAPSE_PANELS:
case SC_CONTROL_MSG_TYPE_ROTATE_DEVICE:
case SC_CONTROL_MSG_TYPE_OPEN_HARD_KEYBOARD_SETTINGS:
case SC_CONTROL_MSG_TYPE_RESET_VIDEO:
// no additional data
return 1;
default:
@ -305,9 +304,6 @@ sc_control_msg_log(const struct sc_control_msg *msg) {
case SC_CONTROL_MSG_TYPE_START_APP:
LOG_CMSG("start app \"%s\"", msg->start_app.name);
break;
case SC_CONTROL_MSG_TYPE_RESET_VIDEO:
LOG_CMSG("reset video");
break;
default:
LOG_CMSG("unknown type: %u", (unsigned) msg->type);
break;

View File

@ -42,7 +42,6 @@ enum sc_control_msg_type {
SC_CONTROL_MSG_TYPE_UHID_DESTROY,
SC_CONTROL_MSG_TYPE_OPEN_HARD_KEYBOARD_SETTINGS,
SC_CONTROL_MSG_TYPE_START_APP,
SC_CONTROL_MSG_TYPE_RESET_VIDEO,
};
enum sc_copy_key {

View File

@ -284,18 +284,6 @@ open_hard_keyboard_settings(struct sc_input_manager *im) {
}
}
static void
reset_video(struct sc_input_manager *im) {
assert(im->controller);
struct sc_control_msg msg;
msg.type = SC_CONTROL_MSG_TYPE_RESET_VIDEO;
if (!sc_controller_push_msg(im->controller, &msg)) {
LOGW("Could not request reset video");
}
}
static void
apply_orientation_transform(struct sc_input_manager *im,
enum sc_orientation transform) {
@ -533,12 +521,8 @@ sc_input_manager_process_key(struct sc_input_manager *im,
}
return;
case SDLK_r:
if (control && !repeat && down && !paused) {
if (shift) {
reset_video(im);
} else {
rotate_device(im);
}
if (control && !shift && !repeat && down && !paused) {
rotate_device(im);
}
return;
case SDLK_k:

View File

@ -50,8 +50,7 @@ const struct scrcpy_options scrcpy_options_default = {
.video_bit_rate = 0,
.audio_bit_rate = 0,
.max_fps = NULL,
.capture_orientation = SC_ORIENTATION_0,
.capture_orientation_lock = SC_ORIENTATION_UNLOCKED,
.lock_video_orientation = SC_LOCK_VIDEO_ORIENTATION_UNLOCKED,
.display_orientation = SC_ORIENTATION_0,
.record_orientation = SC_ORIENTATION_0,
.window_x = SC_WINDOW_POSITION_UNDEFINED,
@ -63,7 +62,6 @@ const struct scrcpy_options scrcpy_options_default = {
.audio_buffer = -1, // depends on the audio format,
.audio_output_buffer = SC_TICK_FROM_MS(5),
.time_limit = 0,
.screen_off_timeout = -1,
#ifdef HAVE_V4L2
.v4l2_device = NULL,
.v4l2_buffer = 0,
@ -107,8 +105,6 @@ const struct scrcpy_options scrcpy_options_default = {
.audio_dup = false,
.new_display = NULL,
.start_app = NULL,
.angle = NULL,
.vd_system_decorations = true,
};
enum sc_orientation

View File

@ -84,12 +84,6 @@ enum sc_orientation { // v v v
SC_ORIENTATION_FLIP_270, // 1 1 1
};
enum sc_orientation_lock {
SC_ORIENTATION_UNLOCKED,
SC_ORIENTATION_LOCKED_VALUE, // lock to specified orientation
SC_ORIENTATION_LOCKED_INITIAL, // lock to initial device orientation
};
static inline bool
sc_orientation_is_mirror(enum sc_orientation orientation) {
assert(!(orientation & ~7));
@ -136,6 +130,18 @@ sc_orientation_get_name(enum sc_orientation orientation) {
}
}
enum sc_lock_video_orientation {
SC_LOCK_VIDEO_ORIENTATION_UNLOCKED = -1,
// lock the current orientation when scrcpy starts
SC_LOCK_VIDEO_ORIENTATION_INITIAL = -2,
// like SC_LOCK_VIDEO_ORIENTATION_INITIAL, but set automatically
SC_LOCK_VIDEO_ORIENTATION_INITIAL_AUTO = -3,
SC_LOCK_VIDEO_ORIENTATION_0 = 0,
SC_LOCK_VIDEO_ORIENTATION_90 = 3,
SC_LOCK_VIDEO_ORIENTATION_180 = 2,
SC_LOCK_VIDEO_ORIENTATION_270 = 1,
};
enum sc_keyboard_input_mode {
SC_KEYBOARD_INPUT_MODE_AUTO,
SC_KEYBOARD_INPUT_MODE_UHID_OR_AOA, // normal vs otg mode
@ -247,9 +253,7 @@ struct scrcpy_options {
uint32_t video_bit_rate;
uint32_t audio_bit_rate;
const char *max_fps; // float to be parsed by the server
const char *angle; // float to be parsed by the server
enum sc_orientation capture_orientation;
enum sc_orientation_lock capture_orientation_lock;
enum sc_lock_video_orientation lock_video_orientation;
enum sc_orientation display_orientation;
enum sc_orientation record_orientation;
int16_t window_x; // SC_WINDOW_POSITION_UNDEFINED for "auto"
@ -261,7 +265,6 @@ struct scrcpy_options {
sc_tick audio_buffer;
sc_tick audio_output_buffer;
sc_tick time_limit;
sc_tick screen_off_timeout;
#ifdef HAVE_V4L2
const char *v4l2_device;
sc_tick v4l2_buffer;
@ -310,7 +313,6 @@ struct scrcpy_options {
bool audio_dup;
const char *new_display; // [<width>x<height>][/<dpi>] parsed by the server
const char *start_app;
bool vd_system_decorations;
};
extern const struct scrcpy_options scrcpy_options_default;

View File

@ -143,14 +143,8 @@ sc_recorder_open_output_file(struct sc_recorder *recorder) {
return false;
}
char *file_url = sc_str_concat("file:", recorder->filename);
if (!file_url) {
avformat_free_context(recorder->ctx);
return false;
}
int ret = avio_open(&recorder->ctx->pb, file_url, AVIO_FLAG_WRITE);
free(file_url);
int ret = avio_open(&recorder->ctx->pb, recorder->filename,
AVIO_FLAG_WRITE);
if (ret < 0) {
LOGE("Failed to open output file: %s", recorder->filename);
avformat_free_context(recorder->ctx);

View File

@ -428,10 +428,7 @@ scrcpy(struct scrcpy_options *options) {
.video_bit_rate = options->video_bit_rate,
.audio_bit_rate = options->audio_bit_rate,
.max_fps = options->max_fps,
.angle = options->angle,
.screen_off_timeout = options->screen_off_timeout,
.capture_orientation = options->capture_orientation,
.capture_orientation_lock = options->capture_orientation_lock,
.lock_video_orientation = options->lock_video_orientation,
.control = options->control,
.display_id = options->display_id,
.new_display = options->new_display,
@ -458,7 +455,6 @@ scrcpy(struct scrcpy_options *options) {
.power_on = options->power_on,
.kill_adb_on_close = options->kill_adb_on_close,
.camera_high_speed = options->camera_high_speed,
.vd_system_decorations = options->vd_system_decorations,
.list = options->list,
};

View File

@ -201,31 +201,18 @@ execute_server(struct sc_server *server,
cmd[count++] = "app_process";
#ifdef SERVER_DEBUGGER
uint16_t sdk_version = sc_adb_get_device_sdk_version(&server->intr, serial);
if (!sdk_version) {
LOGE("Could not determine SDK version");
return 0;
}
# define SERVER_DEBUGGER_PORT "5005"
const char *dbg;
if (sdk_version < 28) {
// Android < 9
dbg = "-agentlib:jdwp=transport=dt_socket,suspend=y,server=y,address="
SERVER_DEBUGGER_PORT;
} else if (sdk_version < 30) {
// Android >= 9 && Android < 11
dbg = "-XjdwpProvider:internal -XjdwpOptions:transport=dt_socket,"
"suspend=y,server=y,address=" SERVER_DEBUGGER_PORT;
} else {
// Android >= 11
// Contrary to the other methods, this does not suspend on start.
// <https://github.com/Genymobile/scrcpy/pull/5466>
dbg = "-XjdwpProvider:adbconnection";
}
cmd[count++] = dbg;
cmd[count++] =
# ifdef SERVER_DEBUGGER_METHOD_NEW
/* Android 9 and above */
"-XjdwpProvider:internal -XjdwpOptions:transport=dt_socket,suspend=y,"
"server=y,address="
# else
/* Android 8 and below */
"-agentlib:jdwp=transport=dt_socket,suspend=y,server=y,address="
# endif
SERVER_DEBUGGER_PORT;
#endif
cmd[count++] = "/"; // unused
cmd[count++] = "com.genymobile.scrcpy.Server";
cmd[count++] = SCRCPY_VERSION;
@ -287,21 +274,9 @@ execute_server(struct sc_server *server,
VALIDATE_STRING(params->max_fps);
ADD_PARAM("max_fps=%s", params->max_fps);
}
if (params->angle) {
VALIDATE_STRING(params->angle);
ADD_PARAM("angle=%s", params->angle);
}
if (params->capture_orientation_lock != SC_ORIENTATION_UNLOCKED
|| params->capture_orientation != SC_ORIENTATION_0) {
if (params->capture_orientation_lock == SC_ORIENTATION_LOCKED_INITIAL) {
ADD_PARAM("capture_orientation=@");
} else {
const char *orient =
sc_orientation_get_name(params->capture_orientation);
bool locked =
params->capture_orientation_lock != SC_ORIENTATION_UNLOCKED;
ADD_PARAM("capture_orientation=%s%s", locked ? "@" : "", orient);
}
if (params->lock_video_orientation != SC_LOCK_VIDEO_ORIENTATION_UNLOCKED) {
ADD_PARAM("lock_video_orientation=%" PRIi8,
params->lock_video_orientation);
}
if (server->tunnel.forward) {
ADD_PARAM("tunnel_forward=true");
@ -345,11 +320,6 @@ execute_server(struct sc_server *server,
if (params->stay_awake) {
ADD_PARAM("stay_awake=true");
}
if (params->screen_off_timeout != -1) {
assert(params->screen_off_timeout >= 0);
uint64_t ms = SC_TICK_TO_MS(params->screen_off_timeout);
ADD_PARAM("screen_off_timeout=%" PRIu64, ms);
}
if (params->video_codec_options) {
VALIDATE_STRING(params->video_codec_options);
ADD_PARAM("video_codec_options=%s", params->video_codec_options);
@ -389,9 +359,6 @@ execute_server(struct sc_server *server,
VALIDATE_STRING(params->new_display);
ADD_PARAM("new_display=%s", params->new_display);
}
if (!params->vd_system_decorations) {
ADD_PARAM("vd_system_decorations=false");
}
if (params->list & SC_OPTION_LIST_ENCODERS) {
ADD_PARAM("list_encoders=true");
}
@ -413,14 +380,10 @@ execute_server(struct sc_server *server,
cmd[count++] = NULL;
#ifdef SERVER_DEBUGGER
LOGI("Server debugger listening%s...",
sdk_version < 30 ? " on port " SERVER_DEBUGGER_PORT : "");
// For Android < 11, from the computer:
// - run `adb forward tcp:5005 tcp:5005`
// For Android >= 11:
// - execute `adb jdwp` to get the jdwp port
// - run `adb forward tcp:5005 jdwp:XXXX` (replace XXXX)
//
LOGI("Server debugger waiting for a client on device port "
SERVER_DEBUGGER_PORT "...");
// From the computer, run
// adb forward tcp:5005 tcp:5005
// Then, from Android Studio: Run > Debug > Edit configurations...
// On the left, click on '+', "Remote", with:
// Host: localhost
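
The comments above describe two ways to attach a debugger, depending on the jdwp provider in use; as shell commands (5005 is SERVER_DEBUGGER_PORT as defined above, and the jdwp pid is an example value):

    # dt_socket transports (and Android < 11): forward the fixed port
    adb forward tcp:5005 tcp:5005

    # -XjdwpProvider:adbconnection (Android >= 11): look up the jdwp pid first
    adb jdwp                          # prints the server's jdwp pid, e.g. 12345
    adb forward tcp:5005 jdwp:12345   # replace 12345 with the printed pid

    # then attach a remote debugger (e.g. Android Studio "Remote") to localhost:5005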

View File

@ -45,10 +45,7 @@ struct sc_server_params {
uint32_t video_bit_rate;
uint32_t audio_bit_rate;
const char *max_fps; // float to be parsed by the server
const char *angle; // float to be parsed by the server
sc_tick screen_off_timeout;
enum sc_orientation capture_orientation;
enum sc_orientation_lock capture_orientation_lock;
int8_t lock_video_orientation;
bool control;
uint32_t display_id;
const char *new_display;
@ -69,7 +66,6 @@ struct sc_server_params {
bool power_on;
bool kill_adb_on_close;
bool camera_high_speed;
bool vd_system_decorations;
uint8_t list;
};

View File

@ -9,6 +9,8 @@
#ifdef _WIN32
# include <ws2tcpip.h>
typedef int socklen_t;
typedef SOCKET sc_raw_socket;
# define SC_RAW_SOCKET_NONE INVALID_SOCKET
#else
# include <sys/types.h>
# include <sys/socket.h>
@ -21,6 +23,8 @@
typedef struct sockaddr_in SOCKADDR_IN;
typedef struct sockaddr SOCKADDR;
typedef struct in_addr IN_ADDR;
typedef int sc_raw_socket;
# define SC_RAW_SOCKET_NONE -1
#endif
bool
@ -43,26 +47,17 @@ net_cleanup(void) {
#endif
}
static inline bool
sc_raw_socket_close(sc_raw_socket raw_sock) {
#ifndef _WIN32
return !close(raw_sock);
#else
return !closesocket(raw_sock);
#endif
}
static inline sc_socket
wrap(sc_raw_socket sock) {
#ifdef SC_SOCKET_CLOSE_ON_INTERRUPT
if (sock == SC_RAW_SOCKET_NONE) {
#ifdef _WIN32
if (sock == INVALID_SOCKET) {
return SC_SOCKET_NONE;
}
struct sc_socket_wrapper *socket = malloc(sizeof(*socket));
struct sc_socket_windows *socket = malloc(sizeof(*socket));
if (!socket) {
LOG_OOM();
sc_raw_socket_close(sock);
closesocket(sock);
return SC_SOCKET_NONE;
}
@ -77,9 +72,9 @@ wrap(sc_raw_socket sock) {
static inline sc_raw_socket
unwrap(sc_socket socket) {
#ifdef SC_SOCKET_CLOSE_ON_INTERRUPT
#ifdef _WIN32
if (socket == SC_SOCKET_NONE) {
return SC_RAW_SOCKET_NONE;
return INVALID_SOCKET;
}
return socket->socket;
@ -88,6 +83,17 @@ unwrap(sc_socket socket) {
#endif
}
#ifndef HAVE_SOCK_CLOEXEC // avoid unused-function warning
static inline bool
sc_raw_socket_close(sc_raw_socket raw_sock) {
#ifndef _WIN32
return !close(raw_sock);
#else
return !closesocket(raw_sock);
#endif
}
#endif
#ifndef HAVE_SOCK_CLOEXEC
// If SOCK_CLOEXEC does not exist, the flag must be set manually once the
// socket is created
@ -242,9 +248,9 @@ net_interrupt(sc_socket socket) {
sc_raw_socket raw_sock = unwrap(socket);
#ifdef SC_SOCKET_CLOSE_ON_INTERRUPT
#ifdef _WIN32
if (!atomic_flag_test_and_set(&socket->closed)) {
return sc_raw_socket_close(raw_sock);
return !closesocket(raw_sock);
}
return true;
#else
@ -256,15 +262,15 @@ bool
net_close(sc_socket socket) {
sc_raw_socket raw_sock = unwrap(socket);
#ifdef SC_SOCKET_CLOSE_ON_INTERRUPT
#ifdef _WIN32
bool ret = true;
if (!atomic_flag_test_and_set(&socket->closed)) {
ret = sc_raw_socket_close(raw_sock);
ret = !closesocket(raw_sock);
}
free(socket);
return ret;
#else
return sc_raw_socket_close(raw_sock);
return !close(raw_sock);
#endif
}

View File

@ -7,37 +7,21 @@
#include <stdint.h>
#ifdef _WIN32
# include <winsock2.h>
typedef SOCKET sc_raw_socket;
# define SC_RAW_SOCKET_NONE INVALID_SOCKET
#else // not _WIN32
# include <sys/socket.h>
# define SC_SOCKET_NONE -1
typedef int sc_raw_socket;
# define SC_RAW_SOCKET_NONE -1
#endif
#if defined(_WIN32) || defined(__APPLE__)
// On Windows and macOS, shutdown() does not interrupt accept() or read()
// calls, so net_interrupt() must call close() instead, and net_close() must
// behave accordingly.
// This causes a small race condition (once the socket is closed, its
// handle becomes invalid and may in theory be reassigned before another
// thread calls accept() or read()), but it is deemed acceptable as a
// workaround.
# define SC_SOCKET_CLOSE_ON_INTERRUPT
#endif
#ifdef SC_SOCKET_CLOSE_ON_INTERRUPT
# include <stdatomic.h>
# define SC_SOCKET_NONE NULL
typedef struct sc_socket_wrapper {
sc_raw_socket socket;
typedef struct sc_socket_windows {
SOCKET socket;
atomic_flag closed;
} *sc_socket;
#else
#else // not _WIN32
# include <sys/socket.h>
# define SC_SOCKET_NONE -1
typedef sc_raw_socket sc_socket;
typedef int sc_socket;
#endif
#define IPV4_LOCALHOST 0x7F000001

View File

@ -64,26 +64,6 @@ sc_str_quote(const char *src) {
return quoted;
}
char *
sc_str_concat(const char *start, const char *end) {
assert(start);
assert(end);
size_t start_len = strlen(start);
size_t end_len = strlen(end);
char *result = malloc(start_len + end_len + 1);
if (!result) {
LOG_OOM();
return NULL;
}
memcpy(result, start, start_len);
memcpy(result + start_len, end, end_len + 1);
return result;
}
bool
sc_str_parse_integer(const char *s, long *out) {
char *endptr;

View File

@ -38,15 +38,6 @@ sc_str_join(char *dst, const char *const tokens[], char sep, size_t n);
char *
sc_str_quote(const char *src);
/**
* Concat two strings
*
* Return a newly allocated string, containing the concatenation of the two
* input strings.
*/
char *
sc_str_concat(const char *start, const char *end);
/**
* Parse `s` as an integer into `out`
*

View File

@ -51,6 +51,7 @@ static void test_options(void) {
"--fullscreen",
"--max-fps", "30",
"--max-size", "1024",
"--lock-video-orientation=2", // optional arguments require '='
// "--no-control" is not compatible with "--turn-screen-off"
// "--no-playback" is not compatible with "--fulscreen"
"--port", "1234:1236",
@ -79,6 +80,7 @@ static void test_options(void) {
assert(opts->fullscreen);
assert(!strcmp(opts->max_fps, "30"));
assert(opts->max_size == 1024);
assert(opts->lock_video_orientation == 2);
assert(opts->port_range.first == 1234);
assert(opts->port_range.last == 1236);
assert(!strcmp(opts->push_target, "/sdcard/Movies"));

View File

@ -407,21 +407,6 @@ static void test_serialize_open_hard_keyboard(void) {
assert(!memcmp(buf, expected, sizeof(expected)));
}
static void test_serialize_reset_video(void) {
struct sc_control_msg msg = {
.type = SC_CONTROL_MSG_TYPE_RESET_VIDEO,
};
uint8_t buf[SC_CONTROL_MSG_MAX_SIZE];
size_t size = sc_control_msg_serialize(&msg, buf);
assert(size == 1);
const uint8_t expected[] = {
SC_CONTROL_MSG_TYPE_RESET_VIDEO,
};
assert(!memcmp(buf, expected, sizeof(expected)));
}
int main(int argc, char *argv[]) {
(void) argc;
(void) argv;
@ -444,6 +429,5 @@ int main(int argc, char *argv[]) {
test_serialize_uhid_input();
test_serialize_uhid_destroy();
test_serialize_open_hard_keyboard();
test_serialize_reset_video();
return 0;
}

View File

@ -141,16 +141,6 @@ static void test_quote(void) {
free(out);
}
static void test_concat(void) {
const char *s = "2024:11";
char *out = sc_str_concat("my-prefix:", s);
// contains the concat
assert(!strcmp("my-prefix:2024:11", out));
free(out);
}
static void test_utf8_truncate(void) {
const char *s = "aÉbÔc";
assert(strlen(s) == 7); // É and Ô are 2 bytes wide
@ -399,7 +389,6 @@ int main(int argc, char *argv[]) {
test_join_truncated_before_sep();
test_join_truncated_after_sep();
test_quote();
test_concat();
test_utf8_truncate();
test_parse_integer();
test_parse_integers();

View File

@ -77,7 +77,7 @@ pip3 install meson
sudo dnf install https://download1.rpmfusion.org/free/fedora/rpmfusion-free-release-$(rpm -E %fedora).noarch.rpm
# client build dependencies
sudo dnf install SDL2-devel ffms2-devel libusb1-devel libavdevice-free-devel meson gcc make
sudo dnf install SDL2-devel ffms2-devel libusb1-devel meson gcc make
# server build dependencies
sudo dnf install java-devel
@ -233,10 +233,10 @@ install` must be run as root)._
#### Option 2: Use prebuilt server
- [`scrcpy-server-v3.0`][direct-scrcpy-server]
<sub>SHA-256: `800044c62a94d5fc16f5ab9c86d45b1050eae3eb436514d1b0d2fe2646b894ea`</sub>
- [`scrcpy-server-v2.7`][direct-scrcpy-server]
<sub>SHA-256: `a23c5659f36c260f105c022d27bcb3eafffa26070e7baa9eda66d01377a1adba`</sub>
[direct-scrcpy-server]: https://github.com/Genymobile/scrcpy/releases/download/v3.0/scrcpy-server-v3.0
[direct-scrcpy-server]: https://github.com/Genymobile/scrcpy/releases/download/v2.7/scrcpy-server-v2.7
Download the prebuilt server somewhere, and specify its path during the Meson
configuration:

View File

@ -23,20 +23,14 @@ To control the device without mirroring:
scrcpy --no-video --no-audio
```
By default, the mouse is disabled when video playback is turned off.
To control the device using a relative mouse, enable UHID mouse mode:
```bash
scrcpy --no-video --no-audio --mouse=uhid
scrcpy --no-video --no-audio -M # short version
```
By default, mouse mode is switched to UHID if video mirroring is disabled (a
relative mouse mode is required).
To also use a UHID keyboard, set it explicitly:
```bash
scrcpy --no-video --no-audio --mouse=uhid --keyboard=uhid
scrcpy --no-video --no-audio -MK # short version
scrcpy --no-video --no-audio --keyboard=uhid
scrcpy --no-video --no-audio -K # short version
```
To use AOA instead (over USB only):

View File

@ -461,30 +461,26 @@ meson setup x -Dserver_debugger=true
meson configure x -Dserver_debugger=true
```
Then recompile, and run scrcpy.
If your device runs Android 8 or below, set the `server_debugger_method` to
`old` in addition:
For Android < 11, it will start a debugger on port 5005 on the device and wait:
```bash
meson setup x -Dserver_debugger=true -Dserver_debugger_method=old
# or, if x is already configured
meson configure x -Dserver_debugger=true -Dserver_debugger_method=old
```
Then recompile.
When you start scrcpy, it will start a debugger on port 5005 on the device.
Redirect that port to the computer:
```bash
adb forward tcp:5005 tcp:5005
```
For Android >= 11, first find the listening port:
```bash
adb jdwp
# press Ctrl+C to interrupt
```
Then redirect the resulting PID:
```bash
adb forward tcp:5005 jdwp:XXXX # replace XXXX
```
In Android Studio, _Run_ > _Debug_ > _Edit configurations..._ On the left, click
on `+`, _Remote_, and fill the form:
In Android Studio, _Run_ > _Debug_ > _Edit configurations..._ On the left, click on
`+`, _Remote_, and fill the form:
- Host: `localhost`
- Port: `5005`

View File

@ -71,31 +71,6 @@ adb shell cmd display power-on 0
```
## Screen off timeout
The Android screen automatically turns off after some delay.
To change this delay while scrcpy is running:
```bash
scrcpy --screen-off-timeout=300 # 300 seconds (5 minutes)
```
The initial value is restored on exit.
It is possible to change this setting manually:
```bash
# get the current screen_off_timeout value
adb shell settings get system screen_off_timeout
# set a new value (in milliseconds)
adb shell settings put system screen_off_timeout 30000
```
Note that the Android value is in milliseconds, but the scrcpy command line
argument is in seconds.
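For example, both of the following set a 5-minute timeout (the scrcpy option
additionally restores the initial value on exit, as noted above):
```bash
scrcpy --screen-off-timeout=300                          # seconds
adb shell settings put system screen_off_timeout 300000  # milliseconds
```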
## Show touches
For presentations, it may be useful to show physical touches (on the physical

View File

@ -2,23 +2,6 @@
## Install
### From the official release
Download a static build of the [latest release]:
- [`scrcpy-linux-v3.0.tar.gz`][direct-linux] (x86_64)
<sub>SHA-256: `06cb74e22f758228c944cea048b78e42b2925c2affe2b5aca901cfd6a649e503`</sub>
[latest release]: https://github.com/Genymobile/scrcpy/releases/latest
[direct-linux]: https://github.com/Genymobile/scrcpy/releases/download/v3.0/scrcpy-linux-v3.0.tar.gz
and extract it.
_Static builds of scrcpy for Linux are still experimental._
### From your package manager
<a href="https://repology.org/project/scrcpy/versions"><img src="https://repology.org/badge/vertical-allrepos/scrcpy.svg" alt="Packaging status" align="right"></a>
Scrcpy is packaged in several distributions and package managers:
@ -30,10 +13,10 @@ Scrcpy is packaged in several distributions and package managers:
- Snap: `snap install scrcpy`
- … (see [repology](https://repology.org/project/scrcpy/versions))
### Latest version
### From an install script
To install the latest release from `master`, follow this simplified process.
However, the packaged version is not always the latest release. To install the
latest release from `master`, follow this simplified process.
First, you need to install the required packages:

View File

@ -2,23 +2,6 @@
## Install
### From the official release
Download a static build of the [latest release]:
- [`scrcpy-macos-v3.0.tar.gz`][direct-macos] (arm64)
<sub>SHA-256: `5db9821918537eb3aaf0333cdd05baf85babdd851972d5f1b71f86da0530b4bf`</sub>
[latest release]: https://github.com/Genymobile/scrcpy/releases/latest
[direct-macos]: https://github.com/Genymobile/scrcpy/releases/download/v3.0/scrcpy-macos-v3.0.tar.gz
and extract it.
_Static builds of scrcpy for macOS are still experimental._
### From a package manager
Scrcpy is available in [Homebrew]:
```bash
@ -30,7 +13,7 @@ brew install scrcpy
You need `adb`, accessible from your `PATH`. If you don't have it yet:
```bash
brew install --cask android-platform-tools
brew install android-platform-tools
```
Alternatively, Scrcpy is also available in [MacPorts], which sets up `adb` for you:

View File

@ -30,7 +30,6 @@ _<kbd>[Super]</kbd> is typically the <kbd>Windows</kbd> or <kbd>Cmd</kbd> key._
| Flip display vertically | <kbd>MOD</kbd>+<kbd>Shift</kbd>+<kbd>↑</kbd> _(up)_ \| <kbd>MOD</kbd>+<kbd>Shift</kbd>+<kbd>↓</kbd> _(down)_
| Pause or re-pause display | <kbd>MOD</kbd>+<kbd>z</kbd>
| Unpause display | <kbd>MOD</kbd>+<kbd>Shift</kbd>+<kbd>z</kbd>
| Reset video capture/encoding | <kbd>MOD</kbd>+<kbd>Shift</kbd>+<kbd>r</kbd>
| Resize window to 1:1 (pixel-perfect) | <kbd>MOD</kbd>+<kbd>g</kbd>
| Resize window to remove black borders | <kbd>MOD</kbd>+<kbd>w</kbd> \| _Double-left-click¹_
| Click on `HOME` | <kbd>MOD</kbd>+<kbd>h</kbd> \| _Middle-click_

View File

@ -27,9 +27,6 @@ preserved. That way, a device in 1920×1080 will be mirrored at 1024×576.
If encoding fails, scrcpy automatically tries again with a lower definition
(unless `--no-downsize-on-error` is enabled).
For camera mirroring, the `--max-size` value is used to select the camera source
size instead (among the available resolutions).
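For example, a possible invocation (camera mirroring requires Android 12 or
later; the exact resolution picked depends on what the camera exposes):
```bash
scrcpy --video-source=camera --max-size=1920
```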
## Bit rate
@ -96,7 +93,7 @@ Sometimes, the default encoder may have issues or even crash, so it is useful to
try another one:
```bash
scrcpy --video-codec=h264 --video-encoder=OMX.qcom.video.encoder.avc
scrcpy --video-codec=h264 --video-encoder='OMX.qcom.video.encoder.avc'
```
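To find a valid encoder name for the device, the available encoders can be
listed first (via `--list-encoders`, the client option behind the
`list_encoders=true` server parameter):
```bash
scrcpy --list-encoders
```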
@ -106,45 +103,24 @@ The orientation may be applied at 3 different levels:
- The [shortcut](shortcuts.md) <kbd>MOD</kbd>+<kbd>r</kbd> requests the
device to switch between portrait and landscape (the current running app may
refuse, if it does not support the requested orientation).
- `--capture-orientation` changes the mirroring orientation (the orientation
- `--lock-video-orientation` changes the mirroring orientation (the orientation
of the video sent from the device to the computer). This affects the
recording.
- `--orientation` is applied on the client side, and affects display and
recording. For the display, it can be changed dynamically using
[shortcuts](shortcuts.md).
To capture the video with a specific orientation:
To lock the mirroring orientation (on the capture side):
```bash
scrcpy --capture-orientation=0
scrcpy --capture-orientation=90 # 90° clockwise
scrcpy --capture-orientation=180 # 180°
scrcpy --capture-orientation=270 # 270° clockwise
scrcpy --capture-orientation=flip0 # hflip
scrcpy --capture-orientation=flip90 # hflip + 90° clockwise
scrcpy --capture-orientation=flip180 # hflip + 180°
scrcpy --capture-orientation=flip270 # hflip + 270° clockwise
scrcpy --lock-video-orientation # initial (current) orientation
scrcpy --lock-video-orientation=0 # natural orientation
scrcpy --lock-video-orientation=90 # 90° clockwise
scrcpy --lock-video-orientation=180 # 180°
scrcpy --lock-video-orientation=270 # 270° clockwise
```
The capture orientation can be locked by using `@`, so that a physical device
rotation does not change the captured video orientation:
```bash
scrcpy --capture-orientation=@ # locked to the initial orientation
scrcpy --capture-orientation=@0 # locked to 0°
scrcpy --capture-orientation=@90 # locked to 90° clockwise
scrcpy --capture-orientation=@180 # locked to 180°
scrcpy --capture-orientation=@270 # locked to 270° clockwise
scrcpy --capture-orientation=@flip0 # locked to hflip
scrcpy --capture-orientation=@flip90 # locked to hflip + 90° clockwise
scrcpy --capture-orientation=@flip180 # locked to hflip + 180°
scrcpy --capture-orientation=@flip270 # locked to hflip + 270° clockwise
```
The capture orientation transform is applied after `--crop`, but before
`--angle`.
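For instance, combining the three transforms (values reused from the examples
in this section), applied in the order crop, then capture orientation, then
angle:
```bash
scrcpy --crop=1224:1440:0:0 --capture-orientation=@90 --angle=23
```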
To orient the video (on the client side):
To orient the video (on the rendering side):
```bash
scrcpy --orientation=0
@ -165,19 +141,6 @@ to the MP4 or MKV target file. Flipping is not supported, so only the 4 first
values are allowed when recording.
## Angle
To rotate the video content by a custom angle (in degrees, clockwise):
```
scrcpy --angle=23
```
The center of rotation is the center of the visible area.
This transformation is applied after `--crop` and `--capture-orientation`.
## Crop
The device screen may be cropped to mirror only part of the screen.
@ -191,11 +154,7 @@ scrcpy --crop=1224:1440:0:0 # 1224x1440 at offset (0,0)
The values are expressed in the device natural orientation (portrait for a
phone, landscape for a tablet).
Cropping is performed before `--capture-orientation` and `--angle`.
For display mirroring, `--max-size` is applied after cropping. For camera,
`--max-size` is applied first (because it selects the source size rather than
resizing the content).
If `--max-size` is also specified, resizing is applied after cropping.
## Display
@ -216,8 +175,6 @@ scrcpy --list-displays
A secondary display may only be controlled if the device runs at least Android
10 (otherwise it is mirrored as read-only).
It is also possible to create a [virtual display](virtual_display.md).
## Buffering

View File

@ -8,6 +8,7 @@ To mirror a new virtual display instead of the device screen:
scrcpy --new-display=1920x1080
scrcpy --new-display=1920x1080/420 # force 420 dpi
scrcpy --new-display # use the main display size and density
scrcpy --new-display -m1920 # ... scaled to fit a max size of 1920
scrcpy --new-display=/240 # use the main display size and 240 dpi
```
@ -23,13 +24,3 @@ For example:
```bash
scrcpy --new-display=1920x1080 --start-app=org.videolan.vlc
```
## System decorations
By default, virtual display system decorations are enabled, but some devices
might display a broken UI. Use `--no-vd-system-decorations` to disable them.
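For example:
```bash
scrcpy --new-display=1920x1080 --no-vd-system-decorations
```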
Note that if no app is started, no content will be rendered, so no video frame
will be produced at all.

View File

@ -2,32 +2,27 @@
## Install
### From the official release
Download the [latest release]:
- [`scrcpy-win64-v3.0.zip`][direct-win64] (64-bit)
<sub>SHA-256: `dfbe8a8fef6535197acc506936bfd59d0aa0427e9b44fb2e5c550eae642f72be`</sub>
- [`scrcpy-win32-v3.0.zip`][direct-win32] (32-bit)
<sub>SHA-256: `7cbf8d7a6ebfdca7b3b161e29a481c11088305f3e0a89d28e8e62f70c7bd0028`</sub>
- [`scrcpy-win64-v2.7.zip`][direct-win64] (64-bit)
<sub>SHA-256: `5910bc18d5a16f42d84185ddc7e16a4cee6a6f5f33451559c1a1d6d0099bd5f5`</sub>
- [`scrcpy-win32-v2.7.zip`][direct-win32] (32-bit)
<sub>SHA-256: `ef4daf89d500f33d78b830625536ecb18481429dd94433e7634c824292059d06`</sub>
[latest release]: https://github.com/Genymobile/scrcpy/releases/latest
[direct-win64]: https://github.com/Genymobile/scrcpy/releases/download/v3.0/scrcpy-win64-v3.0.zip
[direct-win32]: https://github.com/Genymobile/scrcpy/releases/download/v3.0/scrcpy-win32-v3.0.zip
[direct-win64]: https://github.com/Genymobile/scrcpy/releases/download/v2.7/scrcpy-win64-v2.7.zip
[direct-win32]: https://github.com/Genymobile/scrcpy/releases/download/v2.7/scrcpy-win32-v2.7.zip
and extract it.
### From a package manager
From [Chocolatey]:
Alternatively, you can install it from a package manager, like [Chocolatey]:
```bash
choco install scrcpy
choco install adb # if you don't have it yet
```
From [Scoop]:
or [Scoop]:
```bash
@ -35,6 +30,7 @@ scoop install scrcpy
scoop install adb # if you don't have it yet
```
[Winget]: https://github.com/microsoft/winget-cli
[Chocolatey]: https://chocolatey.org/
[Scoop]: https://scoop.sh

View File

@ -2,8 +2,8 @@
set -e
BUILDDIR=build-auto
PREBUILT_SERVER_URL=https://github.com/Genymobile/scrcpy/releases/download/v3.0/scrcpy-server-v3.0
PREBUILT_SERVER_SHA256=800044c62a94d5fc16f5ab9c86d45b1050eae3eb436514d1b0d2fe2646b894ea
PREBUILT_SERVER_URL=https://github.com/Genymobile/scrcpy/releases/download/v2.7/scrcpy-server-v2.7
PREBUILT_SERVER_SHA256=a23c5659f36c260f105c022d27bcb3eafffa26070e7baa9eda66d01377a1adba
echo "[scrcpy] Downloading prebuilt server..."
wget "$PREBUILT_SERVER_URL" -O scrcpy-server

View File

@ -1,5 +1,5 @@
project('scrcpy', 'c',
version: '3.0',
version: '2.7',
meson_version: '>= 0.48',
default_options: [
'c_std=c11',

View File

@ -2,7 +2,7 @@ option('compile_app', type: 'boolean', value: true, description: 'Build the clie
option('compile_server', type: 'boolean', value: true, description: 'Build the server')
option('prebuilt_server', type: 'string', description: 'Path of the prebuilt server')
option('portable', type: 'boolean', value: false, description: 'Use scrcpy-server from the same directory as the scrcpy executable')
option('static', type: 'boolean', value: false, description: 'Use static dependencies')
option('server_debugger', type: 'boolean', value: false, description: 'Run a server debugger and wait for a client to be attached')
option('server_debugger_method', type: 'combo', choices: ['old', 'new'], value: 'new', description: 'Select the debugger method (Android < 9: "old", Android >= 9: "new")')
option('v4l2', type: 'boolean', value: true, description: 'Enable V4L2 feature when supported')
option('usb', type: 'boolean', value: true, description: 'Enable HID/OTG features when supported')

141
release.mk Normal file
View File

@ -0,0 +1,141 @@
# This makefile provides recipes to build a "portable" version of scrcpy for
# Windows.
#
# Here, "portable" means that the client and server binaries are expected to be
# anywhere, but in the same directory, instead of well-defined separate
# locations (e.g. /usr/bin/scrcpy and /usr/share/scrcpy/scrcpy-server).
#
# In particular, this implies changing the location from which the client
# pushes the server to the device.
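#
# Typical invocations (assuming the Android SDK and a mingw-w64 cross
# toolchain are available):
#
#     make -f release.mk build-server    # build scrcpy-server only
#     make -f release.mk build-win64     # cross-compile the win64 client
#     make -f release.mk release         # test, build and package everything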
.PHONY: default clean \
test test-client test-server \
build-server \
prepare-deps-win32 prepare-deps-win64 \
build-win32 build-win64 \
zip-win32 zip-win64 \
package release
GRADLE ?= ./gradlew
TEST_BUILD_DIR := build-test
SERVER_BUILD_DIR := build-server
WIN32_BUILD_DIR := build-win32
WIN64_BUILD_DIR := build-win64
VERSION ?= $(shell git describe --tags --exclude='*install-release' --always)
ZIP := zip
WIN32_TARGET_DIR := scrcpy-win32-$(VERSION)
WIN64_TARGET_DIR := scrcpy-win64-$(VERSION)
WIN32_TARGET := $(WIN32_TARGET_DIR).zip
WIN64_TARGET := $(WIN64_TARGET_DIR).zip
RELEASE_DIR := release-$(VERSION)
release: clean test build-server build-win32 build-win64 package
clean:
$(GRADLE) clean
rm -rf "$(ZIP)" "$(TEST_BUILD_DIR)" "$(SERVER_BUILD_DIR)" \
"$(WIN32_BUILD_DIR)" "$(WIN64_BUILD_DIR)"
test-client:
[ -d "$(TEST_BUILD_DIR)" ] || ( mkdir "$(TEST_BUILD_DIR)" && \
meson setup "$(TEST_BUILD_DIR)" -Db_sanitize=address )
ninja -C "$(TEST_BUILD_DIR)"
test-server:
$(GRADLE) -p server check
test: test-client test-server
build-server:
$(GRADLE) -p server assembleRelease
mkdir -p "$(SERVER_BUILD_DIR)/server"
cp server/build/outputs/apk/release/server-release-unsigned.apk \
"$(SERVER_BUILD_DIR)/server/scrcpy-server"
prepare-deps-win32:
@app/deps/adb.sh win32
@app/deps/sdl.sh win32
@app/deps/ffmpeg.sh win32
@app/deps/libusb.sh win32
prepare-deps-win64:
@app/deps/adb.sh win64
@app/deps/sdl.sh win64
@app/deps/ffmpeg.sh win64
@app/deps/libusb.sh win64
build-win32: prepare-deps-win32
rm -rf "$(WIN32_BUILD_DIR)"
mkdir -p "$(WIN32_BUILD_DIR)/local"
meson setup "$(WIN32_BUILD_DIR)" \
--pkg-config-path="app/deps/work/install/win32/lib/pkgconfig" \
-Dc_args="-I$(PWD)/app/deps/work/install/win32/include" \
-Dc_link_args="-L$(PWD)/app/deps/work/install/win32/lib" \
--cross-file=cross_win32.txt \
--buildtype=release --strip -Db_lto=true \
-Dcompile_server=false \
-Dportable=true
ninja -C "$(WIN32_BUILD_DIR)"
# Group intermediate outputs into a 'dist' directory
mkdir -p "$(WIN32_BUILD_DIR)/dist"
cp "$(WIN32_BUILD_DIR)"/app/scrcpy.exe "$(WIN32_BUILD_DIR)/dist/"
cp app/data/scrcpy-console.bat "$(WIN32_BUILD_DIR)/dist/"
cp app/data/scrcpy-noconsole.vbs "$(WIN32_BUILD_DIR)/dist/"
cp app/data/icon.png "$(WIN32_BUILD_DIR)/dist/"
cp app/data/open_a_terminal_here.bat "$(WIN32_BUILD_DIR)/dist/"
cp app/deps/work/install/win32/bin/*.dll "$(WIN32_BUILD_DIR)/dist/"
cp app/deps/work/install/win32/bin/adb.exe "$(WIN32_BUILD_DIR)/dist/"
build-win64: prepare-deps-win64
rm -rf "$(WIN64_BUILD_DIR)"
mkdir -p "$(WIN64_BUILD_DIR)/local"
meson setup "$(WIN64_BUILD_DIR)" \
--pkg-config-path="app/deps/work/install/win64/lib/pkgconfig" \
-Dc_args="-I$(PWD)/app/deps/work/install/win64/include" \
-Dc_link_args="-L$(PWD)/app/deps/work/install/win64/lib" \
--cross-file=cross_win64.txt \
--buildtype=release --strip -Db_lto=true \
-Dcompile_server=false \
-Dportable=true
ninja -C "$(WIN64_BUILD_DIR)"
# Group intermediate outputs into a 'dist' directory
mkdir -p "$(WIN64_BUILD_DIR)/dist"
cp "$(WIN64_BUILD_DIR)"/app/scrcpy.exe "$(WIN64_BUILD_DIR)/dist/"
cp app/data/scrcpy-console.bat "$(WIN64_BUILD_DIR)/dist/"
cp app/data/scrcpy-noconsole.vbs "$(WIN64_BUILD_DIR)/dist/"
cp app/data/icon.png "$(WIN64_BUILD_DIR)/dist/"
cp app/data/open_a_terminal_here.bat "$(WIN64_BUILD_DIR)/dist/"
cp app/deps/work/install/win64/bin/*.dll "$(WIN64_BUILD_DIR)/dist/"
cp app/deps/work/install/win64/bin/adb.exe "$(WIN64_BUILD_DIR)/dist/"
zip-win32:
mkdir -p "$(ZIP)/$(WIN32_TARGET_DIR)"
cp -r "$(WIN32_BUILD_DIR)/dist/." "$(ZIP)/$(WIN32_TARGET_DIR)/"
cp "$(SERVER_BUILD_DIR)"/server/scrcpy-server "$(ZIP)/$(WIN32_TARGET_DIR)/"
cd "$(ZIP)"; \
zip -r "$(WIN32_TARGET)" "$(WIN32_TARGET_DIR)"
rm -rf "$(ZIP)/$(WIN32_TARGET_DIR)"
zip-win64:
mkdir -p "$(ZIP)/$(WIN64_TARGET_DIR)"
cp -r "$(WIN64_BUILD_DIR)/dist/." "$(ZIP)/$(WIN64_TARGET_DIR)/"
cp "$(SERVER_BUILD_DIR)"/server/scrcpy-server "$(ZIP)/$(WIN64_TARGET_DIR)/"
cd "$(ZIP)"; \
zip -r "$(WIN64_TARGET)" "$(WIN64_TARGET_DIR)"
rm -rf "$(ZIP)/$(WIN64_TARGET_DIR)"
package: zip-win32 zip-win64
mkdir -p "$(RELEASE_DIR)"
cp "$(SERVER_BUILD_DIR)/server/scrcpy-server" \
"$(RELEASE_DIR)/scrcpy-server-$(VERSION)"
cp "$(ZIP)/$(WIN32_TARGET)" "$(RELEASE_DIR)"
cp "$(ZIP)/$(WIN64_TARGET)" "$(RELEASE_DIR)"
cd "$(RELEASE_DIR)" && \
sha256sum "scrcpy-server-$(VERSION)" \
"scrcpy-win32-$(VERSION).zip" \
"scrcpy-win64-$(VERSION).zip" > SHA256SUMS.txt
@echo "Release generated in $(RELEASE_DIR)/"

2
release.sh Executable file
View File

@ -0,0 +1,2 @@
#!/bin/bash
make -f release.mk

2
release/.gitignore vendored
View File

@ -1,2 +0,0 @@
/work
/output

View File

@ -1,5 +0,0 @@
# This file must be sourced from the release scripts directory
WORK_DIR="$PWD/work"
OUTPUT_DIR="$PWD/output"
VERSION="${VERSION:-$(git describe --tags --always)}"

View File

@ -1,36 +0,0 @@
#!/bin/bash
set -ex
cd "$(dirname ${BASH_SOURCE[0]})"
. build_common
cd .. # root project dir
LINUX_BUILD_DIR="$WORK_DIR/build-linux"
app/deps/adb_linux.sh
app/deps/sdl.sh linux native static
app/deps/ffmpeg.sh linux native static
app/deps/libusb.sh linux native static
DEPS_INSTALL_DIR="$PWD/app/deps/work/install/linux-native-static"
ADB_INSTALL_DIR="$PWD/app/deps/work/install/adb-linux"
rm -rf "$LINUX_BUILD_DIR"
meson setup "$LINUX_BUILD_DIR" \
--pkg-config-path="$DEPS_INSTALL_DIR/lib/pkgconfig" \
-Dc_args="-I$DEPS_INSTALL_DIR/include" \
-Dc_link_args="-L$DEPS_INSTALL_DIR/lib" \
--buildtype=release \
--strip \
-Db_lto=true \
-Dcompile_server=false \
-Dportable=true \
-Dstatic=true
ninja -C "$LINUX_BUILD_DIR"
# Group intermediate outputs into a 'dist' directory
mkdir -p "$LINUX_BUILD_DIR/dist"
cp "$LINUX_BUILD_DIR"/app/scrcpy "$LINUX_BUILD_DIR/dist/scrcpy_bin"
cp app/data/icon.png "$LINUX_BUILD_DIR/dist/"
cp app/data/scrcpy_static_wrapper.sh "$LINUX_BUILD_DIR/dist/scrcpy"
cp app/scrcpy.1 "$LINUX_BUILD_DIR/dist/"
cp -r "$ADB_INSTALL_DIR"/. "$LINUX_BUILD_DIR/dist/"

View File

@ -1,36 +0,0 @@
#!/bin/bash
set -ex
cd "$(dirname ${BASH_SOURCE[0]})"
. build_common
cd .. # root project dir
MACOS_BUILD_DIR="$WORK_DIR/build-macos"
app/deps/adb_macos.sh
app/deps/sdl.sh macos native static
app/deps/ffmpeg.sh macos native static
app/deps/libusb.sh macos native static
DEPS_INSTALL_DIR="$PWD/app/deps/work/install/macos-native-static"
ADB_INSTALL_DIR="$PWD/app/deps/work/install/adb-macos"
rm -rf "$MACOS_BUILD_DIR"
meson setup "$MACOS_BUILD_DIR" \
--pkg-config-path="$DEPS_INSTALL_DIR/lib/pkgconfig" \
-Dc_args="-I$DEPS_INSTALL_DIR/include" \
-Dc_link_args="-L$DEPS_INSTALL_DIR/lib" \
--buildtype=release \
--strip \
-Db_lto=true \
-Dcompile_server=false \
-Dportable=true \
-Dstatic=true
ninja -C "$MACOS_BUILD_DIR"
# Group intermediate outputs into a 'dist' directory
mkdir -p "$MACOS_BUILD_DIR/dist"
cp "$MACOS_BUILD_DIR"/app/scrcpy "$MACOS_BUILD_DIR/dist/scrcpy_bin"
cp app/data/icon.png "$MACOS_BUILD_DIR/dist/"
cp app/data/scrcpy_static_wrapper.sh "$MACOS_BUILD_DIR/dist/scrcpy"
cp app/scrcpy.1 "$MACOS_BUILD_DIR/dist/"
cp -r "$ADB_INSTALL_DIR"/. "$MACOS_BUILD_DIR/dist/"

View File

@ -1,14 +0,0 @@
#!/bin/bash
set -ex
cd "$(dirname ${BASH_SOURCE[0]})"
. build_common
cd .. # root project dir
GRADLE="${GRADLE:-./gradlew}"
SERVER_BUILD_DIR="$WORK_DIR/build-server"
rm -rf "$SERVER_BUILD_DIR"
"$GRADLE" -p server assembleRelease
mkdir -p "$SERVER_BUILD_DIR/server"
cp server/build/outputs/apk/release/server-release-unsigned.apk \
"$SERVER_BUILD_DIR/server/scrcpy-server"

View File

@ -1,52 +0,0 @@
#!/bin/bash
set -ex
case "$1" in
32)
WINXX=win32
;;
64)
WINXX=win64
;;
*)
echo "ERROR: $0 must be called with one argument: 32 or 64" >&2
exit 1
;;
esac
cd "$(dirname ${BASH_SOURCE[0]})"
. build_common
cd .. # root project dir
WINXX_BUILD_DIR="$WORK_DIR/build-$WINXX"
app/deps/adb_windows.sh
app/deps/sdl.sh $WINXX cross shared
app/deps/ffmpeg.sh $WINXX cross shared
app/deps/libusb.sh $WINXX cross shared
DEPS_INSTALL_DIR="$PWD/app/deps/work/install/$WINXX-cross-shared"
ADB_INSTALL_DIR="$PWD/app/deps/work/install/adb-windows"
rm -rf "$WINXX_BUILD_DIR"
meson setup "$WINXX_BUILD_DIR" \
--pkg-config-path="$DEPS_INSTALL_DIR/lib/pkgconfig" \
-Dc_args="-I$DEPS_INSTALL_DIR/include" \
-Dc_link_args="-L$DEPS_INSTALL_DIR/lib" \
--cross-file=cross_$WINXX.txt \
--buildtype=release \
--strip \
-Db_lto=true \
-Dcompile_server=false \
-Dportable=true
ninja -C "$WINXX_BUILD_DIR"
# Group intermediate outputs into a 'dist' directory
mkdir -p "$WINXX_BUILD_DIR/dist"
cp "$WINXX_BUILD_DIR"/app/scrcpy.exe "$WINXX_BUILD_DIR/dist/"
cp app/data/scrcpy-console.bat "$WINXX_BUILD_DIR/dist/"
cp app/data/scrcpy-noconsole.vbs "$WINXX_BUILD_DIR/dist/"
cp app/data/icon.png "$WINXX_BUILD_DIR/dist/"
cp app/data/open_a_terminal_here.bat "$WINXX_BUILD_DIR/dist/"
cp "$DEPS_INSTALL_DIR"/bin/*.dll "$WINXX_BUILD_DIR/dist/"
cp -r "$ADB_INSTALL_DIR"/. "$WINXX_BUILD_DIR/dist/"

View File

@ -1,13 +0,0 @@
#!/bin/bash
set -ex
cd "$(dirname ${BASH_SOURCE[0]})"
. build_common
cd "$OUTPUT_DIR"
sha256sum "scrcpy-server-$VERSION" \
"scrcpy-linux-$VERSION.tar.gz" \
"scrcpy-win32-$VERSION.zip" \
"scrcpy-win64-$VERSION.zip" \
"scrcpy-macos-$VERSION.tar.gz" \
| tee SHA256SUMS.txt
echo "Release checksums generated in $PWD/SHA256SUMS.txt"

View File

@ -1,52 +0,0 @@
#!/bin/bash
set -ex
cd "$(dirname ${BASH_SOURCE[0]})"
. build_common
cd .. # root project dir
if [[ $# != 2 ]]
then
# <target_name>: for example win64
# <format>: zip or tar.gz
echo "Syntax: $0 <target> <format>" >&2
exit 1
fi
FORMAT=$2
if [[ "$2" != zip && "$2" != tar.gz ]]
then
echo "Invalid format (expected zip or tar.gz): $2" >&2
exit 1
fi
BUILD_DIR="$WORK_DIR/build-$1"
ARCHIVE_DIR="$BUILD_DIR/release-archive"
TARGET="scrcpy-$1-$VERSION"
rm -rf "$ARCHIVE_DIR/$TARGET"
mkdir -p "$ARCHIVE_DIR/$TARGET"
cp -r "$BUILD_DIR/dist/." "$ARCHIVE_DIR/$TARGET/"
cp "$WORK_DIR/build-server/server/scrcpy-server" "$ARCHIVE_DIR/$TARGET/"
mkdir -p "$OUTPUT_DIR"
cd "$ARCHIVE_DIR"
rm -f "$OUTPUT_DIR/$TARGET.$FORMAT"
case "$FORMAT" in
zip)
zip -r "$OUTPUT_DIR/$TARGET.zip" "$TARGET"
;;
tar.gz)
tar cvf "$OUTPUT_DIR/$TARGET.tar.gz" "$TARGET"
;;
*)
echo "Invalid format (expected zip or tar.gz): $FORMAT" >&2
exit 1
esac
rm -rf "$TARGET"
cd -
echo "Generated '$OUTPUT_DIR/$TARGET.$FORMAT'"

View File

@ -1,10 +0,0 @@
#!/bin/bash
set -ex
cd "$(dirname ${BASH_SOURCE[0]})"
OUTPUT_DIR="$PWD/output"
. build_common
cd .. # root project dir
mkdir -p "$OUTPUT_DIR"
cp "$WORK_DIR/build-server/server/scrcpy-server" "$OUTPUT_DIR/scrcpy-server-$VERSION"
echo "Generated '$OUTPUT_DIR/scrcpy-server-$VERSION'"

View File

@ -1,24 +0,0 @@
#!/bin/bash
# To customize the version name:
# VERSION=myversion ./release.sh
set -e
cd "$(dirname ${BASH_SOURCE[0]})"
rm -rf output
./test_server.sh
./test_client.sh
./build_server.sh
./build_windows.sh 32
./build_windows.sh 64
./build_linux.sh
./package_server.sh
./package_client.sh win32 zip
./package_client.sh win64 zip
./package_client.sh linux tar.gz
./generate_checksums.sh
echo "Release generated in $PWD/output"

View File

@ -1,12 +0,0 @@
#!/bin/bash
set -ex
cd "$(dirname ${BASH_SOURCE[0]})"
. build_common
cd .. # root project dir
TEST_BUILD_DIR="$WORK_DIR/build-test"
rm -rf "$TEST_BUILD_DIR"
meson setup "$TEST_BUILD_DIR" -Dcompile_server=false \
-Db_sanitize=address,undefined
ninja -C "$TEST_BUILD_DIR" test

View File

@ -1,9 +0,0 @@
#!/bin/bash
set -ex
cd "$(dirname ${BASH_SOURCE[0]})"
. build_common
cd .. # root project dir
GRADLE="${GRADLE:-./gradlew}"
"$GRADLE" -p server check

View File

@ -7,8 +7,8 @@ android {
applicationId "com.genymobile.scrcpy"
minSdkVersion 21
targetSdkVersion 35
versionCode 30000
versionName "3.0"
versionCode 20700
versionName "2.7"
testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner"
}
buildTypes {

View File

@ -12,11 +12,10 @@
set -e
SCRCPY_DEBUG=false
SCRCPY_VERSION_NAME=3.0
SCRCPY_VERSION_NAME=2.7
PLATFORM=${ANDROID_PLATFORM:-35}
BUILD_TOOLS=${ANDROID_BUILD_TOOLS:-35.0.0}
PLATFORM_TOOLS="$ANDROID_HOME/platforms/android-$PLATFORM"
BUILD_TOOLS_DIR="$ANDROID_HOME/build-tools/$BUILD_TOOLS"
BUILD_DIR="$(realpath ${BUILD_DIR:-build_manual})"
@ -24,8 +23,7 @@ CLASSES_DIR="$BUILD_DIR/classes"
GEN_DIR="$BUILD_DIR/gen"
SERVER_DIR=$(dirname "$0")
SERVER_BINARY=scrcpy-server
ANDROID_JAR="$PLATFORM_TOOLS/android.jar"
ANDROID_AIDL="$PLATFORM_TOOLS/framework.aidl"
ANDROID_JAR="$ANDROID_HOME/platforms/android-$PLATFORM/android.jar"
LAMBDA_JAR="$BUILD_TOOLS_DIR/core-lambda-stubs.jar"
echo "Platform: android-$PLATFORM"
@ -51,20 +49,12 @@ cd "$SERVER_DIR/src/main/aidl"
"$BUILD_TOOLS_DIR/aidl" -o"$GEN_DIR" -I. \
android/content/IOnPrimaryClipChangedListener.aidl
"$BUILD_TOOLS_DIR/aidl" -o"$GEN_DIR" -I. android/view/IDisplayFoldListener.aidl
"$BUILD_TOOLS_DIR/aidl" -o"$GEN_DIR" -I. -p "$ANDROID_AIDL" \
android/view/IDisplayWindowListener.aidl
# Fake sources to expose hidden Android types to the project
FAKE_SRC=( \
android/content/*java \
)
SRC=( \
com/genymobile/scrcpy/*.java \
com/genymobile/scrcpy/audio/*.java \
com/genymobile/scrcpy/control/*.java \
com/genymobile/scrcpy/device/*.java \
com/genymobile/scrcpy/opengl/*.java \
com/genymobile/scrcpy/util/*.java \
com/genymobile/scrcpy/video/*.java \
com/genymobile/scrcpy/wrappers/*.java \
@ -78,11 +68,10 @@ done
echo "Compiling java sources..."
cd ../java
javac -encoding UTF-8 -bootclasspath "$ANDROID_JAR" \
javac -bootclasspath "$ANDROID_JAR" \
-cp "$LAMBDA_JAR:$GEN_DIR" \
-d "$CLASSES_DIR" \
-source 1.8 -target 1.8 \
${FAKE_SRC[@]} \
${SRC[@]}
echo "Dexing..."

View File

@ -1,66 +0,0 @@
/*
* Copyright (C) 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package android.view;
import android.graphics.Rect;
import android.content.res.Configuration;
import java.util.List;
/**
* Interface to listen for changes to display window-containers.
*
* This differs from DisplayManager's DisplayListener in a couple ways:
* - onDisplayAdded is always called after the display is actually added to the WM hierarchy.
* This corresponds to the DisplayContent and not the raw Display from DisplayManager.
* - onDisplayConfigurationChanged is called for all configuration changes, not just changes
* to displayinfo (eg. windowing-mode).
*
*/
oneway interface IDisplayWindowListener {
/**
* Called when a new display is added to the WM hierarchy. The existing display ids are returned
* when this listener is registered with WM via {@link #registerDisplayWindowListener}.
*/
void onDisplayAdded(int displayId);
/**
* Called when a display's window-container configuration has changed.
*/
void onDisplayConfigurationChanged(int displayId, in Configuration newConfig);
/**
* Called when a display is removed from the hierarchy.
*/
void onDisplayRemoved(int displayId);
/**
* Called when fixed rotation is started on a display.
*/
void onFixedRotationStarted(int displayId, int newRotation);
/**
* Called when the previous fixed rotation on a display is finished.
*/
void onFixedRotationFinished(int displayId);
/**
* Called when the keep clear areas on a display have changed.
*/
void onKeepClearAreasChanged(int displayId, in List<Rect> restricted, in List<Rect> unrestricted);
}

View File

@ -1,5 +0,0 @@
package android.content;
public interface IContentProvider {
// android.content.IContentProvider is hidden; this is a fake one to expose the type to the project
}

View File

@ -5,8 +5,6 @@ import com.genymobile.scrcpy.util.Ln;
import com.genymobile.scrcpy.util.Settings;
import com.genymobile.scrcpy.util.SettingsException;
import android.os.BatteryManager;
import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
@ -18,132 +16,59 @@ import java.io.OutputStream;
*/
public final class CleanUp {
// Dynamic options
private static final int PENDING_CHANGE_DISPLAY_POWER = 1 << 0;
private int pendingChanges;
private boolean pendingRestoreDisplayPower;
private static final int MSG_TYPE_MASK = 0b11;
private static final int MSG_TYPE_RESTORE_STAY_ON = 0;
private static final int MSG_TYPE_DISABLE_SHOW_TOUCHES = 1;
private static final int MSG_TYPE_RESTORE_DISPLAY_POWER = 2;
private static final int MSG_TYPE_POWER_OFF_SCREEN = 3;
private Thread thread;
private static final int MSG_PARAM_SHIFT = 2;
private CleanUp(Options options) {
thread = new Thread(() -> runCleanUp(options), "cleanup");
thread.start();
private final OutputStream out;
public CleanUp(OutputStream out) {
this.out = out;
}
public static CleanUp start(Options options) {
return new CleanUp(options);
}
public void interrupt() {
thread.interrupt();
}
public void join() throws InterruptedException {
thread.join();
}
private void runCleanUp(Options options) {
boolean disableShowTouches = false;
if (options.getShowTouches()) {
try {
String oldValue = Settings.getAndPutValue(Settings.TABLE_SYSTEM, "show_touches", "1");
// If "show touches" was disabled, it must be disabled back on clean up
disableShowTouches = !"1".equals(oldValue);
} catch (SettingsException e) {
Ln.e("Could not change \"show_touches\"", e);
}
}
int restoreStayOn = -1;
if (options.getStayAwake()) {
int stayOn = BatteryManager.BATTERY_PLUGGED_AC | BatteryManager.BATTERY_PLUGGED_USB | BatteryManager.BATTERY_PLUGGED_WIRELESS;
try {
String oldValue = Settings.getAndPutValue(Settings.TABLE_GLOBAL, "stay_on_while_plugged_in", String.valueOf(stayOn));
try {
int currentStayOn = Integer.parseInt(oldValue);
// Restore only if the current value is different
if (currentStayOn != stayOn) {
restoreStayOn = currentStayOn;
}
} catch (NumberFormatException e) {
// ignore
}
} catch (SettingsException e) {
Ln.e("Could not change \"stay_on_while_plugged_in\"", e);
}
}
int restoreScreenOffTimeout = -1;
int screenOffTimeout = options.getScreenOffTimeout();
if (screenOffTimeout != -1) {
try {
String oldValue = Settings.getAndPutValue(Settings.TABLE_SYSTEM, "screen_off_timeout", String.valueOf(screenOffTimeout));
try {
int currentScreenOffTimeout = Integer.parseInt(oldValue);
// Restore only if the current value is different
if (currentScreenOffTimeout != screenOffTimeout) {
restoreScreenOffTimeout = currentScreenOffTimeout;
}
} catch (NumberFormatException e) {
// ignore
}
} catch (SettingsException e) {
Ln.e("Could not change \"screen_off_timeout\"", e);
}
}
boolean powerOffScreen = options.getPowerOffScreenOnClose();
int displayId = options.getDisplayId();
try {
run(displayId, restoreStayOn, disableShowTouches, powerOffScreen, restoreScreenOffTimeout);
} catch (InterruptedException e) {
// ignore
} catch (IOException e) {
Ln.e("Clean up I/O exception", e);
}
}
private void run(int displayId, int restoreStayOn, boolean disableShowTouches, boolean powerOffScreen, int restoreScreenOffTimeout)
throws IOException, InterruptedException {
String[] cmd = {
"app_process",
"/",
CleanUp.class.getName(),
String.valueOf(displayId),
String.valueOf(restoreStayOn),
String.valueOf(disableShowTouches),
String.valueOf(powerOffScreen),
String.valueOf(restoreScreenOffTimeout),
};
public static CleanUp configure(int displayId) throws IOException {
String[] cmd = {"app_process", "/", CleanUp.class.getName(), String.valueOf(displayId)};
ProcessBuilder builder = new ProcessBuilder(cmd);
builder.environment().put("CLASSPATH", Server.SERVER_PATH);
Process process = builder.start();
OutputStream out = process.getOutputStream();
return new CleanUp(process.getOutputStream());
}
while (true) {
int localPendingChanges;
boolean localPendingRestoreDisplayPower;
synchronized (this) {
while (pendingChanges == 0) {
wait();
}
localPendingChanges = pendingChanges;
localPendingRestoreDisplayPower = pendingRestoreDisplayPower;
pendingChanges = 0;
}
if ((localPendingChanges & PENDING_CHANGE_DISPLAY_POWER) != 0) {
out.write(localPendingRestoreDisplayPower ? 1 : 0);
out.flush();
}
private boolean sendMessage(int type, int param) {
assert (type & ~MSG_TYPE_MASK) == 0;
int msg = type | param << MSG_PARAM_SHIFT;
try {
out.write(msg);
out.flush();
return true;
} catch (IOException e) {
Ln.w("Could not configure cleanup (type=" + type + ", param=" + param + ")", e);
return false;
}
}
public synchronized void setRestoreDisplayPower(boolean restoreDisplayPower) {
pendingRestoreDisplayPower = restoreDisplayPower;
pendingChanges |= PENDING_CHANGE_DISPLAY_POWER;
notify();
public boolean setRestoreStayOn(int restoreValue) {
// Restore the value (between 0 and 7), -1 to not restore
// <https://developer.android.com/reference/android/provider/Settings.Global#STAY_ON_WHILE_PLUGGED_IN>
assert restoreValue >= -1 && restoreValue <= 7;
return sendMessage(MSG_TYPE_RESTORE_STAY_ON, restoreValue & 0b1111);
}
public boolean setDisableShowTouches(boolean disableOnExit) {
return sendMessage(MSG_TYPE_DISABLE_SHOW_TOUCHES, disableOnExit ? 1 : 0);
}
public boolean setRestoreDisplayPower(boolean restoreOnExit) {
return sendMessage(MSG_TYPE_RESTORE_DISPLAY_POWER, restoreOnExit ? 1 : 0);
}
public boolean setPowerOffScreen(boolean powerOffScreenOnExit) {
return sendMessage(MSG_TYPE_POWER_OFF_SCREEN, powerOffScreenOnExit ? 1 : 0);
}
public static void unlinkSelf() {
@ -158,21 +83,35 @@ public final class CleanUp {
unlinkSelf();
int displayId = Integer.parseInt(args[0]);
int restoreStayOn = Integer.parseInt(args[1]);
boolean disableShowTouches = Boolean.parseBoolean(args[2]);
boolean powerOffScreen = Boolean.parseBoolean(args[3]);
int restoreScreenOffTimeout = Integer.parseInt(args[4]);
// Dynamic option
int restoreStayOn = -1;
boolean disableShowTouches = false;
boolean restoreDisplayPower = false;
boolean powerOffScreen = false;
try {
// Wait for the server to die
int msg;
while ((msg = System.in.read()) != -1) {
// Only restore display power
assert msg == 0 || msg == 1;
restoreDisplayPower = msg != 0;
int type = msg & MSG_TYPE_MASK;
int param = msg >> MSG_PARAM_SHIFT;
switch (type) {
case MSG_TYPE_RESTORE_STAY_ON:
restoreStayOn = param > 7 ? -1 : param;
break;
case MSG_TYPE_DISABLE_SHOW_TOUCHES:
disableShowTouches = param != 0;
break;
case MSG_TYPE_RESTORE_DISPLAY_POWER:
restoreDisplayPower = param != 0;
break;
case MSG_TYPE_POWER_OFF_SCREEN:
powerOffScreen = param != 0;
break;
default:
Ln.w("Unexpected msg type: " + type);
break;
}
}
} catch (IOException e) {
// Expected when the server is dead
@ -198,24 +137,13 @@ public final class CleanUp {
}
}
if (restoreScreenOffTimeout != -1) {
Ln.i("Restoring \"screen off timeout\"");
try {
Settings.putValue(Settings.TABLE_SYSTEM, "screen_off_timeout", String.valueOf(restoreScreenOffTimeout));
} catch (SettingsException e) {
Ln.e("Could not restore \"screen_off_timeout\"", e);
}
}
// Change the power of the main display when mirroring a virtual display
int targetDisplayId = displayId != Device.DISPLAY_ID_NONE ? displayId : 0;
if (Device.isScreenOn(targetDisplayId)) {
if (Device.isScreenOn() && displayId != Device.DISPLAY_ID_NONE) {
if (powerOffScreen) {
Ln.i("Power off screen");
Device.powerOffScreen(targetDisplayId);
Device.powerOffScreen(displayId);
} else if (restoreDisplayPower) {
Ln.i("Restoring display power");
Device.setDisplayPower(targetDisplayId, true);
Device.setDisplayPower(displayId, true);
}
}

View File

@ -1,14 +1,9 @@
package com.genymobile.scrcpy;
import com.genymobile.scrcpy.wrappers.ServiceManager;
import android.annotation.TargetApi;
import android.content.AttributionSource;
import android.content.ContentResolver;
import android.content.Context;
import android.content.ContextWrapper;
import android.content.IContentProvider;
import android.os.Binder;
import android.os.Process;
public final class FakeContext extends ContextWrapper {
@ -22,38 +17,6 @@ public final class FakeContext extends ContextWrapper {
return INSTANCE;
}
private final ContentResolver contentResolver = new ContentResolver(this) {
@SuppressWarnings({"unused", "ProtectedMemberInFinalClass"})
// @Override (but super-class method not visible)
protected IContentProvider acquireProvider(Context c, String name) {
return ServiceManager.getActivityManager().getContentProviderExternal(name, new Binder());
}
@SuppressWarnings("unused")
// @Override (but super-class method not visible)
public boolean releaseProvider(IContentProvider icp) {
return false;
}
@SuppressWarnings({"unused", "ProtectedMemberInFinalClass"})
// @Override (but super-class method not visible)
protected IContentProvider acquireUnstableProvider(Context c, String name) {
return null;
}
@SuppressWarnings("unused")
// @Override (but super-class method not visible)
public boolean releaseUnstableProvider(IContentProvider icp) {
return false;
}
@SuppressWarnings("unused")
// @Override (but super-class method not visible)
public void unstableProviderDied(IContentProvider icp) {
// ignore
}
};
private FakeContext() {
super(Workarounds.getSystemContext());
}
@ -86,9 +49,4 @@ public final class FakeContext extends ContextWrapper {
public Context getApplicationContext() {
return this;
}
@Override
public ContentResolver getContentResolver() {
return contentResolver;
}
}

View File

@ -4,7 +4,6 @@ import com.genymobile.scrcpy.audio.AudioCodec;
import com.genymobile.scrcpy.audio.AudioSource;
import com.genymobile.scrcpy.device.Device;
import com.genymobile.scrcpy.device.NewDisplay;
import com.genymobile.scrcpy.device.Orientation;
import com.genymobile.scrcpy.device.Size;
import com.genymobile.scrcpy.util.CodecOption;
import com.genymobile.scrcpy.util.Ln;
@ -14,7 +13,6 @@ import com.genymobile.scrcpy.video.VideoCodec;
import com.genymobile.scrcpy.video.VideoSource;
import android.graphics.Rect;
import android.util.Pair;
import java.util.List;
import java.util.Locale;
@ -34,7 +32,7 @@ public class Options {
private int videoBitRate = 8000000;
private int audioBitRate = 128000;
private float maxFps;
private float angle;
private int lockVideoOrientation = Device.LOCK_VIDEO_ORIENTATION_UNLOCKED;
private boolean tunnelForward;
private Rect crop;
private boolean control = true;
@ -47,7 +45,6 @@ public class Options {
private boolean cameraHighSpeed;
private boolean showTouches;
private boolean stayAwake;
private int screenOffTimeout = -1;
private List<CodecOption> videoCodecOptions;
private List<CodecOption> audioCodecOptions;
@ -60,10 +57,6 @@ public class Options {
private boolean powerOn = true;
private NewDisplay newDisplay;
private boolean vdSystemDecorations = true;
private Orientation.Lock captureOrientationLock = Orientation.Lock.Unlocked;
private Orientation captureOrientation = Orientation.Orient0;
private boolean listEncoders;
private boolean listDisplays;
@ -129,8 +122,8 @@ public class Options {
return maxFps;
}
public float getAngle() {
return angle;
public int getLockVideoOrientation() {
return lockVideoOrientation;
}
public boolean isTunnelForward() {
@ -181,10 +174,6 @@ public class Options {
return stayAwake;
}
public int getScreenOffTimeout() {
return screenOffTimeout;
}
public List<CodecOption> getVideoCodecOptions() {
return videoCodecOptions;
}
@ -225,18 +214,6 @@ public class Options {
return newDisplay;
}
public Orientation getCaptureOrientation() {
return captureOrientation;
}
public Orientation.Lock getCaptureOrientationLock() {
return captureOrientationLock;
}
public boolean getVDSystemDecorations() {
return vdSystemDecorations;
}
public boolean getList() {
return listEncoders || listDisplays || listCameras || listCameraSizes || listApps;
}
@ -277,6 +254,10 @@ public class Options {
return sendCodecMeta;
}
public void resetLockVideoOrientation() {
this.lockVideoOrientation = Device.LOCK_VIDEO_ORIENTATION_UNLOCKED;
}
@SuppressWarnings("MethodLength")
public static Options parse(String... args) {
if (args.length < 1) {
@ -359,8 +340,8 @@ public class Options {
case "max_fps":
options.maxFps = parseFloat("max_fps", value);
break;
case "angle":
options.angle = parseFloat("angle", value);
case "lock_video_orientation":
options.lockVideoOrientation = Integer.parseInt(value);
break;
case "tunnel_forward":
options.tunnelForward = Boolean.parseBoolean(value);
@ -382,12 +363,6 @@ public class Options {
case "stay_awake":
options.stayAwake = Boolean.parseBoolean(value);
break;
case "screen_off_timeout":
options.screenOffTimeout = Integer.parseInt(value);
if (options.screenOffTimeout < -1) {
throw new IllegalArgumentException("Invalid screen off timeout: " + options.screenOffTimeout);
}
break;
case "video_codec_options":
options.videoCodecOptions = CodecOption.parse(value);
break;
@ -466,14 +441,6 @@ public class Options {
case "new_display":
options.newDisplay = parseNewDisplay(value);
break;
case "vd_system_decorations":
options.vdSystemDecorations = Boolean.parseBoolean(value);
break;
case "capture_orientation":
Pair<Orientation.Lock, Orientation> pair = parseCaptureOrientation(value);
options.captureOrientationLock = pair.first;
options.captureOrientation = pair.second;
break;
case "send_device_meta":
options.sendDeviceMeta = Boolean.parseBoolean(value);
break;
@ -501,11 +468,6 @@ public class Options {
}
}
if (options.newDisplay != null) {
assert options.displayId == 0 : "Must not set both displayId and newDisplay";
options.displayId = Device.DISPLAY_ID_NONE;
}
return options;
}
@ -597,25 +559,4 @@ public class Options {
return new NewDisplay(size, dpi);
}
private static Pair<Orientation.Lock, Orientation> parseCaptureOrientation(String value) {
if (value.isEmpty()) {
throw new IllegalArgumentException("Empty capture orientation string");
}
Orientation.Lock lock;
if (value.charAt(0) == '@') {
// Consume '@'
value = value.substring(1);
if (value.isEmpty()) {
// Only '@': lock to the initial orientation (orientation is unused)
return Pair.create(Orientation.Lock.LockedInitial, Orientation.Orient0);
}
lock = Orientation.Lock.LockedValue;
} else {
lock = Orientation.Lock.Unlocked;
}
return Pair.create(lock, Orientation.getByName(value));
}
}

View File

@ -14,9 +14,10 @@ import com.genymobile.scrcpy.device.DesktopConnection;
import com.genymobile.scrcpy.device.Device;
import com.genymobile.scrcpy.device.NewDisplay;
import com.genymobile.scrcpy.device.Streamer;
import com.genymobile.scrcpy.opengl.OpenGLRunner;
import com.genymobile.scrcpy.util.Ln;
import com.genymobile.scrcpy.util.LogUtils;
import com.genymobile.scrcpy.util.Settings;
import com.genymobile.scrcpy.util.SettingsException;
import com.genymobile.scrcpy.video.CameraCapture;
import com.genymobile.scrcpy.video.NewDisplayCapture;
import com.genymobile.scrcpy.video.ScreenCapture;
@ -24,6 +25,7 @@ import com.genymobile.scrcpy.video.SurfaceCapture;
import com.genymobile.scrcpy.video.SurfaceEncoder;
import com.genymobile.scrcpy.video.VideoSource;
import android.os.BatteryManager;
import android.os.Build;
import java.io.File;
@ -74,6 +76,51 @@ public final class Server {
// not instantiable
}
private static void initAndCleanUp(Options options, CleanUp cleanUp) {
// This method is called from its own thread, so it may only configure cleanup actions which are NOT dynamic (i.e. they are configured once
// and for all and cannot be changed from another thread)
if (options.getShowTouches()) {
try {
String oldValue = Settings.getAndPutValue(Settings.TABLE_SYSTEM, "show_touches", "1");
// If "show touches" was disabled, it must be disabled back on clean up
if (!"1".equals(oldValue)) {
if (!cleanUp.setDisableShowTouches(true)) {
Ln.e("Could not disable show touch on exit");
}
}
} catch (SettingsException e) {
Ln.e("Could not change \"show_touches\"", e);
}
}
if (options.getStayAwake()) {
int stayOn = BatteryManager.BATTERY_PLUGGED_AC | BatteryManager.BATTERY_PLUGGED_USB | BatteryManager.BATTERY_PLUGGED_WIRELESS;
try {
String oldValue = Settings.getAndPutValue(Settings.TABLE_GLOBAL, "stay_on_while_plugged_in", String.valueOf(stayOn));
try {
int restoreStayOn = Integer.parseInt(oldValue);
if (restoreStayOn != stayOn) {
// Restore only if the current value is different
if (!cleanUp.setRestoreStayOn(restoreStayOn)) {
Ln.e("Could not restore stay on on exit");
}
}
} catch (NumberFormatException e) {
// ignore
}
} catch (SettingsException e) {
Ln.e("Could not change \"stay_on_while_plugged_in\"", e);
}
}
if (options.getPowerOffScreenOnClose()) {
if (!cleanUp.setPowerOffScreen(true)) {
Ln.e("Could not power off screen on exit");
}
}
}
private static void scrcpy(Options options) throws IOException, ConfigurationException {
if (Build.VERSION.SDK_INT < AndroidVersions.API_31_ANDROID_12 && options.getVideoSource() == VideoSource.CAMERA) {
Ln.e("Camera mirroring is not supported before Android 12");
@ -85,10 +132,32 @@ public final class Server {
throw new ConfigurationException("New virtual display is not supported");
}
if (Build.VERSION.SDK_INT >= AndroidVersions.API_34_ANDROID_14) {
int lockVideoOrientation = options.getLockVideoOrientation();
if (lockVideoOrientation != Device.LOCK_VIDEO_ORIENTATION_UNLOCKED) {
if (lockVideoOrientation != Device.LOCK_VIDEO_ORIENTATION_INITIAL_AUTO) {
Ln.e("--lock-video-orientation is broken on Android >= 14: <https://github.com/Genymobile/scrcpy/issues/4011>");
throw new ConfigurationException("--lock-video-orientation is broken on Android >= 14");
} else {
// If the flag has been set automatically (because v4l2 sink is enabled), do not fail
Ln.w("--lock-video-orientation is ignored on Android >= 14: <https://github.com/Genymobile/scrcpy/issues/4011>");
}
}
if (options.getCrop() != null) {
Ln.e("--crop is broken on Android >= 14: <https://github.com/Genymobile/scrcpy/issues/4162>");
throw new ConfigurationException("Crop is not broken on Android >= 14");
}
}
CleanUp cleanUp = null;
Thread initThread = null;
NewDisplay newDisplay = options.getNewDisplay();
int displayId = newDisplay == null ? options.getDisplayId() : Device.DISPLAY_ID_NONE;
if (options.getCleanup()) {
cleanUp = CleanUp.start(options);
cleanUp = CleanUp.configure(displayId);
initThread = startInitThread(options, cleanUp);
}
int scid = options.getScid();
@ -112,7 +181,7 @@ public final class Server {
if (control) {
ControlChannel controlChannel = connection.getControlChannel();
controller = new Controller(controlChannel, cleanUp, options);
controller = new Controller(displayId, controlChannel, cleanUp, options.getClipboardAutosync(), options.getPowerOn());
asyncProcessors.add(controller);
}
@ -131,7 +200,8 @@ public final class Server {
if (audioCodec == AudioCodec.RAW) {
audioRecorder = new AudioRawRecorder(audioCapture, audioStreamer);
} else {
audioRecorder = new AudioEncoder(audioCapture, audioStreamer, options);
audioRecorder = new AudioEncoder(audioCapture, audioStreamer, options.getAudioBitRate(), options.getAudioCodecOptions(),
options.getAudioEncoder());
}
asyncProcessors.add(audioRecorder);
}
@ -141,22 +211,20 @@ public final class Server {
options.getSendFrameMeta());
SurfaceCapture surfaceCapture;
if (options.getVideoSource() == VideoSource.DISPLAY) {
NewDisplay newDisplay = options.getNewDisplay();
if (newDisplay != null) {
surfaceCapture = new NewDisplayCapture(controller, options);
surfaceCapture = new NewDisplayCapture(controller, newDisplay, options.getMaxSize());
} else {
assert options.getDisplayId() != Device.DISPLAY_ID_NONE;
surfaceCapture = new ScreenCapture(controller, options);
assert displayId != Device.DISPLAY_ID_NONE;
surfaceCapture = new ScreenCapture(controller, displayId, options.getMaxSize(), options.getCrop(),
options.getLockVideoOrientation());
}
} else {
surfaceCapture = new CameraCapture(options);
surfaceCapture = new CameraCapture(options.getCameraId(), options.getCameraFacing(), options.getCameraSize(),
options.getMaxSize(), options.getCameraAspectRatio(), options.getCameraFps(), options.getCameraHighSpeed());
}
SurfaceEncoder surfaceEncoder = new SurfaceEncoder(surfaceCapture, videoStreamer, options);
SurfaceEncoder surfaceEncoder = new SurfaceEncoder(surfaceCapture, videoStreamer, options.getVideoBitRate(), options.getMaxFps(),
options.getVideoCodecOptions(), options.getVideoEncoder(), options.getDownsizeOnError());
asyncProcessors.add(surfaceEncoder);
if (controller != null) {
controller.setSurfaceCapture(surfaceCapture);
}
}
Completion completion = new Completion(asyncProcessors.size());
@ -168,25 +236,22 @@ public final class Server {
completion.await();
} finally {
if (cleanUp != null) {
cleanUp.interrupt();
if (initThread != null) {
initThread.interrupt();
}
for (AsyncProcessor asyncProcessor : asyncProcessors) {
asyncProcessor.stop();
}
OpenGLRunner.quit(); // quit the OpenGL thread, if any
connection.shutdown();
try {
if (cleanUp != null) {
cleanUp.join();
if (initThread != null) {
initThread.join();
}
for (AsyncProcessor asyncProcessor : asyncProcessors) {
asyncProcessor.join();
}
OpenGLRunner.join();
} catch (InterruptedException e) {
// ignore
}
@ -195,6 +260,12 @@ public final class Server {
}
}
private static Thread startInitThread(final Options options, final CleanUp cleanUp) {
Thread thread = new Thread(() -> initAndCleanUp(options, cleanUp), "init-cleanup");
thread.start();
return thread;
}
public static void main(String... args) {
int status = 0;
try {

View File

@ -2,7 +2,6 @@ package com.genymobile.scrcpy.audio;
import com.genymobile.scrcpy.AndroidVersions;
import com.genymobile.scrcpy.AsyncProcessor;
import com.genymobile.scrcpy.Options;
import com.genymobile.scrcpy.device.ConfigurationException;
import com.genymobile.scrcpy.device.Streamer;
import com.genymobile.scrcpy.util.Codec;
@ -68,12 +67,12 @@ public final class AudioEncoder implements AsyncProcessor {
private boolean ended;
public AudioEncoder(AudioCapture capture, Streamer streamer, Options options) {
public AudioEncoder(AudioCapture capture, Streamer streamer, int bitRate, List<CodecOption> codecOptions, String encoderName) {
this.capture = capture;
this.streamer = streamer;
this.bitRate = options.getAudioBitRate();
this.codecOptions = options.getAudioCodecOptions();
this.encoderName = options.getAudioEncoder();
this.bitRate = bitRate;
this.codecOptions = codecOptions;
this.encoderName = encoderName;
}
private static MediaFormat createFormat(String mimeType, int bitRate, List<CodecOption> codecOptions) {

View File

@ -24,7 +24,6 @@ public final class ControlMessage {
public static final int TYPE_UHID_DESTROY = 14;
public static final int TYPE_OPEN_HARD_KEYBOARD_SETTINGS = 15;
public static final int TYPE_START_APP = 16;
public static final int TYPE_RESET_VIDEO = 17;
public static final long SEQUENCE_INVALID = 0;

View File

@ -46,7 +46,6 @@ public class ControlMessageReader {
case ControlMessage.TYPE_COLLAPSE_PANELS:
case ControlMessage.TYPE_ROTATE_DEVICE:
case ControlMessage.TYPE_OPEN_HARD_KEYBOARD_SETTINGS:
case ControlMessage.TYPE_RESET_VIDEO:
return ControlMessage.createEmpty(type);
case ControlMessage.TYPE_UHID_CREATE:
return parseUhidCreate();

View File

@ -3,15 +3,12 @@ package com.genymobile.scrcpy.control;
import com.genymobile.scrcpy.AndroidVersions;
import com.genymobile.scrcpy.AsyncProcessor;
import com.genymobile.scrcpy.CleanUp;
import com.genymobile.scrcpy.Options;
import com.genymobile.scrcpy.device.Device;
import com.genymobile.scrcpy.device.DeviceApp;
import com.genymobile.scrcpy.device.Point;
import com.genymobile.scrcpy.device.Position;
import com.genymobile.scrcpy.device.Size;
import com.genymobile.scrcpy.util.Ln;
import com.genymobile.scrcpy.util.LogUtils;
import com.genymobile.scrcpy.video.SurfaceCapture;
import com.genymobile.scrcpy.video.VirtualDisplayListener;
import com.genymobile.scrcpy.wrappers.ClipboardManager;
import com.genymobile.scrcpy.wrappers.InputManager;
@ -96,15 +93,12 @@ public class Controller implements AsyncProcessor, VirtualDisplayListener {
private boolean keepDisplayPowerOff;
// Used for resetting video encoding on RESET_VIDEO message
private SurfaceCapture surfaceCapture;
public Controller(ControlChannel controlChannel, CleanUp cleanUp, Options options) {
this.displayId = options.getDisplayId();
public Controller(int displayId, ControlChannel controlChannel, CleanUp cleanUp, boolean clipboardAutosync, boolean powerOn) {
this.displayId = displayId;
this.controlChannel = controlChannel;
this.cleanUp = cleanUp;
this.clipboardAutosync = options.getClipboardAutosync();
this.powerOn = options.getPowerOn();
this.clipboardAutosync = clipboardAutosync;
this.powerOn = powerOn;
initPointers();
sender = new DeviceMessageSender(controlChannel);
@ -149,10 +143,6 @@ public class Controller implements AsyncProcessor, VirtualDisplayListener {
}
}
public void setSurfaceCapture(SurfaceCapture surfaceCapture) {
this.surfaceCapture = surfaceCapture;
}
private UhidManager getUhidManager() {
if (uhidManager == null) {
uhidManager = new UhidManager(sender);
@ -176,7 +166,7 @@ public class Controller implements AsyncProcessor, VirtualDisplayListener {
private void control() throws IOException {
// on start, power on the device
if (powerOn && displayId == 0 && !Device.isScreenOn(displayId)) {
if (powerOn && displayId == 0 && !Device.isScreenOn()) {
Device.pressReleaseKeycode(KeyEvent.KEYCODE_POWER, displayId, Device.INJECT_MODE_ASYNC);
// dirty hack
@ -281,7 +271,7 @@ public class Controller implements AsyncProcessor, VirtualDisplayListener {
setClipboard(msg.getText(), msg.getPaste(), msg.getSequence());
break;
case ControlMessage.TYPE_SET_DISPLAY_POWER:
if (supportsInputEvents) {
if (supportsInputEvents && displayId != Device.DISPLAY_ID_NONE) {
setDisplayPower(msg.getOn());
}
break;
@ -303,9 +293,6 @@ public class Controller implements AsyncProcessor, VirtualDisplayListener {
case ControlMessage.TYPE_START_APP:
startAppAsync(msg.getText());
break;
case ControlMessage.TYPE_RESET_VIDEO:
resetVideo();
break;
default:
// do nothing
}
@ -360,11 +347,7 @@ public class Controller implements AsyncProcessor, VirtualDisplayListener {
Point point = displayData.positionMapper.map(position);
if (point == null) {
if (Ln.isEnabled(Ln.Level.VERBOSE)) {
Size eventSize = position.getScreenSize();
Size currentSize = displayData.positionMapper.getVideoSize();
Ln.v("Ignore touch event generated for size " + eventSize + " (current size is " + currentSize + ")");
}
Ln.w("Ignore touch event, it was generated for a different device size");
return false;
}
@ -478,11 +461,7 @@ public class Controller implements AsyncProcessor, VirtualDisplayListener {
Point point = displayData.positionMapper.map(position);
if (point == null) {
if (Ln.isEnabled(Ln.Level.VERBOSE)) {
Size eventSize = position.getScreenSize();
Size currentSize = displayData.positionMapper.getVideoSize();
Ln.v("Ignore scroll event generated for size " + eventSize + " (current size is " + currentSize + ")");
}
Ln.w("Ignore scroll event, it was generated for a different device size");
return false;
}
@ -511,7 +490,7 @@ public class Controller implements AsyncProcessor, VirtualDisplayListener {
}
private boolean pressBackOrTurnScreenOn(int action) {
if (displayId == Device.DISPLAY_ID_NONE || Device.isScreenOn(displayId)) {
if (Device.isScreenOn()) {
return injectKeyEvent(action, KeyEvent.KEYCODE_BACK, 0, 0, Device.INJECT_MODE_ASYNC);
}
@ -691,12 +670,9 @@ public class Controller implements AsyncProcessor, VirtualDisplayListener {
}
private void setDisplayPower(boolean on) {
// Change the power of the main display when mirroring a virtual display
int targetDisplayId = displayId != Device.DISPLAY_ID_NONE ? displayId : 0;
boolean setDisplayPowerOk = Device.setDisplayPower(targetDisplayId, on);
boolean setDisplayPowerOk = Device.setDisplayPower(displayId, on);
if (setDisplayPowerOk) {
// Do not keep display power off for virtual displays: MOD+p must wake up the physical device
keepDisplayPowerOff = displayId != Device.DISPLAY_ID_NONE && !on;
keepDisplayPowerOff = !on;
Ln.i("Device display turned " + (on ? "on" : "off"));
if (cleanUp != null) {
boolean mustRestoreOnExit = !on;
@ -704,11 +680,4 @@ public class Controller implements AsyncProcessor, VirtualDisplayListener {
}
}
}
private void resetVideo() {
if (surfaceCapture != null) {
Ln.i("Video capture reset");
surfaceCapture.requestInvalidate();
}
}
}

View File

@ -3,46 +3,46 @@ package com.genymobile.scrcpy.control;
import com.genymobile.scrcpy.device.Point;
import com.genymobile.scrcpy.device.Position;
import com.genymobile.scrcpy.device.Size;
import com.genymobile.scrcpy.util.AffineMatrix;
import com.genymobile.scrcpy.video.ScreenInfo;
import android.graphics.Rect;
public final class PositionMapper {
private final Size videoSize;
private final AffineMatrix videoToDeviceMatrix;
private final Rect contentRect;
private final int coordsRotation;
public PositionMapper(Size videoSize, AffineMatrix videoToDeviceMatrix) {
public PositionMapper(Size videoSize, Rect contentRect, int videoRotation) {
this.videoSize = videoSize;
this.videoToDeviceMatrix = videoToDeviceMatrix;
this.contentRect = contentRect;
this.coordsRotation = reverseRotation(videoRotation);
}
public static PositionMapper create(Size videoSize, AffineMatrix filterTransform, Size targetSize) {
boolean convertToPixels = !videoSize.equals(targetSize) || filterTransform != null;
AffineMatrix transform = filterTransform;
if (convertToPixels) {
AffineMatrix inputTransform = AffineMatrix.ndcFromPixels(videoSize);
AffineMatrix outputTransform = AffineMatrix.ndcToPixels(targetSize);
transform = outputTransform.multiply(transform).multiply(inputTransform);
}
return new PositionMapper(videoSize, transform);
public static PositionMapper from(ScreenInfo screenInfo) {
// ignore the locked video orientation, the events will apply in coordinates considered in the physical device orientation
Size videoSize = screenInfo.getUnlockedVideoSize();
return new PositionMapper(videoSize, screenInfo.getContentRect(), screenInfo.getVideoRotation());
}
public Size getVideoSize() {
return videoSize;
private static int reverseRotation(int rotation) {
return (4 - rotation) % 4;
}
public Point map(Position position) {
Size clientVideoSize = position.getScreenSize();
// reverse the video rotation to apply the events
Position devicePosition = position.rotate(coordsRotation);
Size clientVideoSize = devicePosition.getScreenSize();
if (!videoSize.equals(clientVideoSize)) {
// The client sends a click relative to a video with wrong dimensions,
// the device may have been rotated since the event was generated, so ignore the event
return null;
}
Point point = position.getPoint();
if (videoToDeviceMatrix != null) {
point = videoToDeviceMatrix.apply(point);
}
return point;
Point point = devicePosition.getPoint();
int convertedX = contentRect.left + point.getX() * contentRect.width() / videoSize.getWidth();
int convertedY = contentRect.top + point.getY() * contentRect.height() / videoSize.getHeight();
return new Point(convertedX, convertedY);
}
}
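
For reference, a minimal standalone sketch (not part of the diff; all values are invented for illustration) of the scaling step performed by map() above: a click received in video coordinates is rescaled into the device content rectangle. The real class also reverses the video rotation before this step.

// Hypothetical standalone example of the contentRect scaling shown in PositionMapper.map().
public class PositionMapSketch {
    public static void main(String[] args) {
        int videoWidth = 1080, videoHeight = 2400;           // size of the mirrored video
        int left = 0, top = 0, width = 1080, height = 2400;  // content rect on the device
        int clickX = 540, clickY = 1200;                     // click received from the client

        int deviceX = left + clickX * width / videoWidth;
        int deviceY = top + clickY * height / videoHeight;
        System.out.println(deviceX + "," + deviceY);         // prints 540,1200
    }
}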

View File

@ -40,9 +40,10 @@ public final class Device {
public static final int INJECT_MODE_WAIT_FOR_RESULT = InputManager.INJECT_INPUT_EVENT_MODE_WAIT_FOR_RESULT;
public static final int INJECT_MODE_WAIT_FOR_FINISH = InputManager.INJECT_INPUT_EVENT_MODE_WAIT_FOR_FINISH;
// The new display power method introduced in Android 15 does not work as expected:
// <https://github.com/Genymobile/scrcpy/issues/5530>
private static final boolean USE_ANDROID_15_DISPLAY_POWER = false;
public static final int LOCK_VIDEO_ORIENTATION_UNLOCKED = -1;
public static final int LOCK_VIDEO_ORIENTATION_INITIAL = -2;
// like SC_LOCK_VIDEO_ORIENTATION_INITIAL, but set automatically
public static final int LOCK_VIDEO_ORIENTATION_INITIAL_AUTO = -3;
private Device() {
// not instantiable
@ -81,9 +82,8 @@ public final class Device {
&& injectKeyEvent(KeyEvent.ACTION_UP, keyCode, 0, 0, displayId, injectMode);
}
public static boolean isScreenOn(int displayId) {
assert displayId != DISPLAY_ID_NONE;
return ServiceManager.getPowerManager().isScreenOn(displayId);
public static boolean isScreenOn() {
return ServiceManager.getPowerManager().isScreenOn();
}
public static void expandNotificationPanel() {
@ -131,7 +131,7 @@ public final class Device {
public static boolean setDisplayPower(int displayId, boolean on) {
assert displayId != Device.DISPLAY_ID_NONE;
if (USE_ANDROID_15_DISPLAY_POWER && Build.VERSION.SDK_INT >= AndroidVersions.API_35_ANDROID_15) {
if (Build.VERSION.SDK_INT >= AndroidVersions.API_35_ANDROID_15) {
return ServiceManager.getDisplayManager().requestDisplayPower(displayId, on);
}
@ -181,7 +181,7 @@ public final class Device {
public static boolean powerOffScreen(int displayId) {
assert displayId != DISPLAY_ID_NONE;
if (!isScreenOn(displayId)) {
if (!isScreenOn()) {
return true;
}
return pressReleaseKeycode(KeyEvent.KEYCODE_POWER, displayId, Device.INJECT_MODE_ASYNC);

View File

@ -1,47 +0,0 @@
package com.genymobile.scrcpy.device;
public enum Orientation {
// @formatter:off
Orient0("0"),
Orient90("90"),
Orient180("180"),
Orient270("270"),
Flip0("flip0"),
Flip90("flip90"),
Flip180("flip180"),
Flip270("flip270");
public enum Lock {
Unlocked, LockedInitial, LockedValue,
}
private final String name;
Orientation(String name) {
this.name = name;
}
public static Orientation getByName(String name) {
for (Orientation orientation : values()) {
if (orientation.name.equals(name)) {
return orientation;
}
}
throw new IllegalArgumentException("Unknown orientation: " + name);
}
public static Orientation fromRotation(int rotation) {
assert rotation >= 0 && rotation < 4;
return values()[rotation];
}
public boolean isFlipped() {
return (ordinal() & 4) != 0;
}
public int getRotation() {
return ordinal() & 3;
}
}
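
A small illustration (assumed values, not from the diff) of the ordinal encoding relied on by isFlipped() and getRotation(): with the enum order above, bit 2 of the ordinal is the flip flag and the two low bits are the orthogonal rotation, so Flip90 (ordinal 5) is flipped with a 90° rotation.

// Sketch of the bit layout assumed by Orientation's ordinal-based accessors.
public class OrientationBitsSketch {
    public static void main(String[] args) {
        int ordinal = 5;                       // Flip90
        boolean flipped = (ordinal & 4) != 0;  // true
        int rotation = ordinal & 3;            // 1 (i.e. 90 degrees)
        System.out.println(flipped + " " + rotation);
    }
}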

View File

@ -29,61 +29,6 @@ public final class Size {
return new Size(height, width);
}
public Size limit(int maxSize) {
assert maxSize >= 0 : "Max size may not be negative";
assert maxSize % 8 == 0 : "Max size must be a multiple of 8";
if (maxSize == 0) {
// No limit
return this;
}
boolean portrait = height > width;
int major = portrait ? height : width;
if (major <= maxSize) {
return this;
}
int minor = portrait ? width : height;
int newMajor = maxSize;
int newMinor = maxSize * minor / major;
int w = portrait ? newMinor : newMajor;
int h = portrait ? newMajor : newMinor;
return new Size(w, h);
}
/**
* Round both dimensions of this size to be a multiple of 8 (as required by many encoders).
*
* @return The current size rounded.
*/
public Size round8() {
if (isMultipleOf8()) {
// Already a multiple of 8
return this;
}
boolean portrait = height > width;
int major = portrait ? height : width;
int minor = portrait ? width : height;
major &= ~7; // round down to not exceed the initial size
minor = (minor + 4) & ~7; // round to the nearest to minimize aspect ratio distortion
if (minor > major) {
minor = major;
}
int w = portrait ? minor : major;
int h = portrait ? major : minor;
return new Size(w, h);
}
public boolean isMultipleOf8() {
return (width & 7) == 0 && (height & 7) == 0;
}
public Rect toRect() {
return new Rect(0, 0, width, height);
}
@ -107,6 +52,6 @@ public final class Size {
@Override
public String toString() {
return width + "x" + height;
return "Size{" + width + 'x' + height + '}';
}
}
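
As a worked example (numbers chosen arbitrarily, not taken from the diff) of the limit() and round8() logic shown above: a 1080x2400 portrait size limited to 1024 keeps the aspect ratio, then both dimensions are rounded to multiples of 8 as required by encoders.

// Standalone sketch reproducing the arithmetic of Size.limit() and Size.round8().
public class SizeRoundingSketch {
    public static void main(String[] args) {
        int w = 1080, h = 2400, maxSize = 1024;    // portrait display, arbitrary example

        // limit(): scale the major dimension down to maxSize, keep the ratio
        int major = Math.max(w, h), minor = Math.min(w, h);
        int newMajor = maxSize;
        int newMinor = maxSize * minor / major;    // 1024 * 1080 / 2400 = 460

        // round8(): round major down, minor to the nearest multiple of 8
        newMajor &= ~7;                            // 1024
        newMinor = (newMinor + 4) & ~7;            // 464
        System.out.println(newMinor + "x" + newMajor);  // prints 464x1024
    }
}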

View File

@ -1,135 +0,0 @@
package com.genymobile.scrcpy.opengl;
import com.genymobile.scrcpy.util.AffineMatrix;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import java.nio.FloatBuffer;
public class AffineOpenGLFilter implements OpenGLFilter {
private int program;
private FloatBuffer vertexBuffer;
private FloatBuffer texCoordsBuffer;
private final float[] userMatrix;
private int vertexPosLoc;
private int texCoordsInLoc;
private int texLoc;
private int texMatrixLoc;
private int userMatrixLoc;
public AffineOpenGLFilter(AffineMatrix transform) {
userMatrix = transform.to4x4();
}
@Override
public void init() throws OpenGLException {
// @formatter:off
String vertexShaderCode = "#version 100\n"
+ "attribute vec4 vertex_pos;\n"
+ "attribute vec4 tex_coords_in;\n"
+ "varying vec2 tex_coords;\n"
+ "uniform mat4 tex_matrix;\n"
+ "uniform mat4 user_matrix;\n"
+ "void main() {\n"
+ " gl_Position = vertex_pos;\n"
+ " tex_coords = (tex_matrix * user_matrix * tex_coords_in).xy;\n"
+ "}";
// @formatter:off
String fragmentShaderCode = "#version 100\n"
+ "#extension GL_OES_EGL_image_external : require\n"
+ "precision highp float;\n"
+ "uniform samplerExternalOES tex;\n"
+ "varying vec2 tex_coords;\n"
+ "void main() {\n"
+ " if (tex_coords.x >= 0.0 && tex_coords.x <= 1.0\n"
+ " && tex_coords.y >= 0.0 && tex_coords.y <= 1.0) {\n"
+ " gl_FragColor = texture2D(tex, tex_coords);\n"
+ " } else {\n"
+ " gl_FragColor = vec4(0.0);\n"
+ " }\n"
+ "}";
program = GLUtils.createProgram(vertexShaderCode, fragmentShaderCode);
if (program == 0) {
throw new OpenGLException("Cannot create OpenGL program");
}
float[] vertices = {
-1, -1, // Bottom-left
1, -1, // Bottom-right
-1, 1, // Top-left
1, 1, // Top-right
};
float[] texCoords = {
0, 0, // Bottom-left
1, 0, // Bottom-right
0, 1, // Top-left
1, 1, // Top-right
};
// OpenGL will fill the 3rd and 4th coordinates of the vec4 automatically with 0.0 and 1.0 respectively
vertexBuffer = GLUtils.createFloatBuffer(vertices);
texCoordsBuffer = GLUtils.createFloatBuffer(texCoords);
vertexPosLoc = GLES20.glGetAttribLocation(program, "vertex_pos");
assert vertexPosLoc != -1;
texCoordsInLoc = GLES20.glGetAttribLocation(program, "tex_coords_in");
assert texCoordsInLoc != -1;
texLoc = GLES20.glGetUniformLocation(program, "tex");
assert texLoc != -1;
texMatrixLoc = GLES20.glGetUniformLocation(program, "tex_matrix");
assert texMatrixLoc != -1;
userMatrixLoc = GLES20.glGetUniformLocation(program, "user_matrix");
assert userMatrixLoc != -1;
}
@Override
public void draw(int textureId, float[] texMatrix) {
GLES20.glUseProgram(program);
GLUtils.checkGlError();
GLES20.glEnableVertexAttribArray(vertexPosLoc);
GLUtils.checkGlError();
GLES20.glEnableVertexAttribArray(texCoordsInLoc);
GLUtils.checkGlError();
GLES20.glVertexAttribPointer(vertexPosLoc, 2, GLES20.GL_FLOAT, false, 0, vertexBuffer);
GLUtils.checkGlError();
GLES20.glVertexAttribPointer(texCoordsInLoc, 2, GLES20.GL_FLOAT, false, 0, texCoordsBuffer);
GLUtils.checkGlError();
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLUtils.checkGlError();
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureId);
GLUtils.checkGlError();
GLES20.glUniform1i(texLoc, 0);
GLUtils.checkGlError();
GLES20.glUniformMatrix4fv(texMatrixLoc, 1, false, texMatrix, 0);
GLUtils.checkGlError();
GLES20.glUniformMatrix4fv(userMatrixLoc, 1, false, userMatrix, 0);
GLUtils.checkGlError();
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
GLUtils.checkGlError();
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
GLUtils.checkGlError();
}
@Override
public void release() {
GLES20.glDeleteProgram(program);
GLUtils.checkGlError();
}
}

View File

@ -1,124 +0,0 @@
package com.genymobile.scrcpy.opengl;
import com.genymobile.scrcpy.BuildConfig;
import com.genymobile.scrcpy.util.Ln;
import android.opengl.GLES20;
import android.opengl.GLU;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
public final class GLUtils {
private static final boolean DEBUG = BuildConfig.DEBUG;
private GLUtils() {
// not instantiable
}
public static int createProgram(String vertexSource, String fragmentSource) {
int vertexShader = createShader(GLES20.GL_VERTEX_SHADER, vertexSource);
if (vertexShader == 0) {
return 0;
}
int fragmentShader = createShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
if (fragmentShader == 0) {
GLES20.glDeleteShader(vertexShader);
return 0;
}
int program = GLES20.glCreateProgram();
if (program == 0) {
GLES20.glDeleteShader(fragmentShader);
GLES20.glDeleteShader(vertexShader);
return 0;
}
GLES20.glAttachShader(program, vertexShader);
checkGlError();
GLES20.glAttachShader(program, fragmentShader);
checkGlError();
GLES20.glLinkProgram(program);
checkGlError();
int[] linkStatus = new int[1];
GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
if (linkStatus[0] == 0) {
Ln.e("Could not link program: " + GLES20.glGetProgramInfoLog(program));
GLES20.glDeleteProgram(program);
GLES20.glDeleteShader(fragmentShader);
GLES20.glDeleteShader(vertexShader);
return 0;
}
return program;
}
public static int createShader(int type, String source) {
int shader = GLES20.glCreateShader(type);
if (shader == 0) {
Ln.e(getGlErrorMessage("Could not create shader"));
return 0;
}
GLES20.glShaderSource(shader, source);
GLES20.glCompileShader(shader);
int[] compileStatus = new int[1];
GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compileStatus, 0);
if (compileStatus[0] == 0) {
Ln.e("Could not compile " + getShaderTypeString(type) + ": " + GLES20.glGetShaderInfoLog(shader));
GLES20.glDeleteShader(shader);
return 0;
}
return shader;
}
private static String getShaderTypeString(int type) {
switch (type) {
case GLES20.GL_VERTEX_SHADER:
return "vertex shader";
case GLES20.GL_FRAGMENT_SHADER:
return "fragment shader";
default:
return "shader";
}
}
/**
* Throws a runtime exception if {@link GLES20#glGetError()} returns an error (useful for debugging).
*/
public static void checkGlError() {
if (DEBUG) {
int error = GLES20.glGetError();
if (error != GLES20.GL_NO_ERROR) {
throw new RuntimeException(toErrorString(error));
}
}
}
public static String getGlErrorMessage(String userError) {
int glError = GLES20.glGetError();
if (glError == GLES20.GL_NO_ERROR) {
return userError;
}
return userError + " (" + toErrorString(glError) + ")";
}
private static String toErrorString(int glError) {
String errorString = GLU.gluErrorString(glError);
return "glError 0x" + Integer.toHexString(glError) + " " + errorString;
}
public static FloatBuffer createFloatBuffer(float[] values) {
FloatBuffer fb = ByteBuffer.allocateDirect(values.length * 4).order(ByteOrder.nativeOrder()).asFloatBuffer();
fb.put(values);
fb.position(0);
return fb;
}
}

View File

@ -1,13 +0,0 @@
package com.genymobile.scrcpy.opengl;
import java.io.IOException;
public class OpenGLException extends IOException {
public OpenGLException(String message) {
super(message);
}
public OpenGLException(String message, Throwable cause) {
super(message, cause);
}
}

View File

@ -1,21 +0,0 @@
package com.genymobile.scrcpy.opengl;
public interface OpenGLFilter {
/**
* Initialize the OpenGL filter (typically compile the shaders and create the program).
*
* @throws OpenGLException if an initialization error occurs
*/
void init() throws OpenGLException;
/**
* Render a frame (call for each frame).
*/
void draw(int textureId, float[] texMatrix);
/**
* Release resources.
*/
void release();
}

View File

@ -1,258 +0,0 @@
package com.genymobile.scrcpy.opengl;
import com.genymobile.scrcpy.device.Size;
import android.graphics.SurfaceTexture;
import android.opengl.EGL14;
import android.opengl.EGLConfig;
import android.opengl.EGLContext;
import android.opengl.EGLDisplay;
import android.opengl.EGLExt;
import android.opengl.EGLSurface;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.os.Handler;
import android.os.HandlerThread;
import android.view.Surface;
import java.util.concurrent.Semaphore;
public final class OpenGLRunner {
private static HandlerThread handlerThread;
private static Handler handler;
private static boolean quit;
private EGLDisplay eglDisplay;
private EGLContext eglContext;
private EGLSurface eglSurface;
private final OpenGLFilter filter;
private final float[] overrideTransformMatrix;
private SurfaceTexture surfaceTexture;
private Surface inputSurface;
private int textureId;
private boolean stopped;
public OpenGLRunner(OpenGLFilter filter, float[] overrideTransformMatrix) {
this.filter = filter;
this.overrideTransformMatrix = overrideTransformMatrix;
}
public OpenGLRunner(OpenGLFilter filter) {
this(filter, null);
}
public static synchronized void initOnce() {
if (handlerThread == null) {
if (quit) {
throw new IllegalStateException("Could not init OpenGLRunner after it is quit");
}
handlerThread = new HandlerThread("OpenGLRunner");
handlerThread.start();
handler = new Handler(handlerThread.getLooper());
}
}
public static void quit() {
HandlerThread thread;
synchronized (OpenGLRunner.class) {
thread = handlerThread;
quit = true;
}
if (thread != null) {
thread.quitSafely();
}
}
public static void join() throws InterruptedException {
HandlerThread thread;
synchronized (OpenGLRunner.class) {
thread = handlerThread;
}
if (thread != null) {
thread.join();
}
}
public Surface start(Size inputSize, Size outputSize, Surface outputSurface) throws OpenGLException {
initOnce();
// Simulate CompletableFuture, but working for all Android versions
final Semaphore sem = new Semaphore(0);
Throwable[] throwableRef = new Throwable[1];
// The whole OpenGL execution must be performed on a Handler, so that SurfaceTexture.setOnFrameAvailableListener() works correctly.
// See <https://github.com/Genymobile/scrcpy/issues/5444>
handler.post(() -> {
try {
run(inputSize, outputSize, outputSurface);
} catch (Throwable throwable) {
throwableRef[0] = throwable;
} finally {
sem.release();
}
});
try {
sem.acquire();
} catch (InterruptedException e) {
// Behave as if this method call was synchronous
Thread.currentThread().interrupt();
}
Throwable throwable = throwableRef[0];
if (throwable != null) {
if (throwable instanceof OpenGLException) {
throw (OpenGLException) throwable;
}
throw new OpenGLException("Asynchronous OpenGL runner init failed", throwable);
}
// Synchronization is ok: inputSurface is written before sem.release() and read after sem.acquire()
return inputSurface;
}
private void run(Size inputSize, Size outputSize, Surface outputSurface) throws OpenGLException {
eglDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
if (eglDisplay == EGL14.EGL_NO_DISPLAY) {
throw new OpenGLException("Unable to get EGL14 display");
}
int[] version = new int[2];
if (!EGL14.eglInitialize(eglDisplay, version, 0, version, 1)) {
throw new OpenGLException("Unable to initialize EGL14");
}
// @formatter:off
int[] attribList = {
EGL14.EGL_RED_SIZE, 8,
EGL14.EGL_GREEN_SIZE, 8,
EGL14.EGL_BLUE_SIZE, 8,
EGL14.EGL_ALPHA_SIZE, 8,
EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
EGL14.EGL_NONE
};
EGLConfig[] configs = new EGLConfig[1];
int[] numConfigs = new int[1];
EGL14.eglChooseConfig(eglDisplay, attribList, 0, configs, 0, configs.length, numConfigs, 0);
if (numConfigs[0] <= 0) {
EGL14.eglTerminate(eglDisplay);
throw new OpenGLException("Unable to find ES2 EGL config");
}
EGLConfig eglConfig = configs[0];
// @formatter:off
int[] contextAttribList = {
EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
EGL14.EGL_NONE
};
eglContext = EGL14.eglCreateContext(eglDisplay, eglConfig, EGL14.EGL_NO_CONTEXT, contextAttribList, 0);
if (eglContext == null) {
EGL14.eglTerminate(eglDisplay);
throw new OpenGLException("Failed to create EGL context");
}
int[] surfaceAttribList = {
EGL14.EGL_NONE
};
eglSurface = EGL14.eglCreateWindowSurface(eglDisplay, eglConfig, outputSurface, surfaceAttribList, 0);
if (eglSurface == null) {
EGL14.eglDestroyContext(eglDisplay, eglContext);
EGL14.eglTerminate(eglDisplay);
throw new OpenGLException("Failed to create EGL window surface");
}
if (!EGL14.eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext)) {
EGL14.eglDestroySurface(eglDisplay, eglSurface);
EGL14.eglDestroyContext(eglDisplay, eglContext);
EGL14.eglTerminate(eglDisplay);
throw new OpenGLException("Failed to make EGL context current");
}
int[] textures = new int[1];
GLES20.glGenTextures(1, textures, 0);
GLUtils.checkGlError();
textureId = textures[0];
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
GLUtils.checkGlError();
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
GLUtils.checkGlError();
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLUtils.checkGlError();
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
GLUtils.checkGlError();
surfaceTexture = new SurfaceTexture(textureId);
surfaceTexture.setDefaultBufferSize(inputSize.getWidth(), inputSize.getHeight());
inputSurface = new Surface(surfaceTexture);
filter.init();
surfaceTexture.setOnFrameAvailableListener(surfaceTexture -> {
if (stopped) {
// Make sure to never render after resources have been released
return;
}
render(outputSize);
}, handler);
}
private void render(Size outputSize) {
GLES20.glViewport(0, 0, outputSize.getWidth(), outputSize.getHeight());
GLUtils.checkGlError();
surfaceTexture.updateTexImage();
float[] matrix;
if (overrideTransformMatrix != null) {
matrix = overrideTransformMatrix;
} else {
matrix = new float[16];
surfaceTexture.getTransformMatrix(matrix);
}
filter.draw(textureId, matrix);
EGLExt.eglPresentationTimeANDROID(eglDisplay, eglSurface, surfaceTexture.getTimestamp());
EGL14.eglSwapBuffers(eglDisplay, eglSurface);
}
public void stopAndRelease() {
final Semaphore sem = new Semaphore(0);
handler.post(() -> {
stopped = true;
surfaceTexture.setOnFrameAvailableListener(null, handler);
filter.release();
int[] textures = {textureId};
GLES20.glDeleteTextures(1, textures, 0);
GLUtils.checkGlError();
EGL14.eglDestroySurface(eglDisplay, eglSurface);
EGL14.eglDestroyContext(eglDisplay, eglContext);
EGL14.eglTerminate(eglDisplay);
eglDisplay = EGL14.EGL_NO_DISPLAY;
eglContext = EGL14.EGL_NO_CONTEXT;
eglSurface = EGL14.EGL_NO_SURFACE;
surfaceTexture.release();
inputSurface.release();
sem.release();
});
try {
sem.acquire();
} catch (InterruptedException e) {
// Behave as if this method call was synchronous
Thread.currentThread().interrupt();
}
}
}
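
For orientation, a hedged usage sketch of the OpenGLRunner API visible above (the encoder Surface, the sizes and the caller are assumptions, not taken from this diff): a filter is wrapped in a runner, the runner is started to obtain the input Surface that frames must be produced into, stopAndRelease() is called when the capture stops, and OpenGLRunner.quit()/join() are called once on exit, as Server does above.

// Hypothetical device-side helper; encoderSurface is whatever Surface the video encoder exposes.
static Surface startFilteredCapture(Surface encoderSurface) throws OpenGLException {
    AffineMatrix transform = AffineMatrix.rotate(90).fromCenter(); // any affine transform
    OpenGLRunner runner = new OpenGLRunner(new AffineOpenGLFilter(transform));
    Size inputSize = new Size(1080, 2400);   // size of the produced frames
    Size outputSize = new Size(464, 1024);   // size expected by the encoder
    // The returned Surface is the one the capture (virtual display, camera...) must draw into.
    // runner.stopAndRelease() must be called by the caller when the capture stops.
    return runner.start(inputSize, outputSize, encoderSurface);
}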

View File

@ -1,368 +0,0 @@
package com.genymobile.scrcpy.util;
import com.genymobile.scrcpy.device.Point;
import com.genymobile.scrcpy.device.Size;
/**
* Represents a 2D affine transform (a 3x3 matrix):
*
* <pre>
* / a c e \
* | b d f |
* \ 0 0 1 /
* </pre>
* <p>
* Or, a 4x4 matrix if we add a z axis:
*
* <pre>
* / a c 0 e \
* | b d 0 f |
* | 0 0 1 0 |
* \ 0 0 0 1 /
* </pre>
*/
public class AffineMatrix {
private final double a, b, c, d, e, f;
/**
* The identity matrix.
*/
public static final AffineMatrix IDENTITY = new AffineMatrix(1, 0, 0, 1, 0, 0);
/**
* Create a new matrix:
*
* <pre>
* / a c e \
* | b d f |
* \ 0 0 1 /
* </pre>
*/
public AffineMatrix(double a, double b, double c, double d, double e, double f) {
this.a = a;
this.b = b;
this.c = c;
this.d = d;
this.e = e;
this.f = f;
}
@Override
public String toString() {
return "[" + a + ", " + c + ", " + e + "; " + b + ", " + d + ", " + f + "]";
}
/**
* Return a matrix which converts from Normalized Device Coordinates to pixels.
*
* @param size the target size
* @return the transform matrix
*/
public static AffineMatrix ndcFromPixels(Size size) {
double w = size.getWidth();
double h = size.getHeight();
return new AffineMatrix(1 / w, 0, 0, -1 / h, 0, 1);
}
/**
* Return a matrix which converts from pixels to Normalized Device Coordinates.
*
* @param size the source size
* @return the transform matrix
*/
public static AffineMatrix ndcToPixels(Size size) {
double w = size.getWidth();
double h = size.getHeight();
return new AffineMatrix(w, 0, 0, -h, 0, h);
}
/**
* Apply the transform to a point ({@code this} should be a matrix converted to pixel coordinates via {@link #ndcToPixels(Size)}).
*
* @param point the source point
* @return the converted point
*/
public Point apply(Point point) {
int x = point.getX();
int y = point.getY();
int xx = (int) (a * x + c * y + e);
int yy = (int) (b * x + d * y + f);
return new Point(xx, yy);
}
/**
* Compute <code>this * rhs</code>.
*
* @param rhs the matrix to multiply
* @return the product
*/
public AffineMatrix multiply(AffineMatrix rhs) {
if (rhs == null) {
// For convenience
return this;
}
double aa = this.a * rhs.a + this.c * rhs.b;
double bb = this.b * rhs.a + this.d * rhs.b;
double cc = this.a * rhs.c + this.c * rhs.d;
double dd = this.b * rhs.c + this.d * rhs.d;
double ee = this.a * rhs.e + this.c * rhs.f + this.e;
double ff = this.b * rhs.e + this.d * rhs.f + this.f;
return new AffineMatrix(aa, bb, cc, dd, ee, ff);
}
/**
* Multiply all matrices from left to right, ignoring any {@code null} matrix (for convenience).
*
* @param matrices the matrices
* @return the product
*/
public static AffineMatrix multiplyAll(AffineMatrix... matrices) {
AffineMatrix result = null;
for (AffineMatrix matrix : matrices) {
if (result == null) {
result = matrix;
} else {
result = result.multiply(matrix);
}
}
return result;
}
/**
* Invert the matrix.
*
* @return the inverse matrix (or {@code null} if not invertible).
*/
public AffineMatrix invert() {
// The 3x3 matrix M can be decomposed into M = M1 * M2:
// M1 M2
// / 1 0 e \ / a c 0 \
// | 0 1 f | * | b d 0 |
// \ 0 0 1 / \ 0 0 1 /
//
// The inverse of an invertible 2x2 matrix is given by this formula:
//
// / A B \⁻¹ 1 / D -B \
// \ C D / = ----- \ -C A /
// AD-BC
//
// Let B=c and C=b (to apply the general formula with the same letters).
//
// M⁻¹ = (M1 * M2)⁻¹ = M2⁻¹ * M1⁻¹
//
// M2⁻¹ M1⁻¹
// /----------------\
// 1 / d -B 0 \ / 1 0 -e \
// = ----- | -C a 0 | * | 0 1 -f |
// ad-BC \ 0 0 1 / \ 0 0 1 /
//
// With the original letters:
//
// 1 / d -c 0 \ / 1 0 -e \
// M⁻¹ = ----- | -b a 0 | * | 0 1 -f |
// ad-cb \ 0 0 1 / \ 0 0 1 /
//
// 1 / d -c cf-de \
// = ----- | -b a be-af |
// ad-cb \ 0 0 1 /
double det = a * d - c * b;
if (det == 0) {
// Not invertible
return null;
}
double aa = d / det;
double bb = -b / det;
double cc = -c / det;
double dd = a / det;
double ee = (c * f - d * e) / det;
double ff = (b * e - a * f) / det;
return new AffineMatrix(aa, bb, cc, dd, ee, ff);
}
/**
* Return this transform applied from the center (0.5, 0.5).
*
* @return the resulting matrix
*/
public AffineMatrix fromCenter() {
return translate(0.5, 0.5).multiply(this).multiply(translate(-0.5, -0.5));
}
/**
* Return this transform with the specified aspect ratio.
*
* @param ar the aspect ratio
* @return the resulting matrix
*/
public AffineMatrix withAspectRatio(double ar) {
return scale(1 / ar, 1).multiply(this).multiply(scale(ar, 1));
}
/**
* Return this transform with the specified aspect ratio.
*
* @param size the size describing the aspect ratio
* @return the transform
*/
public AffineMatrix withAspectRatio(Size size) {
double ar = (double) size.getWidth() / size.getHeight();
return withAspectRatio(ar);
}
/**
* Return a translation matrix.
*
* @param x the horizontal translation
* @param y the vertical translation
* @return the matrix
*/
public static AffineMatrix translate(double x, double y) {
return new AffineMatrix(1, 0, 0, 1, x, y);
}
/**
* Return a scaling matrix.
*
* @param x the horizontal scaling
* @param y the vertical scaling
* @return the matrix
*/
public static AffineMatrix scale(double x, double y) {
return new AffineMatrix(x, 0, 0, y, 0, 0);
}
/**
* Return a scaling matrix.
*
* @param from the source size
* @param to the destination size
* @return the matrix
*/
public static AffineMatrix scale(Size from, Size to) {
double scaleX = (double) to.getWidth() / from.getWidth();
double scaleY = (double) to.getHeight() / from.getHeight();
return scale(scaleX, scaleY);
}
/**
* Return a matrix applying a "reframing" (cropping a rectangle).
* <p/>
* <code>(x, y)</code> is the bottom-left corner, <code>(w, h)</code> is the size of the rectangle.
*
* @param x horizontal coordinate (increasing to the right)
* @param y vertical coordinate (increasing upwards)
* @param w width
* @param h height
* @return the matrix
*/
public static AffineMatrix reframe(double x, double y, double w, double h) {
if (w == 0 || h == 0) {
throw new IllegalArgumentException("Cannot reframe to an empty area: " + w + "x" + h);
}
return scale(1 / w, 1 / h).multiply(translate(-x, -y));
}
/**
* Return an orthogonal rotation matrix.
*
* @param ccwRotation the counter-clockwise rotation
* @return the matrix
*/
public static AffineMatrix rotateOrtho(int ccwRotation) {
switch (ccwRotation) {
case 0:
return IDENTITY;
case 1:
// 90° counter-clockwise
return new AffineMatrix(0, 1, -1, 0, 1, 0);
case 2:
// 180°
return new AffineMatrix(-1, 0, 0, -1, 1, 1);
case 3:
// 90° clockwise
return new AffineMatrix(0, -1, 1, 0, 0, 1);
default:
throw new IllegalArgumentException("Invalid rotation: " + ccwRotation);
}
}
/**
* Return a horizontal flip matrix.
*
* @return the matrix
*/
public static AffineMatrix hflip() {
return new AffineMatrix(-1, 0, 0, 1, 1, 0);
}
/**
* Return a vertical flip matrix.
*
* @return the matrix
*/
public static AffineMatrix vflip() {
return new AffineMatrix(1, 0, 0, -1, 0, 1);
}
/**
* Return a rotation matrix.
*
* @param ccwDegrees the angle, in degrees (counter-clockwise)
* @return the matrix
*/
public static AffineMatrix rotate(double ccwDegrees) {
double radians = Math.toRadians(ccwDegrees);
double cos = Math.cos(radians);
double sin = Math.sin(radians);
return new AffineMatrix(cos, sin, -sin, cos, 0, 0);
}
/**
* Export this affine transform to a 4x4 column-major order matrix.
*
* @param matrix output 4x4 matrix
*/
public void to4x4(float[] matrix) {
// matrix is a 4x4 matrix in column-major order
// Column 0
matrix[0] = (float) a;
matrix[1] = (float) b;
matrix[2] = 0;
matrix[3] = 0;
// Column 1
matrix[4] = (float) c;
matrix[5] = (float) d;
matrix[6] = 0;
matrix[7] = 0;
// Column 2
matrix[8] = 0;
matrix[9] = 0;
matrix[10] = 1;
matrix[11] = 0;
// Column 3
matrix[12] = (float) e;
matrix[13] = (float) f;
matrix[14] = 0;
matrix[15] = 1;
}
/**
* Export this affine transform to a 4x4 column-major order matrix.
*
* @return 4x4 matrix
*/
public float[] to4x4() {
float[] matrix = new float[16];
to4x4(matrix);
return matrix;
}
}
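
To make the NDC/pixel round trip used by PositionMapper.create() above concrete, here is a sketch (sizes, the filter transform and the point are illustrative) composing ndcToPixels, a filter transform and ndcFromPixels, then applying the result to a video coordinate.

// Sketch using only the AffineMatrix API shown above; values are arbitrary.
Size videoSize = new Size(464, 1024);    // size of the encoded video
Size targetSize = new Size(1080, 2400);  // size expected by input injection

AffineMatrix filterTransform = AffineMatrix.rotateOrtho(0); // identity here; could be any filter inverse
AffineMatrix transform = AffineMatrix.ndcToPixels(targetSize)
        .multiply(filterTransform)
        .multiply(AffineMatrix.ndcFromPixels(videoSize));

Point video = new Point(232, 512);       // center of the video
Point device = transform.apply(video);   // (540, 1200), the center of the target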

View File

@ -236,7 +236,7 @@ public final class LogUtils {
} else {
builder.append("\n ").append(String.format("%" + column + "s", " "));
}
builder.append(" ").append(app.getPackageName());
builder.append(" [").append(app.getPackageName()).append(']');
}
return builder.toString();

View File

@ -1,17 +1,9 @@
package com.genymobile.scrcpy.video;
import com.genymobile.scrcpy.AndroidVersions;
import com.genymobile.scrcpy.Options;
import com.genymobile.scrcpy.device.ConfigurationException;
import com.genymobile.scrcpy.device.Orientation;
import com.genymobile.scrcpy.device.Size;
import com.genymobile.scrcpy.opengl.AffineOpenGLFilter;
import com.genymobile.scrcpy.opengl.OpenGLFilter;
import com.genymobile.scrcpy.opengl.OpenGLRunner;
import com.genymobile.scrcpy.util.AffineMatrix;
import com.genymobile.scrcpy.util.HandlerExecutor;
import com.genymobile.scrcpy.util.Ln;
import com.genymobile.scrcpy.util.LogUtils;
import com.genymobile.scrcpy.wrappers.ServiceManager;
import android.annotation.SuppressLint;
@ -46,13 +38,6 @@ import java.util.stream.Stream;
public class CameraCapture extends SurfaceCapture {
public static final float[] VFLIP_MATRIX = {
1, 0, 0, 0, // column 1
0, -1, 0, 0, // column 2
0, 0, 1, 0, // column 3
0, 1, 0, 1, // column 4
};
private final String explicitCameraId;
private final CameraFacing cameraFacing;
private final Size explicitSize;
@ -60,16 +45,9 @@ public class CameraCapture extends SurfaceCapture {
private final CameraAspectRatio aspectRatio;
private final int fps;
private final boolean highSpeed;
private final Rect crop;
private final Orientation captureOrientation;
private final float angle;
private String cameraId;
private Size captureSize;
private Size videoSize; // after OpenGL transforms
private AffineMatrix transform;
private OpenGLRunner glRunner;
private Size size;
private HandlerThread cameraThread;
private Handler cameraHandler;
@ -78,22 +56,19 @@ public class CameraCapture extends SurfaceCapture {
private final AtomicBoolean disconnected = new AtomicBoolean();
public CameraCapture(Options options) {
this.explicitCameraId = options.getCameraId();
this.cameraFacing = options.getCameraFacing();
this.explicitSize = options.getCameraSize();
this.maxSize = options.getMaxSize();
this.aspectRatio = options.getCameraAspectRatio();
this.fps = options.getCameraFps();
this.highSpeed = options.getCameraHighSpeed();
this.crop = options.getCrop();
this.captureOrientation = options.getCaptureOrientation();
assert captureOrientation != null;
this.angle = options.getAngle();
public CameraCapture(String explicitCameraId, CameraFacing cameraFacing, Size explicitSize, int maxSize, CameraAspectRatio aspectRatio, int fps,
boolean highSpeed) {
this.explicitCameraId = explicitCameraId;
this.cameraFacing = cameraFacing;
this.explicitSize = explicitSize;
this.maxSize = maxSize;
this.aspectRatio = aspectRatio;
this.fps = fps;
this.highSpeed = highSpeed;
}
@Override
protected void init() throws ConfigurationException, IOException {
public void init() throws IOException {
cameraThread = new HandlerThread("camera");
cameraThread.start();
cameraHandler = new Handler(cameraThread.getLooper());
@ -102,7 +77,12 @@ public class CameraCapture extends SurfaceCapture {
try {
cameraId = selectCamera(explicitCameraId, cameraFacing);
if (cameraId == null) {
throw new ConfigurationException("No matching camera found");
throw new IOException("No matching camera found");
}
size = selectSize(cameraId, explicitSize, maxSize, aspectRatio, highSpeed);
if (size == null) {
throw new IOException("Could not select camera size");
}
Ln.i("Using camera '" + cameraId + "'");
@ -112,45 +92,14 @@ public class CameraCapture extends SurfaceCapture {
}
}
@Override
public void prepare() throws IOException {
try {
captureSize = selectSize(cameraId, explicitSize, maxSize, aspectRatio, highSpeed);
if (captureSize == null) {
throw new IOException("Could not select camera size");
}
} catch (CameraAccessException e) {
throw new IOException(e);
}
VideoFilter filter = new VideoFilter(captureSize);
if (crop != null) {
filter.addCrop(crop, false);
}
if (captureOrientation != Orientation.Orient0) {
filter.addOrientation(captureOrientation);
}
filter.addAngle(angle);
transform = filter.getInverseTransform();
videoSize = filter.getOutputSize().limit(maxSize).round8();
}
private static String selectCamera(String explicitCameraId, CameraFacing cameraFacing) throws CameraAccessException, ConfigurationException {
CameraManager cameraManager = ServiceManager.getCameraManager();
String[] cameraIds = cameraManager.getCameraIdList();
private static String selectCamera(String explicitCameraId, CameraFacing cameraFacing) throws CameraAccessException {
if (explicitCameraId != null) {
if (!Arrays.asList(cameraIds).contains(explicitCameraId)) {
Ln.e("Camera with id " + explicitCameraId + " not found\n" + LogUtils.buildCameraListMessage(false));
throw new ConfigurationException("Camera id not found");
}
return explicitCameraId;
}
CameraManager cameraManager = ServiceManager.getCameraManager();
String[] cameraIds = cameraManager.getCameraIdList();
if (cameraFacing == null) {
// Use the first one
return cameraIds.length > 0 ? cameraIds[0] : null;
@ -252,33 +201,15 @@ public class CameraCapture extends SurfaceCapture {
@Override
public void start(Surface surface) throws IOException {
if (transform != null) {
assert glRunner == null;
OpenGLFilter glFilter = new AffineOpenGLFilter(transform);
// The transform matrix returned by SurfaceTexture is incorrect for camera capture (it often contains an additional unexpected 90°
// rotation). Use a vertical flip transform matrix instead.
glRunner = new OpenGLRunner(glFilter, VFLIP_MATRIX);
surface = glRunner.start(captureSize, videoSize, surface);
}
try {
CameraCaptureSession session = createCaptureSession(cameraDevice, surface);
CaptureRequest request = createCaptureRequest(surface);
setRepeatingRequest(session, request);
} catch (CameraAccessException | InterruptedException e) {
stop();
throw new IOException(e);
}
}
@Override
public void stop() {
if (glRunner != null) {
glRunner.stopAndRelease();
glRunner = null;
}
}
@Override
public void release() {
if (cameraDevice != null) {
@ -291,7 +222,7 @@ public class CameraCapture extends SurfaceCapture {
@Override
public Size getSize() {
return videoSize;
return size;
}
@Override
@ -301,7 +232,13 @@ public class CameraCapture extends SurfaceCapture {
}
this.maxSize = maxSize;
return true;
try {
size = selectSize(cameraId, null, maxSize, aspectRatio, highSpeed);
return size != null;
} catch (CameraAccessException e) {
Ln.w("Could not select camera size", e);
return false;
}
}
@SuppressLint("MissingPermission")
@ -319,7 +256,7 @@ public class CameraCapture extends SurfaceCapture {
public void onDisconnected(CameraDevice camera) {
Ln.w("Camera disconnected");
disconnected.set(true);
invalidate();
requestReset();
}
@Override
@ -418,9 +355,4 @@ public class CameraCapture extends SurfaceCapture {
public boolean isClosed() {
return disconnected.get();
}
@Override
public void requestInvalidate() {
// do nothing (the user could not request a reset anyway for now, since there is no controller for camera mirroring)
}
}
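
A hedged sketch (using only the VideoFilter calls visible in this hunk; the crop, orientation, angle and size values are invented) of the filter chain that prepare() assembles before deriving the OpenGL transform and the final encoded size.

// Fragment with assumed example values; VideoFilter is used exactly as in the hunk above.
Size captureSize = new Size(1920, 1080);
VideoFilter filter = new VideoFilter(captureSize);
filter.addCrop(new Rect(0, 0, 1280, 720), false);   // optional crop
filter.addOrientation(Orientation.Orient90);        // optional capture orientation
filter.addAngle(0f);                                // optional rotation angle
AffineMatrix transform = filter.getInverseTransform();         // fed to AffineOpenGLFilter
Size videoSize = filter.getOutputSize().limit(1920).round8();  // final encoded size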

View File

@ -1,37 +0,0 @@
package com.genymobile.scrcpy.video;
import android.media.MediaCodec;
import java.util.concurrent.atomic.AtomicBoolean;
public class CaptureReset implements SurfaceCapture.CaptureListener {
private final AtomicBoolean reset = new AtomicBoolean();
// Current instance of MediaCodec to "interrupt" on reset
private MediaCodec runningMediaCodec;
public boolean consumeReset() {
return reset.getAndSet(false);
}
public synchronized void reset() {
reset.set(true);
if (runningMediaCodec != null) {
try {
runningMediaCodec.signalEndOfInputStream();
} catch (IllegalStateException e) {
// ignore
}
}
}
public synchronized void setRunningMediaCodec(MediaCodec runningMediaCodec) {
this.runningMediaCodec = runningMediaCodec;
}
@Override
public void onInvalidated() {
reset();
}
}

View File

@ -1,139 +0,0 @@
package com.genymobile.scrcpy.video;
import com.genymobile.scrcpy.AndroidVersions;
import com.genymobile.scrcpy.device.Device;
import com.genymobile.scrcpy.device.DisplayInfo;
import com.genymobile.scrcpy.device.Size;
import com.genymobile.scrcpy.util.Ln;
import com.genymobile.scrcpy.wrappers.DisplayManager;
import com.genymobile.scrcpy.wrappers.DisplayWindowListener;
import com.genymobile.scrcpy.wrappers.ServiceManager;
import android.content.res.Configuration;
import android.os.Build;
import android.os.Handler;
import android.os.HandlerThread;
import android.view.IDisplayWindowListener;
public class DisplaySizeMonitor {
public interface Listener {
void onDisplaySizeChanged();
}
// On Android 14, DisplayListener may be broken (it never sends events). This is fixed in recent Android 14 upgrades, but we can't really
// detect it directly, so register a DisplayWindowListener (introduced in Android 11) to listen to configuration changes instead.
private static final boolean USE_DEFAULT_METHOD = Build.VERSION.SDK_INT != AndroidVersions.API_34_ANDROID_14;
private DisplayManager.DisplayListenerHandle displayListenerHandle;
private HandlerThread handlerThread;
private IDisplayWindowListener displayWindowListener;
private int displayId = Device.DISPLAY_ID_NONE;
private Size sessionDisplaySize;
private Listener listener;
public void start(int displayId, Listener listener) {
// Once started, the listener and the displayId must never change
assert listener != null;
this.listener = listener;
assert this.displayId == Device.DISPLAY_ID_NONE;
this.displayId = displayId;
if (USE_DEFAULT_METHOD) {
handlerThread = new HandlerThread("DisplayListener");
handlerThread.start();
Handler handler = new Handler(handlerThread.getLooper());
displayListenerHandle = ServiceManager.getDisplayManager().registerDisplayListener(eventDisplayId -> {
if (Ln.isEnabled(Ln.Level.VERBOSE)) {
Ln.v("DisplaySizeMonitor: onDisplayChanged(" + eventDisplayId + ")");
}
if (eventDisplayId == displayId) {
checkDisplaySizeChanged();
}
}, handler);
} else {
displayWindowListener = new DisplayWindowListener() {
@Override
public void onDisplayConfigurationChanged(int eventDisplayId, Configuration newConfig) {
if (Ln.isEnabled(Ln.Level.VERBOSE)) {
Ln.v("DisplaySizeMonitor: onDisplayConfigurationChanged(" + eventDisplayId + ")");
}
if (eventDisplayId == displayId) {
checkDisplaySizeChanged();
}
}
};
ServiceManager.getWindowManager().registerDisplayWindowListener(displayWindowListener);
}
}
/**
* Stop and release the monitor.
* <p/>
* It must not be used anymore.
* It is ok to call this method even if {@link #start(int, Listener)} was not called.
*/
public void stopAndRelease() {
if (USE_DEFAULT_METHOD) {
// displayListenerHandle may be null if registration failed
if (displayListenerHandle != null) {
ServiceManager.getDisplayManager().unregisterDisplayListener(displayListenerHandle);
displayListenerHandle = null;
}
if (handlerThread != null) {
handlerThread.quitSafely();
}
} else if (displayWindowListener != null) {
ServiceManager.getWindowManager().unregisterDisplayWindowListener(displayWindowListener);
}
}
private synchronized Size getSessionDisplaySize() {
return sessionDisplaySize;
}
public synchronized void setSessionDisplaySize(Size sessionDisplaySize) {
this.sessionDisplaySize = sessionDisplaySize;
}
private void checkDisplaySizeChanged() {
DisplayInfo di = ServiceManager.getDisplayManager().getDisplayInfo(displayId);
if (di == null) {
Ln.w("DisplayInfo for " + displayId + " cannot be retrieved");
// We can't compare with the current size, so reset unconditionally
if (Ln.isEnabled(Ln.Level.VERBOSE)) {
Ln.v("DisplaySizeMonitor: requestReset(): " + getSessionDisplaySize() + " -> (unknown)");
}
setSessionDisplaySize(null);
listener.onDisplaySizeChanged();
} else {
Size size = di.getSize();
// The field is hidden on purpose, to read it with synchronization
@SuppressWarnings("checkstyle:HiddenField")
Size sessionDisplaySize = getSessionDisplaySize(); // synchronized
// .equals() also works if sessionDisplaySize == null
if (!size.equals(sessionDisplaySize)) {
// Reset only if the size is different
if (Ln.isEnabled(Ln.Level.VERBOSE)) {
Ln.v("DisplaySizeMonitor: requestReset(): " + sessionDisplaySize + " -> " + size);
}
// Set the new size immediately, so that a future onDisplayChanged() event called before the asynchronous prepare()
// considers that the current size is the requested size (to avoid a duplicate requestReset())
setSessionDisplaySize(size);
listener.onDisplaySizeChanged();
} else if (Ln.isEnabled(Ln.Level.VERBOSE)) {
Ln.v("DisplaySizeMonitor: Size not changed (" + size + "): do not requestReset()");
}
}
}
}
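
A usage sketch of the monitor API shown above (the capture class, displayId and invalidate() callback are assumptions for illustration): the capture registers a listener, records the size it is currently capturing, and releases the monitor on exit.

// Hypothetical capture-side usage of DisplaySizeMonitor.
DisplaySizeMonitor monitor = new DisplaySizeMonitor();
monitor.start(displayId, () -> {
    // Called when the monitored display size changed: request a new capture session
    invalidate();
});
// When (re)creating the session, record the size being captured:
monitor.setSessionDisplaySize(new Size(1080, 2400));
// On release:
monitor.stopAndRelease();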

View File

@ -1,31 +1,22 @@
package com.genymobile.scrcpy.video;
import com.genymobile.scrcpy.AndroidVersions;
import com.genymobile.scrcpy.Options;
import com.genymobile.scrcpy.control.PositionMapper;
import com.genymobile.scrcpy.device.DisplayInfo;
import com.genymobile.scrcpy.device.NewDisplay;
import com.genymobile.scrcpy.device.Orientation;
import com.genymobile.scrcpy.device.Size;
import com.genymobile.scrcpy.opengl.AffineOpenGLFilter;
import com.genymobile.scrcpy.opengl.OpenGLFilter;
import com.genymobile.scrcpy.opengl.OpenGLRunner;
import com.genymobile.scrcpy.util.AffineMatrix;
import com.genymobile.scrcpy.util.Ln;
import com.genymobile.scrcpy.wrappers.ServiceManager;
import android.graphics.Rect;
import android.hardware.display.DisplayManager;
import android.hardware.display.VirtualDisplay;
import android.os.Build;
import android.view.Surface;
import java.io.IOException;
public class NewDisplayCapture extends SurfaceCapture {
// Internal fields copied from android.hardware.display.DisplayManager
private static final int VIRTUAL_DISPLAY_FLAG_PUBLIC = android.hardware.display.DisplayManager.VIRTUAL_DISPLAY_FLAG_PUBLIC;
private static final int VIRTUAL_DISPLAY_FLAG_OWN_CONTENT_ONLY = android.hardware.display.DisplayManager.VIRTUAL_DISPLAY_FLAG_OWN_CONTENT_ONLY;
private static final int VIRTUAL_DISPLAY_FLAG_SUPPORTS_TOUCH = 1 << 6;
private static final int VIRTUAL_DISPLAY_FLAG_ROTATES_WITH_CONTENT = 1 << 7;
private static final int VIRTUAL_DISPLAY_FLAG_DESTROY_CONTENT_ON_REMOVAL = 1 << 8;
@ -40,53 +31,28 @@ public class NewDisplayCapture extends SurfaceCapture {
private final VirtualDisplayListener vdListener;
private final NewDisplay newDisplay;
private final DisplaySizeMonitor displaySizeMonitor = new DisplaySizeMonitor();
private AffineMatrix displayTransform;
private AffineMatrix eventTransform;
private OpenGLRunner glRunner;
private Size mainDisplaySize;
private int mainDisplayDpi;
private int maxSize;
private final Rect crop;
private final boolean captureOrientationLocked;
private final Orientation captureOrientation;
private final float angle;
private final boolean vdSystemDecorations;
private int maxSize; // only used if newDisplay.getSize() != null
private VirtualDisplay virtualDisplay;
private Size videoSize;
private Size displaySize; // the logical size of the display (including rotation)
private Size physicalSize; // the physical size of the display (without rotation)
private Size size;
private int dpi;
public NewDisplayCapture(VirtualDisplayListener vdListener, Options options) {
public NewDisplayCapture(VirtualDisplayListener vdListener, NewDisplay newDisplay, int maxSize) {
this.vdListener = vdListener;
this.newDisplay = options.getNewDisplay();
assert newDisplay != null;
this.maxSize = options.getMaxSize();
this.crop = options.getCrop();
assert options.getCaptureOrientationLock() != null;
this.captureOrientationLocked = options.getCaptureOrientationLock() != Orientation.Lock.Unlocked;
this.captureOrientation = options.getCaptureOrientation();
assert captureOrientation != null;
this.angle = options.getAngle();
this.vdSystemDecorations = options.getVDSystemDecorations();
this.newDisplay = newDisplay;
this.maxSize = maxSize;
}
@Override
protected void init() {
displaySize = newDisplay.getSize();
public void init() {
size = newDisplay.getSize();
dpi = newDisplay.getDpi();
if (displaySize == null || dpi == 0) {
if (size == null || dpi == 0) {
DisplayInfo displayInfo = ServiceManager.getDisplayManager().getDisplayInfo(0);
if (displayInfo != null) {
mainDisplaySize = displayInfo.getSize();
if ((displayInfo.getRotation() % 2) != 0) {
mainDisplaySize = mainDisplaySize.rotate(); // Use the natural device orientation (at rotation 0), not the current one
}
mainDisplayDpi = displayInfo.getDpi();
} else {
Ln.w("Main display not found, fallback to 1920x1080 240dpi");
@ -98,135 +64,58 @@ public class NewDisplayCapture extends SurfaceCapture {
@Override
public void prepare() {
int displayRotation;
if (virtualDisplay == null) {
if (!newDisplay.hasExplicitSize()) {
displaySize = mainDisplaySize;
}
if (!newDisplay.hasExplicitDpi()) {
dpi = scaleDpi(mainDisplaySize, mainDisplayDpi, displaySize);
}
videoSize = displaySize;
displayRotation = 0;
// Set the current display size to avoid an unnecessary call to invalidate()
displaySizeMonitor.setSessionDisplaySize(displaySize);
} else {
DisplayInfo displayInfo = ServiceManager.getDisplayManager().getDisplayInfo(virtualDisplay.getDisplay().getDisplayId());
displaySize = displayInfo.getSize();
dpi = displayInfo.getDpi();
displayRotation = displayInfo.getRotation();
if (!newDisplay.hasExplicitSize()) {
size = ScreenInfo.computeVideoSize(mainDisplaySize.getWidth(), mainDisplaySize.getHeight(), maxSize);
}
VideoFilter filter = new VideoFilter(displaySize);
if (crop != null) {
boolean transposed = (displayRotation % 2) != 0;
filter.addCrop(crop, transposed);
if (!newDisplay.hasExplicitDpi()) {
dpi = scaleDpi(mainDisplaySize, mainDisplayDpi, size);
}
filter.addOrientation(displayRotation, captureOrientationLocked, captureOrientation);
filter.addAngle(angle);
Size filteredSize = filter.getOutputSize();
if (!filteredSize.isMultipleOf8() || (maxSize != 0 && filteredSize.getMax() > maxSize)) {
if (maxSize != 0) {
filteredSize = filteredSize.limit(maxSize);
}
filteredSize = filteredSize.round8();
filter.addResize(filteredSize);
}
eventTransform = filter.getInverseTransform();
// DisplayInfo gives the oriented size (so videoSize includes the display rotation)
videoSize = filter.getOutputSize();
// But the virtual display video always remains in the origin orientation (the video itself is not rotated, so it must be rotated manually).
// This additional display rotation must not be included in the input events transform (the expected coordinates are already in the
// physical display size)
if ((displayRotation % 2) == 0) {
physicalSize = displaySize;
} else {
physicalSize = displaySize.rotate();
}
VideoFilter displayFilter = new VideoFilter(physicalSize);
displayFilter.addRotation(displayRotation);
AffineMatrix displayRotationMatrix = displayFilter.getInverseTransform();
// Take care of multiplication order:
// displayTransform = (FILTER_MATRIX * DISPLAY_FILTER_MATRIX)⁻¹
// = DISPLAY_FILTER_MATRIX⁻¹ * FILTER_MATRIX⁻¹
// = displayRotationMatrix * eventTransform
displayTransform = AffineMatrix.multiplyAll(displayRotationMatrix, eventTransform);
}
public void startNew(Surface surface) {
@Override
public void start(Surface surface) {
if (virtualDisplay != null) {
virtualDisplay.release();
virtualDisplay = null;
}
int virtualDisplayId;
try {
int flags = VIRTUAL_DISPLAY_FLAG_PUBLIC
| VIRTUAL_DISPLAY_FLAG_OWN_CONTENT_ONLY
int flags = DisplayManager.VIRTUAL_DISPLAY_FLAG_PUBLIC
| DisplayManager.VIRTUAL_DISPLAY_FLAG_OWN_CONTENT_ONLY
| VIRTUAL_DISPLAY_FLAG_SUPPORTS_TOUCH
| VIRTUAL_DISPLAY_FLAG_ROTATES_WITH_CONTENT
| VIRTUAL_DISPLAY_FLAG_DESTROY_CONTENT_ON_REMOVAL;
if (vdSystemDecorations) {
flags |= VIRTUAL_DISPLAY_FLAG_SHOULD_SHOW_SYSTEM_DECORATIONS;
}
| VIRTUAL_DISPLAY_FLAG_DESTROY_CONTENT_ON_REMOVAL
| VIRTUAL_DISPLAY_FLAG_SHOULD_SHOW_SYSTEM_DECORATIONS;
if (Build.VERSION.SDK_INT >= AndroidVersions.API_33_ANDROID_13) {
flags |= VIRTUAL_DISPLAY_FLAG_TRUSTED
| VIRTUAL_DISPLAY_FLAG_OWN_DISPLAY_GROUP
| VIRTUAL_DISPLAY_FLAG_ALWAYS_UNLOCKED
| VIRTUAL_DISPLAY_FLAG_TOUCH_FEEDBACK_DISABLED;
if (Build.VERSION.SDK_INT >= AndroidVersions.API_34_ANDROID_14) {
flags |= VIRTUAL_DISPLAY_FLAG_OWN_FOCUS
| VIRTUAL_DISPLAY_FLAG_DEVICE_DISPLAY_GROUP;
flags |= VIRTUAL_DISPLAY_FLAG_OWN_FOCUS
| VIRTUAL_DISPLAY_FLAG_DEVICE_DISPLAY_GROUP;
}
}
virtualDisplay = ServiceManager.getDisplayManager()
.createNewVirtualDisplay("scrcpy", displaySize.getWidth(), displaySize.getHeight(), dpi, surface, flags);
.createNewVirtualDisplay("scrcpy", size.getWidth(), size.getHeight(), dpi, surface, flags);
virtualDisplayId = virtualDisplay.getDisplay().getDisplayId();
Ln.i("New display: " + displaySize.getWidth() + "x" + displaySize.getHeight() + "/" + dpi + " (id=" + virtualDisplayId + ")");
displaySizeMonitor.start(virtualDisplayId, this::invalidate);
Ln.i("New display: " + size.getWidth() + "x" + size.getHeight() + "/" + dpi + " (id=" + virtualDisplayId + ")");
} catch (Exception e) {
Ln.e("Could not create display", e);
throw new AssertionError("Could not create display");
}
}
@Override
public void start(Surface surface) throws IOException {
if (displayTransform != null) {
assert glRunner == null;
OpenGLFilter glFilter = new AffineOpenGLFilter(displayTransform);
glRunner = new OpenGLRunner(glFilter);
surface = glRunner.start(physicalSize, videoSize, surface);
}
if (virtualDisplay == null) {
startNew(surface);
} else {
virtualDisplay.setSurface(surface);
}
if (vdListener != null) {
PositionMapper positionMapper = PositionMapper.create(videoSize, eventTransform, displaySize);
vdListener.onNewVirtualDisplay(virtualDisplay.getDisplay().getDisplayId(), positionMapper);
}
}
@Override
public void stop() {
if (glRunner != null) {
glRunner.stopAndRelease();
glRunner = null;
virtualDisplayId = virtualDisplay.getDisplay().getDisplayId();
Rect contentRect = new Rect(0, 0, size.getWidth(), size.getHeight());
PositionMapper positionMapper = new PositionMapper(size, contentRect, 0);
vdListener.onNewVirtualDisplay(virtualDisplayId, positionMapper);
}
}
@Override
public void release() {
displaySizeMonitor.stopAndRelease();
if (virtualDisplay != null) {
virtualDisplay.release();
virtualDisplay = null;
@ -235,11 +124,16 @@ public class NewDisplayCapture extends SurfaceCapture {
@Override
public synchronized Size getSize() {
return videoSize;
return size;
}
@Override
public synchronized boolean setMaxSize(int newMaxSize) {
if (newDisplay.hasExplicitSize()) {
// Cannot retry with a different size if the display size was explicitly provided
return false;
}
maxSize = newMaxSize;
return true;
}
@ -249,9 +143,4 @@ public class NewDisplayCapture extends SurfaceCapture {
int num = size.getMax();
return initialDpi * num / den;
}
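// A worked example of the proportional scaling above, with hypothetical values (den, whose assignment
// sits above this hunk, is assumed to be the corresponding dimension of the main display):
// main display 1920x1080 at 240 dpi, new display 1280x720 -> dpi = 240 * 1280 / 1920 = 160.
static int scaleDpiExample() {
    int initialDpi = 240; // main display dpi
    int den = 1920;       // max side of the main display
    int num = 1280;       // max side of the new display
    return initialDpi * num / den; // 160
}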
@Override
public void requestInvalidate() {
invalidate();
}
}

View File

@ -1,67 +1,114 @@
package com.genymobile.scrcpy.video;
import com.genymobile.scrcpy.AndroidVersions;
import com.genymobile.scrcpy.Options;
import com.genymobile.scrcpy.control.PositionMapper;
import com.genymobile.scrcpy.device.ConfigurationException;
import com.genymobile.scrcpy.device.Device;
import com.genymobile.scrcpy.device.DisplayInfo;
import com.genymobile.scrcpy.device.Orientation;
import com.genymobile.scrcpy.device.Size;
import com.genymobile.scrcpy.opengl.AffineOpenGLFilter;
import com.genymobile.scrcpy.opengl.OpenGLFilter;
import com.genymobile.scrcpy.opengl.OpenGLRunner;
import com.genymobile.scrcpy.util.AffineMatrix;
import com.genymobile.scrcpy.util.Ln;
import com.genymobile.scrcpy.util.LogUtils;
import com.genymobile.scrcpy.wrappers.DisplayManager;
import com.genymobile.scrcpy.wrappers.ServiceManager;
import com.genymobile.scrcpy.wrappers.SurfaceControl;
import android.graphics.Rect;
import android.hardware.display.VirtualDisplay;
import android.os.Build;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.IBinder;
import android.view.IDisplayFoldListener;
import android.view.IRotationWatcher;
import android.view.Surface;
import java.io.IOException;
public class ScreenCapture extends SurfaceCapture {
private final VirtualDisplayListener vdListener;
private final int displayId;
private int maxSize;
private final Rect crop;
private Orientation.Lock captureOrientationLock;
private Orientation captureOrientation;
private final float angle;
private final int lockVideoOrientation;
private DisplayInfo displayInfo;
private Size videoSize;
private ScreenInfo screenInfo;
private final DisplaySizeMonitor displaySizeMonitor = new DisplaySizeMonitor();
// Source display size (before resizing/crop) for the current session
private Size sessionDisplaySize;
private IBinder display;
private VirtualDisplay virtualDisplay;
private AffineMatrix transform;
private OpenGLRunner glRunner;
private DisplayManager.DisplayListenerHandle displayListenerHandle;
private HandlerThread handlerThread;
public ScreenCapture(VirtualDisplayListener vdListener, Options options) {
// On Android 14, the DisplayListener may be broken (it never sends events). This is fixed in recent Android 14 upgrades, but we can't really
// detect it directly, so register a RotationWatcher and a DisplayFoldListener as a fallback, until we receive the first event from
// DisplayListener (which proves that it works).
private boolean displayListenerWorks; // only accessed from the display listener thread
private IRotationWatcher rotationWatcher;
private IDisplayFoldListener displayFoldListener;
public ScreenCapture(VirtualDisplayListener vdListener, int displayId, int maxSize, Rect crop, int lockVideoOrientation) {
this.vdListener = vdListener;
this.displayId = options.getDisplayId();
assert displayId != Device.DISPLAY_ID_NONE;
this.maxSize = options.getMaxSize();
this.crop = options.getCrop();
this.captureOrientationLock = options.getCaptureOrientationLock();
this.captureOrientation = options.getCaptureOrientation();
assert captureOrientationLock != null;
assert captureOrientation != null;
this.angle = options.getAngle();
this.displayId = displayId;
this.maxSize = maxSize;
this.crop = crop;
this.lockVideoOrientation = lockVideoOrientation;
}
@Override
public void init() {
displaySizeMonitor.start(displayId, this::invalidate);
if (Build.VERSION.SDK_INT == AndroidVersions.API_34_ANDROID_14) {
registerDisplayListenerFallbacks();
}
handlerThread = new HandlerThread("DisplayListener");
handlerThread.start();
Handler handler = new Handler(handlerThread.getLooper());
displayListenerHandle = ServiceManager.getDisplayManager().registerDisplayListener(displayId -> {
if (Ln.isEnabled(Ln.Level.VERBOSE)) {
Ln.v("ScreenCapture: onDisplayChanged(" + displayId + ")");
}
if (Build.VERSION.SDK_INT == AndroidVersions.API_34_ANDROID_14) {
if (!displayListenerWorks) {
// On the first display listener event, we know it works, so we can unregister the fallbacks
displayListenerWorks = true;
unregisterDisplayListenerFallbacks();
}
}
if (this.displayId == displayId) {
DisplayInfo di = ServiceManager.getDisplayManager().getDisplayInfo(displayId);
if (di == null) {
Ln.w("DisplayInfo for " + displayId + " cannot be retrieved");
// We can't compare with the current size, so reset unconditionally
if (Ln.isEnabled(Ln.Level.VERBOSE)) {
Ln.v("ScreenCapture: requestReset(): " + getSessionDisplaySize() + " -> (unknown)");
}
setSessionDisplaySize(null);
requestReset();
} else {
Size size = di.getSize();
// The field is hidden on purpose, to read it with synchronization
@SuppressWarnings("checkstyle:HiddenField")
Size sessionDisplaySize = getSessionDisplaySize(); // synchronized
// .equals() also works if sessionDisplaySize == null
if (!size.equals(sessionDisplaySize)) {
// Reset only if the size is different
if (Ln.isEnabled(Ln.Level.VERBOSE)) {
Ln.v("ScreenCapture: requestReset(): " + sessionDisplaySize + " -> " + size);
}
// Set the new size immediately, so that a future onDisplayChanged() event called before the asynchronous prepare()
// considers that the current size is the requested size (to avoid a duplicate requestReset())
setSessionDisplaySize(size);
requestReset();
} else if (Ln.isEnabled(Ln.Level.VERBOSE)) {
Ln.v("ScreenCapture: Size not changed (" + size + "): do not requestReset()");
}
}
}
}, handler);
}
@Override
@ -76,32 +123,12 @@ public class ScreenCapture extends SurfaceCapture {
Ln.w("Display doesn't have FLAG_SUPPORTS_PROTECTED_BUFFERS flag, mirroring can be restricted");
}
Size displaySize = displayInfo.getSize();
displaySizeMonitor.setSessionDisplaySize(displaySize);
if (captureOrientationLock == Orientation.Lock.LockedInitial) {
// The user requested to lock the video orientation to the current orientation
captureOrientationLock = Orientation.Lock.LockedValue;
captureOrientation = Orientation.fromRotation(displayInfo.getRotation());
}
VideoFilter filter = new VideoFilter(displaySize);
if (crop != null) {
boolean transposed = (displayInfo.getRotation() % 2) != 0;
filter.addCrop(crop, transposed);
}
boolean locked = captureOrientationLock != Orientation.Lock.Unlocked;
filter.addOrientation(displayInfo.getRotation(), locked, captureOrientation);
filter.addAngle(angle);
transform = filter.getInverseTransform();
videoSize = filter.getOutputSize().limit(maxSize).round8();
setSessionDisplaySize(displayInfo.getSize());
screenInfo = ScreenInfo.computeScreenInfo(displayInfo.getRotation(), displayInfo.getSize(), crop, maxSize, lockVideoOrientation);
}
@Override
public void start(Surface surface) throws IOException {
public void start(Surface surface) {
if (display != null) {
SurfaceControl.destroyDisplay(display);
display = null;
@ -111,40 +138,31 @@ public class ScreenCapture extends SurfaceCapture {
virtualDisplay = null;
}
Size inputSize;
if (transform != null) {
// If there is a filter, it must receive the full display content
inputSize = displayInfo.getSize();
assert glRunner == null;
OpenGLFilter glFilter = new AffineOpenGLFilter(transform);
glRunner = new OpenGLRunner(glFilter);
surface = glRunner.start(inputSize, videoSize, surface);
} else {
// If there is no filter, the display must be rendered at target video size directly
inputSize = videoSize;
}
int virtualDisplayId;
PositionMapper positionMapper;
try {
Size videoSize = screenInfo.getVideoSize();
virtualDisplay = ServiceManager.getDisplayManager()
.createVirtualDisplay("scrcpy", inputSize.getWidth(), inputSize.getHeight(), displayId, surface);
.createVirtualDisplay("scrcpy", videoSize.getWidth(), videoSize.getHeight(), displayId, surface);
virtualDisplayId = virtualDisplay.getDisplay().getDisplayId();
// The positions are relative to the virtual display, not the original display (so use inputSize, not deviceSize!)
positionMapper = PositionMapper.create(videoSize, transform, inputSize);
Rect contentRect = new Rect(0, 0, videoSize.getWidth(), videoSize.getHeight());
// The positions are relative to the virtual display, not the original display
positionMapper = new PositionMapper(videoSize, contentRect, 0);
Ln.d("Display: using DisplayManager API");
} catch (Exception displayManagerException) {
try {
display = createDisplay();
Size deviceSize = displayInfo.getSize();
Rect contentRect = screenInfo.getContentRect();
// does not include the locked video orientation
Rect unlockedVideoRect = screenInfo.getUnlockedVideoSize().toRect();
int videoRotation = screenInfo.getVideoRotation();
int layerStack = displayInfo.getLayerStack();
setDisplaySurface(display, surface, deviceSize.toRect(), inputSize.toRect(), layerStack);
setDisplaySurface(display, surface, videoRotation, contentRect, unlockedVideoRect, layerStack);
virtualDisplayId = displayId;
positionMapper = PositionMapper.create(videoSize, transform, deviceSize);
positionMapper = PositionMapper.from(screenInfo);
Ln.d("Display: using SurfaceControl API");
} catch (Exception surfaceControlException) {
Ln.e("Could not create display using DisplayManager", displayManagerException);
@ -158,17 +176,20 @@ public class ScreenCapture extends SurfaceCapture {
}
}
@Override
public void stop() {
if (glRunner != null) {
glRunner.stopAndRelease();
glRunner = null;
}
}
@Override
public void release() {
displaySizeMonitor.stopAndRelease();
if (Build.VERSION.SDK_INT == AndroidVersions.API_34_ANDROID_14) {
unregisterDisplayListenerFallbacks();
}
handlerThread.quitSafely();
handlerThread = null;
// displayListenerHandle may be null if registration failed
if (displayListenerHandle != null) {
ServiceManager.getDisplayManager().unregisterDisplayListener(displayListenerHandle);
displayListenerHandle = null;
}
if (display != null) {
SurfaceControl.destroyDisplay(display);
@ -182,7 +203,7 @@ public class ScreenCapture extends SurfaceCapture {
@Override
public Size getSize() {
return videoSize;
return screenInfo.getVideoSize();
}
@Override
@ -199,19 +220,75 @@ public class ScreenCapture extends SurfaceCapture {
return SurfaceControl.createDisplay("scrcpy", secure);
}
private static void setDisplaySurface(IBinder display, Surface surface, Rect deviceRect, Rect displayRect, int layerStack) {
private static void setDisplaySurface(IBinder display, Surface surface, int orientation, Rect deviceRect, Rect displayRect, int layerStack) {
SurfaceControl.openTransaction();
try {
SurfaceControl.setDisplaySurface(display, surface);
SurfaceControl.setDisplayProjection(display, 0, deviceRect, displayRect);
SurfaceControl.setDisplayProjection(display, orientation, deviceRect, displayRect);
SurfaceControl.setDisplayLayerStack(display, layerStack);
} finally {
SurfaceControl.closeTransaction();
}
}
@Override
public void requestInvalidate() {
invalidate();
private synchronized Size getSessionDisplaySize() {
return sessionDisplaySize;
}
private synchronized void setSessionDisplaySize(Size sessionDisplaySize) {
this.sessionDisplaySize = sessionDisplaySize;
}
private void registerDisplayListenerFallbacks() {
rotationWatcher = new IRotationWatcher.Stub() {
@Override
public void onRotationChanged(int rotation) {
if (Ln.isEnabled(Ln.Level.VERBOSE)) {
Ln.v("ScreenCapture: onRotationChanged(" + rotation + ")");
}
requestReset();
}
};
ServiceManager.getWindowManager().registerRotationWatcher(rotationWatcher, displayId);
// Build.VERSION.SDK_INT >= AndroidVersions.API_29_ANDROID_10 (but implied by == API_34_ANDROID_14)
displayFoldListener = new IDisplayFoldListener.Stub() {
private boolean first = true;
@Override
public void onDisplayFoldChanged(int displayId, boolean folded) {
if (first) {
// An event is posted on registration to signal the initial state. Ignore it to avoid restarting encoding.
first = false;
return;
}
if (Ln.isEnabled(Ln.Level.VERBOSE)) {
Ln.v("ScreenCapture: onDisplayFoldChanged(" + displayId + ", " + folded + ")");
}
if (ScreenCapture.this.displayId != displayId) {
// Ignore events related to other display ids
return;
}
requestReset();
}
};
ServiceManager.getWindowManager().registerDisplayFoldListener(displayFoldListener);
}
private void unregisterDisplayListenerFallbacks() {
synchronized (this) {
if (rotationWatcher != null) {
ServiceManager.getWindowManager().unregisterRotationWatcher(rotationWatcher);
rotationWatcher = null;
}
if (displayFoldListener != null) {
// Build.VERSION.SDK_INT >= AndroidVersions.API_29_ANDROID_10 (but implied by == API_34_ANDROID_14)
ServiceManager.getWindowManager().unregisterDisplayFoldListener(displayFoldListener);
displayFoldListener = null;
}
}
}
}

View File

@ -0,0 +1,149 @@
package com.genymobile.scrcpy.video;
import com.genymobile.scrcpy.BuildConfig;
import com.genymobile.scrcpy.device.Device;
import com.genymobile.scrcpy.device.Size;
import com.genymobile.scrcpy.util.Ln;
import android.graphics.Rect;
public final class ScreenInfo {
/**
* Device (physical) size, possibly cropped
*/
private final Rect contentRect; // device size, possibly cropped
/**
* Video size, possibly smaller than the device size, already taking the device rotation and crop into account.
* <p>
* However, it does not include the locked video orientation.
*/
private final Size unlockedVideoSize;
/**
* Device rotation, relative to the natural device orientation (0, 1, 2 or 3)
*/
private final int deviceRotation;
/**
* The locked video orientation (-1: disabled, 0: normal, 1: 90° CCW, 2: 180°, 3: 90° CW)
*/
private final int lockedVideoOrientation;
public ScreenInfo(Rect contentRect, Size unlockedVideoSize, int deviceRotation, int lockedVideoOrientation) {
this.contentRect = contentRect;
this.unlockedVideoSize = unlockedVideoSize;
this.deviceRotation = deviceRotation;
this.lockedVideoOrientation = lockedVideoOrientation;
}
public Rect getContentRect() {
return contentRect;
}
/**
* Return the video size as if locked video orientation was not set.
*
* @return the unlocked video size
*/
public Size getUnlockedVideoSize() {
return unlockedVideoSize;
}
/**
* Return the actual video size if locked video orientation is set.
*
* @return the actual video size
*/
public Size getVideoSize() {
if (getVideoRotation() % 2 == 0) {
return unlockedVideoSize;
}
return unlockedVideoSize.rotate();
}
public static ScreenInfo computeScreenInfo(int rotation, Size deviceSize, Rect crop, int maxSize, int lockedVideoOrientation) {
if (lockedVideoOrientation == Device.LOCK_VIDEO_ORIENTATION_INITIAL || lockedVideoOrientation == Device.LOCK_VIDEO_ORIENTATION_INITIAL_AUTO) {
// The user requested to lock the video orientation to the current orientation
lockedVideoOrientation = rotation;
}
Rect contentRect = new Rect(0, 0, deviceSize.getWidth(), deviceSize.getHeight());
if (crop != null) {
if (rotation % 2 != 0) { // 180s preserve dimensions
// the crop (provided by the user) is expressed in the natural orientation
crop = flipRect(crop);
}
if (!contentRect.intersect(crop)) {
// intersect() changes contentRect so that it is intersected with crop
Ln.w("Crop rectangle (" + formatCrop(crop) + ") does not intersect device screen (" + formatCrop(deviceSize.toRect()) + ")");
contentRect = new Rect(); // empty
}
}
Size videoSize = computeVideoSize(contentRect.width(), contentRect.height(), maxSize);
return new ScreenInfo(contentRect, videoSize, rotation, lockedVideoOrientation);
}
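// A small illustration (hypothetical values) of the crop handling above: the user expresses the crop
// in the natural orientation, so for an odd rotation (90°/270°) the x and y coordinates are swapped
// by flipRect() before intersecting with the content rect.
static Rect flipRectExample() {
    Rect crop = new Rect(10, 20, 110, 220); // left, top, right, bottom in the natural orientation
    return flipRect(crop);                  // -> Rect(20, 10, 220, 110)
}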
private static String formatCrop(Rect rect) {
return rect.width() + ":" + rect.height() + ":" + rect.left + ":" + rect.top;
}
public static Size computeVideoSize(int w, int h, int maxSize) {
// Compute the video size and the padding of the content inside this video.
// Principle:
// - scale down the greater side of the screen to maxSize (if necessary);
// - scale down the other side so that the aspect ratio is preserved;
// - round this value to the nearest multiple of 8 (H.264 only accepts multiples of 8)
w &= ~7; // in case it's not a multiple of 8
h &= ~7;
if (maxSize > 0) {
if (BuildConfig.DEBUG && maxSize % 8 != 0) {
throw new AssertionError("Max size must be a multiple of 8");
}
boolean portrait = h > w;
int major = portrait ? h : w;
int minor = portrait ? w : h;
if (major > maxSize) {
int minorExact = minor * maxSize / major;
// +4 to round the value to the nearest multiple of 8
minor = (minorExact + 4) & ~7;
major = maxSize;
}
w = portrait ? minor : major;
h = portrait ? major : minor;
}
return new Size(w, h);
}
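// A worked example of the rounding above (hypothetical values): a 1440x3120 display limited to
// maxSize=1920. The intermediate values mirror the computation in computeVideoSize().
static Size computeVideoSizeExample() {
    // w=1440 and h=3120 are already multiples of 8, so the initial "& ~7" changes nothing
    // portrait=true, major=3120, minor=1440
    // minorExact = 1440 * 1920 / 3120 = 886
    // minor = (886 + 4) & ~7 = 888   (rounded to the nearest multiple of 8)
    // major = 1920
    return computeVideoSize(1440, 3120, 1920); // -> 888x1920
}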
private static Rect flipRect(Rect crop) {
return new Rect(crop.top, crop.left, crop.bottom, crop.right);
}
/**
* Return the rotation to apply to the device rotation to get the requested locked video orientation
*
* @return the rotation offset
*/
public int getVideoRotation() {
if (lockedVideoOrientation == -1) {
// no offset
return 0;
}
return (deviceRotation + 4 - lockedVideoOrientation) % 4;
}
/**
* Return the rotation to apply to the requested locked video orientation to get the device rotation
*
* @return the (reverse) rotation offset
*/
public int getReverseVideoRotation() {
if (lockedVideoOrientation == -1) {
// no offset
return 0;
}
return (lockedVideoOrientation + 4 - deviceRotation) % 4;
}
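// A quick check (hypothetical values) that the two offsets above are inverses modulo 4:
// deviceRotation=3 (270°), lockedVideoOrientation=1 (90°).
static void rotationOffsetExample() {
    int deviceRotation = 3;
    int lockedVideoOrientation = 1;
    int videoRotation = (deviceRotation + 4 - lockedVideoOrientation) % 4;        // 2
    int reverseVideoRotation = (lockedVideoOrientation + 4 - deviceRotation) % 4; // 2
    assert (videoRotation + reverseVideoRotation) % 4 == 0; // applying both is a no-op
}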
}

View File

@ -6,37 +6,36 @@ import com.genymobile.scrcpy.device.Size;
import android.view.Surface;
import java.io.IOException;
import java.util.concurrent.atomic.AtomicBoolean;
/**
* A video source which can be rendered on a Surface for encoding.
*/
public abstract class SurfaceCapture {
public interface CaptureListener {
void onInvalidated();
}
private CaptureListener listener;
private final AtomicBoolean resetCapture = new AtomicBoolean();
/**
* Notify the listener that the capture has been invalidated (for example, because its size changed).
* Request the encoding session to be restarted, for example if the capture implementation detects that the video source size has changed (on
* device rotation for example).
*/
protected void invalidate() {
listener.onInvalidated();
protected void requestReset() {
resetCapture.set(true);
}
/**
* Consume the reset request (intended to be called by the encoder).
*
* @return {@code true} if a reset request was pending, {@code false} otherwise.
*/
public boolean consumeReset() {
return resetCapture.getAndSet(false);
}
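// A minimal sketch of the request/consume pattern above: getAndSet(false) hands a pending reset to
// exactly one consumer, even if requestReset() is called concurrently from another thread.
// (Plain AtomicBoolean, illustrative names only.)
static void consumeResetSketch() {
    AtomicBoolean flag = new AtomicBoolean();
    flag.set(true);                          // requestReset()
    boolean first = flag.getAndSet(false);   // true: this call handles the reset
    boolean second = flag.getAndSet(false);  // false: already consumed, nothing to do
    assert first && !second;
}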
/**
* Called once before the first capture starts.
*/
public final void init(CaptureListener listener) throws ConfigurationException, IOException {
this.listener = listener;
init();
}
/**
* Called once before the first capture starts.
*/
protected abstract void init() throws ConfigurationException, IOException;
public abstract void init() throws ConfigurationException, IOException;
/**
* Called after the last capture ends (if and only if {@link #init()} has been called).
@ -46,7 +45,7 @@ public abstract class SurfaceCapture {
/**
* Called once before each capture starts, before {@link #getSize()}.
*/
public void prepare() throws ConfigurationException, IOException {
public void prepare() throws ConfigurationException {
// empty by default
}
@ -57,13 +56,6 @@ public abstract class SurfaceCapture {
*/
public abstract void start(Surface surface) throws IOException;
/**
* Stop the capture.
*/
public void stop() {
// Do nothing by default
}
/**
* Return the video size
*
@ -86,11 +78,4 @@ public abstract class SurfaceCapture {
public boolean isClosed() {
return false;
}
/**
* Manually request to invalidate (typically a user request).
* <p>
* The capture implementation is free to ignore the request and do nothing.
*/
public abstract void requestInvalidate();
}

View File

@ -2,7 +2,6 @@ package com.genymobile.scrcpy.video;
import com.genymobile.scrcpy.AndroidVersions;
import com.genymobile.scrcpy.AsyncProcessor;
import com.genymobile.scrcpy.Options;
import com.genymobile.scrcpy.device.ConfigurationException;
import com.genymobile.scrcpy.device.Size;
import com.genymobile.scrcpy.device.Streamer;
@ -50,16 +49,15 @@ public class SurfaceEncoder implements AsyncProcessor {
private Thread thread;
private final AtomicBoolean stopped = new AtomicBoolean();
private final CaptureReset reset = new CaptureReset();
public SurfaceEncoder(SurfaceCapture capture, Streamer streamer, Options options) {
public SurfaceEncoder(SurfaceCapture capture, Streamer streamer, int videoBitRate, float maxFps, List<CodecOption> codecOptions,
String encoderName, boolean downsizeOnError) {
this.capture = capture;
this.streamer = streamer;
this.videoBitRate = options.getVideoBitRate();
this.maxFps = options.getMaxFps();
this.codecOptions = options.getVideoCodecOptions();
this.encoderName = options.getVideoEncoder();
this.downsizeOnError = options.getDownsizeOnError();
this.videoBitRate = videoBitRate;
this.maxFps = maxFps;
this.codecOptions = codecOptions;
this.encoderName = encoderName;
this.downsizeOnError = downsizeOnError;
}
private void streamCapture() throws IOException, ConfigurationException {
@ -67,14 +65,14 @@ public class SurfaceEncoder implements AsyncProcessor {
MediaCodec mediaCodec = createMediaCodec(codec, encoderName);
MediaFormat format = createFormat(codec.getMimeType(), videoBitRate, maxFps, codecOptions);
capture.init(reset);
capture.init();
try {
boolean alive;
boolean headerWritten = false;
do {
reset.consumeReset(); // If a capture reset was requested, it is implicitly fulfilled
capture.consumeReset(); // If a capture reset was requested, it is implicitly fulfilled
capture.prepare();
Size size = capture.getSize();
if (!headerWritten) {
@ -86,50 +84,29 @@ public class SurfaceEncoder implements AsyncProcessor {
format.setInteger(MediaFormat.KEY_HEIGHT, size.getHeight());
Surface surface = null;
boolean mediaCodecStarted = false;
boolean captureStarted = false;
try {
mediaCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
surface = mediaCodec.createInputSurface();
VideoFilter filter = new VideoFilter(surface);
surface = filter.getInputSurface();
capture.start(surface);
captureStarted = true;
mediaCodec.start();
mediaCodecStarted = true;
// Set the MediaCodec instance to "interrupt" (by signaling an EOS) on reset
reset.setRunningMediaCodec(mediaCodec);
if (stopped.get()) {
alive = false;
} else {
boolean resetRequested = reset.consumeReset();
if (!resetRequested) {
// If a reset is requested during encode(), it will interrupt the encoding by an EOS
encode(mediaCodec, streamer);
}
// The capture might have been closed internally (for example if the camera is disconnected)
alive = !stopped.get() && !capture.isClosed();
}
alive = encode(mediaCodec, streamer);
// do not call stop() on exception, it would trigger an IllegalStateException
mediaCodec.stop();
filter.release();
} catch (IllegalStateException | IllegalArgumentException e) {
Ln.e("Encoding error: " + e.getClass().getName() + ": " + e.getMessage());
if (!prepareRetry(size)) {
throw e;
}
Ln.i("Retrying...");
alive = true;
} finally {
reset.setRunningMediaCodec(null);
if (captureStarted) {
capture.stop();
}
if (mediaCodecStarted) {
try {
mediaCodec.stop();
} catch (IllegalStateException e) {
// ignore (just in case)
}
}
mediaCodec.reset();
if (surface != null) {
surface.release();
@ -190,16 +167,25 @@ public class SurfaceEncoder implements AsyncProcessor {
return 0;
}
private void encode(MediaCodec codec, Streamer streamer) throws IOException {
private boolean encode(MediaCodec codec, Streamer streamer) throws IOException {
boolean eof = false;
boolean alive = true;
MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
boolean eos;
do {
while (!capture.consumeReset() && !eof) {
if (stopped.get()) {
alive = false;
break;
}
int outputBufferId = codec.dequeueOutputBuffer(bufferInfo, -1);
try {
eos = (bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0;
// On EOS, there might be data or not, depending on bufferInfo.size
if (outputBufferId >= 0 && bufferInfo.size > 0) {
if (capture.consumeReset()) {
// must restart encoding with new size
break;
}
eof = (bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0;
if (outputBufferId >= 0) {
ByteBuffer codecBuffer = codec.getOutputBuffer(outputBufferId);
boolean isConfig = (bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0;
@ -216,7 +202,14 @@ public class SurfaceEncoder implements AsyncProcessor {
codec.releaseOutputBuffer(outputBufferId, false);
}
}
} while (!eos);
}
if (capture.isClosed()) {
// The capture might have been closed internally (for example if the camera is disconnected)
alive = false;
}
return !eof && alive;
}
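// A stripped-down sketch of the standard MediaCodec drain loop used above, without the scrcpy
// reset/stop handling (standard Android APIs only; the actual streaming call is left as a comment).
private static void drainSketch(MediaCodec codec) {
    MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
    boolean eof = false;
    while (!eof) {
        int outputBufferId = codec.dequeueOutputBuffer(bufferInfo, -1); // block until output is ready
        eof = (bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0;
        if (outputBufferId >= 0) {
            ByteBuffer buffer = codec.getOutputBuffer(outputBufferId);
            // ... write bufferInfo.size bytes from buffer to the stream ...
            codec.releaseOutputBuffer(outputBufferId, false);
        }
    }
}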
private static MediaCodec createMediaCodec(Codec codec, String encoderName) throws IOException, ConfigurationException {
@ -309,7 +302,6 @@ public class SurfaceEncoder implements AsyncProcessor {
public void stop() {
if (thread != null) {
stopped.set(true);
reset.reset();
}
}

View File

@ -1,119 +1,116 @@
package com.genymobile.scrcpy.video;
import com.genymobile.scrcpy.device.Orientation;
import com.genymobile.scrcpy.device.Size;
import com.genymobile.scrcpy.util.AffineMatrix;
import com.genymobile.scrcpy.util.Ln;
import android.graphics.Rect;
import android.graphics.SurfaceTexture;
import android.opengl.EGL14;
import android.opengl.EGLConfig;
import android.opengl.EGLContext;
import android.opengl.EGLDisplay;
import android.opengl.EGLSurface;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.view.Surface;
public class VideoFilter {
private EGLDisplay eglDisplay;
private EGLContext eglContext;
private EGLSurface eglSurface;
private SurfaceTexture surfaceTexture;
private Surface inputSurface;
private int textureId;
private Size size;
private AffineMatrix transform;
public VideoFilter(Size inputSize) {
this.size = inputSize;
}
public Size getOutputSize() {
return size;
}
public AffineMatrix getTransform() {
return transform;
}
/**
* Return the inverse transform.
* <p>
* The direct affine transform describes how the input image is transformed.
* <p>
* It is often useful to retrieve the inverse transform instead:
* <ul>
* <li>The OpenGL filter expects the matrix to transform the image <em>coordinates</em>, which is the inverse transform;</li>
* <li>The click positions must be transformed back to the device positions, using the inverse transform too.</li>
* </ul>
*
* @return the inverse transform
*/
public AffineMatrix getInverseTransform() {
if (transform == null) {
return null;
}
return transform.invert();
}
private static Rect transposeRect(Rect rect) {
return new Rect(rect.top, rect.left, rect.bottom, rect.right);
}
public void addCrop(Rect crop, boolean transposed) {
if (transposed) {
crop = transposeRect(crop);
public VideoFilter(Surface outputSurface) {
eglDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
if (eglDisplay == EGL14.EGL_NO_DISPLAY) {
throw new RuntimeException("Unable to get EGL14 display");
}
double inputWidth = size.getWidth();
double inputHeight = size.getHeight();
if (crop.left < 0 || crop.top < 0 || crop.right > inputWidth || crop.bottom > inputHeight) {
throw new IllegalArgumentException("Crop " + crop + " exceeds the input area (" + size + ")");
int[] version = new int[2];
if (!EGL14.eglInitialize(eglDisplay, version, 0, version, 1)) {
throw new RuntimeException("Unable to initialize EGL14");
}
double x = crop.left / inputWidth;
double y = 1 - (crop.bottom / inputHeight); // OpenGL origin is bottom-left
double w = crop.width() / inputWidth;
double h = crop.height() / inputHeight;
int[] attribList = {
EGL14.EGL_RED_SIZE, 8,
EGL14.EGL_GREEN_SIZE, 8,
EGL14.EGL_BLUE_SIZE, 8,
EGL14.EGL_ALPHA_SIZE, 8,
EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
EGL14.EGL_NONE
};
EGLConfig[] configs = new EGLConfig[1];
int[] numConfigs = new int[1];
EGL14.eglChooseConfig(eglDisplay, attribList, 0, configs, 0, configs.length, numConfigs, 0);
if (numConfigs[0] <= 0) {
throw new RuntimeException("Unable to find ES2 EGL config");
}
EGLConfig eglConfig = configs[0];
transform = AffineMatrix.reframe(x, y, w, h).multiply(transform);
size = new Size(crop.width(), crop.height());
int[] contextAttribList = {
EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
EGL14.EGL_NONE
};
eglContext = EGL14.eglCreateContext(eglDisplay, eglConfig, EGL14.EGL_NO_CONTEXT, contextAttribList, 0);
if (eglContext == null) {
throw new RuntimeException("Failed to create EGL context");
}
int[] surfaceAttribList = {
EGL14.EGL_NONE
};
eglSurface = EGL14.eglCreateWindowSurface(eglDisplay, eglConfig, outputSurface, surfaceAttribList, 0);
if (eglSurface == null) {
throw new RuntimeException("Failed to create EGL window surface");
}
if (!EGL14.eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext)) {
throw new RuntimeException("Failed to make EGL context current");
}
int[] textures = new int[1];
GLES20.glGenTextures(1, textures, 0);
textureId = textures[0];
surfaceTexture = new SurfaceTexture(textureId);
inputSurface = new Surface(surfaceTexture);
surfaceTexture.setOnFrameAvailableListener(new SurfaceTexture.OnFrameAvailableListener() {
@Override
public void onFrameAvailable(SurfaceTexture surfaceTexture) {
// XXX This should be called when the VirtualDisplay has rendered a new frame
Ln.i("==== render");
render();
}
});
}
public void addRotation(int ccwRotation) {
if (ccwRotation == 0) {
return;
}
transform = AffineMatrix.rotateOrtho(ccwRotation).multiply(transform);
if (ccwRotation % 2 != 0) {
size = size.rotate();
}
public Surface getInputSurface() {
return inputSurface;
}
public void addOrientation(Orientation captureOrientation) {
if (captureOrientation.isFlipped()) {
transform = AffineMatrix.hflip().multiply(transform);
}
int ccwRotation = (4 - captureOrientation.getRotation()) % 4;
addRotation(ccwRotation);
public void render() {
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureId);
// For now, just paint with a color
GLES20.glClearColor(0.0f, 0.5f, 0.5f, 1.0f);
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
GLES20.glViewport(0, 0, 1920, 1080);
EGL14.eglSwapBuffers(eglDisplay, eglSurface);
}
public void addOrientation(int displayRotation, boolean locked, Orientation captureOrientation) {
if (locked) {
// flip/rotate the current display from the natural device orientation (i.e. where display rotation is 0)
int reverseDisplayRotation = (4 - displayRotation) % 4;
addRotation(reverseDisplayRotation);
public void release() {
if (eglDisplay != EGL14.EGL_NO_DISPLAY) {
EGL14.eglDestroySurface(eglDisplay, eglSurface);
EGL14.eglDestroyContext(eglDisplay, eglContext);
EGL14.eglTerminate(eglDisplay);
}
addOrientation(captureOrientation);
}
public void addAngle(double cwAngle) {
if (cwAngle == 0) {
return;
}
double ccwAngle = -cwAngle;
transform = AffineMatrix.rotate(ccwAngle).withAspectRatio(size).fromCenter().multiply(transform);
}
public void addResize(Size targetSize) {
if (size.equals(targetSize)) {
return;
}
if (transform == null) {
// The requested scaling is performed by the viewport (by changing the output size), but the OpenGL filter must still run, even if
// resizing is not performed by the shader. So transform MUST NOT be null.
transform = AffineMatrix.IDENTITY;
}
size = targetSize;
eglDisplay = EGL14.EGL_NO_DISPLAY;
eglContext = EGL14.EGL_NO_CONTEXT;
eglSurface = EGL14.EGL_NO_SURFACE;
surfaceTexture.release();
inputSurface.release();
}
}

View File

@ -6,7 +6,6 @@ import com.genymobile.scrcpy.util.Ln;
import android.annotation.SuppressLint;
import android.annotation.TargetApi;
import android.content.IContentProvider;
import android.content.Intent;
import android.os.Binder;
import android.os.Bundle;
@ -65,7 +64,7 @@ public final class ActivityManager {
}
@TargetApi(AndroidVersions.API_29_ANDROID_10)
public IContentProvider getContentProviderExternal(String name, IBinder token) {
private ContentProvider getContentProviderExternal(String name, IBinder token) {
try {
Method method = getGetContentProviderExternalMethod();
Object[] args;
@ -84,7 +83,11 @@ public final class ActivityManager {
// IContentProvider provider = providerHolder.provider;
Field providerField = providerHolder.getClass().getDeclaredField("provider");
providerField.setAccessible(true);
return (IContentProvider) providerField.get(providerHolder);
Object provider = providerField.get(providerHolder);
if (provider == null) {
return null;
}
return new ContentProvider(this, provider, name, token);
} catch (ReflectiveOperationException e) {
Ln.e("Could not invoke method", e);
return null;
@ -101,12 +104,7 @@ public final class ActivityManager {
}
public ContentProvider createSettingsProvider() {
IBinder token = new Binder();
IContentProvider provider = getContentProviderExternal("settings", token);
if (provider == null) {
return null;
}
return new ContentProvider(this, provider, "settings", token);
return getContentProviderExternal("settings", new Binder());
}
private Method getStartActivityAsUserMethod() throws NoSuchMethodException, ClassNotFoundException {

View File

@ -192,9 +192,6 @@ public final class DisplayManager {
if ("onDisplayChanged".equals(method.getName())) {
listener.onDisplayChanged((int) args[0]);
}
if ("toString".equals(method.getName())) {
return "DisplayListener";
}
return null;
});
try {

View File

@ -1,39 +0,0 @@
package com.genymobile.scrcpy.wrappers;
import android.content.res.Configuration;
import android.graphics.Rect;
import android.view.IDisplayWindowListener;
import java.util.List;
public class DisplayWindowListener extends IDisplayWindowListener.Stub {
@Override
public void onDisplayAdded(int displayId) {
// empty default implementation
}
@Override
public void onDisplayConfigurationChanged(int displayId, Configuration newConfig) {
// empty default implementation
}
@Override
public void onDisplayRemoved(int displayId) {
// empty default implementation
}
@Override
public void onFixedRotationStarted(int displayId, int newRotation) {
// empty default implementation
}
@Override
public void onFixedRotationFinished(int displayId) {
// empty default implementation
}
@Override
public void onKeepClearAreasChanged(int displayId, List<Rect> restricted, List<Rect> unrestricted) {
// empty default implementation
}
}

View File

@ -1,9 +1,7 @@
package com.genymobile.scrcpy.wrappers;
import com.genymobile.scrcpy.AndroidVersions;
import com.genymobile.scrcpy.util.Ln;
import android.os.Build;
import android.os.IInterface;
import java.lang.reflect.Method;
@ -23,22 +21,14 @@ public final class PowerManager {
private Method getIsScreenOnMethod() throws NoSuchMethodException {
if (isScreenOnMethod == null) {
if (Build.VERSION.SDK_INT >= AndroidVersions.API_34_ANDROID_14) {
isScreenOnMethod = manager.getClass().getMethod("isDisplayInteractive", int.class);
} else {
isScreenOnMethod = manager.getClass().getMethod("isInteractive");
}
isScreenOnMethod = manager.getClass().getMethod("isInteractive");
}
return isScreenOnMethod;
}
public boolean isScreenOn(int displayId) {
public boolean isScreenOn() {
try {
Method method = getIsScreenOnMethod();
if (Build.VERSION.SDK_INT >= AndroidVersions.API_34_ANDROID_14) {
return (boolean) method.invoke(manager, displayId);
}
return (boolean) method.invoke(manager);
} catch (ReflectiveOperationException e) {
Ln.e("Could not invoke method", e);

View File

@ -6,7 +6,6 @@ import com.genymobile.scrcpy.util.Ln;
import android.annotation.TargetApi;
import android.os.IInterface;
import android.view.IDisplayFoldListener;
import android.view.IDisplayWindowListener;
import android.view.IRotationWatcher;
import java.lang.reflect.Method;
@ -227,23 +226,4 @@ public final class WindowManager {
Ln.e("Could not unregister display fold listener", e);
}
}
@TargetApi(AndroidVersions.API_30_ANDROID_11)
public int[] registerDisplayWindowListener(IDisplayWindowListener listener) {
try {
return (int[]) manager.getClass().getMethod("registerDisplayWindowListener", IDisplayWindowListener.class).invoke(manager, listener);
} catch (Exception e) {
Ln.e("Could not register display window listener", e);
}
return null;
}
@TargetApi(AndroidVersions.API_30_ANDROID_11)
public void unregisterDisplayWindowListener(IDisplayWindowListener listener) {
try {
manager.getClass().getMethod("unregisterDisplayWindowListener", IDisplayWindowListener.class).invoke(manager, listener);
} catch (Exception e) {
Ln.e("Could not unregister display window listener", e);
}
}
}