Compare commits: v2.7...opengl_fil

71 commits (SHA1):

4397dfba89
f08a6d86c5
3ac4b64461
c7378f4dc8
e26bdb07a2
04a3e6fb06
c29ecd0314
d62fa8880e
1f6634ea87
58ba00fa06
569c37cec1
58a0fbbf2e
acff5b005c
5474ae6bd6
2c25fd7a80
ce21f515e3
381fe95867
566b5be0f6
dd20efa41c
13ce277e1f
9c9d92fb1c
408a388fc5
98ed5eb643
5d0e012a4c
d19396718e
7024d38199
f1368d9a8f
d916429566
7cfefae5e1
b60e174780
5851b62580
12d5ca4d5e
68e54d9b0b
5f0480c039
874eaec487
14e5439dee
a5844e198e
2687d20280
9c0a328498
02ef3d57ce
538a32a539
9578aae34e
7b3dd595b4
a46150f753
3acffaae57
e33be3d288
c15df01171
09741bc805
afbaf59abb
5b10650f22
0d8014be52
064670ab4c
ff9fb5994d
a36de26969
281fcc7052
65fc53eace
a6f74d72f5
e724ff4349
79014143b9
c0a6432967
ec602a0334
7a9ea5c66f
d92b7a6024
0bb3955b95
62776fb261
10f60054ac
42fb947780
2e7a15a998
a7e61fb871
0cc6f6aa09
f69ac40534
147  .github/workflows/release.yml  vendored  Normal file

@@ -0,0 +1,147 @@
name: Build

on:
  workflow_dispatch:
    inputs:
      name:
        description: 'Version name (default is ref name)'

jobs:
  build-scrcpy-server:
    runs-on: ubuntu-latest
    env:
      GRADLE: gradle # use native gradle instead of ./gradlew in release.mk
    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Setup JDK
        uses: actions/setup-java@v4
        with:
          distribution: 'zulu'
          java-version: '17'

      - name: Test scrcpy-server
        run: make -f release.mk test-server

      - name: Build scrcpy-server
        run: make -f release.mk build-server

      - name: Upload scrcpy-server artifact
        uses: actions/upload-artifact@v4
        with:
          name: scrcpy-server
          path: build-server/server/scrcpy-server

  test-client:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Install dependencies
        run: |
          sudo apt update
          sudo apt install -y meson ninja-build nasm ffmpeg libsdl2-2.0-0 \
              libsdl2-dev libavcodec-dev libavdevice-dev libavformat-dev \
              libavutil-dev libswresample-dev libusb-1.0-0 libusb-1.0-0-dev

      - name: Build
        run: |
          meson setup d -Db_sanitize=address,undefined

      - name: Test
        run: |
          meson test -Cd

  build-win32:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Install dependencies
        run: |
          sudo apt update
          sudo apt install -y meson ninja-build nasm ffmpeg libsdl2-2.0-0 \
              libsdl2-dev libavcodec-dev libavdevice-dev libavformat-dev \
              libavutil-dev libswresample-dev libusb-1.0-0 libusb-1.0-0-dev \
              mingw-w64 mingw-w64-tools libz-mingw-w64-dev

      - name: Workaround for old meson version run by Github Actions
        run: sed -i 's/^pkg-config/pkgconfig/' cross_win32.txt

      - name: Build scrcpy win32
        run: make -f release.mk build-win32

      - name: Upload build-win32 artifact
        uses: actions/upload-artifact@v4
        with:
          name: build-win32-intermediate
          path: build-win32/dist/

  build-win64:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Install dependencies
        run: |
          sudo apt update
          sudo apt install -y meson ninja-build nasm ffmpeg libsdl2-2.0-0 \
              libsdl2-dev libavcodec-dev libavdevice-dev libavformat-dev \
              libavutil-dev libswresample-dev libusb-1.0-0 libusb-1.0-0-dev \
              mingw-w64 mingw-w64-tools libz-mingw-w64-dev

      - name: Workaround for old meson version run by Github Actions
        run: sed -i 's/^pkg-config/pkgconfig/' cross_win64.txt

      - name: Build scrcpy win64
        run: make -f release.mk build-win64

      - name: Upload build-win64 artifact
        uses: actions/upload-artifact@v4
        with:
          name: build-win64-intermediate
          path: build-win64/dist/

  package:
    needs:
      - build-scrcpy-server
      - build-win32
      - build-win64
    runs-on: ubuntu-latest
    env:
      # $VERSION is used by release.mk
      VERSION: ${{ github.event.inputs.name || github.ref_name }}
    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Download scrcpy-server
        uses: actions/download-artifact@v4
        with:
          name: scrcpy-server
          path: build-server/server/

      - name: Download build-win32
        uses: actions/download-artifact@v4
        with:
          name: build-win32-intermediate
          path: build-win32/dist/

      - name: Download build-win64
        uses: actions/download-artifact@v4
        with:
          name: build-win64-intermediate
          path: build-win64/dist/

      - name: Package
        run: make -f release.mk package

      - name: Upload release artifact
        uses: actions/upload-artifact@v4
        with:
          name: scrcpy-release-${{ env.VERSION }}
          path: release-${{ env.VERSION }}
@@ -31,6 +31,7 @@ It focuses on:
Its features include:
- [audio forwarding](doc/audio.md) (Android 11+)
- [recording](doc/recording.md)
- [virtual display](doc/virtual_display.md)
- mirroring with [Android device screen off](doc/device.md#turn-screen-off)
- [copy-paste](doc/control.md#copy-paste) in both directions
- [configurable quality](doc/video.md)
@@ -91,6 +92,12 @@ Here are just some common examples.
    scrcpy --video-codec=h265 -m1920 --max-fps=60 --no-audio -K  # short version
    ```

- Start VLC in a new virtual display (separate from the device display):

    ```bash
    scrcpy --new-display=1920x1080 --start-app=org.videolan.vlc
    ```

- Record the device camera in H.265 at 1920x1080 (and microphone) to an MP4
  file:

@@ -134,6 +141,7 @@ documented in the following pages:
- [Device](doc/device.md)
- [Window](doc/window.md)
- [Recording](doc/recording.md)
- [Virtual display](doc/virtual_displays.md)
- [Tunnels](doc/tunnels.md)
- [OTG](doc/otg.md)
- [Camera](doc/camera.md)
@@ -20,7 +20,6 @@ _scrcpy() {
        --crop=
        -d --select-usb
        --disable-screensaver
        --display-buffer=
        --display-id=
        --display-orientation=
        -e --select-tcpip
@@ -33,6 +32,7 @@ _scrcpy() {
        --keyboard=
        --kill-adb-on-close
        --legacy-paste
        --list-apps
        --list-camera-sizes
        --list-cameras
        --list-displays
@@ -46,6 +46,8 @@ _scrcpy() {
        --mouse-bind=
        -n --no-control
        -N --no-playback
        --new-display
        --new-display=
        --no-audio
        --no-audio-playback
        --no-cleanup
@@ -76,6 +78,7 @@ _scrcpy() {
        -s --serial=
        -S --turn-screen-off
        --shortcut-mod=
        --start-app=
        -t --show-touches
        --tcpip
        --tcpip=
@@ -86,6 +89,7 @@ _scrcpy() {
        --v4l2-sink=
        -v --version
        -V --verbosity=
        --video-buffer=
        --video-codec=
        --video-codec-options=
        --video-encoder=
@@ -187,7 +191,6 @@ _scrcpy() {
        |--camera-size \
        |--crop \
        |--display-id \
        |--display-buffer \
        |--max-fps \
        |-m|--max-size \
        |-p|--port \
@@ -197,6 +200,7 @@ _scrcpy() {
        |--tunnel-port \
        |--v4l2-buffer \
        |--v4l2-sink \
        |--video-buffer \
        |--video-codec-options \
        |--video-encoder \
        |--tcpip \
@@ -27,7 +27,6 @@ arguments=(
    '--crop=[\[width\:height\:x\:y\] Crop the device screen on the server]'
    {-d,--select-usb}'[Use USB device]'
    '--disable-screensaver[Disable screensaver while scrcpy is running]'
    '--display-buffer=[Add a buffering delay \(in milliseconds\) before displaying]'
    '--display-id=[Specify the display id to mirror]'
    '--display-orientation=[Set the initial display orientation]:orientation values:(0 90 180 270 flip0 flip90 flip180 flip270)'
    {-e,--select-tcpip}'[Use TCP/IP device]'
@@ -40,6 +39,7 @@ arguments=(
    '--keyboard=[Set the keyboard input mode]:mode:(disabled sdk uhid aoa)'
    '--kill-adb-on-close[Kill adb when scrcpy terminates]'
    '--legacy-paste[Inject computer clipboard text as a sequence of key events on Ctrl+v]'
    '--list-apps[List Android apps installed on the device]'
    '--list-camera-sizes[List the valid camera capture sizes]'
    '--list-cameras[List cameras available on the device]'
    '--list-displays[List displays available on the device]'
@@ -52,6 +52,7 @@ arguments=(
    '--mouse-bind=[Configure bindings of secondary clicks]'
    {-n,--no-control}'[Disable device control \(mirror the device in read only\)]'
    {-N,--no-playback}'[Disable video and audio playback]'
    '--new-display=[Create a new display]'
    '--no-audio[Disable audio forwarding]'
    '--no-audio-playback[Disable audio playback]'
    '--no-cleanup[Disable device cleanup actions on exit]'
@@ -80,6 +81,7 @@ arguments=(
    {-s,--serial=}'[The device serial number \(mandatory for multiple devices only\)]:serial:($("${ADB-adb}" devices | awk '\''$2 == "device" {print $1}'\''))'
    {-S,--turn-screen-off}'[Turn the device screen off immediately]'
    '--shortcut-mod=[\[key1,key2+key3,...\] Specify the modifiers to use for scrcpy shortcuts]:shortcut mod:(lctrl rctrl lalt ralt lsuper rsuper)'
    '--start-app=[Start an Android app]'
    {-t,--show-touches}'[Show physical touches]'
    '--tcpip[\(optional \[ip\:port\]\) Configure and connect the device over TCP/IP]'
    '--time-limit=[Set the maximum mirroring time, in seconds]'
@@ -89,6 +91,7 @@ arguments=(
    '--v4l2-sink=[\[\/dev\/videoN\] Output to v4l2loopback device]'
    {-v,--version}'[Print the version of scrcpy]'
    {-V,--verbosity=}'[Set the log level]:verbosity:(verbose debug info warn error)'
    '--video-buffer=[Add a buffering delay \(in milliseconds\) before displaying video frames]'
    '--video-codec=[Select the video codec]:codec:(h264 h265 av1)'
    '--video-codec-options=[Set a list of comma-separated key\:type=value options for the device video encoder]'
    '--video-encoder=[Use a specific MediaCodec video encoder]'
@@ -5,6 +5,7 @@ src = [
    'src/adb/adb_parser.c',
    'src/adb/adb_tunnel.c',
    'src/audio_player.c',
    'src/audio_regulator.c',
    'src/cli.c',
    'src/clock.c',
    'src/compat.c',
@@ -22,6 +23,7 @@ src = [
    'src/frame_buffer.c',
    'src/input_manager.c',
    'src/keyboard_sdk.c',
    'src/mouse_capture.c',
    'src/mouse_sdk.c',
    'src/opengl.c',
    'src/options.c',
54  app/scrcpy.1

@@ -139,12 +139,6 @@ Also see \fB\-e\fR (\fB\-\-select\-tcpip\fR).
.BI "\-\-disable\-screensaver"
Disable screensaver while scrcpy is running.

.TP
.BI "\-\-display\-buffer " ms
Add a buffering delay (in milliseconds) before displaying. This increases latency to compensate for jitter.

Default is 0 (no buffering).

.TP
.BI "\-\-display\-id " id
Specify the device display id to mirror.
@@ -227,6 +221,10 @@ Inject computer clipboard text as a sequence of key events on Ctrl+v (like MOD+S

This is a workaround for some devices not behaving as expected when setting the device clipboard programmatically.

.TP
.B \-\-list\-apps
List Android apps installed on the device.

.TP
.B \-\-list\-camera\-sizes
List the valid camera capture sizes.
@@ -314,6 +312,18 @@ Disable device control (mirror the device in read\-only).
.B \-N, \-\-no\-playback
Disable video and audio playback on the computer (equivalent to \fB\-\-no\-video\-playback \-\-no\-audio\-playback\fR).

.TP
\fB\-\-new\-display\fR[=[\fIwidth\fRx\fIheight\fR][/\fIdpi\fR]]
Create a new display with the specified resolution and density. If not provided, they default to the main display dimensions and DPI, and \fB\-\-max\-size\fR is considered.

Examples:

    \-\-new\-display=1920x1080
    \-\-new\-display=1920x1080/420
    \-\-new\-display          # main display size and density
    \-\-new\-display -m1920   # scaled to fit a max size of 1920
    \-\-new\-display=/240     # main display size and 240 dpi

.TP
.B \-\-no\-audio
Disable audio forwarding.
@@ -478,6 +488,22 @@ For example, to use either LCtrl or LSuper for scrcpy shortcuts, pass "lctrl,lsu

Default is "lalt,lsuper" (left-Alt or left-Super).

.TP
.BI "\-\-start\-app " name
Start an Android app, by its exact package name.

Add a '?' prefix to select an app whose name starts with the given name, case-insensitive (retrieving app names on the device may take some time):

    scrcpy --start-app=?firefox

Add a '+' prefix to force-stop before starting the app:

    scrcpy --new-display --start-app=+org.mozilla.firefox

Both prefixes can be used, in that order:

    scrcpy --start-app=+?firefox

.TP
.B \-t, \-\-show\-touches
Enable "show touches" on start, restore the initial value on exit.
@@ -528,7 +554,15 @@ It requires to lock the video orientation (see \fB\-\-lock\-video\-orientation\f
.BI "\-\-v4l2-buffer " ms
Add a buffering delay (in milliseconds) before pushing frames. This increases latency to compensate for jitter.

This option is similar to \fB\-\-display\-buffer\fR, but specific to V4L2 sink.
This option is similar to \fB\-\-video\-buffer\fR, but specific to V4L2 sink.

Default is 0 (no buffering).

.TP
.BI "\-\-video\-buffer " ms
Add a buffering delay (in milliseconds) before displaying video frames.

This increases latency to compensate for jitter.

Default is 0 (no buffering).

@@ -727,7 +761,11 @@ Pinch-to-zoom and rotate from the center of the screen

.TP
.B Shift+click-and-move
Tilt (slide vertically with two fingers)
Tilt vertically (slide with 2 fingers)

.TP
.B Ctrl+Shift+click-and-move
Tilt horizontally (slide with 2 fingers)

.TP
.B Drag & drop APK file
@@ -1,138 +1,23 @@
#include "audio_player.h"

#include <libavcodec/avcodec.h>
#include <libavutil/opt.h>

#include "util/log.h"

//#define SC_AUDIO_PLAYER_DEBUG // uncomment to debug

/**
 * Real-time audio player with configurable latency
 *
 * As input, the player regularly receives AVFrames of decoded audio samples.
 * As output, an SDL callback regularly requests audio samples to be played.
 * In the middle, an audio buffer stores the samples produced but not consumed
 * yet.
 *
 * The goal of the player is to feed the audio output with a latency as low as
 * possible while avoiding buffer underrun (i.e. not being able to provide
 * samples when requested).
 *
 * The player aims to feed the audio output with as little latency as possible
 * while avoiding buffer underrun. To achieve this, it attempts to maintain the
 * average buffering (the number of samples present in the buffer) around a
 * target value. If this target buffering is too low, then buffer underrun will
 * occur frequently. If it is too high, then latency will become unacceptable.
 * This target value is configured using the scrcpy option --audio-buffer.
 *
 * The player cannot adjust the sample input rate (it receives samples produced
 * in real-time) or the sample output rate (it must provide samples as
 * requested by the audio output callback). Therefore, it may only apply
 * compensation by resampling (converting _m_ input samples to _n_ output
 * samples).
 *
 * The compensation itself is applied by libswresample (FFmpeg). It is
 * configured using swr_set_compensation(). An important work for the player
 * is to estimate the compensation value regularly and apply it.
 *
 * The estimated buffering level is the result of averaging the "natural"
 * buffering (samples are produced and consumed by blocks, so it must be
 * smoothed), and making instant adjustments resulting of its own actions
 * (explicit compensation and silence insertion on underflow), which are not
 * smoothed.
 *
 * Buffer underflow events can occur when packets arrive too late. In that case,
 * the player inserts silence. Once the packets finally arrive (late), one
 * strategy could be to drop the samples that were replaced by silence, in
 * order to keep a minimal latency. However, dropping samples in case of buffer
 * underflow is inadvisable, as it would temporarily increase the underflow
 * even more and cause very noticeable audio glitches.
 *
 * Therefore, the player doesn't drop any sample on underflow. The compensation
 * mechanism will absorb the delay introduced by the inserted silence.
 */

/** Downcast frame_sink to sc_audio_player */
#define DOWNCAST(SINK) container_of(SINK, struct sc_audio_player, frame_sink)

#define SC_AV_SAMPLE_FMT AV_SAMPLE_FMT_FLT
#define SC_SDL_SAMPLE_FMT AUDIO_F32

#define TO_BYTES(SAMPLES) sc_audiobuf_to_bytes(&ap->buf, (SAMPLES))
#define TO_SAMPLES(BYTES) sc_audiobuf_to_samples(&ap->buf, (BYTES))

static void SDLCALL
sc_audio_player_sdl_callback(void *userdata, uint8_t *stream, int len_int) {
    struct sc_audio_player *ap = userdata;

    // This callback is called with the lock used by SDL_LockAudioDevice()

    assert(len_int > 0);
    size_t len = len_int;
    uint32_t count = TO_SAMPLES(len);

#ifdef SC_AUDIO_PLAYER_DEBUG
    LOGD("[Audio] SDL callback requests %" PRIu32 " samples", count);
#endif
    assert(len % ap->audioreg.sample_size == 0);
    uint32_t out_samples = len / ap->audioreg.sample_size;

    bool played = atomic_load_explicit(&ap->played, memory_order_relaxed);
    if (!played) {
        uint32_t buffered_samples = sc_audiobuf_can_read(&ap->buf);
        // Wait until the buffer is filled up to at least target_buffering
        // before playing
        if (buffered_samples < ap->target_buffering) {
            LOGV("[Audio] Inserting initial buffering silence: %" PRIu32
                 " samples", count);
            // Delay playback starting to reach the target buffering. Fill the
            // whole buffer with silence (len is small compared to the
            // arbitrary margin value).
            memset(stream, 0, len);
            return;
        }
    }

    uint32_t read = sc_audiobuf_read(&ap->buf, stream, count);

    if (read < count) {
        uint32_t silence = count - read;
        // Insert silence. In theory, the inserted silent samples replace the
        // missing real samples, which will arrive later, so they should be
        // dropped to keep the latency minimal. However, this would cause very
        // audible glitches, so let the clock compensation restore the target
        // latency.
        LOGD("[Audio] Buffer underflow, inserting silence: %" PRIu32 " samples",
             silence);
        memset(stream + TO_BYTES(read), 0, TO_BYTES(silence));

        bool received = atomic_load_explicit(&ap->received,
                                             memory_order_relaxed);
        if (received) {
            // Inserting additional samples immediately increases buffering
            atomic_fetch_add_explicit(&ap->underflow, silence,
                                      memory_order_relaxed);
        }
    }

    atomic_store_explicit(&ap->played, true, memory_order_relaxed);
}

static uint8_t *
sc_audio_player_get_swr_buf(struct sc_audio_player *ap, uint32_t min_samples) {
    size_t min_buf_size = TO_BYTES(min_samples);
    if (min_buf_size > ap->swr_buf_alloc_size) {
        size_t new_size = min_buf_size + 4096;
        uint8_t *buf = realloc(ap->swr_buf, new_size);
        if (!buf) {
            LOG_OOM();
            // Could not realloc to the requested size
            return NULL;
        }
        ap->swr_buf = buf;
        ap->swr_buf_alloc_size = new_size;
    }

    return ap->swr_buf;
    sc_audio_regulator_pull(&ap->audioreg, stream, out_samples);
}

static bool
@@ -140,209 +25,21 @@ sc_audio_player_frame_sink_push(struct sc_frame_sink *sink,
                                const AVFrame *frame) {
    struct sc_audio_player *ap = DOWNCAST(sink);

    SwrContext *swr_ctx = ap->swr_ctx;

    int64_t swr_delay = swr_get_delay(swr_ctx, ap->sample_rate);
    // No need to av_rescale_rnd(), input and output sample rates are the same.
    // Add more space (256) for clock compensation.
    int dst_nb_samples = swr_delay + frame->nb_samples + 256;

    uint8_t *swr_buf = sc_audio_player_get_swr_buf(ap, dst_nb_samples);
    if (!swr_buf) {
        return false;
    }

    int ret = swr_convert(swr_ctx, &swr_buf, dst_nb_samples,
                          (const uint8_t **) frame->data, frame->nb_samples);
    if (ret < 0) {
        LOGE("Resampling failed: %d", ret);
        return false;
    }

    // swr_convert() returns the number of samples which would have been
    // written if the buffer was big enough.
    uint32_t samples = MIN(ret, dst_nb_samples);
#ifdef SC_AUDIO_PLAYER_DEBUG
    LOGD("[Audio] %" PRIu32 " samples written to buffer", samples);
#endif

    uint32_t cap = sc_audiobuf_capacity(&ap->buf);
    if (samples > cap) {
        // Very very unlikely: a single resampled frame should never
        // exceed the audio buffer size (or something is very wrong).
        // Ignore the first bytes in swr_buf to avoid memory corruption anyway.
        swr_buf += TO_BYTES(samples - cap);
        samples = cap;
    }

    uint32_t skipped_samples = 0;

    uint32_t written = sc_audiobuf_write(&ap->buf, swr_buf, samples);
    if (written < samples) {
        uint32_t remaining = samples - written;

        // All samples that could be written without locking have been written,
        // now we need to lock to drop/consume old samples
        SDL_LockAudioDevice(ap->device);

        // Retry with the lock
        written += sc_audiobuf_write(&ap->buf,
                                     swr_buf + TO_BYTES(written),
                                     remaining);
        if (written < samples) {
            remaining = samples - written;
            // Still insufficient, drop old samples to make space
            skipped_samples = sc_audiobuf_read(&ap->buf, NULL, remaining);
            assert(skipped_samples == remaining);
        }

        SDL_UnlockAudioDevice(ap->device);

        if (written < samples) {
            // Now there is enough space
            uint32_t w = sc_audiobuf_write(&ap->buf,
                                           swr_buf + TO_BYTES(written),
                                           remaining);
            assert(w == remaining);
            (void) w;
        }
    }

    uint32_t underflow = 0;
    uint32_t max_buffered_samples;
    bool played = atomic_load_explicit(&ap->played, memory_order_relaxed);
    if (played) {
        underflow = atomic_exchange_explicit(&ap->underflow, 0,
                                             memory_order_relaxed);

        max_buffered_samples = ap->target_buffering
                             + 12 * ap->output_buffer
                             + ap->target_buffering / 10;
    } else {
        // SDL playback not started yet, do not accumulate more than
        // max_initial_buffering samples, this would cause unnecessary delay
        // (and glitches to compensate) on start.
        max_buffered_samples = ap->target_buffering + 2 * ap->output_buffer;
    }

    uint32_t can_read = sc_audiobuf_can_read(&ap->buf);
    if (can_read > max_buffered_samples) {
        uint32_t skip_samples = 0;

        SDL_LockAudioDevice(ap->device);
        can_read = sc_audiobuf_can_read(&ap->buf);
        if (can_read > max_buffered_samples) {
            skip_samples = can_read - max_buffered_samples;
            uint32_t r = sc_audiobuf_read(&ap->buf, NULL, skip_samples);
            assert(r == skip_samples);
            (void) r;
            skipped_samples += skip_samples;
        }
        SDL_UnlockAudioDevice(ap->device);

        if (skip_samples) {
            if (played) {
                LOGD("[Audio] Buffering threshold exceeded, skipping %" PRIu32
                     " samples", skip_samples);
#ifdef SC_AUDIO_PLAYER_DEBUG
            } else {
                LOGD("[Audio] Playback not started, skipping %" PRIu32
                     " samples", skip_samples);
#endif
            }
        }
    }

    atomic_store_explicit(&ap->received, true, memory_order_relaxed);
    if (!played) {
        // Nothing more to do
        return true;
    }

    // Number of samples added (or removed, if negative) for compensation
    int32_t instant_compensation = (int32_t) written - frame->nb_samples;
    // Inserting silence instantly increases buffering
    int32_t inserted_silence = (int32_t) underflow;
    // Dropping input samples instantly decreases buffering
    int32_t dropped = (int32_t) skipped_samples;

    // The compensation must apply instantly, it must not be smoothed
    ap->avg_buffering.avg += instant_compensation + inserted_silence - dropped;
    if (ap->avg_buffering.avg < 0) {
        // Since dropping samples instantly reduces buffering, the difference
        // is applied immediately to the average value, assuming that the delay
        // between the producer and the consumer will be caught up.
        //
        // However, when this assumption is not valid, the average buffering
        // may decrease indefinitely. Prevent it to become negative to limit
        // the consequences.
        ap->avg_buffering.avg = 0;
    }

    // However, the buffering level must be smoothed
    sc_average_push(&ap->avg_buffering, can_read);

#ifdef SC_AUDIO_PLAYER_DEBUG
    LOGD("[Audio] can_read=%" PRIu32 " avg_buffering=%f",
         can_read, sc_average_get(&ap->avg_buffering));
#endif

    ap->samples_since_resync += written;
    if (ap->samples_since_resync >= ap->sample_rate) {
        // Recompute compensation every second
        ap->samples_since_resync = 0;

        float avg = sc_average_get(&ap->avg_buffering);
        int diff = ap->target_buffering - avg;

        // Enable compensation when the difference exceeds +/- 4ms.
        // Disable compensation when the difference is lower than +/- 1ms.
        int threshold = ap->compensation != 0
                      ? ap->sample_rate / 1000 /* 1ms */
                      : ap->sample_rate * 4 / 1000; /* 4ms */

        if (abs(diff) < threshold) {
            // Do not compensate for small values, the error is just noise
            diff = 0;
        } else if (diff < 0 && can_read < ap->target_buffering) {
            // Do not accelerate if the instant buffering level is below the
            // target, this would increase underflow
            diff = 0;
        }
        // Compensate the diff over 4 seconds (but will be recomputed after 1
        // second)
        int distance = 4 * ap->sample_rate;
        // Limit compensation rate to 2%
        int abs_max_diff = distance / 50;
        diff = CLAMP(diff, -abs_max_diff, abs_max_diff);
        LOGV("[Audio] Buffering: target=%" PRIu32 " avg=%f cur=%" PRIu32
             " compensation=%d", ap->target_buffering, avg, can_read, diff);

        if (diff != ap->compensation) {
            int ret = swr_set_compensation(swr_ctx, diff, distance);
            if (ret < 0) {
                LOGW("Resampling compensation failed: %d", ret);
                // not fatal
            } else {
                ap->compensation = diff;
            }
        }
    }

    return true;
    return sc_audio_regulator_push(&ap->audioreg, frame);
}

static bool
sc_audio_player_frame_sink_open(struct sc_frame_sink *sink,
                                const AVCodecContext *ctx) {
    struct sc_audio_player *ap = DOWNCAST(sink);

#ifdef SCRCPY_LAVU_HAS_CHLAYOUT
    assert(ctx->ch_layout.nb_channels > 0);
    unsigned nb_channels = ctx->ch_layout.nb_channels;
    assert(ctx->ch_layout.nb_channels > 0 && ctx->ch_layout.nb_channels < 256);
    uint8_t nb_channels = ctx->ch_layout.nb_channels;
#else
    int tmp = av_get_channel_layout_nb_channels(ctx->channel_layout);
    assert(tmp > 0);
    unsigned nb_channels = tmp;
    assert(tmp > 0 && tmp < 256);
    uint8_t nb_channels = tmp;
#endif

    assert(ctx->sample_rate > 0);
@@ -350,17 +47,19 @@ sc_audio_player_frame_sink_open(struct sc_frame_sink *sink,
    int out_bytes_per_sample = av_get_bytes_per_sample(SC_AV_SAMPLE_FMT);
    assert(out_bytes_per_sample > 0);

    ap->sample_rate = ctx->sample_rate;
    ap->nb_channels = nb_channels;
    ap->out_bytes_per_sample = out_bytes_per_sample;
    uint32_t target_buffering_samples =
        ap->target_buffering_delay * ctx->sample_rate / SC_TICK_FREQ;

    ap->target_buffering = ap->target_buffering_delay * ap->sample_rate
                         / SC_TICK_FREQ;
    size_t sample_size = nb_channels * out_bytes_per_sample;
    bool ok = sc_audio_regulator_init(&ap->audioreg, sample_size, ctx,
                                      target_buffering_samples);
    if (!ok) {
        return false;
    }

    uint64_t aout_samples = ap->output_buffer_duration * ap->sample_rate
    uint64_t aout_samples = ap->output_buffer_duration * ctx->sample_rate
                          / SC_TICK_FREQ;
    assert(aout_samples <= 0xFFFF);
    ap->output_buffer = (uint16_t) aout_samples;

    SDL_AudioSpec desired = {
        .freq = ctx->sample_rate,
@@ -375,69 +74,10 @@ sc_audio_player_frame_sink_open(struct sc_frame_sink *sink,
    ap->device = SDL_OpenAudioDevice(NULL, 0, &desired, &obtained, 0);
    if (!ap->device) {
        LOGE("Could not open audio device: %s", SDL_GetError());
        sc_audio_regulator_destroy(&ap->audioreg);
        return false;
    }

    SwrContext *swr_ctx = swr_alloc();
    if (!swr_ctx) {
        LOG_OOM();
        goto error_close_audio_device;
    }
    ap->swr_ctx = swr_ctx;

#ifdef SCRCPY_LAVU_HAS_CHLAYOUT
    av_opt_set_chlayout(swr_ctx, "in_chlayout", &ctx->ch_layout, 0);
    av_opt_set_chlayout(swr_ctx, "out_chlayout", &ctx->ch_layout, 0);
#else
    av_opt_set_channel_layout(swr_ctx, "in_channel_layout",
                              ctx->channel_layout, 0);
    av_opt_set_channel_layout(swr_ctx, "out_channel_layout",
                              ctx->channel_layout, 0);
#endif

    av_opt_set_int(swr_ctx, "in_sample_rate", ctx->sample_rate, 0);
    av_opt_set_int(swr_ctx, "out_sample_rate", ctx->sample_rate, 0);

    av_opt_set_sample_fmt(swr_ctx, "in_sample_fmt", ctx->sample_fmt, 0);
    av_opt_set_sample_fmt(swr_ctx, "out_sample_fmt", SC_AV_SAMPLE_FMT, 0);

    int ret = swr_init(swr_ctx);
    if (ret) {
        LOGE("Failed to initialize the resampling context");
        goto error_free_swr_ctx;
    }

    // Use a ring-buffer of the target buffering size plus 1 second between the
    // producer and the consumer. It's too big on purpose, to guarantee that
    // the producer and the consumer will be able to access it in parallel
    // without locking.
    uint32_t audiobuf_samples = ap->target_buffering + ap->sample_rate;

    size_t sample_size = ap->nb_channels * ap->out_bytes_per_sample;
    bool ok = sc_audiobuf_init(&ap->buf, sample_size, audiobuf_samples);
    if (!ok) {
        goto error_free_swr_ctx;
    }

    size_t initial_swr_buf_size = TO_BYTES(4096);
    ap->swr_buf = malloc(initial_swr_buf_size);
    if (!ap->swr_buf) {
        LOG_OOM();
        goto error_destroy_audiobuf;
    }
    ap->swr_buf_alloc_size = initial_swr_buf_size;

    // Samples are produced and consumed by blocks, so the buffering must be
    // smoothed to get a relatively stable value.
    sc_average_init(&ap->avg_buffering, 128);
    ap->samples_since_resync = 0;

    ap->received = false;
    atomic_init(&ap->played, false);
    atomic_init(&ap->received, false);
    atomic_init(&ap->underflow, 0);
    ap->compensation = 0;

    // The thread calling open() is the thread calling push(), which fills the
    // audio buffer consumed by the SDL audio thread.
    ok = sc_thread_set_priority(SC_THREAD_PRIORITY_TIME_CRITICAL);
@@ -449,15 +89,6 @@ sc_audio_player_frame_sink_open(struct sc_frame_sink *sink,
    SDL_PauseAudioDevice(ap->device, 0);

    return true;

error_destroy_audiobuf:
    sc_audiobuf_destroy(&ap->buf);
error_free_swr_ctx:
    swr_free(&ap->swr_ctx);
error_close_audio_device:
    SDL_CloseAudioDevice(ap->device);

    return false;
}

static void
@@ -468,9 +99,7 @@ sc_audio_player_frame_sink_close(struct sc_frame_sink *sink) {
    SDL_PauseAudioDevice(ap->device, 1);
    SDL_CloseAudioDevice(ap->device);

    free(ap->swr_buf);
    sc_audiobuf_destroy(&ap->buf);
    swr_free(&ap->swr_ctx);
    sc_audio_regulator_destroy(&ap->audioreg);
}

void
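The buffering and compensation logic above works in sample counts derived from the configured delay and the stream sample rate. As a hedged illustration (not part of the diff), the following standalone C program plugs in assumed values, a 48 kHz stream, a 50 ms target buffering and SC_TICK_FREQ counting microseconds, to show the resulting magnitudes:

```c
#include <stdio.h>

int main(void) {
    // Assumed values (not taken from the diff): 48 kHz audio, a 50 ms target
    // buffering, and a tick frequency of 1000000 (one tick per microsecond).
    long long tick_freq = 1000000;
    long long target_buffering_delay = 50000;  // 50 ms in ticks, per the assumption
    long long sample_rate = 48000;

    // Same shape as: target_buffering_delay * sample_rate / SC_TICK_FREQ
    long long target_buffering = target_buffering_delay * sample_rate / tick_freq;

    // Compensation bounds used by the push() logic above
    long long enable_threshold = sample_rate * 4 / 1000;  // +/- 4 ms -> 192 samples
    long long disable_threshold = sample_rate / 1000;     // +/- 1 ms -> 48 samples
    long long distance = 4 * sample_rate;                 // compensate over 4 seconds
    long long abs_max_diff = distance / 50;               // 2% rate  -> 3840 samples

    printf("target buffering: %lld samples\n", target_buffering);  // 2400
    printf("compensation enable/disable thresholds: %lld / %lld samples\n",
           enable_threshold, disable_threshold);
    printf("max compensation: %lld samples spread over %lld samples\n",
           abs_max_diff, distance);
    return 0;
}
```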
@@ -5,76 +5,27 @@

#include <stdatomic.h>
#include <stdbool.h>
#include <libavformat/avformat.h>
#include <libswresample/swresample.h>
#include <SDL2/SDL.h>

#include "audio_regulator.h"
#include "trait/frame_sink.h"
#include "util/audiobuf.h"
#include "util/average.h"
#include "util/thread.h"
#include "util/tick.h"

struct sc_audio_player {
    struct sc_frame_sink frame_sink;

    SDL_AudioDeviceID device;

    // The target buffering between the producer and the consumer. This value
    // is directly use for compensation.
    // Since audio capture and/or encoding on the device typically produce
    // blocks of 960 samples (20ms) or 1024 samples (~21.3ms), this target
    // value should be higher.
    sc_tick target_buffering_delay;
    uint32_t target_buffering; // in samples

    // SDL audio output buffer size.
    // SDL audio output buffer size
    sc_tick output_buffer_duration;
    uint16_t output_buffer;

    // Audio buffer to communicate between the receiver and the SDL audio
    // callback
    struct sc_audiobuf buf;

    // Resampler (only used from the receiver thread)
    struct SwrContext *swr_ctx;

    // The sample rate is the same for input and output
    unsigned sample_rate;
    // The number of channels is the same for input and output
    unsigned nb_channels;
    // The number of bytes per sample for a single channel
    size_t out_bytes_per_sample;

    // Target buffer for resampling (only used by the receiver thread)
    uint8_t *swr_buf;
    size_t swr_buf_alloc_size;

    // Number of buffered samples (may be negative on underflow) (only used by
    // the receiver thread)
    struct sc_average avg_buffering;
    // Count the number of samples to trigger a compensation update regularly
    // (only used by the receiver thread)
    uint32_t samples_since_resync;

    // Number of silence samples inserted since the last received packet
    atomic_uint_least32_t underflow;

    // Current applied compensation value (only used by the receiver thread)
    int compensation;

    // Set to true the first time a sample is received
    atomic_bool received;

    // Set to true the first time the SDL callback is called
    atomic_bool played;

    const struct sc_audio_player_callbacks *cbs;
    void *cbs_userdata;
};

struct sc_audio_player_callbacks {
    void (*on_ended)(struct sc_audio_player *ap, bool success, void *userdata);
    SDL_AudioDeviceID device;
    struct sc_audio_regulator audioreg;
};

void
415  app/src/audio_regulator.c  Normal file

@@ -0,0 +1,415 @@
#include "audio_regulator.h"

#include <libavcodec/avcodec.h>
#include <libavutil/opt.h>

#include "util/log.h"

//#define SC_AUDIO_REGULATOR_DEBUG // uncomment to debug

/**
 * Real-time audio regulator with configurable latency
 *
 * As input, the regulator regularly receives AVFrames of decoded audio samples.
 * As output, the audio player regularly requests audio samples to be played.
 * In the middle, an audio buffer stores the samples produced but not consumed
 * yet.
 *
 * The goal of the regulator is to feed the audio player with a latency as low
 * as possible while avoiding buffer underrun (i.e. not being able to provide
 * samples when requested).
 *
 * To achieve this, it attempts to maintain the average buffering (the number
 * of samples present in the buffer) around a target value. If this target
 * buffering is too low, then buffer underrun will occur frequently. If it is
 * too high, then latency will become unacceptable. This target value is
 * configured using the scrcpy option --audio-buffer.
 *
 * The regulator cannot adjust the sample input rate (it receives samples
 * produced in real-time) or the sample output rate (it must provide samples as
 * requested by the audio player). Therefore, it may only apply compensation by
 * resampling (converting _m_ input samples to _n_ output samples).
 *
 * The compensation itself is applied by libswresample (FFmpeg). It is
 * configured using swr_set_compensation(). An important work for the regulator
 * is to estimate the compensation value regularly and apply it.
 *
 * The estimated buffering level is the result of averaging the "natural"
 * buffering (samples are produced and consumed by blocks, so it must be
 * smoothed), and making instant adjustments resulting of its own actions
 * (explicit compensation and silence insertion on underflow), which are not
 * smoothed.
 *
 * Buffer underflow events can occur when packets arrive too late. In that case,
 * the regulator inserts silence. Once the packets finally arrive (late), one
 * strategy could be to drop the samples that were replaced by silence, in
 * order to keep a minimal latency. However, dropping samples in case of buffer
 * underflow is inadvisable, as it would temporarily increase the underflow
 * even more and cause very noticeable audio glitches.
 *
 * Therefore, the regulator doesn't drop any sample on underflow. The
 * compensation mechanism will absorb the delay introduced by the inserted
 * silence.
 */

#define TO_BYTES(SAMPLES) sc_audiobuf_to_bytes(&ar->buf, (SAMPLES))
#define TO_SAMPLES(BYTES) sc_audiobuf_to_samples(&ar->buf, (BYTES))

void
sc_audio_regulator_pull(struct sc_audio_regulator *ar, uint8_t *out,
                        uint32_t out_samples) {
#ifdef SC_AUDIO_REGULATOR_DEBUG
    LOGD("[Audio] Audio regulator pulls %" PRIu32 " samples", out_samples);
#endif

    // A lock is necessary in the rare case where the producer needs to drop
    // samples already pushed (when the buffer is full)
    sc_mutex_lock(&ar->mutex);

    bool played = atomic_load_explicit(&ar->played, memory_order_relaxed);
    if (!played) {
        uint32_t buffered_samples = sc_audiobuf_can_read(&ar->buf);
        // Wait until the buffer is filled up to at least target_buffering
        // before playing
        if (buffered_samples < ar->target_buffering) {
            LOGV("[Audio] Inserting initial buffering silence: %" PRIu32
                 " samples", out_samples);
            // Delay playback starting to reach the target buffering. Fill the
            // whole buffer with silence (len is small compared to the
            // arbitrary margin value).
            memset(out, 0, out_samples * ar->sample_size);
            sc_mutex_unlock(&ar->mutex);
            return;
        }
    }

    uint32_t read = sc_audiobuf_read(&ar->buf, out, out_samples);

    sc_mutex_unlock(&ar->mutex);

    if (read < out_samples) {
        uint32_t silence = out_samples - read;
        // Insert silence. In theory, the inserted silent samples replace the
        // missing real samples, which will arrive later, so they should be
        // dropped to keep the latency minimal. However, this would cause very
        // audible glitches, so let the clock compensation restore the target
        // latency.
        LOGD("[Audio] Buffer underflow, inserting silence: %" PRIu32 " samples",
             silence);
        memset(out + TO_BYTES(read), 0, TO_BYTES(silence));

        bool received = atomic_load_explicit(&ar->received,
                                             memory_order_relaxed);
        if (received) {
            // Inserting additional samples immediately increases buffering
            atomic_fetch_add_explicit(&ar->underflow, silence,
                                      memory_order_relaxed);
        }
    }

    atomic_store_explicit(&ar->played, true, memory_order_relaxed);
}

static uint8_t *
sc_audio_regulator_get_swr_buf(struct sc_audio_regulator *ar,
                               uint32_t min_samples) {
    size_t min_buf_size = TO_BYTES(min_samples);
    if (min_buf_size > ar->swr_buf_alloc_size) {
        size_t new_size = min_buf_size + 4096;
        uint8_t *buf = realloc(ar->swr_buf, new_size);
        if (!buf) {
            LOG_OOM();
            // Could not realloc to the requested size
            return NULL;
        }
        ar->swr_buf = buf;
        ar->swr_buf_alloc_size = new_size;
    }

    return ar->swr_buf;
}

bool
sc_audio_regulator_push(struct sc_audio_regulator *ar, const AVFrame *frame) {
    SwrContext *swr_ctx = ar->swr_ctx;

    int64_t swr_delay = swr_get_delay(swr_ctx, ar->sample_rate);
    // No need to av_rescale_rnd(), input and output sample rates are the same.
    // Add more space (256) for clock compensation.
    int dst_nb_samples = swr_delay + frame->nb_samples + 256;

    uint8_t *swr_buf = sc_audio_regulator_get_swr_buf(ar, dst_nb_samples);
    if (!swr_buf) {
        return false;
    }

    int ret = swr_convert(swr_ctx, &swr_buf, dst_nb_samples,
                          (const uint8_t **) frame->data, frame->nb_samples);
    if (ret < 0) {
        LOGE("Resampling failed: %d", ret);
        return false;
    }

    // swr_convert() returns the number of samples which would have been
    // written if the buffer was big enough.
    uint32_t samples = MIN(ret, dst_nb_samples);
#ifdef SC_AUDIO_REGULATOR_DEBUG
    LOGD("[Audio] %" PRIu32 " samples written to buffer", samples);
#endif

    uint32_t cap = sc_audiobuf_capacity(&ar->buf);
    if (samples > cap) {
        // Very very unlikely: a single resampled frame should never
        // exceed the audio buffer size (or something is very wrong).
        // Ignore the first bytes in swr_buf to avoid memory corruption anyway.
        swr_buf += TO_BYTES(samples - cap);
        samples = cap;
    }

    uint32_t skipped_samples = 0;

    uint32_t written = sc_audiobuf_write(&ar->buf, swr_buf, samples);
    if (written < samples) {
        uint32_t remaining = samples - written;

        // All samples that could be written without locking have been written,
        // now we need to lock to drop/consume old samples
        sc_mutex_lock(&ar->mutex);

        // Retry with the lock
        written += sc_audiobuf_write(&ar->buf,
                                     swr_buf + TO_BYTES(written),
                                     remaining);
        if (written < samples) {
            remaining = samples - written;
            // Still insufficient, drop old samples to make space
            skipped_samples = sc_audiobuf_read(&ar->buf, NULL, remaining);
            assert(skipped_samples == remaining);
        }

        sc_mutex_unlock(&ar->mutex);

        if (written < samples) {
            // Now there is enough space
            uint32_t w = sc_audiobuf_write(&ar->buf,
                                           swr_buf + TO_BYTES(written),
                                           remaining);
            assert(w == remaining);
            (void) w;
        }
    }

    uint32_t underflow = 0;
    uint32_t max_buffered_samples;
    bool played = atomic_load_explicit(&ar->played, memory_order_relaxed);
    if (played) {
        underflow = atomic_exchange_explicit(&ar->underflow, 0,
                                             memory_order_relaxed);

        max_buffered_samples = ar->target_buffering * 11 / 10
                             + 60 * ar->sample_rate / 1000 /* 60 ms */;
    } else {
        // Playback not started yet, do not accumulate more than
        // max_initial_buffering samples, this would cause unnecessary delay
        // (and glitches to compensate) on start.
        max_buffered_samples = ar->target_buffering
                             + 10 * ar->sample_rate / 1000 /* 10 ms */;
    }

    uint32_t can_read = sc_audiobuf_can_read(&ar->buf);
    if (can_read > max_buffered_samples) {
        uint32_t skip_samples = 0;

        sc_mutex_lock(&ar->mutex);
        can_read = sc_audiobuf_can_read(&ar->buf);
        if (can_read > max_buffered_samples) {
            skip_samples = can_read - max_buffered_samples;
            uint32_t r = sc_audiobuf_read(&ar->buf, NULL, skip_samples);
            assert(r == skip_samples);
            (void) r;
            skipped_samples += skip_samples;
        }
        sc_mutex_unlock(&ar->mutex);

        if (skip_samples) {
            if (played) {
                LOGD("[Audio] Buffering threshold exceeded, skipping %" PRIu32
                     " samples", skip_samples);
#ifdef SC_AUDIO_REGULATOR_DEBUG
            } else {
                LOGD("[Audio] Playback not started, skipping %" PRIu32
                     " samples", skip_samples);
#endif
            }
        }
    }

    atomic_store_explicit(&ar->received, true, memory_order_relaxed);
    if (!played) {
        // Nothing more to do
        return true;
    }

    // Number of samples added (or removed, if negative) for compensation
    int32_t instant_compensation = (int32_t) written - frame->nb_samples;
    // Inserting silence instantly increases buffering
    int32_t inserted_silence = (int32_t) underflow;
    // Dropping input samples instantly decreases buffering
    int32_t dropped = (int32_t) skipped_samples;

    // The compensation must apply instantly, it must not be smoothed
    ar->avg_buffering.avg += instant_compensation + inserted_silence - dropped;
    if (ar->avg_buffering.avg < 0) {
        // Since dropping samples instantly reduces buffering, the difference
        // is applied immediately to the average value, assuming that the delay
        // between the producer and the consumer will be caught up.
        //
        // However, when this assumption is not valid, the average buffering
        // may decrease indefinitely. Prevent it to become negative to limit
        // the consequences.
        ar->avg_buffering.avg = 0;
    }

    // However, the buffering level must be smoothed
    sc_average_push(&ar->avg_buffering, can_read);

#ifdef SC_AUDIO_REGULATOR_DEBUG
    LOGD("[Audio] can_read=%" PRIu32 " avg_buffering=%f",
         can_read, sc_average_get(&ar->avg_buffering));
#endif

    ar->samples_since_resync += written;
    if (ar->samples_since_resync >= ar->sample_rate) {
        // Recompute compensation every second
        ar->samples_since_resync = 0;

        float avg = sc_average_get(&ar->avg_buffering);
        int diff = ar->target_buffering - avg;

        // Enable compensation when the difference exceeds +/- 4ms.
        // Disable compensation when the difference is lower than +/- 1ms.
        int threshold = ar->compensation != 0
                      ? ar->sample_rate / 1000 /* 1ms */
                      : ar->sample_rate * 4 / 1000; /* 4ms */

        if (abs(diff) < threshold) {
            // Do not compensate for small values, the error is just noise
            diff = 0;
        } else if (diff < 0 && can_read < ar->target_buffering) {
            // Do not accelerate if the instant buffering level is below the
            // target, this would increase underflow
            diff = 0;
        }
        // Compensate the diff over 4 seconds (but will be recomputed after 1
        // second)
        int distance = 4 * ar->sample_rate;
        // Limit compensation rate to 2%
        int abs_max_diff = distance / 50;
        diff = CLAMP(diff, -abs_max_diff, abs_max_diff);
        LOGV("[Audio] Buffering: target=%" PRIu32 " avg=%f cur=%" PRIu32
             " compensation=%d", ar->target_buffering, avg, can_read, diff);

        if (diff != ar->compensation) {
            int ret = swr_set_compensation(swr_ctx, diff, distance);
            if (ret < 0) {
                LOGW("Resampling compensation failed: %d", ret);
                // not fatal
            } else {
                ar->compensation = diff;
            }
        }
    }

    return true;
}

bool
sc_audio_regulator_init(struct sc_audio_regulator *ar, size_t sample_size,
                        const AVCodecContext *ctx, uint32_t target_buffering) {
    SwrContext *swr_ctx = swr_alloc();
    if (!swr_ctx) {
        LOG_OOM();
        return false;
    }
    ar->swr_ctx = swr_ctx;

#ifdef SCRCPY_LAVU_HAS_CHLAYOUT
    av_opt_set_chlayout(swr_ctx, "in_chlayout", &ctx->ch_layout, 0);
    av_opt_set_chlayout(swr_ctx, "out_chlayout", &ctx->ch_layout, 0);
#else
    av_opt_set_channel_layout(swr_ctx, "in_channel_layout",
                              ctx->channel_layout, 0);
    av_opt_set_channel_layout(swr_ctx, "out_channel_layout",
                              ctx->channel_layout, 0);
#endif

    av_opt_set_int(swr_ctx, "in_sample_rate", ctx->sample_rate, 0);
    av_opt_set_int(swr_ctx, "out_sample_rate", ctx->sample_rate, 0);

    av_opt_set_sample_fmt(swr_ctx, "in_sample_fmt", ctx->sample_fmt, 0);
    av_opt_set_sample_fmt(swr_ctx, "out_sample_fmt", SC_AV_SAMPLE_FMT, 0);

    int ret = swr_init(swr_ctx);
    if (ret) {
        LOGE("Failed to initialize the resampling context");
        goto error_free_swr_ctx;
    }

    bool ok = sc_mutex_init(&ar->mutex);
    if (!ok) {
        goto error_free_swr_ctx;
    }

    ar->target_buffering = target_buffering;
    ar->sample_size = sample_size;
    ar->sample_rate = ctx->sample_rate;

    // Use a ring-buffer of the target buffering size plus 1 second between the
    // producer and the consumer. It's too big on purpose, to guarantee that
    // the producer and the consumer will be able to access it in parallel
    // without locking.
    uint32_t audiobuf_samples = target_buffering + ar->sample_rate;

    ok = sc_audiobuf_init(&ar->buf, sample_size, audiobuf_samples);
    if (!ok) {
        goto error_destroy_mutex;
    }

    size_t initial_swr_buf_size = TO_BYTES(4096);
    ar->swr_buf = malloc(initial_swr_buf_size);
    if (!ar->swr_buf) {
        LOG_OOM();
        goto error_destroy_audiobuf;
    }
    ar->swr_buf_alloc_size = initial_swr_buf_size;

    // Samples are produced and consumed by blocks, so the buffering must be
    // smoothed to get a relatively stable value.
    sc_average_init(&ar->avg_buffering, 128);
    ar->samples_since_resync = 0;

    ar->received = false;
    atomic_init(&ar->played, false);
    atomic_init(&ar->received, false);
    atomic_init(&ar->underflow, 0);
    ar->compensation = 0;

    return true;

error_destroy_audiobuf:
    sc_audiobuf_destroy(&ar->buf);
error_destroy_mutex:
    sc_mutex_destroy(&ar->mutex);
error_free_swr_ctx:
    swr_free(&ar->swr_ctx);

    return false;
}

void
sc_audio_regulator_destroy(struct sc_audio_regulator *ar) {
    free(ar->swr_buf);
    sc_audiobuf_destroy(&ar->buf);
    sc_mutex_destroy(&ar->mutex);
    swr_free(&ar->swr_ctx);
}

71  app/src/audio_regulator.h  Normal file

@@ -0,0 +1,71 @@
#ifndef SC_AUDIO_REGULATOR_H
#define SC_AUDIO_REGULATOR_H

#include "common.h"

#include <stdatomic.h>
#include <stdbool.h>
#include <libavcodec/avcodec.h>
#include <libswresample/swresample.h>
#include "util/audiobuf.h"
#include "util/average.h"
#include "util/thread.h"

#define SC_AV_SAMPLE_FMT AV_SAMPLE_FMT_FLT

struct sc_audio_regulator {
    sc_mutex mutex;

    // Target buffering between the producer and the consumer (in samples)
    uint32_t target_buffering;

    // Audio buffer to communicate between the receiver and the player
    struct sc_audiobuf buf;

    // Resampler (only used from the receiver thread)
    struct SwrContext *swr_ctx;

    // The sample rate is the same for input and output
    uint32_t sample_rate;
    // The number of bytes per sample (for all channels)
    size_t sample_size;

    // Target buffer for resampling (only used by the receiver thread)
    uint8_t *swr_buf;
    size_t swr_buf_alloc_size;

    // Number of buffered samples (may be negative on underflow) (only used by
    // the receiver thread)
    struct sc_average avg_buffering;
    // Count the number of samples to trigger a compensation update regularly
    // (only used by the receiver thread)
    uint32_t samples_since_resync;

    // Number of silence samples inserted since the last received packet
    atomic_uint_least32_t underflow;

    // Current applied compensation value (only used by the receiver thread)
    int compensation;

    // Set to true the first time a sample is received
    atomic_bool received;

    // Set to true the first time samples are pulled by the player
    atomic_bool played;
};

bool
sc_audio_regulator_init(struct sc_audio_regulator *ar, size_t sample_size,
                        const AVCodecContext *ctx, uint32_t target_buffering);

void
sc_audio_regulator_destroy(struct sc_audio_regulator *ar);

bool
sc_audio_regulator_push(struct sc_audio_regulator *ar, const AVFrame *frame);

void
sc_audio_regulator_pull(struct sc_audio_regulator *ar, uint8_t *out,
                        uint32_t samples);

#endif

111  app/src/cli.c

@@ -50,6 +50,7 @@ enum {
    OPT_POWER_OFF_ON_CLOSE,
    OPT_V4L2_SINK,
    OPT_DISPLAY_BUFFER,
    OPT_VIDEO_BUFFER,
    OPT_V4L2_BUFFER,
    OPT_TUNNEL_HOST,
    OPT_TUNNEL_PORT,
@@ -102,6 +103,9 @@ enum {
    OPT_NO_MOUSE_HOVER,
    OPT_AUDIO_DUP,
    OPT_GAMEPAD,
    OPT_NEW_DISPLAY,
    OPT_LIST_APPS,
    OPT_START_APP,
};

struct sc_option {
@@ -318,12 +322,10 @@ static const struct sc_option options[] = {
        .argdesc = "id",
    },
    {
        // deprecated
        .longopt_id = OPT_DISPLAY_BUFFER,
        .longopt = "display-buffer",
        .argdesc = "ms",
        .text = "Add a buffering delay (in milliseconds) before displaying. "
                "This increases latency to compensate for jitter.\n"
                "Default is 0 (no buffering).",
    },
    {
        .longopt_id = OPT_DISPLAY_ID,
@@ -442,6 +444,11 @@ static const struct sc_option options[] = {
                "This is a workaround for some devices not behaving as "
                "expected when setting the device clipboard programmatically.",
    },
    {
        .longopt_id = OPT_LIST_APPS,
        .longopt = "list-apps",
        .text = "List Android apps installed on the device.",
    },
    {
        .longopt_id = OPT_LIST_CAMERAS,
        .longopt = "list-cameras",
@@ -557,6 +564,21 @@ static const struct sc_option options[] = {
        .text = "Disable video and audio playback on the computer (equivalent "
                "to --no-video-playback --no-audio-playback).",
    },
    {
        .longopt_id = OPT_NEW_DISPLAY,
        .longopt = "new-display",
        .argdesc = "[<width>x<height>][/<dpi>]",
        .optional_arg = true,
        .text = "Create a new display with the specified resolution and "
                "density. If not provided, they default to the main display "
                "dimensions and DPI, and --max-size is considered.\n"
                "Examples:\n"
                "    --new-display=1920x1080\n"
                "    --new-display=1920x1080/420  # force 420 dpi\n"
                "    --new-display  # main display size and density\n"
                "    --new-display -m1920  # scaled to fit a max size of 1920\n"
                "    --new-display=/240  # main display size and 240 dpi",
    },
    {
        .longopt_id = OPT_NO_AUDIO,
        .longopt = "no-audio",
@@ -784,6 +806,20 @@ static const struct sc_option options[] = {
                "shortcuts, pass \"lctrl,lsuper\".\n"
                "Default is \"lalt,lsuper\" (left-Alt or left-Super).",
    },
    {
        .longopt_id = OPT_START_APP,
        .longopt = "start-app",
        .argdesc = "name",
        .text = "Start an Android app, by its exact package name.\n"
                "Add a '?' prefix to select an app whose name starts with the "
                "given name, case-insensitive (retrieving app names on the "
                "device may take some time):\n"
                "    scrcpy --start-app=?firefox\n"
                "Add a '+' prefix to force-stop before starting the app:\n"
                "    scrcpy --new-display --start-app=+org.mozilla.firefox\n"
                "Both prefixes can be used, in that order:\n"
                "    scrcpy --start-app=+?firefox",
    },
    {
        .shortopt = 't',
        .longopt = "show-touches",
@@ -861,11 +897,20 @@ static const struct sc_option options[] = {
        .argdesc = "ms",
        .text = "Add a buffering delay (in milliseconds) before pushing "
                "frames. This increases latency to compensate for jitter.\n"
                "This option is similar to --display-buffer, but specific to "
                "This option is similar to --video-buffer, but specific to "
                "V4L2 sink.\n"
                "Default is 0 (no buffering).\n"
                "This option is only available on Linux.",
    },
    {
        .longopt_id = OPT_VIDEO_BUFFER,
        .longopt = "video-buffer",
        .argdesc = "ms",
        .text = "Add a buffering delay (in milliseconds) before displaying "
                "video frames.\n"
                "This increases latency to compensate for jitter.\n"
                "Default is 0 (no buffering).",
    },
    {
        .longopt_id = OPT_VIDEO_CODEC,
        .longopt = "video-codec",
@@ -1072,7 +1117,11 @@ static const struct sc_shortcut shortcuts[] = {
    },
    {
        .shortcuts = { "Shift+click-and-move" },
        .text = "Tilt (slide vertically with two fingers)",
        .text = "Tilt vertically (slide with 2 fingers)",
    },
    {
        .shortcuts = { "Ctrl+Shift+click-and-move" },
|
||||
.text = "Tilt horizontally (slide with 2 fingers)",
|
||||
},
|
||||
{
|
||||
.shortcuts = { "Drag & drop APK file" },
|
||||
@ -2508,7 +2557,11 @@ parse_args_with_getopt(struct scrcpy_cli_args *args, int argc, char *argv[],
|
||||
opts->power_off_on_close = true;
|
||||
break;
|
||||
case OPT_DISPLAY_BUFFER:
|
||||
if (!parse_buffering_time(optarg, &opts->display_buffer)) {
|
||||
LOGW("--display-buffer is deprecated, use --video-buffer "
|
||||
"instead.");
|
||||
// fall through
|
||||
case OPT_VIDEO_BUFFER:
|
||||
if (!parse_buffering_time(optarg, &opts->video_buffer)) {
|
||||
return false;
|
||||
}
|
||||
break;
|
||||
@ -2591,6 +2644,9 @@ parse_args_with_getopt(struct scrcpy_cli_args *args, int argc, char *argv[],
|
||||
case OPT_LIST_CAMERA_SIZES:
|
||||
opts->list |= SC_OPTION_LIST_CAMERA_SIZES;
|
||||
break;
|
||||
case OPT_LIST_APPS:
|
||||
opts->list |= SC_OPTION_LIST_APPS;
|
||||
break;
|
||||
case OPT_REQUIRE_AUDIO:
|
||||
opts->require_audio = true;
|
||||
break;
|
||||
@ -2664,6 +2720,12 @@ parse_args_with_getopt(struct scrcpy_cli_args *args, int argc, char *argv[],
|
||||
return false;
|
||||
}
|
||||
break;
|
||||
case OPT_NEW_DISPLAY:
|
||||
opts->new_display = optarg ? optarg : "";
|
||||
break;
|
||||
case OPT_START_APP:
|
||||
opts->start_app = optarg;
|
||||
break;
|
||||
default:
|
||||
// getopt prints the error message on stderr
|
||||
return false;
|
||||
@ -2762,7 +2824,8 @@ parse_args_with_getopt(struct scrcpy_cli_args *args, int argc, char *argv[],
|
||||
SC_LOCK_VIDEO_ORIENTATION_UNLOCKED) {
|
||||
LOGI("Video orientation is locked for v4l2 sink. "
|
||||
"See --lock-video-orientation.");
|
||||
opts->lock_video_orientation = SC_LOCK_VIDEO_ORIENTATION_INITIAL;
|
||||
opts->lock_video_orientation =
|
||||
SC_LOCK_VIDEO_ORIENTATION_INITIAL_AUTO;
|
||||
}
|
||||
|
||||
// V4L2 could not handle size change.
|
||||
@ -2772,7 +2835,7 @@ parse_args_with_getopt(struct scrcpy_cli_args *args, int argc, char *argv[],
|
||||
}
|
||||
|
||||
if (opts->v4l2_buffer && !opts->v4l2_device) {
|
||||
LOGE("V4L2 buffer value without V4L2 sink\n");
|
||||
LOGE("V4L2 buffer value without V4L2 sink");
|
||||
return false;
|
||||
}
|
||||
#endif
|
||||
@ -2791,8 +2854,8 @@ parse_args_with_getopt(struct scrcpy_cli_args *args, int argc, char *argv[],
|
||||
if (otg) {
|
||||
opts->mouse_input_mode = SC_MOUSE_INPUT_MODE_AOA;
|
||||
} else if (!opts->video_playback) {
|
||||
LOGI("No video mirroring, mouse mode switched to UHID");
|
||||
opts->mouse_input_mode = SC_MOUSE_INPUT_MODE_UHID;
|
||||
LOGI("No video mirroring, SDK mouse disabled");
|
||||
opts->mouse_input_mode = SC_MOUSE_INPUT_MODE_DISABLED;
|
||||
} else {
|
||||
opts->mouse_input_mode = SC_MOUSE_INPUT_MODE_SDK;
|
||||
}
|
||||
@ -2844,6 +2907,25 @@ parse_args_with_getopt(struct scrcpy_cli_args *args, int argc, char *argv[],
|
||||
}
|
||||
}
|
||||
|
||||
if (opts->new_display) {
|
||||
if (opts->video_source != SC_VIDEO_SOURCE_DISPLAY) {
|
||||
LOGE("--new-display is only available with --video-source=display");
|
||||
return false;
|
||||
}
|
||||
|
||||
if (!opts->video) {
|
||||
LOGE("--new-display is incompatible with --no-video");
|
||||
return false;
|
||||
}
|
||||
|
||||
if (opts->max_size && opts->new_display[0] != '\0'
|
||||
&& opts->new_display[0] != '/') {
|
||||
// An explicit size is defined (not "" nor "/<dpi>")
|
||||
LOGE("Cannot specify both --new-display size and -m/--max-size");
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
if (otg) {
|
||||
if (!opts->control) {
|
||||
LOGE("--no-control is not allowed in OTG mode");
|
||||
@ -2950,6 +3032,11 @@ parse_args_with_getopt(struct scrcpy_cli_args *args, int argc, char *argv[],
|
||||
return false;
|
||||
}
|
||||
|
||||
if (opts->display_id != 0 && opts->new_display) {
|
||||
LOGE("Cannot specify both --display-id and --new-display");
|
||||
return false;
|
||||
}
|
||||
|
||||
if (opts->audio && opts->audio_source == SC_AUDIO_SOURCE_AUTO) {
|
||||
// Select the audio source according to the video source
|
||||
if (opts->video_source == SC_VIDEO_SOURCE_DISPLAY) {
|
||||
@ -3082,6 +3169,10 @@ parse_args_with_getopt(struct scrcpy_cli_args *args, int argc, char *argv[],
|
||||
LOGE("Cannot request power off on close if control is disabled");
|
||||
return false;
|
||||
}
|
||||
if (opts->start_app) {
|
||||
LOGE("Cannot start an Android app if control is disabled");
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
# ifdef _WIN32
|
||||
|
@ -22,9 +22,6 @@
|
||||
#define MOTIONEVENT_ACTION_LABEL(value) \
|
||||
ENUM_TO_LABEL(android_motionevent_action_labels, value)
|
||||
|
||||
#define SCREEN_POWER_MODE_LABEL(value) \
|
||||
ENUM_TO_LABEL(screen_power_mode_labels, value)
|
||||
|
||||
static const char *const android_keyevent_action_labels[] = {
|
||||
"down",
|
||||
"up",
|
||||
@ -47,14 +44,6 @@ static const char *const android_motionevent_action_labels[] = {
|
||||
"btn-release",
|
||||
};
|
||||
|
||||
static const char *const screen_power_mode_labels[] = {
|
||||
"off",
|
||||
"doze",
|
||||
"normal",
|
||||
"doze-suspend",
|
||||
"suspend",
|
||||
};
|
||||
|
||||
static const char *const copy_key_labels[] = {
|
||||
"none",
|
||||
"copy",
|
||||
@ -158,8 +147,8 @@ sc_control_msg_serialize(const struct sc_control_msg *msg, uint8_t *buf) {
|
||||
size_t len = write_string(&buf[10], msg->set_clipboard.text,
|
||||
SC_CONTROL_MSG_CLIPBOARD_TEXT_MAX_LENGTH);
|
||||
return 10 + len;
|
||||
case SC_CONTROL_MSG_TYPE_SET_SCREEN_POWER_MODE:
|
||||
buf[1] = msg->set_screen_power_mode.mode;
|
||||
case SC_CONTROL_MSG_TYPE_SET_DISPLAY_POWER:
|
||||
buf[1] = msg->set_display_power.on;
|
||||
return 2;
|
||||
case SC_CONTROL_MSG_TYPE_UHID_CREATE:
|
||||
sc_write16be(&buf[1], msg->uhid_create.id);
|
||||
@ -183,6 +172,10 @@ sc_control_msg_serialize(const struct sc_control_msg *msg, uint8_t *buf) {
|
||||
case SC_CONTROL_MSG_TYPE_UHID_DESTROY:
|
||||
sc_write16be(&buf[1], msg->uhid_destroy.id);
|
||||
return 3;
|
||||
case SC_CONTROL_MSG_TYPE_START_APP: {
|
||||
size_t len = write_string_tiny(&buf[1], msg->start_app.name, 255);
|
||||
return 1 + len;
|
||||
}
|
||||
case SC_CONTROL_MSG_TYPE_EXPAND_NOTIFICATION_PANEL:
|
||||
case SC_CONTROL_MSG_TYPE_EXPAND_SETTINGS_PANEL:
|
||||
case SC_CONTROL_MSG_TYPE_COLLAPSE_PANELS:
|
||||
@ -264,9 +257,9 @@ sc_control_msg_log(const struct sc_control_msg *msg) {
|
||||
msg->set_clipboard.paste ? "paste" : "nopaste",
|
||||
msg->set_clipboard.text);
|
||||
break;
|
||||
case SC_CONTROL_MSG_TYPE_SET_SCREEN_POWER_MODE:
|
||||
LOG_CMSG("power mode %s",
|
||||
SCREEN_POWER_MODE_LABEL(msg->set_screen_power_mode.mode));
|
||||
case SC_CONTROL_MSG_TYPE_SET_DISPLAY_POWER:
|
||||
LOG_CMSG("display power %s",
|
||||
msg->set_display_power.on ? "on" : "off");
|
||||
break;
|
||||
case SC_CONTROL_MSG_TYPE_EXPAND_NOTIFICATION_PANEL:
|
||||
LOG_CMSG("expand notification panel");
|
||||
@ -308,6 +301,9 @@ sc_control_msg_log(const struct sc_control_msg *msg) {
|
||||
case SC_CONTROL_MSG_TYPE_OPEN_HARD_KEYBOARD_SETTINGS:
|
||||
LOG_CMSG("open hard keyboard settings");
|
||||
break;
|
||||
case SC_CONTROL_MSG_TYPE_START_APP:
|
||||
LOG_CMSG("start app \"%s\"", msg->start_app.name);
|
||||
break;
|
||||
default:
|
||||
LOG_CMSG("unknown type: %u", (unsigned) msg->type);
|
||||
break;
|
||||
@ -333,6 +329,9 @@ sc_control_msg_destroy(struct sc_control_msg *msg) {
|
||||
case SC_CONTROL_MSG_TYPE_SET_CLIPBOARD:
|
||||
free(msg->set_clipboard.text);
|
||||
break;
|
||||
case SC_CONTROL_MSG_TYPE_START_APP:
|
||||
free(msg->start_app.name);
|
||||
break;
|
||||
default:
|
||||
// do nothing
|
||||
break;
|
||||
|
@ -35,18 +35,13 @@ enum sc_control_msg_type {
|
||||
SC_CONTROL_MSG_TYPE_COLLAPSE_PANELS,
|
||||
SC_CONTROL_MSG_TYPE_GET_CLIPBOARD,
|
||||
SC_CONTROL_MSG_TYPE_SET_CLIPBOARD,
|
||||
SC_CONTROL_MSG_TYPE_SET_SCREEN_POWER_MODE,
|
||||
SC_CONTROL_MSG_TYPE_SET_DISPLAY_POWER,
|
||||
SC_CONTROL_MSG_TYPE_ROTATE_DEVICE,
|
||||
SC_CONTROL_MSG_TYPE_UHID_CREATE,
|
||||
SC_CONTROL_MSG_TYPE_UHID_INPUT,
|
||||
SC_CONTROL_MSG_TYPE_UHID_DESTROY,
|
||||
SC_CONTROL_MSG_TYPE_OPEN_HARD_KEYBOARD_SETTINGS,
|
||||
};
|
||||
|
||||
enum sc_screen_power_mode {
|
||||
// see <https://android.googlesource.com/platform/frameworks/base.git/+/pie-release-2/core/java/android/view/SurfaceControl.java#305>
|
||||
SC_SCREEN_POWER_MODE_OFF = 0,
|
||||
SC_SCREEN_POWER_MODE_NORMAL = 2,
|
||||
SC_CONTROL_MSG_TYPE_START_APP,
|
||||
};
|
||||
|
||||
enum sc_copy_key {
|
||||
@ -94,8 +89,8 @@ struct sc_control_msg {
|
||||
bool paste;
|
||||
} set_clipboard;
|
||||
struct {
|
||||
enum sc_screen_power_mode mode;
|
||||
} set_screen_power_mode;
|
||||
bool on;
|
||||
} set_display_power;
|
||||
struct {
|
||||
uint16_t id;
|
||||
const char *name; // pointer to static data
|
||||
@ -110,6 +105,9 @@ struct sc_control_msg {
|
||||
struct {
|
||||
uint16_t id;
|
||||
} uhid_destroy;
|
||||
struct {
|
||||
char *name;
|
||||
} start_app;
|
||||
};
|
||||
};
|
||||
|
||||
|
@ -5,7 +5,7 @@
|
||||
|
||||
#include <stdbool.h>
|
||||
#include <stdint.h>
|
||||
#include <SDL_events.h>
|
||||
#include <SDL2/SDL_events.h>
|
||||
|
||||
enum {
|
||||
SC_EVENT_NEW_FRAME = SDL_USEREVENT,
|
||||
|
@ -5,53 +5,9 @@
|
||||
|
||||
#include "input_events.h"
|
||||
#include "screen.h"
|
||||
#include "shortcut_mod.h"
|
||||
#include "util/log.h"
|
||||
|
||||
#define SC_SDL_SHORTCUT_MODS_MASK (KMOD_CTRL | KMOD_ALT | KMOD_GUI)
|
||||
|
||||
static inline uint16_t
|
||||
to_sdl_mod(uint8_t shortcut_mod) {
|
||||
uint16_t sdl_mod = 0;
|
||||
if (shortcut_mod & SC_SHORTCUT_MOD_LCTRL) {
|
||||
sdl_mod |= KMOD_LCTRL;
|
||||
}
|
||||
if (shortcut_mod & SC_SHORTCUT_MOD_RCTRL) {
|
||||
sdl_mod |= KMOD_RCTRL;
|
||||
}
|
||||
if (shortcut_mod & SC_SHORTCUT_MOD_LALT) {
|
||||
sdl_mod |= KMOD_LALT;
|
||||
}
|
||||
if (shortcut_mod & SC_SHORTCUT_MOD_RALT) {
|
||||
sdl_mod |= KMOD_RALT;
|
||||
}
|
||||
if (shortcut_mod & SC_SHORTCUT_MOD_LSUPER) {
|
||||
sdl_mod |= KMOD_LGUI;
|
||||
}
|
||||
if (shortcut_mod & SC_SHORTCUT_MOD_RSUPER) {
|
||||
sdl_mod |= KMOD_RGUI;
|
||||
}
|
||||
return sdl_mod;
|
||||
}
|
||||
|
||||
static bool
|
||||
is_shortcut_mod(struct sc_input_manager *im, uint16_t sdl_mod) {
|
||||
// keep only the relevant modifier keys
|
||||
sdl_mod &= SC_SDL_SHORTCUT_MODS_MASK;
|
||||
|
||||
// at least one shortcut mod pressed?
|
||||
return sdl_mod & im->sdl_shortcut_mods;
|
||||
}
|
||||
|
||||
static bool
|
||||
is_shortcut_key(struct sc_input_manager *im, SDL_Keycode keycode) {
|
||||
return (im->sdl_shortcut_mods & KMOD_LCTRL && keycode == SDLK_LCTRL)
|
||||
|| (im->sdl_shortcut_mods & KMOD_RCTRL && keycode == SDLK_RCTRL)
|
||||
|| (im->sdl_shortcut_mods & KMOD_LALT && keycode == SDLK_LALT)
|
||||
|| (im->sdl_shortcut_mods & KMOD_RALT && keycode == SDLK_RALT)
|
||||
|| (im->sdl_shortcut_mods & KMOD_LGUI && keycode == SDLK_LGUI)
|
||||
|| (im->sdl_shortcut_mods & KMOD_RGUI && keycode == SDLK_RGUI);
|
||||
}
|
||||
|
||||
void
|
||||
sc_input_manager_init(struct sc_input_manager *im,
|
||||
const struct sc_input_manager_params *params) {
|
||||
@ -73,7 +29,7 @@ sc_input_manager_init(struct sc_input_manager *im,
|
||||
im->legacy_paste = params->legacy_paste;
|
||||
im->clipboard_autosync = params->clipboard_autosync;
|
||||
|
||||
im->sdl_shortcut_mods = to_sdl_mod(params->shortcut_mods);
|
||||
im->sdl_shortcut_mods = sc_shortcut_mods_to_sdl(params->shortcut_mods);
|
||||
|
||||
im->vfinger_down = false;
|
||||
im->vfinger_invert_x = false;
|
||||
@ -247,13 +203,12 @@ set_device_clipboard(struct sc_input_manager *im, bool paste,
|
||||
}
|
||||
|
||||
static void
|
||||
set_screen_power_mode(struct sc_input_manager *im,
|
||||
enum sc_screen_power_mode mode) {
|
||||
set_display_power(struct sc_input_manager *im, bool on) {
|
||||
assert(im->controller);
|
||||
|
||||
struct sc_control_msg msg;
|
||||
msg.type = SC_CONTROL_MSG_TYPE_SET_SCREEN_POWER_MODE;
|
||||
msg.set_screen_power_mode.mode = mode;
|
||||
msg.type = SC_CONTROL_MSG_TYPE_SET_DISPLAY_POWER;
|
||||
msg.set_display_power.on = on;
|
||||
|
||||
if (!sc_controller_push_msg(im->controller, &msg)) {
|
||||
LOGW("Could not request 'set screen power mode'");
|
||||
@ -346,7 +301,8 @@ sc_input_manager_process_text_input(struct sc_input_manager *im,
|
||||
return;
|
||||
}
|
||||
|
||||
if (is_shortcut_mod(im, SDL_GetModState())) {
|
||||
if (sc_shortcut_mods_is_shortcut_mod(im->sdl_shortcut_mods,
|
||||
SDL_GetModState())) {
|
||||
// A shortcut must never generate text events
|
||||
return;
|
||||
}
|
||||
@ -413,8 +369,9 @@ sc_input_manager_process_key(struct sc_input_manager *im,
|
||||
// press/release is a modifier key.
|
||||
// The second condition is necessary to ignore the release of the modifier
|
||||
// key (because in this case mod is 0).
|
||||
bool is_shortcut = is_shortcut_mod(im, mod)
|
||||
|| is_shortcut_key(im, sdl_keycode);
|
||||
uint16_t mods = im->sdl_shortcut_mods;
|
||||
bool is_shortcut = sc_shortcut_mods_is_shortcut_mod(mods, mod)
|
||||
|| sc_shortcut_mods_is_shortcut_key(mods, sdl_keycode);
|
||||
|
||||
if (down && !repeat) {
|
||||
if (sdl_keycode == im->last_keycode && mod == im->last_mod) {
|
||||
@ -457,10 +414,8 @@ sc_input_manager_process_key(struct sc_input_manager *im,
|
||||
return;
|
||||
case SDLK_o:
|
||||
if (control && !repeat && down && !paused) {
|
||||
enum sc_screen_power_mode mode = shift
|
||||
? SC_SCREEN_POWER_MODE_NORMAL
|
||||
: SC_SCREEN_POWER_MODE_OFF;
|
||||
set_screen_power_mode(im, mode);
|
||||
bool on = shift;
|
||||
set_display_power(im, on);
|
||||
}
|
||||
return;
|
||||
case SDLK_z:
|
||||
@ -536,7 +491,7 @@ sc_input_manager_process_key(struct sc_input_manager *im,
|
||||
return;
|
||||
case SDLK_f:
|
||||
if (video && !shift && !repeat && down) {
|
||||
sc_screen_switch_fullscreen(im->screen);
|
||||
sc_screen_toggle_fullscreen(im->screen);
|
||||
}
|
||||
return;
|
||||
case SDLK_w:
|
||||
@ -836,7 +791,7 @@ sc_input_manager_process_mouse_button(struct sc_input_manager *im,
|
||||
}
|
||||
|
||||
bool change_vfinger = event->button == SDL_BUTTON_LEFT &&
|
||||
((down && !im->vfinger_down && (ctrl_pressed ^ shift_pressed)) ||
|
||||
((down && !im->vfinger_down && (ctrl_pressed || shift_pressed)) ||
|
||||
(!down && im->vfinger_down));
|
||||
bool use_finger = im->vfinger_down || change_vfinger;
|
||||
|
||||
@ -868,16 +823,28 @@ sc_input_manager_process_mouse_button(struct sc_input_manager *im,
|
||||
// In other words, the center of the rotation/scaling is the center of the
|
||||
// screen.
|
||||
//
|
||||
// To simulate a tilt gesture (a vertical slide with two fingers), Shift
|
||||
// can be used instead of Ctrl. The "virtual finger" has a position
|
||||
// To simulate a vertical tilt gesture (a vertical slide with two fingers),
|
||||
// Shift can be used instead of Ctrl. The "virtual finger" has a position
|
||||
// inverted with respect to the vertical axis of symmetry in the middle of
|
||||
// the screen.
|
||||
//
|
||||
// To simulate a horizontal tilt gesture (a horizontal slide with two
|
||||
// fingers), Ctrl+Shift can be used. The "virtual finger" has a position
|
||||
// inverted with respect to the horizontal axis of symmetry in the middle
|
||||
// of the screen. It is expected to be less frequently used, that's why the
|
||||
// one-mod shortcuts are assigned to rotation and vertical tilt.
|
||||
if (change_vfinger) {
|
||||
struct sc_point mouse =
|
||||
sc_screen_convert_window_to_frame_coords(im->screen, event->x,
|
||||
event->y);
|
||||
if (down) {
|
||||
im->vfinger_invert_x = ctrl_pressed || shift_pressed;
|
||||
// Ctrl Shift invert_x invert_y
|
||||
// ---- ----- ==> -------- --------
|
||||
// 0 0 0 0 -
|
||||
// 0 1 1 0 vertical tilt
|
||||
// 1 0 1 1 rotate
|
||||
// 1 1 0 1 horizontal tilt
|
||||
im->vfinger_invert_x = ctrl_pressed ^ shift_pressed;
|
||||
im->vfinger_invert_y = ctrl_pressed;
|
||||
}
|
||||
struct sc_point vfinger = inverse_point(mouse, im->screen->frame_size,
|
||||
|
@ -45,6 +45,10 @@ convert_keycode(enum sc_keycode from, enum android_keycode *to, uint16_t mod,
|
||||
{SC_KEYCODE_RCTRL, AKEYCODE_CTRL_RIGHT},
|
||||
{SC_KEYCODE_LSHIFT, AKEYCODE_SHIFT_LEFT},
|
||||
{SC_KEYCODE_RSHIFT, AKEYCODE_SHIFT_RIGHT},
|
||||
{SC_KEYCODE_LALT, AKEYCODE_ALT_LEFT},
|
||||
{SC_KEYCODE_RALT, AKEYCODE_ALT_RIGHT},
|
||||
{SC_KEYCODE_LGUI, AKEYCODE_META_LEFT},
|
||||
{SC_KEYCODE_RGUI, AKEYCODE_META_RIGHT},
|
||||
};
|
||||
|
||||
// Numpad navigation keys.
|
||||
@ -166,11 +170,7 @@ convert_keycode(enum sc_keycode from, enum android_keycode *to, uint16_t mod,
|
||||
return false;
|
||||
}
|
||||
|
||||
if (mod & (SC_MOD_LALT | SC_MOD_RALT | SC_MOD_LGUI | SC_MOD_RGUI)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// if ALT and META are not pressed, also handle letters and space
|
||||
// Handle letters and space
|
||||
entry = SC_INTMAP_FIND_ENTRY(alphaspace_keys, from);
|
||||
if (entry) {
|
||||
*to = entry->value;
|
||||
|
123
app/src/mouse_capture.c
Normal file
@ -0,0 +1,123 @@
|
||||
#include "mouse_capture.h"
|
||||
|
||||
#include "shortcut_mod.h"
|
||||
#include "util/log.h"
|
||||
|
||||
void
|
||||
sc_mouse_capture_init(struct sc_mouse_capture *mc, SDL_Window *window,
|
||||
uint8_t shortcut_mods) {
|
||||
mc->window = window;
|
||||
mc->sdl_mouse_capture_keys = sc_shortcut_mods_to_sdl(shortcut_mods);
|
||||
mc->mouse_capture_key_pressed = SDLK_UNKNOWN;
|
||||
}
|
||||
|
||||
static inline bool
|
||||
sc_mouse_capture_is_capture_key(struct sc_mouse_capture *mc, SDL_Keycode key) {
|
||||
return sc_shortcut_mods_is_shortcut_key(mc->sdl_mouse_capture_keys, key);
|
||||
}
|
||||
|
||||
bool
|
||||
sc_mouse_capture_handle_event(struct sc_mouse_capture *mc,
|
||||
const SDL_Event *event) {
|
||||
switch (event->type) {
|
||||
case SDL_WINDOWEVENT:
|
||||
if (event->window.event == SDL_WINDOWEVENT_FOCUS_LOST) {
|
||||
sc_mouse_capture_set_active(mc, false);
|
||||
return true;
|
||||
}
|
||||
break;
|
||||
case SDL_KEYDOWN: {
|
||||
SDL_Keycode key = event->key.keysym.sym;
|
||||
if (sc_mouse_capture_is_capture_key(mc, key)) {
|
||||
if (!mc->mouse_capture_key_pressed) {
|
||||
mc->mouse_capture_key_pressed = key;
|
||||
} else {
|
||||
// Another mouse capture key has been pressed, cancel
|
||||
// mouse (un)capture
|
||||
mc->mouse_capture_key_pressed = 0;
|
||||
}
|
||||
// Mouse capture keys are never forwarded to the device
|
||||
return true;
|
||||
}
|
||||
break;
|
||||
}
|
||||
case SDL_KEYUP: {
|
||||
SDL_Keycode key = event->key.keysym.sym;
|
||||
SDL_Keycode cap = mc->mouse_capture_key_pressed;
|
||||
mc->mouse_capture_key_pressed = 0;
|
||||
if (sc_mouse_capture_is_capture_key(mc, key)) {
|
||||
if (key == cap) {
|
||||
// A mouse capture key has been pressed then released:
|
||||
// toggle the capture mouse mode
|
||||
sc_mouse_capture_toggle(mc);
|
||||
}
|
||||
// Mouse capture keys are never forwarded to the device
|
||||
return true;
|
||||
}
|
||||
break;
|
||||
}
|
||||
case SDL_MOUSEWHEEL:
|
||||
case SDL_MOUSEMOTION:
|
||||
case SDL_MOUSEBUTTONDOWN:
|
||||
if (!sc_mouse_capture_is_active(mc)) {
|
||||
// The mouse will be captured on SDL_MOUSEBUTTONUP, so consume
|
||||
// the event
|
||||
return true;
|
||||
}
|
||||
break;
|
||||
case SDL_MOUSEBUTTONUP:
|
||||
if (!sc_mouse_capture_is_active(mc)) {
|
||||
sc_mouse_capture_set_active(mc, true);
|
||||
return true;
|
||||
}
|
||||
break;
|
||||
case SDL_FINGERMOTION:
|
||||
case SDL_FINGERDOWN:
|
||||
case SDL_FINGERUP:
|
||||
// Touch events are not compatible with relative mode
|
||||
// (coordinates are not relative), so consume the event
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
void
|
||||
sc_mouse_capture_set_active(struct sc_mouse_capture *mc, bool capture) {
|
||||
#ifdef __APPLE__
|
||||
// Workaround for SDL bug on macOS:
|
||||
// <https://github.com/libsdl-org/SDL/issues/5340>
|
||||
if (capture) {
|
||||
int mouse_x, mouse_y;
|
||||
SDL_GetGlobalMouseState(&mouse_x, &mouse_y);
|
||||
|
||||
int x, y, w, h;
|
||||
SDL_GetWindowPosition(mc->window, &x, &y);
|
||||
SDL_GetWindowSize(mc->window, &w, &h);
|
||||
|
||||
bool outside_window = mouse_x < x || mouse_x >= x + w
|
||||
|| mouse_y < y || mouse_y >= y + h;
|
||||
if (outside_window) {
|
||||
SDL_WarpMouseInWindow(mc->window, w / 2, h / 2);
|
||||
}
|
||||
}
|
||||
#else
|
||||
(void) mc;
|
||||
#endif
|
||||
if (SDL_SetRelativeMouseMode(capture)) {
|
||||
LOGE("Could not set relative mouse mode to %s: %s",
|
||||
capture ? "true" : "false", SDL_GetError());
|
||||
}
|
||||
}
|
||||
|
||||
bool
|
||||
sc_mouse_capture_is_active(struct sc_mouse_capture *mc) {
|
||||
(void) mc;
|
||||
return SDL_GetRelativeMouseMode();
|
||||
}
|
||||
|
||||
void
|
||||
sc_mouse_capture_toggle(struct sc_mouse_capture *mc) {
|
||||
bool new_value = !sc_mouse_capture_is_active(mc);
|
||||
sc_mouse_capture_set_active(mc, new_value);
|
||||
}
|
38
app/src/mouse_capture.h
Normal file
@ -0,0 +1,38 @@
|
||||
#ifndef SC_MOUSE_CAPTURE_H
|
||||
#define SC_MOUSE_CAPTURE_H
|
||||
|
||||
#include "common.h"
|
||||
|
||||
#include <stdbool.h>
|
||||
|
||||
#include <SDL2/SDL.h>
|
||||
|
||||
struct sc_mouse_capture {
|
||||
SDL_Window *window;
|
||||
uint16_t sdl_mouse_capture_keys;
|
||||
|
||||
// To enable/disable mouse capture, a mouse capture key (LALT, LGUI or
|
||||
// RGUI) must be pressed. This variable tracks the pressed capture key.
|
||||
SDL_Keycode mouse_capture_key_pressed;
|
||||
|
||||
};
|
||||
|
||||
void
|
||||
sc_mouse_capture_init(struct sc_mouse_capture *mc, SDL_Window *window,
|
||||
uint8_t shortcut_mods);
|
||||
|
||||
void
|
||||
sc_mouse_capture_set_active(struct sc_mouse_capture *mc, bool capture);
|
||||
|
||||
bool
|
||||
sc_mouse_capture_is_active(struct sc_mouse_capture *mc);
|
||||
|
||||
void
|
||||
sc_mouse_capture_toggle(struct sc_mouse_capture *mc);
|
||||
|
||||
// Return true if it consumed the event
|
||||
bool
|
||||
sc_mouse_capture_handle_event(struct sc_mouse_capture *mc,
|
||||
const SDL_Event *event);
|
||||
|
||||
#endif
|
@ -58,7 +58,7 @@ const struct scrcpy_options scrcpy_options_default = {
|
||||
.window_width = 0,
|
||||
.window_height = 0,
|
||||
.display_id = 0,
|
||||
.display_buffer = 0,
|
||||
.video_buffer = 0,
|
||||
.audio_buffer = -1, // depends on the audio format,
|
||||
.audio_output_buffer = SC_TICK_FROM_MS(5),
|
||||
.time_limit = 0,
|
||||
@ -103,6 +103,8 @@ const struct scrcpy_options scrcpy_options_default = {
|
||||
.window = true,
|
||||
.mouse_hover = true,
|
||||
.audio_dup = false,
|
||||
.new_display = NULL,
|
||||
.start_app = NULL,
|
||||
};
|
||||
|
||||
enum sc_orientation
|
||||
|
@ -134,6 +134,8 @@ enum sc_lock_video_orientation {
|
||||
SC_LOCK_VIDEO_ORIENTATION_UNLOCKED = -1,
|
||||
// lock the current orientation when scrcpy starts
|
||||
SC_LOCK_VIDEO_ORIENTATION_INITIAL = -2,
|
||||
// like SC_LOCK_VIDEO_ORIENTATION_INITIAL, but set automatically
|
||||
SC_LOCK_VIDEO_ORIENTATION_INITIAL_AUTO = -3,
|
||||
SC_LOCK_VIDEO_ORIENTATION_0 = 0,
|
||||
SC_LOCK_VIDEO_ORIENTATION_90 = 3,
|
||||
SC_LOCK_VIDEO_ORIENTATION_180 = 2,
|
||||
@ -259,7 +261,7 @@ struct scrcpy_options {
|
||||
uint16_t window_width;
|
||||
uint16_t window_height;
|
||||
uint32_t display_id;
|
||||
sc_tick display_buffer;
|
||||
sc_tick video_buffer;
|
||||
sc_tick audio_buffer;
|
||||
sc_tick audio_output_buffer;
|
||||
sc_tick time_limit;
|
||||
@ -304,10 +306,13 @@ struct scrcpy_options {
|
||||
#define SC_OPTION_LIST_DISPLAYS 0x2
|
||||
#define SC_OPTION_LIST_CAMERAS 0x4
|
||||
#define SC_OPTION_LIST_CAMERA_SIZES 0x8
|
||||
#define SC_OPTION_LIST_APPS 0x10
|
||||
uint8_t list;
|
||||
bool window;
|
||||
bool mouse_hover;
|
||||
bool audio_dup;
|
||||
const char *new_display; // [<width>x<height>][/<dpi>] parsed by the server
|
||||
const char *start_app;
|
||||
};
|
||||
|
||||
extern const struct scrcpy_options scrcpy_options_default;
|
||||
|
@ -53,7 +53,7 @@ struct scrcpy {
|
||||
struct sc_decoder video_decoder;
|
||||
struct sc_decoder audio_decoder;
|
||||
struct sc_recorder recorder;
|
||||
struct sc_delay_buffer display_buffer;
|
||||
struct sc_delay_buffer video_buffer;
|
||||
#ifdef HAVE_V4L2
|
||||
struct sc_v4l2_sink v4l2_sink;
|
||||
struct sc_delay_buffer v4l2_buffer;
|
||||
@ -431,6 +431,7 @@ scrcpy(struct scrcpy_options *options) {
|
||||
.lock_video_orientation = options->lock_video_orientation,
|
||||
.control = options->control,
|
||||
.display_id = options->display_id,
|
||||
.new_display = options->new_display,
|
||||
.video = options->video,
|
||||
.audio = options->audio,
|
||||
.audio_dup = options->audio_dup,
|
||||
@ -814,11 +815,11 @@ aoa_complete:
|
||||
|
||||
if (options->video_playback) {
|
||||
struct sc_frame_source *src = &s->video_decoder.frame_source;
|
||||
if (options->display_buffer) {
|
||||
sc_delay_buffer_init(&s->display_buffer,
|
||||
options->display_buffer, true);
|
||||
sc_frame_source_add_sink(src, &s->display_buffer.frame_sink);
|
||||
src = &s->display_buffer.frame_source;
|
||||
if (options->video_buffer) {
|
||||
sc_delay_buffer_init(&s->video_buffer,
|
||||
options->video_buffer, true);
|
||||
sc_frame_source_add_sink(src, &s->video_buffer.frame_sink);
|
||||
src = &s->video_buffer.frame_source;
|
||||
}
|
||||
|
||||
sc_frame_source_add_sink(src, &s->screen.frame_sink);
|
||||
@ -872,11 +873,11 @@ aoa_complete:
|
||||
// everything is set up
|
||||
if (options->control && options->turn_screen_off) {
|
||||
struct sc_control_msg msg;
|
||||
msg.type = SC_CONTROL_MSG_TYPE_SET_SCREEN_POWER_MODE;
|
||||
msg.set_screen_power_mode.mode = SC_SCREEN_POWER_MODE_OFF;
|
||||
msg.type = SC_CONTROL_MSG_TYPE_SET_DISPLAY_POWER;
|
||||
msg.set_display_power.on = false;
|
||||
|
||||
if (!sc_controller_push_msg(&s->controller, &msg)) {
|
||||
LOGW("Could not request 'set screen power mode'");
|
||||
LOGW("Could not request 'set display power'");
|
||||
}
|
||||
}
|
||||
|
||||
@ -906,6 +907,25 @@ aoa_complete:
|
||||
init_sdl_gamepads();
|
||||
}
|
||||
|
||||
if (options->control && options->start_app) {
|
||||
assert(controller);
|
||||
|
||||
char *name = strdup(options->start_app);
|
||||
if (!name) {
|
||||
LOG_OOM();
|
||||
goto end;
|
||||
}
|
||||
|
||||
struct sc_control_msg msg;
|
||||
msg.type = SC_CONTROL_MSG_TYPE_START_APP;
|
||||
msg.start_app.name = name;
|
||||
|
||||
if (!sc_controller_push_msg(controller, &msg)) {
|
||||
LOGW("Could not request start app '%s'", name);
|
||||
free(name);
|
||||
}
|
||||
}
|
||||
|
||||
ret = event_loop(s);
|
||||
terminate_event_loop();
|
||||
LOGD("quit...");
|
||||
|
127
app/src/screen.c
@ -162,47 +162,6 @@ sc_screen_is_relative_mode(struct sc_screen *screen) {
|
||||
return screen->im.mp && screen->im.mp->relative_mode;
|
||||
}
|
||||
|
||||
static void
|
||||
sc_screen_set_mouse_capture(struct sc_screen *screen, bool capture) {
|
||||
#ifdef __APPLE__
|
||||
// Workaround for SDL bug on macOS:
|
||||
// <https://github.com/libsdl-org/SDL/issues/5340>
|
||||
if (capture) {
|
||||
int mouse_x, mouse_y;
|
||||
SDL_GetGlobalMouseState(&mouse_x, &mouse_y);
|
||||
|
||||
int x, y, w, h;
|
||||
SDL_GetWindowPosition(screen->window, &x, &y);
|
||||
SDL_GetWindowSize(screen->window, &w, &h);
|
||||
|
||||
bool outside_window = mouse_x < x || mouse_x >= x + w
|
||||
|| mouse_y < y || mouse_y >= y + h;
|
||||
if (outside_window) {
|
||||
SDL_WarpMouseInWindow(screen->window, w / 2, h / 2);
|
||||
}
|
||||
}
|
||||
#else
|
||||
(void) screen;
|
||||
#endif
|
||||
if (SDL_SetRelativeMouseMode(capture)) {
|
||||
LOGE("Could not set relative mouse mode to %s: %s",
|
||||
capture ? "true" : "false", SDL_GetError());
|
||||
}
|
||||
}
|
||||
|
||||
static inline bool
|
||||
sc_screen_get_mouse_capture(struct sc_screen *screen) {
|
||||
(void) screen;
|
||||
return SDL_GetRelativeMouseMode();
|
||||
}
|
||||
|
||||
static inline void
|
||||
sc_screen_toggle_mouse_capture(struct sc_screen *screen) {
|
||||
(void) screen;
|
||||
bool new_value = !sc_screen_get_mouse_capture(screen);
|
||||
sc_screen_set_mouse_capture(screen, new_value);
|
||||
}
|
||||
|
||||
static void
|
||||
sc_screen_update_content_rect(struct sc_screen *screen) {
|
||||
assert(screen->video);
|
||||
@ -371,7 +330,6 @@ sc_screen_init(struct sc_screen *screen,
|
||||
screen->fullscreen = false;
|
||||
screen->maximized = false;
|
||||
screen->minimized = false;
|
||||
screen->mouse_capture_key_pressed = 0;
|
||||
screen->paused = false;
|
||||
screen->resume_frame = NULL;
|
||||
screen->orientation = SC_ORIENTATION_0;
|
||||
@ -486,6 +444,9 @@ sc_screen_init(struct sc_screen *screen,
|
||||
|
||||
sc_input_manager_init(&screen->im, &im_params);
|
||||
|
||||
// Initialize even if not used for simplicity
|
||||
sc_mouse_capture_init(&screen->mc, screen->window, params->shortcut_mods);
|
||||
|
||||
#ifdef CONTINUOUS_RESIZING_WORKAROUND
|
||||
if (screen->video) {
|
||||
SDL_AddEventWatch(event_watcher, screen);
|
||||
@ -506,7 +467,7 @@ sc_screen_init(struct sc_screen *screen,
|
||||
|
||||
if (!screen->video && sc_screen_is_relative_mode(screen)) {
|
||||
// Capture mouse immediately if video mirroring is disabled
|
||||
sc_screen_set_mouse_capture(screen, true);
|
||||
sc_mouse_capture_set_active(&screen->mc, true);
|
||||
}
|
||||
|
||||
return true;
|
||||
@ -538,7 +499,7 @@ sc_screen_show_initial_window(struct sc_screen *screen) {
|
||||
SDL_SetWindowPosition(screen->window, x, y);
|
||||
|
||||
if (screen->req.fullscreen) {
|
||||
sc_screen_switch_fullscreen(screen);
|
||||
sc_screen_toggle_fullscreen(screen);
|
||||
}
|
||||
|
||||
if (screen->req.start_fps_counter) {
|
||||
@ -713,7 +674,7 @@ sc_screen_apply_frame(struct sc_screen *screen) {
|
||||
|
||||
if (sc_screen_is_relative_mode(screen)) {
|
||||
// Capture mouse on start
|
||||
sc_screen_set_mouse_capture(screen, true);
|
||||
sc_mouse_capture_set_active(&screen->mc, true);
|
||||
}
|
||||
}
|
||||
|
||||
@ -774,7 +735,7 @@ sc_screen_set_paused(struct sc_screen *screen, bool paused) {
|
||||
}
|
||||
|
||||
void
|
||||
sc_screen_switch_fullscreen(struct sc_screen *screen) {
|
||||
sc_screen_toggle_fullscreen(struct sc_screen *screen) {
|
||||
assert(screen->video);
|
||||
|
||||
uint32_t new_mode = screen->fullscreen ? 0 : SDL_WINDOW_FULLSCREEN_DESKTOP;
|
||||
@ -837,15 +798,8 @@ sc_screen_resize_to_pixel_perfect(struct sc_screen *screen) {
|
||||
content_size.height);
|
||||
}
|
||||
|
||||
static inline bool
|
||||
sc_screen_is_mouse_capture_key(SDL_Keycode key) {
|
||||
return key == SDLK_LALT || key == SDLK_LGUI || key == SDLK_RGUI;
|
||||
}
|
||||
|
||||
bool
|
||||
sc_screen_handle_event(struct sc_screen *screen, const SDL_Event *event) {
|
||||
bool relative_mode = sc_screen_is_relative_mode(screen);
|
||||
|
||||
switch (event->type) {
|
||||
case SC_EVENT_SCREEN_INIT_SIZE: {
|
||||
// The initial size is passed via screen->frame_size
|
||||
@ -903,69 +857,14 @@ sc_screen_handle_event(struct sc_screen *screen, const SDL_Event *event) {
|
||||
apply_pending_resize(screen);
|
||||
sc_screen_render(screen, true);
|
||||
break;
|
||||
case SDL_WINDOWEVENT_FOCUS_LOST:
|
||||
if (relative_mode) {
|
||||
sc_screen_set_mouse_capture(screen, false);
|
||||
}
|
||||
break;
|
||||
}
|
||||
return true;
|
||||
case SDL_KEYDOWN:
|
||||
if (relative_mode) {
|
||||
SDL_Keycode key = event->key.keysym.sym;
|
||||
if (sc_screen_is_mouse_capture_key(key)) {
|
||||
if (!screen->mouse_capture_key_pressed) {
|
||||
screen->mouse_capture_key_pressed = key;
|
||||
} else {
|
||||
// Another mouse capture key has been pressed, cancel
|
||||
// mouse (un)capture
|
||||
screen->mouse_capture_key_pressed = 0;
|
||||
}
|
||||
// Mouse capture keys are never forwarded to the device
|
||||
return true;
|
||||
}
|
||||
}
|
||||
break;
|
||||
case SDL_KEYUP:
|
||||
if (relative_mode) {
|
||||
SDL_Keycode key = event->key.keysym.sym;
|
||||
SDL_Keycode cap = screen->mouse_capture_key_pressed;
|
||||
screen->mouse_capture_key_pressed = 0;
|
||||
if (sc_screen_is_mouse_capture_key(key)) {
|
||||
if (key == cap) {
|
||||
// A mouse capture key has been pressed then released:
|
||||
// toggle the capture mouse mode
|
||||
sc_screen_toggle_mouse_capture(screen);
|
||||
}
|
||||
// Mouse capture keys are never forwarded to the device
|
||||
return true;
|
||||
}
|
||||
}
|
||||
break;
|
||||
case SDL_MOUSEWHEEL:
|
||||
case SDL_MOUSEMOTION:
|
||||
case SDL_MOUSEBUTTONDOWN:
|
||||
if (relative_mode && !sc_screen_get_mouse_capture(screen)) {
|
||||
// Do not forward to input manager, the mouse will be captured
|
||||
// on SDL_MOUSEBUTTONUP
|
||||
return true;
|
||||
}
|
||||
break;
|
||||
case SDL_FINGERMOTION:
|
||||
case SDL_FINGERDOWN:
|
||||
case SDL_FINGERUP:
|
||||
if (relative_mode) {
|
||||
// Touch events are not compatible with relative mode
|
||||
// (coordinates are not relative)
|
||||
return true;
|
||||
}
|
||||
break;
|
||||
case SDL_MOUSEBUTTONUP:
|
||||
if (relative_mode && !sc_screen_get_mouse_capture(screen)) {
|
||||
sc_screen_set_mouse_capture(screen, true);
|
||||
return true;
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
if (sc_screen_is_relative_mode(screen)
|
||||
&& sc_mouse_capture_handle_event(&screen->mc, event)) {
|
||||
// The mouse capture handler consumed the event
|
||||
return true;
|
||||
}
|
||||
|
||||
sc_input_manager_handle_event(&screen->im, event);
|
||||
|
@ -13,6 +13,7 @@
|
||||
#include "fps_counter.h"
|
||||
#include "frame_buffer.h"
|
||||
#include "input_manager.h"
|
||||
#include "mouse_capture.h"
|
||||
#include "opengl.h"
|
||||
#include "options.h"
|
||||
#include "trait/key_processor.h"
|
||||
@ -30,6 +31,7 @@ struct sc_screen {
|
||||
|
||||
struct sc_display display;
|
||||
struct sc_input_manager im;
|
||||
struct sc_mouse_capture mc; // only used in mouse relative mode
|
||||
struct sc_frame_buffer fb;
|
||||
struct sc_fps_counter fps_counter;
|
||||
|
||||
@ -61,10 +63,6 @@ struct sc_screen {
|
||||
bool maximized;
|
||||
bool minimized;
|
||||
|
||||
// To enable/disable mouse capture, a mouse capture key (LALT, LGUI or
|
||||
// RGUI) must be pressed. This variable tracks the pressed capture key.
|
||||
SDL_Keycode mouse_capture_key_pressed;
|
||||
|
||||
AVFrame *frame;
|
||||
|
||||
bool paused;
|
||||
@ -126,9 +124,9 @@ sc_screen_destroy(struct sc_screen *screen);
|
||||
void
|
||||
sc_screen_hide_window(struct sc_screen *screen);
|
||||
|
||||
// switch the fullscreen mode
|
||||
// toggle the fullscreen mode
|
||||
void
|
||||
sc_screen_switch_fullscreen(struct sc_screen *screen);
|
||||
sc_screen_toggle_fullscreen(struct sc_screen *screen);
|
||||
|
||||
// resize window to optimal size (remove black borders)
|
||||
void
|
||||
|
@ -66,56 +66,6 @@ get_server_path(void) {
|
||||
return server_path;
|
||||
}
|
||||
|
||||
static void
|
||||
sc_server_params_destroy(struct sc_server_params *params) {
|
||||
// The server stores a copy of the params provided by the user
|
||||
free((char *) params->req_serial);
|
||||
free((char *) params->crop);
|
||||
free((char *) params->video_codec_options);
|
||||
free((char *) params->audio_codec_options);
|
||||
free((char *) params->video_encoder);
|
||||
free((char *) params->audio_encoder);
|
||||
free((char *) params->tcpip_dst);
|
||||
free((char *) params->camera_id);
|
||||
free((char *) params->camera_ar);
|
||||
}
|
||||
|
||||
static bool
|
||||
sc_server_params_copy(struct sc_server_params *dst,
|
||||
const struct sc_server_params *src) {
|
||||
*dst = *src;
|
||||
|
||||
// The params reference user-allocated memory, so we must copy them to
|
||||
// handle them from another thread
|
||||
|
||||
#define COPY(FIELD) do { \
|
||||
dst->FIELD = NULL; \
|
||||
if (src->FIELD) { \
|
||||
dst->FIELD = strdup(src->FIELD); \
|
||||
if (!dst->FIELD) { \
|
||||
goto error; \
|
||||
} \
|
||||
} \
|
||||
} while(0)
|
||||
|
||||
COPY(req_serial);
|
||||
COPY(crop);
|
||||
COPY(video_codec_options);
|
||||
COPY(audio_codec_options);
|
||||
COPY(video_encoder);
|
||||
COPY(audio_encoder);
|
||||
COPY(tcpip_dst);
|
||||
COPY(camera_id);
|
||||
COPY(camera_ar);
|
||||
#undef COPY
|
||||
|
||||
return true;
|
||||
|
||||
error:
|
||||
sc_server_params_destroy(dst);
|
||||
return false;
|
||||
}
|
||||
|
||||
static bool
|
||||
push_server(struct sc_intr *intr, const char *serial) {
|
||||
char *server_path = get_server_path();
|
||||
@ -405,6 +355,10 @@ execute_server(struct sc_server *server,
|
||||
// By default, power_on is true
|
||||
ADD_PARAM("power_on=false");
|
||||
}
|
||||
if (params->new_display) {
|
||||
VALIDATE_STRING(params->new_display);
|
||||
ADD_PARAM("new_display=%s", params->new_display);
|
||||
}
|
||||
if (params->list & SC_OPTION_LIST_ENCODERS) {
|
||||
ADD_PARAM("list_encoders=true");
|
||||
}
|
||||
@ -417,6 +371,9 @@ execute_server(struct sc_server *server,
|
||||
if (params->list & SC_OPTION_LIST_CAMERA_SIZES) {
|
||||
ADD_PARAM("list_camera_sizes=true");
|
||||
}
|
||||
if (params->list & SC_OPTION_LIST_APPS) {
|
||||
ADD_PARAM("list_apps=true");
|
||||
}
|
||||
|
||||
#undef ADD_PARAM
|
||||
|
||||
@ -499,22 +456,18 @@ connect_to_server(struct sc_server *server, unsigned attempts, sc_tick delay,
|
||||
bool
|
||||
sc_server_init(struct sc_server *server, const struct sc_server_params *params,
|
||||
const struct sc_server_callbacks *cbs, void *cbs_userdata) {
|
||||
bool ok = sc_server_params_copy(&server->params, params);
|
||||
if (!ok) {
|
||||
LOG_OOM();
|
||||
return false;
|
||||
}
|
||||
// The allocated data in params (const char *) must remain valid until the
|
||||
// end of the program
|
||||
server->params = *params;
|
||||
|
||||
ok = sc_mutex_init(&server->mutex);
|
||||
bool ok = sc_mutex_init(&server->mutex);
|
||||
if (!ok) {
|
||||
sc_server_params_destroy(&server->params);
|
||||
return false;
|
||||
}
|
||||
|
||||
ok = sc_cond_init(&server->cond_stopped);
|
||||
if (!ok) {
|
||||
sc_mutex_destroy(&server->mutex);
|
||||
sc_server_params_destroy(&server->params);
|
||||
return false;
|
||||
}
|
||||
|
||||
@ -522,7 +475,6 @@ sc_server_init(struct sc_server *server, const struct sc_server_params *params,
|
||||
if (!ok) {
|
||||
sc_cond_destroy(&server->cond_stopped);
|
||||
sc_mutex_destroy(&server->mutex);
|
||||
sc_server_params_destroy(&server->params);
|
||||
return false;
|
||||
}
|
||||
|
||||
@ -1161,7 +1113,6 @@ sc_server_destroy(struct sc_server *server) {
|
||||
|
||||
free(server->serial);
|
||||
free(server->device_socket_name);
|
||||
sc_server_params_destroy(&server->params);
|
||||
sc_intr_destroy(&server->intr);
|
||||
sc_cond_destroy(&server->cond_stopped);
|
||||
sc_mutex_destroy(&server->mutex);
|
||||
|
@ -48,6 +48,7 @@ struct sc_server_params {
|
||||
int8_t lock_video_orientation;
|
||||
bool control;
|
||||
uint32_t display_id;
|
||||
const char *new_display;
|
||||
bool video;
|
||||
bool audio;
|
||||
bool audio_dup;
|
||||
|
60
app/src/shortcut_mod.h
Normal file
@ -0,0 +1,60 @@
|
||||
#ifndef SC_SHORTCUT_MOD_H
|
||||
#define SC_SHORTCUT_MOD_H
|
||||
|
||||
#include "common.h"
|
||||
|
||||
#include <stdbool.h>
|
||||
#include <stdint.h>
|
||||
#include <SDL2/SDL_keycode.h>
|
||||
|
||||
#include "options.h"
|
||||
|
||||
#define SC_SDL_SHORTCUT_MODS_MASK (KMOD_CTRL | KMOD_ALT | KMOD_GUI)
|
||||
|
||||
// input: OR of enum sc_shortcut_mod
|
||||
// output: OR of SDL_Keymod
|
||||
static inline uint16_t
|
||||
sc_shortcut_mods_to_sdl(uint8_t shortcut_mods) {
|
||||
uint16_t sdl_mod = 0;
|
||||
if (shortcut_mods & SC_SHORTCUT_MOD_LCTRL) {
|
||||
sdl_mod |= KMOD_LCTRL;
|
||||
}
|
||||
if (shortcut_mods & SC_SHORTCUT_MOD_RCTRL) {
|
||||
sdl_mod |= KMOD_RCTRL;
|
||||
}
|
||||
if (shortcut_mods & SC_SHORTCUT_MOD_LALT) {
|
||||
sdl_mod |= KMOD_LALT;
|
||||
}
|
||||
if (shortcut_mods & SC_SHORTCUT_MOD_RALT) {
|
||||
sdl_mod |= KMOD_RALT;
|
||||
}
|
||||
if (shortcut_mods & SC_SHORTCUT_MOD_LSUPER) {
|
||||
sdl_mod |= KMOD_LGUI;
|
||||
}
|
||||
if (shortcut_mods & SC_SHORTCUT_MOD_RSUPER) {
|
||||
sdl_mod |= KMOD_RGUI;
|
||||
}
|
||||
return sdl_mod;
|
||||
}
|
||||
|
||||
static inline bool
|
||||
sc_shortcut_mods_is_shortcut_mod(uint16_t sdl_shortcut_mods, uint16_t sdl_mod) {
|
||||
// sdl_shortcut_mods must be within the mask
|
||||
assert(!(sdl_shortcut_mods & ~SC_SDL_SHORTCUT_MODS_MASK));
|
||||
|
||||
// at least one shortcut mod pressed?
|
||||
return sdl_mod & sdl_shortcut_mods;
|
||||
}
|
||||
|
||||
static inline bool
|
||||
sc_shortcut_mods_is_shortcut_key(uint16_t sdl_shortcut_mods,
|
||||
SDL_Keycode keycode) {
|
||||
return (sdl_shortcut_mods & KMOD_LCTRL && keycode == SDLK_LCTRL)
|
||||
|| (sdl_shortcut_mods & KMOD_RCTRL && keycode == SDLK_RCTRL)
|
||||
|| (sdl_shortcut_mods & KMOD_LALT && keycode == SDLK_LALT)
|
||||
|| (sdl_shortcut_mods & KMOD_RALT && keycode == SDLK_RALT)
|
||||
|| (sdl_shortcut_mods & KMOD_LGUI && keycode == SDLK_LGUI)
|
||||
|| (sdl_shortcut_mods & KMOD_RGUI && keycode == SDLK_RGUI);
|
||||
}
|
||||
|
||||
#endif
|
@ -1,6 +1,8 @@
|
||||
#ifndef SC_AOA_HID_H
|
||||
#define SC_AOA_HID_H
|
||||
|
||||
#include "common.h"
|
||||
|
||||
#include <stdint.h>
|
||||
#include <stdbool.h>
|
||||
|
||||
|
@ -185,6 +185,7 @@ scrcpy_otg(struct scrcpy_options *options) {
|
||||
.window_width = options->window_width,
|
||||
.window_height = options->window_height,
|
||||
.window_borderless = options->window_borderless,
|
||||
.shortcut_mods = options->shortcut_mods,
|
||||
};
|
||||
|
||||
ok = sc_screen_otg_init(&s->screen_otg, ¶ms);
|
||||
|
@ -4,47 +4,6 @@
|
||||
#include "options.h"
|
||||
#include "util/log.h"
|
||||
|
||||
static void
|
||||
sc_screen_otg_set_mouse_capture(struct sc_screen_otg *screen, bool capture) {
|
||||
#ifdef __APPLE__
|
||||
// Workaround for SDL bug on macOS:
|
||||
// <https://github.com/libsdl-org/SDL/issues/5340>
|
||||
if (capture) {
|
||||
int mouse_x, mouse_y;
|
||||
SDL_GetGlobalMouseState(&mouse_x, &mouse_y);
|
||||
|
||||
int x, y, w, h;
|
||||
SDL_GetWindowPosition(screen->window, &x, &y);
|
||||
SDL_GetWindowSize(screen->window, &w, &h);
|
||||
|
||||
bool outside_window = mouse_x < x || mouse_x >= x + w
|
||||
|| mouse_y < y || mouse_y >= y + h;
|
||||
if (outside_window) {
|
||||
SDL_WarpMouseInWindow(screen->window, w / 2, h / 2);
|
||||
}
|
||||
}
|
||||
#else
|
||||
(void) screen;
|
||||
#endif
|
||||
if (SDL_SetRelativeMouseMode(capture)) {
|
||||
LOGE("Could not set relative mouse mode to %s: %s",
|
||||
capture ? "true" : "false", SDL_GetError());
|
||||
}
|
||||
}
|
||||
|
||||
static inline bool
|
||||
sc_screen_otg_get_mouse_capture(struct sc_screen_otg *screen) {
|
||||
(void) screen;
|
||||
return SDL_GetRelativeMouseMode();
|
||||
}
|
||||
|
||||
static inline void
|
||||
sc_screen_otg_toggle_mouse_capture(struct sc_screen_otg *screen) {
|
||||
(void) screen;
|
||||
bool new_value = !sc_screen_otg_get_mouse_capture(screen);
|
||||
sc_screen_otg_set_mouse_capture(screen, new_value);
|
||||
}
|
||||
|
||||
static void
|
||||
sc_screen_otg_render(struct sc_screen_otg *screen) {
|
||||
SDL_RenderClear(screen->renderer);
|
||||
@ -61,8 +20,6 @@ sc_screen_otg_init(struct sc_screen_otg *screen,
|
||||
screen->mouse = params->mouse;
|
||||
screen->gamepad = params->gamepad;
|
||||
|
||||
screen->mouse_capture_key_pressed = 0;
|
||||
|
||||
const char *title = params->window_title;
|
||||
assert(title);
|
||||
|
||||
@ -113,9 +70,11 @@ sc_screen_otg_init(struct sc_screen_otg *screen,
|
||||
LOGW("Could not load icon");
|
||||
}
|
||||
|
||||
sc_mouse_capture_init(&screen->mc, screen->window, params->shortcut_mods);
|
||||
|
||||
if (screen->mouse) {
|
||||
// Capture mouse on start
|
||||
sc_screen_otg_set_mouse_capture(screen, true);
|
||||
sc_mouse_capture_set_active(&screen->mc, true);
|
||||
}
|
||||
|
||||
return true;
|
||||
@ -137,11 +96,6 @@ sc_screen_otg_destroy(struct sc_screen_otg *screen) {
|
||||
SDL_DestroyWindow(screen->window);
|
||||
}
|
||||
|
||||
static inline bool
|
||||
sc_screen_otg_is_mouse_capture_key(SDL_Keycode key) {
|
||||
return key == SDLK_LALT || key == SDLK_LGUI || key == SDLK_RGUI;
|
||||
}
|
||||
|
||||
static void
|
||||
sc_screen_otg_process_key(struct sc_screen_otg *screen,
|
||||
const SDL_KeyboardEvent *event) {
|
||||
@ -298,80 +252,46 @@ sc_screen_otg_process_gamepad_button(struct sc_screen_otg *screen,
|
||||
|
||||
void
|
||||
sc_screen_otg_handle_event(struct sc_screen_otg *screen, SDL_Event *event) {
|
||||
if (sc_mouse_capture_handle_event(&screen->mc, event)) {
|
||||
// The mouse capture handler consumed the event
|
||||
return;
|
||||
}
|
||||
|
||||
switch (event->type) {
|
||||
case SDL_WINDOWEVENT:
|
||||
switch (event->window.event) {
|
||||
case SDL_WINDOWEVENT_EXPOSED:
|
||||
sc_screen_otg_render(screen);
|
||||
break;
|
||||
case SDL_WINDOWEVENT_FOCUS_LOST:
|
||||
if (screen->mouse) {
|
||||
sc_screen_otg_set_mouse_capture(screen, false);
|
||||
}
|
||||
break;
|
||||
}
|
||||
return;
|
||||
case SDL_KEYDOWN:
|
||||
if (screen->mouse) {
|
||||
SDL_Keycode key = event->key.keysym.sym;
|
||||
if (sc_screen_otg_is_mouse_capture_key(key)) {
|
||||
if (!screen->mouse_capture_key_pressed) {
|
||||
screen->mouse_capture_key_pressed = key;
|
||||
} else {
|
||||
// Another mouse capture key has been pressed, cancel
|
||||
// mouse (un)capture
|
||||
screen->mouse_capture_key_pressed = 0;
|
||||
}
|
||||
// Mouse capture keys are never forwarded to the device
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
if (screen->keyboard) {
|
||||
sc_screen_otg_process_key(screen, &event->key);
|
||||
}
|
||||
break;
|
||||
case SDL_KEYUP:
|
||||
if (screen->mouse) {
|
||||
SDL_Keycode key = event->key.keysym.sym;
|
||||
SDL_Keycode cap = screen->mouse_capture_key_pressed;
|
||||
screen->mouse_capture_key_pressed = 0;
|
||||
if (sc_screen_otg_is_mouse_capture_key(key)) {
|
||||
if (key == cap) {
|
||||
// A mouse capture key has been pressed then released:
|
||||
// toggle the capture mouse mode
|
||||
sc_screen_otg_toggle_mouse_capture(screen);
|
||||
}
|
||||
// Mouse capture keys are never forwarded to the device
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
if (screen->keyboard) {
|
||||
sc_screen_otg_process_key(screen, &event->key);
|
||||
}
|
||||
break;
|
||||
case SDL_MOUSEMOTION:
|
||||
if (screen->mouse && sc_screen_otg_get_mouse_capture(screen)) {
|
||||
if (screen->mouse) {
|
||||
sc_screen_otg_process_mouse_motion(screen, &event->motion);
|
||||
}
|
||||
break;
|
||||
case SDL_MOUSEBUTTONDOWN:
|
||||
if (screen->mouse && sc_screen_otg_get_mouse_capture(screen)) {
|
||||
if (screen->mouse) {
|
||||
sc_screen_otg_process_mouse_button(screen, &event->button);
|
||||
}
|
||||
break;
|
||||
case SDL_MOUSEBUTTONUP:
|
||||
if (screen->mouse) {
|
||||
if (sc_screen_otg_get_mouse_capture(screen)) {
|
||||
sc_screen_otg_process_mouse_button(screen, &event->button);
|
||||
} else {
|
||||
sc_screen_otg_set_mouse_capture(screen, true);
|
||||
}
|
||||
sc_screen_otg_process_mouse_button(screen, &event->button);
|
||||
}
|
||||
break;
|
||||
case SDL_MOUSEWHEEL:
|
||||
if (screen->mouse && sc_screen_otg_get_mouse_capture(screen)) {
|
||||
if (screen->mouse) {
|
||||
sc_screen_otg_process_mouse_wheel(screen, &event->wheel);
|
||||
}
|
||||
break;
|
||||
|
@ -8,6 +8,7 @@
|
||||
|
||||
#include "keyboard_aoa.h"
|
||||
#include "mouse_aoa.h"
|
||||
#include "mouse_capture.h"
|
||||
#include "gamepad_aoa.h"
|
||||
|
||||
struct sc_screen_otg {
|
||||
@ -19,8 +20,7 @@ struct sc_screen_otg {
|
||||
SDL_Renderer *renderer;
|
||||
SDL_Texture *texture;
|
||||
|
||||
// See equivalent mechanism in screen.h
|
||||
SDL_Keycode mouse_capture_key_pressed;
|
||||
struct sc_mouse_capture mc;
|
||||
};
|
||||
|
||||
struct sc_screen_otg_params {
|
||||
@ -35,6 +35,7 @@ struct sc_screen_otg_params {
|
||||
uint16_t window_width;
|
||||
uint16_t window_height;
|
||||
bool window_borderless;
|
||||
uint8_t shortcut_mods; // OR of enum sc_shortcut_mod values
|
||||
};
|
||||
|
||||
bool
|
||||
|
@@ -10,14 +10,14 @@ typedef int64_t sc_tick;
#define SC_TICK_FREQ 1000000 // microsecond

// To be adapted if SC_TICK_FREQ changes
#define SC_TICK_TO_NS(tick) ((tick) * 1000)
#define SC_TICK_TO_US(tick) (tick)
#define SC_TICK_TO_MS(tick) ((tick) / 1000)
#define SC_TICK_TO_SEC(tick) ((tick) / 1000000)
#define SC_TICK_FROM_NS(ns) ((ns) / 1000)
#define SC_TICK_FROM_US(us) (us)
#define SC_TICK_FROM_MS(ms) ((ms) * 1000)
#define SC_TICK_FROM_SEC(sec) ((sec) * 1000000)
#define SC_TICK_TO_NS(tick) ((sc_tick) (tick) * 1000)
#define SC_TICK_TO_US(tick) ((sc_tick) tick)
#define SC_TICK_TO_MS(tick) ((sc_tick) (tick) / 1000)
#define SC_TICK_TO_SEC(tick) ((sc_tick) (tick) / 1000000)
#define SC_TICK_FROM_NS(ns) ((sc_tick) (ns) / 1000)
#define SC_TICK_FROM_US(us) ((sc_tick) us)
#define SC_TICK_FROM_MS(ms) ((sc_tick) (ms) * 1000)
#define SC_TICK_FROM_SEC(sec) ((sc_tick) (sec) * 1000000)

sc_tick
sc_tick_now(void);
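The change above widens each macro's arithmetic to `sc_tick` before multiplying or dividing. As a minimal sketch (not part of the patch), assuming a platform where `int` is 32-bit, the cast is what prevents an intermediate overflow when a plain `int` is passed; the `_OLD`/`_NEW` macro names below are illustrative only.

#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

typedef int64_t sc_tick;

// Old form: the multiplication is performed in the type of "sec" (int),
// so it can overflow before being widened to sc_tick.
#define SC_TICK_FROM_SEC_OLD(sec) ((sec) * 1000000)
// New form: the cast forces 64-bit arithmetic from the start.
#define SC_TICK_FROM_SEC_NEW(sec) ((sc_tick) (sec) * 1000000)

int main(void) {
    int sec = 3600; // one hour
    sc_tick bad = SC_TICK_FROM_SEC_OLD(sec);  // overflows a 32-bit int
    sc_tick good = SC_TICK_FROM_SEC_NEW(sec); // 3600000000, as intended
    printf("%" PRId64 " vs %" PRId64 "\n", bad, good);
    return 0;
}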
@@ -62,6 +62,7 @@ void
sc_timeout_stop(struct sc_timeout *timeout) {
    sc_mutex_lock(&timeout->mutex);
    timeout->stopped = true;
    sc_cond_signal(&timeout->cond);
    sc_mutex_unlock(&timeout->mutex);
}
@@ -289,11 +289,11 @@ static void test_serialize_set_clipboard_long(void) {
    assert(!memcmp(buf, expected, sizeof(expected)));
}

static void test_serialize_set_screen_power_mode(void) {
static void test_serialize_set_display_power(void) {
    struct sc_control_msg msg = {
        .type = SC_CONTROL_MSG_TYPE_SET_SCREEN_POWER_MODE,
        .set_screen_power_mode = {
            .mode = SC_SCREEN_POWER_MODE_NORMAL,
        .type = SC_CONTROL_MSG_TYPE_SET_DISPLAY_POWER,
        .set_display_power = {
            .on = true,
        },
    };

@@ -302,8 +302,8 @@ static void test_serialize_set_screen_power_mode(void) {
    assert(size == 2);

    const uint8_t expected[] = {
        SC_CONTROL_MSG_TYPE_SET_SCREEN_POWER_MODE,
        0x02, // SC_SCREEN_POWER_MODE_NORMAL
        SC_CONTROL_MSG_TYPE_SET_DISPLAY_POWER,
        0x01, // true
    };
    assert(!memcmp(buf, expected, sizeof(expected)));
}
@@ -423,7 +423,7 @@ int main(int argc, char *argv[]) {
    test_serialize_get_clipboard();
    test_serialize_set_clipboard();
    test_serialize_set_clipboard_long();
    test_serialize_set_screen_power_mode();
    test_serialize_set_display_power();
    test_serialize_rotate_device();
    test_serialize_uhid_create();
    test_serialize_uhid_input();
@@ -7,7 +7,7 @@ buildscript {
        mavenCentral()
    }
    dependencies {
        classpath 'com.android.tools.build:gradle:8.3.0'
        classpath 'com.android.tools.build:gradle:8.7.1'

        // NOTE: Do not place your application dependencies here; they belong
        // in the individual module build.gradle files
@@ -170,7 +170,7 @@ latency (for both [video](video.md#buffering) and audio) might be preferable to
avoid glitches and smooth the playback:

```
scrcpy --display-buffer=200 --audio-buffer=200
scrcpy --video-buffer=200 --audio-buffer=200
```

It is also possible to configure another audio buffer (the audio output buffer),
@@ -94,14 +94,18 @@ the content (if supported by the app) relative to the center of the screen.

https://github.com/Genymobile/scrcpy/assets/543275/26c4a920-9805-43f1-8d4c-608752d04767

To simulate a tilt gesture: <kbd>Shift</kbd>+_click-and-move-up-or-down_.
To simulate a vertical tilt gesture: <kbd>Shift</kbd>+_click-and-move-up-or-down_.

https://github.com/Genymobile/scrcpy/assets/543275/1e252341-4a90-4b29-9d11-9153b324669f

Similarly, to simulate a horizontal tilt gesture:
<kbd>Ctrl</kbd>+<kbd>Shift</kbd>+_click-and-move-left-or-right_.

Technically, _scrcpy_ generates additional touch events from a "virtual finger"
at a location inverted through the center of the screen. When pressing
<kbd>Ctrl</kbd> the _x_ and _y_ coordinates are inverted. Using <kbd>Shift</kbd>
only inverts _x_.
only inverts _x_, whereas using <kbd>Ctrl</kbd>+<kbd>Shift</kbd> only inverts
_y_.

This only works for the default mouse mode (`--mouse=sdk`).
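To make the coordinate handling concrete, here is a small illustrative sketch (not code from scrcpy itself; the real logic lives in the input manager and uses its own point/size types and an `inverse_point()` helper) of deriving the virtual finger position by mirroring the pointer across the frame's axes:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

struct point { int32_t x; int32_t y; };
struct size  { int32_t width; int32_t height; };

// Mirror the physical pointer through the center of the frame, one axis at a
// time. With invert_x = ctrl ^ shift and invert_y = ctrl, this gives:
//   Shift alone -> invert x only  (vertical tilt)
//   Ctrl alone  -> invert x and y (rotate/scale)
//   Ctrl+Shift  -> invert y only  (horizontal tilt)
struct point
virtual_finger(struct point mouse, struct size frame, bool invert_x,
               bool invert_y) {
    struct point vfinger = mouse;
    if (invert_x) {
        vfinger.x = frame.width - mouse.x;
    }
    if (invert_y) {
        vfinger.y = frame.height - mouse.y;
    }
    return vfinger;
}

int main(void) {
    struct size frame = {1920, 1080};
    struct point mouse = {100, 200};
    // Shift pressed alone: vertical tilt, only x is mirrored
    struct point v = virtual_finger(mouse, frame, true, false);
    printf("virtual finger: (%d, %d)\n", (int) v.x, (int) v.y); // (1820, 200)
    return 0;
}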
@@ -21,9 +21,9 @@ the client and on the server.
If video is enabled, then the server sends a raw video stream (H.264 by default)
of the device screen, with some additional headers for each packet. The client
decodes the video frames, and displays them as soon as possible, without
buffering (unless `--display-buffer=delay` is specified) to minimize latency.
The client is not aware of the device rotation (which is handled by the server),
it just knows the dimensions of the video frames it receives.
buffering (unless `--video-buffer=delay` is specified) to minimize latency. The
client is not aware of the device rotation (which is handled by the server), it
just knows the dimensions of the video frames it receives.

Similarly, if audio is enabled, then the server sends a raw audio stream (OPUS
by default) of the device audio output (or the microphone if
@ -18,6 +18,21 @@ The initial state is restored when _scrcpy_ is closed.
If the device is not plugged in (i.e. only connected over TCP/IP),
`--stay-awake` has no effect (this is the Android behavior).

This changes the value of [`stay_on_while_plugged_in`], a setting which can
also be changed manually:

[`stay_on_while_plugged_in`]: https://developer.android.com/reference/android/provider/Settings.Global#STAY_ON_WHILE_PLUGGED_IN

```bash
# get the current stay_on_while_plugged_in value
adb shell settings get global stay_on_while_plugged_in
# enable for AC/USB/wireless chargers
adb shell settings put global stay_on_while_plugged_in 7
# disable
adb shell settings put global stay_on_while_plugged_in 0
```
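The value given to `settings put` is a plug-type bitmask. Assuming the standard `android.os.BatteryManager` constants, `7` is simply the OR of the AC, USB and wireless flags (sketch; the class name is made up):

```java
import android.os.BatteryManager;

final class StayOnValue {
    // 1 (AC) | 2 (USB) | 4 (wireless) == 7: stay awake on any charger type
    static final int ANY_CHARGER = BatteryManager.BATTERY_PLUGGED_AC
            | BatteryManager.BATTERY_PLUGGED_USB
            | BatteryManager.BATTERY_PLUGGED_WIRELESS;
}
```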

## Turn screen off

@ -46,6 +61,15 @@ scrcpy --turn-screen-off --stay-awake
scrcpy -Sw # short version
```

Since Android 15, it is possible to change this setting manually:

```
# turn screen off (0 for main display)
adb shell cmd display power-off 0
# turn screen on
adb shell cmd display power-on 0
```


## Show touches

@ -62,6 +86,16 @@ scrcpy -t # short version

Note that it only shows _physical_ touches (by a finger on the device).

It is possible to change this setting manually:

```bash
# get the current show_touches value
adb shell settings get system show_touches
# enable show_touches
adb shell settings put system show_touches 1
# disable show_touches
adb shell settings put system show_touches 0
```

## Power off on close

@ -78,3 +112,48 @@ By default, on start, the device is powered on. To prevent this behavior:
```bash
scrcpy --no-power-on
```


## Start Android app

To list the Android apps installed on the device:

```bash
scrcpy --list-apps
```

An app, selected by its package name, can be launched on start:

```
scrcpy --start-app=org.mozilla.firefox
```

This feature can be used to run an app in a [virtual
display](virtual_display.md):

```
scrcpy --new-display=1920x1080 --start-app=org.videolan.vlc
```

The app can optionally be force-stopped before being started, by adding a `+`
prefix:

```
scrcpy --start-app=+org.mozilla.firefox
```

For convenience, it is also possible to select an app by its name, by adding a
`?` prefix:

```
scrcpy --start-app=?firefox
```

But retrieving app names may take some time (sometimes several seconds), so
passing the package name is recommended.

The `+` and `?` prefixes can be combined (in that order):

```
scrcpy --start-app=+?firefox
```
@ -34,9 +34,9 @@ Two modes allow to simulate a physical HID mouse on the device.
In these modes, the computer mouse is "captured": the mouse pointer disappears
from the computer and appears on the Android device instead.

Special capture keys, either <kbd>Alt</kbd> or <kbd>Super</kbd>, toggle
(disable or enable) the mouse capture. Use one of them to give the control of
the mouse back to the computer.
The [shortcut mod](shortcuts.md) (either <kbd>Alt</kbd> or <kbd>Super</kbd> by
default) toggles (disable or enable) the mouse capture. Use one of them to give
the control of the mouse back to the computer.


### UHID

@ -53,7 +53,8 @@ _<kbd>[Super]</kbd> is typically the <kbd>Windows</kbd> or <kbd>Cmd</kbd> key._
| Open keyboard settings (HID keyboard only) | <kbd>MOD</kbd>+<kbd>k</kbd>
| Enable/disable FPS counter (on stdout) | <kbd>MOD</kbd>+<kbd>i</kbd>
| Pinch-to-zoom/rotate | <kbd>Ctrl</kbd>+_click-and-move_
| Tilt (slide vertically with 2 fingers) | <kbd>Shift</kbd>+_click-and-move_
| Tilt vertically (slide with 2 fingers) | <kbd>Shift</kbd>+_click-and-move_
| Tilt horizontally (slide with 2 fingers) | <kbd>Ctrl</kbd>+<kbd>Shift</kbd>+_click-and-move_
| Drag & drop APK file | Install APK from computer
| Drag & drop non-APK file | [Push file to device](control.md#push-file-to-device)
@ -189,15 +189,15 @@ The configuration is available independently for the display,
[v4l2 sinks](video.md#video4linux) and [audio](audio.md#buffering) playback.

```bash
scrcpy --display-buffer=50 # add 50ms buffering for display
scrcpy --v4l2-buffer=300 # add 300ms buffering for v4l2 sink
scrcpy --video-buffer=50 # add 50ms buffering for video playback
scrcpy --audio-buffer=200 # set 200ms buffering for audio playback
scrcpy --v4l2-buffer=300 # add 300ms buffering for v4l2 sink
```

They can be applied simultaneously:

```bash
scrcpy --display-buffer=50 --v4l2-buffer=300
scrcpy --video-buffer=50 --v4l2-buffer=300
```
26
doc/virtual_display.md
Normal file
@ -0,0 +1,26 @@
# Virtual display

## New display

To mirror a new virtual display instead of the device screen:

```bash
scrcpy --new-display=1920x1080
scrcpy --new-display=1920x1080/420 # force 420 dpi
scrcpy --new-display # use the main display size and density
scrcpy --new-display -m1920 # ... scaled to fit a max size of 1920
scrcpy --new-display=/240 # use the main display size and 240 dpi
```

## Start app

On some devices, a launcher is available in the virtual display.

When no launcher is available, the virtual display is empty. In that case, you
must [start an Android app](device.md#start-android-app).

For example:

```bash
scrcpy --new-display=1920x1080 --start-app=org.videolan.vlc
```
4
gradle/wrapper/gradle-wrapper.properties
vendored
@ -1,5 +1,7 @@
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-8.4-bin.zip
distributionUrl=https\://services.gradle.org/distributions/gradle-8.9-bin.zip
# https://gradle.org/release-checksums/
distributionSha256Sum=d725d707bfabd4dfdc958c624003b3c80accc03f7037b5122c4b1d0ef15cecab
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
104
release.mk
@ -9,13 +9,12 @@
|
||||
# the server to the device.
|
||||
|
||||
.PHONY: default clean \
|
||||
test \
|
||||
test test-client test-server \
|
||||
build-server \
|
||||
prepare-deps \
|
||||
prepare-deps-win32 prepare-deps-win64 \
|
||||
build-win32 build-win64 \
|
||||
dist-win32 dist-win64 \
|
||||
zip-win32 zip-win64 \
|
||||
release
|
||||
package release
|
||||
|
||||
GRADLE ?= ./gradlew
|
||||
|
||||
@ -26,7 +25,7 @@ WIN64_BUILD_DIR := build-win64
|
||||
|
||||
VERSION ?= $(shell git describe --tags --exclude='*install-release' --always)
|
||||
|
||||
DIST := dist
|
||||
ZIP := zip
|
||||
WIN32_TARGET_DIR := scrcpy-win32-$(VERSION)
|
||||
WIN64_TARGET_DIR := scrcpy-win64-$(VERSION)
|
||||
WIN32_TARGET := $(WIN32_TARGET_DIR).zip
|
||||
@ -34,33 +33,28 @@ WIN64_TARGET := $(WIN64_TARGET_DIR).zip
|
||||
|
||||
RELEASE_DIR := release-$(VERSION)
|
||||
|
||||
release: clean test build-server zip-win32 zip-win64
|
||||
mkdir -p "$(RELEASE_DIR)"
|
||||
cp "$(SERVER_BUILD_DIR)/server/scrcpy-server" \
|
||||
"$(RELEASE_DIR)/scrcpy-server-$(VERSION)"
|
||||
cp "$(DIST)/$(WIN32_TARGET)" "$(RELEASE_DIR)"
|
||||
cp "$(DIST)/$(WIN64_TARGET)" "$(RELEASE_DIR)"
|
||||
cd "$(RELEASE_DIR)" && \
|
||||
sha256sum "scrcpy-server-$(VERSION)" \
|
||||
"scrcpy-win32-$(VERSION).zip" \
|
||||
"scrcpy-win64-$(VERSION).zip" > SHA256SUMS.txt
|
||||
@echo "Release generated in $(RELEASE_DIR)/"
|
||||
release: clean test build-server build-win32 build-win64 package
|
||||
|
||||
clean:
|
||||
$(GRADLE) clean
|
||||
rm -rf "$(DIST)" "$(TEST_BUILD_DIR)" "$(SERVER_BUILD_DIR)" \
|
||||
rm -rf "$(ZIP)" "$(TEST_BUILD_DIR)" "$(SERVER_BUILD_DIR)" \
|
||||
"$(WIN32_BUILD_DIR)" "$(WIN64_BUILD_DIR)"
|
||||
|
||||
test:
|
||||
test-client:
|
||||
[ -d "$(TEST_BUILD_DIR)" ] || ( mkdir "$(TEST_BUILD_DIR)" && \
|
||||
meson setup "$(TEST_BUILD_DIR)" -Db_sanitize=address )
|
||||
ninja -C "$(TEST_BUILD_DIR)"
|
||||
|
||||
test-server:
|
||||
$(GRADLE) -p server check
|
||||
|
||||
test: test-client test-server
|
||||
|
||||
build-server:
|
||||
[ -d "$(SERVER_BUILD_DIR)" ] || ( mkdir "$(SERVER_BUILD_DIR)" && \
|
||||
meson setup "$(SERVER_BUILD_DIR)" --buildtype release -Dcompile_app=false )
|
||||
ninja -C "$(SERVER_BUILD_DIR)"
|
||||
$(GRADLE) -p server assembleRelease
|
||||
mkdir -p "$(SERVER_BUILD_DIR)/server"
|
||||
cp server/build/outputs/apk/release/server-release-unsigned.apk \
|
||||
"$(SERVER_BUILD_DIR)/server/scrcpy-server"
|
||||
|
||||
prepare-deps-win32:
|
||||
@app/deps/adb.sh win32
|
||||
@ -86,6 +80,15 @@ build-win32: prepare-deps-win32
|
||||
-Dcompile_server=false \
|
||||
-Dportable=true
|
||||
ninja -C "$(WIN32_BUILD_DIR)"
|
||||
# Group intermediate outputs into a 'dist' directory
|
||||
mkdir -p "$(WIN32_BUILD_DIR)/dist"
|
||||
cp "$(WIN32_BUILD_DIR)"/app/scrcpy.exe "$(WIN32_BUILD_DIR)/dist/"
|
||||
cp app/data/scrcpy-console.bat "$(WIN32_BUILD_DIR)/dist/"
|
||||
cp app/data/scrcpy-noconsole.vbs "$(WIN32_BUILD_DIR)/dist/"
|
||||
cp app/data/icon.png "$(WIN32_BUILD_DIR)/dist/"
|
||||
cp app/data/open_a_terminal_here.bat "$(WIN32_BUILD_DIR)/dist/"
|
||||
cp app/deps/work/install/win32/bin/*.dll "$(WIN32_BUILD_DIR)/dist/"
|
||||
cp app/deps/work/install/win32/bin/adb.exe "$(WIN32_BUILD_DIR)/dist/"
|
||||
|
||||
build-win64: prepare-deps-win64
|
||||
rm -rf "$(WIN64_BUILD_DIR)"
|
||||
@ -99,33 +102,40 @@ build-win64: prepare-deps-win64
|
||||
-Dcompile_server=false \
|
||||
-Dportable=true
|
||||
ninja -C "$(WIN64_BUILD_DIR)"
|
||||
# Group intermediate outputs into a 'dist' directory
|
||||
mkdir -p "$(WIN64_BUILD_DIR)/dist"
|
||||
cp "$(WIN64_BUILD_DIR)"/app/scrcpy.exe "$(WIN64_BUILD_DIR)/dist/"
|
||||
cp app/data/scrcpy-console.bat "$(WIN64_BUILD_DIR)/dist/"
|
||||
cp app/data/scrcpy-noconsole.vbs "$(WIN64_BUILD_DIR)/dist/"
|
||||
cp app/data/icon.png "$(WIN64_BUILD_DIR)/dist/"
|
||||
cp app/data/open_a_terminal_here.bat "$(WIN64_BUILD_DIR)/dist/"
|
||||
cp app/deps/work/install/win64/bin/*.dll "$(WIN64_BUILD_DIR)/dist/"
|
||||
cp app/deps/work/install/win64/bin/adb.exe "$(WIN64_BUILD_DIR)/dist/"
|
||||
|
||||
dist-win32: build-server build-win32
|
||||
mkdir -p "$(DIST)/$(WIN32_TARGET_DIR)"
|
||||
cp "$(SERVER_BUILD_DIR)"/server/scrcpy-server "$(DIST)/$(WIN32_TARGET_DIR)/"
|
||||
cp "$(WIN32_BUILD_DIR)"/app/scrcpy.exe "$(DIST)/$(WIN32_TARGET_DIR)/"
|
||||
cp app/data/scrcpy-console.bat "$(DIST)/$(WIN32_TARGET_DIR)/"
|
||||
cp app/data/scrcpy-noconsole.vbs "$(DIST)/$(WIN32_TARGET_DIR)/"
|
||||
cp app/data/icon.png "$(DIST)/$(WIN32_TARGET_DIR)/"
|
||||
cp app/data/open_a_terminal_here.bat "$(DIST)/$(WIN32_TARGET_DIR)/"
|
||||
cp app/deps/work/install/win32/bin/*.dll "$(DIST)/$(WIN32_TARGET_DIR)/"
|
||||
cp app/deps/work/install/win32/bin/adb.exe "$(DIST)/$(WIN32_TARGET_DIR)/"
|
||||
|
||||
dist-win64: build-server build-win64
|
||||
mkdir -p "$(DIST)/$(WIN64_TARGET_DIR)"
|
||||
cp "$(SERVER_BUILD_DIR)"/server/scrcpy-server "$(DIST)/$(WIN64_TARGET_DIR)/"
|
||||
cp "$(WIN64_BUILD_DIR)"/app/scrcpy.exe "$(DIST)/$(WIN64_TARGET_DIR)/"
|
||||
cp app/data/scrcpy-console.bat "$(DIST)/$(WIN64_TARGET_DIR)/"
|
||||
cp app/data/scrcpy-noconsole.vbs "$(DIST)/$(WIN64_TARGET_DIR)/"
|
||||
cp app/data/icon.png "$(DIST)/$(WIN64_TARGET_DIR)/"
|
||||
cp app/data/open_a_terminal_here.bat "$(DIST)/$(WIN64_TARGET_DIR)/"
|
||||
cp app/deps/work/install/win64/bin/*.dll "$(DIST)/$(WIN64_TARGET_DIR)/"
|
||||
cp app/deps/work/install/win64/bin/adb.exe "$(DIST)/$(WIN64_TARGET_DIR)/"
|
||||
|
||||
zip-win32: dist-win32
|
||||
cd "$(DIST)"; \
|
||||
zip-win32:
|
||||
mkdir -p "$(ZIP)/$(WIN32_TARGET_DIR)"
|
||||
cp -r "$(WIN32_BUILD_DIR)/dist/." "$(ZIP)/$(WIN32_TARGET_DIR)/"
|
||||
cp "$(SERVER_BUILD_DIR)"/server/scrcpy-server "$(ZIP)/$(WIN32_TARGET_DIR)/"
|
||||
cd "$(ZIP)"; \
|
||||
zip -r "$(WIN32_TARGET)" "$(WIN32_TARGET_DIR)"
|
||||
rm -rf "$(ZIP)/$(WIN32_TARGET_DIR)"
|
||||
|
||||
zip-win64: dist-win64
|
||||
cd "$(DIST)"; \
|
||||
zip-win64:
|
||||
mkdir -p "$(ZIP)/$(WIN64_TARGET_DIR)"
|
||||
cp -r "$(WIN64_BUILD_DIR)/dist/." "$(ZIP)/$(WIN64_TARGET_DIR)/"
|
||||
cp "$(SERVER_BUILD_DIR)"/server/scrcpy-server "$(ZIP)/$(WIN64_TARGET_DIR)/"
|
||||
cd "$(ZIP)"; \
|
||||
zip -r "$(WIN64_TARGET)" "$(WIN64_TARGET_DIR)"
|
||||
rm -rf "$(ZIP)/$(WIN64_TARGET_DIR)"
|
||||
|
||||
package: zip-win32 zip-win64
|
||||
mkdir -p "$(RELEASE_DIR)"
|
||||
cp "$(SERVER_BUILD_DIR)/server/scrcpy-server" \
|
||||
"$(RELEASE_DIR)/scrcpy-server-$(VERSION)"
|
||||
cp "$(ZIP)/$(WIN32_TARGET)" "$(RELEASE_DIR)"
|
||||
cp "$(ZIP)/$(WIN64_TARGET)" "$(RELEASE_DIR)"
|
||||
cd "$(RELEASE_DIR)" && \
|
||||
sha256sum "scrcpy-server-$(VERSION)" \
|
||||
"scrcpy-win32-$(VERSION).zip" \
|
||||
"scrcpy-win64-$(VERSION).zip" > SHA256SUMS.txt
|
||||
@echo "Release generated in $(RELEASE_DIR)/"
|
||||
|
@ -2,11 +2,11 @@ apply plugin: 'com.android.application'

android {
namespace 'com.genymobile.scrcpy'
compileSdk 34
compileSdk 35
defaultConfig {
applicationId "com.genymobile.scrcpy"
minSdkVersion 21
targetSdkVersion 34
targetSdkVersion 35
versionCode 20700
versionName "2.7"
testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner"
@ -14,8 +14,8 @@ set -e
SCRCPY_DEBUG=false
SCRCPY_VERSION_NAME=2.7

PLATFORM=${ANDROID_PLATFORM:-34}
BUILD_TOOLS=${ANDROID_BUILD_TOOLS:-34.0.0}
PLATFORM=${ANDROID_PLATFORM:-35}
BUILD_TOOLS=${ANDROID_BUILD_TOOLS:-35.0.0}
BUILD_TOOLS_DIR="$ANDROID_HOME/build-tools/$BUILD_TOOLS"

BUILD_DIR="$(realpath ${BUILD_DIR:-build_manual})"
@ -45,10 +45,10 @@ EOF

echo "Generating java from aidl..."
cd "$SERVER_DIR/src/main/aidl"
"$BUILD_TOOLS_DIR/aidl" -o"$GEN_DIR" android/view/IRotationWatcher.aidl
"$BUILD_TOOLS_DIR/aidl" -o"$GEN_DIR" \
"$BUILD_TOOLS_DIR/aidl" -o"$GEN_DIR" -I. android/view/IRotationWatcher.aidl
"$BUILD_TOOLS_DIR/aidl" -o"$GEN_DIR" -I. \
    android/content/IOnPrimaryClipChangedListener.aidl
"$BUILD_TOOLS_DIR/aidl" -o"$GEN_DIR" android/view/IDisplayFoldListener.aidl
"$BUILD_TOOLS_DIR/aidl" -o"$GEN_DIR" -I. android/view/IDisplayFoldListener.aidl

SRC=( \
com/genymobile/scrcpy/*.java \
@ -0,0 +1,32 @@
package com.genymobile.scrcpy;

import android.os.Build;

/**
 * Android version code constants, done right.
 * <p/>
 * <a href="https://apilevels.com/">API levels</a>
 */
public final class AndroidVersions {

    private AndroidVersions() {
        // not instantiable
    }

    public static final int API_21_ANDROID_5_0 = Build.VERSION_CODES.LOLLIPOP;
    public static final int API_22_ANDROID_5_1 = Build.VERSION_CODES.LOLLIPOP_MR1;
    public static final int API_23_ANDROID_6_0 = Build.VERSION_CODES.M;
    public static final int API_24_ANDROID_7_0 = Build.VERSION_CODES.N;
    public static final int API_25_ANDROID_7_1 = Build.VERSION_CODES.N_MR1;
    public static final int API_26_ANDROID_8_0 = Build.VERSION_CODES.O;
    public static final int API_27_ANDROID_8_1 = Build.VERSION_CODES.O_MR1;
    public static final int API_28_ANDROID_9 = Build.VERSION_CODES.P;
    public static final int API_29_ANDROID_10 = Build.VERSION_CODES.Q;
    public static final int API_30_ANDROID_11 = Build.VERSION_CODES.R;
    public static final int API_31_ANDROID_12 = Build.VERSION_CODES.S;
    public static final int API_32_ANDROID_12L = Build.VERSION_CODES.S_V2;
    public static final int API_33_ANDROID_13 = Build.VERSION_CODES.TIRAMISU;
    public static final int API_34_ANDROID_14 = Build.VERSION_CODES.UPSIDE_DOWN_CAKE;
    public static final int API_35_ANDROID_15 = Build.VERSION_CODES.VANILLA_ICE_CREAM;

}
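Elsewhere in this changeset, raw `Build.VERSION_CODES` checks are replaced by these self-describing constants, so both the API level and the Android release are visible at the call site. A typical guard then reads as follows (a sketch; the class and method names are made up, but the check itself mirrors the audio-support test used later in this diff):

```java
import android.os.Build;

final class VersionCheckSketch {
    static boolean audioCaptureSupported() {
        // Before: Build.VERSION.SDK_INT >= Build.VERSION_CODES.R
        // After: the API level (30) and release (Android 11) are explicit in the name.
        return Build.VERSION.SDK_INT >= AndroidVersions.API_30_ANDROID_11;
    }
}
```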
@ -19,7 +19,7 @@ public final class CleanUp {
|
||||
private static final int MSG_TYPE_MASK = 0b11;
|
||||
private static final int MSG_TYPE_RESTORE_STAY_ON = 0;
|
||||
private static final int MSG_TYPE_DISABLE_SHOW_TOUCHES = 1;
|
||||
private static final int MSG_TYPE_RESTORE_NORMAL_POWER_MODE = 2;
|
||||
private static final int MSG_TYPE_RESTORE_DISPLAY_POWER = 2;
|
||||
private static final int MSG_TYPE_POWER_OFF_SCREEN = 3;
|
||||
|
||||
private static final int MSG_PARAM_SHIFT = 2;
|
||||
@ -63,8 +63,8 @@ public final class CleanUp {
|
||||
return sendMessage(MSG_TYPE_DISABLE_SHOW_TOUCHES, disableOnExit ? 1 : 0);
|
||||
}
|
||||
|
||||
public boolean setRestoreNormalPowerMode(boolean restoreOnExit) {
|
||||
return sendMessage(MSG_TYPE_RESTORE_NORMAL_POWER_MODE, restoreOnExit ? 1 : 0);
|
||||
public boolean setRestoreDisplayPower(boolean restoreOnExit) {
|
||||
return sendMessage(MSG_TYPE_RESTORE_DISPLAY_POWER, restoreOnExit ? 1 : 0);
|
||||
}
|
||||
|
||||
public boolean setPowerOffScreen(boolean powerOffScreenOnExit) {
|
||||
@ -86,7 +86,7 @@ public final class CleanUp {
|
||||
|
||||
int restoreStayOn = -1;
|
||||
boolean disableShowTouches = false;
|
||||
boolean restoreNormalPowerMode = false;
|
||||
boolean restoreDisplayPower = false;
|
||||
boolean powerOffScreen = false;
|
||||
|
||||
try {
|
||||
@ -102,8 +102,8 @@ public final class CleanUp {
|
||||
case MSG_TYPE_DISABLE_SHOW_TOUCHES:
|
||||
disableShowTouches = param != 0;
|
||||
break;
|
||||
case MSG_TYPE_RESTORE_NORMAL_POWER_MODE:
|
||||
restoreNormalPowerMode = param != 0;
|
||||
case MSG_TYPE_RESTORE_DISPLAY_POWER:
|
||||
restoreDisplayPower = param != 0;
|
||||
break;
|
||||
case MSG_TYPE_POWER_OFF_SCREEN:
|
||||
powerOffScreen = param != 0;
|
||||
@ -137,13 +137,13 @@ public final class CleanUp {
|
||||
}
|
||||
}
|
||||
|
||||
if (Device.isScreenOn()) {
|
||||
if (Device.isScreenOn() && displayId != Device.DISPLAY_ID_NONE) {
|
||||
if (powerOffScreen) {
|
||||
Ln.i("Power off screen");
|
||||
Device.powerOffScreen(displayId);
|
||||
} else if (restoreNormalPowerMode) {
|
||||
Ln.i("Restoring normal power mode");
|
||||
Device.setScreenPowerMode(Device.POWER_MODE_NORMAL);
|
||||
} else if (restoreDisplayPower) {
|
||||
Ln.i("Restoring display power");
|
||||
Device.setDisplayPower(displayId, true);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -4,7 +4,6 @@ import android.annotation.TargetApi;
|
||||
import android.content.AttributionSource;
|
||||
import android.content.Context;
|
||||
import android.content.ContextWrapper;
|
||||
import android.os.Build;
|
||||
import android.os.Process;
|
||||
|
||||
public final class FakeContext extends ContextWrapper {
|
||||
@ -32,7 +31,7 @@ public final class FakeContext extends ContextWrapper {
|
||||
return PACKAGE_NAME;
|
||||
}
|
||||
|
||||
@TargetApi(Build.VERSION_CODES.S)
|
||||
@TargetApi(AndroidVersions.API_31_ANDROID_12)
|
||||
@Override
|
||||
public AttributionSource getAttributionSource() {
|
||||
AttributionSource.Builder builder = new AttributionSource.Builder(Process.SHELL_UID);
|
||||
|
@ -2,6 +2,8 @@ package com.genymobile.scrcpy;
|
||||
|
||||
import com.genymobile.scrcpy.audio.AudioCodec;
|
||||
import com.genymobile.scrcpy.audio.AudioSource;
|
||||
import com.genymobile.scrcpy.device.Device;
|
||||
import com.genymobile.scrcpy.device.NewDisplay;
|
||||
import com.genymobile.scrcpy.device.Size;
|
||||
import com.genymobile.scrcpy.util.CodecOption;
|
||||
import com.genymobile.scrcpy.util.Ln;
|
||||
@ -30,7 +32,7 @@ public class Options {
|
||||
private int videoBitRate = 8000000;
|
||||
private int audioBitRate = 128000;
|
||||
private float maxFps;
|
||||
private int lockVideoOrientation = -1;
|
||||
private int lockVideoOrientation = Device.LOCK_VIDEO_ORIENTATION_UNLOCKED;
|
||||
private boolean tunnelForward;
|
||||
private Rect crop;
|
||||
private boolean control = true;
|
||||
@ -54,10 +56,13 @@ public class Options {
|
||||
private boolean cleanup = true;
|
||||
private boolean powerOn = true;
|
||||
|
||||
private NewDisplay newDisplay;
|
||||
|
||||
private boolean listEncoders;
|
||||
private boolean listDisplays;
|
||||
private boolean listCameras;
|
||||
private boolean listCameraSizes;
|
||||
private boolean listApps;
|
||||
|
||||
// Options not used by the scrcpy client, but useful to use scrcpy-server directly
|
||||
private boolean sendDeviceMeta = true; // send device name and size
|
||||
@ -205,8 +210,12 @@ public class Options {
|
||||
return powerOn;
|
||||
}
|
||||
|
||||
public NewDisplay getNewDisplay() {
|
||||
return newDisplay;
|
||||
}
|
||||
|
||||
public boolean getList() {
|
||||
return listEncoders || listDisplays || listCameras || listCameraSizes;
|
||||
return listEncoders || listDisplays || listCameras || listCameraSizes || listApps;
|
||||
}
|
||||
|
||||
public boolean getListEncoders() {
|
||||
@ -225,6 +234,10 @@ public class Options {
|
||||
return listCameraSizes;
|
||||
}
|
||||
|
||||
public boolean getListApps() {
|
||||
return listApps;
|
||||
}
|
||||
|
||||
public boolean getSendDeviceMeta() {
|
||||
return sendDeviceMeta;
|
||||
}
|
||||
@ -241,6 +254,10 @@ public class Options {
|
||||
return sendCodecMeta;
|
||||
}
|
||||
|
||||
public void resetLockVideoOrientation() {
|
||||
this.lockVideoOrientation = Device.LOCK_VIDEO_ORIENTATION_UNLOCKED;
|
||||
}
|
||||
|
||||
@SuppressWarnings("MethodLength")
|
||||
public static Options parse(String... args) {
|
||||
if (args.length < 1) {
|
||||
@ -388,6 +405,9 @@ public class Options {
|
||||
case "list_camera_sizes":
|
||||
options.listCameraSizes = Boolean.parseBoolean(value);
|
||||
break;
|
||||
case "list_apps":
|
||||
options.listApps = Boolean.parseBoolean(value);
|
||||
break;
|
||||
case "camera_id":
|
||||
if (!value.isEmpty()) {
|
||||
options.cameraId = value;
|
||||
@ -418,6 +438,9 @@ public class Options {
|
||||
case "camera_high_speed":
|
||||
options.cameraHighSpeed = Boolean.parseBoolean(value);
|
||||
break;
|
||||
case "new_display":
|
||||
options.newDisplay = parseNewDisplay(value);
|
||||
break;
|
||||
case "send_device_meta":
|
||||
options.sendDeviceMeta = Boolean.parseBoolean(value);
|
||||
break;
|
||||
@ -475,6 +498,9 @@ public class Options {
|
||||
}
|
||||
int width = Integer.parseInt(tokens[0]);
|
||||
int height = Integer.parseInt(tokens[1]);
|
||||
if (width <= 0 || height <= 0) {
|
||||
throw new IllegalArgumentException("Invalid non-positive size dimension: \"" + size + "\"");
|
||||
}
|
||||
return new Size(width, height);
|
||||
}
|
||||
|
||||
@ -501,4 +527,36 @@ public class Options {
|
||||
throw new IllegalArgumentException("Invalid float value for " + key + ": \"" + value + "\"");
|
||||
}
|
||||
}
|
||||
|
||||
private static NewDisplay parseNewDisplay(String newDisplay) {
|
||||
// Possible inputs:
|
||||
// - "" (empty string)
|
||||
// - "<width>x<height>/<dpi>"
|
||||
// - "<width>x<height>"
|
||||
// - "/<dpi>"
|
||||
if (newDisplay.isEmpty()) {
|
||||
return new NewDisplay();
|
||||
}
|
||||
|
||||
String[] tokens = newDisplay.split("/");
|
||||
|
||||
Size size;
|
||||
if (!tokens[0].isEmpty()) {
|
||||
size = parseSize(tokens[0]);
|
||||
} else {
|
||||
size = null;
|
||||
}
|
||||
|
||||
int dpi;
|
||||
if (tokens.length >= 2) {
|
||||
dpi = Integer.parseInt(tokens[1]);
|
||||
if (dpi <= 0) {
|
||||
throw new IllegalArgumentException("Invalid non-positive dpi: " + tokens[1]);
|
||||
}
|
||||
} else {
|
||||
dpi = 0;
|
||||
}
|
||||
|
||||
return new NewDisplay(size, dpi);
|
||||
}
|
||||
}
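To make the accepted `new_display` values concrete, the parser above maps inputs as follows (a hypothetical test-style sketch; the `NewDisplay`/`Size` accessor names are assumptions, and since `parseNewDisplay` is private a real test would have to live in the same package):

```java
// "1920x1080/420" -> explicit size 1920x1080, dpi 420
// "1920x1080"     -> explicit size, dpi 0 (fall back to a default density)
// "/240"          -> no explicit size (main display size), dpi 240
// ""              -> neither size nor dpi specified
NewDisplay d = Options.parseNewDisplay("1920x1080/420");
assert d.getSize().getWidth() == 1920 && d.getSize().getHeight() == 1080;
assert d.getDpi() == 420;
```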
|
||||
|
@ -9,16 +9,17 @@ import com.genymobile.scrcpy.audio.AudioRawRecorder;
|
||||
import com.genymobile.scrcpy.audio.AudioSource;
|
||||
import com.genymobile.scrcpy.control.ControlChannel;
|
||||
import com.genymobile.scrcpy.control.Controller;
|
||||
import com.genymobile.scrcpy.control.DeviceMessage;
|
||||
import com.genymobile.scrcpy.device.ConfigurationException;
|
||||
import com.genymobile.scrcpy.device.DesktopConnection;
|
||||
import com.genymobile.scrcpy.device.Device;
|
||||
import com.genymobile.scrcpy.device.NewDisplay;
|
||||
import com.genymobile.scrcpy.device.Streamer;
|
||||
import com.genymobile.scrcpy.util.Ln;
|
||||
import com.genymobile.scrcpy.util.LogUtils;
|
||||
import com.genymobile.scrcpy.util.Settings;
|
||||
import com.genymobile.scrcpy.util.SettingsException;
|
||||
import com.genymobile.scrcpy.video.CameraCapture;
|
||||
import com.genymobile.scrcpy.video.NewDisplayCapture;
|
||||
import com.genymobile.scrcpy.video.ScreenCapture;
|
||||
import com.genymobile.scrcpy.video.SurfaceCapture;
|
||||
import com.genymobile.scrcpy.video.SurfaceEncoder;
|
||||
@ -121,16 +122,41 @@ public final class Server {
|
||||
}
|
||||
|
||||
private static void scrcpy(Options options) throws IOException, ConfigurationException {
|
||||
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.S && options.getVideoSource() == VideoSource.CAMERA) {
|
||||
if (Build.VERSION.SDK_INT < AndroidVersions.API_31_ANDROID_12 && options.getVideoSource() == VideoSource.CAMERA) {
|
||||
Ln.e("Camera mirroring is not supported before Android 12");
|
||||
throw new ConfigurationException("Camera mirroring is not supported");
|
||||
}
|
||||
|
||||
if (Build.VERSION.SDK_INT < AndroidVersions.API_29_ANDROID_10 && options.getNewDisplay() != null) {
|
||||
Ln.e("New virtual display is not supported before Android 10");
|
||||
throw new ConfigurationException("New virtual display is not supported");
|
||||
}
|
||||
|
||||
if (Build.VERSION.SDK_INT >= AndroidVersions.API_34_ANDROID_14) {
|
||||
int lockVideoOrientation = options.getLockVideoOrientation();
|
||||
if (lockVideoOrientation != Device.LOCK_VIDEO_ORIENTATION_UNLOCKED) {
|
||||
if (lockVideoOrientation != Device.LOCK_VIDEO_ORIENTATION_INITIAL_AUTO) {
|
||||
Ln.e("--lock-video-orientation is broken on Android >= 14: <https://github.com/Genymobile/scrcpy/issues/4011>");
|
||||
throw new ConfigurationException("--lock-video-orientation is broken on Android >= 14");
|
||||
} else {
|
||||
// If the flag has been set automatically (because v4l2 sink is enabled), do not fail
|
||||
Ln.w("--lock-video-orientation is ignored on Android >= 14: <https://github.com/Genymobile/scrcpy/issues/4011>");
|
||||
}
|
||||
}
|
||||
if (options.getCrop() != null) {
|
||||
Ln.e("--crop is broken on Android >= 14: <https://github.com/Genymobile/scrcpy/issues/4162>");
|
||||
throw new ConfigurationException("Crop is not broken on Android >= 14");
|
||||
}
|
||||
}
|
||||
|
||||
CleanUp cleanUp = null;
|
||||
Thread initThread = null;
|
||||
|
||||
NewDisplay newDisplay = options.getNewDisplay();
|
||||
int displayId = newDisplay == null ? options.getDisplayId() : Device.DISPLAY_ID_NONE;
|
||||
|
||||
if (options.getCleanup()) {
|
||||
cleanUp = CleanUp.configure(options.getDisplayId());
|
||||
cleanUp = CleanUp.configure(displayId);
|
||||
initThread = startInitThread(options, cleanUp);
|
||||
}
|
||||
|
||||
@ -140,9 +166,6 @@ public final class Server {
|
||||
boolean video = options.getVideo();
|
||||
boolean audio = options.getAudio();
|
||||
boolean sendDummyByte = options.getSendDummyByte();
|
||||
boolean camera = video && options.getVideoSource() == VideoSource.CAMERA;
|
||||
|
||||
final Device device = camera ? null : new Device(options);
|
||||
|
||||
Workarounds.apply();
|
||||
|
||||
@ -154,13 +177,11 @@ public final class Server {
|
||||
connection.sendDeviceMeta(Device.getDeviceName());
|
||||
}
|
||||
|
||||
Controller controller = null;
|
||||
|
||||
if (control) {
|
||||
ControlChannel controlChannel = connection.getControlChannel();
|
||||
Controller controller = new Controller(device, controlChannel, cleanUp, options.getClipboardAutosync(), options.getPowerOn());
|
||||
device.setClipboardListener(text -> {
|
||||
DeviceMessage msg = DeviceMessage.createClipboard(text);
|
||||
controller.getSender().send(msg);
|
||||
});
|
||||
controller = new Controller(displayId, controlChannel, cleanUp, options.getClipboardAutosync(), options.getPowerOn());
|
||||
asyncProcessors.add(controller);
|
||||
}
|
||||
|
||||
@ -190,7 +211,13 @@ public final class Server {
|
||||
options.getSendFrameMeta());
|
||||
SurfaceCapture surfaceCapture;
|
||||
if (options.getVideoSource() == VideoSource.DISPLAY) {
|
||||
surfaceCapture = new ScreenCapture(device);
|
||||
if (newDisplay != null) {
|
||||
surfaceCapture = new NewDisplayCapture(controller, newDisplay, options.getMaxSize());
|
||||
} else {
|
||||
assert displayId != Device.DISPLAY_ID_NONE;
|
||||
surfaceCapture = new ScreenCapture(controller, displayId, options.getMaxSize(), options.getCrop(),
|
||||
options.getLockVideoOrientation());
|
||||
}
|
||||
} else {
|
||||
surfaceCapture = new CameraCapture(options.getCameraId(), options.getCameraFacing(), options.getCameraSize(),
|
||||
options.getMaxSize(), options.getCameraAspectRatio(), options.getCameraFps(), options.getCameraHighSpeed());
|
||||
@ -282,6 +309,11 @@ public final class Server {
|
||||
Workarounds.apply();
|
||||
Ln.i(LogUtils.buildCameraListMessage(options.getListCameraSizes()));
|
||||
}
|
||||
if (options.getListApps()) {
|
||||
Workarounds.apply();
|
||||
Ln.i("Processing Android apps... (this may take some time)");
|
||||
Ln.i(LogUtils.buildAppListMessage());
|
||||
}
|
||||
// Just print the requested data, do not mirror
|
||||
return;
|
||||
}
|
||||
|
@ -52,7 +52,7 @@ public final class Workarounds {
|
||||
}
|
||||
|
||||
public static void apply() {
|
||||
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.S) {
|
||||
if (Build.VERSION.SDK_INT >= AndroidVersions.API_31_ANDROID_12) {
|
||||
// On some Samsung devices, DisplayManagerGlobal.getDisplayInfoLocked() calls ActivityThread.currentActivityThread().getConfiguration(),
|
||||
// which requires a non-null ConfigurationController.
|
||||
// ConfigurationController was introduced in Android 12, so do not attempt to set it on lower versions.
|
||||
@ -155,7 +155,7 @@ public final class Workarounds {
|
||||
}
|
||||
}
|
||||
|
||||
@TargetApi(Build.VERSION_CODES.R)
|
||||
@TargetApi(AndroidVersions.API_30_ANDROID_11)
|
||||
@SuppressLint("WrongConstant,MissingPermission")
|
||||
public static AudioRecord createAudioRecord(int source, int sampleRate, int channelConfig, int channels, int channelMask, int encoding) throws
|
||||
AudioCaptureException {
|
||||
@ -226,7 +226,7 @@ public final class Workarounds {
|
||||
int[] session = new int[]{AudioManager.AUDIO_SESSION_ID_GENERATE};
|
||||
|
||||
int initResult;
|
||||
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.S) {
|
||||
if (Build.VERSION.SDK_INT < AndroidVersions.API_31_ANDROID_12) {
|
||||
// private native final int native_setup(Object audiorecord_this,
|
||||
// Object /*AudioAttributes*/ attributes,
|
||||
// int[] sampleRate, int channelMask, int channelIndexMask, int audioFormat,
|
||||
@ -252,7 +252,7 @@ public final class Workarounds {
|
||||
Method getParcelMethod = attributionSourceState.getClass().getDeclaredMethod("getParcel");
|
||||
Parcel attributionSourceParcel = (Parcel) getParcelMethod.invoke(attributionSourceState);
|
||||
|
||||
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.UPSIDE_DOWN_CAKE) {
|
||||
if (Build.VERSION.SDK_INT < AndroidVersions.API_34_ANDROID_14) {
|
||||
// private native int native_setup(Object audiorecordThis,
|
||||
// Object /*AudioAttributes*/ attributes,
|
||||
// int[] sampleRate, int channelMask, int channelIndexMask, int audioFormat,
|
||||
|
@ -1,5 +1,6 @@
|
||||
package com.genymobile.scrcpy.audio;
|
||||
|
||||
import com.genymobile.scrcpy.AndroidVersions;
|
||||
import com.genymobile.scrcpy.FakeContext;
|
||||
import com.genymobile.scrcpy.Workarounds;
|
||||
import com.genymobile.scrcpy.util.Ln;
|
||||
@ -45,11 +46,11 @@ public class AudioDirectCapture implements AudioCapture {
|
||||
}
|
||||
}
|
||||
|
||||
@TargetApi(Build.VERSION_CODES.M)
|
||||
@TargetApi(AndroidVersions.API_23_ANDROID_6_0)
|
||||
@SuppressLint({"WrongConstant", "MissingPermission"})
|
||||
private static AudioRecord createAudioRecord(int audioSource) {
|
||||
AudioRecord.Builder builder = new AudioRecord.Builder();
|
||||
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.S) {
|
||||
if (Build.VERSION.SDK_INT >= AndroidVersions.API_31_ANDROID_12) {
|
||||
// On older APIs, Workarounds.fillAppInfo() must be called beforehand
|
||||
builder.setContext(FakeContext.get());
|
||||
}
|
||||
@ -117,7 +118,7 @@ public class AudioDirectCapture implements AudioCapture {
|
||||
|
||||
@Override
|
||||
public void checkCompatibility() throws AudioCaptureException {
|
||||
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.R) {
|
||||
if (Build.VERSION.SDK_INT < AndroidVersions.API_30_ANDROID_11) {
|
||||
Ln.w("Audio disabled: it is not supported before Android 11");
|
||||
throw new AudioCaptureException();
|
||||
}
|
||||
@ -125,7 +126,7 @@ public class AudioDirectCapture implements AudioCapture {
|
||||
|
||||
@Override
|
||||
public void start() throws AudioCaptureException {
|
||||
if (Build.VERSION.SDK_INT == Build.VERSION_CODES.R) {
|
||||
if (Build.VERSION.SDK_INT == AndroidVersions.API_30_ANDROID_11) {
|
||||
startWorkaroundAndroid11();
|
||||
try {
|
||||
tryStartRecording(5, 100);
|
||||
@ -146,7 +147,7 @@ public class AudioDirectCapture implements AudioCapture {
|
||||
}
|
||||
|
||||
@Override
|
||||
@TargetApi(Build.VERSION_CODES.N)
|
||||
@TargetApi(AndroidVersions.API_24_ANDROID_7_0)
|
||||
public int read(ByteBuffer outDirectBuffer, MediaCodec.BufferInfo outBufferInfo) {
|
||||
return reader.read(outDirectBuffer, outBufferInfo);
|
||||
}
|
||||
|
@ -1,14 +1,15 @@
|
||||
package com.genymobile.scrcpy.audio;
|
||||
|
||||
import com.genymobile.scrcpy.AndroidVersions;
|
||||
import com.genymobile.scrcpy.AsyncProcessor;
|
||||
import com.genymobile.scrcpy.device.ConfigurationException;
|
||||
import com.genymobile.scrcpy.device.Streamer;
|
||||
import com.genymobile.scrcpy.util.Codec;
|
||||
import com.genymobile.scrcpy.util.CodecOption;
|
||||
import com.genymobile.scrcpy.util.CodecUtils;
|
||||
import com.genymobile.scrcpy.device.ConfigurationException;
|
||||
import com.genymobile.scrcpy.util.IO;
|
||||
import com.genymobile.scrcpy.util.Ln;
|
||||
import com.genymobile.scrcpy.util.LogUtils;
|
||||
import com.genymobile.scrcpy.device.Streamer;
|
||||
|
||||
import android.annotation.TargetApi;
|
||||
import android.media.MediaCodec;
|
||||
@ -93,7 +94,7 @@ public final class AudioEncoder implements AsyncProcessor {
|
||||
return format;
|
||||
}
|
||||
|
||||
@TargetApi(Build.VERSION_CODES.N)
|
||||
@TargetApi(AndroidVersions.API_24_ANDROID_7_0)
|
||||
private void inputThread(MediaCodec mediaCodec, AudioCapture capture) throws IOException, InterruptedException {
|
||||
final MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
|
||||
|
||||
@ -175,9 +176,9 @@ public final class AudioEncoder implements AsyncProcessor {
|
||||
}
|
||||
}
|
||||
|
||||
@TargetApi(Build.VERSION_CODES.M)
|
||||
@TargetApi(AndroidVersions.API_23_ANDROID_6_0)
|
||||
private void encode() throws IOException, ConfigurationException, AudioCaptureException {
|
||||
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.R) {
|
||||
if (Build.VERSION.SDK_INT < AndroidVersions.API_30_ANDROID_11) {
|
||||
Ln.w("Audio disabled: it is not supported before Android 11");
|
||||
streamer.writeDisableStream(false);
|
||||
return;
|
||||
@ -287,7 +288,13 @@ public final class AudioEncoder implements AsyncProcessor {
|
||||
if (encoderName != null) {
|
||||
Ln.d("Creating audio encoder by name: '" + encoderName + "'");
|
||||
try {
|
||||
return MediaCodec.createByCodecName(encoderName);
|
||||
MediaCodec mediaCodec = MediaCodec.createByCodecName(encoderName);
|
||||
String mimeType = Codec.getMimeType(mediaCodec);
|
||||
if (!codec.getMimeType().equals(mimeType)) {
|
||||
Ln.e("Audio encoder type for \"" + encoderName + "\" (" + mimeType + ") does not match codec type (" + codec.getMimeType() + ")");
|
||||
throw new ConfigurationException("Incorrect encoder type: " + encoderName);
|
||||
}
|
||||
return mediaCodec;
|
||||
} catch (IllegalArgumentException e) {
|
||||
Ln.e("Audio encoder '" + encoderName + "' for " + codec.getName() + " not found\n" + LogUtils.buildAudioEncoderListMessage());
|
||||
throw new ConfigurationException("Unknown encoder: " + encoderName);
|
||||
@ -308,7 +315,7 @@ public final class AudioEncoder implements AsyncProcessor {
|
||||
}
|
||||
|
||||
private final class EncoderCallback extends MediaCodec.Callback {
|
||||
@TargetApi(Build.VERSION_CODES.N)
|
||||
@TargetApi(AndroidVersions.API_24_ANDROID_7_0)
|
||||
@Override
|
||||
public void onInputBufferAvailable(MediaCodec codec, int index) {
|
||||
try {
|
||||
|
@ -1,5 +1,6 @@
|
||||
package com.genymobile.scrcpy.audio;
|
||||
|
||||
import com.genymobile.scrcpy.AndroidVersions;
|
||||
import com.genymobile.scrcpy.FakeContext;
|
||||
import com.genymobile.scrcpy.util.Ln;
|
||||
|
||||
@ -108,7 +109,7 @@ public final class AudioPlaybackCapture implements AudioCapture {
|
||||
|
||||
@Override
|
||||
public void checkCompatibility() throws AudioCaptureException {
|
||||
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.TIRAMISU) {
|
||||
if (Build.VERSION.SDK_INT < AndroidVersions.API_33_ANDROID_13) {
|
||||
Ln.w("Audio disabled: audio playback capture source not supported before Android 13");
|
||||
throw new AudioCaptureException();
|
||||
}
|
||||
@ -130,7 +131,7 @@ public final class AudioPlaybackCapture implements AudioCapture {
|
||||
}
|
||||
|
||||
@Override
|
||||
@TargetApi(Build.VERSION_CODES.N)
|
||||
@TargetApi(AndroidVersions.API_24_ANDROID_7_0)
|
||||
public int read(ByteBuffer outDirectBuffer, MediaCodec.BufferInfo outBufferInfo) {
|
||||
return reader.read(outDirectBuffer, outBufferInfo);
|
||||
}
|
||||
|
@ -1,9 +1,10 @@
|
||||
package com.genymobile.scrcpy.audio;
|
||||
|
||||
import com.genymobile.scrcpy.AndroidVersions;
|
||||
import com.genymobile.scrcpy.AsyncProcessor;
|
||||
import com.genymobile.scrcpy.device.Streamer;
|
||||
import com.genymobile.scrcpy.util.IO;
|
||||
import com.genymobile.scrcpy.util.Ln;
|
||||
import com.genymobile.scrcpy.device.Streamer;
|
||||
|
||||
import android.media.MediaCodec;
|
||||
import android.os.Build;
|
||||
@ -24,7 +25,7 @@ public final class AudioRawRecorder implements AsyncProcessor {
|
||||
}
|
||||
|
||||
private void record() throws IOException, AudioCaptureException {
|
||||
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.R) {
|
||||
if (Build.VERSION.SDK_INT < AndroidVersions.API_30_ANDROID_11) {
|
||||
Ln.w("Audio disabled: it is not supported before Android 11");
|
||||
streamer.writeDisableStream(false);
|
||||
return;
|
||||
|
@ -1,12 +1,12 @@
|
||||
package com.genymobile.scrcpy.audio;
|
||||
|
||||
import com.genymobile.scrcpy.AndroidVersions;
|
||||
import com.genymobile.scrcpy.util.Ln;
|
||||
|
||||
import android.annotation.TargetApi;
|
||||
import android.media.AudioRecord;
|
||||
import android.media.AudioTimestamp;
|
||||
import android.media.MediaCodec;
|
||||
import android.os.Build;
|
||||
|
||||
import java.nio.ByteBuffer;
|
||||
|
||||
@ -26,7 +26,7 @@ public class AudioRecordReader {
|
||||
this.recorder = recorder;
|
||||
}
|
||||
|
||||
@TargetApi(Build.VERSION_CODES.N)
|
||||
@TargetApi(AndroidVersions.API_24_ANDROID_7_0)
|
||||
public int read(ByteBuffer outDirectBuffer, MediaCodec.BufferInfo outBufferInfo) {
|
||||
int r = recorder.read(outDirectBuffer, AudioConfig.MAX_READ_SIZE);
|
||||
if (r <= 0) {
|
||||
|
@ -17,12 +17,13 @@ public final class ControlMessage {
|
||||
public static final int TYPE_COLLAPSE_PANELS = 7;
|
||||
public static final int TYPE_GET_CLIPBOARD = 8;
|
||||
public static final int TYPE_SET_CLIPBOARD = 9;
|
||||
public static final int TYPE_SET_SCREEN_POWER_MODE = 10;
|
||||
public static final int TYPE_SET_DISPLAY_POWER = 10;
|
||||
public static final int TYPE_ROTATE_DEVICE = 11;
|
||||
public static final int TYPE_UHID_CREATE = 12;
|
||||
public static final int TYPE_UHID_INPUT = 13;
|
||||
public static final int TYPE_UHID_DESTROY = 14;
|
||||
public static final int TYPE_OPEN_HARD_KEYBOARD_SETTINGS = 15;
|
||||
public static final int TYPE_START_APP = 16;
|
||||
|
||||
public static final long SEQUENCE_INVALID = 0;
|
||||
|
||||
@ -33,7 +34,7 @@ public final class ControlMessage {
|
||||
private int type;
|
||||
private String text;
|
||||
private int metaState; // KeyEvent.META_*
|
||||
private int action; // KeyEvent.ACTION_* or MotionEvent.ACTION_* or POWER_MODE_*
|
||||
private int action; // KeyEvent.ACTION_* or MotionEvent.ACTION_*
|
||||
private int keycode; // KeyEvent.KEYCODE_*
|
||||
private int actionButton; // MotionEvent.BUTTON_*
|
||||
private int buttons; // MotionEvent.BUTTON_*
|
||||
@ -48,6 +49,7 @@ public final class ControlMessage {
|
||||
private long sequence;
|
||||
private int id;
|
||||
private byte[] data;
|
||||
private boolean on;
|
||||
|
||||
private ControlMessage() {
|
||||
}
|
||||
@ -115,13 +117,10 @@ public final class ControlMessage {
|
||||
return msg;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param mode one of the {@code Device.SCREEN_POWER_MODE_*} constants
|
||||
*/
|
||||
public static ControlMessage createSetScreenPowerMode(int mode) {
|
||||
public static ControlMessage createSetDisplayPower(boolean on) {
|
||||
ControlMessage msg = new ControlMessage();
|
||||
msg.type = TYPE_SET_SCREEN_POWER_MODE;
|
||||
msg.action = mode;
|
||||
msg.type = TYPE_SET_DISPLAY_POWER;
|
||||
msg.on = on;
|
||||
return msg;
|
||||
}
|
||||
|
||||
@ -155,6 +154,13 @@ public final class ControlMessage {
|
||||
return msg;
|
||||
}
|
||||
|
||||
public static ControlMessage createStartApp(String name) {
|
||||
ControlMessage msg = new ControlMessage();
|
||||
msg.type = TYPE_START_APP;
|
||||
msg.text = name;
|
||||
return msg;
|
||||
}
|
||||
|
||||
public int getType() {
|
||||
return type;
|
||||
}
|
||||
@ -226,4 +232,8 @@ public final class ControlMessage {
|
||||
public byte[] getData() {
|
||||
return data;
|
||||
}
|
||||
|
||||
public boolean getOn() {
|
||||
return on;
|
||||
}
|
||||
}
|
||||
|
@ -1,7 +1,7 @@
|
||||
package com.genymobile.scrcpy.control;
|
||||
|
||||
import com.genymobile.scrcpy.util.Binary;
|
||||
import com.genymobile.scrcpy.device.Position;
|
||||
import com.genymobile.scrcpy.util.Binary;
|
||||
|
||||
import java.io.BufferedInputStream;
|
||||
import java.io.DataInputStream;
|
||||
@ -39,8 +39,8 @@ public class ControlMessageReader {
|
||||
return parseGetClipboard();
|
||||
case ControlMessage.TYPE_SET_CLIPBOARD:
|
||||
return parseSetClipboard();
|
||||
case ControlMessage.TYPE_SET_SCREEN_POWER_MODE:
|
||||
return parseSetScreenPowerMode();
|
||||
case ControlMessage.TYPE_SET_DISPLAY_POWER:
|
||||
return parseSetDisplayPower();
|
||||
case ControlMessage.TYPE_EXPAND_NOTIFICATION_PANEL:
|
||||
case ControlMessage.TYPE_EXPAND_SETTINGS_PANEL:
|
||||
case ControlMessage.TYPE_COLLAPSE_PANELS:
|
||||
@ -53,6 +53,8 @@ public class ControlMessageReader {
|
||||
return parseUhidInput();
|
||||
case ControlMessage.TYPE_UHID_DESTROY:
|
||||
return parseUhidDestroy();
|
||||
case ControlMessage.TYPE_START_APP:
|
||||
return parseStartApp();
|
||||
default:
|
||||
throw new ControlProtocolException("Unknown event type: " + type);
|
||||
}
|
||||
@ -132,9 +134,9 @@ public class ControlMessageReader {
|
||||
return ControlMessage.createSetClipboard(sequence, text, paste);
|
||||
}
|
||||
|
||||
private ControlMessage parseSetScreenPowerMode() throws IOException {
|
||||
int mode = dis.readUnsignedByte();
|
||||
return ControlMessage.createSetScreenPowerMode(mode);
|
||||
private ControlMessage parseSetDisplayPower() throws IOException {
|
||||
boolean on = dis.readBoolean();
|
||||
return ControlMessage.createSetDisplayPower(on);
|
||||
}
|
||||
|
||||
private ControlMessage parseUhidCreate() throws IOException {
|
||||
@ -155,6 +157,11 @@ public class ControlMessageReader {
|
||||
return ControlMessage.createUhidDestroy(id);
|
||||
}
|
||||
|
||||
private ControlMessage parseStartApp() throws IOException {
|
||||
String name = parseString(1);
|
||||
return ControlMessage.createStartApp(name);
|
||||
}
|
||||
|
||||
private Position parsePosition() throws IOException {
|
||||
int x = dis.readInt();
|
||||
int y = dis.readInt();
|
||||
|
@ -1,14 +1,20 @@
|
||||
package com.genymobile.scrcpy.control;
|
||||
|
||||
import com.genymobile.scrcpy.AndroidVersions;
|
||||
import com.genymobile.scrcpy.AsyncProcessor;
|
||||
import com.genymobile.scrcpy.CleanUp;
|
||||
import com.genymobile.scrcpy.device.Device;
|
||||
import com.genymobile.scrcpy.util.Ln;
|
||||
import com.genymobile.scrcpy.device.DeviceApp;
|
||||
import com.genymobile.scrcpy.device.Point;
|
||||
import com.genymobile.scrcpy.device.Position;
|
||||
import com.genymobile.scrcpy.util.Ln;
|
||||
import com.genymobile.scrcpy.util.LogUtils;
|
||||
import com.genymobile.scrcpy.video.VirtualDisplayListener;
|
||||
import com.genymobile.scrcpy.wrappers.ClipboardManager;
|
||||
import com.genymobile.scrcpy.wrappers.InputManager;
|
||||
import com.genymobile.scrcpy.wrappers.ServiceManager;
|
||||
|
||||
import android.content.IOnPrimaryClipChangedListener;
|
||||
import android.content.Intent;
|
||||
import android.os.Build;
|
||||
import android.os.SystemClock;
|
||||
@ -18,11 +24,40 @@ import android.view.KeyEvent;
|
||||
import android.view.MotionEvent;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
import java.util.concurrent.ExecutorService;
|
||||
import java.util.concurrent.Executors;
|
||||
import java.util.concurrent.ScheduledExecutorService;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.concurrent.atomic.AtomicBoolean;
|
||||
import java.util.concurrent.atomic.AtomicReference;
|
||||
|
||||
public class Controller implements AsyncProcessor {
|
||||
public class Controller implements AsyncProcessor, VirtualDisplayListener {
|
||||
|
||||
/*
|
||||
* For event injection, there are two display ids:
|
||||
* - the displayId passed to the constructor (which comes from --display-id passed by the client, 0 for the main display);
|
||||
* - the virtualDisplayId used for mirroring, notified by the capture instance via the VirtualDisplayListener interface.
|
||||
*
|
||||
* (In case the ScreenCapture uses the "SurfaceControl API", then both ids are equals, but this is an implementation detail.)
|
||||
*
|
||||
* In order to make events work correctly in all cases:
|
||||
* - virtualDisplayId must be used for events relative to the display (mouse and touch events with coordinates);
|
||||
* - displayId must be used for other events (like key events).
|
||||
*
|
||||
* If a new separate virtual display is created (using --new-display), then displayId == Device.DISPLAY_ID_NONE. In that case, all events are
|
||||
* sent to the virtual display id.
|
||||
*/
|
||||
|
||||
private static final class DisplayData {
|
||||
private final int virtualDisplayId;
|
||||
private final PositionMapper positionMapper;
|
||||
|
||||
private DisplayData(int virtualDisplayId, PositionMapper positionMapper) {
|
||||
this.virtualDisplayId = virtualDisplayId;
|
||||
this.positionMapper = positionMapper;
|
||||
}
|
||||
}
|
||||
|
||||
private static final int DEFAULT_DEVICE_ID = 0;
|
||||
|
||||
@ -30,12 +65,14 @@ public class Controller implements AsyncProcessor {
|
||||
private static final int POINTER_ID_MOUSE = -1;
|
||||
|
||||
private static final ScheduledExecutorService EXECUTOR = Executors.newSingleThreadScheduledExecutor();
|
||||
private ExecutorService startAppExecutor;
|
||||
|
||||
private Thread thread;
|
||||
|
||||
private UhidManager uhidManager;
|
||||
|
||||
private final Device device;
|
||||
private final int displayId;
|
||||
private final boolean supportsInputEvents;
|
||||
private final ControlChannel controlChannel;
|
||||
private final CleanUp cleanUp;
|
||||
private final DeviceMessageSender sender;
|
||||
@ -44,21 +81,66 @@ public class Controller implements AsyncProcessor {
|
||||
|
||||
private final KeyCharacterMap charMap = KeyCharacterMap.load(KeyCharacterMap.VIRTUAL_KEYBOARD);
|
||||
|
||||
private final AtomicBoolean isSettingClipboard = new AtomicBoolean();
|
||||
|
||||
private final AtomicReference<DisplayData> displayData = new AtomicReference<>();
|
||||
private final Object displayDataAvailable = new Object(); // condition variable
|
||||
|
||||
private long lastTouchDown;
|
||||
private final PointersState pointersState = new PointersState();
|
||||
private final MotionEvent.PointerProperties[] pointerProperties = new MotionEvent.PointerProperties[PointersState.MAX_POINTERS];
|
||||
private final MotionEvent.PointerCoords[] pointerCoords = new MotionEvent.PointerCoords[PointersState.MAX_POINTERS];
|
||||
|
||||
private boolean keepPowerModeOff;
|
||||
private boolean keepDisplayPowerOff;
|
||||
|
||||
public Controller(Device device, ControlChannel controlChannel, CleanUp cleanUp, boolean clipboardAutosync, boolean powerOn) {
|
||||
this.device = device;
|
||||
public Controller(int displayId, ControlChannel controlChannel, CleanUp cleanUp, boolean clipboardAutosync, boolean powerOn) {
|
||||
this.displayId = displayId;
|
||||
this.controlChannel = controlChannel;
|
||||
this.cleanUp = cleanUp;
|
||||
this.clipboardAutosync = clipboardAutosync;
|
||||
this.powerOn = powerOn;
|
||||
initPointers();
|
||||
sender = new DeviceMessageSender(controlChannel);
|
||||
|
||||
supportsInputEvents = Device.supportsInputEvents(displayId);
|
||||
if (!supportsInputEvents) {
|
||||
Ln.w("Input events are not supported for secondary displays before Android 10");
|
||||
}
|
||||
|
||||
if (clipboardAutosync) {
|
||||
// If control and autosync are enabled, synchronize Android clipboard to the computer automatically
|
||||
ClipboardManager clipboardManager = ServiceManager.getClipboardManager();
|
||||
if (clipboardManager != null) {
|
||||
clipboardManager.addPrimaryClipChangedListener(new IOnPrimaryClipChangedListener.Stub() {
|
||||
@Override
|
||||
public void dispatchPrimaryClipChanged() {
|
||||
if (isSettingClipboard.get()) {
|
||||
// This is a notification for the change we are currently applying, ignore it
|
||||
return;
|
||||
}
|
||||
String text = Device.getClipboardText();
|
||||
if (text != null) {
|
||||
DeviceMessage msg = DeviceMessage.createClipboard(text);
|
||||
sender.send(msg);
|
||||
}
|
||||
}
|
||||
});
|
||||
} else {
|
||||
Ln.w("No clipboard manager, copy-paste between device and computer will not work");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onNewVirtualDisplay(int virtualDisplayId, PositionMapper positionMapper) {
|
||||
DisplayData data = new DisplayData(virtualDisplayId, positionMapper);
|
||||
DisplayData old = this.displayData.getAndSet(data);
|
||||
if (old == null) {
|
||||
// The very first time the Controller is notified of a new virtual display
|
||||
synchronized (displayDataAvailable) {
|
||||
displayDataAvailable.notify();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private UhidManager getUhidManager() {
|
||||
@ -84,8 +166,8 @@ public class Controller implements AsyncProcessor {
|
||||
|
||||
private void control() throws IOException {
|
||||
// on start, power on the device
|
||||
if (powerOn && !Device.isScreenOn()) {
|
||||
device.pressReleaseKeycode(KeyEvent.KEYCODE_POWER, Device.INJECT_MODE_ASYNC);
|
||||
if (powerOn && displayId == 0 && !Device.isScreenOn()) {
|
||||
Device.pressReleaseKeycode(KeyEvent.KEYCODE_POWER, displayId, Device.INJECT_MODE_ASYNC);
|
||||
|
||||
// dirty hack
|
||||
// After POWER is injected, the device is powered on asynchronously.
|
||||
@ -138,10 +220,6 @@ public class Controller implements AsyncProcessor {
|
||||
sender.join();
|
||||
}
|
||||
|
||||
public DeviceMessageSender getSender() {
|
||||
return sender;
|
||||
}
|
||||
|
||||
private boolean handleEvent() throws IOException {
|
||||
ControlMessage msg;
|
||||
try {
|
||||
@ -153,27 +231,27 @@ public class Controller implements AsyncProcessor {
|
||||
|
||||
switch (msg.getType()) {
|
||||
case ControlMessage.TYPE_INJECT_KEYCODE:
|
||||
if (device.supportsInputEvents()) {
|
||||
if (supportsInputEvents) {
|
||||
injectKeycode(msg.getAction(), msg.getKeycode(), msg.getRepeat(), msg.getMetaState());
|
||||
}
|
||||
break;
|
||||
case ControlMessage.TYPE_INJECT_TEXT:
|
||||
if (device.supportsInputEvents()) {
|
||||
if (supportsInputEvents) {
|
||||
injectText(msg.getText());
|
||||
}
|
||||
break;
|
||||
case ControlMessage.TYPE_INJECT_TOUCH_EVENT:
|
||||
if (device.supportsInputEvents()) {
|
||||
if (supportsInputEvents) {
|
||||
injectTouch(msg.getAction(), msg.getPointerId(), msg.getPosition(), msg.getPressure(), msg.getActionButton(), msg.getButtons());
|
||||
}
|
||||
break;
|
||||
case ControlMessage.TYPE_INJECT_SCROLL_EVENT:
|
||||
if (device.supportsInputEvents()) {
|
||||
if (supportsInputEvents) {
|
||||
injectScroll(msg.getPosition(), msg.getHScroll(), msg.getVScroll(), msg.getButtons());
|
||||
}
|
||||
break;
|
||||
case ControlMessage.TYPE_BACK_OR_SCREEN_ON:
|
||||
if (device.supportsInputEvents()) {
|
||||
if (supportsInputEvents) {
|
||||
pressBackOrTurnScreenOn(msg.getAction());
|
||||
}
|
||||
break;
|
||||
@ -192,22 +270,13 @@ public class Controller implements AsyncProcessor {
|
||||
case ControlMessage.TYPE_SET_CLIPBOARD:
|
||||
setClipboard(msg.getText(), msg.getPaste(), msg.getSequence());
|
||||
break;
|
||||
case ControlMessage.TYPE_SET_SCREEN_POWER_MODE:
|
||||
if (device.supportsInputEvents()) {
|
||||
int mode = msg.getAction();
|
||||
boolean setPowerModeOk = Device.setScreenPowerMode(mode);
|
||||
if (setPowerModeOk) {
|
||||
keepPowerModeOff = mode == Device.POWER_MODE_OFF;
|
||||
Ln.i("Device screen turned " + (mode == Device.POWER_MODE_OFF ? "off" : "on"));
|
||||
if (cleanUp != null) {
|
||||
boolean mustRestoreOnExit = mode != Device.POWER_MODE_NORMAL;
|
||||
cleanUp.setRestoreNormalPowerMode(mustRestoreOnExit);
|
||||
}
|
||||
}
|
||||
case ControlMessage.TYPE_SET_DISPLAY_POWER:
|
||||
if (supportsInputEvents && displayId != Device.DISPLAY_ID_NONE) {
|
||||
setDisplayPower(msg.getOn());
|
||||
}
|
||||
break;
|
||||
case ControlMessage.TYPE_ROTATE_DEVICE:
|
||||
device.rotateDevice();
|
||||
Device.rotateDevice(getActionDisplayId());
|
||||
break;
|
||||
case ControlMessage.TYPE_UHID_CREATE:
|
||||
getUhidManager().open(msg.getId(), msg.getText(), msg.getData());
|
||||
@ -221,6 +290,9 @@ public class Controller implements AsyncProcessor {
|
||||
case ControlMessage.TYPE_OPEN_HARD_KEYBOARD_SETTINGS:
|
||||
openHardKeyboardSettings();
|
||||
break;
|
||||
case ControlMessage.TYPE_START_APP:
|
||||
startAppAsync(msg.getText());
|
||||
break;
|
||||
default:
|
||||
// do nothing
|
||||
}
|
||||
@ -229,10 +301,11 @@ public class Controller implements AsyncProcessor {
|
||||
}
|
||||
|
||||
private boolean injectKeycode(int action, int keycode, int repeat, int metaState) {
|
||||
if (keepPowerModeOff && action == KeyEvent.ACTION_UP && (keycode == KeyEvent.KEYCODE_POWER || keycode == KeyEvent.KEYCODE_WAKEUP)) {
|
||||
schedulePowerModeOff();
|
||||
if (keepDisplayPowerOff && action == KeyEvent.ACTION_UP && (keycode == KeyEvent.KEYCODE_POWER || keycode == KeyEvent.KEYCODE_WAKEUP)) {
|
||||
assert displayId != Device.DISPLAY_ID_NONE;
|
||||
scheduleDisplayPowerOff(displayId);
|
||||
}
|
||||
return device.injectKeyEvent(action, keycode, repeat, metaState, Device.INJECT_MODE_ASYNC);
|
||||
return injectKeyEvent(action, keycode, repeat, metaState, Device.INJECT_MODE_ASYNC);
|
||||
}
|
||||
|
||||
private boolean injectChar(char c) {
|
||||
@ -242,8 +315,10 @@ public class Controller implements AsyncProcessor {
|
||||
if (events == null) {
|
||||
return false;
|
||||
}
|
||||
|
||||
int actionDisplayId = getActionDisplayId();
|
||||
for (KeyEvent event : events) {
|
||||
if (!device.injectEvent(event, Device.INJECT_MODE_ASYNC)) {
|
||||
if (!Device.injectEvent(event, actionDisplayId, Device.INJECT_MODE_ASYNC)) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
@ -265,7 +340,12 @@ public class Controller implements AsyncProcessor {
|
||||
private boolean injectTouch(int action, long pointerId, Position position, float pressure, int actionButton, int buttons) {
|
||||
long now = SystemClock.uptimeMillis();
|
||||
|
||||
Point point = device.getPhysicalPoint(position);
|
||||
// it hides the field on purpose, to read it with atomic access
|
||||
@SuppressWarnings("checkstyle:HiddenField")
|
||||
DisplayData displayData = this.displayData.get();
|
||||
assert displayData != null : "Cannot receive a touch event without a display";
|
||||
|
||||
Point point = displayData.positionMapper.map(position);
|
||||
if (point == null) {
|
||||
Ln.w("Ignore touch event, it was generated for a different device size");
|
||||
return false;
|
||||
@ -318,13 +398,13 @@ public class Controller implements AsyncProcessor {
|
||||
*
|
||||
* Otherwise, Chrome does not work properly: <https://github.com/Genymobile/scrcpy/issues/3635>
|
||||
*/
|
||||
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M && source == InputDevice.SOURCE_MOUSE) {
|
||||
if (Build.VERSION.SDK_INT >= AndroidVersions.API_23_ANDROID_6_0 && source == InputDevice.SOURCE_MOUSE) {
|
||||
if (action == MotionEvent.ACTION_DOWN) {
|
||||
if (actionButton == buttons) {
|
||||
// First button pressed: ACTION_DOWN
|
||||
MotionEvent downEvent = MotionEvent.obtain(lastTouchDown, now, MotionEvent.ACTION_DOWN, pointerCount, pointerProperties,
|
||||
pointerCoords, 0, buttons, 1f, 1f, DEFAULT_DEVICE_ID, 0, source, 0);
|
||||
if (!device.injectEvent(downEvent, Device.INJECT_MODE_ASYNC)) {
|
||||
if (!Device.injectEvent(downEvent, displayData.virtualDisplayId, Device.INJECT_MODE_ASYNC)) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
@ -335,7 +415,7 @@ public class Controller implements AsyncProcessor {
|
||||
if (!InputManager.setActionButton(pressEvent, actionButton)) {
|
||||
return false;
|
||||
}
|
||||
if (!device.injectEvent(pressEvent, Device.INJECT_MODE_ASYNC)) {
|
||||
if (!Device.injectEvent(pressEvent, displayData.virtualDisplayId, Device.INJECT_MODE_ASYNC)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
@ -349,7 +429,7 @@ public class Controller implements AsyncProcessor {
|
||||
if (!InputManager.setActionButton(releaseEvent, actionButton)) {
|
||||
return false;
|
||||
}
|
||||
if (!device.injectEvent(releaseEvent, Device.INJECT_MODE_ASYNC)) {
|
||||
if (!Device.injectEvent(releaseEvent, displayData.virtualDisplayId, Device.INJECT_MODE_ASYNC)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
@ -357,7 +437,7 @@ public class Controller implements AsyncProcessor {
|
||||
// Last button released: ACTION_UP
|
||||
MotionEvent upEvent = MotionEvent.obtain(lastTouchDown, now, MotionEvent.ACTION_UP, pointerCount, pointerProperties,
|
||||
pointerCoords, 0, buttons, 1f, 1f, DEFAULT_DEVICE_ID, 0, source, 0);
|
||||
if (!device.injectEvent(upEvent, Device.INJECT_MODE_ASYNC)) {
|
||||
if (!Device.injectEvent(upEvent, displayData.virtualDisplayId, Device.INJECT_MODE_ASYNC)) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
@ -368,14 +448,20 @@ public class Controller implements AsyncProcessor {
|
||||
|
||||
MotionEvent event = MotionEvent.obtain(lastTouchDown, now, action, pointerCount, pointerProperties, pointerCoords, 0, buttons, 1f, 1f,
|
||||
DEFAULT_DEVICE_ID, 0, source, 0);
|
||||
return device.injectEvent(event, Device.INJECT_MODE_ASYNC);
|
||||
return Device.injectEvent(event, displayData.virtualDisplayId, Device.INJECT_MODE_ASYNC);
|
||||
}
|
||||
|
||||
private boolean injectScroll(Position position, float hScroll, float vScroll, int buttons) {
|
||||
long now = SystemClock.uptimeMillis();
|
||||
Point point = device.getPhysicalPoint(position);
|
||||
|
||||
// it hides the field on purpose, to read it with atomic access
|
||||
@SuppressWarnings("checkstyle:HiddenField")
|
||||
DisplayData displayData = this.displayData.get();
|
||||
assert displayData != null : "Cannot receive a scroll event without a display";
|
||||
|
||||
Point point = displayData.positionMapper.map(position);
|
||||
if (point == null) {
|
||||
// ignore event
|
||||
Ln.w("Ignore scroll event, it was generated for a different device size");
|
||||
return false;
|
||||
}
|
||||
|
||||
@ -390,22 +476,22 @@ public class Controller implements AsyncProcessor {
|
||||
|
||||
MotionEvent event = MotionEvent.obtain(lastTouchDown, now, MotionEvent.ACTION_SCROLL, 1, pointerProperties, pointerCoords, 0, buttons, 1f, 1f,
|
||||
DEFAULT_DEVICE_ID, 0, InputDevice.SOURCE_MOUSE, 0);
|
||||
return device.injectEvent(event, Device.INJECT_MODE_ASYNC);
|
||||
return Device.injectEvent(event, displayData.virtualDisplayId, Device.INJECT_MODE_ASYNC);
|
||||
}
|
||||
|
||||
/**
|
||||
* Schedule a call to set power mode to off after a small delay.
|
||||
* Schedule a call to set display power to off after a small delay.
|
||||
*/
|
||||
private static void schedulePowerModeOff() {
|
||||
private static void scheduleDisplayPowerOff(int displayId) {
|
||||
EXECUTOR.schedule(() -> {
|
||||
Ln.i("Forcing screen off");
|
||||
Device.setScreenPowerMode(Device.POWER_MODE_OFF);
|
||||
Ln.i("Forcing display off");
|
||||
Device.setDisplayPower(displayId, false);
|
||||
}, 200, TimeUnit.MILLISECONDS);
|
||||
}
|
||||
|
||||
private boolean pressBackOrTurnScreenOn(int action) {
|
||||
if (Device.isScreenOn()) {
|
||||
return device.injectKeyEvent(action, KeyEvent.KEYCODE_BACK, 0, 0, Device.INJECT_MODE_ASYNC);
|
||||
return injectKeyEvent(action, KeyEvent.KEYCODE_BACK, 0, 0, Device.INJECT_MODE_ASYNC);
|
||||
}
|
||||
|
||||
// Screen is off
|
||||
@ -415,18 +501,19 @@ public class Controller implements AsyncProcessor {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (keepPowerModeOff) {
|
||||
schedulePowerModeOff();
|
||||
if (keepDisplayPowerOff) {
|
||||
assert displayId != Device.DISPLAY_ID_NONE;
|
||||
scheduleDisplayPowerOff(displayId);
|
||||
}
|
||||
return device.pressReleaseKeycode(KeyEvent.KEYCODE_POWER, Device.INJECT_MODE_ASYNC);
|
||||
return pressReleaseKeycode(KeyEvent.KEYCODE_POWER, Device.INJECT_MODE_ASYNC);
|
||||
}
|
||||
|
||||
private void getClipboard(int copyKey) {
|
||||
// On Android >= 7, press the COPY or CUT key if requested
|
||||
if (copyKey != ControlMessage.COPY_KEY_NONE && Build.VERSION.SDK_INT >= Build.VERSION_CODES.N && device.supportsInputEvents()) {
|
||||
if (copyKey != ControlMessage.COPY_KEY_NONE && Build.VERSION.SDK_INT >= AndroidVersions.API_24_ANDROID_7_0 && supportsInputEvents) {
|
||||
int key = copyKey == ControlMessage.COPY_KEY_COPY ? KeyEvent.KEYCODE_COPY : KeyEvent.KEYCODE_CUT;
|
||||
// Wait until the event is finished, to ensure that the clipboard text we read just after is the correct one
|
||||
device.pressReleaseKeycode(key, Device.INJECT_MODE_WAIT_FOR_FINISH);
|
||||
pressReleaseKeycode(key, Device.INJECT_MODE_WAIT_FOR_FINISH);
|
||||
}
|
||||
|
||||
// If clipboard autosync is enabled, then the device clipboard is synchronized to the computer clipboard whenever it changes, in
|
||||
@ -442,14 +529,16 @@ public class Controller implements AsyncProcessor {
|
||||
}
|
||||
|
||||
private boolean setClipboard(String text, boolean paste, long sequence) {
|
||||
boolean ok = device.setClipboardText(text);
|
||||
isSettingClipboard.set(true);
|
||||
boolean ok = Device.setClipboardText(text);
|
||||
isSettingClipboard.set(false);
|
||||
if (ok) {
|
||||
Ln.i("Device clipboard set");
|
||||
}
|
||||
|
||||
// On Android >= 7, also press the PASTE key if requested
|
||||
if (paste && Build.VERSION.SDK_INT >= Build.VERSION_CODES.N && device.supportsInputEvents()) {
|
||||
device.pressReleaseKeycode(KeyEvent.KEYCODE_PASTE, Device.INJECT_MODE_ASYNC);
|
||||
if (paste && Build.VERSION.SDK_INT >= AndroidVersions.API_24_ANDROID_7_0 && supportsInputEvents) {
|
||||
pressReleaseKeycode(KeyEvent.KEYCODE_PASTE, Device.INJECT_MODE_ASYNC);
|
||||
}
|
||||
|
||||
if (sequence != ControlMessage.SEQUENCE_INVALID) {
|
||||
@ -465,4 +554,130 @@ public class Controller implements AsyncProcessor {
|
||||
Intent intent = new Intent("android.settings.HARD_KEYBOARD_SETTINGS");
|
||||
ServiceManager.getActivityManager().startActivity(intent);
|
||||
}

    private boolean injectKeyEvent(int action, int keyCode, int repeat, int metaState, int injectMode) {
        return Device.injectKeyEvent(action, keyCode, repeat, metaState, getActionDisplayId(), injectMode);
    }

    private boolean pressReleaseKeycode(int keyCode, int injectMode) {
        return Device.pressReleaseKeycode(keyCode, getActionDisplayId(), injectMode);
    }

    private int getActionDisplayId() {
        if (displayId != Device.DISPLAY_ID_NONE) {
            // Real screen mirrored, use the source display id
            return displayId;
        }

        // Virtual display created by --new-display, use the virtualDisplayId
        DisplayData data = displayData.get();
        if (data == null) {
            // If no virtual display id is initialized yet, use the main display id
            return 0;
        }

        return data.virtualDisplayId;
    }

    private void startAppAsync(String name) {
        if (startAppExecutor == null) {
            startAppExecutor = Executors.newSingleThreadExecutor();
        }

        // Listing and selecting the app may take a lot of time
        startAppExecutor.submit(() -> startApp(name));
    }

    private void startApp(String name) {
        boolean forceStopBeforeStart = name.startsWith("+");
        if (forceStopBeforeStart) {
            name = name.substring(1);
        }

        DeviceApp app;
        boolean searchByName = name.startsWith("?");
        if (searchByName) {
            name = name.substring(1);

            Ln.i("Processing Android apps... (this may take some time)");
            List<DeviceApp> apps = Device.findByName(name);
            if (apps.isEmpty()) {
                Ln.w("No app found for name \"" + name + "\"");
                return;
            }

            if (apps.size() > 1) {
                String title = "No unique app found for name \"" + name + "\":";
                Ln.w(LogUtils.buildAppListMessage(title, apps));
                return;
            }

            app = apps.get(0);
        } else {
            app = Device.findByPackageName(name);
            if (app == null) {
                Ln.w("No app found for package \"" + name + "\"");
                return;
            }
        }

        int startAppDisplayId = getStartAppDisplayId();
        if (startAppDisplayId == Device.DISPLAY_ID_NONE) {
            Ln.e("No known display id to start app \"" + name + "\"");
            return;
        }

        Ln.i("Starting app \"" + app.getName() + "\" [" + app.getPackageName() + "] on display " + startAppDisplayId + "...");
        Device.startApp(app.getPackageName(), startAppDisplayId, forceStopBeforeStart);
    }

    private int getStartAppDisplayId() {
        if (displayId != Device.DISPLAY_ID_NONE) {
            return displayId;
        }

        // Mirroring a new virtual display id (using --new-display-id feature)
        try {
            // Wait for at most 1 second until a virtual display id is known
            DisplayData data = waitDisplayData(1000);
            if (data != null) {
                return data.virtualDisplayId;
            }
        } catch (InterruptedException e) {
            // do nothing
        }

        // No display id available
        return Device.DISPLAY_ID_NONE;
    }

    private DisplayData waitDisplayData(long timeoutMillis) throws InterruptedException {
        long deadline = System.currentTimeMillis() + timeoutMillis;

        synchronized (displayDataAvailable) {
            DisplayData data = displayData.get();
            while (data == null) {
                long timeout = deadline - System.currentTimeMillis();
                if (timeout < 0) {
                    return null;
                }
                displayDataAvailable.wait(timeout);
                data = displayData.get();
            }

            return data;
        }
    }

    private void setDisplayPower(boolean on) {
        boolean setDisplayPowerOk = Device.setDisplayPower(displayId, on);
        if (setDisplayPowerOk) {
            keepDisplayPowerOff = !on;
            Ln.i("Device display turned " + (on ? "on" : "off"));
            if (cleanUp != null) {
                boolean mustRestoreOnExit = !on;
                cleanUp.setRestoreDisplayPower(mustRestoreOnExit);
            }
        }
    }
}
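Editor's aside, not part of the diff: waitDisplayData() above is an ordinary bounded wait on a monitor that re-checks its condition after every wake-up. A minimal standalone sketch of the same pattern follows; the class name BoundedWaitSketch, the String payload and the publish() method are hypothetical stand-ins for the DisplayData plumbing.

import java.util.concurrent.atomic.AtomicReference;

public class BoundedWaitSketch {
    private final Object lock = new Object();
    private final AtomicReference<String> value = new AtomicReference<>();

    // Hypothetical stand-in for the virtual-display callback: publish a value and wake any waiter.
    public void publish(String v) {
        synchronized (lock) {
            value.set(v);
            lock.notifyAll();
        }
    }

    // Wait at most timeoutMillis for a value, like waitDisplayData(); returns null on timeout.
    public String waitValue(long timeoutMillis) throws InterruptedException {
        long deadline = System.currentTimeMillis() + timeoutMillis;
        synchronized (lock) {
            String v = value.get();
            while (v == null) {
                long timeout = deadline - System.currentTimeMillis();
                if (timeout <= 0) {
                    return null;
                }
                lock.wait(timeout); // may return early or spuriously, hence the re-check loop
                v = value.get();
            }
            return v;
        }
    }
}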
@@ -0,0 +1,48 @@
package com.genymobile.scrcpy.control;

import com.genymobile.scrcpy.device.Point;
import com.genymobile.scrcpy.device.Position;
import com.genymobile.scrcpy.device.Size;
import com.genymobile.scrcpy.video.ScreenInfo;

import android.graphics.Rect;

public final class PositionMapper {

    private final Size videoSize;
    private final Rect contentRect;
    private final int coordsRotation;

    public PositionMapper(Size videoSize, Rect contentRect, int videoRotation) {
        this.videoSize = videoSize;
        this.contentRect = contentRect;
        this.coordsRotation = reverseRotation(videoRotation);
    }

    public static PositionMapper from(ScreenInfo screenInfo) {
        // ignore the locked video orientation, the events will apply in coordinates considered in the physical device orientation
        Size videoSize = screenInfo.getUnlockedVideoSize();
        return new PositionMapper(videoSize, screenInfo.getContentRect(), screenInfo.getVideoRotation());
    }

    private static int reverseRotation(int rotation) {
        return (4 - rotation) % 4;
    }

    public Point map(Position position) {
        // reverse the video rotation to apply the events
        Position devicePosition = position.rotate(coordsRotation);

        Size clientVideoSize = devicePosition.getScreenSize();
        if (!videoSize.equals(clientVideoSize)) {
            // The client sends a click relative to a video with wrong dimensions,
            // the device may have been rotated since the event was generated, so ignore the event
            return null;
        }

        Point point = devicePosition.getPoint();
        int convertedX = contentRect.left + point.getX() * contentRect.width() / videoSize.getWidth();
        int convertedY = contentRect.top + point.getY() * contentRect.height() / videoSize.getHeight();
        return new Point(convertedX, convertedY);
    }
}
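Editor's aside, not part of the diff: PositionMapper.map() is a proportional rescale from client video coordinates into the display's content rectangle, after undoing the video rotation. A standalone sketch of the same arithmetic with hypothetical numbers, assuming a 1080x2400 content rect at (0,0) streamed as a 540x1200 video:

public class PositionMapperSketch {
    // Hypothetical geometry: the video is a 2x downscale of the content rect.
    static final int CONTENT_LEFT = 0, CONTENT_TOP = 0;
    static final int CONTENT_WIDTH = 1080, CONTENT_HEIGHT = 2400;
    static final int VIDEO_WIDTH = 540, VIDEO_HEIGHT = 1200;

    // Same proportional mapping as PositionMapper.map(), without the rotation handling.
    static int[] map(int videoX, int videoY) {
        int deviceX = CONTENT_LEFT + videoX * CONTENT_WIDTH / VIDEO_WIDTH;
        int deviceY = CONTENT_TOP + videoY * CONTENT_HEIGHT / VIDEO_HEIGHT;
        return new int[] {deviceX, deviceY};
    }

    public static void main(String[] args) {
        int[] p = map(270, 600);
        System.out.println(p[0] + "," + p[1]); // prints 540,1200
    }
}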
@ -1,5 +1,6 @@
|
||||
package com.genymobile.scrcpy.control;
|
||||
|
||||
import com.genymobile.scrcpy.AndroidVersions;
|
||||
import com.genymobile.scrcpy.util.Ln;
|
||||
import com.genymobile.scrcpy.util.StringUtils;
|
||||
|
||||
@ -38,7 +39,7 @@ public final class UhidManager {
|
||||
|
||||
public UhidManager(DeviceMessageSender sender) {
|
||||
this.sender = sender;
|
||||
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
|
||||
if (Build.VERSION.SDK_INT >= AndroidVersions.API_23_ANDROID_6_0) {
|
||||
HandlerThread thread = new HandlerThread("UHidManager");
|
||||
thread.start();
|
||||
queue = thread.getLooper().getQueue();
|
||||
@ -71,7 +72,7 @@ public final class UhidManager {
|
||||
}
|
||||
|
||||
private void registerUhidListener(int id, FileDescriptor fd) {
|
||||
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
|
||||
if (Build.VERSION.SDK_INT >= AndroidVersions.API_23_ANDROID_6_0) {
|
||||
queue.addOnFileDescriptorEventListener(fd, MessageQueue.OnFileDescriptorEventListener.EVENT_INPUT, (fd2, events) -> {
|
||||
try {
|
||||
buffer.clear();
|
||||
@ -97,7 +98,7 @@ public final class UhidManager {
|
||||
}
|
||||
|
||||
private void unregisterUhidListener(FileDescriptor fd) {
|
||||
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
|
||||
if (Build.VERSION.SDK_INT >= AndroidVersions.API_23_ANDROID_6_0) {
|
||||
queue.removeOnFileDescriptorEventListener(fd);
|
||||
}
|
||||
}
|
||||
|
@ -1,9 +1,9 @@
|
||||
package com.genymobile.scrcpy.device;
|
||||
|
||||
import com.genymobile.scrcpy.Options;
|
||||
import com.genymobile.scrcpy.AndroidVersions;
|
||||
import com.genymobile.scrcpy.FakeContext;
|
||||
import com.genymobile.scrcpy.util.Ln;
|
||||
import com.genymobile.scrcpy.util.LogUtils;
|
||||
import com.genymobile.scrcpy.video.ScreenInfo;
|
||||
import com.genymobile.scrcpy.wrappers.ActivityManager;
|
||||
import com.genymobile.scrcpy.wrappers.ClipboardManager;
|
||||
import com.genymobile.scrcpy.wrappers.DisplayControl;
|
||||
import com.genymobile.scrcpy.wrappers.InputManager;
|
||||
@ -11,22 +11,28 @@ import com.genymobile.scrcpy.wrappers.ServiceManager;
|
||||
import com.genymobile.scrcpy.wrappers.SurfaceControl;
|
||||
import com.genymobile.scrcpy.wrappers.WindowManager;
|
||||
|
||||
import android.content.IOnPrimaryClipChangedListener;
|
||||
import android.graphics.Rect;
|
||||
import android.annotation.SuppressLint;
|
||||
import android.content.Intent;
|
||||
import android.app.ActivityOptions;
|
||||
import android.content.pm.ApplicationInfo;
|
||||
import android.content.pm.PackageManager;
|
||||
import android.os.Build;
|
||||
import android.os.Bundle;
|
||||
import android.os.IBinder;
|
||||
import android.os.SystemClock;
|
||||
import android.view.IDisplayFoldListener;
|
||||
import android.view.IRotationWatcher;
|
||||
import android.view.InputDevice;
|
||||
import android.view.InputEvent;
|
||||
import android.view.KeyCharacterMap;
|
||||
import android.view.KeyEvent;
|
||||
|
||||
import java.util.concurrent.atomic.AtomicBoolean;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Locale;
|
||||
|
||||
public final class Device {
|
||||
|
||||
public static final int DISPLAY_ID_NONE = -1;
|
||||
|
||||
public static final int POWER_MODE_OFF = SurfaceControl.POWER_MODE_OFF;
|
||||
public static final int POWER_MODE_NORMAL = SurfaceControl.POWER_MODE_NORMAL;
|
||||
|
||||
@ -36,178 +42,11 @@ public final class Device {
|
||||
|
||||
public static final int LOCK_VIDEO_ORIENTATION_UNLOCKED = -1;
|
||||
public static final int LOCK_VIDEO_ORIENTATION_INITIAL = -2;
|
||||
// like SC_LOCK_VIDEO_ORIENTATION_INITIAL, but set automatically
|
||||
public static final int LOCK_VIDEO_ORIENTATION_INITIAL_AUTO = -3;
|
||||
|
||||
public interface RotationListener {
|
||||
void onRotationChanged(int rotation);
|
||||
}
|
||||
|
||||
public interface FoldListener {
|
||||
void onFoldChanged(int displayId, boolean folded);
|
||||
}
|
||||
|
||||
public interface ClipboardListener {
|
||||
void onClipboardTextChanged(String text);
|
||||
}
|
||||
|
||||
private final Rect crop;
|
||||
private int maxSize;
|
||||
private final int lockVideoOrientation;
|
||||
|
||||
private Size deviceSize;
|
||||
private ScreenInfo screenInfo;
|
||||
private RotationListener rotationListener;
|
||||
private FoldListener foldListener;
|
||||
private ClipboardListener clipboardListener;
|
||||
private final AtomicBoolean isSettingClipboard = new AtomicBoolean();
|
||||
|
||||
/**
|
||||
* Logical display identifier
|
||||
*/
|
||||
private final int displayId;
|
||||
|
||||
/**
|
||||
* The surface flinger layer stack associated with this logical display
|
||||
*/
|
||||
private final int layerStack;
|
||||
|
||||
private final boolean supportsInputEvents;
|
||||
|
||||
public Device(Options options) throws ConfigurationException {
|
||||
displayId = options.getDisplayId();
|
||||
DisplayInfo displayInfo = ServiceManager.getDisplayManager().getDisplayInfo(displayId);
|
||||
if (displayInfo == null) {
|
||||
Ln.e("Display " + displayId + " not found\n" + LogUtils.buildDisplayListMessage());
|
||||
throw new ConfigurationException("Unknown display id: " + displayId);
|
||||
}
|
||||
|
||||
int displayInfoFlags = displayInfo.getFlags();
|
||||
|
||||
deviceSize = displayInfo.getSize();
|
||||
crop = options.getCrop();
|
||||
maxSize = options.getMaxSize();
|
||||
lockVideoOrientation = options.getLockVideoOrientation();
|
||||
|
||||
screenInfo = ScreenInfo.computeScreenInfo(displayInfo.getRotation(), deviceSize, crop, maxSize, lockVideoOrientation);
|
||||
layerStack = displayInfo.getLayerStack();
|
||||
|
||||
ServiceManager.getWindowManager().registerRotationWatcher(new IRotationWatcher.Stub() {
|
||||
@Override
|
||||
public void onRotationChanged(int rotation) {
|
||||
synchronized (Device.this) {
|
||||
screenInfo = screenInfo.withDeviceRotation(rotation);
|
||||
|
||||
// notify
|
||||
if (rotationListener != null) {
|
||||
rotationListener.onRotationChanged(rotation);
|
||||
}
|
||||
}
|
||||
}
|
||||
}, displayId);
|
||||
|
||||
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
|
||||
ServiceManager.getWindowManager().registerDisplayFoldListener(new IDisplayFoldListener.Stub() {
|
||||
@Override
|
||||
public void onDisplayFoldChanged(int displayId, boolean folded) {
|
||||
if (Device.this.displayId != displayId) {
|
||||
// Ignore events related to other display ids
|
||||
return;
|
||||
}
|
||||
|
||||
synchronized (Device.this) {
|
||||
DisplayInfo displayInfo = ServiceManager.getDisplayManager().getDisplayInfo(displayId);
|
||||
if (displayInfo == null) {
|
||||
Ln.e("Display " + displayId + " not found\n" + LogUtils.buildDisplayListMessage());
|
||||
return;
|
||||
}
|
||||
|
||||
deviceSize = displayInfo.getSize();
|
||||
screenInfo = ScreenInfo.computeScreenInfo(displayInfo.getRotation(), deviceSize, crop, maxSize, lockVideoOrientation);
|
||||
// notify
|
||||
if (foldListener != null) {
|
||||
foldListener.onFoldChanged(displayId, folded);
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
if (options.getControl() && options.getClipboardAutosync()) {
|
||||
// If control and autosync are enabled, synchronize Android clipboard to the computer automatically
|
||||
ClipboardManager clipboardManager = ServiceManager.getClipboardManager();
|
||||
if (clipboardManager != null) {
|
||||
clipboardManager.addPrimaryClipChangedListener(new IOnPrimaryClipChangedListener.Stub() {
|
||||
@Override
|
||||
public void dispatchPrimaryClipChanged() {
|
||||
if (isSettingClipboard.get()) {
|
||||
// This is a notification for the change we are currently applying, ignore it
|
||||
return;
|
||||
}
|
||||
synchronized (Device.this) {
|
||||
if (clipboardListener != null) {
|
||||
String text = getClipboardText();
|
||||
if (text != null) {
|
||||
clipboardListener.onClipboardTextChanged(text);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
} else {
|
||||
Ln.w("No clipboard manager, copy-paste between device and computer will not work");
|
||||
}
|
||||
}
|
||||
|
||||
if ((displayInfoFlags & DisplayInfo.FLAG_SUPPORTS_PROTECTED_BUFFERS) == 0) {
|
||||
Ln.w("Display doesn't have FLAG_SUPPORTS_PROTECTED_BUFFERS flag, mirroring can be restricted");
|
||||
}
|
||||
|
||||
// main display or any display on Android >= Q
|
||||
supportsInputEvents = displayId == 0 || Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q;
|
||||
if (!supportsInputEvents) {
|
||||
Ln.w("Input events are not supported for secondary displays before Android 10");
|
||||
}
|
||||
}
|
||||
|
||||
public int getDisplayId() {
|
||||
return displayId;
|
||||
}
|
||||
|
||||
public synchronized void setMaxSize(int newMaxSize) {
|
||||
maxSize = newMaxSize;
|
||||
screenInfo = ScreenInfo.computeScreenInfo(screenInfo.getReverseVideoRotation(), deviceSize, crop, newMaxSize, lockVideoOrientation);
|
||||
}
|
||||
|
||||
public synchronized ScreenInfo getScreenInfo() {
|
||||
return screenInfo;
|
||||
}
|
||||
|
||||
public int getLayerStack() {
|
||||
return layerStack;
|
||||
}
|
||||
|
||||
public Point getPhysicalPoint(Position position) {
|
||||
// it hides the field on purpose, to read it with a lock
|
||||
@SuppressWarnings("checkstyle:HiddenField")
|
||||
ScreenInfo screenInfo = getScreenInfo(); // read with synchronization
|
||||
|
||||
// ignore the locked video orientation, the events will apply in coordinates considered in the physical device orientation
|
||||
Size unlockedVideoSize = screenInfo.getUnlockedVideoSize();
|
||||
|
||||
int reverseVideoRotation = screenInfo.getReverseVideoRotation();
|
||||
// reverse the video rotation to apply the events
|
||||
Position devicePosition = position.rotate(reverseVideoRotation);
|
||||
|
||||
Size clientVideoSize = devicePosition.getScreenSize();
|
||||
if (!unlockedVideoSize.equals(clientVideoSize)) {
|
||||
// The client sends a click relative to a video with wrong dimensions,
|
||||
// the device may have been rotated since the event was generated, so ignore the event
|
||||
return null;
|
||||
}
|
||||
Rect contentRect = screenInfo.getContentRect();
|
||||
Point point = devicePosition.getPoint();
|
||||
int convertedX = contentRect.left + point.getX() * contentRect.width() / unlockedVideoSize.getWidth();
|
||||
int convertedY = contentRect.top + point.getY() * contentRect.height() / unlockedVideoSize.getHeight();
|
||||
return new Point(convertedX, convertedY);
|
||||
private Device() {
|
||||
// not instantiable
|
||||
}
|
||||
|
||||
public static String getDeviceName() {
|
||||
@ -215,11 +54,8 @@ public final class Device {
|
||||
}
|
||||
|
||||
public static boolean supportsInputEvents(int displayId) {
|
||||
return displayId == 0 || Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q;
|
||||
}
|
||||
|
||||
public boolean supportsInputEvents() {
|
||||
return supportsInputEvents;
|
||||
// main display or any display on Android >= 10
|
||||
return displayId == 0 || Build.VERSION.SDK_INT >= AndroidVersions.API_29_ANDROID_10;
|
||||
}
|
||||
|
||||
public static boolean injectEvent(InputEvent inputEvent, int displayId, int injectMode) {
|
||||
@ -234,10 +70,6 @@ public final class Device {
|
||||
return ServiceManager.getInputManager().injectInputEvent(inputEvent, injectMode);
|
||||
}
|
||||
|
||||
public boolean injectEvent(InputEvent event, int injectMode) {
|
||||
return injectEvent(event, displayId, injectMode);
|
||||
}
|
||||
|
||||
public static boolean injectKeyEvent(int action, int keyCode, int repeat, int metaState, int displayId, int injectMode) {
|
||||
long now = SystemClock.uptimeMillis();
|
||||
KeyEvent event = new KeyEvent(now, now, action, keyCode, repeat, metaState, KeyCharacterMap.VIRTUAL_KEYBOARD, 0, 0,
|
||||
@ -245,35 +77,15 @@ public final class Device {
|
||||
return injectEvent(event, displayId, injectMode);
|
||||
}
|
||||
|
||||
public boolean injectKeyEvent(int action, int keyCode, int repeat, int metaState, int injectMode) {
|
||||
return injectKeyEvent(action, keyCode, repeat, metaState, displayId, injectMode);
|
||||
}
|
||||
|
||||
public static boolean pressReleaseKeycode(int keyCode, int displayId, int injectMode) {
|
||||
return injectKeyEvent(KeyEvent.ACTION_DOWN, keyCode, 0, 0, displayId, injectMode)
|
||||
&& injectKeyEvent(KeyEvent.ACTION_UP, keyCode, 0, 0, displayId, injectMode);
|
||||
}
|
||||
|
||||
public boolean pressReleaseKeycode(int keyCode, int injectMode) {
|
||||
return pressReleaseKeycode(keyCode, displayId, injectMode);
|
||||
}
|
||||
|
||||
public static boolean isScreenOn() {
|
||||
return ServiceManager.getPowerManager().isScreenOn();
|
||||
}
|
||||
|
||||
public synchronized void setRotationListener(RotationListener rotationListener) {
|
||||
this.rotationListener = rotationListener;
|
||||
}
|
||||
|
||||
public synchronized void setFoldListener(FoldListener foldlistener) {
|
||||
this.foldListener = foldlistener;
|
||||
}
|
||||
|
||||
public synchronized void setClipboardListener(ClipboardListener clipboardListener) {
|
||||
this.clipboardListener = clipboardListener;
|
||||
}
|
||||
|
||||
public static void expandNotificationPanel() {
|
||||
ServiceManager.getStatusBarManager().expandNotificationsPanel();
|
||||
}
|
||||
@ -298,7 +110,7 @@ public final class Device {
|
||||
return s.toString();
|
||||
}
|
||||
|
||||
public boolean setClipboardText(String text) {
|
||||
public static boolean setClipboardText(String text) {
|
||||
ClipboardManager clipboardManager = ServiceManager.getClipboardManager();
|
||||
if (clipboardManager == null) {
|
||||
return false;
|
||||
@ -313,20 +125,20 @@ public final class Device {
|
||||
return false;
|
||||
}
|
||||
|
||||
isSettingClipboard.set(true);
|
||||
boolean ok = clipboardManager.setText(text);
|
||||
isSettingClipboard.set(false);
|
||||
return ok;
|
||||
return clipboardManager.setText(text);
|
||||
}
|
||||
|
||||
/**
|
||||
* @param mode one of the {@code POWER_MODE_*} constants
|
||||
*/
|
||||
public static boolean setScreenPowerMode(int mode) {
|
||||
boolean applyToMultiPhysicalDisplays = Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q;
|
||||
public static boolean setDisplayPower(int displayId, boolean on) {
|
||||
assert displayId != Device.DISPLAY_ID_NONE;
|
||||
|
||||
if (Build.VERSION.SDK_INT >= AndroidVersions.API_35_ANDROID_15) {
|
||||
return ServiceManager.getDisplayManager().requestDisplayPower(displayId, on);
|
||||
}
|
||||
|
||||
boolean applyToMultiPhysicalDisplays = Build.VERSION.SDK_INT >= AndroidVersions.API_29_ANDROID_10;
|
||||
|
||||
if (applyToMultiPhysicalDisplays
|
||||
&& Build.VERSION.SDK_INT >= Build.VERSION_CODES.UPSIDE_DOWN_CAKE
|
||||
&& Build.VERSION.SDK_INT >= AndroidVersions.API_34_ANDROID_14
|
||||
&& Build.BRAND.equalsIgnoreCase("honor")
|
||||
&& SurfaceControl.hasGetBuildInDisplayMethod()) {
|
||||
// Workaround for Honor devices with Android 14:
|
||||
@ -335,10 +147,11 @@ public final class Device {
|
||||
applyToMultiPhysicalDisplays = false;
|
||||
}
|
||||
|
||||
int mode = on ? POWER_MODE_NORMAL : POWER_MODE_OFF;
|
||||
if (applyToMultiPhysicalDisplays) {
|
||||
// On Android 14, these internal methods have been moved to DisplayControl
|
||||
boolean useDisplayControl =
|
||||
Build.VERSION.SDK_INT >= Build.VERSION_CODES.UPSIDE_DOWN_CAKE && !SurfaceControl.hasGetPhysicalDisplayIdsMethod();
|
||||
Build.VERSION.SDK_INT >= AndroidVersions.API_34_ANDROID_14 && !SurfaceControl.hasGetPhysicalDisplayIdsMethod();
|
||||
|
||||
// Change the power mode for all physical displays
|
||||
long[] physicalDisplayIds = useDisplayControl ? DisplayControl.getPhysicalDisplayIds() : SurfaceControl.getPhysicalDisplayIds();
|
||||
@ -366,6 +179,8 @@ public final class Device {
|
||||
}
|
||||
|
||||
public static boolean powerOffScreen(int displayId) {
|
||||
assert displayId != DISPLAY_ID_NONE;
|
||||
|
||||
if (!isScreenOn()) {
|
||||
return true;
|
||||
}
|
||||
@ -375,7 +190,9 @@ public final class Device {
|
||||
/**
|
||||
* Disable auto-rotation (if enabled), set the screen rotation and re-enable auto-rotation (if it was enabled).
|
||||
*/
|
||||
public void rotateDevice() {
|
||||
public static void rotateDevice(int displayId) {
|
||||
assert displayId != DISPLAY_ID_NONE;
|
||||
|
||||
WindowManager wm = ServiceManager.getWindowManager();
|
||||
|
||||
boolean accelerometerRotation = !wm.isRotationFrozen(displayId);
|
||||
@ -394,6 +211,8 @@ public final class Device {
|
||||
}
|
||||
|
||||
private static int getCurrentRotation(int displayId) {
|
||||
assert displayId != DISPLAY_ID_NONE;
|
||||
|
||||
if (displayId == 0) {
|
||||
return ServiceManager.getWindowManager().getRotation();
|
||||
}
|
||||
@ -401,4 +220,96 @@ public final class Device {
|
||||
DisplayInfo displayInfo = ServiceManager.getDisplayManager().getDisplayInfo(displayId);
|
||||
return displayInfo.getRotation();
|
||||
}
|
||||
|
||||
public static List<DeviceApp> listApps() {
|
||||
List<DeviceApp> apps = new ArrayList<>();
|
||||
PackageManager pm = FakeContext.get().getPackageManager();
|
||||
for (ApplicationInfo appInfo : getLaunchableApps(pm)) {
|
||||
apps.add(toApp(pm, appInfo));
|
||||
}
|
||||
|
||||
return apps;
|
||||
}
|
||||
|
||||
@SuppressLint("QueryPermissionsNeeded")
|
||||
private static List<ApplicationInfo> getLaunchableApps(PackageManager pm) {
|
||||
List<ApplicationInfo> result = new ArrayList<>();
|
||||
for (ApplicationInfo appInfo : pm.getInstalledApplications(PackageManager.GET_META_DATA)) {
|
||||
if (appInfo.enabled && getLaunchIntent(pm, appInfo.packageName) != null) {
|
||||
result.add(appInfo);
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
public static Intent getLaunchIntent(PackageManager pm, String packageName) {
|
||||
Intent launchIntent = pm.getLaunchIntentForPackage(packageName);
|
||||
if (launchIntent != null) {
|
||||
return launchIntent;
|
||||
}
|
||||
|
||||
return pm.getLeanbackLaunchIntentForPackage(packageName);
|
||||
}
|
||||
|
||||
private static DeviceApp toApp(PackageManager pm, ApplicationInfo appInfo) {
|
||||
String name = pm.getApplicationLabel(appInfo).toString();
|
||||
boolean system = (appInfo.flags & ApplicationInfo.FLAG_SYSTEM) != 0;
|
||||
return new DeviceApp(appInfo.packageName, name, system);
|
||||
}
|
||||
|
||||
@SuppressLint("QueryPermissionsNeeded")
|
||||
public static DeviceApp findByPackageName(String packageName) {
|
||||
PackageManager pm = FakeContext.get().getPackageManager();
|
||||
// No need to filter by "launchable" apps, an error will be reported on start if the app is not launchable
|
||||
for (ApplicationInfo appInfo : pm.getInstalledApplications(PackageManager.GET_META_DATA)) {
|
||||
if (packageName.equals(appInfo.packageName)) {
|
||||
return toApp(pm, appInfo);
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
@SuppressLint("QueryPermissionsNeeded")
|
||||
public static List<DeviceApp> findByName(String searchName) {
|
||||
List<DeviceApp> result = new ArrayList<>();
|
||||
searchName = searchName.toLowerCase(Locale.getDefault());
|
||||
|
||||
PackageManager pm = FakeContext.get().getPackageManager();
|
||||
for (ApplicationInfo appInfo : getLaunchableApps(pm)) {
|
||||
String name = pm.getApplicationLabel(appInfo).toString();
|
||||
if (name.toLowerCase(Locale.getDefault()).startsWith(searchName)) {
|
||||
boolean system = (appInfo.flags & ApplicationInfo.FLAG_SYSTEM) != 0;
|
||||
result.add(new DeviceApp(appInfo.packageName, name, system));
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
public static void startApp(String packageName, int displayId, boolean forceStop) {
|
||||
PackageManager pm = FakeContext.get().getPackageManager();
|
||||
|
||||
Intent launchIntent = getLaunchIntent(pm, packageName);
|
||||
if (launchIntent == null) {
|
||||
Ln.w("Cannot create launch intent for app " + packageName);
|
||||
return;
|
||||
}
|
||||
|
||||
launchIntent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
|
||||
|
||||
Bundle options = null;
|
||||
if (Build.VERSION.SDK_INT >= AndroidVersions.API_26_ANDROID_8_0) {
|
||||
ActivityOptions launchOptions = ActivityOptions.makeBasic();
|
||||
launchOptions.setLaunchDisplayId(displayId);
|
||||
options = launchOptions.toBundle();
|
||||
}
|
||||
|
||||
ActivityManager am = ServiceManager.getActivityManager();
|
||||
if (forceStop) {
|
||||
am.forceStopPackage(packageName);
|
||||
}
|
||||
am.startActivity(launchIntent, options);
|
||||
}
|
||||
}
|
||||
|
@@ -0,0 +1,26 @@
package com.genymobile.scrcpy.device;

public final class DeviceApp {

    private final String packageName;
    private final String name;
    private final boolean system;

    public DeviceApp(String packageName, String name, boolean system) {
        this.packageName = packageName;
        this.name = name;
        this.system = system;
    }

    public String getPackageName() {
        return packageName;
    }

    public String getName() {
        return name;
    }

    public boolean isSystem() {
        return system;
    }
}
@ -6,15 +6,17 @@ public final class DisplayInfo {
|
||||
private final int rotation;
|
||||
private final int layerStack;
|
||||
private final int flags;
|
||||
private final int dpi;
|
||||
|
||||
public static final int FLAG_SUPPORTS_PROTECTED_BUFFERS = 0x00000001;
|
||||
|
||||
public DisplayInfo(int displayId, Size size, int rotation, int layerStack, int flags) {
|
||||
public DisplayInfo(int displayId, Size size, int rotation, int layerStack, int flags, int dpi) {
|
||||
this.displayId = displayId;
|
||||
this.size = size;
|
||||
this.rotation = rotation;
|
||||
this.layerStack = layerStack;
|
||||
this.flags = flags;
|
||||
this.dpi = dpi;
|
||||
}
|
||||
|
||||
public int getDisplayId() {
|
||||
@ -36,5 +38,9 @@ public final class DisplayInfo {
|
||||
public int getFlags() {
|
||||
return flags;
|
||||
}
|
||||
|
||||
public int getDpi() {
|
||||
return dpi;
|
||||
}
|
||||
}
|
||||
|
||||
|
@@ -0,0 +1,31 @@
package com.genymobile.scrcpy.device;

public final class NewDisplay {
    private Size size;
    private int dpi;

    public NewDisplay() {
        // Auto size and dpi
    }

    public NewDisplay(Size size, int dpi) {
        this.size = size;
        this.dpi = dpi;
    }

    public Size getSize() {
        return size;
    }

    public int getDpi() {
        return dpi;
    }

    public boolean hasExplicitSize() {
        return size != null;
    }

    public boolean hasExplicitDpi() {
        return dpi != 0;
    }
}
@@ -21,6 +21,10 @@ public final class Size {
        return height;
    }

+    public int getMax() {
+        return Math.max(width, height);
+    }
+
    public Size rotate() {
        return new Size(height, width);
    }
@@ -48,6 +52,6 @@ public final class Size {

    @Override
    public String toString() {
-        return "Size{" + "width=" + width + ", height=" + height + '}';
+        return "Size{" + width + 'x' + height + '}';
    }
}
@ -1,5 +1,7 @@
|
||||
package com.genymobile.scrcpy.util;
|
||||
|
||||
import android.media.MediaCodec;
|
||||
|
||||
public interface Codec {
|
||||
|
||||
enum Type {
|
||||
@ -14,4 +16,9 @@ public interface Codec {
|
||||
String getName();
|
||||
|
||||
String getMimeType();
|
||||
|
||||
static String getMimeType(MediaCodec codec) {
|
||||
String[] types = codec.getCodecInfo().getSupportedTypes();
|
||||
return types.length > 0 ? types[0] : null;
|
||||
}
|
||||
}
|
||||
|
@ -1,8 +1,5 @@
|
||||
package com.genymobile.scrcpy.util;
|
||||
|
||||
import com.genymobile.scrcpy.audio.AudioCodec;
|
||||
import com.genymobile.scrcpy.video.VideoCodec;
|
||||
|
||||
import android.media.MediaCodecInfo;
|
||||
import android.media.MediaCodecList;
|
||||
import android.media.MediaFormat;
|
||||
@ -13,24 +10,6 @@ import java.util.List;
|
||||
|
||||
public final class CodecUtils {
|
||||
|
||||
public static final class DeviceEncoder {
|
||||
private final Codec codec;
|
||||
private final MediaCodecInfo info;
|
||||
|
||||
DeviceEncoder(Codec codec, MediaCodecInfo info) {
|
||||
this.codec = codec;
|
||||
this.info = info;
|
||||
}
|
||||
|
||||
public Codec getCodec() {
|
||||
return codec;
|
||||
}
|
||||
|
||||
public MediaCodecInfo getInfo() {
|
||||
return info;
|
||||
}
|
||||
}
|
||||
|
||||
private CodecUtils() {
|
||||
// not instantiable
|
||||
}
|
||||
@ -47,7 +26,7 @@ public final class CodecUtils {
|
||||
}
|
||||
}
|
||||
|
||||
private static MediaCodecInfo[] getEncoders(MediaCodecList codecs, String mimeType) {
|
||||
public static MediaCodecInfo[] getEncoders(MediaCodecList codecs, String mimeType) {
|
||||
List<MediaCodecInfo> result = new ArrayList<>();
|
||||
for (MediaCodecInfo codecInfo : codecs.getCodecInfos()) {
|
||||
if (codecInfo.isEncoder() && Arrays.asList(codecInfo.getSupportedTypes()).contains(mimeType)) {
|
||||
@ -56,26 +35,4 @@ public final class CodecUtils {
|
||||
}
|
||||
return result.toArray(new MediaCodecInfo[result.size()]);
|
||||
}
|
||||
|
||||
public static List<DeviceEncoder> listVideoEncoders() {
|
||||
List<DeviceEncoder> encoders = new ArrayList<>();
|
||||
MediaCodecList codecs = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
|
||||
for (VideoCodec codec : VideoCodec.values()) {
|
||||
for (MediaCodecInfo info : getEncoders(codecs, codec.getMimeType())) {
|
||||
encoders.add(new DeviceEncoder(codec, info));
|
||||
}
|
||||
}
|
||||
return encoders;
|
||||
}
|
||||
|
||||
public static List<DeviceEncoder> listAudioEncoders() {
|
||||
List<DeviceEncoder> encoders = new ArrayList<>();
|
||||
MediaCodecList codecs = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
|
||||
for (AudioCodec codec : AudioCodec.values()) {
|
||||
for (MediaCodecInfo info : getEncoders(codecs, codec.getMimeType())) {
|
||||
encoders.add(new DeviceEncoder(codec, info));
|
||||
}
|
||||
}
|
||||
return encoders;
|
||||
}
|
||||
}
|
||||
|
@ -1,7 +1,9 @@
|
||||
package com.genymobile.scrcpy.util;
|
||||
|
||||
import com.genymobile.scrcpy.AndroidVersions;
|
||||
import com.genymobile.scrcpy.BuildConfig;
|
||||
|
||||
import android.os.Build;
|
||||
import android.system.ErrnoException;
|
||||
import android.system.Os;
|
||||
import android.system.OsConstants;
|
||||
@ -17,23 +19,38 @@ public final class IO {
|
||||
// not instantiable
|
||||
}
|
||||
|
||||
public static void writeFully(FileDescriptor fd, ByteBuffer from) throws IOException {
|
||||
// ByteBuffer position is not updated as expected by Os.write() on old Android versions, so
|
||||
// count the remaining bytes manually.
|
||||
// See <https://github.com/Genymobile/scrcpy/issues/291>.
|
||||
int remaining = from.remaining();
|
||||
while (remaining > 0) {
|
||||
private static int write(FileDescriptor fd, ByteBuffer from) throws IOException {
|
||||
while (true) {
|
||||
try {
|
||||
int w = Os.write(fd, from);
|
||||
return Os.write(fd, from);
|
||||
} catch (ErrnoException e) {
|
||||
if (e.errno != OsConstants.EINTR) {
|
||||
throw new IOException(e);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public static void writeFully(FileDescriptor fd, ByteBuffer from) throws IOException {
|
||||
if (Build.VERSION.SDK_INT >= AndroidVersions.API_23_ANDROID_6_0) {
|
||||
while (from.hasRemaining()) {
|
||||
write(fd, from);
|
||||
}
|
||||
} else {
|
||||
// ByteBuffer position is not updated as expected by Os.write() on old Android versions, so
|
||||
// handle the position and the remaining bytes manually.
|
||||
// See <https://github.com/Genymobile/scrcpy/issues/291>.
|
||||
int position = from.position();
|
||||
int remaining = from.remaining();
|
||||
while (remaining > 0) {
|
||||
int w = write(fd, from);
|
||||
if (BuildConfig.DEBUG && w < 0) {
|
||||
// w should not be negative, since an exception is thrown on error
|
||||
throw new AssertionError("Os.write() returned a negative value (" + w + ")");
|
||||
}
|
||||
remaining -= w;
|
||||
} catch (ErrnoException e) {
|
||||
if (e.errno != OsConstants.EINTR) {
|
||||
throw new IOException(e);
|
||||
}
|
||||
position += w;
|
||||
from.position(position);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
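Editor's aside, not part of the diff: on the legacy (pre API 23) branch of writeFully() above, the ByteBuffer position must be advanced by hand because Os.write() does not update it on old Android versions. A standalone sketch of that bookkeeping; PartialWriter and writeOnce() are hypothetical stand-ins for Os.write().

import java.io.IOException;
import java.nio.ByteBuffer;

public final class ManualPositionSketch {

    // Hypothetical low-level call standing in for Os.write(): writes some prefix of the buffer,
    // returns the number of bytes written, and leaves the buffer position untouched.
    interface PartialWriter {
        int writeOnce(ByteBuffer from) throws IOException;
    }

    private ManualPositionSketch() {
        // not instantiable
    }

    // Same bookkeeping as the legacy branch of IO.writeFully(): count and advance manually.
    public static void writeFully(PartialWriter writer, ByteBuffer from) throws IOException {
        int position = from.position();
        int remaining = from.remaining();
        while (remaining > 0) {
            int w = writer.writeOnce(from);
            remaining -= w;
            position += w;
            from.position(position);
        }
    }
}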
@ -1,19 +1,31 @@
|
||||
package com.genymobile.scrcpy.util;
|
||||
|
||||
import com.genymobile.scrcpy.AndroidVersions;
|
||||
import com.genymobile.scrcpy.audio.AudioCodec;
|
||||
import com.genymobile.scrcpy.device.Device;
|
||||
import com.genymobile.scrcpy.device.DeviceApp;
|
||||
import com.genymobile.scrcpy.device.DisplayInfo;
|
||||
import com.genymobile.scrcpy.device.Size;
|
||||
import com.genymobile.scrcpy.video.VideoCodec;
|
||||
import com.genymobile.scrcpy.wrappers.DisplayManager;
|
||||
import com.genymobile.scrcpy.wrappers.ServiceManager;
|
||||
|
||||
import android.annotation.SuppressLint;
|
||||
import android.annotation.TargetApi;
|
||||
import android.graphics.Rect;
|
||||
import android.hardware.camera2.CameraAccessException;
|
||||
import android.hardware.camera2.CameraCharacteristics;
|
||||
import android.hardware.camera2.CameraManager;
|
||||
import android.hardware.camera2.params.StreamConfigurationMap;
|
||||
import android.media.MediaCodec;
|
||||
import android.media.MediaCodecInfo;
|
||||
import android.media.MediaCodecList;
|
||||
import android.os.Build;
|
||||
import android.util.Range;
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
import java.util.SortedSet;
|
||||
import java.util.TreeSet;
|
||||
|
||||
@ -23,32 +35,54 @@ public final class LogUtils {
|
||||
// not instantiable
|
||||
}
|
||||
|
||||
public static String buildVideoEncoderListMessage() {
|
||||
StringBuilder builder = new StringBuilder("List of video encoders:");
|
||||
List<CodecUtils.DeviceEncoder> videoEncoders = CodecUtils.listVideoEncoders();
|
||||
if (videoEncoders.isEmpty()) {
|
||||
builder.append("\n (none)");
|
||||
} else {
|
||||
for (CodecUtils.DeviceEncoder encoder : videoEncoders) {
|
||||
builder.append("\n --video-codec=").append(encoder.getCodec().getName());
|
||||
builder.append(" --video-encoder='").append(encoder.getInfo().getName()).append("'");
|
||||
private static String buildEncoderListMessage(String type, Codec[] codecs) {
|
||||
StringBuilder builder = new StringBuilder("List of ").append(type).append(" encoders:");
|
||||
MediaCodecList codecList = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
|
||||
for (Codec codec : codecs) {
|
||||
MediaCodecInfo[] encoders = CodecUtils.getEncoders(codecList, codec.getMimeType());
|
||||
for (MediaCodecInfo info : encoders) {
|
||||
int lineStart = builder.length();
|
||||
builder.append("\n --").append(type).append("-codec=").append(codec.getName());
|
||||
builder.append(" --").append(type).append("-encoder=").append(info.getName());
|
||||
if (Build.VERSION.SDK_INT >= AndroidVersions.API_29_ANDROID_10) {
|
||||
int lineLength = builder.length() - lineStart;
|
||||
final int column = 70;
|
||||
if (lineLength < column) {
|
||||
int padding = column - lineLength;
|
||||
builder.append(String.format("%" + padding + "s", " "));
|
||||
}
|
||||
builder.append(" (").append(getHwCodecType(info)).append(')');
|
||||
if (info.isVendor()) {
|
||||
builder.append(" [vendor]");
|
||||
}
|
||||
if (info.isAlias()) {
|
||||
builder.append(" (alias for ").append(info.getCanonicalName()).append(')');
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
return builder.toString();
|
||||
}
|
||||
|
||||
public static String buildVideoEncoderListMessage() {
|
||||
return buildEncoderListMessage("video", VideoCodec.values());
|
||||
}
|
||||
|
||||
public static String buildAudioEncoderListMessage() {
|
||||
StringBuilder builder = new StringBuilder("List of audio encoders:");
|
||||
List<CodecUtils.DeviceEncoder> audioEncoders = CodecUtils.listAudioEncoders();
|
||||
if (audioEncoders.isEmpty()) {
|
||||
builder.append("\n (none)");
|
||||
} else {
|
||||
for (CodecUtils.DeviceEncoder encoder : audioEncoders) {
|
||||
builder.append("\n --audio-codec=").append(encoder.getCodec().getName());
|
||||
builder.append(" --audio-encoder='").append(encoder.getInfo().getName()).append("'");
|
||||
}
|
||||
return buildEncoderListMessage("audio", AudioCodec.values());
|
||||
}
|
||||
|
||||
@TargetApi(AndroidVersions.API_29_ANDROID_10)
|
||||
private static String getHwCodecType(MediaCodecInfo info) {
|
||||
if (info.isSoftwareOnly()) {
|
||||
return "sw";
|
||||
}
|
||||
return builder.toString();
|
||||
if (info.isHardwareAccelerated()) {
|
||||
return "hw";
|
||||
}
|
||||
return "hybrid";
|
||||
}
|
||||
|
||||
public static String buildDisplayListMessage() {
|
||||
@@ -154,4 +188,57 @@ public final class LogUtils {
        }
        return set;
    }

    public static String buildAppListMessage() {
        List<DeviceApp> apps = Device.listApps();
        return buildAppListMessage("List of apps:", apps);
    }

    @SuppressLint("QueryPermissionsNeeded")
    public static String buildAppListMessage(String title, List<DeviceApp> apps) {
        StringBuilder builder = new StringBuilder(title);

        // Sort by:
        //  1. system flag (system apps are before non-system apps)
        //  2. name
        //  3. package name
        // Comparator.comparing() was introduced in API 24, so it cannot be used here to simplify the code
        Collections.sort(apps, (thisApp, otherApp) -> {
            // System apps first
            int cmp = -Boolean.compare(thisApp.isSystem(), otherApp.isSystem());
            if (cmp != 0) {
                return cmp;
            }

            cmp = Objects.compare(thisApp.getName(), otherApp.getName(), String::compareTo);
            if (cmp != 0) {
                return cmp;
            }

            return Objects.compare(thisApp.getPackageName(), otherApp.getPackageName(), String::compareTo);
        });

        final int column = 30;
        for (DeviceApp app : apps) {
            String name = app.getName();
            int padding = column - name.length();
            builder.append("\n ");
            if (app.isSystem()) {
                builder.append("* ");
            } else {
                builder.append("- ");
            }
            builder.append(name);
            if (padding > 0) {
                builder.append(String.format("%" + padding + "s", " "));
            } else {
                builder.append("\n ").append(String.format("%" + column + "s", " "));
            }
            builder.append(" [").append(app.getPackageName()).append(']');
        }

        return builder.toString();
    }
}
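Editor's aside, not part of the diff: the column alignment in buildAppListMessage() pads each name with String.format using a computed width. A tiny standalone sketch with a hypothetical app label and package name:

public class PaddingSketch {
    public static void main(String[] args) {
        final int column = 30;
        String name = "Calculator";                    // hypothetical app label
        String packageName = "com.example.calculator"; // hypothetical package name
        StringBuilder builder = new StringBuilder(" - ").append(name);
        int padding = column - name.length();
        if (padding > 0) {
            // "%20s" formatted with a single space produces a 20-character blank field
            builder.append(String.format("%" + padding + "s", " "));
        }
        builder.append(" [").append(packageName).append(']');
        // prints " - Calculator", a 20-space gap, then " [com.example.calculator]"
        System.out.println(builder);
    }
}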
@ -1,5 +1,6 @@
|
||||
package com.genymobile.scrcpy.util;
|
||||
|
||||
import com.genymobile.scrcpy.AndroidVersions;
|
||||
import com.genymobile.scrcpy.wrappers.ContentProvider;
|
||||
import com.genymobile.scrcpy.wrappers.ServiceManager;
|
||||
|
||||
@ -34,7 +35,7 @@ public final class Settings {
|
||||
}
|
||||
|
||||
public static String getValue(String table, String key) throws SettingsException {
|
||||
if (Build.VERSION.SDK_INT <= Build.VERSION_CODES.R) {
|
||||
if (Build.VERSION.SDK_INT <= AndroidVersions.API_30_ANDROID_11) {
|
||||
// on Android >= 12, it always fails: <https://github.com/Genymobile/scrcpy/issues/2788>
|
||||
try (ContentProvider provider = ServiceManager.getActivityManager().createSettingsProvider()) {
|
||||
return provider.getValue(table, key);
|
||||
@ -47,7 +48,7 @@ public final class Settings {
|
||||
}
|
||||
|
||||
public static void putValue(String table, String key, String value) throws SettingsException {
|
||||
if (Build.VERSION.SDK_INT <= Build.VERSION_CODES.R) {
|
||||
if (Build.VERSION.SDK_INT <= AndroidVersions.API_30_ANDROID_11) {
|
||||
// on Android >= 12, it always fails: <https://github.com/Genymobile/scrcpy/issues/2788>
|
||||
try (ContentProvider provider = ServiceManager.getActivityManager().createSettingsProvider()) {
|
||||
provider.putValue(table, key, value);
|
||||
@ -60,7 +61,7 @@ public final class Settings {
|
||||
}
|
||||
|
||||
public static String getAndPutValue(String table, String key, String value) throws SettingsException {
|
||||
if (Build.VERSION.SDK_INT <= Build.VERSION_CODES.R) {
|
||||
if (Build.VERSION.SDK_INT <= AndroidVersions.API_30_ANDROID_11) {
|
||||
// on Android >= 12, it always fails: <https://github.com/Genymobile/scrcpy/issues/2788>
|
||||
try (ContentProvider provider = ServiceManager.getActivityManager().createSettingsProvider()) {
|
||||
String oldValue = provider.getValue(table, key);
|
||||
|
@ -1,8 +1,9 @@
|
||||
package com.genymobile.scrcpy.video;
|
||||
|
||||
import com.genymobile.scrcpy.AndroidVersions;
|
||||
import com.genymobile.scrcpy.device.Size;
|
||||
import com.genymobile.scrcpy.util.HandlerExecutor;
|
||||
import com.genymobile.scrcpy.util.Ln;
|
||||
import com.genymobile.scrcpy.device.Size;
|
||||
import com.genymobile.scrcpy.wrappers.ServiceManager;
|
||||
|
||||
import android.annotation.SuppressLint;
|
||||
@ -20,7 +21,6 @@ import android.hardware.camera2.params.OutputConfiguration;
|
||||
import android.hardware.camera2.params.SessionConfiguration;
|
||||
import android.hardware.camera2.params.StreamConfigurationMap;
|
||||
import android.media.MediaCodec;
|
||||
import android.os.Build;
|
||||
import android.os.Handler;
|
||||
import android.os.HandlerThread;
|
||||
import android.util.Range;
|
||||
@ -118,7 +118,7 @@ public class CameraCapture extends SurfaceCapture {
|
||||
return null;
|
||||
}
|
||||
|
||||
@TargetApi(Build.VERSION_CODES.N)
|
||||
@TargetApi(AndroidVersions.API_24_ANDROID_7_0)
|
||||
private static Size selectSize(String cameraId, Size explicitSize, int maxSize, CameraAspectRatio aspectRatio, boolean highSpeed)
|
||||
throws CameraAccessException {
|
||||
if (explicitSize != null) {
|
||||
@ -242,7 +242,7 @@ public class CameraCapture extends SurfaceCapture {
|
||||
}
|
||||
|
||||
@SuppressLint("MissingPermission")
|
||||
@TargetApi(Build.VERSION_CODES.S)
|
||||
@TargetApi(AndroidVersions.API_31_ANDROID_12)
|
||||
private CameraDevice openCamera(String id) throws CameraAccessException, InterruptedException {
|
||||
CompletableFuture<CameraDevice> future = new CompletableFuture<>();
|
||||
ServiceManager.getCameraManager().openCamera(id, new CameraDevice.StateCallback() {
|
||||
@ -289,7 +289,7 @@ public class CameraCapture extends SurfaceCapture {
|
||||
}
|
||||
}
|
||||
|
||||
@TargetApi(Build.VERSION_CODES.S)
|
||||
@TargetApi(AndroidVersions.API_31_ANDROID_12)
|
||||
private CameraCaptureSession createCaptureSession(CameraDevice camera, Surface surface) throws CameraAccessException, InterruptedException {
|
||||
CompletableFuture<CameraCaptureSession> future = new CompletableFuture<>();
|
||||
OutputConfiguration outputConfig = new OutputConfiguration(surface);
|
||||
@ -328,7 +328,7 @@ public class CameraCapture extends SurfaceCapture {
|
||||
return requestBuilder.build();
|
||||
}
|
||||
|
||||
@TargetApi(Build.VERSION_CODES.S)
|
||||
@TargetApi(AndroidVersions.API_31_ANDROID_12)
|
||||
private void setRepeatingRequest(CameraCaptureSession session, CaptureRequest request) throws CameraAccessException, InterruptedException {
|
||||
CameraCaptureSession.CaptureCallback callback = new CameraCaptureSession.CaptureCallback() {
|
||||
@Override
|
||||
|
@@ -0,0 +1,146 @@
package com.genymobile.scrcpy.video;

import com.genymobile.scrcpy.AndroidVersions;
import com.genymobile.scrcpy.control.PositionMapper;
import com.genymobile.scrcpy.device.DisplayInfo;
import com.genymobile.scrcpy.device.NewDisplay;
import com.genymobile.scrcpy.device.Size;
import com.genymobile.scrcpy.util.Ln;
import com.genymobile.scrcpy.wrappers.ServiceManager;

import android.graphics.Rect;
import android.hardware.display.DisplayManager;
import android.hardware.display.VirtualDisplay;
import android.os.Build;
import android.view.Surface;

public class NewDisplayCapture extends SurfaceCapture {

    // Internal fields copied from android.hardware.display.DisplayManager
    private static final int VIRTUAL_DISPLAY_FLAG_SUPPORTS_TOUCH = 1 << 6;
    private static final int VIRTUAL_DISPLAY_FLAG_ROTATES_WITH_CONTENT = 1 << 7;
    private static final int VIRTUAL_DISPLAY_FLAG_DESTROY_CONTENT_ON_REMOVAL = 1 << 8;
    private static final int VIRTUAL_DISPLAY_FLAG_SHOULD_SHOW_SYSTEM_DECORATIONS = 1 << 9;
    private static final int VIRTUAL_DISPLAY_FLAG_TRUSTED = 1 << 10;
    private static final int VIRTUAL_DISPLAY_FLAG_OWN_DISPLAY_GROUP = 1 << 11;
    private static final int VIRTUAL_DISPLAY_FLAG_ALWAYS_UNLOCKED = 1 << 12;
    private static final int VIRTUAL_DISPLAY_FLAG_TOUCH_FEEDBACK_DISABLED = 1 << 13;
    private static final int VIRTUAL_DISPLAY_FLAG_OWN_FOCUS = 1 << 14;
    private static final int VIRTUAL_DISPLAY_FLAG_DEVICE_DISPLAY_GROUP = 1 << 15;

    private final VirtualDisplayListener vdListener;
    private final NewDisplay newDisplay;

    private Size mainDisplaySize;
    private int mainDisplayDpi;
    private int maxSize; // only used if newDisplay.getSize() != null

    private VirtualDisplay virtualDisplay;
    private Size size;
    private int dpi;

    public NewDisplayCapture(VirtualDisplayListener vdListener, NewDisplay newDisplay, int maxSize) {
        this.vdListener = vdListener;
        this.newDisplay = newDisplay;
        this.maxSize = maxSize;
    }

    @Override
    public void init() {
        size = newDisplay.getSize();
        dpi = newDisplay.getDpi();
        if (size == null || dpi == 0) {
            DisplayInfo displayInfo = ServiceManager.getDisplayManager().getDisplayInfo(0);
            if (displayInfo != null) {
                mainDisplaySize = displayInfo.getSize();
                mainDisplayDpi = displayInfo.getDpi();
            } else {
                Ln.w("Main display not found, fallback to 1920x1080 240dpi");
                mainDisplaySize = new Size(1920, 1080);
                mainDisplayDpi = 240;
            }
        }
    }

    @Override
    public void prepare() {
        if (!newDisplay.hasExplicitSize()) {
            size = ScreenInfo.computeVideoSize(mainDisplaySize.getWidth(), mainDisplaySize.getHeight(), maxSize);
        }
        if (!newDisplay.hasExplicitDpi()) {
            dpi = scaleDpi(mainDisplaySize, mainDisplayDpi, size);
        }
    }

    @Override
    public void start(Surface surface) {
        if (virtualDisplay != null) {
            virtualDisplay.release();
            virtualDisplay = null;
        }

        int virtualDisplayId;
        try {
            int flags = DisplayManager.VIRTUAL_DISPLAY_FLAG_PUBLIC
                    | DisplayManager.VIRTUAL_DISPLAY_FLAG_OWN_CONTENT_ONLY
                    | VIRTUAL_DISPLAY_FLAG_SUPPORTS_TOUCH
                    | VIRTUAL_DISPLAY_FLAG_ROTATES_WITH_CONTENT
                    | VIRTUAL_DISPLAY_FLAG_DESTROY_CONTENT_ON_REMOVAL
                    | VIRTUAL_DISPLAY_FLAG_SHOULD_SHOW_SYSTEM_DECORATIONS;
            if (Build.VERSION.SDK_INT >= AndroidVersions.API_33_ANDROID_13) {
                flags |= VIRTUAL_DISPLAY_FLAG_TRUSTED
                        | VIRTUAL_DISPLAY_FLAG_OWN_DISPLAY_GROUP
                        | VIRTUAL_DISPLAY_FLAG_ALWAYS_UNLOCKED
                        | VIRTUAL_DISPLAY_FLAG_TOUCH_FEEDBACK_DISABLED;
                if (Build.VERSION.SDK_INT >= AndroidVersions.API_34_ANDROID_14) {
                    flags |= VIRTUAL_DISPLAY_FLAG_OWN_FOCUS
                            | VIRTUAL_DISPLAY_FLAG_DEVICE_DISPLAY_GROUP;
                }
            }
            virtualDisplay = ServiceManager.getDisplayManager()
                    .createNewVirtualDisplay("scrcpy", size.getWidth(), size.getHeight(), dpi, surface, flags);
            virtualDisplayId = virtualDisplay.getDisplay().getDisplayId();
            Ln.i("New display: " + size.getWidth() + "x" + size.getHeight() + "/" + dpi + " (id=" + virtualDisplayId + ")");
        } catch (Exception e) {
            Ln.e("Could not create display", e);
            throw new AssertionError("Could not create display");
        }

        if (vdListener != null) {
            virtualDisplayId = virtualDisplay.getDisplay().getDisplayId();
            Rect contentRect = new Rect(0, 0, size.getWidth(), size.getHeight());
            PositionMapper positionMapper = new PositionMapper(size, contentRect, 0);
            vdListener.onNewVirtualDisplay(virtualDisplayId, positionMapper);
        }
    }

    @Override
    public void release() {
        if (virtualDisplay != null) {
            virtualDisplay.release();
            virtualDisplay = null;
        }
    }

    @Override
    public synchronized Size getSize() {
        return size;
    }

    @Override
    public synchronized boolean setMaxSize(int newMaxSize) {
        if (newDisplay.hasExplicitSize()) {
            // Cannot retry with a different size if the display size was explicitly provided
            return false;
        }

        maxSize = newMaxSize;
        return true;
    }

    private static int scaleDpi(Size initialSize, int initialDpi, Size size) {
        int den = initialSize.getMax();
        int num = size.getMax();
        return initialDpi * num / den;
    }
}
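A note on scaleDpi(): the density is scaled by the same ratio as the larger display dimension, so content keeps roughly the same apparent size on the resized virtual display. A worked example with assumed values (the numbers are illustrative, not taken from the change itself):

    // Hypothetical: main display 2400x1080 at 420 dpi, video size limited to 1200x540
    int scaledDpi = 420 * 1200 / 2400; // = 210, same integer arithmetic as scaleDpi()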
@@ -1,43 +1,134 @@
package com.genymobile.scrcpy.video;

import com.genymobile.scrcpy.device.Device;
import com.genymobile.scrcpy.util.Ln;
import com.genymobile.scrcpy.AndroidVersions;
import com.genymobile.scrcpy.control.PositionMapper;
import com.genymobile.scrcpy.device.ConfigurationException;
import com.genymobile.scrcpy.device.DisplayInfo;
import com.genymobile.scrcpy.device.Size;
import com.genymobile.scrcpy.util.Ln;
import com.genymobile.scrcpy.util.LogUtils;
import com.genymobile.scrcpy.wrappers.DisplayManager;
import com.genymobile.scrcpy.wrappers.ServiceManager;
import com.genymobile.scrcpy.wrappers.SurfaceControl;

import android.graphics.Rect;
import android.hardware.display.VirtualDisplay;
import android.os.Build;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.IBinder;
import android.view.IDisplayFoldListener;
import android.view.IRotationWatcher;
import android.view.Surface;

public class ScreenCapture extends SurfaceCapture implements Device.RotationListener, Device.FoldListener {
public class ScreenCapture extends SurfaceCapture {

    private final VirtualDisplayListener vdListener;
    private final int displayId;
    private int maxSize;
    private final Rect crop;
    private final int lockVideoOrientation;

    private DisplayInfo displayInfo;
    private ScreenInfo screenInfo;

    // Source display size (before resizing/crop) for the current session
    private Size sessionDisplaySize;

    private final Device device;
    private IBinder display;
    private VirtualDisplay virtualDisplay;

    public ScreenCapture(Device device) {
        this.device = device;
    private DisplayManager.DisplayListenerHandle displayListenerHandle;
    private HandlerThread handlerThread;

    // On Android 14, the DisplayListener may be broken (it never sends events). This is fixed in recent Android 14 upgrades, but we can't really
    // detect it directly, so register a RotationWatcher and a DisplayFoldListener as a fallback, until we receive the first event from
    // DisplayListener (which proves that it works).
    private boolean displayListenerWorks; // only accessed from the display listener thread
    private IRotationWatcher rotationWatcher;
    private IDisplayFoldListener displayFoldListener;

    public ScreenCapture(VirtualDisplayListener vdListener, int displayId, int maxSize, Rect crop, int lockVideoOrientation) {
        this.vdListener = vdListener;
        this.displayId = displayId;
        this.maxSize = maxSize;
        this.crop = crop;
        this.lockVideoOrientation = lockVideoOrientation;
    }

    @Override
    public void init() {
        device.setRotationListener(this);
        device.setFoldListener(this);
        if (Build.VERSION.SDK_INT == AndroidVersions.API_34_ANDROID_14) {
            registerDisplayListenerFallbacks();
        }

        handlerThread = new HandlerThread("DisplayListener");
        handlerThread.start();
        Handler handler = new Handler(handlerThread.getLooper());
        displayListenerHandle = ServiceManager.getDisplayManager().registerDisplayListener(displayId -> {
            if (Ln.isEnabled(Ln.Level.VERBOSE)) {
                Ln.v("ScreenCapture: onDisplayChanged(" + displayId + ")");
            }
            if (Build.VERSION.SDK_INT == AndroidVersions.API_34_ANDROID_14) {
                if (!displayListenerWorks) {
                    // On the first display listener event, we know it works, we can unregister the fallbacks
                    displayListenerWorks = true;
                    unregisterDisplayListenerFallbacks();
                }
            }
            if (this.displayId == displayId) {
                DisplayInfo di = ServiceManager.getDisplayManager().getDisplayInfo(displayId);
                if (di == null) {
                    Ln.w("DisplayInfo for " + displayId + " cannot be retrieved");
                    // We can't compare with the current size, so reset unconditionally
                    if (Ln.isEnabled(Ln.Level.VERBOSE)) {
                        Ln.v("ScreenCapture: requestReset(): " + getSessionDisplaySize() + " -> (unknown)");
                    }
                    setSessionDisplaySize(null);
                    requestReset();
                } else {
                    Size size = di.getSize();

                    // The field is hidden on purpose, to read it with synchronization
                    @SuppressWarnings("checkstyle:HiddenField")
                    Size sessionDisplaySize = getSessionDisplaySize(); // synchronized

                    // .equals() also works if sessionDisplaySize == null
                    if (!size.equals(sessionDisplaySize)) {
                        // Reset only if the size is different
                        if (Ln.isEnabled(Ln.Level.VERBOSE)) {
                            Ln.v("ScreenCapture: requestReset(): " + sessionDisplaySize + " -> " + size);
                        }
                        // Set the new size immediately, so that a future onDisplayChanged() event called before the asynchronous prepare()
                        // considers that the current size is the requested size (to avoid a duplicate requestReset())
                        setSessionDisplaySize(size);
                        requestReset();
                    } else if (Ln.isEnabled(Ln.Level.VERBOSE)) {
                        Ln.v("ScreenCapture: Size not changed (" + size + "): do not requestReset()");
                    }
                }
            }
        }, handler);
    }

    @Override
    public void prepare() throws ConfigurationException {
        displayInfo = ServiceManager.getDisplayManager().getDisplayInfo(displayId);
        if (displayInfo == null) {
            Ln.e("Display " + displayId + " not found\n" + LogUtils.buildDisplayListMessage());
            throw new ConfigurationException("Unknown display id: " + displayId);
        }

        if ((displayInfo.getFlags() & DisplayInfo.FLAG_SUPPORTS_PROTECTED_BUFFERS) == 0) {
            Ln.w("Display doesn't have FLAG_SUPPORTS_PROTECTED_BUFFERS flag, mirroring can be restricted");
        }

        setSessionDisplaySize(displayInfo.getSize());
        screenInfo = ScreenInfo.computeScreenInfo(displayInfo.getRotation(), displayInfo.getSize(), crop, maxSize, lockVideoOrientation);
    }

    @Override
    public void start(Surface surface) {
        ScreenInfo screenInfo = device.getScreenInfo();
        Rect contentRect = screenInfo.getContentRect();

        // does not include the locked video orientation
        Rect unlockedVideoRect = screenInfo.getUnlockedVideoSize().toRect();
        int videoRotation = screenInfo.getVideoRotation();
        int layerStack = device.getLayerStack();

        if (display != null) {
            SurfaceControl.destroyDisplay(display);
            display = null;
@@ -47,15 +138,31 @@ public class ScreenCapture extends SurfaceCapture implements Device.RotationList
            virtualDisplay = null;
        }

        int virtualDisplayId;
        PositionMapper positionMapper;
        try {
            Rect videoRect = screenInfo.getVideoSize().toRect();
            Size videoSize = screenInfo.getVideoSize();
            virtualDisplay = ServiceManager.getDisplayManager()
                    .createVirtualDisplay("scrcpy", videoRect.width(), videoRect.height(), device.getDisplayId(), surface);
                    .createVirtualDisplay("scrcpy", videoSize.getWidth(), videoSize.getHeight(), displayId, surface);
            virtualDisplayId = virtualDisplay.getDisplay().getDisplayId();
            Rect contentRect = new Rect(0, 0, videoSize.getWidth(), videoSize.getHeight());
            // The positions are relative to the virtual display, not the original display
            positionMapper = new PositionMapper(videoSize, contentRect, 0);
            Ln.d("Display: using DisplayManager API");
        } catch (Exception displayManagerException) {
            try {
                display = createDisplay();

                Rect contentRect = screenInfo.getContentRect();

                // does not include the locked video orientation
                Rect unlockedVideoRect = screenInfo.getUnlockedVideoSize().toRect();
                int videoRotation = screenInfo.getVideoRotation();
                int layerStack = displayInfo.getLayerStack();

                setDisplaySurface(display, surface, videoRotation, contentRect, unlockedVideoRect, layerStack);
                virtualDisplayId = displayId;
                positionMapper = PositionMapper.from(screenInfo);
                Ln.d("Display: using SurfaceControl API");
            } catch (Exception surfaceControlException) {
                Ln.e("Could not create display using DisplayManager", displayManagerException);
@@ -63,12 +170,27 @@ public class ScreenCapture extends SurfaceCapture implements Device.RotationList
                throw new AssertionError("Could not create display");
            }
        }

        if (vdListener != null) {
            vdListener.onNewVirtualDisplay(virtualDisplayId, positionMapper);
        }
    }

    @Override
    public void release() {
        device.setRotationListener(null);
        device.setFoldListener(null);
        if (Build.VERSION.SDK_INT == AndroidVersions.API_34_ANDROID_14) {
            unregisterDisplayListenerFallbacks();
        }

        handlerThread.quitSafely();
        handlerThread = null;

        // displayListenerHandle may be null if registration failed
        if (displayListenerHandle != null) {
            ServiceManager.getDisplayManager().unregisterDisplayListener(displayListenerHandle);
            displayListenerHandle = null;
        }

        if (display != null) {
            SurfaceControl.destroyDisplay(display);
            display = null;
@@ -81,30 +203,20 @@ public class ScreenCapture extends SurfaceCapture implements Device.RotationList

    @Override
    public Size getSize() {
        return device.getScreenInfo().getVideoSize();
        return screenInfo.getVideoSize();
    }

    @Override
    public boolean setMaxSize(int maxSize) {
        device.setMaxSize(maxSize);
    public boolean setMaxSize(int newMaxSize) {
        maxSize = newMaxSize;
        return true;
    }

    @Override
    public void onFoldChanged(int displayId, boolean folded) {
        requestReset();
    }

    @Override
    public void onRotationChanged(int rotation) {
        requestReset();
    }

    private static IBinder createDisplay() throws Exception {
        // Since Android 12 (preview), secure displays could not be created with shell permissions anymore.
        // On Android 12 preview, SDK_INT is still R (not S), but CODENAME is "S".
        boolean secure = Build.VERSION.SDK_INT < Build.VERSION_CODES.R || (Build.VERSION.SDK_INT == Build.VERSION_CODES.R && !"S".equals(
                Build.VERSION.CODENAME));
        boolean secure = Build.VERSION.SDK_INT < AndroidVersions.API_30_ANDROID_11 || (Build.VERSION.SDK_INT == AndroidVersions.API_30_ANDROID_11
                && !"S".equals(Build.VERSION.CODENAME));
        return SurfaceControl.createDisplay("scrcpy", secure);
    }

@@ -118,4 +230,65 @@ public class ScreenCapture extends SurfaceCapture implements Device.RotationList
            SurfaceControl.closeTransaction();
        }
    }

    private synchronized Size getSessionDisplaySize() {
        return sessionDisplaySize;
    }

    private synchronized void setSessionDisplaySize(Size sessionDisplaySize) {
        this.sessionDisplaySize = sessionDisplaySize;
    }

    private void registerDisplayListenerFallbacks() {
        rotationWatcher = new IRotationWatcher.Stub() {
            @Override
            public void onRotationChanged(int rotation) {
                if (Ln.isEnabled(Ln.Level.VERBOSE)) {
                    Ln.v("ScreenCapture: onRotationChanged(" + rotation + ")");
                }
                requestReset();
            }
        };
        ServiceManager.getWindowManager().registerRotationWatcher(rotationWatcher, displayId);

        // Build.VERSION.SDK_INT >= AndroidVersions.API_29_ANDROID_10 (but implied by == API_34_ANDROID_14)
        displayFoldListener = new IDisplayFoldListener.Stub() {

            private boolean first = true;

            @Override
            public void onDisplayFoldChanged(int displayId, boolean folded) {
                if (first) {
                    // An event is posted on registration to signal the initial state. Ignore it to avoid restarting encoding.
                    first = false;
                    return;
                }

                if (Ln.isEnabled(Ln.Level.VERBOSE)) {
                    Ln.v("ScreenCapture: onDisplayFoldChanged(" + displayId + ", " + folded + ")");
                }

                if (ScreenCapture.this.displayId != displayId) {
                    // Ignore events related to other display ids
                    return;
                }
                requestReset();
            }
        };
        ServiceManager.getWindowManager().registerDisplayFoldListener(displayFoldListener);
    }

    private void unregisterDisplayListenerFallbacks() {
        synchronized (this) {
            if (rotationWatcher != null) {
                ServiceManager.getWindowManager().unregisterRotationWatcher(rotationWatcher);
                rotationWatcher = null;
            }
            if (displayFoldListener != null) {
                // Build.VERSION.SDK_INT >= AndroidVersions.API_29_ANDROID_10 (but implied by == API_34_ANDROID_14)
                ServiceManager.getWindowManager().unregisterDisplayFoldListener(displayFoldListener);
                displayFoldListener = null;
            }
        }
    }
}
@@ -2,8 +2,8 @@ package com.genymobile.scrcpy.video;

import com.genymobile.scrcpy.BuildConfig;
import com.genymobile.scrcpy.device.Device;
import com.genymobile.scrcpy.util.Ln;
import com.genymobile.scrcpy.device.Size;
import com.genymobile.scrcpy.util.Ln;

import android.graphics.Rect;

@@ -63,30 +63,8 @@ public final class ScreenInfo {
        return unlockedVideoSize.rotate();
    }

    public int getDeviceRotation() {
        return deviceRotation;
    }

    public ScreenInfo withDeviceRotation(int newDeviceRotation) {
        if (newDeviceRotation == deviceRotation) {
            return this;
        }
        // true if changed between portrait and landscape
        boolean orientationChanged = (deviceRotation + newDeviceRotation) % 2 != 0;
        Rect newContentRect;
        Size newUnlockedVideoSize;
        if (orientationChanged) {
            newContentRect = flipRect(contentRect);
            newUnlockedVideoSize = unlockedVideoSize.rotate();
        } else {
            newContentRect = contentRect;
            newUnlockedVideoSize = unlockedVideoSize;
        }
        return new ScreenInfo(newContentRect, newUnlockedVideoSize, newDeviceRotation, lockedVideoOrientation);
    }

    public static ScreenInfo computeScreenInfo(int rotation, Size deviceSize, Rect crop, int maxSize, int lockedVideoOrientation) {
        if (lockedVideoOrientation == Device.LOCK_VIDEO_ORIENTATION_INITIAL) {
        if (lockedVideoOrientation == Device.LOCK_VIDEO_ORIENTATION_INITIAL || lockedVideoOrientation == Device.LOCK_VIDEO_ORIENTATION_INITIAL_AUTO) {
            // The user requested to lock the video orientation to the current orientation
            lockedVideoOrientation = rotation;
        }
@@ -112,7 +90,7 @@ public final class ScreenInfo {
        return rect.width() + ":" + rect.height() + ":" + rect.left + ":" + rect.top;
    }

    private static Size computeVideoSize(int w, int h, int maxSize) {
    public static Size computeVideoSize(int w, int h, int maxSize) {
        // Compute the video size and the padding of the content inside this video.
        // Principle:
        // - scale down the greater side of the screen to maxSize (if necessary);
@@ -1,5 +1,6 @@
package com.genymobile.scrcpy.video;

import com.genymobile.scrcpy.device.ConfigurationException;
import com.genymobile.scrcpy.device.Size;

import android.view.Surface;

@@ -32,15 +33,22 @@ public abstract class SurfaceCapture {
    }

    /**
     * Called once before the capture starts.
     * Called once before the first capture starts.
     */
    public abstract void init() throws IOException;
    public abstract void init() throws ConfigurationException, IOException;

    /**
     * Called after the capture ends (if and only if {@link #init()} has been called).
     * Called after the last capture ends (if and only if {@link #init()} has been called).
     */
    public abstract void release();

    /**
     * Called once before each capture starts, before {@link #getSize()}.
     */
    public void prepare() throws ConfigurationException {
        // empty by default
    }

    /**
     * Start the capture to the target surface.
     *
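The updated javadoc implies the following driving sequence; this is a simplified sketch of how the SurfaceEncoder hunk below uses a SurfaceCapture (error handling, retries and the encoder loop are omitted):

    capture.init();                 // once, before the first capture
    try {
        do {
            capture.prepare();      // before each capture session
            Size size = capture.getSize();
            capture.start(surface); // render into the encoder surface
            // ... encode until a reset is requested or the stream ends ...
        } while (alive);
    } finally {
        capture.release();          // once, after the last capture
    }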
@@ -1,15 +1,16 @@
package com.genymobile.scrcpy.video;

import com.genymobile.scrcpy.AndroidVersions;
import com.genymobile.scrcpy.AsyncProcessor;
import com.genymobile.scrcpy.device.ConfigurationException;
import com.genymobile.scrcpy.device.Size;
import com.genymobile.scrcpy.device.Streamer;
import com.genymobile.scrcpy.util.Codec;
import com.genymobile.scrcpy.util.CodecOption;
import com.genymobile.scrcpy.util.CodecUtils;
import com.genymobile.scrcpy.device.ConfigurationException;
import com.genymobile.scrcpy.util.IO;
import com.genymobile.scrcpy.util.Ln;
import com.genymobile.scrcpy.util.LogUtils;
import com.genymobile.scrcpy.device.Size;
import com.genymobile.scrcpy.device.Streamer;

import android.media.MediaCodec;
import android.media.MediaCodecInfo;
@@ -67,12 +68,18 @@ public class SurfaceEncoder implements AsyncProcessor {
        capture.init();

        try {
            streamer.writeVideoHeader(capture.getSize());

            boolean alive;
            boolean headerWritten = false;

            do {
                capture.consumeReset(); // If a capture reset was requested, it is implicitly fulfilled
                capture.prepare();
                Size size = capture.getSize();
                if (!headerWritten) {
                    streamer.writeVideoHeader(size);
                    headerWritten = true;
                }

                format.setInteger(MediaFormat.KEY_WIDTH, size.getWidth());
                format.setInteger(MediaFormat.KEY_HEIGHT, size.getHeight());

@@ -81,6 +88,9 @@ public class SurfaceEncoder implements AsyncProcessor {
                mediaCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
                surface = mediaCodec.createInputSurface();

                VideoFilter filter = new VideoFilter(surface);
                surface = filter.getInputSurface();

                capture.start(surface);

                mediaCodec.start();
@@ -88,6 +98,7 @@ public class SurfaceEncoder implements AsyncProcessor {
                alive = encode(mediaCodec, streamer);
                // do not call stop() on exception, it would trigger an IllegalStateException
                mediaCodec.stop();
                filter.release();
            } catch (IllegalStateException | IllegalArgumentException e) {
                Ln.e("Encoding error: " + e.getClass().getName() + ": " + e.getMessage());
                if (!prepareRetry(size)) {
@@ -205,7 +216,13 @@ public class SurfaceEncoder implements AsyncProcessor {
        if (encoderName != null) {
            Ln.d("Creating encoder by name: '" + encoderName + "'");
            try {
                return MediaCodec.createByCodecName(encoderName);
                MediaCodec mediaCodec = MediaCodec.createByCodecName(encoderName);
                String mimeType = Codec.getMimeType(mediaCodec);
                if (!codec.getMimeType().equals(mimeType)) {
                    Ln.e("Video encoder type for \"" + encoderName + "\" (" + mimeType + ") does not match codec type (" + codec.getMimeType() + ")");
                    throw new ConfigurationException("Incorrect encoder type: " + encoderName);
                }
                return mediaCodec;
            } catch (IllegalArgumentException e) {
                Ln.e("Video encoder '" + encoderName + "' for " + codec.getName() + " not found\n" + LogUtils.buildVideoEncoderListMessage());
                throw new ConfigurationException("Unknown encoder: " + encoderName);
@@ -232,7 +249,7 @@ public class SurfaceEncoder implements AsyncProcessor {
        // must be present to configure the encoder, but does not impact the actual frame rate, which is variable
        format.setInteger(MediaFormat.KEY_FRAME_RATE, 60);
        format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) {
        if (Build.VERSION.SDK_INT >= AndroidVersions.API_24_ANDROID_7_0) {
            format.setInteger(MediaFormat.KEY_COLOR_RANGE, MediaFormat.COLOR_RANGE_LIMITED);
        }
        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, DEFAULT_I_FRAME_INTERVAL);
@@ -0,0 +1,116 @@
package com.genymobile.scrcpy.video;

import com.genymobile.scrcpy.util.Ln;

import android.graphics.SurfaceTexture;
import android.opengl.EGL14;
import android.opengl.EGLConfig;
import android.opengl.EGLContext;
import android.opengl.EGLDisplay;
import android.opengl.EGLSurface;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.view.Surface;

public class VideoFilter {
    private EGLDisplay eglDisplay;
    private EGLContext eglContext;
    private EGLSurface eglSurface;
    private SurfaceTexture surfaceTexture;
    private Surface inputSurface;
    private int textureId;

    public VideoFilter(Surface outputSurface) {
        eglDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
        if (eglDisplay == EGL14.EGL_NO_DISPLAY) {
            throw new RuntimeException("Unable to get EGL14 display");
        }

        int[] version = new int[2];
        if (!EGL14.eglInitialize(eglDisplay, version, 0, version, 1)) {
            throw new RuntimeException("Unable to initialize EGL14");
        }

        int[] attribList = {
                EGL14.EGL_RED_SIZE, 8,
                EGL14.EGL_GREEN_SIZE, 8,
                EGL14.EGL_BLUE_SIZE, 8,
                EGL14.EGL_ALPHA_SIZE, 8,
                EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
                EGL14.EGL_NONE
        };
        EGLConfig[] configs = new EGLConfig[1];
        int[] numConfigs = new int[1];
        EGL14.eglChooseConfig(eglDisplay, attribList, 0, configs, 0, configs.length, numConfigs, 0);
        if (numConfigs[0] <= 0) {
            throw new RuntimeException("Unable to find ES2 EGL config");
        }
        EGLConfig eglConfig = configs[0];

        int[] contextAttribList = {
                EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
                EGL14.EGL_NONE
        };
        eglContext = EGL14.eglCreateContext(eglDisplay, eglConfig, EGL14.EGL_NO_CONTEXT, contextAttribList, 0);
        if (eglContext == null) {
            throw new RuntimeException("Failed to create EGL context");
        }

        int[] surfaceAttribList = {
                EGL14.EGL_NONE
        };
        eglSurface = EGL14.eglCreateWindowSurface(eglDisplay, eglConfig, outputSurface, surfaceAttribList, 0);
        if (eglSurface == null) {
            throw new RuntimeException("Failed to create EGL window surface");
        }

        if (!EGL14.eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext)) {
            throw new RuntimeException("Failed to make EGL context current");
        }

        int[] textures = new int[1];
        GLES20.glGenTextures(1, textures, 0);
        textureId = textures[0];

        surfaceTexture = new SurfaceTexture(textureId);
        inputSurface = new Surface(surfaceTexture);

        surfaceTexture.setOnFrameAvailableListener(new SurfaceTexture.OnFrameAvailableListener() {
            @Override
            public void onFrameAvailable(SurfaceTexture surfaceTexture) {
                // XXX This should be called when the VirtualDisplay has rendered a new frame
                Ln.i("==== render");
                render();
            }
        });
    }

    public Surface getInputSurface() {
        return inputSurface;
    }

    public void render() {
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureId);

        // For now, just paint with a color
        GLES20.glClearColor(0.0f, 0.5f, 0.5f, 1.0f);
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);

        GLES20.glViewport(0, 0, 1920, 1080);

        EGL14.eglSwapBuffers(eglDisplay, eglSurface);
    }

    public void release() {
        if (eglDisplay != EGL14.EGL_NO_DISPLAY) {
            EGL14.eglDestroySurface(eglDisplay, eglSurface);
            EGL14.eglDestroyContext(eglDisplay, eglContext);
            EGL14.eglTerminate(eglDisplay);
        }
        eglDisplay = EGL14.EGL_NO_DISPLAY;
        eglContext = EGL14.EGL_NO_CONTEXT;
        eglSurface = EGL14.EGL_NO_SURFACE;
        surfaceTexture.release();
        inputSurface.release();
    }
}
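As the SurfaceEncoder hunk above shows, the filter sits between the encoder and the capture: the encoder's input surface becomes the filter's EGL output, and the capture renders into the filter's SurfaceTexture-backed surface. A minimal wiring sketch (simplified, no error handling):

    Surface codecSurface = mediaCodec.createInputSurface(); // encoder input
    VideoFilter filter = new VideoFilter(codecSurface);     // GL output goes to the encoder
    capture.start(filter.getInputSurface());                // the display is rendered into the filter
    // ... encode ...
    mediaCodec.stop();
    filter.release();                                       // destroys the EGL surfaces and context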
@@ -0,0 +1,7 @@
package com.genymobile.scrcpy.video;

import com.genymobile.scrcpy.control.PositionMapper;

public interface VirtualDisplayListener {
    void onNewVirtualDisplay(int displayId, PositionMapper positionMapper);
}
@@ -1,5 +1,6 @@
package com.genymobile.scrcpy.wrappers;

import com.genymobile.scrcpy.AndroidVersions;
import com.genymobile.scrcpy.FakeContext;
import com.genymobile.scrcpy.util.Ln;

@@ -7,7 +8,6 @@ import android.annotation.SuppressLint;
import android.annotation.TargetApi;
import android.content.Intent;
import android.os.Binder;
import android.os.Build;
import android.os.Bundle;
import android.os.IBinder;
import android.os.IInterface;
@@ -63,7 +63,7 @@ public final class ActivityManager {
        return removeContentProviderExternalMethod;
    }

    @TargetApi(Build.VERSION_CODES.Q)
    @TargetApi(AndroidVersions.API_29_ANDROID_10)
    private ContentProvider getContentProviderExternal(String name, IBinder token) {
        try {
            Method method = getGetContentProviderExternalMethod();
@@ -118,8 +118,12 @@ public final class ActivityManager {
        return startActivityAsUserMethod;
    }

    @SuppressWarnings("ConstantConditions")
    public int startActivity(Intent intent) {
        return startActivity(intent, null);
    }

    @SuppressWarnings("ConstantConditions")
    public int startActivity(Intent intent, Bundle options) {
        try {
            Method method = getStartActivityAsUserMethod();
            return (int) method.invoke(
@@ -133,7 +137,7 @@ public final class ActivityManager {
                    /* requestCode */ 0,
                    /* startFlags */ 0,
                    /* profilerInfo */ null,
                    /* bOptions */ null,
                    /* bOptions */ options,
                    /* userId */ /* UserHandle.USER_CURRENT */ -2);
        } catch (Throwable e) {
            Ln.e("Could not invoke method", e);
@@ -1,5 +1,6 @@
package com.genymobile.scrcpy.wrappers;

import com.genymobile.scrcpy.AndroidVersions;
import com.genymobile.scrcpy.FakeContext;
import com.genymobile.scrcpy.util.Ln;

@@ -36,7 +37,7 @@ public final class ClipboardManager {

    private Method getGetPrimaryClipMethod() throws NoSuchMethodException {
        if (getPrimaryClipMethod == null) {
            if (Build.VERSION.SDK_INT < Build.VERSION_CODES.Q) {
            if (Build.VERSION.SDK_INT < AndroidVersions.API_29_ANDROID_10) {
                getPrimaryClipMethod = manager.getClass().getMethod("getPrimaryClip", String.class);
                return getPrimaryClipMethod;
            }
@@ -99,7 +100,7 @@ public final class ClipboardManager {

    private Method getSetPrimaryClipMethod() throws NoSuchMethodException {
        if (setPrimaryClipMethod == null) {
            if (Build.VERSION.SDK_INT < Build.VERSION_CODES.Q) {
            if (Build.VERSION.SDK_INT < AndroidVersions.API_29_ANDROID_10) {
                setPrimaryClipMethod = manager.getClass().getMethod("setPrimaryClip", ClipData.class, String.class);
                return setPrimaryClipMethod;
            }
@@ -137,7 +138,7 @@ public final class ClipboardManager {
    }

    private static ClipData getPrimaryClip(Method method, int methodVersion, IInterface manager) throws ReflectiveOperationException {
        if (Build.VERSION.SDK_INT < Build.VERSION_CODES.Q) {
        if (Build.VERSION.SDK_INT < AndroidVersions.API_29_ANDROID_10) {
            return (ClipData) method.invoke(manager, FakeContext.PACKAGE_NAME);
        }

@@ -161,7 +162,7 @@ public final class ClipboardManager {
    }

    private static void setPrimaryClip(Method method, int methodVersion, IInterface manager, ClipData clipData) throws ReflectiveOperationException {
        if (Build.VERSION.SDK_INT < Build.VERSION_CODES.Q) {
        if (Build.VERSION.SDK_INT < AndroidVersions.API_29_ANDROID_10) {
            method.invoke(manager, clipData, FakeContext.PACKAGE_NAME);
            return;
        }
@@ -210,7 +211,7 @@ public final class ClipboardManager {

    private static void addPrimaryClipChangedListener(Method method, int methodVersion, IInterface manager, IOnPrimaryClipChangedListener listener)
            throws ReflectiveOperationException {
        if (Build.VERSION.SDK_INT < Build.VERSION_CODES.Q) {
        if (Build.VERSION.SDK_INT < AndroidVersions.API_29_ANDROID_10) {
            method.invoke(manager, listener, FakeContext.PACKAGE_NAME);
            return;
        }
@@ -230,7 +231,7 @@ public final class ClipboardManager {

    private Method getAddPrimaryClipChangedListener() throws NoSuchMethodException {
        if (addPrimaryClipChangedListener == null) {
            if (Build.VERSION.SDK_INT < Build.VERSION_CODES.Q) {
            if (Build.VERSION.SDK_INT < AndroidVersions.API_29_ANDROID_10) {
                addPrimaryClipChangedListener = manager.getClass()
                        .getMethod("addPrimaryClipChangedListener", IOnPrimaryClipChangedListener.class, String.class);
            } else {
@@ -1,5 +1,6 @@
package com.genymobile.scrcpy.wrappers;

import com.genymobile.scrcpy.AndroidVersions;
import com.genymobile.scrcpy.FakeContext;
import com.genymobile.scrcpy.util.Ln;
import com.genymobile.scrcpy.util.SettingsException;
@@ -51,7 +52,7 @@ public final class ContentProvider implements Closeable {
    @SuppressLint("PrivateApi")
    private Method getCallMethod() throws NoSuchMethodException {
        if (callMethod == null) {
            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.S) {
            if (Build.VERSION.SDK_INT >= AndroidVersions.API_31_ANDROID_12) {
                callMethod = provider.getClass().getMethod("call", AttributionSource.class, String.class, String.class, String.class, Bundle.class);
                callMethodVersion = 0;
            } else {
@@ -79,7 +80,7 @@ public final class ContentProvider implements Closeable {
        Method method = getCallMethod();
        Object[] args;

        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.S && callMethodVersion == 0) {
        if (Build.VERSION.SDK_INT >= AndroidVersions.API_31_ANDROID_12 && callMethodVersion == 0) {
            args = new Object[]{FakeContext.get().getAttributionSource(), "settings", callMethod, arg, extras};
        } else {
            switch (callMethodVersion) {
@@ -1,16 +1,16 @@
package com.genymobile.scrcpy.wrappers;

import com.genymobile.scrcpy.AndroidVersions;
import com.genymobile.scrcpy.util.Ln;

import android.annotation.SuppressLint;
import android.annotation.TargetApi;
import android.os.Build;
import android.os.IBinder;

import java.lang.reflect.Method;

@SuppressLint({"PrivateApi", "SoonBlockedPrivateApi", "BlockedPrivateApi"})
@TargetApi(Build.VERSION_CODES.UPSIDE_DOWN_CAKE)
@TargetApi(AndroidVersions.API_34_ANDROID_14)
public final class DisplayControl {

    private static final Class<?> CLASS;
@@ -1,24 +1,53 @@
package com.genymobile.scrcpy.wrappers;

import com.genymobile.scrcpy.util.Command;
import com.genymobile.scrcpy.AndroidVersions;
import com.genymobile.scrcpy.FakeContext;
import com.genymobile.scrcpy.device.DisplayInfo;
import com.genymobile.scrcpy.util.Ln;
import com.genymobile.scrcpy.device.Size;
import com.genymobile.scrcpy.util.Command;
import com.genymobile.scrcpy.util.Ln;

import android.annotation.SuppressLint;
import android.annotation.TargetApi;
import android.content.Context;
import android.hardware.display.VirtualDisplay;
import android.os.Handler;
import android.view.Display;
import android.view.Surface;

import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

@SuppressLint("PrivateApi,DiscouragedPrivateApi")
public final class DisplayManager {

    // android.hardware.display.DisplayManager.EVENT_FLAG_DISPLAY_CHANGED
    public static final long EVENT_FLAG_DISPLAY_CHANGED = 1L << 2;

    public interface DisplayListener {
        /**
         * Called whenever the properties of a logical {@link android.view.Display},
         * such as size and density, have changed.
         *
         * @param displayId The id of the logical display that changed.
         */
        void onDisplayChanged(int displayId);
    }

    public static final class DisplayListenerHandle {
        private final Object displayListenerProxy;

        private DisplayListenerHandle(Object displayListenerProxy) {
            this.displayListenerProxy = displayListenerProxy;
        }
    }

    private final Object manager; // instance of hidden class android.hardware.display.DisplayManagerGlobal
    private Method createVirtualDisplayMethod;
    private Method requestDisplayPowerMethod;

    static DisplayManager create() {
        try {
@@ -39,7 +68,7 @@ public final class DisplayManager {
    public static DisplayInfo parseDisplayInfo(String dumpsysDisplayOutput, int displayId) {
        Pattern regex = Pattern.compile(
                "^    mOverrideDisplayInfo=DisplayInfo\\{\".*?, displayId " + displayId + ".*?(, FLAG_.*)?, real ([0-9]+) x ([0-9]+).*?, "
                        + "rotation ([0-9]+).*?, layerStack ([0-9]+)",
                        + "rotation ([0-9]+).*?, density ([0-9]+).*?, layerStack ([0-9]+)",
                Pattern.MULTILINE);
        Matcher m = regex.matcher(dumpsysDisplayOutput);
        if (!m.find()) {
@@ -49,9 +78,10 @@ public final class DisplayManager {
        int width = Integer.parseInt(m.group(2));
        int height = Integer.parseInt(m.group(3));
        int rotation = Integer.parseInt(m.group(4));
        int layerStack = Integer.parseInt(m.group(5));
        int density = Integer.parseInt(m.group(5));
        int layerStack = Integer.parseInt(m.group(6));

        return new DisplayInfo(displayId, new Size(width, height), rotation, layerStack, flags);
        return new DisplayInfo(displayId, new Size(width, height), rotation, layerStack, flags, density);
    }

    private static DisplayInfo getDisplayInfoFromDumpsysDisplay(int displayId) {
@@ -98,7 +128,8 @@ public final class DisplayManager {
            int rotation = cls.getDeclaredField("rotation").getInt(displayInfo);
            int layerStack = cls.getDeclaredField("layerStack").getInt(displayInfo);
            int flags = cls.getDeclaredField("flags").getInt(displayInfo);
            return new DisplayInfo(displayId, new Size(width, height), rotation, layerStack, flags);
            int dpi = cls.getDeclaredField("logicalDensityDpi").getInt(displayInfo);
            return new DisplayInfo(displayId, new Size(width, height), rotation, layerStack, flags, dpi);
        } catch (ReflectiveOperationException e) {
            throw new AssertionError(e);
        }
@@ -124,4 +155,76 @@ public final class DisplayManager {
        Method method = getCreateVirtualDisplayMethod();
        return (VirtualDisplay) method.invoke(null, name, width, height, displayIdToMirror, surface);
    }

    public VirtualDisplay createNewVirtualDisplay(String name, int width, int height, int dpi, Surface surface, int flags) throws Exception {
        Constructor<android.hardware.display.DisplayManager> ctor = android.hardware.display.DisplayManager.class.getDeclaredConstructor(
                Context.class);
        ctor.setAccessible(true);
        android.hardware.display.DisplayManager dm = ctor.newInstance(FakeContext.get());
        return dm.createVirtualDisplay(name, width, height, dpi, surface, flags);
    }

    private Method getRequestDisplayPowerMethod() throws NoSuchMethodException {
        if (requestDisplayPowerMethod == null) {
            requestDisplayPowerMethod = manager.getClass().getMethod("requestDisplayPower", int.class, boolean.class);
        }
        return requestDisplayPowerMethod;
    }

    @TargetApi(AndroidVersions.API_35_ANDROID_15)
    public boolean requestDisplayPower(int displayId, boolean on) {
        try {
            Method method = getRequestDisplayPowerMethod();
            return (boolean) method.invoke(manager, displayId, on);
        } catch (ReflectiveOperationException e) {
            Ln.e("Could not invoke method", e);
            return false;
        }
    }

    public DisplayListenerHandle registerDisplayListener(DisplayListener listener, Handler handler) {
        try {
            Class<?> displayListenerClass = Class.forName("android.hardware.display.DisplayManager$DisplayListener");
            Object displayListenerProxy = Proxy.newProxyInstance(
                    ClassLoader.getSystemClassLoader(),
                    new Class[] {displayListenerClass},
                    (proxy, method, args) -> {
                        if ("onDisplayChanged".equals(method.getName())) {
                            listener.onDisplayChanged((int) args[0]);
                        }
                        return null;
                    });
            try {
                manager.getClass()
                        .getMethod("registerDisplayListener", displayListenerClass, Handler.class, long.class, String.class)
                        .invoke(manager, displayListenerProxy, handler, EVENT_FLAG_DISPLAY_CHANGED, FakeContext.PACKAGE_NAME);
            } catch (NoSuchMethodException e) {
                try {
                    manager.getClass()
                            .getMethod("registerDisplayListener", displayListenerClass, Handler.class, long.class)
                            .invoke(manager, displayListenerProxy, handler, EVENT_FLAG_DISPLAY_CHANGED);
                } catch (NoSuchMethodException e2) {
                    manager.getClass()
                            .getMethod("registerDisplayListener", displayListenerClass, Handler.class)
                            .invoke(manager, displayListenerProxy, handler);
                }
            }

            return new DisplayListenerHandle(displayListenerProxy);
        } catch (Exception e) {
            // Rotation and screen size won't be updated, not a fatal error
            Ln.e("Could not register display listener", e);
        }

        return null;
    }

    public void unregisterDisplayListener(DisplayListenerHandle listener) {
        try {
            Class<?> displayListenerClass = Class.forName("android.hardware.display.DisplayManager$DisplayListener");
            manager.getClass().getMethod("unregisterDisplayListener", displayListenerClass).invoke(manager, listener.displayListenerProxy);
        } catch (Exception e) {
            Ln.e("Could not unregister display listener", e);
        }
    }
}
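Typical use of the listener wrapper, as in the ScreenCapture change above (a sketch only; the handle may be null if registration failed, so callers must check it before unregistering):

    HandlerThread thread = new HandlerThread("DisplayListener");
    thread.start();
    DisplayManager.DisplayListenerHandle handle = ServiceManager.getDisplayManager()
            .registerDisplayListener(displayId -> { /* react to size/rotation changes */ }, new Handler(thread.getLooper()));
    // ...
    if (handle != null) {
        ServiceManager.getDisplayManager().unregisterDisplayListener(handle);
    }
    thread.quitSafely();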
@@ -2,8 +2,6 @@ package com.genymobile.scrcpy.wrappers;

import com.genymobile.scrcpy.util.Ln;

import android.annotation.SuppressLint;
import android.os.Build;
import android.os.IInterface;

import java.lang.reflect.Method;
@@ -23,9 +21,7 @@ public final class PowerManager {

    private Method getIsScreenOnMethod() throws NoSuchMethodException {
        if (isScreenOnMethod == null) {
            @SuppressLint("ObsoleteSdkInt") // we may lower minSdkVersion in the future
            String methodName = Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT_WATCH ? "isInteractive" : "isScreenOn";
            isScreenOnMethod = manager.getClass().getMethod(methodName);
            isScreenOnMethod = manager.getClass().getMethod("isInteractive");
        }
        return isScreenOnMethod;
    }
@@ -1,5 +1,6 @@
package com.genymobile.scrcpy.wrappers;

import com.genymobile.scrcpy.AndroidVersions;
import com.genymobile.scrcpy.util.Ln;

import android.annotation.SuppressLint;
@@ -83,9 +84,9 @@ public final class SurfaceControl {

    private static Method getGetBuiltInDisplayMethod() throws NoSuchMethodException {
        if (getBuiltInDisplayMethod == null) {
            // the method signature has changed in Android Q
            // the method signature has changed in Android 10
            // <https://github.com/Genymobile/scrcpy/issues/586>
            if (Build.VERSION.SDK_INT < Build.VERSION_CODES.Q) {
            if (Build.VERSION.SDK_INT < AndroidVersions.API_29_ANDROID_10) {
                getBuiltInDisplayMethod = CLASS.getMethod("getBuiltInDisplay", int.class);
            } else {
                getBuiltInDisplayMethod = CLASS.getMethod("getInternalDisplayToken");
@@ -106,7 +107,7 @@ public final class SurfaceControl {
    public static IBinder getBuiltInDisplay() {
        try {
            Method method = getGetBuiltInDisplayMethod();
            if (Build.VERSION.SDK_INT < Build.VERSION_CODES.Q) {
            if (Build.VERSION.SDK_INT < AndroidVersions.API_29_ANDROID_10) {
                // call getBuiltInDisplay(0)
                return (IBinder) method.invoke(null, 0);
            }
@@ -1,5 +1,6 @@
package com.genymobile.scrcpy.wrappers;

import com.genymobile.scrcpy.AndroidVersions;
import com.genymobile.scrcpy.util.Ln;

import android.annotation.TargetApi;
@@ -200,13 +201,29 @@ public final class WindowManager {
        }
    }

    @TargetApi(29)
    public void unregisterRotationWatcher(IRotationWatcher rotationWatcher) {
        try {
            manager.getClass().getMethod("removeRotationWatcher", IRotationWatcher.class).invoke(manager, rotationWatcher);
        } catch (Exception e) {
            Ln.e("Could not unregister rotation watcher", e);
        }
    }

    @TargetApi(AndroidVersions.API_29_ANDROID_10)
    public void registerDisplayFoldListener(IDisplayFoldListener foldListener) {
        try {
            Class<?> cls = manager.getClass();
            cls.getMethod("registerDisplayFoldListener", IDisplayFoldListener.class).invoke(manager, foldListener);
            manager.getClass().getMethod("registerDisplayFoldListener", IDisplayFoldListener.class).invoke(manager, foldListener);
        } catch (Exception e) {
            Ln.e("Could not register display fold listener", e);
        }
    }

    @TargetApi(AndroidVersions.API_29_ANDROID_10)
    public void unregisterDisplayFoldListener(IDisplayFoldListener foldListener) {
        try {
            manager.getClass().getMethod("unregisterDisplayFoldListener", IDisplayFoldListener.class).invoke(manager, foldListener);
        } catch (Exception e) {
            Ln.e("Could not unregister display fold listener", e);
        }
    }
}
@@ -1,7 +1,5 @@
package com.genymobile.scrcpy.control;

import com.genymobile.scrcpy.device.Device;

import android.view.KeyEvent;
import android.view.MotionEvent;
import org.junit.Assert;
@@ -285,19 +283,19 @@ public class ControlMessageReaderTest {
    }

    @Test
    public void testParseSetScreenPowerMode() throws IOException {
    public void testParseSetDisplayPower() throws IOException {
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        DataOutputStream dos = new DataOutputStream(bos);
        dos.writeByte(ControlMessage.TYPE_SET_SCREEN_POWER_MODE);
        dos.writeByte(Device.POWER_MODE_NORMAL);
        dos.writeByte(ControlMessage.TYPE_SET_DISPLAY_POWER);
        dos.writeBoolean(true);
        byte[] packet = bos.toByteArray();

        ByteArrayInputStream bis = new ByteArrayInputStream(packet);
        ControlMessageReader reader = new ControlMessageReader(bis);

        ControlMessage event = reader.read();
        Assert.assertEquals(ControlMessage.TYPE_SET_SCREEN_POWER_MODE, event.getType());
        Assert.assertEquals(Device.POWER_MODE_NORMAL, event.getAction());
        Assert.assertEquals(ControlMessage.TYPE_SET_DISPLAY_POWER, event.getType());
        Assert.assertTrue(event.getOn());

        Assert.assertEquals(-1, bis.read()); // EOS
    }
@@ -399,6 +397,27 @@ public class ControlMessageReaderTest {
        Assert.assertEquals(-1, bis.read()); // EOS
    }

    @Test
    public void testParseStartApp() throws IOException {
        byte[] name = "firefox".getBytes(StandardCharsets.UTF_8);

        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        DataOutputStream dos = new DataOutputStream(bos);
        dos.writeByte(ControlMessage.TYPE_START_APP);
        dos.writeByte(name.length);
        dos.write(name);
        byte[] packet = bos.toByteArray();

        ByteArrayInputStream bis = new ByteArrayInputStream(packet);
        ControlMessageReader reader = new ControlMessageReader(bis);

        ControlMessage event = reader.read();
        Assert.assertEquals(ControlMessage.TYPE_START_APP, event.getType());
        Assert.assertEquals("firefox", event.getText());

        Assert.assertEquals(-1, bis.read()); // EOS
    }

    @Test
    public void testMultiEvents() throws IOException {
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
|
Reference in New Issue
Block a user