Compare commits
140 Commits
android-fr...camera.26
SHA1
e3942b9a78
c0e1a4152f
5834657dae
5f30a68297
8735b388da
8e9fa773c5
75e58b3101
548858bfdb
5ed63314a5
8c96ab130e
c191e9a46e
e30b7575b2
c79e0f6989
b7ad652a75
76a99a7fcd
68b55ef2fe
bc8913e12b
3c2013de10
8cef8bac94
0bbe8a7007
9fdb882509
8e7b041f35
9ade389069
90ba885547
7adf98e9d4
1c864a88eb
1650b7c058
a7c3c9a54c
111d02fca4
36670dda40
0983f0a194
110b3a16f6
1ee46970e3
fcdf847dd3
ad05a01800
328ed3650d
c14668b177
637f48f360
d391fc3b69
75ad925423
7e936fa879
01d785d9a3
fe6e9acb36
625934fb1b
85b55b3c4e
7b7076ef85
808bd14e30
0049b3ce07
5764f47fee
2dab1f7024
744312ec64
b9315620e2
ea59d525bd
0ffcfa0f5c
c0f3c080b6
d046678f85
fae3fbc934
5061b7e02c
09009c2aa7
fb21bbf763
0f1afff7a6
48a00fb481
3b7e2ca9c8
5bd7514871
d3c2955fb9
5042f8de93
7536f95d1c
6832e8d629
28313631e5
fdbc9397a7
4ad7479425
a3cdf1a6b8
b16d4d1835
b8d43866d2
2d79aeb117
888a5aae7d
323ea2f1d9
9ca554ca41
9d3c656414
379caf8551
2aec7b4c9d
fc52b24503
ff5ffc892f
360f2fea1e
24999d0d32
8e2c0d6407
9a2abba098
b2d860382f
4c4a03ebe1
798dfd240e
c4caa6b81d
1efbfe1175
751c09f47a
6ad46d70b8
f46758d1c5
e71f5358b3
a2c8910006
cab354102d
597d2ccc01
38900d7730
e926bf1fe8
6298ef095f
958f22490b
7d33798b40
d500550212
a166eee909
b11b363e8e
7321db6f28
d6bcde565f
98f4f4e68a
be86e14e05
8c650e53cd
e89e772c7c
feab87053a
751a3653a0
9c08eb79cb
92483fe11b
6928acdeac
cb20bcb16f
0f3af2d20b
c083a7cc90
9eb6591913
9cfea347d0
ce064fb5e0
afcdfc7fd7
051b74c883
2e532afd2b
fdf465851c
669e9a8d1e
f77e1c474e
8f0b38cc4f
a1e8a34001
00534b0b2d
21df2c240e
2d3059e1ab
478aece68f
55899c091e
d9a644df9c
45717733a1
6ad037ea04
FAQ.md (8 changed lines)

@@ -159,6 +159,8 @@ In developer options, enable:
> **USB debugging (Security settings)**
> _Allow granting permissions and simulating input via USB debugging_

Rebooting the device is necessary once this option is set.

[simulating input]: https://github.com/Genymobile/scrcpy/issues/70#issuecomment-373286323

@@ -168,12 +170,12 @@ The default text injection method is [limited to ASCII characters][text-input].
A trick allows to also inject some [accented characters][accented-characters],
but that's all. See [#37].

Since scrcpy v1.20, it is possible to simulate a [physical keyboard][hid] (HID).
It is also possible to simulate a [physical keyboard][hid] (HID).

[text-input]: https://github.com/Genymobile/scrcpy/issues?q=is%3Aopen+is%3Aissue+label%3Aunicode
[accented-characters]: https://blog.rom1v.com/2018/03/introducing-scrcpy/#handle-accented-characters
[#37]: https://github.com/Genymobile/scrcpy/issues/37
[hid]: README.md#physical-keyboard-simulation-hid
[hid]: doc/hid-otg.md

## Client issues

@@ -229,4 +231,4 @@ Translations of this FAQ in other languages are available in the [wiki].

[wiki]: https://github.com/Genymobile/scrcpy/wiki

Only this README file is guaranteed to be up-to-date.
Only this FAQ file is guaranteed to be up-to-date.

README.md (22 changed lines)

@@ -1,11 +1,11 @@
# scrcpy (v2.0)
# scrcpy (v2.1.1)

<img src="app/data/icon.svg" width="128" height="128" alt="scrcpy" align="right" />

_pronounced "**scr**een **c**o**py**"_

This application mirrors Android devices (video and audio) connected via
USB or [over TCP/IP](doc/device.md#tcpip-wireless), and allows to control the
USB or [over TCP/IP](doc/connection.md#tcpip-wireless), and allows to control the
device with the keyboard and the mouse of the computer. It does not require any
_root_ access. It works on _Linux_, _Windows_ and _macOS_.

@@ -30,7 +30,7 @@ Its features include:
- mirroring with [Android device screen off](doc/device.md#turn-screen-off)
- [copy-paste](doc/control.md#copy-paste) in both directions
- [configurable quality](doc/video.md)
- Android device [as a webcam (V4L2)](doc/v4l2.md) (Linux-only)
- Android device screen [as a webcam (V4L2)](doc/v4l2.md) (Linux-only)
- [physical keyboard/mouse simulation (HID)](doc/hid-otg.md)
- [OTG mode](doc/hid-otg.md#otg)
- and more…
@@ -39,7 +39,7 @@ Its features include:

The Android device requires at least API 21 (Android 5.0).

[Audio forwarding](doc/audio.md) is supported from API 30 (Android 11).
[Audio forwarding](doc/audio.md) is supported for API >= 30 (Android 11+).

Make sure you [enabled USB debugging][enable-adb] on your device(s).

@@ -47,10 +47,14 @@ Make sure you [enabled USB debugging][enable-adb] on your device(s).

On some devices, you also need to enable [an additional option][control] `USB
debugging (Security Settings)` (this is an item different from `USB debugging`)
to control it using a keyboard and mouse.
to control it using a keyboard and mouse. Rebooting the device is necessary once
this option is set.

[control]: https://github.com/Genymobile/scrcpy/issues/70#issuecomment-373286323

Note that USB debugging is not required to run scrcpy in [OTG
mode](doc/hid-otg.md#otg).

## Get the app

@@ -64,10 +68,11 @@ to control it using a keyboard and mouse.
The application provides a lot of features and configuration options. They are
documented in the following pages:

- [Device](doc/device.md)
- [Connection](doc/connection.md)
- [Video](doc/video.md)
- [Audio](doc/audio.md)
- [Control](doc/control.md)
- [Device](doc/device.md)
- [Window](doc/window.md)
- [Recording](doc/recording.md)
- [Tunnels](doc/tunnels.md)
@@ -113,7 +118,10 @@ For general questions or discussions, you can also use:
I'm [@rom1v](https://github.com/rom1v), the author and maintainer of _scrcpy_.

If you appreciate this application, you can [support my open source
work][donate].
work][donate]:
- [GitHub Sponsors](https://github.com/sponsors/rom1v)
- [Liberapay](https://liberapay.com/rom1v/)
- [PayPal](https://paypal.me/rom2v)

[donate]: https://blog.rom1v.com/about/#support-my-open-source-work

@@ -7,63 +7,78 @@ _scrcpy() {
--audio-codec=
--audio-codec-options=
--audio-encoder=
--audio-source=
--audio-output-buffer=
-b --video-bit-rate=
--camera-ar=
--camera-id=
--camera-facing=
--camera-size=
--crop=
-d --select-usb
--disable-screensaver
--display=
--display-id=
--display-buffer=
-e --select-tcpip
-f --fullscreen
--force-adb-forward
--forward-all-clicks
-f --fullscreen
-K --hid-keyboard
-h --help
--kill-adb-on-close
-K --hid-keyboard
--legacy-paste
--list-camera-sizes
--list-cameras
--list-displays
--list-encoders
--lock-video-orientation
--lock-video-orientation=
--max-fps=
-M --hid-mouse
-m --max-size=
-M --hid-mouse
--max-fps=
-n --no-control
-N --no-playback
--no-audio
--no-audio-playback
--no-cleanup
--no-clipboard-autosync
--no-downsize-on-error
-n --no-control
-N --no-display
--no-key-repeat
--no-mipmaps
--no-power-on
--no-video
--no-video-playback
--otg
-p --port=
--pause-on-exit
--pause-on-exit=
--power-off-on-close
--prefer-text
--print-fps
--push-target=
--raw-key-events
-r --record=
--raw-key-events
--record-format=
--render-driver=
--require-audio
--rotation=
-s --serial=
--shortcut-mod=
-S --turn-screen-off
--shortcut-mod=
-t --show-touches
--tcpip
--tcpip=
--time-limit=
--tunnel-host=
--tunnel-port=
--v4l2-buffer=
--v4l2-sink=
-V --verbosity=
-v --version
-V --verbosity=
--video-codec=
--video-codec-options=
--video-encoder=
--video-source=
-w --stay-awake
--window-borderless
--window-title=
@@ -83,10 +98,26 @@ _scrcpy() {
COMPREPLY=($(compgen -W 'opus aac raw' -- "$cur"))
return
;;
--video-source)
COMPREPLY=($(compgen -W 'display camera' -- "$cur"))
return
;;
--audio-source)
COMPREPLY=($(compgen -W 'output mic' -- "$cur"))
return
;;
--camera-facing)
COMPREPLY=($(compgen -W 'front back external' -- "$cur"))
return
;;
--lock-video-orientation)
COMPREPLY=($(compgen -W 'unlocked initial 0 1 2 3' -- "$cur"))
return
;;
--pause-on-exit)
COMPREPLY=($(compgen -W 'true false if-error' -- "$cur"))
return
;;
-r|--record)
COMPREPLY=($(compgen -f -- "$cur"))
return
@@ -123,8 +154,11 @@ _scrcpy() {
|--audio-codec-options \
|--audio-encoder \
|--audio-output-buffer \
|--camera-ar \
|--camera-id \
|--camera-size \
|--crop \
|--display \
|--display-id \
|--display-buffer \
|--max-fps \
|-m|--max-size \

@@ -1,4 +1,2 @@
@echo off
scrcpy.exe %*
:: if the exit code is >= 1, then pause
if errorlevel 1 pause
scrcpy.exe --pause-on-exit=if-error %*

@@ -5,7 +5,7 @@ Comment=Display and control your Android device
# For some users, the PATH or ADB environment variables are set from the shell
# startup file, like .bashrc or .zshrc… Run an interactive shell to get
# environment correctly initialized.
Exec=/bin/bash --norc --noprofile -i -c "\"\\$SHELL\" -i -c scrcpy || read -p 'Press Enter to quit...'"
Exec=/bin/sh -c "\"\\$SHELL\" -i -c scrcpy --pause-on-exit=if-error"
Icon=scrcpy
Terminal=true
Type=Application

@@ -14,61 +14,75 @@ arguments=(
'--audio-codec=[Select the audio codec]:codec:(opus aac raw)'
'--audio-codec-options=[Set a list of comma-separated key\:type=value options for the device audio encoder]'
'--audio-encoder=[Use a specific MediaCodec audio encoder]'
'--audio-source=[Select the audio source]:source:(output mic)'
'--audio-output-buffer=[Configure the size of the SDL audio output buffer (in milliseconds)]'
{-b,--video-bit-rate=}'[Encode the video at the given bit-rate]'
'--camera-ar=[Select the camera size by its aspect ratio]'
'--camera-id=[Specify the camera id to mirror]'
'--camera-facing=[Select the device camera by its facing direction]:facing:(front back external)'
'--camera-size=[Specify an explicit camera capture size]'
'--crop=[\[width\:height\:x\:y\] Crop the device screen on the server]'
{-d,--select-usb}'[Use USB device]'
'--disable-screensaver[Disable screensaver while scrcpy is running]'
'--display=[Specify the display id to mirror]'
'--display-id=[Specify the display id to mirror]'
'--display-buffer=[Add a buffering delay \(in milliseconds\) before displaying]'
{-e,--select-tcpip}'[Use TCP/IP device]'
{-f,--fullscreen}'[Start in fullscreen]'
'--force-adb-forward[Do not attempt to use \"adb reverse\" to connect to the device]'
'--forward-all-clicks[Forward clicks to device]'
{-f,--fullscreen}'[Start in fullscreen]'
{-K,--hid-keyboard}'[Simulate a physical keyboard by using HID over AOAv2]'
{-h,--help}'[Print the help]'
'--kill-adb-on-close[Kill adb when scrcpy terminates]'
{-K,--hid-keyboard}'[Simulate a physical keyboard by using HID over AOAv2]'
'--legacy-paste[Inject computer clipboard text as a sequence of key events on Ctrl+v]'
'--list-camera-sizes[List the valid camera capture sizes]'
'--list-cameras[List cameras available on the device]'
'--list-displays[List displays available on the device]'
'--list-encoders[List video and audio encoders available on the device]'
'--lock-video-orientation=[Lock video orientation]:orientation:(unlocked initial 0 1 2 3)'
'--max-fps=[Limit the frame rate of screen capture]'
{-M,--hid-mouse}'[Simulate a physical mouse by using HID over AOAv2]'
{-m,--max-size=}'[Limit both the width and height of the video to value]'
{-M,--hid-mouse}'[Simulate a physical mouse by using HID over AOAv2]'
'--max-fps=[Limit the frame rate of screen capture]'
{-n,--no-control}'[Disable device control \(mirror the device in read only\)]'
{-N,--no-playback}'[Disable video and audio playback]'
'--no-audio[Disable audio forwarding]'
'--no-audio-playback[Disable audio playback]'
'--no-cleanup[Disable device cleanup actions on exit]'
'--no-clipboard-autosync[Disable automatic clipboard synchronization]'
'--no-downsize-on-error[Disable lowering definition on MediaCodec error]'
{-n,--no-control}'[Disable device control \(mirror the device in read only\)]'
{-N,--no-display}'[Do not display device \(during screen recording or when V4L2 sink is enabled\)]'
'--no-key-repeat[Do not forward repeated key events when a key is held down]'
'--no-mipmaps[Disable the generation of mipmaps]'
'--no-power-on[Do not power on the device on start]'
'--no-video[Disable video forwarding]'
'--no-video-playback[Disable video playback]'
'--otg[Run in OTG mode \(simulating physical keyboard and mouse\)]'
{-p,--port=}'[\[port\[\:port\]\] Set the TCP port \(range\) used by the client to listen]'
'--pause-on-exit=[Make scrcpy pause before exiting]:mode:(true false if-error)'
'--power-off-on-close[Turn the device screen off when closing scrcpy]'
'--prefer-text[Inject alpha characters and space as text events instead of key events]'
'--print-fps[Start FPS counter, to print frame logs to the console]'
'--push-target=[Set the target directory for pushing files to the device by drag and drop]'
'--raw-key-events[Inject key events for all input keys, and ignore text events]'
{-r,--record=}'[Record screen to file]:record file:_files'
'--raw-key-events[Inject key events for all input keys, and ignore text events]'
'--record-format=[Force recording format]:format:(mp4 mkv)'
'--render-driver=[Request SDL to use the given render driver]:driver name:(direct3d opengl opengles2 opengles metal software)'
'--require-audio=[Make scrcpy fail if audio is enabled but does not work]'
'--rotation=[Set the initial display rotation]:rotation values:(0 1 2 3)'
{-s,--serial=}'[The device serial number \(mandatory for multiple devices only\)]:serial:($("${ADB-adb}" devices | awk '\''$2 == "device" {print $1}'\''))'
'--shortcut-mod=[\[key1,key2+key3,...\] Specify the modifiers to use for scrcpy shortcuts]:shortcut mod:(lctrl rctrl lalt ralt lsuper rsuper)'
{-S,--turn-screen-off}'[Turn the device screen off immediately]'
'--shortcut-mod=[\[key1,key2+key3,...\] Specify the modifiers to use for scrcpy shortcuts]:shortcut mod:(lctrl rctrl lalt ralt lsuper rsuper)'
{-t,--show-touches}'[Show physical touches]'
'--tcpip[\(optional \[ip\:port\]\) Configure and connect the device over TCP/IP]'
'--time-limit=[Set the maximum mirroring time, in seconds]'
'--tunnel-host=[Set the IP address of the adb tunnel to reach the scrcpy server]'
'--tunnel-port=[Set the TCP port of the adb tunnel to reach the scrcpy server]'
'--v4l2-buffer=[Add a buffering delay \(in milliseconds\) before pushing frames]'
'--v4l2-sink=[\[\/dev\/videoN\] Output to v4l2loopback device]'
{-V,--verbosity=}'[Set the log level]:verbosity:(verbose debug info warn error)'
{-v,--version}'[Print the version of scrcpy]'
{-V,--verbosity=}'[Set the log level]:verbosity:(verbose debug info warn error)'
'--video-codec=[Select the video codec]:codec:(h264 h265 av1)'
'--video-codec-options=[Set a list of comma-separated key\:type=value options for the device video encoder]'
'--video-encoder=[Use a specific MediaCodec video encoder]'
'--video-source=[Select the video source]:source:(display camera)'
{-w,--stay-awake}'[Keep the device on while scrcpy is running, when the device is plugged in]'
'--window-borderless[Disable window decorations \(display borderless window\)]'
'--window-title=[Set a custom window title]'

@@ -14,6 +14,7 @@ src = [
'src/delay_buffer.c',
'src/demuxer.c',
'src/device_msg.c',
'src/display.c',
'src/icon.c',
'src/file_pusher.c',
'src/fps_counter.c',
@@ -50,6 +51,7 @@ src = [
'src/util/term.c',
'src/util/thread.c',
'src/util/tick.c',
'src/util/timeout.c',
]

conf = configuration_data()

@@ -6,10 +6,10 @@ cd "$DIR"
mkdir -p "$PREBUILT_DATA_DIR"
cd "$PREBUILT_DATA_DIR"

DEP_DIR=platform-tools-34.0.1
DEP_DIR=platform-tools-34.0.3

FILENAME=platform-tools_r34.0.1-windows.zip
SHA256SUM=5dd9c2be744c224fa3a7cbe30ba02d2cb378c763bd0f797a7e47e9f3156a5daa
FILENAME=platform-tools_r34.0.3-windows.zip
SHA256SUM=fce992e93eb786fc9f47df93d83a7b912c46742d45c39d712c02e06d05b72e2b

if [[ -d "$DEP_DIR" ]]
then

@@ -6,11 +6,11 @@ cd "$DIR"
mkdir -p "$PREBUILT_DATA_DIR"
cd "$PREBUILT_DATA_DIR"

VERSION=6.0-scrcpy-2
VERSION=6.0-scrcpy-4
DEP_DIR="ffmpeg-$VERSION"

FILENAME="$DEP_DIR".7z
SHA256SUM=98ef97f8607c97a5c4f9c5a0a991b78f105d002a3619145011d16ffb92501b14
SHA256SUM=39274b321491ce83e76cab5d24e7cbe3f402d3ccf382f739b13be5651c146b60

if [[ -d "$DEP_DIR" ]]
then

@@ -6,10 +6,10 @@ cd "$DIR"
mkdir -p "$PREBUILT_DATA_DIR"
cd "$PREBUILT_DATA_DIR"

DEP_DIR=SDL2-2.26.4
DEP_DIR=SDL2-2.28.0

FILENAME=SDL2-devel-2.26.4-mingw.tar.gz
SHA256SUM=fe899c8642caac2f180b1ee6f786857ddcaa0adc1fa82474312b09dd47d74712
FILENAME=SDL2-devel-2.28.0-mingw.tar.gz
SHA256SUM=b91ce59eeacd4a9db403f976fd2337d9360b21ada374124417d716065c380e20

if [[ -d "$DEP_DIR" ]]
then

@@ -13,7 +13,7 @@ BEGIN
VALUE "LegalCopyright", "Romain Vimont, Genymobile"
VALUE "OriginalFilename", "scrcpy.exe"
VALUE "ProductName", "scrcpy"
VALUE "ProductVersion", "2.0"
VALUE "ProductVersion", "2.1.1"
END
END
BLOCK "VarFileInfo"

app/scrcpy.1 (174 changed lines)

@ -21,7 +21,7 @@ Make scrcpy window always on top (above other windows).
|
||||
|
||||
.TP
|
||||
.BI "\-\-audio\-bit\-rate " value
|
||||
Encode the audio at the given bit\-rate, expressed in bits/s. Unit suffixes are supported: '\fBK\fR' (x1000) and '\fBM\fR' (x1000000).
|
||||
Encode the audio at the given bit rate, expressed in bits/s. Unit suffixes are supported: '\fBK\fR' (x1000) and '\fBM\fR' (x1000000).
|
||||
|
||||
Default is 128K (128000).
|
||||
|
||||
@ -33,14 +33,6 @@ Lower values decrease the latency, but increase the likelyhood of buffer underru
|
||||
|
||||
Default is 50.
|
||||
|
||||
.TP
|
||||
.BI "\-\-audio\-output\-buffer ms
|
||||
Configure the size of the SDL audio output buffer (in milliseconds).
|
||||
|
||||
If you get "robotic" audio playback, you should test with a higher value (10). Do not change this setting otherwise.
|
||||
|
||||
Default is 5.
|
||||
|
||||
.TP
|
||||
.BI "\-\-audio\-codec " name
|
||||
Select an audio codec (opus, aac or raw).
|
||||
@ -63,12 +55,48 @@ Use a specific MediaCodec audio encoder (depending on the codec provided by \fB\
|
||||
|
||||
The available encoders can be listed by \-\-list\-encoders.
|
||||
|
||||
.TP
|
||||
.BI "\-\-audio\-source " source
|
||||
Select the audio source (output or mic).
|
||||
|
||||
Default is output.
|
||||
|
||||
.TP
|
||||
.BI "\-\-audio\-output\-buffer ms
|
||||
Configure the size of the SDL audio output buffer (in milliseconds).
|
||||
|
||||
If you get "robotic" audio playback, you should test with a higher value (10). Do not change this setting otherwise.
|
||||
|
||||
Default is 5.
|
||||
|
||||
.TP
|
||||
.BI "\-b, \-\-video\-bit\-rate " value
|
||||
Encode the video at the given bit\-rate, expressed in bits/s. Unit suffixes are supported: '\fBK\fR' (x1000) and '\fBM\fR' (x1000000).
|
||||
Encode the video at the given bit rate, expressed in bits/s. Unit suffixes are supported: '\fBK\fR' (x1000) and '\fBM\fR' (x1000000).
|
||||
|
||||
Default is 8M (8000000).
|
||||
|
||||
.TP
|
||||
.BI "\-\-camera\-ar " ar
|
||||
Select the camera size by its aspect ratio (+/- 10%).
|
||||
|
||||
Possible values are "sensor" (use the camera sensor aspect ratio), "<num>:<den>" (e.g. "4:3") and "<value>" (e.g. "1.6").
|
||||
|
||||
.TP
|
||||
.BI "\-\-camera\-id " id
|
||||
Specify the device camera id to mirror.
|
||||
|
||||
The available camera ids can be listed by \-\-list\-cameras.
|
||||
|
||||
.TP
|
||||
.BI "\-\-camera\-facing " facing
|
||||
Select the device camera by its facing direction.
|
||||
|
||||
Possible values are "front", "back" and "external".
|
||||
|
||||
.TP
|
||||
.BI "\-\-camera\-size " width\fRx\fIheight
|
||||
Specify an explicit camera capture size.
|
||||
|
||||
.TP
|
||||
.BI "\-\-crop " width\fR:\fIheight\fR:\fIx\fR:\fIy
|
||||
Crop the device screen on the server.
|
||||
@ -88,7 +116,7 @@ Also see \fB\-e\fR (\fB\-\-select\-tcpip\fR).
|
||||
Disable screensaver while scrcpy is running.
|
||||
|
||||
.TP
|
||||
.BI "\-\-display " id
|
||||
.BI "\-\-display\-id " id
|
||||
Specify the device display id to mirror.
|
||||
|
||||
The available display ids can be listed by \-\-list\-displays.
|
||||
@ -107,6 +135,10 @@ Use TCP/IP device (if there is exactly one, like adb -e).
|
||||
|
||||
Also see \fB\-d\fR (\fB\-\-select\-usb\fR).
|
||||
|
||||
.TP
|
||||
.B \-f, \-\-fullscreen
|
||||
Start in fullscreen.
|
||||
|
||||
.TP
|
||||
.B \-\-force\-adb\-forward
|
||||
Do not attempt to use "adb reverse" to connect to the device.
|
||||
@ -115,14 +147,14 @@ Do not attempt to use "adb reverse" to connect to the device.
|
||||
.B \-\-forward\-all\-clicks
|
||||
By default, right-click triggers BACK (or POWER on) and middle-click triggers HOME. This option disables these shortcuts and forward the clicks to the device instead.
|
||||
|
||||
.TP
|
||||
.B \-f, \-\-fullscreen
|
||||
Start in fullscreen.
|
||||
|
||||
.TP
|
||||
.B \-h, \-\-help
|
||||
Print this help.
|
||||
|
||||
.TP
|
||||
.B \-\-kill\-adb\-on\-close
|
||||
Kill adb when scrcpy terminates.
|
||||
|
||||
.TP
|
||||
.B \-K, \-\-hid\-keyboard
|
||||
Simulate a physical keyboard by using HID over AOAv2.
|
||||
@ -145,6 +177,12 @@ Inject computer clipboard text as a sequence of key events on Ctrl+v (like MOD+S
|
||||
|
||||
This is a workaround for some devices not behaving as expected when setting the device clipboard programmatically.
|
||||
|
||||
.B \-\-list\-camera\-sizes
|
||||
List the valid camera capture sizes.
|
||||
|
||||
.B \-\-list\-cameras
|
||||
List cameras available on the device.
|
||||
|
||||
.TP
|
||||
.B \-\-list\-encoders
|
||||
List video and audio encoders available on the device.
|
||||
@ -161,10 +199,6 @@ Default is "unlocked".
|
||||
|
||||
Passing the option without argument is equivalent to passing "initial".
|
||||
|
||||
.TP
|
||||
.BI "\-\-max\-fps " value
|
||||
Limit the framerate of screen capture (officially supported since Android 10, but may work on earlier versions).
|
||||
|
||||
.TP
|
||||
.BI "\-m, \-\-max\-size " value
|
||||
Limit both the width and height of the video to \fIvalue\fR. The other dimension is computed so that the device aspect\-ratio is preserved.
|
||||
@ -183,6 +217,26 @@ It may only work over USB.
|
||||
|
||||
Also see \fB\-\-hid\-keyboard\fR.
|
||||
|
||||
.TP
|
||||
.BI "\-\-max\-fps " value
|
||||
Limit the framerate of screen capture (officially supported since Android 10, but may work on earlier versions).
|
||||
|
||||
.TP
|
||||
.B \-n, \-\-no\-control
|
||||
Disable device control (mirror the device in read\-only).
|
||||
|
||||
.TP
|
||||
.B \-N, \-\-no\-playback
|
||||
Disable video and audio playback on the computer (equivalent to --no-video-playback --no-audio-playback).
|
||||
|
||||
.TP
|
||||
.B \-\-no\-audio
|
||||
Disable audio forwarding.
|
||||
|
||||
.TP
|
||||
.B \-\-no\-audio\-playback
|
||||
Disable audio playback on the computer.
|
||||
|
||||
.TP
|
||||
.B \-\-no\-cleanup
|
||||
By default, scrcpy removes the server binary from the device and restores the device state (show touches, stay awake and power mode) on exit.
|
||||
@ -201,14 +255,6 @@ By default, on MediaCodec error, scrcpy automatically tries again with a lower d
|
||||
|
||||
This option disables this behavior.
|
||||
|
||||
.TP
|
||||
.B \-n, \-\-no\-control
|
||||
Disable device control (mirror the device in read\-only).
|
||||
|
||||
.TP
|
||||
.B \-N, \-\-no\-display
|
||||
Do not display device (only when screen recording is enabled).
|
||||
|
||||
.TP
|
||||
.B \-\-no\-key\-repeat
|
||||
Do not forward repeated key events when a key is held down.
|
||||
@ -221,6 +267,14 @@ If the renderer is OpenGL 3.0+ or OpenGL ES 2.0+, then mipmaps are automatically
|
||||
.B \-\-no\-power\-on
|
||||
Do not power on the device on start.
|
||||
|
||||
.TP
|
||||
.B \-\-no\-video
|
||||
Disable video forwarding.
|
||||
|
||||
.TP
|
||||
.B \-\-no\-video\-playback
|
||||
Disable video playback on the computer.
|
||||
|
||||
.TP
|
||||
.B \-\-otg
|
||||
Run in OTG mode: simulate physical keyboard and mouse, as if the computer keyboard and mouse were plugged directly to the device via an OTG cable.
|
||||
@ -241,6 +295,16 @@ Set the TCP port (range) used by the client to listen.
|
||||
|
||||
Default is 27183:27199.
|
||||
|
||||
.TP
|
||||
\fB\-\-pause\-on\-exit\fR[=\fImode\fR]
|
||||
Configure pause on exit. Possible values are "true" (always pause on exit), "false" (never pause on exit) and "if-error" (pause only if an error occured).
|
||||
|
||||
This is useful to prevent the terminal window from automatically closing, so that error messages can be read.
|
||||
|
||||
Default is "false".
|
||||
|
||||
Passing the option without argument is equivalent to passing "true".
|
||||
|
||||
.TP
|
||||
.B \-\-power\-off\-on\-close
|
||||
Turn the device screen off when closing scrcpy.
|
||||
@ -262,10 +326,6 @@ Set the target directory for pushing files to the device by drag & drop. It is p
|
||||
|
||||
Default is "/sdcard/Download/".
|
||||
|
||||
.TP
|
||||
.B \-\-raw\-key\-events
|
||||
Inject key events for all input keys, and ignore text events.
|
||||
|
||||
.TP
|
||||
.BI "\-r, \-\-record " file
|
||||
Record screen to
|
||||
@ -275,6 +335,10 @@ The format is determined by the
|
||||
.B \-\-record\-format
|
||||
option if set, or by the file extension (.mp4 or .mkv).
|
||||
|
||||
.TP
|
||||
.B \-\-raw\-key\-events
|
||||
Inject key events for all input keys, and ignore text events.
|
||||
|
||||
.TP
|
||||
.BI "\-\-record\-format " format
|
||||
Force recording format (either mp4 or mkv).
|
||||
@ -300,6 +364,10 @@ Set the initial display rotation. Possibles values are 0, 1, 2 and 3. Each incre
|
||||
.BI "\-s, \-\-serial " number
|
||||
The device serial number. Mandatory only if several devices are connected to adb.
|
||||
|
||||
.TP
|
||||
.B \-S, \-\-turn\-screen\-off
|
||||
Turn the device screen off immediately.
|
||||
|
||||
.TP
|
||||
.BI "\-\-shortcut\-mod " key\fR[+...]][,...]
|
||||
Specify the modifiers to use for scrcpy shortcuts. Possible keys are "lctrl", "rctrl", "lalt", "ralt", "lsuper" and "rsuper".
|
||||
@ -310,6 +378,12 @@ For example, to use either LCtrl+LAlt or LSuper for scrcpy shortcuts, pass "lctr
|
||||
|
||||
Default is "lalt,lsuper" (left-Alt or left-Super).
|
||||
|
||||
.TP
|
||||
.B \-t, \-\-show\-touches
|
||||
Enable "show touches" on start, restore the initial value on exit.
|
||||
|
||||
It only shows physical touches (not clicks from scrcpy).
|
||||
|
||||
.TP
|
||||
.BI "\-\-tcpip\fR[=\fIip\fR[:\fIport\fR]]
|
||||
Configure and reconnect the device over TCP/IP.
|
||||
@ -319,14 +393,8 @@ If a destination address is provided, then scrcpy connects to this address befor
|
||||
If no destination address is provided, then scrcpy attempts to find the IP address and adb port of the current device (typically connected over USB), enables TCP/IP mode if necessary, then connects to this address before starting.
|
||||
|
||||
.TP
|
||||
.B \-S, \-\-turn\-screen\-off
|
||||
Turn the device screen off immediately.
|
||||
|
||||
.TP
|
||||
.B \-t, \-\-show\-touches
|
||||
Enable "show touches" on start, restore the initial value on exit.
|
||||
|
||||
It only shows physical touches (not clicks from scrcpy).
|
||||
.BI "\-\-time\-limit " seconds
|
||||
Set the maximum mirroring time, in seconds.
|
||||
|
||||
.TP
|
||||
.BI "\-\-tunnel\-host " ip
|
||||
@ -340,6 +408,16 @@ Set the TCP port of the adb tunnel to reach the scrcpy server. This option autom
|
||||
|
||||
Default is 0 (not forced): the local port used for establishing the tunnel will be used.
|
||||
|
||||
.TP
|
||||
.B \-v, \-\-version
|
||||
Print the version of scrcpy.
|
||||
|
||||
.TP
|
||||
.BI "\-V, \-\-verbosity " value
|
||||
Set the log level ("verbose", "debug", "info", "warn" or "error").
|
||||
|
||||
Default is "info" for release builds, "debug" for debug builds.
|
||||
|
||||
.TP
|
||||
.BI "\-\-v4l2-sink " /dev/videoN
|
||||
Output to v4l2loopback device.
|
||||
@ -354,16 +432,6 @@ This option is similar to \fB\-\-display\-buffer\fR, but specific to V4L2 sink.
|
||||
|
||||
Default is 0 (no buffering).
|
||||
|
||||
.TP
|
||||
.BI "\-V, \-\-verbosity " value
|
||||
Set the log level ("verbose", "debug", "info", "warn" or "error").
|
||||
|
||||
Default is "info" for release builds, "debug" for debug builds.
|
||||
|
||||
.TP
|
||||
.B \-v, \-\-version
|
||||
Print the version of scrcpy.
|
||||
|
||||
.TP
|
||||
.BI "\-\-video\-codec " name
|
||||
Select a video codec (h264, h265 or av1).
|
||||
@ -386,6 +454,14 @@ Use a specific MediaCodec video encoder (depending on the codec provided by \fB\
|
||||
|
||||
The available encoders can be listed by \-\-list\-encoders.
|
||||
|
||||
.TP
|
||||
.BI "\-\-video\-source " source
|
||||
Select the video source (display or camera).
|
||||
|
||||
Camera mirroring requires Android 12+.
|
||||
|
||||
Default is display.
|
||||
|
||||
.TP
|
||||
.B \-w, \-\-stay-awake
|
||||
Keep the device on while scrcpy is running, when the device is plugged in.
|
||||
|
@@ -70,7 +70,7 @@ argv_to_string(const char *const *argv, char *buf, size_t bufsize) {
}

static void
show_adb_installation_msg() {
show_adb_installation_msg(void) {
#ifndef __WINDOWS__
static const struct {
const char *binary;
@@ -218,8 +218,16 @@ sc_adb_forward(struct sc_intr *intr, const char *serial, uint16_t local_port,
const char *device_socket_name, unsigned flags) {
char local[4 + 5 + 1]; // tcp:PORT
char remote[108 + 14 + 1]; // localabstract:NAME
sprintf(local, "tcp:%" PRIu16, local_port);
snprintf(remote, sizeof(remote), "localabstract:%s", device_socket_name);

int r = snprintf(local, sizeof(local), "tcp:%" PRIu16, local_port);
assert(r >= 0 && (size_t) r < sizeof(local));

r = snprintf(remote, sizeof(remote), "localabstract:%s",
device_socket_name);
if (r < 0 || (size_t) r >= sizeof(remote)) {
LOGE("Could not write socket name");
return false;
}

assert(serial);
const char *const argv[] =
@@ -233,7 +241,9 @@ bool
sc_adb_forward_remove(struct sc_intr *intr, const char *serial,
uint16_t local_port, unsigned flags) {
char local[4 + 5 + 1]; // tcp:PORT
sprintf(local, "tcp:%" PRIu16, local_port);
int r = snprintf(local, sizeof(local), "tcp:%" PRIu16, local_port);
assert(r >= 0 && (size_t) r < sizeof(local));
(void) r;

assert(serial);
const char *const argv[] =
@@ -249,8 +259,16 @@ sc_adb_reverse(struct sc_intr *intr, const char *serial,
unsigned flags) {
char local[4 + 5 + 1]; // tcp:PORT
char remote[108 + 14 + 1]; // localabstract:NAME
sprintf(local, "tcp:%" PRIu16, local_port);
snprintf(remote, sizeof(remote), "localabstract:%s", device_socket_name);
int r = snprintf(local, sizeof(local), "tcp:%" PRIu16, local_port);
assert(r >= 0 && (size_t) r < sizeof(local));

r = snprintf(remote, sizeof(remote), "localabstract:%s",
device_socket_name);
if (r < 0 || (size_t) r >= sizeof(remote)) {
LOGE("Could not write socket name");
return false;
}

assert(serial);
const char *const argv[] =
SC_ADB_COMMAND("-s", serial, "reverse", remote, local);
@@ -263,7 +281,12 @@ bool
sc_adb_reverse_remove(struct sc_intr *intr, const char *serial,
const char *device_socket_name, unsigned flags) {
char remote[108 + 14 + 1]; // localabstract:NAME
snprintf(remote, sizeof(remote), "localabstract:%s", device_socket_name);
int r = snprintf(remote, sizeof(remote), "localabstract:%s",
device_socket_name);
if (r < 0 || (size_t) r >= sizeof(remote)) {
LOGE("Device socket name too long");
return false;
}

assert(serial);
const char *const argv[] =
@@ -333,7 +356,9 @@ bool
sc_adb_tcpip(struct sc_intr *intr, const char *serial, uint16_t port,
unsigned flags) {
char port_string[5 + 1];
sprintf(port_string, "%" PRIu16, port);
int r = snprintf(port_string, sizeof(port_string), "%" PRIu16, port);
assert(r >= 0 && (size_t) r < sizeof(port_string));
(void) r;

assert(serial);
const char *const argv[] =
@@ -628,8 +653,8 @@ sc_adb_select_device(struct sc_intr *intr,
return false;
}

LOGD("ADB device found:");
sc_adb_devices_log(SC_LOG_LEVEL_DEBUG, vec.data, vec.size);
LOGI("ADB device found:");
sc_adb_devices_log(SC_LOG_LEVEL_INFO, vec.data, vec.size);

// Move devics into out_device (do not destroy device)
sc_adb_device_move(out_device, device);

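The hunks above replace unchecked sprintf() calls with snprintf() plus an explicit result check. A minimal standalone sketch of that pattern, for illustration only (the format_socket_name() helper and the main() driver are hypothetical, not part of the scrcpy sources):

```c
#include <stdbool.h>
#include <stdio.h>

// Hypothetical helper demonstrating the snprintf pattern adopted above:
// format "localabstract:<name>" into a fixed-size buffer and report
// failure on truncation instead of silently overflowing with sprintf.
static bool
format_socket_name(char *buf, size_t bufsize, const char *name) {
    // snprintf returns the length it would have written; r >= bufsize
    // means the output was truncated, r < 0 means an encoding error.
    int r = snprintf(buf, bufsize, "localabstract:%s", name);
    if (r < 0 || (size_t) r >= bufsize) {
        fprintf(stderr, "Could not write socket name\n");
        return false;
    }
    return true;
}

int main(void) {
    char remote[108 + 14 + 1]; // localabstract:NAME
    if (format_socket_name(remote, sizeof(remote), "scrcpy")) {
        printf("%s\n", remote);
    }
    return 0;
}
```
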
@@ -7,7 +7,7 @@
#include "util/log.h"
#include "util/str.h"

bool
static bool
sc_adb_parse_device(char *line, struct sc_adb_device *device) {
// One device line looks like:
// "0123456789abcdef device usb:2-1 product:MyProduct model:MyModel "
@@ -204,6 +204,7 @@ sc_adb_parse_device_ip(char *str) {
while (str[idx_line] != '\0') {
char *line = &str[idx_line];
size_t len = strcspn(line, "\n");
bool is_last_line = line[len] == '\0';

// The same, but without any trailing '\r'
size_t line_len = sc_str_remove_trailing_cr(line, len);
@@ -215,12 +216,12 @@ sc_adb_parse_device_ip(char *str) {
return ip;
}

idx_line += len;

if (str[idx_line] != '\0') {
// The next line starts after the '\n'
++idx_line;
if (is_last_line) {
break;
}

// The next line starts after the '\n'
idx_line += len + 1;
}

return NULL;

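The adb_parser change above restructures the line loop so that the last line is detected before the index is advanced. A hedged sketch of the same iteration pattern in isolation (the for_each_line() name and the sample input are made up for illustration):

```c
#include <stdbool.h>
#include <stdio.h>
#include <string.h>

// Walk '\n'-separated lines of a string: compute each line length with
// strcspn(), remember whether it is the last line *before* advancing,
// and break instead of stepping past the terminating '\0'.
static void
for_each_line(const char *str) {
    size_t idx = 0;
    while (str[idx] != '\0') {
        const char *line = &str[idx];
        size_t len = strcspn(line, "\n");
        bool is_last_line = line[len] == '\0';

        printf("line: %.*s\n", (int) len, line);

        if (is_last_line) {
            break;
        }
        // The next line starts after the '\n'
        idx += len + 1;
    }
}

int main(void) {
    for_each_line("first\nsecond\nthird");
    return 0;
}
```
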
@@ -107,7 +107,7 @@ sc_audio_player_sdl_callback(void *userdata, uint8_t *stream, int len_int) {
// latency.
LOGD("[Audio] Buffer underflow, inserting silence: %" PRIu32 " samples",
silence);
memset(stream + read, 0, TO_BYTES(silence));
memset(stream + TO_BYTES(read), 0, TO_BYTES(silence));

if (ap->received) {
// Inserting additional samples immediately increases buffering

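The one-line audio player fix above converts the sample count to a byte offset before clearing the tail of the SDL buffer. A small sketch of why that matters, assuming 16-bit stereo samples (the sample format and the TO_BYTES definition below are assumptions that mirror the macro name in the diff, not code copied from the sources):

```c
#include <stdint.h>
#include <stdio.h>
#include <string.h>

// Assumption: 2 channels of 16-bit samples, i.e. 4 bytes per sample frame.
#define BYTES_PER_SAMPLE (2 * sizeof(int16_t))
#define TO_BYTES(samples) ((samples) * BYTES_PER_SAMPLE)

int main(void) {
    uint8_t stream[TO_BYTES(8)];
    memset(stream, 0xFF, sizeof(stream)); // pretend 8 frames were requested

    uint32_t read = 5;    // frames actually copied from the ring buffer
    uint32_t silence = 3; // missing frames to fill with silence

    // The offset must be in bytes: using `read` directly (the pre-fix code)
    // would start zeroing in the middle of valid samples and leave stale
    // bytes at the end of the buffer.
    memset(stream + TO_BYTES(read), 0, TO_BYTES(silence));

    printf("silence starts at byte %zu of %zu\n",
           TO_BYTES(read), sizeof(stream));
    return 0;
}
```
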
app/src/cli.c (670 changed lines)

@ -32,6 +32,7 @@ enum {
|
||||
OPT_WINDOW_BORDERLESS,
|
||||
OPT_MAX_FPS,
|
||||
OPT_LOCK_VIDEO_ORIENTATION,
|
||||
OPT_DISPLAY,
|
||||
OPT_DISPLAY_ID,
|
||||
OPT_ROTATION,
|
||||
OPT_RENDER_DRIVER,
|
||||
@ -72,6 +73,21 @@ enum {
|
||||
OPT_REQUIRE_AUDIO,
|
||||
OPT_AUDIO_BUFFER,
|
||||
OPT_AUDIO_OUTPUT_BUFFER,
|
||||
OPT_NO_DISPLAY,
|
||||
OPT_NO_VIDEO,
|
||||
OPT_NO_AUDIO_PLAYBACK,
|
||||
OPT_NO_VIDEO_PLAYBACK,
|
||||
OPT_VIDEO_SOURCE,
|
||||
OPT_AUDIO_SOURCE,
|
||||
OPT_KILL_ADB_ON_CLOSE,
|
||||
OPT_TIME_LIMIT,
|
||||
OPT_PAUSE_ON_EXIT,
|
||||
OPT_LIST_CAMERAS,
|
||||
OPT_LIST_CAMERA_SIZES,
|
||||
OPT_CAMERA_ID,
|
||||
OPT_CAMERA_SIZE,
|
||||
OPT_CAMERA_FACING,
|
||||
OPT_CAMERA_AR,
|
||||
};
|
||||
|
||||
struct sc_option {
|
||||
@ -117,7 +133,7 @@ static const struct sc_option options[] = {
|
||||
.longopt_id = OPT_AUDIO_BIT_RATE,
|
||||
.longopt = "audio-bit-rate",
|
||||
.argdesc = "value",
|
||||
.text = "Encode the audio at the given bit-rate, expressed in bits/s. "
|
||||
.text = "Encode the audio at the given bit rate, expressed in bits/s. "
|
||||
"Unit suffixes are supported: 'K' (x1000) and 'M' (x1000000).\n"
|
||||
"Default is 128K (128000).",
|
||||
},
|
||||
@ -130,16 +146,6 @@ static const struct sc_option options[] = {
|
||||
"likelyhood of buffer underrun (causing audio glitches).\n"
|
||||
"Default is 50.",
|
||||
},
|
||||
{
|
||||
.longopt_id = OPT_AUDIO_OUTPUT_BUFFER,
|
||||
.longopt = "audio-output-buffer",
|
||||
.argdesc = "ms",
|
||||
.text = "Configure the size of the SDL audio output buffer (in "
|
||||
"milliseconds).\n"
|
||||
"If you get \"robotic\" audio playback, you should test with "
|
||||
"a higher value (10). Do not change this setting otherwise.\n"
|
||||
"Default is 5.",
|
||||
},
|
||||
{
|
||||
.longopt_id = OPT_AUDIO_CODEC,
|
||||
.longopt = "audio-codec",
|
||||
@ -167,11 +173,28 @@ static const struct sc_option options[] = {
|
||||
"codec provided by --audio-codec).\n"
|
||||
"The available encoders can be listed by --list-encoders.",
|
||||
},
|
||||
{
|
||||
.longopt_id = OPT_AUDIO_SOURCE,
|
||||
.longopt = "audio-source",
|
||||
.argdesc = "source",
|
||||
.text = "Select the audio source (output or mic).\n"
|
||||
"Default is output.",
|
||||
},
|
||||
{
|
||||
.longopt_id = OPT_AUDIO_OUTPUT_BUFFER,
|
||||
.longopt = "audio-output-buffer",
|
||||
.argdesc = "ms",
|
||||
.text = "Configure the size of the SDL audio output buffer (in "
|
||||
"milliseconds).\n"
|
||||
"If you get \"robotic\" audio playback, you should test with "
|
||||
"a higher value (10). Do not change this setting otherwise.\n"
|
||||
"Default is 5.",
|
||||
},
|
||||
{
|
||||
.shortopt = 'b',
|
||||
.longopt = "video-bit-rate",
|
||||
.argdesc = "value",
|
||||
.text = "Encode the video at the given bit-rate, expressed in bits/s. "
|
||||
.text = "Encode the video at the given bit rate, expressed in bits/s. "
|
||||
"Unit suffixes are supported: 'K' (x1000) and 'M' (x1000000).\n"
|
||||
"Default is 8M (8000000).",
|
||||
},
|
||||
@ -181,6 +204,36 @@ static const struct sc_option options[] = {
|
||||
.longopt = "bit-rate",
|
||||
.argdesc = "value",
|
||||
},
|
||||
{
|
||||
.longopt_id = OPT_CAMERA_AR,
|
||||
.longopt = "camera-ar",
|
||||
.argdesc = "ar",
|
||||
.text = "Select the camera size by its aspect ratio (+/- 10%).\n"
|
||||
"Possible values are \"sensor\" (use the camera sensor aspect "
|
||||
"ratio), \"<num>:<den>\" (e.g. \"4:3\") or \"<value>\" (e.g. "
|
||||
"\"1.6\")."
|
||||
},
|
||||
{
|
||||
.longopt_id = OPT_CAMERA_ID,
|
||||
.longopt = "camera-id",
|
||||
.argdesc = "id",
|
||||
.text = "Specify the device camera id to mirror.\n"
|
||||
"The available camera ids can be listed by:\n"
|
||||
" scrcpy --list-cameras",
|
||||
},
|
||||
{
|
||||
.longopt_id = OPT_CAMERA_FACING,
|
||||
.longopt = "camera-facing",
|
||||
.argdesc = "facing",
|
||||
.text = "Select the device camera by its facing direction.\n"
|
||||
"Possible values are \"front\", \"back\" and \"external\".",
|
||||
},
|
||||
{
|
||||
.longopt_id = OPT_CAMERA_SIZE,
|
||||
.longopt = "camera-size",
|
||||
.argdesc = "<width>x<height>",
|
||||
.text = "Specify an explicit camera capture size.",
|
||||
},
|
||||
{
|
||||
// Not really deprecated (--codec has never been released), but without
|
||||
// declaring an explicit --codec option, getopt_long() partial matching
|
||||
@ -217,9 +270,15 @@ static const struct sc_option options[] = {
|
||||
.text = "Disable screensaver while scrcpy is running.",
|
||||
},
|
||||
{
|
||||
.longopt_id = OPT_DISPLAY_ID,
|
||||
// deprecated
|
||||
.longopt_id = OPT_DISPLAY,
|
||||
.longopt = "display",
|
||||
.argdesc = "id",
|
||||
},
|
||||
{
|
||||
.longopt_id = OPT_DISPLAY_ID,
|
||||
.longopt = "display-id",
|
||||
.argdesc = "id",
|
||||
.text = "Specify the device display id to mirror.\n"
|
||||
"The available display ids can be listed by:\n"
|
||||
" scrcpy --list-displays\n"
|
||||
@ -245,6 +304,11 @@ static const struct sc_option options[] = {
|
||||
.longopt = "encoder",
|
||||
.argdesc = "name",
|
||||
},
|
||||
{
|
||||
.shortopt = 'f',
|
||||
.longopt = "fullscreen",
|
||||
.text = "Start in fullscreen.",
|
||||
},
|
||||
{
|
||||
.longopt_id = OPT_FORCE_ADB_FORWARD,
|
||||
.longopt = "force-adb-forward",
|
||||
@ -259,9 +323,14 @@ static const struct sc_option options[] = {
|
||||
"shortcuts and forwards the clicks to the device instead.",
|
||||
},
|
||||
{
|
||||
.shortopt = 'f',
|
||||
.longopt = "fullscreen",
|
||||
.text = "Start in fullscreen.",
|
||||
.shortopt = 'h',
|
||||
.longopt = "help",
|
||||
.text = "Print this help.",
|
||||
},
|
||||
{
|
||||
.longopt_id = OPT_KILL_ADB_ON_CLOSE,
|
||||
.longopt = "kill-adb-on-close",
|
||||
.text = "Kill adb when scrcpy terminates.",
|
||||
},
|
||||
{
|
||||
.shortopt = 'K',
|
||||
@ -280,11 +349,6 @@ static const struct sc_option options[] = {
|
||||
"is enabled (or a physical keyboard is connected).\n"
|
||||
"Also see --hid-mouse.",
|
||||
},
|
||||
{
|
||||
.shortopt = 'h',
|
||||
.longopt = "help",
|
||||
.text = "Print this help.",
|
||||
},
|
||||
{
|
||||
.longopt_id = OPT_LEGACY_PASTE,
|
||||
.longopt = "legacy-paste",
|
||||
@ -293,6 +357,16 @@ static const struct sc_option options[] = {
|
||||
"This is a workaround for some devices not behaving as "
|
||||
"expected when setting the device clipboard programmatically.",
|
||||
},
|
||||
{
|
||||
.longopt_id = OPT_LIST_CAMERAS,
|
||||
.longopt = "list-cameras",
|
||||
.text = "List device cameras.",
|
||||
},
|
||||
{
|
||||
.longopt_id = OPT_LIST_CAMERA_SIZES,
|
||||
.longopt = "list-camera-sizes",
|
||||
.text = "List the valid camera capture sizes.",
|
||||
},
|
||||
{
|
||||
.longopt_id = OPT_LIST_DISPLAYS,
|
||||
.longopt = "list-displays",
|
||||
@ -318,11 +392,13 @@ static const struct sc_option options[] = {
|
||||
"\"initial\".",
|
||||
},
|
||||
{
|
||||
.longopt_id = OPT_MAX_FPS,
|
||||
.longopt = "max-fps",
|
||||
.shortopt = 'm',
|
||||
.longopt = "max-size",
|
||||
.argdesc = "value",
|
||||
.text = "Limit the frame rate of screen capture (officially supported "
|
||||
"since Android 10, but may work on earlier versions).",
|
||||
.text = "Limit both the width and height of the video to value. The "
|
||||
"other dimension is computed so that the device aspect-ratio "
|
||||
"is preserved.\n"
|
||||
"Default is 0 (unlimited).",
|
||||
},
|
||||
{
|
||||
.shortopt = 'M',
|
||||
@ -336,19 +412,33 @@ static const struct sc_option options[] = {
|
||||
"Also see --hid-keyboard.",
|
||||
},
|
||||
{
|
||||
.shortopt = 'm',
|
||||
.longopt = "max-size",
|
||||
.longopt_id = OPT_MAX_FPS,
|
||||
.longopt = "max-fps",
|
||||
.argdesc = "value",
|
||||
.text = "Limit both the width and height of the video to value. The "
|
||||
"other dimension is computed so that the device aspect-ratio "
|
||||
"is preserved.\n"
|
||||
"Default is 0 (unlimited).",
|
||||
.text = "Limit the frame rate of screen capture (officially supported "
|
||||
"since Android 10, but may work on earlier versions).",
|
||||
},
|
||||
{
|
||||
.shortopt = 'n',
|
||||
.longopt = "no-control",
|
||||
.text = "Disable device control (mirror the device in read-only).",
|
||||
},
|
||||
{
|
||||
.shortopt = 'N',
|
||||
.longopt = "no-playback",
|
||||
.text = "Disable video and audio playback on the computer (equivalent "
|
||||
"to --no-video-playback --no-audio-playback).",
|
||||
},
|
||||
{
|
||||
.longopt_id = OPT_NO_AUDIO,
|
||||
.longopt = "no-audio",
|
||||
.text = "Disable audio forwarding.",
|
||||
},
|
||||
{
|
||||
.longopt_id = OPT_NO_AUDIO_PLAYBACK,
|
||||
.longopt = "no-audio-playback",
|
||||
.text = "Disable audio playback on the computer.",
|
||||
},
|
||||
{
|
||||
.longopt_id = OPT_NO_CLEANUP,
|
||||
.longopt = "no-cleanup",
|
||||
@ -374,15 +464,9 @@ static const struct sc_option options[] = {
|
||||
"This option disables this behavior.",
|
||||
},
|
||||
{
|
||||
.shortopt = 'n',
|
||||
.longopt = "no-control",
|
||||
.text = "Disable device control (mirror the device in read-only).",
|
||||
},
|
||||
{
|
||||
.shortopt = 'N',
|
||||
// deprecated
|
||||
.longopt_id = OPT_NO_DISPLAY,
|
||||
.longopt = "no-display",
|
||||
.text = "Do not display device (only when screen recording or V4L2 "
|
||||
"sink is enabled).",
|
||||
},
|
||||
{
|
||||
.longopt_id = OPT_NO_KEY_REPEAT,
|
||||
@ -401,6 +485,16 @@ static const struct sc_option options[] = {
|
||||
.longopt = "no-power-on",
|
||||
.text = "Do not power on the device on start.",
|
||||
},
|
||||
{
|
||||
.longopt_id = OPT_NO_VIDEO,
|
||||
.longopt = "no-video",
|
||||
.text = "Disable video forwarding.",
|
||||
},
|
||||
{
|
||||
.longopt_id = OPT_NO_VIDEO_PLAYBACK,
|
||||
.longopt = "no-video-playback",
|
||||
.text = "Disable video playback on the computer.",
|
||||
},
|
||||
{
|
||||
.longopt_id = OPT_OTG,
|
||||
.longopt = "otg",
|
||||
@ -424,6 +518,20 @@ static const struct sc_option options[] = {
|
||||
"Default is " STR(DEFAULT_LOCAL_PORT_RANGE_FIRST) ":"
|
||||
STR(DEFAULT_LOCAL_PORT_RANGE_LAST) ".",
|
||||
},
|
||||
{
|
||||
.longopt_id = OPT_PAUSE_ON_EXIT,
|
||||
.longopt = "pause-on-exit",
|
||||
.argdesc = "mode",
|
||||
.optional_arg = true,
|
||||
.text = "Configure pause on exit. Possible values are \"true\" (always "
|
||||
"pause on exit), \"false\" (never pause on exit) and "
|
||||
"\"if-error\" (pause only if an error occured).\n"
|
||||
"This is useful to prevent the terminal window from "
|
||||
"automatically closing, so that error messages can be read.\n"
|
||||
"Default is \"false\".\n"
|
||||
"Passing the option without argument is equivalent to passing "
|
||||
"\"true\".",
|
||||
},
|
||||
{
|
||||
.longopt_id = OPT_POWER_OFF_ON_CLOSE,
|
||||
.longopt = "power-off-on-close",
|
||||
@ -452,11 +560,6 @@ static const struct sc_option options[] = {
|
||||
"drag & drop. It is passed as is to \"adb push\".\n"
|
||||
"Default is \"/sdcard/Download/\".",
|
||||
},
|
||||
{
|
||||
.longopt_id = OPT_RAW_KEY_EVENTS,
|
||||
.longopt = "raw-key-events",
|
||||
.text = "Inject key events for all input keys, and ignore text events."
|
||||
},
|
||||
{
|
||||
.shortopt = 'r',
|
||||
.longopt = "record",
|
||||
@ -465,6 +568,11 @@ static const struct sc_option options[] = {
|
||||
"The format is determined by the --record-format option if "
|
||||
"set, or by the file extension (.mp4 or .mkv).",
|
||||
},
|
||||
{
|
||||
.longopt_id = OPT_RAW_KEY_EVENTS,
|
||||
.longopt = "raw-key-events",
|
||||
.text = "Inject key events for all input keys, and ignore text events."
|
||||
},
|
||||
{
|
||||
.longopt_id = OPT_RECORD_FORMAT,
|
||||
.longopt = "record-format",
|
||||
@ -503,6 +611,11 @@ static const struct sc_option options[] = {
|
||||
.text = "The device serial number. Mandatory only if several devices "
|
||||
"are connected to adb.",
|
||||
},
|
||||
{
|
||||
.shortopt = 'S',
|
||||
.longopt = "turn-screen-off",
|
||||
.text = "Turn the device screen off immediately.",
|
||||
},
|
||||
{
|
||||
.longopt_id = OPT_SHORTCUT_MOD,
|
||||
.longopt = "shortcut-mod",
|
||||
@ -516,11 +629,6 @@ static const struct sc_option options[] = {
|
||||
"shortcuts, pass \"lctrl+lalt,lsuper\".\n"
|
||||
"Default is \"lalt,lsuper\" (left-Alt or left-Super).",
|
||||
},
|
||||
{
|
||||
.shortopt = 'S',
|
||||
.longopt = "turn-screen-off",
|
||||
.text = "Turn the device screen off immediately.",
|
||||
},
|
||||
{
|
||||
.shortopt = 't',
|
||||
.longopt = "show-touches",
|
||||
@ -542,6 +650,12 @@ static const struct sc_option options[] = {
|
||||
"connected over USB), enables TCP/IP mode, then connects to "
|
||||
"this address before starting.",
|
||||
},
|
||||
{
|
||||
.longopt_id = OPT_TIME_LIMIT,
|
||||
.longopt = "time-limit",
|
||||
.argdesc = "seconds",
|
||||
.text = "Set the maximum mirroring time, in seconds.",
|
||||
},
|
||||
{
|
||||
.longopt_id = OPT_TUNNEL_HOST,
|
||||
.longopt = "tunnel-host",
|
||||
@ -561,6 +675,22 @@ static const struct sc_option options[] = {
|
||||
"Default is 0 (not forced): the local port used for "
|
||||
"establishing the tunnel will be used.",
|
||||
},
|
||||
{
|
||||
.shortopt = 'v',
|
||||
.longopt = "version",
|
||||
.text = "Print the version of scrcpy.",
|
||||
},
|
||||
{
|
||||
.shortopt = 'V',
|
||||
.longopt = "verbosity",
|
||||
.argdesc = "value",
|
||||
.text = "Set the log level (verbose, debug, info, warn or error).\n"
|
||||
#ifndef NDEBUG
|
||||
"Default is debug.",
|
||||
#else
|
||||
"Default is info.",
|
||||
#endif
|
||||
},
|
||||
{
|
||||
.longopt_id = OPT_V4L2_SINK,
|
||||
.longopt = "v4l2-sink",
|
||||
@ -581,22 +711,6 @@ static const struct sc_option options[] = {
|
||||
"Default is 0 (no buffering).\n"
|
||||
"This option is only available on Linux.",
|
||||
},
|
||||
{
|
||||
.shortopt = 'V',
|
||||
.longopt = "verbosity",
|
||||
.argdesc = "value",
|
||||
.text = "Set the log level (verbose, debug, info, warn or error).\n"
|
||||
#ifndef NDEBUG
|
||||
"Default is debug.",
|
||||
#else
|
||||
"Default is info.",
|
||||
#endif
|
||||
},
|
||||
{
|
||||
.shortopt = 'v',
|
||||
.longopt = "version",
|
||||
.text = "Print the version of scrcpy.",
|
||||
},
|
||||
{
|
||||
.longopt_id = OPT_VIDEO_CODEC,
|
||||
.longopt = "video-codec",
|
||||
@ -624,6 +738,14 @@ static const struct sc_option options[] = {
|
||||
"codec provided by --video-codec).\n"
|
||||
"The available encoders can be listed by --list-encoders.",
|
||||
},
|
||||
{
|
||||
.longopt_id = OPT_VIDEO_SOURCE,
|
||||
.longopt = "video-source",
|
||||
.argdesc = "source",
|
||||
.text = "Select the video source (display or camera).\n"
|
||||
"Camera mirroring requires Android 12+.\n"
|
||||
"Default is display.",
|
||||
},
|
||||
{
|
||||
.shortopt = 'w',
|
||||
.longopt = "stay-awake",
|
||||
@ -1025,7 +1147,7 @@ print_shortcut(const struct sc_shortcut *shortcut, unsigned cols) {
|
||||
while (shortcut->shortcuts[i]) {
|
||||
printf(" %s\n", shortcut->shortcuts[i]);
|
||||
++i;
|
||||
};
|
||||
}
|
||||
|
||||
char *text = sc_str_wrap_lines(shortcut->text, cols, 8);
|
||||
if (!text) {
|
||||
@ -1144,9 +1266,9 @@ parse_integer_arg(const char *s, long *out, bool accept_suffix, long min,
|
||||
}
|
||||
|
||||
static size_t
|
||||
parse_integers_arg(const char *s, size_t max_items, long *out, long min,
|
||||
long max, const char *name) {
|
||||
size_t count = sc_str_parse_integers(s, ':', max_items, out);
|
||||
parse_integers_arg(const char *s, const char sep, size_t max_items, long *out,
|
||||
long min, long max, const char *name) {
|
||||
size_t count = sc_str_parse_integers(s, sep, max_items, out);
|
||||
if (!count) {
|
||||
LOGE("Could not parse %s: %s", name, s);
|
||||
return 0;
|
||||
@ -1302,7 +1424,7 @@ parse_window_dimension(const char *s, uint16_t *dimension) {
|
||||
static bool
|
||||
parse_port_range(const char *s, struct sc_port_range *port_range) {
|
||||
long values[2];
|
||||
size_t count = parse_integers_arg(s, 2, values, 0, 0xFFFF, "port");
|
||||
size_t count = parse_integers_arg(s, ':', 2, values, 0, 0xFFFF, "port");
|
||||
if (!count) {
|
||||
return false;
|
||||
}
|
||||
@ -1467,18 +1589,39 @@ sc_parse_shortcut_mods(const char *s, struct sc_shortcut_mods *mods) {
|
||||
}
|
||||
#endif
|
||||
|
||||
static enum sc_record_format
|
||||
get_record_format(const char *name) {
|
||||
if (!strcmp(name, "mp4")) {
|
||||
return SC_RECORD_FORMAT_MP4;
|
||||
}
|
||||
if (!strcmp(name, "mkv")) {
|
||||
return SC_RECORD_FORMAT_MKV;
|
||||
}
|
||||
if (!strcmp(name, "m4a")) {
|
||||
return SC_RECORD_FORMAT_M4A;
|
||||
}
|
||||
if (!strcmp(name, "mka")) {
|
||||
return SC_RECORD_FORMAT_MKA;
|
||||
}
|
||||
if (!strcmp(name, "opus")) {
|
||||
return SC_RECORD_FORMAT_OPUS;
|
||||
}
|
||||
if (!strcmp(name, "aac")) {
|
||||
return SC_RECORD_FORMAT_AAC;
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
static bool
|
||||
parse_record_format(const char *optarg, enum sc_record_format *format) {
|
||||
if (!strcmp(optarg, "mp4")) {
|
||||
*format = SC_RECORD_FORMAT_MP4;
|
||||
return true;
|
||||
enum sc_record_format fmt = get_record_format(optarg);
|
||||
if (!fmt) {
|
||||
LOGE("Unsupported format: %s (expected mp4 or mkv)", optarg);
|
||||
return false;
|
||||
}
|
||||
if (!strcmp(optarg, "mkv")) {
|
||||
*format = SC_RECORD_FORMAT_MKV;
|
||||
return true;
|
||||
}
|
||||
LOGE("Unsupported format: %s (expected mp4 or mkv)", optarg);
|
||||
return false;
|
||||
|
||||
*format = fmt;
|
||||
return true;
|
||||
}
|
||||
|
||||
static bool
|
||||
@ -1498,18 +1641,13 @@ parse_port(const char *optarg, uint16_t *port) {
|
||||
|
||||
static enum sc_record_format
|
||||
guess_record_format(const char *filename) {
|
||||
size_t len = strlen(filename);
|
||||
if (len < 4) {
|
||||
const char *dot = strrchr(filename, '.');
|
||||
if (!dot) {
|
||||
return 0;
|
||||
}
|
||||
const char *ext = &filename[len - 4];
|
||||
if (!strcmp(ext, ".mp4")) {
|
||||
return SC_RECORD_FORMAT_MP4;
|
||||
}
|
||||
if (!strcmp(ext, ".mkv")) {
|
||||
return SC_RECORD_FORMAT_MKV;
|
||||
}
|
||||
return 0;
|
||||
|
||||
const char *ext = dot + 1;
|
||||
return get_record_format(ext);
|
||||
}
|
||||
|
||||
static bool
|
||||
@ -1548,6 +1686,101 @@ parse_audio_codec(const char *optarg, enum sc_codec *codec) {
    return false;
}

static bool
parse_video_source(const char *optarg, enum sc_video_source *source) {
    if (!strcmp(optarg, "display")) {
        *source = SC_VIDEO_SOURCE_DISPLAY;
        return true;
    }

    if (!strcmp(optarg, "camera")) {
        *source = SC_VIDEO_SOURCE_CAMERA;
        return true;
    }

    LOGE("Unsupported video source: %s (expected display or camera)", optarg);
    return false;
}

static bool
parse_audio_source(const char *optarg, enum sc_audio_source *source) {
    if (!strcmp(optarg, "mic")) {
        *source = SC_AUDIO_SOURCE_MIC;
        return true;
    }

    if (!strcmp(optarg, "output")) {
        *source = SC_AUDIO_SOURCE_OUTPUT;
        return true;
    }

    LOGE("Unsupported audio source: %s (expected output or mic)", optarg);
    return false;
}

static bool
parse_camera_facing(const char *optarg, enum sc_camera_facing *facing) {
    if (!strcmp(optarg, "front")) {
        *facing = SC_CAMERA_FACING_FRONT;
        return true;
    }

    if (!strcmp(optarg, "back")) {
        *facing = SC_CAMERA_FACING_BACK;
        return true;
    }

    if (!strcmp(optarg, "external")) {
        *facing = SC_CAMERA_FACING_EXTERNAL;
        return true;
    }

    if (*optarg == '\0') {
        // Empty string is a valid value (equivalent to not passing the option)
        *facing = SC_CAMERA_FACING_ANY;
        return true;
    }

    LOGE("Unsupported camera facing: %s (expected front, back or external)",
         optarg);
    return false;
}
static bool
|
||||
parse_time_limit(const char *s, sc_tick *tick) {
|
||||
long value;
|
||||
bool ok = parse_integer_arg(s, &value, false, 0, 0x7FFFFFFF, "time limit");
|
||||
if (!ok) {
|
||||
return false;
|
||||
}
|
||||
|
||||
*tick = SC_TICK_FROM_SEC(value);
|
||||
return true;
|
||||
}
|
||||
|
||||
static bool
|
||||
parse_pause_on_exit(const char *s, enum sc_pause_on_exit *pause_on_exit) {
|
||||
if (!s || !strcmp(s, "true")) {
|
||||
*pause_on_exit = SC_PAUSE_ON_EXIT_TRUE;
|
||||
return true;
|
||||
}
|
||||
|
||||
if (!strcmp(s, "false")) {
|
||||
*pause_on_exit = SC_PAUSE_ON_EXIT_FALSE;
|
||||
return true;
|
||||
}
|
||||
|
||||
if (!strcmp(s, "if-error")) {
|
||||
*pause_on_exit = SC_PAUSE_ON_EXIT_IF_ERROR;
|
||||
return true;
|
||||
}
|
||||
|
||||
LOGE("Unsupported pause on exit mode: %s "
|
||||
"(expected true, false or if-error)", optarg);
|
||||
return false;
|
||||
|
||||
}
|
||||
|
||||
static bool
|
||||
parse_args_with_getopt(struct scrcpy_cli_args *args, int argc, char *argv[],
|
||||
const char *optstring, const struct option *longopts) {
|
||||
@ -1575,6 +1808,9 @@ parse_args_with_getopt(struct scrcpy_cli_args *args, int argc, char *argv[],
|
||||
case OPT_CROP:
|
||||
opts->crop = optarg;
|
||||
break;
|
||||
case OPT_DISPLAY:
|
||||
LOGW("--display is deprecated, use --display-id instead.");
|
||||
// fall through
|
||||
case OPT_DISPLAY_ID:
|
||||
if (!parse_display_id(optarg, &opts->display_id)) {
|
||||
return false;
|
||||
@ -1642,8 +1878,18 @@ parse_args_with_getopt(struct scrcpy_cli_args *args, int argc, char *argv[],
|
||||
case 'n':
|
||||
opts->control = false;
|
||||
break;
|
||||
case OPT_NO_DISPLAY:
|
||||
LOGW("--no-display is deprecated, use --no-playback instead.");
|
||||
// fall through
|
||||
case 'N':
|
||||
opts->display = false;
|
||||
opts->video_playback = false;
|
||||
opts->audio_playback = false;
|
||||
break;
|
||||
case OPT_NO_VIDEO_PLAYBACK:
|
||||
opts->video_playback = false;
|
||||
break;
|
||||
case OPT_NO_AUDIO_PLAYBACK:
|
||||
opts->audio_playback = false;
|
||||
break;
|
||||
case 'p':
|
||||
if (!parse_port_range(optarg, &opts->port_range)) {
|
||||
@ -1788,6 +2034,9 @@ parse_args_with_getopt(struct scrcpy_cli_args *args, int argc, char *argv[],
|
||||
case OPT_NO_DOWNSIZE_ON_ERROR:
|
||||
opts->downsize_on_error = false;
|
||||
break;
|
||||
case OPT_NO_VIDEO:
|
||||
opts->video = false;
|
||||
break;
|
||||
case OPT_NO_AUDIO:
|
||||
opts->audio = false;
|
||||
break;
|
||||
@ -1838,14 +2087,21 @@ parse_args_with_getopt(struct scrcpy_cli_args *args, int argc, char *argv[],
|
||||
}
|
||||
break;
|
||||
#else
|
||||
LOGE("V4L2 (--v4l2-buffer) is only available on Linux.");
|
||||
LOGE("V4L2 (--v4l2-buffer) is disabled (or unsupported on this "
|
||||
"platform).");
|
||||
return false;
|
||||
#endif
|
||||
case OPT_LIST_ENCODERS:
|
||||
opts->list_encoders = true;
|
||||
opts->list |= SC_OPTION_LIST_ENCODERS;
|
||||
break;
|
||||
case OPT_LIST_DISPLAYS:
|
||||
opts->list_displays = true;
|
||||
opts->list |= SC_OPTION_LIST_DISPLAYS;
|
||||
break;
|
||||
case OPT_LIST_CAMERAS:
|
||||
opts->list |= SC_OPTION_LIST_CAMERAS;
|
||||
break;
|
||||
case OPT_LIST_CAMERA_SIZES:
|
||||
opts->list |= SC_OPTION_LIST_CAMERA_SIZES;
|
||||
break;
|
||||
case OPT_REQUIRE_AUDIO:
|
||||
opts->require_audio = true;
|
||||
@ -1861,6 +2117,43 @@ parse_args_with_getopt(struct scrcpy_cli_args *args, int argc, char *argv[],
|
||||
return false;
|
||||
}
|
||||
break;
|
||||
case OPT_VIDEO_SOURCE:
|
||||
if (!parse_video_source(optarg, &opts->video_source)) {
|
||||
return false;
|
||||
}
|
||||
break;
|
||||
case OPT_AUDIO_SOURCE:
|
||||
if (!parse_audio_source(optarg, &opts->audio_source)) {
|
||||
return false;
|
||||
}
|
||||
break;
|
||||
case OPT_KILL_ADB_ON_CLOSE:
|
||||
opts->kill_adb_on_close = true;
|
||||
break;
|
||||
case OPT_TIME_LIMIT:
|
||||
if (!parse_time_limit(optarg, &opts->time_limit)) {
|
||||
return false;
|
||||
}
|
||||
break;
|
||||
case OPT_PAUSE_ON_EXIT:
|
||||
if (!parse_pause_on_exit(optarg, &args->pause_on_exit)) {
|
||||
return false;
|
||||
}
|
||||
break;
|
||||
case OPT_CAMERA_AR:
|
||||
opts->camera_ar = optarg;
|
||||
break;
|
||||
case OPT_CAMERA_ID:
|
||||
opts->camera_id = optarg;
|
||||
break;
|
||||
case OPT_CAMERA_SIZE:
|
||||
opts->camera_size = optarg;
|
||||
break;
|
||||
case OPT_CAMERA_FACING:
|
||||
if (!parse_camera_facing(optarg, &opts->camera_facing)) {
|
||||
return false;
|
||||
}
|
||||
break;
|
||||
default:
|
||||
// getopt prints the error message on stderr
|
||||
return false;
|
||||
@ -1889,14 +2182,46 @@ parse_args_with_getopt(struct scrcpy_cli_args *args, int argc, char *argv[],
|
||||
return false;
|
||||
}
|
||||
|
||||
bool otg = false;
|
||||
bool v4l2 = false;
|
||||
#ifdef HAVE_USB
|
||||
otg = opts->otg;
|
||||
#endif
|
||||
#ifdef HAVE_V4L2
|
||||
if (!opts->display && !opts->record_filename && !opts->v4l2_device) {
|
||||
LOGE("-N/--no-display requires either screen recording (-r/--record)"
|
||||
" or sink to v4l2loopback device (--v4l2-sink)");
|
||||
v4l2 = !!opts->v4l2_device;
|
||||
#endif
|
||||
|
||||
if (!opts->video) {
|
||||
opts->video_playback = false;
|
||||
}
|
||||
|
||||
if (!opts->audio) {
|
||||
opts->audio_playback = false;
|
||||
}
|
||||
|
||||
if (opts->video && !opts->video_playback && !opts->record_filename
|
||||
&& !v4l2) {
|
||||
LOGI("No video playback, no recording, no V4L2 sink: video disabled");
|
||||
opts->video = false;
|
||||
}
|
||||
|
||||
if (opts->audio && !opts->audio_playback && !opts->record_filename) {
|
||||
LOGI("No audio playback, no recording: audio disabled");
|
||||
opts->audio = false;
|
||||
}
|
||||
|
||||
if (!opts->video && !opts->audio && !otg) {
|
||||
LOGE("No video, no audio, no OTG: nothing to do");
|
||||
return false;
|
||||
}
|
||||
|
||||
if (opts->v4l2_device) {
|
||||
if (!opts->video && !otg) {
|
||||
// If video is disabled, then scrcpy must exit on audio failure.
|
||||
opts->require_audio = true;
|
||||
}
|
||||
|
||||
#ifdef HAVE_V4L2
|
||||
if (v4l2) {
|
||||
if (opts->lock_video_orientation ==
|
||||
SC_LOCK_VIDEO_ORIENTATION_UNLOCKED) {
|
||||
LOGI("Video orientation is locked for v4l2 sink. "
|
||||
@ -1914,42 +2239,105 @@ parse_args_with_getopt(struct scrcpy_cli_args *args, int argc, char *argv[],
|
||||
LOGE("V4L2 buffer value without V4L2 sink\n");
|
||||
return false;
|
||||
}
|
||||
#else
|
||||
if (!opts->display && !opts->record_filename) {
|
||||
LOGE("-N/--no-display requires screen recording (-r/--record)");
|
||||
return false;
|
||||
}
|
||||
#endif
|
||||
|
||||
if (opts->audio && !opts->display && !opts->record_filename) {
|
||||
LOGI("No display and no recording: audio disabled");
|
||||
opts->audio = false;
|
||||
}
|
||||
|
||||
if ((opts->tunnel_host || opts->tunnel_port) && !opts->force_adb_forward) {
|
||||
LOGI("Tunnel host/port is set, "
|
||||
"--force-adb-forward automatically enabled.");
|
||||
opts->force_adb_forward = true;
|
||||
}
|
||||
|
||||
if (opts->video_source == SC_VIDEO_SOURCE_CAMERA) {
|
||||
if (opts->display_id) {
|
||||
LOGE("--display-id is only available with --video-source=display");
|
||||
return false;
|
||||
}
|
||||
|
||||
if (opts->lock_video_orientation !=
|
||||
SC_LOCK_VIDEO_ORIENTATION_UNLOCKED) {
|
||||
LOGE("--lock-video-orientation is not supported for camera");
|
||||
return false;
|
||||
}
|
||||
|
||||
if (opts->camera_id && opts->camera_facing != SC_CAMERA_FACING_ANY) {
|
||||
LOGE("Could not specify both --camera-id and --camera-facing");
|
||||
return false;
|
||||
}
|
||||
|
||||
if (opts->camera_size) {
if (opts->max_size) {
LOGE("Could not specify both --camera-size and -m/--max-size");
|
||||
return false;
|
||||
}
|
||||
|
||||
if (opts->camera_ar) {
|
||||
LOGE("Could not specify both --camera-size and --camera-ar");
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
if (opts->control) {
|
||||
LOGI("Camera video source: control disabled");
|
||||
opts->control = false;
|
||||
}
|
||||
} else if (opts->camera_id
|
||||
|| opts->camera_ar
|
||||
|| opts->camera_facing != SC_CAMERA_FACING_ANY
|
||||
|| opts->camera_size) {
|
||||
LOGE("Camera options are only available with --video-source=camera");
|
||||
return false;
|
||||
}
|
||||
|
||||
if (opts->audio && opts->audio_source == SC_AUDIO_SOURCE_AUTO) {
|
||||
// Select the audio source according to the video source
|
||||
if (opts->video_source == SC_VIDEO_SOURCE_DISPLAY) {
|
||||
opts->audio_source = SC_AUDIO_SOURCE_OUTPUT;
|
||||
} else {
|
||||
opts->audio_source = SC_AUDIO_SOURCE_MIC;
|
||||
LOGI("Camera video source: microphone audio source selected");
|
||||
}
|
||||
}
|
||||
|
||||
if (opts->record_format && !opts->record_filename) {
|
||||
LOGE("Record format specified without recording");
|
||||
return false;
|
||||
}
|
||||
|
||||
if (opts->record_filename && !opts->record_format) {
|
||||
opts->record_format = guess_record_format(opts->record_filename);
|
||||
if (opts->record_filename) {
|
||||
if (!opts->record_format) {
|
||||
LOGE("No format specified for \"%s\" "
|
||||
"(try with --record-format=mkv)",
|
||||
opts->record_filename);
|
||||
opts->record_format = guess_record_format(opts->record_filename);
|
||||
if (!opts->record_format) {
|
||||
LOGE("No format specified for \"%s\" "
|
||||
"(try with --record-format=mkv)",
|
||||
opts->record_filename);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
if (opts->audio_codec == SC_CODEC_RAW) {
|
||||
LOGW("Recording does not support RAW audio codec");
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
if (opts->record_filename && opts->audio_codec == SC_CODEC_RAW) {
|
||||
LOGW("Recording does not support RAW audio codec");
|
||||
return false;
|
||||
if (opts->video
|
||||
&& sc_record_format_is_audio_only(opts->record_format)) {
|
||||
LOGE("Audio container does not support video stream");
|
||||
return false;
|
||||
}
|
||||
|
||||
if (opts->record_format == SC_RECORD_FORMAT_OPUS
|
||||
&& opts->audio_codec != SC_CODEC_OPUS) {
|
||||
LOGE("Recording to OPUS file requires an OPUS audio stream "
|
||||
"(try with --audio-codec=opus)");
|
||||
return false;
|
||||
}
|
||||
|
||||
if (opts->record_format == SC_RECORD_FORMAT_AAC
|
||||
&& opts->audio_codec != SC_CODEC_AAC) {
|
||||
LOGE("Recording to AAC file requires an AAC audio stream "
|
||||
"(try with --audio-codec=aac)");
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
if (opts->audio_codec == SC_CODEC_RAW) {
|
||||
@ -1983,11 +2371,9 @@ parse_args_with_getopt(struct scrcpy_cli_args *args, int argc, char *argv[],
|
||||
}
|
||||
}
|
||||
|
||||
#ifdef HAVE_USB
|
||||
|
||||
# ifdef _WIN32
|
||||
if (!opts->otg && (opts->keyboard_input_mode == SC_KEYBOARD_INPUT_MODE_HID
|
||||
|| opts->mouse_input_mode == SC_MOUSE_INPUT_MODE_HID)) {
|
||||
if (!otg && (opts->keyboard_input_mode == SC_KEYBOARD_INPUT_MODE_HID
|
||||
|| opts->mouse_input_mode == SC_MOUSE_INPUT_MODE_HID)) {
|
||||
LOGE("On Windows, it is not possible to open a USB device already open "
|
||||
"by another process (like adb).");
|
||||
LOGE("Therefore, -K/--hid-keyboard and -M/--hid-mouse may only work in "
|
||||
@ -1996,7 +2382,7 @@ parse_args_with_getopt(struct scrcpy_cli_args *args, int argc, char *argv[],
|
||||
}
|
||||
# endif
|
||||
|
||||
if (opts->otg) {
|
||||
if (otg) {
|
||||
// OTG mode is compatible with only very few options.
|
||||
// Only report obvious errors.
|
||||
if (opts->record_filename) {
|
||||
@ -2023,18 +2409,46 @@ parse_args_with_getopt(struct scrcpy_cli_args *args, int argc, char *argv[],
|
||||
LOGE("OTG mode: could not select display");
|
||||
return false;
|
||||
}
|
||||
# ifdef HAVE_V4L2
|
||||
if (opts->v4l2_device) {
|
||||
if (v4l2) {
|
||||
LOGE("OTG mode: could not sink to V4L2 device");
|
||||
return false;
|
||||
}
|
||||
# endif
|
||||
}
|
||||
#endif
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
static enum sc_pause_on_exit
sc_get_pause_on_exit(int argc, char *argv[]) {
    // Read arguments backwards so that the last --pause-on-exit is considered
    // (same behavior as getopt())
    for (int i = argc - 1; i >= 1; --i) {
        const char *arg = argv[i];
        // Starts with "--pause-on-exit"
        if (!strncmp("--pause-on-exit", arg, 15)) {
            if (arg[15] == '\0') {
                // No argument
                return SC_PAUSE_ON_EXIT_TRUE;
            }
            if (arg[15] != '=') {
                // Invalid parameter, ignore
                return SC_PAUSE_ON_EXIT_FALSE;
            }
            const char *value = &arg[16];
            if (!strcmp(value, "true")) {
                return SC_PAUSE_ON_EXIT_TRUE;
            }
            if (!strcmp(value, "if-error")) {
                return SC_PAUSE_ON_EXIT_IF_ERROR;
            }
            // Set to false, including when the value is invalid
            return SC_PAUSE_ON_EXIT_FALSE;
        }
    }

    return SC_PAUSE_ON_EXIT_FALSE;
}
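Aside (illustrative sketch, not project code): the scan above matches --pause-on-exit by prefix and then inspects an optional =value suffix, falling back to false for anything it does not recognize. A standalone reproduction of that parsing behavior:

#include <stdio.h>
#include <string.h>

// Returns the effective mode for one argument, or NULL if it is not
// --pause-on-exit at all
static const char *
pause_on_exit_mode(const char *arg) {
    if (strncmp("--pause-on-exit", arg, 15)) {
        return NULL; // some other argument
    }
    if (arg[15] == '\0') {
        return "true"; // bare --pause-on-exit
    }
    if (arg[15] != '=') {
        return "false"; // malformed suffix, e.g. --pause-on-exitfoo
    }
    const char *value = &arg[16];
    if (!strcmp(value, "true") || !strcmp(value, "if-error")) {
        return value;
    }
    return "false"; // unknown values fall back to false
}

int main(void) {
    const char *samples[] =
        {"--pause-on-exit", "--pause-on-exit=if-error", "--pause-on-exit=no"};
    for (size_t i = 0; i < 3; ++i) {
        printf("%s -> %s\n", samples[i], pause_on_exit_mode(samples[i]));
    }
    return 0;
}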
bool
|
||||
scrcpy_parse_args(struct scrcpy_cli_args *args, int argc, char *argv[]) {
|
||||
struct sc_getopt_adapter adapter;
|
||||
@ -2048,5 +2462,11 @@ scrcpy_parse_args(struct scrcpy_cli_args *args, int argc, char *argv[]) {
|
||||
|
||||
sc_getopt_adapter_destroy(&adapter);
|
||||
|
||||
if (!ret && args->pause_on_exit == SC_PAUSE_ON_EXIT_FALSE) {
|
||||
// Check if "--pause-on-exit" is present in the arguments list, because
|
||||
// it must be taken into account even if command line parsing failed
|
||||
args->pause_on_exit = sc_get_pause_on_exit(argc, argv);
|
||||
}
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
@ -7,10 +7,17 @@
|
||||
|
||||
#include "options.h"
|
||||
|
||||
enum sc_pause_on_exit {
|
||||
SC_PAUSE_ON_EXIT_TRUE,
|
||||
SC_PAUSE_ON_EXIT_FALSE,
|
||||
SC_PAUSE_ON_EXIT_IF_ERROR,
|
||||
};
|
||||
|
||||
struct scrcpy_cli_args {
|
||||
struct scrcpy_options opts;
|
||||
bool help;
|
||||
bool version;
|
||||
enum sc_pause_on_exit pause_on_exit;
|
||||
};
|
||||
|
||||
void
|
||||
|
@ -25,6 +25,12 @@
|
||||
# define SCRCPY_LAVF_REQUIRES_REGISTER_ALL
|
||||
#endif
|
||||
|
||||
// Not documented in ffmpeg/doc/APIchanges, but AV_CODEC_ID_AV1 has been added
|
||||
// by FFmpeg commit d42809f9835a4e9e5c7c63210abb09ad0ef19cfb (included in tag
|
||||
// n3.3).
|
||||
#if LIBAVFORMAT_VERSION_INT >= AV_VERSION_INT(57, 89, 100)
|
||||
# define SCRCPY_LAVC_HAS_AV1
|
||||
#endif
|
||||
|
||||
// In ffmpeg/doc/APIchanges:
|
||||
// 2018-01-28 - ea3672b7d6 - lavf 58.7.100 - avformat.h
|
||||
|
@ -33,7 +33,12 @@ sc_demuxer_to_avcodec_id(uint32_t codec_id) {
|
||||
case SC_CODEC_ID_H265:
|
||||
return AV_CODEC_ID_HEVC;
|
||||
case SC_CODEC_ID_AV1:
|
||||
#ifdef SCRCPY_LAVC_HAS_AV1
|
||||
return AV_CODEC_ID_AV1;
|
||||
#else
|
||||
LOGE("AV1 not supported by this FFmpeg version");
|
||||
return AV_CODEC_ID_NONE;
|
||||
#endif
|
||||
case SC_CODEC_ID_OPUS:
|
||||
return AV_CODEC_ID_OPUS;
|
||||
case SC_CODEC_ID_AAC:
|
||||
@ -74,9 +79,8 @@ sc_demuxer_recv_video_size(struct sc_demuxer *demuxer, uint32_t *width,
|
||||
|
||||
static bool
|
||||
sc_demuxer_recv_packet(struct sc_demuxer *demuxer, AVPacket *packet) {
|
||||
// The video stream contains raw packets, without time information. When we
|
||||
// record, we retrieve the timestamps separately, from a "meta" header
|
||||
// added by the server before each raw packet.
|
||||
// The video and audio streams contain a sequence of raw packets (as
|
||||
// provided by MediaCodec), each prefixed with a "meta" header.
|
||||
//
|
||||
// The "meta" header length is 12 bytes:
|
||||
// [. . . . . . . .|. . . .]. . . . . . . . . . . . . . . ...
|
||||
|
285
app/src/display.c
Normal file
@ -0,0 +1,285 @@
|
||||
#include "display.h"
|
||||
|
||||
#include <assert.h>
|
||||
|
||||
#include "util/log.h"
|
||||
|
||||
bool
|
||||
sc_display_init(struct sc_display *display, SDL_Window *window, bool mipmaps) {
|
||||
display->renderer =
|
||||
SDL_CreateRenderer(window, -1, SDL_RENDERER_ACCELERATED);
|
||||
if (!display->renderer) {
|
||||
LOGE("Could not create renderer: %s", SDL_GetError());
|
||||
return false;
|
||||
}
|
||||
|
||||
SDL_RendererInfo renderer_info;
|
||||
int r = SDL_GetRendererInfo(display->renderer, &renderer_info);
|
||||
const char *renderer_name = r ? NULL : renderer_info.name;
|
||||
LOGI("Renderer: %s", renderer_name ? renderer_name : "(unknown)");
|
||||
|
||||
display->mipmaps = false;
|
||||
|
||||
// starts with "opengl"
|
||||
bool use_opengl = renderer_name && !strncmp(renderer_name, "opengl", 6);
|
||||
if (use_opengl) {
|
||||
|
||||
#ifdef SC_DISPLAY_FORCE_OPENGL_CORE_PROFILE
|
||||
// Persuade macOS to give us something better than OpenGL 2.1.
|
||||
// If we create a Core Profile context, we get the best OpenGL version.
|
||||
SDL_GL_SetAttribute(SDL_GL_CONTEXT_PROFILE_MASK,
|
||||
SDL_GL_CONTEXT_PROFILE_CORE);
|
||||
|
||||
LOGD("Creating OpenGL Core Profile context");
|
||||
display->gl_context = SDL_GL_CreateContext(window);
|
||||
if (!display->gl_context) {
|
||||
LOGE("Could not create OpenGL context: %s", SDL_GetError());
|
||||
SDL_DestroyRenderer(display->renderer);
|
||||
return false;
|
||||
}
|
||||
#endif
|
||||
|
||||
struct sc_opengl *gl = &display->gl;
|
||||
sc_opengl_init(gl);
|
||||
|
||||
LOGI("OpenGL version: %s", gl->version);
|
||||
|
||||
if (mipmaps) {
|
||||
bool supports_mipmaps =
|
||||
sc_opengl_version_at_least(gl, 3, 0, /* OpenGL 3.0+ */
|
||||
2, 0 /* OpenGL ES 2.0+ */);
|
||||
if (supports_mipmaps) {
|
||||
LOGI("Trilinear filtering enabled");
|
||||
display->mipmaps = true;
|
||||
} else {
|
||||
LOGW("Trilinear filtering disabled "
|
||||
"(OpenGL 3.0+ or ES 2.0+ required)");
|
||||
}
|
||||
} else {
|
||||
LOGI("Trilinear filtering disabled");
|
||||
}
|
||||
} else if (mipmaps) {
|
||||
LOGD("Trilinear filtering disabled (not an OpenGL renderer");
|
||||
}
|
||||
|
||||
display->pending.flags = 0;
|
||||
display->pending.frame = NULL;
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
void
|
||||
sc_display_destroy(struct sc_display *display) {
|
||||
if (display->pending.frame) {
|
||||
av_frame_free(&display->pending.frame);
|
||||
}
|
||||
#ifdef SC_DISPLAY_FORCE_OPENGL_CORE_PROFILE
|
||||
SDL_GL_DeleteContext(display->gl_context);
|
||||
#endif
|
||||
if (display->texture) {
|
||||
SDL_DestroyTexture(display->texture);
|
||||
}
|
||||
SDL_DestroyRenderer(display->renderer);
|
||||
}
|
||||
|
||||
static SDL_Texture *
|
||||
sc_display_create_texture(struct sc_display *display,
|
||||
struct sc_size size) {
|
||||
SDL_Renderer *renderer = display->renderer;
|
||||
SDL_Texture *texture = SDL_CreateTexture(renderer, SDL_PIXELFORMAT_YV12,
|
||||
SDL_TEXTUREACCESS_STREAMING,
|
||||
size.width, size.height);
|
||||
if (!texture) {
|
||||
LOGD("Could not create texture: %s", SDL_GetError());
|
||||
return NULL;
|
||||
}
|
||||
|
||||
if (display->mipmaps) {
|
||||
struct sc_opengl *gl = &display->gl;
|
||||
|
||||
SDL_GL_BindTexture(texture, NULL, NULL);
|
||||
|
||||
// Enable trilinear filtering for downscaling
|
||||
gl->TexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER,
|
||||
GL_LINEAR_MIPMAP_LINEAR);
|
||||
gl->TexParameterf(GL_TEXTURE_2D, GL_TEXTURE_LOD_BIAS, -1.f);
|
||||
|
||||
SDL_GL_UnbindTexture(texture);
|
||||
}
|
||||
|
||||
return texture;
|
||||
}
|
||||
|
||||
static inline void
|
||||
sc_display_set_pending_size(struct sc_display *display, struct sc_size size) {
|
||||
assert(!display->texture);
|
||||
display->pending.size = size;
|
||||
display->pending.flags |= SC_DISPLAY_PENDING_FLAG_SIZE;
|
||||
}
|
||||
|
||||
static bool
|
||||
sc_display_set_pending_frame(struct sc_display *display, const AVFrame *frame) {
|
||||
if (!display->pending.frame) {
|
||||
display->pending.frame = av_frame_alloc();
|
||||
if (!display->pending.frame) {
|
||||
LOG_OOM();
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
int r = av_frame_ref(display->pending.frame, frame);
|
||||
if (r) {
|
||||
LOGE("Could not ref frame: %d", r);
|
||||
return false;
|
||||
}
|
||||
|
||||
display->pending.flags |= SC_DISPLAY_PENDING_FLAG_FRAME;
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
static bool
|
||||
sc_display_apply_pending(struct sc_display *display) {
|
||||
if (display->pending.flags & SC_DISPLAY_PENDING_FLAG_SIZE) {
|
||||
assert(!display->texture);
|
||||
display->texture =
|
||||
sc_display_create_texture(display, display->pending.size);
|
||||
if (!display->texture) {
|
||||
return false;
|
||||
}
|
||||
|
||||
display->pending.flags &= ~SC_DISPLAY_PENDING_FLAG_SIZE;
|
||||
}
|
||||
|
||||
if (display->pending.flags & SC_DISPLAY_PENDING_FLAG_FRAME) {
|
||||
assert(display->pending.frame);
|
||||
bool ok = sc_display_update_texture(display, display->pending.frame);
|
||||
if (!ok) {
|
||||
return false;
|
||||
}
|
||||
|
||||
av_frame_unref(display->pending.frame);
|
||||
display->pending.flags &= ~SC_DISPLAY_PENDING_FLAG_FRAME;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
static bool
|
||||
sc_display_set_texture_size_internal(struct sc_display *display,
|
||||
struct sc_size size) {
|
||||
assert(size.width && size.height);
|
||||
|
||||
if (display->texture) {
|
||||
SDL_DestroyTexture(display->texture);
|
||||
}
|
||||
|
||||
display->texture = sc_display_create_texture(display, size);
|
||||
if (!display->texture) {
|
||||
return false;
|
||||
}
|
||||
|
||||
LOGI("Texture: %" PRIu16 "x%" PRIu16, size.width, size.height);
|
||||
return true;
|
||||
}
|
||||
|
||||
enum sc_display_result
|
||||
sc_display_set_texture_size(struct sc_display *display, struct sc_size size) {
|
||||
bool ok = sc_display_set_texture_size_internal(display, size);
|
||||
if (!ok) {
|
||||
sc_display_set_pending_size(display, size);
|
||||
return SC_DISPLAY_RESULT_PENDING;
|
||||
|
||||
}
|
||||
|
||||
return SC_DISPLAY_RESULT_OK;
|
||||
}
|
||||
|
||||
static bool
|
||||
sc_display_update_texture_internal(struct sc_display *display,
|
||||
const AVFrame *frame) {
|
||||
int ret = SDL_UpdateYUVTexture(display->texture, NULL,
|
||||
frame->data[0], frame->linesize[0],
|
||||
frame->data[1], frame->linesize[1],
|
||||
frame->data[2], frame->linesize[2]);
|
||||
if (ret) {
|
||||
LOGD("Could not update texture: %s", SDL_GetError());
|
||||
return false;
|
||||
}
|
||||
|
||||
if (display->mipmaps) {
|
||||
SDL_GL_BindTexture(display->texture, NULL, NULL);
|
||||
display->gl.GenerateMipmap(GL_TEXTURE_2D);
|
||||
SDL_GL_UnbindTexture(display->texture);
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
enum sc_display_result
|
||||
sc_display_update_texture(struct sc_display *display, const AVFrame *frame) {
|
||||
bool ok = sc_display_update_texture_internal(display, frame);
|
||||
if (!ok) {
|
||||
ok = sc_display_set_pending_frame(display, frame);
|
||||
if (!ok) {
|
||||
LOGE("Could not set pending frame");
|
||||
return SC_DISPLAY_RESULT_ERROR;
|
||||
}
|
||||
|
||||
return SC_DISPLAY_RESULT_PENDING;
|
||||
}
|
||||
|
||||
return SC_DISPLAY_RESULT_OK;
|
||||
}
|
||||
|
||||
enum sc_display_result
|
||||
sc_display_render(struct sc_display *display, const SDL_Rect *geometry,
|
||||
unsigned rotation) {
|
||||
SDL_RenderClear(display->renderer);
|
||||
|
||||
if (display->pending.flags) {
|
||||
bool ok = sc_display_apply_pending(display);
|
||||
if (!ok) {
|
||||
return SC_DISPLAY_RESULT_PENDING;
|
||||
}
|
||||
}
|
||||
|
||||
SDL_Renderer *renderer = display->renderer;
|
||||
SDL_Texture *texture = display->texture;
|
||||
|
||||
if (rotation == 0) {
|
||||
int ret = SDL_RenderCopy(renderer, texture, NULL, geometry);
|
||||
if (ret) {
|
||||
LOGE("Could not render texture: %s", SDL_GetError());
|
||||
return SC_DISPLAY_RESULT_ERROR;
|
||||
}
|
||||
} else {
|
||||
// rotation in RenderCopyEx() is clockwise, while screen->rotation is
|
||||
// counterclockwise (to be consistent with --lock-video-orientation)
|
||||
int cw_rotation = (4 - rotation) % 4;
|
||||
double angle = 90 * cw_rotation;
|
||||
|
||||
const SDL_Rect *dstrect = NULL;
|
||||
SDL_Rect rect;
|
||||
if (rotation & 1) {
|
||||
rect.x = geometry->x + (geometry->w - geometry->h) / 2;
|
||||
rect.y = geometry->y + (geometry->h - geometry->w) / 2;
|
||||
rect.w = geometry->h;
|
||||
rect.h = geometry->w;
|
||||
dstrect = ▭
|
||||
} else {
|
||||
assert(rotation == 2);
|
||||
dstrect = geometry;
|
||||
}
|
||||
|
||||
int ret = SDL_RenderCopyEx(renderer, texture, NULL, dstrect, angle,
|
||||
NULL, 0);
|
||||
if (ret) {
|
||||
LOGE("Could not render texture: %s", SDL_GetError());
|
||||
return SC_DISPLAY_RESULT_ERROR;
|
||||
}
|
||||
}
|
||||
|
||||
SDL_RenderPresent(display->renderer);
|
||||
return SC_DISPLAY_RESULT_OK;
|
||||
}
|
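Aside (illustrative sketch, not project code): sc_display defers a failed texture creation or frame upload as "pending" work and retries it from sc_display_render(). The real caller lives in screen.c, which is not shown in this compare; a rough sketch of how such a caller could treat the three-valued result, where only ERROR is fatal:

#include <stdint.h>
#include "display.h"

// Hypothetical caller; scrcpy's actual caller only resizes when the frame
// size really changes, instead of on every frame as done here
static bool
render_frame(struct sc_display *display, const AVFrame *frame,
             const SDL_Rect *geometry, unsigned rotation) {
    struct sc_size size = {(uint16_t) frame->width, (uint16_t) frame->height};

    enum sc_display_result res = sc_display_set_texture_size(display, size);
    if (res == SC_DISPLAY_RESULT_ERROR) {
        return false;
    }
    // PENDING is fine: the texture will be (re)created on the next render

    res = sc_display_update_texture(display, frame);
    if (res == SC_DISPLAY_RESULT_ERROR) {
        return false;
    }

    // sc_display_render() first applies any pending size/frame, then draws
    res = sc_display_render(display, geometry, rotation);
    return res != SC_DISPLAY_RESULT_ERROR;
}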
59
app/src/display.h
Normal file
@ -0,0 +1,59 @@
|
||||
#ifndef SC_DISPLAY_H
|
||||
#define SC_DISPLAY_H
|
||||
|
||||
#include "common.h"
|
||||
|
||||
#include <stdbool.h>
|
||||
#include <libavformat/avformat.h>
|
||||
#include <SDL2/SDL.h>
|
||||
|
||||
#include "coords.h"
|
||||
#include "opengl.h"
|
||||
|
||||
#ifdef __APPLE__
|
||||
# define SC_DISPLAY_FORCE_OPENGL_CORE_PROFILE
|
||||
#endif
|
||||
|
||||
struct sc_display {
|
||||
SDL_Renderer *renderer;
|
||||
SDL_Texture *texture;
|
||||
|
||||
struct sc_opengl gl;
|
||||
#ifdef SC_DISPLAY_FORCE_OPENGL_CORE_PROFILE
|
||||
SDL_GLContext *gl_context;
|
||||
#endif
|
||||
|
||||
bool mipmaps;
|
||||
|
||||
struct {
|
||||
#define SC_DISPLAY_PENDING_FLAG_SIZE 1
|
||||
#define SC_DISPLAY_PENDING_FLAG_FRAME 2
|
||||
int8_t flags;
|
||||
struct sc_size size;
|
||||
AVFrame *frame;
|
||||
} pending;
|
||||
};
|
||||
|
||||
enum sc_display_result {
|
||||
SC_DISPLAY_RESULT_OK,
|
||||
SC_DISPLAY_RESULT_PENDING,
|
||||
SC_DISPLAY_RESULT_ERROR,
|
||||
};
|
||||
|
||||
bool
|
||||
sc_display_init(struct sc_display *display, SDL_Window *window, bool mipmaps);
|
||||
|
||||
void
|
||||
sc_display_destroy(struct sc_display *display);
|
||||
|
||||
enum sc_display_result
|
||||
sc_display_set_texture_size(struct sc_display *display, struct sc_size size);
|
||||
|
||||
enum sc_display_result
|
||||
sc_display_update_texture(struct sc_display *display, const AVFrame *frame);
|
||||
|
||||
enum sc_display_result
|
||||
sc_display_render(struct sc_display *display, const SDL_Rect *geometry,
|
||||
unsigned rotation);
|
||||
|
||||
#endif
|
@ -6,3 +6,4 @@
|
||||
#define SC_EVENT_DEMUXER_ERROR (SDL_USEREVENT + 5)
|
||||
#define SC_EVENT_RECORDER_ERROR (SDL_USEREVENT + 6)
|
||||
#define SC_EVENT_SCREEN_INIT_SIZE (SDL_USEREVENT + 7)
|
||||
#define SC_EVENT_TIME_LIMIT_REACHED (SDL_USEREVENT + 8)
|
||||
|
@ -271,7 +271,7 @@ error:
|
||||
}
|
||||
|
||||
SDL_Surface *
|
||||
scrcpy_icon_load() {
|
||||
scrcpy_icon_load(void) {
|
||||
char *icon_path = get_icon_path();
|
||||
if (!icon_path) {
|
||||
return NULL;
|
||||
|
@ -797,7 +797,8 @@ sc_input_manager_process_file(struct sc_input_manager *im,
|
||||
}
|
||||
|
||||
void
|
||||
sc_input_manager_handle_event(struct sc_input_manager *im, SDL_Event *event) {
|
||||
sc_input_manager_handle_event(struct sc_input_manager *im,
|
||||
const SDL_Event *event) {
|
||||
bool control = im->controller;
|
||||
switch (event->type) {
|
||||
case SDL_TEXTINPUT:
|
||||
|
@ -61,6 +61,7 @@ sc_input_manager_init(struct sc_input_manager *im,
|
||||
const struct sc_input_manager_params *params);
|
||||
|
||||
void
|
||||
sc_input_manager_handle_event(struct sc_input_manager *im, SDL_Event *event);
|
||||
sc_input_manager_handle_event(struct sc_input_manager *im,
|
||||
const SDL_Event *event);
|
||||
|
||||
#endif
|
||||
|
@ -23,7 +23,7 @@
|
||||
#include "util/str.h"
|
||||
#endif
|
||||
|
||||
int
|
||||
static int
|
||||
main_scrcpy(int argc, char *argv[]) {
|
||||
#ifdef _WIN32
|
||||
// disable buffering, we want logs immediately
|
||||
@ -39,26 +39,32 @@ main_scrcpy(int argc, char *argv[]) {
|
||||
.opts = scrcpy_options_default,
|
||||
.help = false,
|
||||
.version = false,
|
||||
.pause_on_exit = SC_PAUSE_ON_EXIT_FALSE,
|
||||
};
|
||||
|
||||
#ifndef NDEBUG
|
||||
args.opts.log_level = SC_LOG_LEVEL_DEBUG;
|
||||
#endif
|
||||
|
||||
enum scrcpy_exit_code ret;
|
||||
|
||||
if (!scrcpy_parse_args(&args, argc, argv)) {
|
||||
return SCRCPY_EXIT_FAILURE;
|
||||
ret = SCRCPY_EXIT_FAILURE;
|
||||
goto end;
|
||||
}
|
||||
|
||||
sc_set_log_level(args.opts.log_level);
|
||||
|
||||
if (args.help) {
|
||||
scrcpy_print_usage(argv[0]);
|
||||
return SCRCPY_EXIT_SUCCESS;
|
||||
ret = SCRCPY_EXIT_SUCCESS;
|
||||
goto end;
|
||||
}
|
||||
|
||||
if (args.version) {
|
||||
scrcpy_print_version();
|
||||
return SCRCPY_EXIT_SUCCESS;
|
||||
ret = SCRCPY_EXIT_SUCCESS;
|
||||
goto end;
|
||||
}
|
||||
|
||||
#ifdef SCRCPY_LAVF_REQUIRES_REGISTER_ALL
|
||||
@ -72,18 +78,26 @@ main_scrcpy(int argc, char *argv[]) {
|
||||
#endif
|
||||
|
||||
if (!net_init()) {
|
||||
return SCRCPY_EXIT_FAILURE;
|
||||
ret = SCRCPY_EXIT_FAILURE;
|
||||
goto end;
|
||||
}
|
||||
|
||||
sc_log_configure();
|
||||
|
||||
#ifdef HAVE_USB
|
||||
enum scrcpy_exit_code ret = args.opts.otg ? scrcpy_otg(&args.opts)
|
||||
: scrcpy(&args.opts);
|
||||
ret = args.opts.otg ? scrcpy_otg(&args.opts) : scrcpy(&args.opts);
|
||||
#else
|
||||
enum scrcpy_exit_code ret = scrcpy(&args.opts);
|
||||
ret = scrcpy(&args.opts);
|
||||
#endif
|
||||
|
||||
end:
|
||||
if (args.pause_on_exit == SC_PAUSE_ON_EXIT_TRUE ||
|
||||
(args.pause_on_exit == SC_PAUSE_ON_EXIT_IF_ERROR &&
|
||||
ret != SCRCPY_EXIT_SUCCESS)) {
|
||||
printf("Press Enter to continue...\n");
|
||||
getchar();
|
||||
}
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
|
@ -11,15 +11,18 @@ const struct scrcpy_options scrcpy_options_default = {
|
||||
.audio_codec_options = NULL,
|
||||
.video_encoder = NULL,
|
||||
.audio_encoder = NULL,
|
||||
#ifdef HAVE_V4L2
|
||||
.v4l2_device = NULL,
|
||||
#endif
|
||||
.camera_id = NULL,
|
||||
.camera_size = NULL,
|
||||
.camera_ar = NULL,
|
||||
.log_level = SC_LOG_LEVEL_INFO,
|
||||
.video_codec = SC_CODEC_H264,
|
||||
.audio_codec = SC_CODEC_OPUS,
|
||||
.video_source = SC_VIDEO_SOURCE_DISPLAY,
|
||||
.audio_source = SC_AUDIO_SOURCE_AUTO,
|
||||
.record_format = SC_RECORD_FORMAT_AUTO,
|
||||
.keyboard_input_mode = SC_KEYBOARD_INPUT_MODE_INJECT,
|
||||
.mouse_input_mode = SC_MOUSE_INPUT_MODE_INJECT,
|
||||
.camera_facing = SC_CAMERA_FACING_ANY,
|
||||
.port_range = {
|
||||
.first = DEFAULT_LOCAL_PORT_RANGE_FIRST,
|
||||
.last = DEFAULT_LOCAL_PORT_RANGE_LAST,
|
||||
@ -42,9 +45,13 @@ const struct scrcpy_options scrcpy_options_default = {
|
||||
.window_height = 0,
|
||||
.display_id = 0,
|
||||
.display_buffer = 0,
|
||||
.v4l2_buffer = 0,
|
||||
.audio_buffer = SC_TICK_FROM_MS(50),
|
||||
.audio_output_buffer = SC_TICK_FROM_MS(5),
|
||||
.time_limit = 0,
|
||||
#ifdef HAVE_V4L2
|
||||
.v4l2_device = NULL,
|
||||
.v4l2_buffer = 0,
|
||||
#endif
|
||||
#ifdef HAVE_USB
|
||||
.otg = false,
|
||||
#endif
|
||||
@ -52,7 +59,8 @@ const struct scrcpy_options scrcpy_options_default = {
|
||||
.fullscreen = false,
|
||||
.always_on_top = false,
|
||||
.control = true,
|
||||
.display = true,
|
||||
.video_playback = true,
|
||||
.audio_playback = true,
|
||||
.turn_screen_off = false,
|
||||
.key_inject_mode = SC_KEY_INJECT_MODE_MIXED,
|
||||
.window_borderless = false,
|
||||
@ -73,8 +81,9 @@ const struct scrcpy_options scrcpy_options_default = {
|
||||
.cleanup = true,
|
||||
.start_fps_counter = false,
|
||||
.power_on = true,
|
||||
.video = true,
|
||||
.audio = true,
|
||||
.require_audio = false,
|
||||
.list_encoders = false,
|
||||
.list_displays = false,
|
||||
.kill_adb_on_close = false,
|
||||
.list = 0,
|
||||
};
|
||||
|
@ -21,8 +21,20 @@ enum sc_record_format {
|
||||
SC_RECORD_FORMAT_AUTO,
|
||||
SC_RECORD_FORMAT_MP4,
|
||||
SC_RECORD_FORMAT_MKV,
|
||||
SC_RECORD_FORMAT_M4A,
|
||||
SC_RECORD_FORMAT_MKA,
|
||||
SC_RECORD_FORMAT_OPUS,
|
||||
SC_RECORD_FORMAT_AAC,
|
||||
};
|
||||
|
||||
static inline bool
|
||||
sc_record_format_is_audio_only(enum sc_record_format fmt) {
|
||||
return fmt == SC_RECORD_FORMAT_M4A
|
||||
|| fmt == SC_RECORD_FORMAT_MKA
|
||||
|| fmt == SC_RECORD_FORMAT_OPUS
|
||||
|| fmt == SC_RECORD_FORMAT_AAC;
|
||||
}
|
||||
|
||||
enum sc_codec {
|
||||
SC_CODEC_H264,
|
||||
SC_CODEC_H265,
|
||||
@ -32,6 +44,24 @@ enum sc_codec {
|
||||
SC_CODEC_RAW,
|
||||
};
|
||||
|
||||
enum sc_video_source {
|
||||
SC_VIDEO_SOURCE_DISPLAY,
|
||||
SC_VIDEO_SOURCE_CAMERA,
|
||||
};
|
||||
|
||||
enum sc_audio_source {
|
||||
SC_AUDIO_SOURCE_AUTO, // OUTPUT for video DISPLAY, MIC for video CAMERA
|
||||
SC_AUDIO_SOURCE_OUTPUT,
|
||||
SC_AUDIO_SOURCE_MIC,
|
||||
};
|
||||
|
||||
enum sc_camera_facing {
|
||||
SC_CAMERA_FACING_ANY,
|
||||
SC_CAMERA_FACING_FRONT,
|
||||
SC_CAMERA_FACING_BACK,
|
||||
SC_CAMERA_FACING_EXTERNAL,
|
||||
};
|
||||
|
||||
enum sc_lock_video_orientation {
|
||||
SC_LOCK_VIDEO_ORIENTATION_UNLOCKED = -1,
|
||||
// lock the current orientation when scrcpy starts
|
||||
@ -100,15 +130,18 @@ struct scrcpy_options {
|
||||
const char *audio_codec_options;
|
||||
const char *video_encoder;
|
||||
const char *audio_encoder;
|
||||
#ifdef HAVE_V4L2
|
||||
const char *v4l2_device;
|
||||
#endif
|
||||
const char *camera_id;
|
||||
const char *camera_size;
|
||||
const char *camera_ar;
|
||||
enum sc_log_level log_level;
|
||||
enum sc_codec video_codec;
|
||||
enum sc_codec audio_codec;
|
||||
enum sc_video_source video_source;
|
||||
enum sc_audio_source audio_source;
|
||||
enum sc_record_format record_format;
|
||||
enum sc_keyboard_input_mode keyboard_input_mode;
|
||||
enum sc_mouse_input_mode mouse_input_mode;
|
||||
enum sc_camera_facing camera_facing;
|
||||
struct sc_port_range port_range;
|
||||
uint32_t tunnel_host;
|
||||
uint16_t tunnel_port;
|
||||
@ -125,9 +158,13 @@ struct scrcpy_options {
|
||||
uint16_t window_height;
|
||||
uint32_t display_id;
|
||||
sc_tick display_buffer;
|
||||
sc_tick v4l2_buffer;
|
||||
sc_tick audio_buffer;
|
||||
sc_tick audio_output_buffer;
|
||||
sc_tick time_limit;
|
||||
#ifdef HAVE_V4L2
|
||||
const char *v4l2_device;
|
||||
sc_tick v4l2_buffer;
|
||||
#endif
|
||||
#ifdef HAVE_USB
|
||||
bool otg;
|
||||
#endif
|
||||
@ -135,7 +172,8 @@ struct scrcpy_options {
|
||||
bool fullscreen;
|
||||
bool always_on_top;
|
||||
bool control;
|
||||
bool display;
|
||||
bool video_playback;
|
||||
bool audio_playback;
|
||||
bool turn_screen_off;
|
||||
enum sc_key_inject_mode key_inject_mode;
|
||||
bool window_borderless;
|
||||
@ -156,10 +194,15 @@ struct scrcpy_options {
|
||||
bool cleanup;
|
||||
bool start_fps_counter;
|
||||
bool power_on;
|
||||
bool video;
|
||||
bool audio;
|
||||
bool require_audio;
|
||||
bool list_encoders;
|
||||
bool list_displays;
|
||||
bool kill_adb_on_close;
|
||||
#define SC_OPTION_LIST_ENCODERS 0x1
|
||||
#define SC_OPTION_LIST_DISPLAYS 0x2
|
||||
#define SC_OPTION_LIST_CAMERAS 0x4
|
||||
#define SC_OPTION_LIST_CAMERA_SIZES 0x8
|
||||
uint8_t list;
|
||||
};
|
||||
|
||||
extern const struct scrcpy_options scrcpy_options_default;
|
||||
|
@ -60,9 +60,17 @@ sc_recorder_queue_clear(struct sc_recorder_queue *queue) {
|
||||
static const char *
|
||||
sc_recorder_get_format_name(enum sc_record_format format) {
|
||||
switch (format) {
|
||||
case SC_RECORD_FORMAT_MP4: return "mp4";
|
||||
case SC_RECORD_FORMAT_MKV: return "matroska";
|
||||
default: return NULL;
|
||||
case SC_RECORD_FORMAT_MP4:
|
||||
case SC_RECORD_FORMAT_M4A:
|
||||
case SC_RECORD_FORMAT_AAC:
|
||||
return "mp4";
|
||||
case SC_RECORD_FORMAT_MKV:
|
||||
case SC_RECORD_FORMAT_MKA:
|
||||
return "matroska";
|
||||
case SC_RECORD_FORMAT_OPUS:
|
||||
return "opus";
|
||||
default:
|
||||
return NULL;
|
||||
}
|
||||
}
|
||||
|
||||
@ -88,23 +96,30 @@ sc_recorder_rescale_packet(AVStream *stream, AVPacket *packet) {
|
||||
}
|
||||
|
||||
static bool
|
||||
sc_recorder_write_stream(struct sc_recorder *recorder, int stream_index,
|
||||
AVPacket *packet) {
|
||||
AVStream *stream = recorder->ctx->streams[stream_index];
|
||||
sc_recorder_write_stream(struct sc_recorder *recorder,
|
||||
struct sc_recorder_stream *st, AVPacket *packet) {
|
||||
AVStream *stream = recorder->ctx->streams[st->index];
|
||||
sc_recorder_rescale_packet(stream, packet);
|
||||
if (st->last_pts != AV_NOPTS_VALUE && packet->pts <= st->last_pts) {
|
||||
LOGW("Fixing PTS non monotonically increasing in stream %d "
|
||||
"(%" PRIi64 " >= %" PRIi64 ")",
|
||||
st->index, st->last_pts, packet->pts);
|
||||
packet->pts = ++st->last_pts;
|
||||
packet->dts = packet->pts;
|
||||
} else {
|
||||
st->last_pts = packet->pts;
|
||||
}
|
||||
return av_interleaved_write_frame(recorder->ctx, packet) >= 0;
|
||||
}
|
||||
|
||||
static inline bool
|
||||
sc_recorder_write_video(struct sc_recorder *recorder, AVPacket *packet) {
|
||||
return sc_recorder_write_stream(recorder, recorder->video_stream_index,
|
||||
packet);
|
||||
return sc_recorder_write_stream(recorder, &recorder->video_stream, packet);
|
||||
}
|
||||
|
||||
static inline bool
|
||||
sc_recorder_write_audio(struct sc_recorder *recorder, AVPacket *packet) {
|
||||
return sc_recorder_write_stream(recorder, recorder->audio_stream_index,
|
||||
packet);
|
||||
return sc_recorder_write_stream(recorder, &recorder->audio_stream, packet);
|
||||
}
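Aside (illustrative sketch, not project code): the new sc_recorder_stream keeps the last written PTS per stream so that a packet whose PTS does not strictly increase is nudged forward instead of being rejected by the muxer, as in sc_recorder_write_stream() above. The same rule in standalone form, with AV_NOPTS_VALUE replaced by a stand-in:

#include <inttypes.h>
#include <stdio.h>

#define NOPTS INT64_MIN // stand-in for AV_NOPTS_VALUE

struct stream_state { int64_t last_pts; };

// A packet whose PTS is not strictly increasing is moved to last_pts + 1,
// and its DTS is kept equal to its PTS
static void fix_pts(struct stream_state *st, int64_t *pts, int64_t *dts) {
    if (st->last_pts != NOPTS && *pts <= st->last_pts) {
        *pts = ++st->last_pts;
        *dts = *pts;
    } else {
        st->last_pts = *pts;
    }
}

int main(void) {
    struct stream_state st = {NOPTS};
    int64_t inputs[] = {100, 200, 200, 150, 300};
    for (size_t i = 0; i < 5; ++i) {
        int64_t pts = inputs[i], dts = inputs[i];
        fix_pts(&st, &pts, &dts);
        printf("%" PRIi64 " -> %" PRIi64 "\n", inputs[i], pts);
    }
    // Prints: 100->100, 200->200, 200->201, 150->202, 300->300
    return 0;
}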
|
||||
|
||||
static bool
|
||||
@ -152,7 +167,7 @@ sc_recorder_close_output_file(struct sc_recorder *recorder) {
|
||||
|
||||
static inline bool
|
||||
sc_recorder_has_empty_queues(struct sc_recorder *recorder) {
|
||||
if (sc_vecdeque_is_empty(&recorder->video_queue)) {
|
||||
if (recorder->video && sc_vecdeque_is_empty(&recorder->video_queue)) {
|
||||
// The video queue is empty
|
||||
return true;
|
||||
}
|
||||
@ -170,13 +185,14 @@ static bool
|
||||
sc_recorder_process_header(struct sc_recorder *recorder) {
|
||||
sc_mutex_lock(&recorder->mutex);
|
||||
|
||||
while (!recorder->stopped && (!recorder->video_init
|
||||
|| !recorder->audio_init
|
||||
|| sc_recorder_has_empty_queues(recorder))) {
|
||||
sc_cond_wait(&recorder->stream_cond, &recorder->mutex);
|
||||
while (!recorder->stopped &&
|
||||
((recorder->video && !recorder->video_init)
|
||||
|| (recorder->audio && !recorder->audio_init)
|
||||
|| sc_recorder_has_empty_queues(recorder))) {
|
||||
sc_cond_wait(&recorder->cond, &recorder->mutex);
|
||||
}
|
||||
|
||||
if (sc_vecdeque_is_empty(&recorder->video_queue)) {
|
||||
if (recorder->video && sc_vecdeque_is_empty(&recorder->video_queue)) {
|
||||
assert(recorder->stopped);
|
||||
// If the recorder is stopped, don't process anything if there are not
|
||||
// at least video packets
|
||||
@ -184,7 +200,11 @@ sc_recorder_process_header(struct sc_recorder *recorder) {
|
||||
return false;
|
||||
}
|
||||
|
||||
AVPacket *video_pkt = sc_vecdeque_pop(&recorder->video_queue);
|
||||
AVPacket *video_pkt = NULL;
|
||||
if (!sc_vecdeque_is_empty(&recorder->video_queue)) {
|
||||
assert(recorder->video);
|
||||
video_pkt = sc_vecdeque_pop(&recorder->video_queue);
|
||||
}
|
||||
|
||||
AVPacket *audio_pkt = NULL;
|
||||
if (!sc_vecdeque_is_empty(&recorder->audio_queue)) {
|
||||
@ -196,17 +216,19 @@ sc_recorder_process_header(struct sc_recorder *recorder) {
|
||||
|
||||
int ret = false;
|
||||
|
||||
if (video_pkt->pts != AV_NOPTS_VALUE) {
|
||||
LOGE("The first video packet is not a config packet");
|
||||
goto end;
|
||||
}
|
||||
if (video_pkt) {
|
||||
if (video_pkt->pts != AV_NOPTS_VALUE) {
|
||||
LOGE("The first video packet is not a config packet");
|
||||
goto end;
|
||||
}
|
||||
|
||||
assert(recorder->video_stream_index >= 0);
|
||||
AVStream *video_stream =
|
||||
recorder->ctx->streams[recorder->video_stream_index];
|
||||
bool ok = sc_recorder_set_extradata(video_stream, video_pkt);
|
||||
if (!ok) {
|
||||
goto end;
|
||||
assert(recorder->video_stream.index >= 0);
|
||||
AVStream *video_stream =
|
||||
recorder->ctx->streams[recorder->video_stream.index];
|
||||
bool ok = sc_recorder_set_extradata(video_stream, video_pkt);
|
||||
if (!ok) {
|
||||
goto end;
|
||||
}
|
||||
}
|
||||
|
||||
if (audio_pkt) {
|
||||
@ -215,16 +237,16 @@ sc_recorder_process_header(struct sc_recorder *recorder) {
|
||||
goto end;
|
||||
}
|
||||
|
||||
assert(recorder->audio_stream_index >= 0);
|
||||
assert(recorder->audio_stream.index >= 0);
|
||||
AVStream *audio_stream =
|
||||
recorder->ctx->streams[recorder->audio_stream_index];
|
||||
ok = sc_recorder_set_extradata(audio_stream, audio_pkt);
|
||||
recorder->ctx->streams[recorder->audio_stream.index];
|
||||
bool ok = sc_recorder_set_extradata(audio_stream, audio_pkt);
|
||||
if (!ok) {
|
||||
goto end;
|
||||
}
|
||||
}
|
||||
|
||||
ok = avformat_write_header(recorder->ctx, NULL) >= 0;
|
||||
bool ok = avformat_write_header(recorder->ctx, NULL) >= 0;
|
||||
if (!ok) {
|
||||
LOGE("Failed to write header to %s", recorder->filename);
|
||||
goto end;
|
||||
@ -233,7 +255,9 @@ sc_recorder_process_header(struct sc_recorder *recorder) {
|
||||
ret = true;
|
||||
|
||||
end:
|
||||
av_packet_free(&video_pkt);
|
||||
if (video_pkt) {
|
||||
av_packet_free(&video_pkt);
|
||||
}
|
||||
if (audio_pkt) {
|
||||
av_packet_free(&audio_pkt);
|
||||
}
|
||||
@ -263,7 +287,8 @@ sc_recorder_process_packets(struct sc_recorder *recorder) {
|
||||
sc_mutex_lock(&recorder->mutex);
|
||||
|
||||
while (!recorder->stopped) {
|
||||
if (!video_pkt && !sc_vecdeque_is_empty(&recorder->video_queue)) {
|
||||
if (recorder->video && !video_pkt &&
|
||||
!sc_vecdeque_is_empty(&recorder->video_queue)) {
|
||||
// A new packet may be assigned to video_pkt and be processed
|
||||
break;
|
||||
}
|
||||
@ -272,12 +297,17 @@ sc_recorder_process_packets(struct sc_recorder *recorder) {
|
||||
// A new packet may be assigned to audio_pkt and be processed
|
||||
break;
|
||||
}
|
||||
sc_cond_wait(&recorder->queue_cond, &recorder->mutex);
|
||||
sc_cond_wait(&recorder->cond, &recorder->mutex);
|
||||
}
|
||||
|
||||
// If stopped is set, continue to process the remaining events (to
|
||||
// finish the recording) before actually stopping.
|
||||
|
||||
// If there is no video, then the video_queue will remain empty forever
|
||||
// and video_pkt will always be NULL.
|
||||
assert(recorder->video || (!video_pkt
|
||||
&& sc_vecdeque_is_empty(&recorder->video_queue)));
|
||||
|
||||
// If there is no audio, then the audio_queue will remain empty forever
|
||||
// and audio_pkt will always be NULL.
|
||||
assert(recorder->audio || (!audio_pkt
|
||||
@ -319,6 +349,9 @@ sc_recorder_process_packets(struct sc_recorder *recorder) {
|
||||
if (!recorder->audio) {
|
||||
assert(video_pkt);
|
||||
pts_origin = video_pkt->pts;
|
||||
} else if (!recorder->video) {
|
||||
assert(audio_pkt);
|
||||
pts_origin = audio_pkt->pts;
|
||||
} else if (video_pkt && audio_pkt) {
|
||||
pts_origin = MIN(video_pkt->pts, audio_pkt->pts);
|
||||
} else if (recorder->stopped) {
|
||||
@ -479,10 +512,10 @@ sc_recorder_video_packet_sink_open(struct sc_packet_sink *sink,
|
||||
return false;
|
||||
}
|
||||
|
||||
recorder->video_stream_index = stream->index;
|
||||
recorder->video_stream.index = stream->index;
|
||||
|
||||
recorder->video_init = true;
|
||||
sc_cond_signal(&recorder->stream_cond);
|
||||
sc_cond_signal(&recorder->cond);
|
||||
sc_mutex_unlock(&recorder->mutex);
|
||||
|
||||
return true;
|
||||
@ -497,7 +530,7 @@ sc_recorder_video_packet_sink_close(struct sc_packet_sink *sink) {
|
||||
sc_mutex_lock(&recorder->mutex);
|
||||
// EOS also stops the recorder
|
||||
recorder->stopped = true;
|
||||
sc_cond_signal(&recorder->queue_cond);
|
||||
sc_cond_signal(&recorder->cond);
|
||||
sc_mutex_unlock(&recorder->mutex);
|
||||
}
|
||||
|
||||
@ -523,7 +556,7 @@ sc_recorder_video_packet_sink_push(struct sc_packet_sink *sink,
|
||||
return false;
|
||||
}
|
||||
|
||||
rec->stream_index = recorder->video_stream_index;
|
||||
rec->stream_index = recorder->video_stream.index;
|
||||
|
||||
bool ok = sc_vecdeque_push(&recorder->video_queue, rec);
|
||||
if (!ok) {
|
||||
@ -532,7 +565,7 @@ sc_recorder_video_packet_sink_push(struct sc_packet_sink *sink,
|
||||
return false;
|
||||
}
|
||||
|
||||
sc_cond_signal(&recorder->queue_cond);
|
||||
sc_cond_signal(&recorder->cond);
|
||||
|
||||
sc_mutex_unlock(&recorder->mutex);
|
||||
return true;
|
||||
@ -560,10 +593,10 @@ sc_recorder_audio_packet_sink_open(struct sc_packet_sink *sink,
|
||||
return false;
|
||||
}
|
||||
|
||||
recorder->audio_stream_index = stream->index;
|
||||
recorder->audio_stream.index = stream->index;
|
||||
|
||||
recorder->audio_init = true;
|
||||
sc_cond_signal(&recorder->stream_cond);
|
||||
sc_cond_signal(&recorder->cond);
|
||||
sc_mutex_unlock(&recorder->mutex);
|
||||
|
||||
return true;
|
||||
@ -579,7 +612,7 @@ sc_recorder_audio_packet_sink_close(struct sc_packet_sink *sink) {
|
||||
sc_mutex_lock(&recorder->mutex);
|
||||
// EOS also stops the recorder
|
||||
recorder->stopped = true;
|
||||
sc_cond_signal(&recorder->queue_cond);
|
||||
sc_cond_signal(&recorder->cond);
|
||||
sc_mutex_unlock(&recorder->mutex);
|
||||
}
|
||||
|
||||
@ -606,7 +639,7 @@ sc_recorder_audio_packet_sink_push(struct sc_packet_sink *sink,
|
||||
return false;
|
||||
}
|
||||
|
||||
rec->stream_index = recorder->audio_stream_index;
|
||||
rec->stream_index = recorder->audio_stream.index;
|
||||
|
||||
bool ok = sc_vecdeque_push(&recorder->audio_queue, rec);
|
||||
if (!ok) {
|
||||
@ -615,7 +648,7 @@ sc_recorder_audio_packet_sink_push(struct sc_packet_sink *sink,
|
||||
return false;
|
||||
}
|
||||
|
||||
sc_cond_signal(&recorder->queue_cond);
|
||||
sc_cond_signal(&recorder->cond);
|
||||
|
||||
sc_mutex_unlock(&recorder->mutex);
|
||||
return true;
|
||||
@ -633,13 +666,19 @@ sc_recorder_audio_packet_sink_disable(struct sc_packet_sink *sink) {
|
||||
sc_mutex_lock(&recorder->mutex);
|
||||
recorder->audio = false;
|
||||
recorder->audio_init = true;
|
||||
sc_cond_signal(&recorder->stream_cond);
|
||||
sc_cond_signal(&recorder->cond);
|
||||
sc_mutex_unlock(&recorder->mutex);
|
||||
}
|
||||
|
||||
static void
|
||||
sc_recorder_stream_init(struct sc_recorder_stream *stream) {
|
||||
stream->index = -1;
|
||||
stream->last_pts = AV_NOPTS_VALUE;
|
||||
}
|
||||
|
||||
bool
|
||||
sc_recorder_init(struct sc_recorder *recorder, const char *filename,
|
||||
enum sc_record_format format, bool audio,
|
||||
enum sc_record_format format, bool video, bool audio,
|
||||
const struct sc_recorder_callbacks *cbs, void *cbs_userdata) {
|
||||
recorder->filename = strdup(filename);
|
||||
if (!recorder->filename) {
|
||||
@ -652,16 +691,13 @@ sc_recorder_init(struct sc_recorder *recorder, const char *filename,
|
||||
goto error_free_filename;
|
||||
}
|
||||
|
||||
ok = sc_cond_init(&recorder->queue_cond);
|
||||
ok = sc_cond_init(&recorder->cond);
|
||||
if (!ok) {
|
||||
goto error_mutex_destroy;
|
||||
}
|
||||
|
||||
ok = sc_cond_init(&recorder->stream_cond);
|
||||
if (!ok) {
|
||||
goto error_queue_cond_destroy;
|
||||
}
|
||||
|
||||
assert(video || audio);
|
||||
recorder->video = video;
|
||||
recorder->audio = audio;
|
||||
|
||||
sc_vecdeque_init(&recorder->video_queue);
|
||||
@ -671,8 +707,8 @@ sc_recorder_init(struct sc_recorder *recorder, const char *filename,
|
||||
recorder->video_init = false;
|
||||
recorder->audio_init = false;
|
||||
|
||||
recorder->video_stream_index = -1;
|
||||
recorder->audio_stream_index = -1;
|
||||
sc_recorder_stream_init(&recorder->video_stream);
|
||||
sc_recorder_stream_init(&recorder->audio_stream);
|
||||
|
||||
recorder->format = format;
|
||||
|
||||
@ -680,13 +716,15 @@ sc_recorder_init(struct sc_recorder *recorder, const char *filename,
|
||||
recorder->cbs = cbs;
|
||||
recorder->cbs_userdata = cbs_userdata;
|
||||
|
||||
static const struct sc_packet_sink_ops video_ops = {
|
||||
.open = sc_recorder_video_packet_sink_open,
|
||||
.close = sc_recorder_video_packet_sink_close,
|
||||
.push = sc_recorder_video_packet_sink_push,
|
||||
};
|
||||
if (video) {
|
||||
static const struct sc_packet_sink_ops video_ops = {
|
||||
.open = sc_recorder_video_packet_sink_open,
|
||||
.close = sc_recorder_video_packet_sink_close,
|
||||
.push = sc_recorder_video_packet_sink_push,
|
||||
};
|
||||
|
||||
recorder->video_packet_sink.ops = &video_ops;
|
||||
recorder->video_packet_sink.ops = &video_ops;
|
||||
}
|
||||
|
||||
if (audio) {
|
||||
static const struct sc_packet_sink_ops audio_ops = {
|
||||
@ -701,8 +739,6 @@ sc_recorder_init(struct sc_recorder *recorder, const char *filename,
|
||||
|
||||
return true;
|
||||
|
||||
error_queue_cond_destroy:
|
||||
sc_cond_destroy(&recorder->queue_cond);
|
||||
error_mutex_destroy:
|
||||
sc_mutex_destroy(&recorder->mutex);
|
||||
error_free_filename:
|
||||
@ -727,8 +763,7 @@ void
|
||||
sc_recorder_stop(struct sc_recorder *recorder) {
|
||||
sc_mutex_lock(&recorder->mutex);
|
||||
recorder->stopped = true;
|
||||
sc_cond_signal(&recorder->queue_cond);
|
||||
sc_cond_signal(&recorder->stream_cond);
|
||||
sc_cond_signal(&recorder->cond);
|
||||
sc_mutex_unlock(&recorder->mutex);
|
||||
}
|
||||
|
||||
@ -739,8 +774,7 @@ sc_recorder_join(struct sc_recorder *recorder) {
|
||||
|
||||
void
|
||||
sc_recorder_destroy(struct sc_recorder *recorder) {
|
||||
sc_cond_destroy(&recorder->stream_cond);
|
||||
sc_cond_destroy(&recorder->queue_cond);
|
||||
sc_cond_destroy(&recorder->cond);
|
||||
sc_mutex_destroy(&recorder->mutex);
|
||||
free(recorder->filename);
|
||||
}
|
||||
|
@ -14,6 +14,11 @@
|
||||
|
||||
struct sc_recorder_queue SC_VECDEQUE(AVPacket *);
|
||||
|
||||
struct sc_recorder_stream {
|
||||
int index;
|
||||
int64_t last_pts;
|
||||
};
|
||||
|
||||
struct sc_recorder {
|
||||
struct sc_packet_sink video_packet_sink;
|
||||
struct sc_packet_sink audio_packet_sink;
|
||||
@ -27,6 +32,7 @@ struct sc_recorder {
|
||||
* may access it without data races.
|
||||
*/
|
||||
bool audio;
|
||||
bool video;
|
||||
|
||||
char *filename;
|
||||
enum sc_record_format format;
|
||||
@ -34,19 +40,18 @@ struct sc_recorder {
|
||||
|
||||
sc_thread thread;
|
||||
sc_mutex mutex;
|
||||
sc_cond queue_cond;
|
||||
sc_cond cond;
|
||||
// set on sc_recorder_stop(), packet_sink close or recording failure
|
||||
bool stopped;
|
||||
struct sc_recorder_queue video_queue;
|
||||
struct sc_recorder_queue audio_queue;
|
||||
|
||||
// wake up the recorder thread once the video or audio codec is known
|
||||
sc_cond stream_cond;
|
||||
bool video_init;
|
||||
bool audio_init;
|
||||
|
||||
int video_stream_index;
|
||||
int audio_stream_index;
|
||||
struct sc_recorder_stream video_stream;
|
||||
struct sc_recorder_stream audio_stream;
|
||||
|
||||
const struct sc_recorder_callbacks *cbs;
|
||||
void *cbs_userdata;
|
||||
@ -59,7 +64,7 @@ struct sc_recorder_callbacks {
|
||||
|
||||
bool
|
||||
sc_recorder_init(struct sc_recorder *recorder, const char *filename,
|
||||
enum sc_record_format format, bool audio,
|
||||
enum sc_record_format format, bool video, bool audio,
|
||||
const struct sc_recorder_callbacks *cbs, void *cbs_userdata);
|
||||
|
||||
bool
|
||||
|
178
app/src/scrcpy.c
@ -35,6 +35,7 @@
|
||||
#include "util/log.h"
|
||||
#include "util/net.h"
|
||||
#include "util/rand.h"
|
||||
#include "util/timeout.h"
|
||||
#ifdef HAVE_V4L2
|
||||
# include "v4l2_sink.h"
|
||||
#endif
|
||||
@ -73,6 +74,7 @@ struct scrcpy {
|
||||
struct sc_hid_mouse mouse_hid;
|
||||
#endif
|
||||
};
|
||||
struct sc_timeout timeout;
|
||||
};
|
||||
|
||||
static inline void
|
||||
@ -88,7 +90,7 @@ push_event(uint32_t type, const char *name) {
|
||||
#define PUSH_EVENT(TYPE) push_event(TYPE, # TYPE)
|
||||
|
||||
#ifdef _WIN32
|
||||
BOOL WINAPI windows_ctrl_handler(DWORD ctrl_type) {
|
||||
static BOOL WINAPI windows_ctrl_handler(DWORD ctrl_type) {
|
||||
if (ctrl_type == CTRL_C_EVENT) {
|
||||
PUSH_EVENT(SDL_QUIT);
|
||||
return TRUE;
|
||||
@ -137,7 +139,7 @@ sdl_set_hints(const char *render_driver) {
|
||||
}
|
||||
|
||||
static void
|
||||
sdl_configure(bool display, bool disable_screensaver) {
|
||||
sdl_configure(bool video_playback, bool disable_screensaver) {
|
||||
#ifdef _WIN32
|
||||
// Clean up properly on Ctrl+C on Windows
|
||||
bool ok = SetConsoleCtrlHandler(windows_ctrl_handler, TRUE);
|
||||
@ -146,7 +148,7 @@ sdl_configure(bool display, bool disable_screensaver) {
|
||||
}
|
||||
#endif // _WIN32
|
||||
|
||||
if (!display) {
|
||||
if (!video_playback) {
|
||||
return;
|
||||
}
|
||||
|
||||
@ -171,6 +173,9 @@ event_loop(struct scrcpy *s) {
|
||||
case SC_EVENT_RECORDER_ERROR:
|
||||
LOGE("Recorder error");
|
||||
return SCRCPY_EXIT_FAILURE;
|
||||
case SC_EVENT_TIME_LIMIT_REACHED:
|
||||
LOGI("Time limit reached");
|
||||
return SCRCPY_EXIT_SUCCESS;
|
||||
case SDL_QUIT:
|
||||
LOGD("User requested to quit");
|
||||
return SCRCPY_EXIT_SUCCESS;
|
||||
@ -247,7 +252,9 @@ sc_audio_demuxer_on_ended(struct sc_demuxer *demuxer,
|
||||
|
||||
// Contrary to the video demuxer, keep mirroring if only the audio fails
|
||||
// (unless --require-audio is set).
|
||||
if (status == SC_DEMUXER_STATUS_ERROR
|
||||
if (status == SC_DEMUXER_STATUS_EOS) {
|
||||
PUSH_EVENT(SC_EVENT_DEVICE_DISCONNECTED);
|
||||
} else if (status == SC_DEMUXER_STATUS_ERROR
|
||||
|| (status == SC_DEMUXER_STATUS_DISABLED
|
||||
&& options->require_audio)) {
|
||||
PUSH_EVENT(SC_EVENT_DEMUXER_ERROR);
|
||||
@ -280,9 +287,17 @@ sc_server_on_disconnected(struct sc_server *server, void *userdata) {
|
||||
// event
|
||||
}
|
||||
|
||||
static void
|
||||
sc_timeout_on_timeout(struct sc_timeout *timeout, void *userdata) {
|
||||
(void) timeout;
|
||||
(void) userdata;
|
||||
|
||||
PUSH_EVENT(SC_EVENT_TIME_LIMIT_REACHED);
|
||||
}
|
||||
|
||||
// Generate a scrcpy id to differentiate multiple running scrcpy instances
|
||||
static uint32_t
|
||||
scrcpy_generate_scid() {
|
||||
scrcpy_generate_scid(void) {
|
||||
struct sc_rand rand;
|
||||
sc_rand_init(&rand);
|
||||
// Only use 31 bits to avoid issues with signed values on the Java-side
|
||||
@ -321,6 +336,8 @@ scrcpy(struct scrcpy_options *options) {
bool controller_initialized = false;
bool controller_started = false;
bool screen_initialized = false;
bool timeout_initialized = false;
bool timeout_started = false;

struct sc_acksync *acksync = NULL;

@ -334,6 +351,9 @@ scrcpy(struct scrcpy_options *options) {
.log_level = options->log_level,
.video_codec = options->video_codec,
.audio_codec = options->audio_codec,
.video_source = options->video_source,
.audio_source = options->audio_source,
.camera_facing = options->camera_facing,
.crop = options->crop,
.port_range = options->port_range,
.tunnel_host = options->tunnel_host,
@ -345,6 +365,7 @@ scrcpy(struct scrcpy_options *options) {
.lock_video_orientation = options->lock_video_orientation,
.control = options->control,
.display_id = options->display_id,
.video = options->video,
.audio = options->audio,
.show_touches = options->show_touches,
.stay_awake = options->stay_awake,
@ -352,6 +373,9 @@ scrcpy(struct scrcpy_options *options) {
.audio_codec_options = options->audio_codec_options,
.video_encoder = options->video_encoder,
.audio_encoder = options->audio_encoder,
.camera_id = options->camera_id,
.camera_size = options->camera_size,
.camera_ar = options->camera_ar,
.force_adb_forward = options->force_adb_forward,
.power_off_on_close = options->power_off_on_close,
.clipboard_autosync = options->clipboard_autosync,
@ -360,8 +384,8 @@ scrcpy(struct scrcpy_options *options) {
.tcpip_dst = options->tcpip_dst,
.cleanup = options->cleanup,
.power_on = options->power_on,
.list_encoders = options->list_encoders,
.list_displays = options->list_displays,
.kill_adb_on_close = options->kill_adb_on_close,
.list = options->list,
};

static const struct sc_server_callbacks cbs = {
@ -379,30 +403,32 @@ scrcpy(struct scrcpy_options *options) {

server_started = true;

if (options->list_encoders || options->list_displays) {
if (options->list) {
bool ok = await_for_server(NULL);
ret = ok ? SCRCPY_EXIT_SUCCESS : SCRCPY_EXIT_FAILURE;
goto end;
}

if (options->display) {
sdl_set_hints(options->render_driver);
}
// playback implies capture
assert(!options->video_playback || options->video);
assert(!options->audio_playback || options->audio);

// Initialize SDL video in addition if display is enabled
if (options->display) {
if (options->video_playback) {
sdl_set_hints(options->render_driver);
if (SDL_Init(SDL_INIT_VIDEO)) {
LOGE("Could not initialize SDL video: %s", SDL_GetError());
goto end;
}
}

if (options->audio && SDL_Init(SDL_INIT_AUDIO)) {
if (options->audio_playback) {
if (SDL_Init(SDL_INIT_AUDIO)) {
LOGE("Could not initialize SDL audio: %s", SDL_GetError());
goto end;
}
}

sdl_configure(options->display, options->disable_screensaver);
sdl_configure(options->video_playback, options->disable_screensaver);

// Await for server without blocking Ctrl+C handling
bool connected;
@ -428,7 +454,7 @@ scrcpy(struct scrcpy_options *options) {

struct sc_file_pusher *fp = NULL;

if (options->display && options->control) {
if (options->video_playback && options->control) {
if (!sc_file_pusher_init(&s->file_pusher, serial,
options->push_target)) {
goto end;
@ -437,11 +463,13 @@ scrcpy(struct scrcpy_options *options) {
file_pusher_initialized = true;
}

static const struct sc_demuxer_callbacks video_demuxer_cbs = {
.on_ended = sc_video_demuxer_on_ended,
};
sc_demuxer_init(&s->video_demuxer, "video", s->server.video_socket,
&video_demuxer_cbs, NULL);
if (options->video) {
static const struct sc_demuxer_callbacks video_demuxer_cbs = {
.on_ended = sc_video_demuxer_on_ended,
};
sc_demuxer_init(&s->video_demuxer, "video", s->server.video_socket,
&video_demuxer_cbs, NULL);
}

if (options->audio) {
static const struct sc_demuxer_callbacks audio_demuxer_cbs = {
@ -451,8 +479,8 @@ scrcpy(struct scrcpy_options *options) {
&audio_demuxer_cbs, options);
}

bool needs_video_decoder = options->display;
bool needs_audio_decoder = options->audio && options->display;
bool needs_video_decoder = options->video_playback;
bool needs_audio_decoder = options->audio_playback;
#ifdef HAVE_V4L2
needs_video_decoder |= !!options->v4l2_device;
#endif
@ -472,8 +500,8 @@ scrcpy(struct scrcpy_options *options) {
.on_ended = sc_recorder_on_ended,
};
if (!sc_recorder_init(&s->recorder, options->record_filename,
options->record_format, options->audio,
&recorder_cbs, NULL)) {
options->record_format, options->video,
options->audio, &recorder_cbs, NULL)) {
goto end;
}
recorder_initialized = true;
@ -483,8 +511,10 @@ scrcpy(struct scrcpy_options *options) {
}
recorder_started = true;

sc_packet_source_add_sink(&s->video_demuxer.packet_source,
&s->recorder.video_packet_sink);
if (options->video) {
sc_packet_source_add_sink(&s->video_demuxer.packet_source,
&s->recorder.video_packet_sink);
}
if (options->audio) {
sc_packet_source_add_sink(&s->audio_demuxer.packet_source,
&s->recorder.audio_packet_sink);
@ -630,23 +660,12 @@ aoa_hid_end:
}
controller_started = true;
controller = &s->controller;

if (options->turn_screen_off) {
struct sc_control_msg msg;
msg.type = SC_CONTROL_MSG_TYPE_SET_SCREEN_POWER_MODE;
msg.set_screen_power_mode.mode = SC_SCREEN_POWER_MODE_OFF;

if (!sc_controller_push_msg(&s->controller, &msg)) {
LOGW("Could not request 'set screen power mode'");
}
}

}

// There is a controller if and only if control is enabled
assert(options->control == !!controller);

if (options->display) {
if (options->video_playback) {
const char *window_title =
options->window_title ? options->window_title : info->device_name;

@ -672,11 +691,6 @@ aoa_hid_end:
.start_fps_counter = options->start_fps_counter,
};

if (!sc_screen_init(&s->screen, &screen_params)) {
goto end;
}
screen_initialized = true;

struct sc_frame_source *src = &s->video_decoder.frame_source;
if (options->display_buffer) {
sc_delay_buffer_init(&s->display_buffer, options->display_buffer,
@ -685,14 +699,19 @@ aoa_hid_end:
src = &s->display_buffer.frame_source;
}

sc_frame_source_add_sink(src, &s->screen.frame_sink);

if (options->audio) {
sc_audio_player_init(&s->audio_player, options->audio_buffer,
options->audio_output_buffer);
sc_frame_source_add_sink(&s->audio_decoder.frame_source,
&s->audio_player.frame_sink);
if (!sc_screen_init(&s->screen, &screen_params)) {
goto end;
}
screen_initialized = true;

sc_frame_source_add_sink(src, &s->screen.frame_sink);
}

if (options->audio_playback) {
sc_audio_player_init(&s->audio_player, options->audio_buffer,
options->audio_output_buffer);
sc_frame_source_add_sink(&s->audio_decoder.frame_source,
&s->audio_player.frame_sink);
}

#ifdef HAVE_V4L2
@ -714,12 +733,15 @@ aoa_hid_end:
}
#endif

// now we consumed the header values, the socket receives the video stream
// start the video demuxer
if (!sc_demuxer_start(&s->video_demuxer)) {
goto end;
// Now that the header values have been consumed, the socket(s) will
// receive the stream(s). Start the demuxer(s).

if (options->video) {
if (!sc_demuxer_start(&s->video_demuxer)) {
goto end;
}
video_demuxer_started = true;
}
video_demuxer_started = true;

if (options->audio) {
if (!sc_demuxer_start(&s->audio_demuxer)) {
@ -728,6 +750,39 @@ aoa_hid_end:
audio_demuxer_started = true;
}

// If the device screen is to be turned off, send the control message after
// everything is set up
if (options->control && options->turn_screen_off) {
struct sc_control_msg msg;
msg.type = SC_CONTROL_MSG_TYPE_SET_SCREEN_POWER_MODE;
msg.set_screen_power_mode.mode = SC_SCREEN_POWER_MODE_OFF;

if (!sc_controller_push_msg(&s->controller, &msg)) {
LOGW("Could not request 'set screen power mode'");
}
}

if (options->time_limit) {
bool ok = sc_timeout_init(&s->timeout);
if (!ok) {
goto end;
}

timeout_initialized = true;

sc_tick deadline = sc_tick_now() + options->time_limit;
static const struct sc_timeout_callbacks cbs = {
.on_timeout = sc_timeout_on_timeout,
};

ok = sc_timeout_start(&s->timeout, deadline, &cbs, NULL);
if (!ok) {
goto end;
}

timeout_started = true;
}

ret = event_loop(s);
LOGD("quit...");

@ -736,6 +791,10 @@ aoa_hid_end:
sc_screen_hide_window(&s->screen);

end:
if (timeout_started) {
sc_timeout_stop(&s->timeout);
}

// The demuxer is not stopped explicitly, because it will stop by itself on
// end-of-stream
#ifdef HAVE_USB
@ -771,6 +830,13 @@ end:
sc_server_stop(&s->server);
}

if (timeout_started) {
sc_timeout_join(&s->timeout);
}
if (timeout_initialized) {
sc_timeout_destroy(&s->timeout);
}

// now that the sockets are shutdown, the demuxer and controller are
// interrupted, we can join them
if (video_demuxer_started) {

198
app/src/screen.c
198
app/src/screen.c
@ -56,6 +56,7 @@ static void
set_window_size(struct sc_screen *screen, struct sc_size new_size) {
assert(!screen->fullscreen);
assert(!screen->maximized);
assert(!screen->minimized);
SDL_SetWindowSize(screen->window, new_size.width, new_size.height);
}

@ -239,35 +240,6 @@ sc_screen_update_content_rect(struct sc_screen *screen) {
}
}

static bool
create_texture(struct sc_screen *screen) {
SDL_Renderer *renderer = screen->renderer;
struct sc_size size = screen->frame_size;
SDL_Texture *texture = SDL_CreateTexture(renderer, SDL_PIXELFORMAT_YV12,
SDL_TEXTUREACCESS_STREAMING,
size.width, size.height);
if (!texture) {
LOGE("Could not create texture: %s", SDL_GetError());
return false;
}

if (screen->mipmaps) {
struct sc_opengl *gl = &screen->gl;

SDL_GL_BindTexture(texture, NULL, NULL);

// Enable trilinear filtering for downscaling
gl->TexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER,
GL_LINEAR_MIPMAP_LINEAR);
gl->TexParameterf(GL_TEXTURE_2D, GL_TEXTURE_LOD_BIAS, -1.f);

SDL_GL_UnbindTexture(texture);
}

screen->texture = texture;
return true;
}

// render the texture to the renderer
//
// Set the update_content_rect flag if the window or content size may have
@ -278,35 +250,11 @@ sc_screen_render(struct sc_screen *screen, bool update_content_rect) {
sc_screen_update_content_rect(screen);
}

SDL_RenderClear(screen->renderer);
if (screen->rotation == 0) {
SDL_RenderCopy(screen->renderer, screen->texture, NULL, &screen->rect);
} else {
// rotation in RenderCopyEx() is clockwise, while screen->rotation is
// counterclockwise (to be consistent with --lock-video-orientation)
int cw_rotation = (4 - screen->rotation) % 4;
double angle = 90 * cw_rotation;

SDL_Rect *dstrect = NULL;
SDL_Rect rect;
if (screen->rotation & 1) {
rect.x = screen->rect.x + (screen->rect.w - screen->rect.h) / 2;
rect.y = screen->rect.y + (screen->rect.h - screen->rect.w) / 2;
rect.w = screen->rect.h;
rect.h = screen->rect.w;
dstrect = &rect;
} else {
assert(screen->rotation == 2);
dstrect = &screen->rect;
}

SDL_RenderCopyEx(screen->renderer, screen->texture, NULL, dstrect,
angle, NULL, 0);
}
SDL_RenderPresent(screen->renderer);
enum sc_display_result res =
sc_display_render(&screen->display, &screen->rect, screen->rotation);
(void) res; // any error already logged
}


#if defined(__APPLE__) || defined(__WINDOWS__)
# define CONTINUOUS_RESIZING_WORKAROUND
#endif
@ -412,6 +360,7 @@ sc_screen_init(struct sc_screen *screen,
screen->has_frame = false;
screen->fullscreen = false;
screen->maximized = false;
screen->minimized = false;
screen->mouse_capture_key_pressed = 0;

screen->req.x = params->window_x;
@ -453,46 +402,11 @@ sc_screen_init(struct sc_screen *screen,
goto error_destroy_fps_counter;
}

screen->renderer = SDL_CreateRenderer(screen->window, -1,
SDL_RENDERER_ACCELERATED);
if (!screen->renderer) {
LOGE("Could not create renderer: %s", SDL_GetError());
ok = sc_display_init(&screen->display, screen->window, params->mipmaps);
if (!ok) {
goto error_destroy_window;
}

SDL_RendererInfo renderer_info;
int r = SDL_GetRendererInfo(screen->renderer, &renderer_info);
const char *renderer_name = r ? NULL : renderer_info.name;
LOGI("Renderer: %s", renderer_name ? renderer_name : "(unknown)");

screen->mipmaps = false;

// starts with "opengl"
bool use_opengl = renderer_name && !strncmp(renderer_name, "opengl", 6);
if (use_opengl) {
struct sc_opengl *gl = &screen->gl;
sc_opengl_init(gl);

LOGI("OpenGL version: %s", gl->version);

if (params->mipmaps) {
bool supports_mipmaps =
sc_opengl_version_at_least(gl, 3, 0, /* OpenGL 3.0+ */
2, 0 /* OpenGL ES 2.0+ */);
if (supports_mipmaps) {
LOGI("Trilinear filtering enabled");
screen->mipmaps = true;
} else {
LOGW("Trilinear filtering disabled "
"(OpenGL 3.0+ or ES 2.0+ required)");
}
} else {
LOGI("Trilinear filtering disabled");
}
} else if (params->mipmaps) {
LOGD("Trilinear filtering disabled (not an OpenGL renderer)");
}

SDL_Surface *icon = scrcpy_icon_load();
if (icon) {
SDL_SetWindowIcon(screen->window, icon);
@ -504,7 +418,7 @@ sc_screen_init(struct sc_screen *screen,
screen->frame = av_frame_alloc();
if (!screen->frame) {
LOG_OOM();
goto error_destroy_renderer;
goto error_destroy_display;
}

struct sc_input_manager_params im_params = {
@ -539,8 +453,8 @@ sc_screen_init(struct sc_screen *screen,

return true;

error_destroy_renderer:
SDL_DestroyRenderer(screen->renderer);
error_destroy_display:
sc_display_destroy(&screen->display);
error_destroy_window:
SDL_DestroyWindow(screen->window);
error_destroy_fps_counter:
@ -574,6 +488,7 @@ sc_screen_show_initial_window(struct sc_screen *screen) {
}

SDL_ShowWindow(screen->window);
sc_screen_update_content_rect(screen);
}

void
@ -596,11 +511,8 @@ sc_screen_destroy(struct sc_screen *screen) {
#ifndef NDEBUG
assert(!screen->open);
#endif
sc_display_destroy(&screen->display);
av_frame_free(&screen->frame);
if (screen->texture) {
SDL_DestroyTexture(screen->texture);
}
SDL_DestroyRenderer(screen->renderer);
SDL_DestroyWindow(screen->window);
sc_fps_counter_destroy(&screen->fps_counter);
sc_frame_buffer_destroy(&screen->fb);
@ -622,11 +534,11 @@ resize_for_content(struct sc_screen *screen, struct sc_size old_content_size,

static void
set_content_size(struct sc_screen *screen, struct sc_size new_content_size) {
if (!screen->fullscreen && !screen->maximized) {
if (!screen->fullscreen && !screen->maximized && !screen->minimized) {
resize_for_content(screen, screen->content_size, new_content_size);
} else if (!screen->resize_pending) {
// Store the windowed size to be able to compute the optimal size once
// fullscreen and maximized are disabled
// fullscreen/maximized/minimized are disabled
screen->windowed_content_size = screen->content_size;
screen->resize_pending = true;
}
@ -638,6 +550,7 @@ static void
apply_pending_resize(struct sc_screen *screen) {
assert(!screen->fullscreen);
assert(!screen->maximized);
assert(!screen->minimized);
if (screen->resize_pending) {
resize_for_content(screen, screen->windowed_content_size,
screen->content_size);
@ -667,7 +580,6 @@ static bool
sc_screen_init_size(struct sc_screen *screen) {
// Before first frame
assert(!screen->has_frame);
assert(!screen->texture);

// The requested size is passed via screen->frame_size

@ -675,48 +587,29 @@ sc_screen_init_size(struct sc_screen *screen) {
get_rotated_size(screen->frame_size, screen->rotation);
screen->content_size = content_size;

LOGI("Initial texture: %" PRIu16 "x%" PRIu16,
screen->frame_size.width, screen->frame_size.height);
return create_texture(screen);
enum sc_display_result res =
sc_display_set_texture_size(&screen->display, screen->frame_size);
return res != SC_DISPLAY_RESULT_ERROR;
}

// recreate the texture and resize the window if the frame size has changed
static bool
static enum sc_display_result
prepare_for_frame(struct sc_screen *screen, struct sc_size new_frame_size) {
if (screen->frame_size.width != new_frame_size.width
|| screen->frame_size.height != new_frame_size.height) {
// frame dimension changed, destroy texture
SDL_DestroyTexture(screen->texture);

screen->frame_size = new_frame_size;

struct sc_size new_content_size =
get_rotated_size(new_frame_size, screen->rotation);
set_content_size(screen, new_content_size);

sc_screen_update_content_rect(screen);

LOGI("New texture: %" PRIu16 "x%" PRIu16,
screen->frame_size.width, screen->frame_size.height);
return create_texture(screen);
if (screen->frame_size.width == new_frame_size.width
&& screen->frame_size.height == new_frame_size.height) {
return SC_DISPLAY_RESULT_OK;
}

return true;
}
// frame dimension changed
screen->frame_size = new_frame_size;

// write the frame into the texture
static void
update_texture(struct sc_screen *screen, const AVFrame *frame) {
SDL_UpdateYUVTexture(screen->texture, NULL,
frame->data[0], frame->linesize[0],
frame->data[1], frame->linesize[1],
frame->data[2], frame->linesize[2]);
struct sc_size new_content_size =
get_rotated_size(new_frame_size, screen->rotation);
set_content_size(screen, new_content_size);

if (screen->mipmaps) {
SDL_GL_BindTexture(screen->texture, NULL, NULL);
screen->gl.GenerateMipmap(GL_TEXTURE_2D);
SDL_GL_UnbindTexture(screen->texture);
}
sc_screen_update_content_rect(screen);

return sc_display_set_texture_size(&screen->display, screen->frame_size);
}

static bool
@ -728,10 +621,23 @@ sc_screen_update_frame(struct sc_screen *screen) {
sc_fps_counter_add_rendered_frame(&screen->fps_counter);

struct sc_size new_frame_size = {frame->width, frame->height};
if (!prepare_for_frame(screen, new_frame_size)) {
enum sc_display_result res = prepare_for_frame(screen, new_frame_size);
if (res == SC_DISPLAY_RESULT_ERROR) {
return false;
}
update_texture(screen, frame);
if (res == SC_DISPLAY_RESULT_PENDING) {
// Not an error, but do not continue
return true;
}

res = sc_display_update_texture(&screen->display, frame);
if (res == SC_DISPLAY_RESULT_ERROR) {
return false;
}
if (res == SC_DISPLAY_RESULT_PENDING) {
// Not an error, but do not continue
return true;
}

if (!screen->has_frame) {
screen->has_frame = true;
@ -757,7 +663,7 @@ sc_screen_switch_fullscreen(struct sc_screen *screen) {
}

screen->fullscreen = !screen->fullscreen;
if (!screen->fullscreen && !screen->maximized) {
if (!screen->fullscreen && !screen->maximized && !screen->minimized) {
apply_pending_resize(screen);
}

@ -767,7 +673,7 @@ sc_screen_switch_fullscreen(struct sc_screen *screen) {

void
sc_screen_resize_to_fit(struct sc_screen *screen) {
if (screen->fullscreen || screen->maximized) {
if (screen->fullscreen || screen->maximized || screen->minimized) {
return;
}

@ -791,7 +697,7 @@ sc_screen_resize_to_fit(struct sc_screen *screen) {

void
sc_screen_resize_to_pixel_perfect(struct sc_screen *screen) {
if (screen->fullscreen) {
if (screen->fullscreen || screen->minimized) {
return;
}

@ -812,7 +718,7 @@ sc_screen_is_mouse_capture_key(SDL_Keycode key) {
}

bool
sc_screen_handle_event(struct sc_screen *screen, SDL_Event *event) {
sc_screen_handle_event(struct sc_screen *screen, const SDL_Event *event) {
bool relative_mode = sc_screen_is_relative_mode(screen);

switch (event->type) {
@ -848,6 +754,9 @@ sc_screen_handle_event(struct sc_screen *screen, SDL_Event *event) {
case SDL_WINDOWEVENT_MAXIMIZED:
screen->maximized = true;
break;
case SDL_WINDOWEVENT_MINIMIZED:
screen->minimized = true;
break;
case SDL_WINDOWEVENT_RESTORED:
if (screen->fullscreen) {
// On Windows, in maximized+fullscreen, disabling
@ -858,6 +767,7 @@ sc_screen_handle_event(struct sc_screen *screen, SDL_Event *event) {
break;
}
screen->maximized = false;
screen->minimized = false;
apply_pending_resize(screen);
sc_screen_render(screen, true);
break;
@ -939,6 +849,8 @@ sc_screen_convert_drawable_to_frame_coords(struct sc_screen *screen,
int32_t w = screen->content_size.width;
int32_t h = screen->content_size.height;

// screen->rect must be initialized to avoid a division by zero
assert(screen->rect.w && screen->rect.h);

x = (int64_t) (x - screen->rect.x) * w / screen->rect.w;
y = (int64_t) (y - screen->rect.y) * h / screen->rect.h;

@ -9,6 +9,7 @@

#include "controller.h"
#include "coords.h"
#include "display.h"
#include "fps_counter.h"
#include "frame_buffer.h"
#include "input_manager.h"
@ -24,6 +25,7 @@ struct sc_screen {
bool open; // track the open/close state to assert correct behavior
#endif

struct sc_display display;
struct sc_input_manager im;
struct sc_frame_buffer fb;
struct sc_fps_counter fps_counter;
@ -39,9 +41,6 @@ struct sc_screen {
} req;

SDL_Window *window;
SDL_Renderer *renderer;
SDL_Texture *texture;
struct sc_opengl gl;
struct sc_size frame_size;
struct sc_size content_size; // rotated frame_size

@ -57,7 +56,7 @@ struct sc_screen {
bool has_frame;
bool fullscreen;
bool maximized;
bool mipmaps;
bool minimized;

// To enable/disable mouse capture, a mouse capture key (LALT, LGUI or
// RGUI) must be pressed. This variable tracks the pressed capture key.
@ -137,7 +136,7 @@ sc_screen_set_rotation(struct sc_screen *screen, unsigned rotation);
// react to SDL events
// If this function returns false, scrcpy must exit with an error.
bool
sc_screen_handle_event(struct sc_screen *screen, SDL_Event *event);
sc_screen_handle_event(struct sc_screen *screen, const SDL_Event *event);

// convert point from window coordinates to frame coordinates
// x and y are expressed in pixels

156
app/src/server.c
156
app/src/server.c
@ -76,6 +76,8 @@ sc_server_params_destroy(struct sc_server_params *params) {
free((char *) params->video_encoder);
free((char *) params->audio_encoder);
free((char *) params->tcpip_dst);
free((char *) params->camera_id);
free((char *) params->camera_ar);
}

static bool
@ -86,14 +88,15 @@ sc_server_params_copy(struct sc_server_params *dst,
// The params reference user-allocated memory, so we must copy them to
// handle them from another thread

#define COPY(FIELD) \
#define COPY(FIELD) do { \
dst->FIELD = NULL; \
if (src->FIELD) { \
dst->FIELD = strdup(src->FIELD); \
if (!dst->FIELD) { \
goto error; \
} \
}
} \
} while(0)

COPY(req_serial);
COPY(crop);
@ -102,6 +105,8 @@ sc_server_params_copy(struct sc_server_params *dst,
COPY(video_encoder);
COPY(audio_encoder);
COPY(tcpip_dst);
COPY(camera_id);
COPY(camera_ar);
#undef COPY

return true;
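The switch of `COPY(FIELD)` (and of `ADD_PARAM` further down) to a `do { ... } while(0)` wrapper follows the standard C idiom for multi-statement macros: the expansion behaves as a single statement, so it composes safely with `if`/`else` and requires a terminating semicolon. A minimal standalone illustration (hypothetical macro names, not from the scrcpy sources):

```c
#include <stdio.h>

// Without the wrapper, the two statements break apart when used as an if-body.
#define LOG_TWICE_BAD(msg) puts(msg); puts(msg)

// With do/while(0), the macro expands to exactly one statement.
#define LOG_TWICE_OK(msg) do { puts(msg); puts(msg); } while (0)

int main(void) {
    int verbose = 0;

    if (verbose)
        LOG_TWICE_BAD("hello");  // the second puts() runs unconditionally
                                 // (and adding an else here would not compile)

    if (verbose)
        LOG_TWICE_OK("hello");   // both calls are correctly guarded
    else
        puts("quiet");

    return 0;
}
```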
@ -180,6 +185,20 @@ sc_server_get_codec_name(enum sc_codec codec) {
}
}

static const char *
sc_server_get_camera_facing_name(enum sc_camera_facing camera_facing) {
switch (camera_facing) {
case SC_CAMERA_FACING_FRONT:
return "front";
case SC_CAMERA_FACING_BACK:
return "back";
case SC_CAMERA_FACING_EXTERNAL:
return "external";
default:
return NULL;
}
}

static sc_pid
execute_server(struct sc_server *server,
const struct sc_server_params *params) {
@ -215,23 +234,27 @@ execute_server(struct sc_server *server,
cmd[count++] = SCRCPY_VERSION;

unsigned dyn_idx = count; // from there, the strings are allocated
#define ADD_PARAM(fmt, ...) { \
#define ADD_PARAM(fmt, ...) do { \
char *p; \
if (asprintf(&p, fmt, ## __VA_ARGS__) == -1) { \
goto end; \
} \
cmd[count++] = p; \
}
} while(0)

ADD_PARAM("scid=%08x", params->scid);
ADD_PARAM("log_level=%s", log_level_to_server_string(params->log_level));

if (!params->video) {
ADD_PARAM("video=false");
}
if (params->video_bit_rate) {
ADD_PARAM("video_bit_rate=%" PRIu32, params->video_bit_rate);
}
if (!params->audio) {
ADD_PARAM("audio=false");
} else if (params->audio_bit_rate) {
}
if (params->audio_bit_rate) {
ADD_PARAM("audio_bit_rate=%" PRIu32, params->audio_bit_rate);
}
if (params->video_codec != SC_CODEC_H264) {
@ -242,6 +265,13 @@ execute_server(struct sc_server *server,
ADD_PARAM("audio_codec=%s",
sc_server_get_codec_name(params->audio_codec));
}
if (params->video_source != SC_VIDEO_SOURCE_DISPLAY) {
assert(params->video_source == SC_VIDEO_SOURCE_CAMERA);
ADD_PARAM("video_source=camera");
}
if (params->audio_source == SC_AUDIO_SOURCE_MIC) {
ADD_PARAM("audio_source=mic");
}
if (params->max_size) {
ADD_PARAM("max_size=%" PRIu16, params->max_size);
}
@ -265,6 +295,19 @@ execute_server(struct sc_server *server,
if (params->display_id) {
ADD_PARAM("display_id=%" PRIu32, params->display_id);
}
if (params->camera_id) {
ADD_PARAM("camera_id=%s", params->camera_id);
}
if (params->camera_size) {
ADD_PARAM("camera_size=%s", params->camera_size);
}
if (params->camera_facing != SC_CAMERA_FACING_ANY) {
ADD_PARAM("camera_facing=%s",
sc_server_get_camera_facing_name(params->camera_facing));
}
if (params->camera_ar) {
ADD_PARAM("camera_ar=%s", params->camera_ar);
}
if (params->show_touches) {
ADD_PARAM("show_touches=true");
}
@ -302,12 +345,18 @@ execute_server(struct sc_server *server,
// By default, power_on is true
ADD_PARAM("power_on=false");
}
if (params->list_encoders) {
if (params->list & SC_OPTION_LIST_ENCODERS) {
ADD_PARAM("list_encoders=true");
}
if (params->list_displays) {
if (params->list & SC_OPTION_LIST_DISPLAYS) {
ADD_PARAM("list_displays=true");
}
if (params->list & SC_OPTION_LIST_CAMERAS) {
ADD_PARAM("list_cameras=true");
}
if (params->list & SC_OPTION_LIST_CAMERA_SIZES) {
ADD_PARAM("list_camera_sizes=true");
}

#undef ADD_PARAM

@ -463,6 +512,7 @@ sc_server_connect_to(struct sc_server *server, struct sc_server_info *info) {
const char *serial = server->serial;
assert(serial);

bool video = server->params.video;
bool audio = server->params.audio;
bool control = server->params.control;

@ -470,9 +520,12 @@ sc_server_connect_to(struct sc_server *server, struct sc_server_info *info) {
sc_socket audio_socket = SC_SOCKET_NONE;
sc_socket control_socket = SC_SOCKET_NONE;
if (!tunnel->forward) {
video_socket = net_accept_intr(&server->intr, tunnel->server_socket);
if (video_socket == SC_SOCKET_NONE) {
goto fail;
if (video) {
video_socket =
net_accept_intr(&server->intr, tunnel->server_socket);
if (video_socket == SC_SOCKET_NONE) {
goto fail;
}
}

if (audio) {
@ -503,35 +556,45 @@ sc_server_connect_to(struct sc_server *server, struct sc_server_info *info) {

unsigned attempts = 100;
sc_tick delay = SC_TICK_FROM_MS(100);
video_socket = connect_to_server(server, attempts, delay, tunnel_host,
tunnel_port);
if (video_socket == SC_SOCKET_NONE) {
sc_socket first_socket = connect_to_server(server, attempts, delay,
tunnel_host, tunnel_port);
if (first_socket == SC_SOCKET_NONE) {
goto fail;
}

if (video) {
video_socket = first_socket;
}

if (audio) {
audio_socket = net_socket();
if (audio_socket == SC_SOCKET_NONE) {
goto fail;
}
bool ok = net_connect_intr(&server->intr, audio_socket, tunnel_host,
tunnel_port);
if (!ok) {
goto fail;
if (!video) {
audio_socket = first_socket;
} else {
audio_socket = net_socket();
if (audio_socket == SC_SOCKET_NONE) {
goto fail;
}
bool ok = net_connect_intr(&server->intr, audio_socket,
tunnel_host, tunnel_port);
if (!ok) {
goto fail;
}
}
}

if (control) {
// we know that the device is listening, we don't need several
// attempts
control_socket = net_socket();
if (control_socket == SC_SOCKET_NONE) {
goto fail;
}
bool ok = net_connect_intr(&server->intr, control_socket,
tunnel_host, tunnel_port);
if (!ok) {
goto fail;
if (!video && !audio) {
control_socket = first_socket;
} else {
control_socket = net_socket();
if (control_socket == SC_SOCKET_NONE) {
goto fail;
}
bool ok = net_connect_intr(&server->intr, control_socket,
tunnel_host, tunnel_port);
if (!ok) {
goto fail;
}
}
}
}
@ -540,13 +603,17 @@ sc_server_connect_to(struct sc_server *server, struct sc_server_info *info) {
sc_adb_tunnel_close(tunnel, &server->intr, serial,
server->device_socket_name);

sc_socket first_socket = video ? video_socket
: audio ? audio_socket
: control_socket;

// The sockets will be closed on stop if device_read_info() fails
bool ok = device_read_info(&server->intr, video_socket, info);
bool ok = device_read_info(&server->intr, first_socket, info);
if (!ok) {
goto fail;
}

assert(video_socket != SC_SOCKET_NONE);
assert(!video || video_socket != SC_SOCKET_NONE);
assert(!audio || audio_socket != SC_SOCKET_NONE);
assert(!control || control_socket != SC_SOCKET_NONE);

@ -768,6 +835,15 @@ sc_server_configure_tcpip_unknown_address(struct sc_server *server,
return sc_server_connect_to_tcpip(server, ip_port);
}

static void
sc_server_kill_adb_if_requested(struct sc_server *server) {
if (server->params.kill_adb_on_close) {
LOGI("Killing adb server...");
unsigned flags = SC_ADB_NO_STDOUT | SC_ADB_NO_STDERR | SC_ADB_NO_LOGERR;
sc_adb_kill_server(&server->intr, flags);
}
}

static int
run_server(void *data) {
struct sc_server *server = data;
@ -779,7 +855,7 @@ run_server(void *data) {
// is parsed, so it is not output)
bool ok = sc_adb_start_server(&server->intr, 0);
if (!ok) {
LOGE("Could not start adb daemon");
LOGE("Could not start adb server");
goto error_connection_failed;
}

@ -860,7 +936,7 @@ run_server(void *data) {

// If --list-* is passed, then the server just prints the requested data
// then exits.
if (params->list_encoders || params->list_displays) {
if (params->list) {
sc_pid pid = execute_server(server, params);
if (pid == SC_PROCESS_NONE) {
goto error_connection_failed;
@ -930,8 +1006,11 @@ run_server(void *data) {
sc_mutex_unlock(&server->mutex);

// Interrupt sockets to wake up socket blocking calls on the server
assert(server->video_socket != SC_SOCKET_NONE);
net_interrupt(server->video_socket);

if (server->video_socket != SC_SOCKET_NONE) {
// There is no video_socket if --no-video is set
net_interrupt(server->video_socket);
}

if (server->audio_socket != SC_SOCKET_NONE) {
// There is no audio_socket if --no-audio is set
@ -964,9 +1043,12 @@ run_server(void *data) {

sc_process_close(pid);

sc_server_kill_adb_if_requested(server);

return 0;

error_connection_failed:
sc_server_kill_adb_if_requested(server);
server->cbs->on_connection_failed(server, server->cbs_userdata);
return -1;
}

@ -26,11 +26,17 @@ struct sc_server_params {
enum sc_log_level log_level;
enum sc_codec video_codec;
enum sc_codec audio_codec;
enum sc_video_source video_source;
enum sc_audio_source audio_source;
enum sc_camera_facing camera_facing;
const char *crop;
const char *video_codec_options;
const char *audio_codec_options;
const char *video_encoder;
const char *audio_encoder;
const char *camera_id;
const char *camera_size;
const char *camera_ar;
struct sc_port_range port_range;
uint32_t tunnel_host;
uint16_t tunnel_port;
@ -41,6 +47,7 @@ struct sc_server_params {
int8_t lock_video_orientation;
bool control;
uint32_t display_id;
bool video;
bool audio;
bool show_touches;
bool stay_awake;
@ -54,8 +61,8 @@ struct sc_server_params {
bool select_tcpip;
bool cleanup;
bool power_on;
bool list_encoders;
bool list_displays;
bool kill_adb_on_close;
uint8_t list;
};

struct sc_server {

@ -27,7 +27,8 @@
// keyboard support, though OS could support more keys via modifying the report
// desc. 6 should be enough for scrcpy.
#define HID_KEYBOARD_MAX_KEYS 6
#define HID_KEYBOARD_EVENT_SIZE (2 + HID_KEYBOARD_MAX_KEYS)
#define HID_KEYBOARD_EVENT_SIZE \
    (HID_KEYBOARD_INDEX_KEYS + HID_KEYBOARD_MAX_KEYS)

#define HID_RESERVED 0x00
#define HID_ERROR_ROLL_OVER 0x01

@ -83,7 +83,7 @@ scrcpy_otg(struct scrcpy_options *options) {
#ifdef _WIN32
// On Windows, only one process could open a USB device
// <https://github.com/Genymobile/scrcpy/issues/2773>
LOGI("Killing adb daemon (if any)...");
LOGI("Killing adb server (if any)...");
unsigned flags = SC_ADB_NO_STDOUT | SC_ADB_NO_STDERR | SC_ADB_NO_LOGERR;
// uninterruptible (intr == NULL), but in practice it's very quick
sc_adb_kill_server(NULL, flags);
@ -105,10 +105,6 @@ scrcpy_otg(struct scrcpy_options *options) {

usb_device_initialized = true;

LOGI("USB device: %s (%04x:%04x) %s %s", usb_device.serial,
(unsigned) usb_device.vid, (unsigned) usb_device.pid,
usb_device.manufacturer, usb_device.product);

ok = sc_usb_connect(&s->usb, usb_device.device, &cbs, NULL);
if (!ok) {
goto end;

@ -93,7 +93,7 @@ sc_usb_device_move(struct sc_usb_device *dst, struct sc_usb_device *src) {
src->product = NULL;
}

void
static void
sc_usb_devices_destroy(struct sc_vec_usb_devices *usb_devices) {
for (size_t i = 0; i < usb_devices->size; ++i) {
sc_usb_device_destroy(&usb_devices->data[i]);
@ -213,8 +213,8 @@ sc_usb_select_device(struct sc_usb *usb, const char *serial,
assert(sel_count == 1); // sel_idx is valid only if sel_count == 1
struct sc_usb_device *device = &vec.data[sel_idx];

LOGD("USB device found:");
sc_usb_devices_log(SC_LOG_LEVEL_DEBUG, vec.data, vec.size);
LOGI("USB device found:");
sc_usb_devices_log(SC_LOG_LEVEL_INFO, vec.data, vec.size);

// Move device into out_device (do not destroy device)
sc_usb_device_move(out_device, device);

@ -147,7 +147,7 @@ sc_sdl_log_print(void *userdata, int category, SDL_LogPriority priority,
}

void
sc_log_configure() {
sc_log_configure(void) {
SDL_LogSetOutputFunction(sc_sdl_log_print, NULL);
// Redirect FFmpeg logs to SDL logs
av_log_set_callback(sc_av_log_callback);

@ -36,6 +36,6 @@ sc_log_windows_error(const char *prefix, int error);
#endif

void
sc_log_configure();
sc_log_configure(void);

#endif

77
app/src/util/timeout.c
Normal file
77
app/src/util/timeout.c
Normal file
@ -0,0 +1,77 @@
#include "timeout.h"

#include <assert.h>

#include "log.h"

bool
sc_timeout_init(struct sc_timeout *timeout) {
bool ok = sc_mutex_init(&timeout->mutex);
if (!ok) {
return false;
}

ok = sc_cond_init(&timeout->cond);
if (!ok) {
return false;
}

timeout->stopped = false;

return true;
}

static int
run_timeout(void *data) {
struct sc_timeout *timeout = data;
sc_tick deadline = timeout->deadline;

sc_mutex_lock(&timeout->mutex);
bool timed_out = false;
while (!timeout->stopped && !timed_out) {
timed_out = !sc_cond_timedwait(&timeout->cond, &timeout->mutex,
deadline);
}
sc_mutex_unlock(&timeout->mutex);

timeout->cbs->on_timeout(timeout, timeout->cbs_userdata);

return 0;
}

bool
sc_timeout_start(struct sc_timeout *timeout, sc_tick deadline,
const struct sc_timeout_callbacks *cbs, void *cbs_userdata) {
bool ok = sc_thread_create(&timeout->thread, run_timeout, "scrcpy-timeout",
timeout);
if (!ok) {
LOGE("Timeout: could not start thread");
return false;
}

timeout->deadline = deadline;

assert(cbs && cbs->on_timeout);
timeout->cbs = cbs;
timeout->cbs_userdata = cbs_userdata;

return true;
}

void
sc_timeout_stop(struct sc_timeout *timeout) {
sc_mutex_lock(&timeout->mutex);
timeout->stopped = true;
sc_mutex_unlock(&timeout->mutex);
}

void
sc_timeout_join(struct sc_timeout *timeout) {
sc_thread_join(&timeout->thread, NULL);
}

void
sc_timeout_destroy(struct sc_timeout *timeout) {
sc_mutex_destroy(&timeout->mutex);
sc_cond_destroy(&timeout->cond);
}

43
app/src/util/timeout.h
Normal file
43
app/src/util/timeout.h
Normal file
@ -0,0 +1,43 @@
#ifndef SC_TIMEOUT_H
#define SC_TIMEOUT_H

#include "common.h"

#include <stdbool.h>

#include "thread.h"
#include "tick.h"

struct sc_timeout {
sc_thread thread;
sc_tick deadline;

sc_mutex mutex;
sc_cond cond;
bool stopped;

const struct sc_timeout_callbacks *cbs;
void *cbs_userdata;
};

struct sc_timeout_callbacks {
void (*on_timeout)(struct sc_timeout *timeout, void *userdata);
};

bool
sc_timeout_init(struct sc_timeout *timeout);

void
sc_timeout_destroy(struct sc_timeout *timeout);

bool
sc_timeout_start(struct sc_timeout *timeout, sc_tick deadline,
const struct sc_timeout_callbacks *cbs, void *cbs_userdata);

void
sc_timeout_stop(struct sc_timeout *timeout);

void
sc_timeout_join(struct sc_timeout *timeout);

#endif
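A sketch of how a caller is expected to drive this helper, mirroring the `--time-limit` wiring shown in `app/src/scrcpy.c` above (include paths approximated, error handling trimmed, callback body only illustrative):

```c
#include "util/timeout.h"
#include "util/tick.h"

static void
on_timeout(struct sc_timeout *timeout, void *userdata) {
    (void) timeout;
    (void) userdata;
    // In scrcpy this pushes an SDL event; here it only illustrates the hook.
}

static bool
start_time_limit(struct sc_timeout *timeout, sc_tick duration) {
    if (!sc_timeout_init(timeout)) {
        return false;
    }

    static const struct sc_timeout_callbacks cbs = {
        .on_timeout = on_timeout,
    };

    sc_tick deadline = sc_tick_now() + duration;
    if (!sc_timeout_start(timeout, deadline, &cbs, NULL)) {
        sc_timeout_destroy(timeout);
        return false;
    }

    return true;
}

// Teardown order, as in scrcpy.c: stop (wakes the thread), join, then destroy.
static void
stop_time_limit(struct sc_timeout *timeout) {
    sc_timeout_stop(timeout);
    sc_timeout_join(timeout);
    sc_timeout_destroy(timeout);
}
```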
@ -190,10 +190,10 @@ sc_vecdeque_reallocdata_(void *ptr, size_t newcap, size_t item_size,

size_t right_len = MIN(size, oldcap - oldorigin);
assert(right_len);
memcpy(newptr, ptr + (oldorigin * item_size), right_len * item_size);
memcpy(newptr, (char *) ptr + (oldorigin * item_size), right_len * item_size);

if (size > right_len) {
memcpy(newptr + (right_len * item_size), ptr,
memcpy((char *) newptr + (right_len * item_size), ptr,
(size - right_len) * item_size);
}
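The added `(char *)` casts matter because pointer arithmetic on `void *` is not defined by standard C (it only compiles as a GNU extension); casting to a character pointer makes the byte offsets portable. A standalone illustration of the same pattern (not scrcpy code):

```c
#include <string.h>

// Copy one item out of a raw buffer addressed in bytes.
static void
copy_item(void *dst, const void *src, size_t index, size_t item_size) {
    // (const char *) makes the byte-wise offset valid in standard C;
    // arithmetic directly on `src` (a void pointer) would not be.
    memcpy(dst, (const char *) src + (index * item_size), item_size);
}
```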
@ -217,6 +217,18 @@ static void test_get_ip_multiline_second_ok(void) {
free(ip);
}

static void test_get_ip_multiline_second_ok_without_cr(void) {
char ip_route[] = "10.0.0.0/24 dev rmnet proto kernel scope link src "
"10.0.0.3\n"
"192.168.1.0/24 dev wlan0 proto kernel scope link src "
"192.168.1.3\n";

char *ip = sc_adb_parse_device_ip(ip_route);
assert(ip);
assert(!strcmp(ip, "192.168.1.3"));
free(ip);
}

static void test_get_ip_no_wlan(void) {
char ip_route[] = "192.168.1.0/24 dev rmnet proto kernel scope link src "
"192.168.12.34\r\r\n";
@ -259,6 +271,7 @@ int main(int argc, char *argv[]) {
test_get_ip_single_line_with_trailing_space();
test_get_ip_multiline_first_ok();
test_get_ip_multiline_second_ok();
test_get_ip_multiline_second_ok_without_cr();
test_get_ip_no_wlan();
test_get_ip_no_wlan_without_eol();
test_get_ip_truncated();

@ -5,7 +5,7 @@

#include "util/bytebuf.h"

void test_bytebuf_simple(void) {
static void test_bytebuf_simple(void) {
struct sc_bytebuf buf;
uint8_t data[20];

@ -34,7 +34,7 @@ void test_bytebuf_simple(void) {
sc_bytebuf_destroy(&buf);
}

void test_bytebuf_boundaries(void) {
static void test_bytebuf_boundaries(void) {
struct sc_bytebuf buf;
uint8_t data[20];

@ -71,7 +71,7 @@ void test_bytebuf_boundaries(void) {
sc_bytebuf_destroy(&buf);
}

void test_bytebuf_two_steps_write(void) {
static void test_bytebuf_two_steps_write(void) {
struct sc_bytebuf buf;
uint8_t data[20];

@ -53,7 +53,7 @@ static void test_options(void) {
"--max-size", "1024",
"--lock-video-orientation=2", // optional arguments require '='
// "--no-control" is not compatible with "--turn-screen-off"
// "--no-display" is not compatible with "--fulscreen"
// "--no-playback" is not compatible with "--fulscreen"
"--port", "1234:1236",
"--push-target", "/sdcard/Movies",
"--record", "file",
@ -108,8 +108,8 @@ static void test_options2(void) {
char *argv[] = {
"scrcpy",
"--no-control",
"--no-display",
"--record", "file.mp4", // cannot enable --no-display without recording
"--no-playback",
"--record", "file.mp4", // cannot enable --no-playback without recording
};

bool ok = scrcpy_parse_args(&args, ARRAY_LEN(argv), argv);
@ -117,7 +117,8 @@ static void test_options2(void) {

const struct scrcpy_options *opts = &args.opts;
assert(!opts->control);
assert(!opts->display);
assert(!opts->video_playback);
assert(!opts->audio_playback);
assert(!strcmp(opts->record_filename, "file.mp4"));
assert(opts->record_format == SC_RECORD_FORMAT_MP4);
}

@ -269,21 +269,25 @@ static void test_parse_integer_with_suffix(void) {

char buf[32];

sprintf(buf, "%ldk", LONG_MAX / 2000);
int r = snprintf(buf, sizeof(buf), "%ldk", LONG_MAX / 2000);
assert(r >= 0 && (size_t) r < sizeof(buf));
ok = sc_str_parse_integer_with_suffix(buf, &value);
assert(ok);
assert(value == LONG_MAX / 2000 * 1000);

sprintf(buf, "%ldm", LONG_MAX / 2000);
r = snprintf(buf, sizeof(buf), "%ldm", LONG_MAX / 2000);
assert(r >= 0 && (size_t) r < sizeof(buf));
ok = sc_str_parse_integer_with_suffix(buf, &value);
assert(!ok);

sprintf(buf, "%ldk", LONG_MIN / 2000);
r = snprintf(buf, sizeof(buf), "%ldk", LONG_MIN / 2000);
assert(r >= 0 && (size_t) r < sizeof(buf));
ok = sc_str_parse_integer_with_suffix(buf, &value);
assert(ok);
assert(value == LONG_MIN / 2000 * 1000);

sprintf(buf, "%ldm", LONG_MIN / 2000);
r = snprintf(buf, sizeof(buf), "%ldm", LONG_MIN / 2000);
assert(r >= 0 && (size_t) r < sizeof(buf));
ok = sc_str_parse_integer_with_suffix(buf, &value);
assert(!ok);
}
@ -358,7 +362,7 @@ static void test_index_of_column(void) {
assert(sc_str_index_of_column(" a bc d", 1, " ") == 2);
}

static void test_remove_trailing_cr() {
static void test_remove_trailing_cr(void) {
char s[] = "abc\r";
sc_str_remove_trailing_cr(s, sizeof(s) - 1);
assert(!strcmp(s, "abc"));

@ -102,7 +102,7 @@ static void test_vecdeque_reserve(void) {
sc_vecdeque_destroy(&vdq);
}

static void test_vecdeque_grow() {
static void test_vecdeque_grow(void) {
struct SC_VECDEQUE(int) vdq = SC_VECDEQUE_INITIALIZER;

bool ok = sc_vecdeque_reserve(&vdq, 20);
@ -142,7 +142,7 @@ static void test_vecdeque_grow() {
sc_vecdeque_destroy(&vdq);
}

static void test_vecdeque_push_hole() {
static void test_vecdeque_push_hole(void) {
struct SC_VECDEQUE(int) vdq = SC_VECDEQUE_INITIALIZER;

bool ok = sc_vecdeque_reserve(&vdq, 20);

@ -187,7 +187,7 @@ static void test_vector_index_of(void) {
sc_vector_destroy(&vec);
}

static void test_vector_grow() {
static void test_vector_grow(void) {
struct SC_VECTOR(int) vec = SC_VECTOR_INITIALIZER;

bool ok;

@ -16,6 +16,6 @@ cpu = 'i686'
endian = 'little'

[properties]
prebuilt_ffmpeg = 'ffmpeg-6.0-scrcpy-2/win32'
prebuilt_sdl2 = 'SDL2-2.26.4/i686-w64-mingw32'
prebuilt_ffmpeg = 'ffmpeg-6.0-scrcpy-4/win32'
prebuilt_sdl2 = 'SDL2-2.28.0/i686-w64-mingw32'
prebuilt_libusb = 'libusb-1.0.26/libusb-MinGW-Win32'

@ -16,6 +16,6 @@ cpu = 'x86_64'
endian = 'little'

[properties]
prebuilt_ffmpeg = 'ffmpeg-6.0-scrcpy-2/win64'
prebuilt_sdl2 = 'SDL2-2.26.4/x86_64-w64-mingw32'
prebuilt_ffmpeg = 'ffmpeg-6.0-scrcpy-4/win64'
prebuilt_sdl2 = 'SDL2-2.28.0/x86_64-w64-mingw32'
prebuilt_libusb = 'libusb-1.0.26/libusb-MinGW-x64'
59
doc/audio.md
59
doc/audio.md
@ -24,6 +24,42 @@ To disable audio:
scrcpy --no-audio
```

To disable only the audio playback, see [no playback](video.md#no-playback).

## Audio only

To play audio only, disable the video:

```bash
scrcpy --no-video
# interrupt with Ctrl+C
```

Without video, the audio latency is typically not critical, so it might be
interesting to add [buffering](#buffering) to minimize glitches:

```
scrcpy --no-video --audio-buffer=200
```

## Source

By default, the device audio output is forwarded.

It is possible to capture the device microphone instead:

```
scrcpy --audio-source=mic
```

For example, to use the device as a dictaphone and record a capture directly on
the computer:

```
scrcpy --audio-source=mic --no-video --no-playback --record=file.opus
```


## Codec

The audio codec can be selected. The possible values are `opus` (default), `aac`
@ -35,6 +71,20 @@ scrcpy --audio-codec=aac
scrcpy --audio-codec=raw
```

In particular, if you get the following error:

> Failed to initialize audio/opus, error 0xfffffffe

then your device has no Opus encoder: try `scrcpy --audio-codec=aac`.
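To check which encoders the device actually provides before switching codecs, the encoder list can be printed first (the output format varies by device):

```bash
scrcpy --list-encoders
```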
For advanced usage, to pass arbitrary parameters to the [`MediaFormat`],
check `--audio-codec-options` in the manpage or in `scrcpy --help`.

[`MediaFormat`]: https://developer.android.com/reference/android/media/MediaFormat


## Encoder

Several encoders may be available on the device. They can be listed by:

```bash
@ -43,19 +93,14 @@ scrcpy --list-encoders

To select a specific encoder:

```
```bash
scrcpy --audio-codec=opus --audio-encoder='c2.android.opus.encoder'
```

For advanced usage, to pass arbitrary parameters to the [`MediaFormat`],
check `--audio-codec-options` in the manpage or in `scrcpy --help`.

[`MediaFormat`]: https://developer.android.com/reference/android/media/MediaFormat


## Bit rate

The default video bit-rate is 128Kbps. To change it:
The default audio bit rate is 128Kbps. To change it:

```bash
scrcpy --audio-bit-rate=64K

@ -77,7 +77,7 @@ pip3 install meson
sudo dnf install https://download1.rpmfusion.org/free/fedora/rpmfusion-free-release-$(rpm -E %fedora).noarch.rpm

# client build dependencies
sudo dnf install SDL2-devel ffms2-devel libusb-devel meson gcc make
sudo dnf install SDL2-devel ffms2-devel libusb1-devel meson gcc make

# server build dependencies
sudo dnf install java-devel
@ -233,10 +233,10 @@ install` must be run as root)._

#### Option 2: Use prebuilt server

- [`scrcpy-server-v2.0`][direct-scrcpy-server]
  <sub>SHA-256: `9e241615f578cd690bb43311000debdecf6a9c50a7082b001952f18f6f21ddc2`</sub>
- [`scrcpy-server-v2.1.1`][direct-scrcpy-server]
  <sub>SHA-256: `9558db6c56743a1dc03b38f59801fb40e91cc891f8fc0c89e5b0b067761f148e`</sub>

[direct-scrcpy-server]: https://github.com/Genymobile/scrcpy/releases/download/v2.0/scrcpy-server-v2.0
[direct-scrcpy-server]: https://github.com/Genymobile/scrcpy/releases/download/v2.1.1/scrcpy-server-v2.1.1

Download the prebuilt server somewhere, and specify its path during the Meson
configuration:
125
doc/connection.md
Normal file
125
doc/connection.md
Normal file
@ -0,0 +1,125 @@
|
||||
# Connection
|
||||
|
||||
## Selection
|
||||
|
||||
If exactly one device is connected (i.e. listed by `adb devices`), then it is
|
||||
automatically selected.
|
||||
|
||||
However, if there are multiple devices connected, you must specify the one to
|
||||
use in one of 4 ways:
|
||||
- by its serial:
|
||||
```bash
|
||||
scrcpy --serial=0123456789abcdef
|
||||
scrcpy -s 0123456789abcdef # short version
|
||||
|
||||
# the serial is the ip:port if connected over TCP/IP (same behavior as adb)
|
||||
scrcpy --serial=192.168.1.1:5555
|
||||
```
|
||||
- the one connected over USB (if there is exactly one):
|
||||
```bash
|
||||
scrcpy --select-usb
|
||||
scrcpy -d # short version
|
||||
```
|
||||
- the one connected over TCP/IP (if there is exactly one):
|
||||
```bash
|
||||
scrcpy --select-tcpip
|
||||
scrcpy -e # short version
|
||||
```
|
||||
- a device already listening on TCP/IP (see [below](#tcpip-wireless)):
|
||||
```bash
|
||||
scrcpy --tcpip=192.168.1.1:5555
|
||||
scrcpy --tcpip=192.168.1.1 # default port is 5555
|
||||
```
|
||||
|
||||
The serial may also be provided via the environment variable `ANDROID_SERIAL`
|
||||
(also used by `adb`):
|
||||
|
||||
```bash
|
||||
# in bash
|
||||
export ANDROID_SERIAL=0123456789abcdef
|
||||
scrcpy
|
||||
```
|
||||
|
||||
```cmd
|
||||
:: in cmd
|
||||
set ANDROID_SERIAL=0123456789abcdef
|
||||
scrcpy
|
||||
```
|
||||
|
||||
```powershell
|
||||
# in PowerShell
|
||||
$env:ANDROID_SERIAL = '0123456789abcdef'
|
||||
scrcpy
|
||||
```
|
||||
|
||||
|
||||
## TCP/IP (wireless)
|
||||
|
||||
_Scrcpy_ uses `adb` to communicate with the device, and `adb` can [connect] to a
|
||||
device over TCP/IP. The device must be connected on the same network as the
|
||||
computer.
|
||||
|
||||
[connect]: https://developer.android.com/studio/command-line/adb.html#wireless
|
||||
|
||||
|
||||
### Automatic
|
||||
|
||||
An option `--tcpip` allows to configure the connection automatically. There are
|
||||
two variants.
|
||||
|
||||
If the device (accessible at 192.168.1.1 in this example) already listens on a
|
||||
port (typically 5555) for incoming _adb_ connections, then run:
|
||||
|
||||
```bash
|
||||
scrcpy --tcpip=192.168.1.1 # default port is 5555
|
||||
scrcpy --tcpip=192.168.1.1:5555
|
||||
```
|
||||
|
||||
If _adb_ TCP/IP mode is disabled on the device (or if you don't know the IP
|
||||
address), connect the device over USB, then run:
|
||||
|
||||
```bash
|
||||
scrcpy --tcpip # without arguments
|
||||
```
|
||||
|
||||
It will automatically find the device IP address and adb port, enable TCP/IP
|
||||
mode if necessary, then connect to the device before starting.
|
||||
|
||||
|
||||
### Manual
|
||||
|
||||
Alternatively, it is possible to enable the TCP/IP connection manually using
|
||||
`adb`:
|
||||
|
||||
1. Plug the device into a USB port on your computer.
|
||||
2. Connect the device to the same Wi-Fi network as your computer.
|
||||
3. Get your device IP address, in Settings → About phone → Status, or by
|
||||
executing this command:
|
||||
|
||||
```bash
|
||||
adb shell ip route | awk '{print $9}'
|
||||
```
|
||||
|
||||
4. Enable `adb` over TCP/IP on your device: `adb tcpip 5555`.
|
||||
5. Unplug your device.
|
||||
6. Connect to your device: `adb connect DEVICE_IP:5555` _(replace `DEVICE_IP`
|
||||
with the device IP address you found)_.
|
||||
7. Run `scrcpy` as usual.
|
||||
8. Run `adb disconnect` once you're done.
|
||||
|
||||
Since Android 11, a [wireless debugging option][adb-wireless] allows to bypass
|
||||
having to physically connect your device directly to your computer.
|
||||
|
||||
[adb-wireless]: https://developer.android.com/studio/command-line/adb#wireless-android11-command-line
|
||||
|
||||
|
||||
## Autostart
|
||||
|
||||
A small tool (by the scrcpy author) allows to run arbitrary commands whenever a
|
||||
new Android device is connected: [AutoAdb]. It can be used to start scrcpy:
|
||||
|
||||
```bash
|
||||
autoadb scrcpy -s '{}'
|
||||
```
|
||||
|
||||
[AutoAdb]: https://github.com/rom1v/autoadb
|
445
doc/develop.md
445
doc/develop.md
@@ -9,16 +9,52 @@ This application is composed of two parts:

The client is responsible for pushing the server to the device and starting its
execution.

Once the client and the server are connected to each other, the server initially
sends device information (name and initial screen dimensions), then starts to
send a raw H.264 video stream of the device screen. The client decodes the video
frames, and displays them as soon as possible, without buffering, to minimize
latency. The client is not aware of the device rotation (which is handled by the
server), it just knows the dimensions of the video frames.
The client and the server establish communication using separate sockets for
video, audio and controls. Any of them may be disabled (but not all), so
there are 1, 2 or 3 socket(s).

The client captures relevant keyboard and mouse events, which it transmits to the
server, which injects them into the device.
The server initially sends the device name on the first socket (it is used for
the scrcpy window title), then each socket is used for its own purpose. All
reads and writes are performed from a dedicated thread for each socket, both on
the client and on the server.

If video is enabled, then the server sends a raw video stream (H.264 by default)
of the device screen, with some additional headers for each packet. The client
decodes the video frames, and displays them as soon as possible, without
buffering (unless `--display-buffer=delay` is specified) to minimize latency.
The client is not aware of the device rotation (which is handled by the server),
it just knows the dimensions of the video frames it receives.

Similarly, if audio is enabled, then the server sends a raw audio stream (OPUS
by default) of the device audio output (or the microphone if
`--audio-source=mic` is specified), with some additional headers for each
packet. The client decodes the stream, and attempts to keep a minimal latency by
maintaining an average buffering. The [blog post][scrcpy2] of the scrcpy v2.0
release gives more details about the audio feature.

If control is enabled, then the client captures relevant keyboard and mouse
events, which it transmits to the server, which injects them into the device. This
is the only socket which is used in both directions: input events are sent from
the client to the device, and when the device clipboard changes, the new content
is sent from the device to the client to support seamless copy-paste.

[scrcpy2]: https://blog.rom1v.com/2023/03/scrcpy-2-0-with-audio/

Note that the client-server roles are expressed at the application level:

- the server _serves_ video and audio streams, and handles requests from the
  client,
- the client _controls_ the device through the server.

However, by default (when `--force-adb-forward` is not set), the roles are
reversed at the network level:

- the client opens a server socket and listens on a port before starting the
  server,
- the server connects to the client.

This role inversion guarantees that the connection will not fail due to race
conditions, without polling.
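
To make the inversion concrete, here is a minimal Java sketch of the network-level roles (illustrative only, not scrcpy's actual code; the real client is written in C and the port is tunnelled through adb):

```java
import java.io.IOException;
import java.net.ServerSocket;
import java.net.Socket;

public class RoleInversionSketch {
    // Client side: open a listening socket *before* starting the server,
    // so the server's connect() cannot race with the listener setup.
    static Socket acceptFromDevice(int port) throws IOException {
        try (ServerSocket listener = new ServerSocket(port)) {
            return listener.accept(); // blocks until the device connects back
        }
    }

    // Server side (on the device): connect to the adb-reversed local port.
    static Socket connectToClient(int port) throws IOException {
        return new Socket("localhost", port);
    }
}
```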

## Server

@@ -32,15 +68,14 @@ The server is a Java application (with a [`public static void main(String...
args)`][main] method), compiled against the Android framework, and executed as
`shell` on the Android device.

[main]: https://github.com/Genymobile/scrcpy/blob/ffe0417228fb78ab45b7ee4e202fc06fc8875bf3/server/src/main/java/com/genymobile/scrcpy/Server.java#L123
[main]: https://github.com/Genymobile/scrcpy/blob/a3cdf1a6b86ea22786e1f7d09b9c202feabc6949/server/src/main/java/com/genymobile/scrcpy/Server.java#L193

To run such a Java application, the classes must be [_dexed_][dex] (typically,
to `classes.dex`). If `my.package.MainClass` is the main class, compiled to
`classes.dex`, pushed to the device in `/data/local/tmp`, then it can be run
with:

    adb shell CLASSPATH=/data/local/tmp/classes.dex \
        app_process / my.package.MainClass
    adb shell CLASSPATH=/data/local/tmp/classes.dex app_process / my.package.MainClass

_The path `/data/local/tmp` is a good candidate to push the server, since it's
readable and writable by `shell`, but not world-writable, so a malicious
@@ -49,7 +84,7 @@ application may not replace the server just before the client executes it._

Instead of a raw _dex_ file, `app_process` accepts a _jar_ containing
`classes.dex` (e.g. an [APK]). For simplicity, and to benefit from the gradle
build system, the server is built to an (unsigned) APK (renamed to
`scrcpy-server`).
`scrcpy-server.jar`).

[dex]: https://en.wikipedia.org/wiki/Dalvik_(software)
[apk]: https://en.wikipedia.org/wiki/Android_application_package
@@ -65,42 +100,77 @@ They can be called using reflection though. The communication with hidden
components is provided by [_wrappers_ classes][wrappers] and [aidl].

[hidden]: https://stackoverflow.com/a/31908373/1987178
[wrappers]: https://github.com/Genymobile/scrcpy/tree/ffe0417228fb78ab45b7ee4e202fc06fc8875bf3/server/src/main/java/com/genymobile/scrcpy/wrappers
[aidl]: https://github.com/Genymobile/scrcpy/tree/ffe0417228fb78ab45b7ee4e202fc06fc8875bf3/server/src/main/aidl/android/view
[wrappers]: https://github.com/Genymobile/scrcpy/tree/master/server/src/main/java/com/genymobile/scrcpy/wrappers
[aidl]: https://github.com/Genymobile/scrcpy/tree/master/server/src/main/aidl


### Threading

The server uses 3 threads:
### Execution

- the **main** thread, encoding and streaming the video to the client;
- the **controller** thread, listening for _control messages_ (typically,
  keyboard and mouse events) from the client;
- the **receiver** thread (managed by the controller), sending _device messages_
  to the clients (currently, it is only used to send the device clipboard
  content).
The server is started by the client basically by executing the following
commands:

Since the video encoding is typically hardware, there would be no benefit in
encoding and streaming in two different threads.
```bash
adb push scrcpy-server /data/local/tmp/scrcpy-server.jar
adb forward tcp:27183 localabstract:scrcpy
adb shell CLASSPATH=/data/local/tmp/scrcpy-server.jar app_process / com.genymobile.scrcpy.Server 2.1
```

The first argument (`2.1` in the example) is the client scrcpy version. The
server fails if the client and the server do not have the exact same version.
The protocol between the client and the server may change from version to
version (see [protocol](#protocol) below), and there is no backward or forward
compatibility (there is no point in using different client and server versions).
This check allows detecting a misconfiguration (running an older or newer server
by mistake).

It is followed by any number of arguments, in the form of `key=value` pairs.
Their order is irrelevant. The possible keys and associated value types can be
found in the [server][server-options] and [client][client-options] code.

[server-options]: https://github.com/Genymobile/scrcpy/blob/a3cdf1a6b86ea22786e1f7d09b9c202feabc6949/server/src/main/java/com/genymobile/scrcpy/Options.java#L181
[client-options]: https://github.com/Genymobile/scrcpy/blob/a3cdf1a6b86ea22786e1f7d09b9c202feabc6949/app/src/server.c#L226

For example, if we execute `scrcpy -m1920 --no-audio`, then the server
execution will look like this:

```bash
# scid is a random number to identify different clients running on the same device
adb shell CLASSPATH=/data/local/tmp/scrcpy-server.jar app_process / com.genymobile.scrcpy.Server 2.1 scid=12345678 log_level=info audio=false max_size=1920
```
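
As an illustration of this argument format (not the actual `Options` parser, which also validates keys and types), splitting such `key=value` pairs is straightforward:

```java
import java.util.HashMap;
import java.util.Map;

public class KeyValueArgsSketch {
    // Parse "key=value" arguments; order is irrelevant.
    static Map<String, String> parse(String... args) {
        Map<String, String> options = new HashMap<>();
        for (String arg : args) {
            int eq = arg.indexOf('=');
            if (eq == -1) {
                throw new IllegalArgumentException("Invalid option: " + arg);
            }
            options.put(arg.substring(0, eq), arg.substring(eq + 1));
        }
        return options;
    }

    public static void main(String... args) {
        // prints something like {audio=false, max_size=1920, log_level=info, scid=12345678}
        System.out.println(parse("scid=12345678", "log_level=info", "audio=false", "max_size=1920"));
    }
}
```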

### Components

When executed, its [`main()`][main] method is executed (on the "main" thread).
It parses the arguments, establishes the connection with the client and starts
the other "components":
- the **video** streamer: it captures the device screen and sends encoded video
  packets on the _video_ socket (from the _video_ thread).
- the **audio** streamer: it uses several threads to capture raw packets,
  submit them for encoding and retrieve the encoded packets, which it sends on
  the _audio_ socket.
- the **controller**: it receives _control messages_ (typically input events)
  on the _control_ socket from one thread, and sends _device messages_ (e.g. to
  transmit the device clipboard content to the client) on the same _control
  socket_ from another thread. Thus, the _control_ socket is used in both
  directions (contrary to the _video_ and _audio_ sockets).


### Screen video encoding

The encoding is managed by [`ScreenEncoder`].

The video is encoded using the [`MediaCodec`] API. The codec takes its input
from a [surface] associated to the display, and writes the resulting H.264
stream to the provided output stream (the socket connected to the client).
The video is encoded using the [`MediaCodec`] API. The codec encodes the content
of a `Surface` associated to the display, and writes the encoded packets to the
client (on the _video_ socket).

[`ScreenEncoder`]: https://github.com/Genymobile/scrcpy/blob/ffe0417228fb78ab45b7ee4e202fc06fc8875bf3/server/src/main/java/com/genymobile/scrcpy/ScreenEncoder.java
[`ScreenEncoder`]: https://github.com/Genymobile/scrcpy/blob/a3cdf1a6b86ea22786e1f7d09b9c202feabc6949/server/src/main/java/com/genymobile/scrcpy/ScreenEncoder.java
[`MediaCodec`]: https://developer.android.com/reference/android/media/MediaCodec.html
[surface]: https://github.com/Genymobile/scrcpy/blob/ffe0417228fb78ab45b7ee4e202fc06fc8875bf3/server/src/main/java/com/genymobile/scrcpy/ScreenEncoder.java#L68-L69
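
A minimal sketch of this surface-based encoding loop (simplified and illustrative, not the actual `ScreenEncoder` code; the bit rate, frame rate and repeat delay values are placeholders, and the virtual display setup and error handling are omitted):

```java
import android.media.MediaCodec;
import android.media.MediaFormat;
import android.view.Surface;

import java.io.OutputStream;
import java.nio.ByteBuffer;

public final class SurfaceEncoderSketch {
    // Encode whatever is drawn on the codec input surface and write the raw
    // encoded packets to the given stream (e.g. the video socket).
    public static void encode(OutputStream out, int width, int height) throws Exception {
        MediaFormat format = MediaFormat.createVideoFormat(MediaFormat.MIMETYPE_VIDEO_AVC, width, height);
        format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                android.media.MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
        format.setInteger(MediaFormat.KEY_BIT_RATE, 8_000_000);
        format.setInteger(MediaFormat.KEY_FRAME_RATE, 60);
        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 10);
        // Repeat the previous frame if nothing changed (see the flag discussed below)
        format.setLong(MediaFormat.KEY_REPEAT_PREVIOUS_FRAME_AFTER, 100_000); // in microseconds

        MediaCodec codec = MediaCodec.createEncoderByType(MediaFormat.MIMETYPE_VIDEO_AVC);
        codec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        Surface surface = codec.createInputSurface();
        // ... the display capture (virtual display) must be attached to `surface` here ...
        codec.start();

        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        while (true) {
            int index = codec.dequeueOutputBuffer(info, -1); // wait for an encoded packet
            if (index >= 0) {
                ByteBuffer buffer = codec.getOutputBuffer(index);
                byte[] packet = new byte[info.size];
                buffer.get(packet);
                out.write(packet); // scrcpy also prefixes each packet with a 12-byte header
                codec.releaseOutputBuffer(index, false);
            }
        }
    }
}
```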

On device [rotation], the codec, surface and display are reinitialized, and a
new video stream is produced.
On device rotation (or folding), the encoding session is [reset] and restarted.

New frames are produced only when changes occur on the surface. This is good
because it avoids sending unnecessary frames, but there are drawbacks:
New frames are produced only when changes occur on the surface. This avoids
sending unnecessary frames, but by default there may be drawbacks:

- it does not send any frame on start if the device screen does not change,
- after fast motion changes, the last frame may have poor quality.
@@ -108,11 +178,24 @@ because it avoids to send unnecessary frames, but there are drawbacks:

Both problems are [solved][repeat] by the flag
[`KEY_REPEAT_PREVIOUS_FRAME_AFTER`][repeat-flag].

[reset]: https://github.com/Genymobile/scrcpy/blob/a3cdf1a6b86ea22786e1f7d09b9c202feabc6949/server/src/main/java/com/genymobile/scrcpy/ScreenEncoder.java#L179
[rotation]: https://github.com/Genymobile/scrcpy/blob/ffe0417228fb78ab45b7ee4e202fc06fc8875bf3/server/src/main/java/com/genymobile/scrcpy/ScreenEncoder.java#L90
[repeat]: https://github.com/Genymobile/scrcpy/blob/ffe0417228fb78ab45b7ee4e202fc06fc8875bf3/server/src/main/java/com/genymobile/scrcpy/ScreenEncoder.java#L147-L148
[repeat]: https://github.com/Genymobile/scrcpy/blob/a3cdf1a6b86ea22786e1f7d09b9c202feabc6949/server/src/main/java/com/genymobile/scrcpy/ScreenEncoder.java#L246-L247
[repeat-flag]: https://developer.android.com/reference/android/media/MediaFormat.html#KEY_REPEAT_PREVIOUS_FRAME_AFTER


### Audio encoding

Similarly, the audio is [captured] using an [`AudioRecord`], and [encoded] using
the [`MediaCodec`] asynchronous API.

More details are available in the [blog post][scrcpy2] introducing the audio feature.

[captured]: https://github.com/Genymobile/scrcpy/blob/a3cdf1a6b86ea22786e1f7d09b9c202feabc6949/server/src/main/java/com/genymobile/scrcpy/AudioCapture.java
[encoded]: https://github.com/Genymobile/scrcpy/blob/a3cdf1a6b86ea22786e1f7d09b9c202feabc6949/server/src/main/java/com/genymobile/scrcpy/AudioEncoder.java
[`AudioRecord`]: https://developer.android.com/reference/android/media/AudioRecord
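
A condensed sketch of this capture-and-encode pipeline (illustrative only; the actual `AudioCapture`/`AudioEncoder` classes add device workarounds, precise timestamps and error handling, and the bit rate here is a placeholder):

```java
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaCodec;
import android.media.MediaFormat;
import android.media.MediaRecorder;

import java.nio.ByteBuffer;

public final class AudioEncodingSketch {
    public static void run() throws Exception {
        int sampleRate = 48000;
        AudioRecord recorder = new AudioRecord(MediaRecorder.AudioSource.REMOTE_SUBMIX, sampleRate,
                AudioFormat.CHANNEL_IN_STEREO, AudioFormat.ENCODING_PCM_16BIT,
                8 * AudioRecord.getMinBufferSize(sampleRate, AudioFormat.CHANNEL_IN_STEREO,
                        AudioFormat.ENCODING_PCM_16BIT));

        MediaFormat format = MediaFormat.createAudioFormat(MediaFormat.MIMETYPE_AUDIO_OPUS, sampleRate, 2);
        format.setInteger(MediaFormat.KEY_BIT_RATE, 128_000);

        MediaCodec codec = MediaCodec.createEncoderByType(MediaFormat.MIMETYPE_AUDIO_OPUS);
        codec.setCallback(new MediaCodec.Callback() {
            @Override
            public void onInputBufferAvailable(MediaCodec mc, int index) {
                // Fill the codec input buffer with raw PCM samples from the recorder
                ByteBuffer input = mc.getInputBuffer(index);
                int read = recorder.read(input, input.remaining());
                mc.queueInputBuffer(index, 0, Math.max(read, 0), System.nanoTime() / 1000, 0);
            }

            @Override
            public void onOutputBufferAvailable(MediaCodec mc, int index, MediaCodec.BufferInfo info) {
                ByteBuffer packet = mc.getOutputBuffer(index);
                // ... send `packet` (prefixed by a frame header) on the audio socket ...
                mc.releaseOutputBuffer(index, false);
            }

            @Override
            public void onError(MediaCodec mc, MediaCodec.CodecException e) { /* handle the error */ }

            @Override
            public void onOutputFormatChanged(MediaCodec mc, MediaFormat newFormat) { /* ignored here */ }
        });
        codec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        recorder.startRecording();
        codec.start();
    }
}
```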

### Input events injection

_Control messages_ are received from the client by the [`Controller`] (run in a
@@ -124,13 +207,13 @@ separate thread). There are several types of input events:
- other commands (e.g. to switch the screen on or to copy the clipboard).

Some of them need to inject input events into the system. To do so, they use the
_hidden_ method [`InputManager.injectInputEvent`] (exposed by our
_hidden_ method [`InputManager.injectInputEvent()`] (exposed by the
[`InputManager` wrapper][inject-wrapper]).

[`Controller`]: https://github.com/Genymobile/scrcpy/blob/ffe0417228fb78ab45b7ee4e202fc06fc8875bf3/server/src/main/java/com/genymobile/scrcpy/Controller.java#L81
[`Controller`]: https://github.com/Genymobile/scrcpy/blob/a3cdf1a6b86ea22786e1f7d09b9c202feabc6949/server/src/main/java/com/genymobile/scrcpy/Controller.java
[`KeyEvent`]: https://developer.android.com/reference/android/view/KeyEvent.html
[`MotionEvent`]: https://developer.android.com/reference/android/view/MotionEvent.html
[`InputManager.injectInputEvent`]: https://android.googlesource.com/platform/frameworks/base/+/oreo-release/core/java/android/hardware/input/InputManager.java#857
[`InputManager.injectInputEvent()`]: https://github.com/Genymobile/scrcpy/blob/a3cdf1a6b86ea22786e1f7d09b9c202feabc6949/server/src/main/java/com/genymobile/scrcpy/wrappers/InputManager.java#L34
[inject-wrapper]: https://github.com/Genymobile/scrcpy/blob/ffe0417228fb78ab45b7ee4e202fc06fc8875bf3/server/src/main/java/com/genymobile/scrcpy/wrappers/InputManager.java#L27
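
Since `injectInputEvent()` is not part of the public SDK, the wrapper has to reach it through reflection. A simplified sketch of that pattern (not the actual wrapper class, which also resolves and caches the service and method):

```java
import android.view.InputEvent;

import java.lang.reflect.Method;

public final class InputInjectionSketch {
    // Value of the hidden INJECT_INPUT_EVENT_MODE_ASYNC constant
    public static final int INJECT_INPUT_EVENT_MODE_ASYNC = 0;

    // `inputManager` is the android.hardware.input.InputManager instance,
    // typically obtained through the (hidden) service manager.
    public static boolean injectInputEvent(Object inputManager, InputEvent event) {
        try {
            Method method = inputManager.getClass()
                    .getMethod("injectInputEvent", InputEvent.class, int.class);
            return (boolean) method.invoke(inputManager, event, INJECT_INPUT_EVENT_MODE_ASYNC);
        } catch (ReflectiveOperationException e) {
            return false; // injection is best-effort in this sketch
        }
    }
}
```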

@@ -140,126 +223,222 @@ _hidden_ method [`InputManager.injectInputEvent`] (exposed by our

The client relies on [SDL], which provides a cross-platform API for UI, input
events, threading, etc.

The video stream is decoded by [libav] (FFmpeg).
The video and audio streams are decoded by [FFmpeg].

[SDL]: https://www.libsdl.org
[libav]: https://www.libav.org/
[ffmpeg]: https://ffmpeg.org/


### Initialization

On startup, in addition to _libav_ and _SDL_ initialization, the client must
push and start the server on the device, and open two sockets (one for the video
stream, one for control) so that they may communicate.
The client parses the command line arguments, then [runs one of two code
paths][run]:
- scrcpy in "normal" mode ([`scrcpy.c`])
- scrcpy in [OTG mode](hid-otg.md) ([`scrcpy_otg.c`])

Note that the client-server roles are expressed at the application level:
[run]: https://github.com/Genymobile/scrcpy/blob/a3cdf1a6b86ea22786e1f7d09b9c202feabc6949/app/src/main.c#L81-L82
[`scrcpy.c`]: https://github.com/Genymobile/scrcpy/blob/a3cdf1a6b86ea22786e1f7d09b9c202feabc6949/app/src/scrcpy.c#L292-L293
[`scrcpy_otg.c`]: https://github.com/Genymobile/scrcpy/blob/a3cdf1a6b86ea22786e1f7d09b9c202feabc6949/app/src/usb/scrcpy_otg.c#L51-L52

- the server _serves_ the video stream and handles requests from the client,
- the client _controls_ the device through the server.
In the remainder of this document, we assume that the "normal" mode is used
(read the code for the OTG mode).

However, the roles are reversed at the network level:

- the client opens a server socket and listens on a port before starting the
  server,
- the server connects to the client.

This role inversion guarantees that the connection will not fail due to race
conditions, and avoids polling.

_(Note that over TCP/IP, the roles are not reversed, due to a bug in `adb
reverse`. See commit [1038bad] and [issue #5].)_

Once the server is connected, it sends the device information (name and initial
screen dimensions). Thus, the client may initialize the window and renderer,
before the first frame is available.

To minimize startup time, SDL initialization is performed while listening for
the connection from the server (see commit [90a46b4]).

[1038bad]: https://github.com/Genymobile/scrcpy/commit/1038bad3850f18717a048a4d5c0f8110e54ee172
[issue #5]: https://github.com/Genymobile/scrcpy/issues/5
[90a46b4]: https://github.com/Genymobile/scrcpy/commit/90a46b4c45637d083e877020d85ade52a9a5fa8e
On startup, the client:
- opens the _video_, _audio_ and _control_ sockets;
- pushes and starts the server on the device;
- initializes its components (demuxers, decoders, recorder…).


### Threading
### Video and audio streams

The client uses 4 threads:

- the **main** thread, executing the SDL event loop,
- the **stream** thread, receiving the video and used for decoding and
  recording,
- the **controller** thread, sending _control messages_ to the server,
- the **receiver** thread (managed by the controller), receiving _device
  messages_ from the server.

In addition, another thread can be started if necessary to handle APK
installation or file push requests (via drag&drop on the main window) or to
print the framerate regularly in the console.
### Stream

The video [stream] is received from the socket (connected to the server on the
device) in a separate thread.

If a [decoder] is present (i.e. `--no-display` is not set), then it uses _libav_
to decode the H.264 stream from the socket, and notifies the main thread when a
new frame is available.

There are two [frames][video_buffer] simultaneously in memory:
- the **decoding** frame, written by the decoder from the decoder thread,
- the **rendering** frame, rendered in a texture from the main thread.

When a new decoded frame is available, the decoder _swaps_ the decoding and
rendering frame (with proper synchronization). Thus, it immediately starts
to decode a new frame while the main thread renders the last one.

If a [recorder] is present (i.e. `--record` is enabled), then it muxes the raw
H.264 packets into the output video file.

[stream]: https://github.com/Genymobile/scrcpy/blob/ffe0417228fb78ab45b7ee4e202fc06fc8875bf3/app/src/stream.h
[decoder]: https://github.com/Genymobile/scrcpy/blob/ffe0417228fb78ab45b7ee4e202fc06fc8875bf3/app/src/decoder.h
[video_buffer]: https://github.com/Genymobile/scrcpy/blob/ffe0417228fb78ab45b7ee4e202fc06fc8875bf3/app/src/video_buffer.h
[recorder]: https://github.com/Genymobile/scrcpy/blob/ffe0417228fb78ab45b7ee4e202fc06fc8875bf3/app/src/recorder.h
Depending on the arguments passed to `scrcpy`, several components may be used.
Here is an overview of the video and audio components:

```
                                 +----------+      +----------+
                            ---> | decoder  | ---> |  screen  |
            +---------+    /     +----------+      +----------+
socket ---> | stream  | ----
            +---------+    \     +----------+
                            ---> | recorder |
                                 +----------+

                                             V4L2 sink
                                            /
                                    decoder
                                   /        \
        VIDEO -------------> demuxer        display
                                   \
                                    recorder
                                   /
        AUDIO -------------> demuxer
                                   \
                                    decoder --- audio player
```

The _demuxer_ is responsible for extracting video and audio packets (reading
some headers, splitting the video stream into packets at correct boundaries,
etc.).

The demuxed packets may be sent to a _decoder_ (one per stream, to produce
frames) and to a recorder (receiving both video and audio streams to record a
single file). The packets are encoded on the device (by `MediaCodec`), but when
recording, they are _muxed_ (asynchronously) into a container (MKV or MP4) on
the client side.

Video frames are sent to the screen/display to be rendered in the scrcpy window.
They may also be sent to a [V4L2 sink](v4l2.md).

Audio "frames" (an array of decoded samples) are sent to the audio player.
### Controller

The [controller] is responsible for sending _control messages_ to the device. It
The _controller_ is responsible for sending _control messages_ to the device. It
runs in a separate thread, to avoid I/O on the main thread.

On SDL event, received on the main thread, the [input manager][inputmanager]
creates appropriate [_control messages_][controlmsg]. It is responsible for
converting SDL events to Android events (using [convert]). It pushes the _control
messages_ to a queue held by the controller. On its own thread, the controller
takes messages from the queue, serializes them and sends them to the device.

[controller]: https://github.com/Genymobile/scrcpy/blob/ffe0417228fb78ab45b7ee4e202fc06fc8875bf3/app/src/controller.h
[controlmsg]: https://github.com/Genymobile/scrcpy/blob/ffe0417228fb78ab45b7ee4e202fc06fc8875bf3/app/src/control_msg.h
[inputmanager]: https://github.com/Genymobile/scrcpy/blob/ffe0417228fb78ab45b7ee4e202fc06fc8875bf3/app/src/input_manager.h
[convert]: https://github.com/Genymobile/scrcpy/blob/ffe0417228fb78ab45b7ee4e202fc06fc8875bf3/app/src/convert.h
On SDL event, received on the main thread, the _input manager_ creates
appropriate _control messages_. It is responsible for converting SDL events to
Android events. It then pushes the _control messages_ to a queue held by the
controller. On its own thread, the controller takes messages from the queue,
serializes them and sends them to the device.


### UI and event loop
## Protocol

Initialization, input events and rendering are all [managed][scrcpy] in the main
thread.
The protocol between the client and the server must be considered _internal_: it
may (and will) change at any time for any reason. Everything may change (the
number of sockets, the order in which the sockets must be opened, the data
format on the wire…) from version to version. A client must always be run with a
matching server version.

Events are handled in the [event loop], which either updates the [screen] or
delegates to the [input manager][inputmanager].
This section documents the current protocol in scrcpy v2.1.

[scrcpy]: https://github.com/Genymobile/scrcpy/blob/ffe0417228fb78ab45b7ee4e202fc06fc8875bf3/app/src/scrcpy.c
[event loop]: https://github.com/Genymobile/scrcpy/blob/ffe0417228fb78ab45b7ee4e202fc06fc8875bf3/app/src/scrcpy.c#L201
[screen]: https://github.com/Genymobile/scrcpy/blob/ffe0417228fb78ab45b7ee4e202fc06fc8875bf3/app/src/screen.h
### Connection

Firstly, the client sets up an adb tunnel:

```bash
# By default, a reverse redirection: the computer listens, the device connects
adb reverse localabstract:scrcpy_<SCID> tcp:27183

# As a fallback (or if --force-adb-forward is set), a forward redirection:
# the device listens, the computer connects
adb forward tcp:27183 localabstract:scrcpy_<SCID>
```

(`<SCID>` is a 31-bit random number, so that it does not fail when several
scrcpy instances start "at the same time" for the same device.)

Then, up to 3 sockets are opened, in that order:
- a _video_ socket
- an _audio_ socket
- a _control_ socket

Each one may be disabled (respectively by `--no-video`, `--no-audio` and
`--no-control`, directly or indirectly). For example, if `--no-audio` is set,
then the _video_ socket is opened first, then the _control_ socket.

On the _first_ socket opened (whichever it is), if the tunnel is _forward_, then
a [dummy byte] is sent from the device to the client. This allows detecting a
connection error (the client connection does not fail as long as there is an adb
forward redirection, even if nothing is listening on the device side).

Still on this _first_ socket, the device sends some [metadata][device meta] to
the client (currently only the device name, used as the window title, but there
might be other fields in the future).

[dummy byte]: https://github.com/Genymobile/scrcpy/blob/a3cdf1a6b86ea22786e1f7d09b9c202feabc6949/server/src/main/java/com/genymobile/scrcpy/DesktopConnection.java#L93
[device meta]: https://github.com/Genymobile/scrcpy/blob/a3cdf1a6b86ea22786e1f7d09b9c202feabc6949/server/src/main/java/com/genymobile/scrcpy/DesktopConnection.java#L151

You can read the [client][client-connection] and [server][server-connection]
code for more details.

[client-connection]: https://github.com/Genymobile/scrcpy/blob/a3cdf1a6b86ea22786e1f7d09b9c202feabc6949/app/src/server.c#L465-L466
[server-connection]: https://github.com/Genymobile/scrcpy/blob/a3cdf1a6b86ea22786e1f7d09b9c202feabc6949/server/src/main/java/com/genymobile/scrcpy/DesktopConnection.java#L63

Then each socket is used for its intended purpose.
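
For illustration, here is a minimal sketch of what a third-party client could do on the first socket (hypothetical code, not scrcpy's client; the 64-byte length of the NUL-padded device name field is an assumption to verify against `DesktopConnection`):

```java
import java.io.DataInputStream;
import java.io.IOException;
import java.net.Socket;
import java.nio.charset.StandardCharsets;

public final class HandshakeSketch {
    static String readDeviceName(Socket firstSocket, boolean forwardTunnel) throws IOException {
        DataInputStream in = new DataInputStream(firstSocket.getInputStream());
        if (forwardTunnel) {
            in.readByte(); // dummy byte, only sent on forward connections
        }
        byte[] name = new byte[64]; // assumed DEVICE_NAME_FIELD_LENGTH
        in.readFully(name);
        int len = 0;
        while (len < name.length && name[len] != 0) {
            len++; // the name is NUL-padded
        }
        return new String(name, 0, len, StandardCharsets.UTF_8);
    }
}
```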

### Video and audio

On the _video_ and _audio_ sockets, the device first sends some [codec
metadata]:
- On the _video_ socket, 12 bytes:
  - the codec id (`u32`) (H264, H265 or AV1)
  - the initial video width (`u32`)
  - the initial video height (`u32`)
- On the _audio_ socket, 4 bytes:
  - the codec id (`u32`) (OPUS, AAC or RAW)

[codec metadata]: https://github.com/Genymobile/scrcpy/blob/a3cdf1a6b86ea22786e1f7d09b9c202feabc6949/server/src/main/java/com/genymobile/scrcpy/Streamer.java#L33-L51

Then each packet produced by `MediaCodec` is sent, prefixed by a 12-byte [frame
header]:
- config packet flag (`u1`)
- key frame flag (`u1`)
- PTS (`u62`)
- packet size (`u32`)

Here is a schema describing the frame header:

```
    [. . . . . . . .|. . . .]. . . . . . . . . . . . . . . ...
     <-------------> <-----> <-----------------------------...
           PTS        packet        raw packet
                       size
     <--------------------->
           frame header

    The most significant bits of the PTS are used for packet flags:

     byte 7   byte 6   byte 5   byte 4   byte 3   byte 2   byte 1   byte 0
    CK...... ........ ........ ........ ........ ........ ........ ........
    ^^<------------------------------------------------------------------->
    ||                                PTS
    | `- key frame
     `-- config packet
```

[frame header]: https://github.com/Genymobile/scrcpy/blob/a3cdf1a6b86ea22786e1f7d09b9c202feabc6949/server/src/main/java/com/genymobile/scrcpy/Streamer.java#L83
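
A sketch of how a client could decode this header (illustrative only; the field layout is taken from the list above, and network byte order is assumed, which matches what `DataInputStream` reads):

```java
import java.io.DataInputStream;
import java.io.IOException;

public final class FrameHeaderSketch {
    static final class Packet {
        boolean config;
        boolean keyFrame;
        long pts;       // in microseconds (meaningless for config packets)
        byte[] payload;
    }

    // Read one frame header and its payload from a video or audio socket.
    static Packet readPacket(DataInputStream in) throws IOException {
        long ptsAndFlags = in.readLong();      // 8 bytes: flags in the 2 most significant bits
        int size = in.readInt();               // 4 bytes: packet size

        Packet packet = new Packet();
        packet.config = (ptsAndFlags & (1L << 63)) != 0;   // C flag
        packet.keyFrame = (ptsAndFlags & (1L << 62)) != 0; // K flag
        packet.pts = ptsAndFlags & ((1L << 62) - 1);       // remaining 62 bits
        packet.payload = new byte[size];
        in.readFully(packet.payload);
        return packet;
    }
}
```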

### Controls

Control messages are sent via a custom binary protocol.

The only documentation for this protocol is the set of unit tests on both sides:
- `ControlMessage` (from client to device): [serialization](https://github.com/Genymobile/scrcpy/blob/master/app/tests/test_control_msg_serialize.c) | [deserialization](https://github.com/Genymobile/scrcpy/blob/master/server/src/test/java/com/genymobile/scrcpy/ControlMessageReaderTest.java)
- `DeviceMessage` (from device to client): [serialization](https://github.com/Genymobile/scrcpy/blob/master/server/src/test/java/com/genymobile/scrcpy/DeviceMessageWriterTest.java) | [deserialization](https://github.com/Genymobile/scrcpy/blob/master/app/tests/test_device_msg_deserialize.c)


## Standalone server

Although the server is designed to work with the scrcpy client, it can be used
with any client which uses the same protocol.

For simplicity, some [server-specific options] have been added to produce raw
streams easily:
- `send_device_meta=false`: disable the device metadata (in practice, the device
  name) sent on the _first_ socket
- `send_frame_meta=false`: disable the 12-byte header for each packet
- `send_dummy_byte`: disable the dummy byte sent on forward connections
- `send_codec_meta`: disable the codec information (and initial device size for
  video)
- `raw_stream`: disable all the above

[server-specific options]: https://github.com/Genymobile/scrcpy/blob/a3cdf1a6b86ea22786e1f7d09b9c202feabc6949/server/src/main/java/com/genymobile/scrcpy/Options.java#L309-L329

Concretely, here is how to expose a raw H.264 stream on a TCP socket:

```bash
adb push scrcpy-server-v2.1 /data/local/tmp/scrcpy-server-manual.jar
adb forward tcp:1234 localabstract:scrcpy
adb shell CLASSPATH=/data/local/tmp/scrcpy-server-manual.jar \
    app_process / com.genymobile.scrcpy.Server 2.1 \
    tunnel_forward=true audio=false control=false cleanup=false \
    raw_stream=true max_size=1920
```

As soon as a client connects over TCP on port 1234, the device will start
streaming the video. For example, VLC can play the video (although you will
experience a very high latency, more details [here][vlc-0latency]):

```
vlc -Idummy --demux=h264 --network-caching=0 tcp://localhost:1234
```

[vlc-0latency]: https://code.videolan.org/rom1v/vlc/-/merge_requests/20


## Hack

158 doc/device.md
@@ -1,156 +1,9 @@

# Device

## Selection

If exactly one device is connected (i.e. listed by `adb devices`), then it is
automatically selected.

However, if there are multiple devices connected, you must specify the one to
use in one of 4 ways:
- by its serial:
  ```bash
  scrcpy --serial=0123456789abcdef
  scrcpy -s 0123456789abcdef # short version

  # the serial is the ip:port if connected over TCP/IP (same behavior as adb)
  scrcpy --serial=192.168.1.1:5555
  ```
- the one connected over USB (if there is exactly one):
  ```bash
  scrcpy --select-usb
  scrcpy -d # short version
  ```
- the one connected over TCP/IP (if there is exactly one):
  ```bash
  scrcpy --select-tcpip
  scrcpy -e # short version
  ```
- a device already listening on TCP/IP (see [below](#tcpip-wireless)):
  ```bash
  scrcpy --tcpip=192.168.1.1:5555
  scrcpy --tcpip=192.168.1.1 # default port is 5555
  ```

The serial may also be provided via the environment variable `ANDROID_SERIAL`
(also used by `adb`):

```bash
# in bash
export ANDROID_SERIAL=0123456789abcdef
scrcpy
```

```cmd
:: in cmd
set ANDROID_SERIAL=0123456789abcdef
scrcpy
```

```powershell
# in PowerShell
$env:ANDROID_SERIAL = '0123456789abcdef'
scrcpy
```


## TCP/IP (wireless)

_Scrcpy_ uses `adb` to communicate with the device, and `adb` can [connect] to a
device over TCP/IP. The device must be connected on the same network as the
computer.

[connect]: https://developer.android.com/studio/command-line/adb.html#wireless


### Automatic

The `--tcpip` option allows configuring the connection automatically. There are
two variants.

If the device (accessible at 192.168.1.1 in this example) already listens on a
port (typically 5555) for incoming _adb_ connections, then run:

```bash
scrcpy --tcpip=192.168.1.1 # default port is 5555
scrcpy --tcpip=192.168.1.1:5555
```

If _adb_ TCP/IP mode is disabled on the device (or if you don't know the IP
address), connect the device over USB, then run:

```bash
scrcpy --tcpip # without arguments
```

It will automatically find the device IP address and adb port, enable TCP/IP
mode if necessary, then connect to the device before starting.


### Manual

Alternatively, it is possible to enable the TCP/IP connection manually using
`adb`:

1. Plug the device into a USB port on your computer.
2. Connect the device to the same Wi-Fi network as your computer.
3. Get your device IP address, in Settings → About phone → Status, or by
   executing this command:

    ```bash
    adb shell ip route | awk '{print $9}'
    ```

4. Enable `adb` over TCP/IP on your device: `adb tcpip 5555`.
5. Unplug your device.
6. Connect to your device: `adb connect DEVICE_IP:5555` _(replace `DEVICE_IP`
   with the device IP address you found)_.
7. Run `scrcpy` as usual.
8. Run `adb disconnect` once you're done.

Since Android 11, a [wireless debugging option][adb-wireless] allows bypassing
the need to physically connect your device directly to your computer.

[adb-wireless]: https://developer.android.com/studio/command-line/adb#wireless-android11-command-line


## Autostart

A small tool (by the scrcpy author) allows running arbitrary commands whenever a
new Android device is connected: [AutoAdb]. It can be used to start scrcpy:

```bash
autoadb scrcpy -s '{}'
```

[AutoAdb]: https://github.com/rom1v/autoadb


## Display

If several displays are available on the Android device, it is possible to
select the display to mirror:

```bash
scrcpy --display=1
```

The list of display ids can be retrieved by:

```bash
scrcpy --list-displays
```

A secondary display may only be controlled if the device runs at least Android
10 (otherwise it is mirrored as read-only).


## Actions

Some command line arguments perform actions on the device itself while scrcpy is
running.


### Stay awake
## Stay awake

To prevent the device from sleeping after a delay **when the device is plugged
in**:
@@ -166,7 +19,7 @@ If the device is not plugged in (i.e. only connected over TCP/IP),
`--stay-awake` has no effect (this is the Android behavior).


### Turn screen off
## Turn screen off

It is possible to turn the device screen off while mirroring on start with a
command-line option:
@@ -194,7 +47,7 @@ scrcpy -Sw # short version
```


### Show touches
## Show touches

For presentations, it may be useful to show physical touches (on the physical
device). Android exposes this feature in _Developer options_.
@@ -210,7 +63,7 @@ scrcpy -t # short version

Note that it only shows _physical_ touches (by a finger on the device).


### Power off on close
## Power off on close

To turn the device screen off when closing _scrcpy_:

@@ -218,11 +71,10 @@ To turn the device screen off when closing _scrcpy_:
scrcpy --power-off-on-close
```

### Power on on start
## Power on on start

By default, on start, the device is powered on. To prevent this behavior:

```bash
scrcpy --no-power-on
```

@@ -106,3 +106,7 @@ scrcpy --otg # keyboard and mouse

Like `--hid-keyboard` and `--hid-mouse`, it only works if the device is
connected over USB.

## HID/OTG issues on Windows

See [FAQ](/FAQ.md#hidotg-issues-on-windows).
@@ -9,12 +9,10 @@ Scrcpy is packaged in several distributions and package managers:

- Debian/Ubuntu: `apt install scrcpy`
- Arch Linux: `pacman -S scrcpy`
- Fedora: `dnf copr enable zeno/scrcpy && dnf install scrcpy`
- Gentoo: [ebuild][ebuild-link] file
- Gentoo: `emerge scrcpy`
- Snap: `snap install scrcpy`
- … (see [repology](https://repology.org/project/scrcpy/versions))

[ebuild-link]: https://github.com/maggu2810/maggu2810-overlay/tree/master/app-mobilephone/scrcpy

### Latest version

However, the packaged version is not always the latest release. To install the

@@ -13,14 +13,12 @@ To record only the video:
scrcpy --no-audio --record=file.mp4
```

_It is currently not possible to record only the audio._

To disable mirroring while recording:
To record only the audio:

```bash
scrcpy --no-display --record=file.mp4
scrcpy -Nr file.mkv
# interrupt recording with Ctrl+C
scrcpy --no-video --record=file.opus
scrcpy --no-video --audio-codec=aac --record=file.aac
# .m4a/.mp4 and .mka/.mkv are also supported for both opus and aac
```

Timestamps are captured on the device, so [packet delay variation] does not
@@ -29,6 +27,9 @@ course, not if you capture your scrcpy window and audio output on the computer).

[packet delay variation]: https://en.wikipedia.org/wiki/Packet_delay_variation


## Format

The video and audio streams are encoded on the device, but are muxed on the
client side. Two formats (containers) are supported:
- Matroska (`.mkv`)
@@ -42,3 +43,36 @@ needs not end with `.mkv` or `.mp4`):

```
scrcpy --record=file --record-format=mkv
```


## No playback

To disable playback while recording:

```bash
scrcpy --no-playback --record=file.mp4
scrcpy -Nr file.mkv
# interrupt recording with Ctrl+C
```

It is also possible to disable video and audio playback separately:

```bash
# Record both video and audio, but only play video
scrcpy --record=file.mkv --no-audio-playback
```

## Time limit

To limit the recording time:

```bash
scrcpy --record=file.mkv --time-limit=20 # in seconds
```

The `--time-limit` option is not limited to recording; it also impacts simple
mirroring:

```
scrcpy --time-limit=20
```
@@ -29,7 +29,7 @@ _<kbd>[Super]</kbd> is typically the <kbd>Windows</kbd> or <kbd>Cmd</kbd> key._

 | Resize window to 1:1 (pixel-perfect)  | <kbd>MOD</kbd>+<kbd>g</kbd>
 | Resize window to remove black borders | <kbd>MOD</kbd>+<kbd>w</kbd> \| _Double-left-click¹_
 | Click on `HOME`                       | <kbd>MOD</kbd>+<kbd>h</kbd> \| _Middle-click_
 | Click on `BACK`                       | <kbd>MOD</kbd>+<kbd>b</kbd> \| _Right-click²_
 | Click on `BACK`                       | <kbd>MOD</kbd>+<kbd>b</kbd> \| <kbd>MOD</kbd>+<kbd>Backspace</kbd> \| _Right-click²_
 | Click on `APP_SWITCH`                 | <kbd>MOD</kbd>+<kbd>s</kbd> \| _4th-click³_
 | Click on `MENU` (unlock screen)⁴      | <kbd>MOD</kbd>+<kbd>m</kbd>
 | Click on `VOLUME_UP`                  | <kbd>MOD</kbd>+<kbd>↑</kbd> _(up)_

@@ -35,7 +35,7 @@ To start `scrcpy` using a v4l2 sink:

```bash
scrcpy --v4l2-sink=/dev/videoN
scrcpy --v4l2-sink=/dev/videoN --no-display # disable mirroring window
scrcpy --v4l2-sink=/dev/videoN --no-video-playback # disable playback window
```

(replace `N` with the device ID, check with `ls /dev/video*`)
65 doc/video.md

@@ -21,7 +21,7 @@ If encoding fails, scrcpy automatically tries again with a lower definition

## Bit rate

The default video bit-rate is 8 Mbps. To change it:
The default video bit rate is 8 Mbps. To change it:

```bash
scrcpy --video-bit-rate=2M
@@ -66,6 +66,14 @@ scrcpy --video-codec=av1
H265 may provide better quality, but H264 should provide lower latency.
AV1 encoders are not common on current Android devices.

For advanced usage, to pass arbitrary parameters to the [`MediaFormat`],
check `--video-codec-options` in the manpage or in `scrcpy --help`.

[`MediaFormat`]: https://developer.android.com/reference/android/media/MediaFormat


## Encoder

Several encoders may be available on the device. They can be listed by:

```bash
@@ -79,11 +87,6 @@ try another one:
scrcpy --video-codec=h264 --video-encoder='OMX.qcom.video.encoder.avc'
```

For advanced usage, to pass arbitrary parameters to the [`MediaFormat`],
check `--video-codec-options` in the manpage or in `scrcpy --help`.

[`MediaFormat`]: https://developer.android.com/reference/android/media/MediaFormat


## Rotation

@@ -134,6 +137,25 @@ phone, landscape for a tablet).
If `--max-size` is also specified, resizing is applied after cropping.


## Display

If several displays are available on the Android device, it is possible to
select the display to mirror:

```bash
scrcpy --display-id=1
```

The list of display ids can be retrieved by:

```bash
scrcpy --list-displays
```

A secondary display may only be controlled if the device runs at least Android
10 (otherwise it is mirrored as read-only).


## Buffering

By default, there is no video buffering, to get the lowest possible latency.
@@ -159,17 +181,38 @@ scrcpy --display-buffer=50 --v4l2-buffer=300
```


## No display
## No playback

It is possible to capture an Android device without displaying a mirroring
window. This option is available if either [recording](recording.md) or
It is possible to capture an Android device without playing video or audio on
the computer. This option is useful when [recording](recording.md) or when
[v4l2](#video4linux) is enabled:

```bash
scrcpy --v4l2-sink=/dev/video2 --no-display
scrcpy --record=file.mkv --no-display
scrcpy --v4l2-sink=/dev/video2 --no-playback
scrcpy --record=file.mkv --no-playback
# interrupt with Ctrl+C
```

It is also possible to disable video and audio playback separately:

```bash
# Send video to V4L2 sink without playing it, but keep audio playback
scrcpy --v4l2-sink=/dev/video2 --no-video-playback

# Record both video and audio, but only play video
scrcpy --record=file.mkv --no-audio-playback
```


## No video

To disable video forwarding completely, so that only audio is forwarded:

```
scrcpy --no-video
```


## Video4Linux

See the dedicated [Video4Linux](v4l2.md) page.

@@ -4,18 +4,24 @@

Download the [latest release]:

- [`scrcpy-win64-v2.0.zip`][direct-win64] (64-bit)
  <sub>SHA-256: `ae4c8d37a496b43f8974ba8f07f708e22a9570ba0cddc3dc3a36edbccd4d2a20`</sub>
- [`scrcpy-win32-v2.0.zip`][direct-win32] (32-bit)
  <sub>SHA-256: `15d98c02cb0e0bbd84f8b5d54991e0f6925569b1286a86a40743944fcb1c2d8c`</sub>
- [`scrcpy-win64-v2.1.1.zip`][direct-win64] (64-bit)
  <sub>SHA-256: `f77281e1bce2f9934617699c581f063d5b327f012eff602ee98fb2ef550c25c2`</sub>
- [`scrcpy-win32-v2.1.1.zip`][direct-win32] (32-bit)
  <sub>SHA-256: `ef7ae7fbe9449f2643febdc2244fb186d1a746a3c736394150cfd14f06d3c943`</sub>

[latest release]: https://github.com/Genymobile/scrcpy/releases/latest
[direct-win64]: https://github.com/Genymobile/scrcpy/releases/download/v2.0/scrcpy-win64-v2.0.zip
[direct-win32]: https://github.com/Genymobile/scrcpy/releases/download/v2.0/scrcpy-win32-v2.0.zip
[direct-win64]: https://github.com/Genymobile/scrcpy/releases/download/v2.1.1/scrcpy-win64-v2.1.1.zip
[direct-win32]: https://github.com/Genymobile/scrcpy/releases/download/v2.1.1/scrcpy-win32-v2.1.1.zip

and extract it.

Alternatively, you could install it from a package manager, like [Chocolatey]:
Alternatively, you could install it from a package manager, like [Winget]:

```bash
winget install scrcpy
```

or [Chocolatey]:

```bash
choco install scrcpy
@@ -30,6 +36,7 @@ scoop install scrcpy
scoop install adb # if you don't have it yet
```

[Winget]: https://github.com/microsoft/winget-cli
[Chocolatey]: https://chocolatey.org/
[Scoop]: https://scoop.sh

@@ -2,8 +2,8 @@
set -e

BUILDDIR=build-auto
PREBUILT_SERVER_URL=https://github.com/Genymobile/scrcpy/releases/download/v2.0/scrcpy-server-v2.0
PREBUILT_SERVER_SHA256=9e241615f578cd690bb43311000debdecf6a9c50a7082b001952f18f6f21ddc2
PREBUILT_SERVER_URL=https://github.com/Genymobile/scrcpy/releases/download/v2.1.1/scrcpy-server-v2.1.1
PREBUILT_SERVER_SHA256=9558db6c56743a1dc03b38f59801fb40e91cc891f8fc0c89e5b0b067761f148e

echo "[scrcpy] Downloading prebuilt server..."
wget "$PREBUILT_SERVER_URL" -O scrcpy-server
@@ -1,5 +1,5 @@
project('scrcpy', 'c',
    version: '2.0',
    version: '2.1.1',
    meson_version: '>= 0.48',
    default_options: [
        'c_std=c11',
@@ -7,6 +7,8 @@ project('scrcpy', 'c',
        'b_ndebug=if-release',
    ])

add_project_arguments('-Wmissing-prototypes', language: 'c')

if get_option('compile_app')
    subdir('app')
endif

34 release.mk

@@ -94,15 +94,14 @@ dist-win32: build-server build-win32
cp app/data/scrcpy-noconsole.vbs "$(DIST)/$(WIN32_TARGET_DIR)"
cp app/data/icon.png "$(DIST)/$(WIN32_TARGET_DIR)"
cp app/data/open_a_terminal_here.bat "$(DIST)/$(WIN32_TARGET_DIR)"
cp app/prebuilt-deps/data/ffmpeg-6.0-scrcpy-2/win32/bin/avutil-58.dll "$(DIST)/$(WIN32_TARGET_DIR)/"
cp app/prebuilt-deps/data/ffmpeg-6.0-scrcpy-2/win32/bin/avcodec-60.dll "$(DIST)/$(WIN32_TARGET_DIR)/"
cp app/prebuilt-deps/data/ffmpeg-6.0-scrcpy-2/win32/bin/avformat-60.dll "$(DIST)/$(WIN32_TARGET_DIR)/"
cp app/prebuilt-deps/data/ffmpeg-6.0-scrcpy-2/win32/bin/swresample-4.dll "$(DIST)/$(WIN32_TARGET_DIR)/"
cp app/prebuilt-deps/data/ffmpeg-6.0-scrcpy-2/win32/bin/zlib1.dll "$(DIST)/$(WIN32_TARGET_DIR)/"
cp app/prebuilt-deps/data/platform-tools-34.0.1/adb.exe "$(DIST)/$(WIN32_TARGET_DIR)/"
cp app/prebuilt-deps/data/platform-tools-34.0.1/AdbWinApi.dll "$(DIST)/$(WIN32_TARGET_DIR)/"
cp app/prebuilt-deps/data/platform-tools-34.0.1/AdbWinUsbApi.dll "$(DIST)/$(WIN32_TARGET_DIR)/"
cp app/prebuilt-deps/data/SDL2-2.26.4/i686-w64-mingw32/bin/SDL2.dll "$(DIST)/$(WIN32_TARGET_DIR)/"
cp app/prebuilt-deps/data/ffmpeg-6.0-scrcpy-4/win32/bin/avutil-58.dll "$(DIST)/$(WIN32_TARGET_DIR)/"
cp app/prebuilt-deps/data/ffmpeg-6.0-scrcpy-4/win32/bin/avcodec-60.dll "$(DIST)/$(WIN32_TARGET_DIR)/"
cp app/prebuilt-deps/data/ffmpeg-6.0-scrcpy-4/win32/bin/avformat-60.dll "$(DIST)/$(WIN32_TARGET_DIR)/"
cp app/prebuilt-deps/data/ffmpeg-6.0-scrcpy-4/win32/bin/swresample-4.dll "$(DIST)/$(WIN32_TARGET_DIR)/"
cp app/prebuilt-deps/data/platform-tools-34.0.3/adb.exe "$(DIST)/$(WIN32_TARGET_DIR)/"
cp app/prebuilt-deps/data/platform-tools-34.0.3/AdbWinApi.dll "$(DIST)/$(WIN32_TARGET_DIR)/"
cp app/prebuilt-deps/data/platform-tools-34.0.3/AdbWinUsbApi.dll "$(DIST)/$(WIN32_TARGET_DIR)/"
cp app/prebuilt-deps/data/SDL2-2.28.0/i686-w64-mingw32/bin/SDL2.dll "$(DIST)/$(WIN32_TARGET_DIR)/"
cp app/prebuilt-deps/data/libusb-1.0.26/libusb-MinGW-Win32/bin/msys-usb-1.0.dll "$(DIST)/$(WIN32_TARGET_DIR)/"

dist-win64: build-server build-win64
@@ -113,15 +112,14 @@ dist-win64: build-server build-win64
cp app/data/scrcpy-noconsole.vbs "$(DIST)/$(WIN64_TARGET_DIR)"
cp app/data/icon.png "$(DIST)/$(WIN64_TARGET_DIR)"
cp app/data/open_a_terminal_here.bat "$(DIST)/$(WIN64_TARGET_DIR)"
cp app/prebuilt-deps/data/ffmpeg-6.0-scrcpy-2/win64/bin/avutil-58.dll "$(DIST)/$(WIN64_TARGET_DIR)/"
cp app/prebuilt-deps/data/ffmpeg-6.0-scrcpy-2/win64/bin/avcodec-60.dll "$(DIST)/$(WIN64_TARGET_DIR)/"
cp app/prebuilt-deps/data/ffmpeg-6.0-scrcpy-2/win64/bin/avformat-60.dll "$(DIST)/$(WIN64_TARGET_DIR)/"
cp app/prebuilt-deps/data/ffmpeg-6.0-scrcpy-2/win64/bin/swresample-4.dll "$(DIST)/$(WIN64_TARGET_DIR)/"
cp app/prebuilt-deps/data/ffmpeg-6.0-scrcpy-2/win64/bin/zlib1.dll "$(DIST)/$(WIN64_TARGET_DIR)/"
cp app/prebuilt-deps/data/platform-tools-34.0.1/adb.exe "$(DIST)/$(WIN64_TARGET_DIR)/"
cp app/prebuilt-deps/data/platform-tools-34.0.1/AdbWinApi.dll "$(DIST)/$(WIN64_TARGET_DIR)/"
cp app/prebuilt-deps/data/platform-tools-34.0.1/AdbWinUsbApi.dll "$(DIST)/$(WIN64_TARGET_DIR)/"
cp app/prebuilt-deps/data/SDL2-2.26.4/x86_64-w64-mingw32/bin/SDL2.dll "$(DIST)/$(WIN64_TARGET_DIR)/"
cp app/prebuilt-deps/data/ffmpeg-6.0-scrcpy-4/win64/bin/avutil-58.dll "$(DIST)/$(WIN64_TARGET_DIR)/"
cp app/prebuilt-deps/data/ffmpeg-6.0-scrcpy-4/win64/bin/avcodec-60.dll "$(DIST)/$(WIN64_TARGET_DIR)/"
cp app/prebuilt-deps/data/ffmpeg-6.0-scrcpy-4/win64/bin/avformat-60.dll "$(DIST)/$(WIN64_TARGET_DIR)/"
cp app/prebuilt-deps/data/ffmpeg-6.0-scrcpy-4/win64/bin/swresample-4.dll "$(DIST)/$(WIN64_TARGET_DIR)/"
cp app/prebuilt-deps/data/platform-tools-34.0.3/adb.exe "$(DIST)/$(WIN64_TARGET_DIR)/"
cp app/prebuilt-deps/data/platform-tools-34.0.3/AdbWinApi.dll "$(DIST)/$(WIN64_TARGET_DIR)/"
cp app/prebuilt-deps/data/platform-tools-34.0.3/AdbWinUsbApi.dll "$(DIST)/$(WIN64_TARGET_DIR)/"
cp app/prebuilt-deps/data/SDL2-2.28.0/x86_64-w64-mingw32/bin/SDL2.dll "$(DIST)/$(WIN64_TARGET_DIR)/"
cp app/prebuilt-deps/data/libusb-1.0.26/libusb-MinGW-x64/bin/msys-usb-1.0.dll "$(DIST)/$(WIN64_TARGET_DIR)/"

zip-win32: dist-win32
@@ -7,8 +7,8 @@ android {
        applicationId "com.genymobile.scrcpy"
        minSdkVersion 21
        targetSdkVersion 33
        versionCode 20000
        versionName "2.0"
        versionCode 20101
        versionName "2.1.1"
        testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner"
    }
    buildTypes {

@@ -12,10 +12,10 @@
set -e

SCRCPY_DEBUG=false
SCRCPY_VERSION_NAME=2.0
SCRCPY_VERSION_NAME=2.1.1

PLATFORM=${ANDROID_PLATFORM:-23}
BUILD_TOOLS=${ANDROID_BUILD_TOOLS:-23.0.3}
PLATFORM=${ANDROID_PLATFORM:-33}
BUILD_TOOLS=${ANDROID_BUILD_TOOLS:-33.0.0}
BUILD_TOOLS_DIR="$ANDROID_HOME/build-tools/$BUILD_TOOLS"

BUILD_DIR="$(realpath ${BUILD_DIR:-build_manual})"
@@ -43,27 +43,17 @@ public final class BuildConfig {
}
EOF

STUBS_DIR="$BUILD_DIR/stubs"
rm -rf "$STUBS_DIR"
mkdir -p "$STUBS_DIR"
echo "Generating SDK stubs..."
cd "$SERVER_DIR/src/main/stubs"
javac -bootclasspath "$ANDROID_JAR" \
    -d "$STUBS_DIR" \
    -source 1.8 -target 1.8 \
    android/content/*
cd -

echo "Generating java from aidl..."
cd "$SERVER_DIR/src/main/aidl"
"$BUILD_TOOLS_DIR/aidl" -o"$GEN_DIR" android/view/IRotationWatcher.aidl
"$BUILD_TOOLS_DIR/aidl" -o"$GEN_DIR" \
    android/content/IOnPrimaryClipChangedListener.aidl
"$BUILD_TOOLS_DIR/aidl" -o"$GEN_DIR" android/view/IDisplayFoldListener.aidl

echo "Compiling java sources..."
cd ../java
javac -bootclasspath "$ANDROID_JAR" \
    -cp "$LAMBDA_JAR:$GEN_DIR:$STUBS_DIR" \
    -cp "$LAMBDA_JAR:$GEN_DIR" \
    -d "$CLASSES_DIR" \
    -source 1.8 -target 1.8 \
    com/genymobile/scrcpy/*.java \
26 server/src/main/aidl/android/view/IDisplayFoldListener.aidl (new file)

@@ -0,0 +1,26 @@
/*
 * Copyright (C) 2019 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.view;

/**
 * {@hide}
 */
oneway interface IDisplayFoldListener
{
    /** Called when the foldedness of a display changes */
    void onDisplayFoldChanged(int displayId, boolean folded);
}

@@ -1,7 +1,16 @@
package com.genymobile.scrcpy;

public interface AsyncProcessor {
    void start();
    interface TerminationListener {
        /**
         * Notify processor termination
         *
         * @param fatalError {@code true} if this must cause the termination of the whole scrcpy-server.
         */
        void onTerminated(boolean fatalError);
    }

    void start(TerminationListener listener);
    void stop();
    void join() throws InterruptedException;
}
@ -5,17 +5,14 @@ import com.genymobile.scrcpy.wrappers.ServiceManager;
import android.annotation.SuppressLint;
import android.annotation.TargetApi;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.AudioTimestamp;
import android.media.MediaCodec;
import android.media.MediaRecorder;
import android.os.Build;
import android.os.SystemClock;

import java.lang.reflect.Method;
import java.nio.ByteBuffer;

public final class AudioCapture {
@ -23,53 +20,45 @@ public final class AudioCapture {
public static final int SAMPLE_RATE = 48000;
public static final int CHANNEL_CONFIG = AudioFormat.CHANNEL_IN_STEREO;
public static final int CHANNELS = 2;
public static final int FORMAT = AudioFormat.ENCODING_PCM_16BIT;
public static final int CHANNEL_MASK = AudioFormat.CHANNEL_IN_LEFT | AudioFormat.CHANNEL_IN_RIGHT;
public static final int ENCODING = AudioFormat.ENCODING_PCM_16BIT;
public static final int BYTES_PER_SAMPLE = 2;

private final int audioSource;

private AudioRecord recorder;

private final AudioTimestamp timestamp = new AudioTimestamp();
private long previousPts = 0;
private long nextPts = 0;

public AudioCapture(AudioSource audioSource) {
this.audioSource = audioSource.value();
}

public static int millisToBytes(int millis) {
return SAMPLE_RATE * CHANNELS * BYTES_PER_SAMPLE * millis / 1000;
}

private static AudioFormat createAudioFormat() {
AudioFormat.Builder builder = new AudioFormat.Builder();
builder.setEncoding(FORMAT);
builder.setEncoding(ENCODING);
builder.setSampleRate(SAMPLE_RATE);
builder.setChannelMask(CHANNEL_CONFIG);
return builder.build();
}

private static Method setBuilderContext;

@TargetApi(23)
private static void setBuilderContext(AudioRecord.Builder builder, Context context) {
try {
if (setBuilderContext == null) {
setBuilderContext = AudioRecord.Builder.class.getMethod("setContext", Context.class);
}
setBuilderContext.invoke(builder, context);
} catch (Exception e) {
Ln.e("Could not call AudioRecord.Builder.setContext() method");
//throw new RuntimeException(e);
}
}

@TargetApi(Build.VERSION_CODES.M)
@SuppressLint({"WrongConstant", "MissingPermission"})
private static AudioRecord createAudioRecord() {
private static AudioRecord createAudioRecord(int audioSource) {
AudioRecord.Builder builder = new AudioRecord.Builder();
if (Build.VERSION.SDK_INT >= 31) {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.S) {
// On older APIs, Workarounds.fillAppInfo() must be called beforehand
setBuilderContext(builder, FakeContext.get());
builder.setContext(FakeContext.get());
}
builder.setAudioSource(MediaRecorder.AudioSource.REMOTE_SUBMIX);
builder.setAudioSource(audioSource);
builder.setAudioFormat(createAudioFormat());
int minBufferSize = AudioRecord.getMinBufferSize(SAMPLE_RATE, CHANNEL_CONFIG, FORMAT);
int minBufferSize = AudioRecord.getMinBufferSize(SAMPLE_RATE, CHANNEL_CONFIG, ENCODING);
// This buffer size does not impact latency
builder.setBufferSizeInBytes(8 * minBufferSize);
return builder.build();
@ -103,8 +92,8 @@ public final class AudioCapture {
} catch (UnsupportedOperationException e) {
if (attempts == 0) {
Ln.e("Failed to start audio capture");
Ln.e("On Android 11, audio capture must be started in the foreground, make sure that the device is unlocked when starting " +
"scrcpy.");
Ln.e("On Android 11, audio capture must be started in the foreground, make sure that the device is unlocked when starting "
+ "scrcpy.");
throw new AudioCaptureForegroundException();
} else {
Ln.d("Failed to start audio capture, retrying...");
@ -114,15 +103,22 @@ public final class AudioCapture {
}

private void startRecording() {
recorder = createAudioRecord();
try {
recorder = createAudioRecord(audioSource);
} catch (NullPointerException e) {
// Creating an AudioRecord using an AudioRecord.Builder does not work on Vivo phones:
// - <https://github.com/Genymobile/scrcpy/issues/3805>
// - <https://github.com/Genymobile/scrcpy/pull/3862>
recorder = Workarounds.createAudioRecord(audioSource, SAMPLE_RATE, CHANNEL_CONFIG, CHANNELS, CHANNEL_MASK, ENCODING);
}
recorder.startRecording();
}

public void start() throws AudioCaptureForegroundException {
if (Build.VERSION.SDK_INT == 30) {
if (Build.VERSION.SDK_INT == Build.VERSION_CODES.R) {
startWorkaroundAndroid11();
try {
tryStartRecording(3, 100);
tryStartRecording(5, 100);
} finally {
stopWorkaroundAndroid11();
}
@ -138,21 +134,7 @@ public final class AudioCapture {
}
}

private static Method getTimestampMethod;

private static int getRecorderTimestamp(AudioRecord recorder, AudioTimestamp timestamp) {
try {
if (getTimestampMethod == null) {
getTimestampMethod = AudioRecord.class.getMethod("getTimestamp", AudioTimestamp.class, int.class);
}
return (int) getTimestampMethod.invoke(recorder, timestamp, 0);
} catch (Exception e) {
Ln.e("Could not call AudioRecord.getTimestamp() method");
return AudioRecord.ERROR;
}
}

@TargetApi(24)
@TargetApi(Build.VERSION_CODES.N)
public int read(ByteBuffer directBuffer, int size, MediaCodec.BufferInfo outBufferInfo) {
int r = recorder.read(directBuffer, size);
if (r <= 0) {
@ -161,7 +143,7 @@ public final class AudioCapture {

long pts;

int ret = getRecorderTimestamp(recorder, timestamp);
int ret = recorder.getTimestamp(timestamp, AudioTimestamp.TIMEBASE_MONOTONIC);
if (ret == AudioRecord.SUCCESS) {
pts = timestamp.nanoTime / 1000;
} else {
@ -40,6 +40,7 @@ public final class AudioEncoder implements AsyncProcessor {
private static final int READ_MS = 5; // milliseconds
private static final int READ_SIZE = AudioCapture.millisToBytes(READ_MS);

private final AudioCapture capture;
private final Streamer streamer;
private final int bitRate;
private final List<CodecOption> codecOptions;
@ -58,7 +59,8 @@ public final class AudioEncoder implements AsyncProcessor {

private boolean ended;

public AudioEncoder(Streamer streamer, int bitRate, List<CodecOption> codecOptions, String encoderName) {
public AudioEncoder(AudioCapture capture, Streamer streamer, int bitRate, List<CodecOption> codecOptions, String encoderName) {
this.capture = capture;
this.streamer = streamer;
this.bitRate = bitRate;
this.codecOptions = codecOptions;
@ -84,7 +86,7 @@ public final class AudioEncoder implements AsyncProcessor {
return format;
}

@TargetApi(24)
@TargetApi(Build.VERSION_CODES.N)
private void inputThread(MediaCodec mediaCodec, AudioCapture capture) throws IOException, InterruptedException {
final MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();

@ -114,21 +116,29 @@ public final class AudioEncoder implements AsyncProcessor {
}
}

public void start() {
@Override
public void start(TerminationListener listener) {
thread = new Thread(() -> {
boolean fatalError = false;
try {
encode();
} catch (ConfigurationException | AudioCaptureForegroundException e) {
} catch (ConfigurationException e) {
// Do not print stack trace, a user-friendly error-message has already been logged
fatalError = true;
} catch (AudioCaptureForegroundException e) {
// Do not print stack trace, a user-friendly error-message has already been logged
} catch (IOException e) {
Ln.e("Audio encoding error", e);
fatalError = true;
} finally {
Ln.d("Audio encoder stopped");
listener.onTerminated(fatalError);
}
});
}, "audio-encoder");
thread.start();
}

@Override
public void stop() {
if (thread != null) {
// Just wake up the blocking wait from the thread, so that it properly releases all its resources and terminates
@ -136,6 +146,7 @@ public final class AudioEncoder implements AsyncProcessor {
}
}

@Override
public void join() throws InterruptedException {
if (thread != null) {
thread.join();
@ -159,21 +170,20 @@ public final class AudioEncoder implements AsyncProcessor {

@TargetApi(Build.VERSION_CODES.M)
public void encode() throws IOException, ConfigurationException, AudioCaptureForegroundException {
if (Build.VERSION.SDK_INT < 30) {
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.R) {
Ln.w("Audio disabled: it is not supported before Android 11");
streamer.writeDisableStream(false);
return;
}

MediaCodec mediaCodec = null;
AudioCapture capture = new AudioCapture();

boolean mediaCodecStarted = false;
try {
Codec codec = streamer.getCodec();
mediaCodec = createMediaCodec(codec, encoderName);

mediaCodecThread = new HandlerThread("AudioEncoder");
mediaCodecThread = new HandlerThread("media-codec");
mediaCodecThread.start();

MediaFormat format = createFormat(codec.getMimeType(), bitRate, codecOptions);
@ -183,16 +193,15 @@ public final class AudioEncoder implements AsyncProcessor {
capture.start();

final MediaCodec mediaCodecRef = mediaCodec;
final AudioCapture captureRef = capture;
inputThread = new Thread(() -> {
try {
inputThread(mediaCodecRef, captureRef);
inputThread(mediaCodecRef, capture);
} catch (IOException | InterruptedException e) {
Ln.e("Audio capture error", e);
} finally {
end();
}
});
}, "audio-in");

outputThread = new Thread(() -> {
try {
@ -207,7 +216,7 @@ public final class AudioEncoder implements AsyncProcessor {
} finally {
end();
}
});
}, "audio-out");

mediaCodec.start();
mediaCodecStarted = true;
@ -290,7 +299,7 @@ public final class AudioEncoder implements AsyncProcessor {
}

private class EncoderCallback extends MediaCodec.Callback {
@TargetApi(24)
@TargetApi(Build.VERSION_CODES.N)
@Override
public void onInputBufferAvailable(MediaCodec codec, int index) {
try {
@ -1,12 +1,14 @@
package com.genymobile.scrcpy;

import android.media.MediaCodec;
import android.os.Build;

import java.io.IOException;
import java.nio.ByteBuffer;

public final class AudioRawRecorder implements AsyncProcessor {

private final AudioCapture capture;
private final Streamer streamer;

private Thread thread;
@ -14,15 +16,21 @@ public final class AudioRawRecorder implements AsyncProcessor {
private static final int READ_MS = 5; // milliseconds
private static final int READ_SIZE = AudioCapture.millisToBytes(READ_MS);

public AudioRawRecorder(Streamer streamer) {
public AudioRawRecorder(AudioCapture capture, Streamer streamer) {
this.capture = capture;
this.streamer = streamer;
}

private void record() throws IOException, AudioCaptureForegroundException {
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.R) {
Ln.w("Audio disabled: it is not supported before Android 11");
streamer.writeDisableStream(false);
return;
}

final ByteBuffer buffer = ByteBuffer.allocateDirect(READ_SIZE);
final MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();

AudioCapture capture = new AudioCapture();
try {
capture.start();

@ -46,27 +54,33 @@ public final class AudioRawRecorder implements AsyncProcessor {
}
}

public void start() {
@Override
public void start(TerminationListener listener) {
thread = new Thread(() -> {
boolean fatalError = false;
try {
record();
} catch (AudioCaptureForegroundException e) {
// Do not print stack trace, a user-friendly error-message has already been logged
} catch (IOException e) {
Ln.e("Audio recording error", e);
fatalError = true;
} finally {
Ln.d("Audio recorder stopped");
listener.onTerminated(fatalError);
}
});
}, "audio-raw");
thread.start();
}

@Override
public void stop() {
if (thread != null) {
thread.interrupt();
}
}

@Override
public void join() throws InterruptedException {
if (thread != null) {
thread.join();
30
server/src/main/java/com/genymobile/scrcpy/AudioSource.java
Normal file
@ -0,0 +1,30 @@
package com.genymobile.scrcpy;

import android.media.MediaRecorder;

public enum AudioSource {
OUTPUT("output", MediaRecorder.AudioSource.REMOTE_SUBMIX),
MIC("mic", MediaRecorder.AudioSource.MIC);

private final String name;
private final int value;

AudioSource(String name, int value) {
this.name = name;
this.value = value;
}

int value() {
return value;
}

static AudioSource findByName(String name) {
for (AudioSource audioSource : AudioSource.values()) {
if (name.equals(audioSource.name)) {
return audioSource;
}
}

return null;
}
}
@ -0,0 +1,37 @@
package com.genymobile.scrcpy;

public final class CameraAspectRatio {
private static final float SENSOR = -1;

private float ar;

private CameraAspectRatio(float ar) {
this.ar = ar;
}

public static CameraAspectRatio fromFloat(float ar) {
if (ar < 0) {
throw new IllegalArgumentException("Invalid aspect ratio: " + ar);
}
return new CameraAspectRatio(ar);
}

public static CameraAspectRatio fromFraction(int w, int h) {
if (w <= 0 || h <= 0) {
throw new IllegalArgumentException("Invalid aspect ratio: " + w + ":" + h);
}
return new CameraAspectRatio((float) w / h);
}

public static CameraAspectRatio sensorAspectRatio() {
return new CameraAspectRatio(SENSOR);
}

public boolean isSensor() {
return ar == SENSOR;
}

public float getAspectRatio() {
return ar;
}
}
330
server/src/main/java/com/genymobile/scrcpy/CameraCapture.java
Normal file
@ -0,0 +1,330 @@
package com.genymobile.scrcpy;

import com.genymobile.scrcpy.wrappers.ServiceManager;

import android.annotation.SuppressLint;
import android.annotation.TargetApi;
import android.graphics.Rect;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CaptureFailure;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.params.OutputConfiguration;
import android.hardware.camera2.params.SessionConfiguration;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.MediaCodec;
import android.os.Build;
import android.os.Handler;
import android.os.HandlerThread;
import android.view.Surface;

import java.io.IOException;
import java.util.Arrays;
import java.util.Comparator;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executor;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.stream.Stream;

public class CameraCapture extends SurfaceCapture {

private final String explicitCameraId;
private final CameraFacing cameraFacing;
private final Size explicitSize;
private int maxSize;
private final CameraAspectRatio aspectRatio;

private String cameraId;
private Size size;

private HandlerThread cameraThread;
private Handler cameraHandler;
private CameraDevice cameraDevice;
private Executor cameraExecutor;

private final AtomicBoolean disconnected = new AtomicBoolean();

public CameraCapture(String explicitCameraId, CameraFacing cameraFacing, Size explicitSize, int maxSize, CameraAspectRatio aspectRatio) {
this.explicitCameraId = explicitCameraId;
this.cameraFacing = cameraFacing;
this.explicitSize = explicitSize;
this.maxSize = maxSize;
this.aspectRatio = aspectRatio;
}

@Override
public void init() throws IOException {
cameraThread = new HandlerThread("camera");
cameraThread.start();
cameraHandler = new Handler(cameraThread.getLooper());
cameraExecutor = new HandlerExecutor(cameraHandler);

try {
cameraId = selectCamera(explicitCameraId, cameraFacing);
if (cameraId == null) {
throw new IOException("No matching camera found");
}

size = selectSize(cameraId, explicitSize, maxSize, aspectRatio);
if (size == null) {
throw new IOException("Could not select camera size");
}

Ln.i("Using camera '" + cameraId + "'");
cameraDevice = openCamera(cameraId);
} catch (CameraAccessException | InterruptedException e) {
throw new IOException(e);
}
}

private static String selectCamera(String explicitCameraId, CameraFacing cameraFacing) throws CameraAccessException {
if (explicitCameraId != null) {
return explicitCameraId;
}

CameraManager cameraManager = ServiceManager.getCameraManager();

String[] cameraIds = cameraManager.getCameraIdList();
if (cameraFacing == null) {
// Use the first one
return cameraIds.length > 0 ? cameraIds[0] : null;
}

for (String cameraId : cameraIds) {
CameraCharacteristics characteristics = cameraManager.getCameraCharacteristics(cameraId);

int facing = characteristics.get(CameraCharacteristics.LENS_FACING);
if (cameraFacing.value() == facing) {
return cameraId;
}
}

// Not found
return null;
}

@TargetApi(Build.VERSION_CODES.N)
private static Size selectSize(String cameraId, Size explicitSize, int maxSize, CameraAspectRatio aspectRatio) throws CameraAccessException {
if (explicitSize != null) {
return explicitSize;
}

CameraManager cameraManager = ServiceManager.getCameraManager();
CameraCharacteristics characteristics = cameraManager.getCameraCharacteristics(cameraId);

StreamConfigurationMap configs = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
android.util.Size[] sizes = configs.getOutputSizes(MediaCodec.class);
Stream<android.util.Size> stream = Arrays.stream(sizes);
if (maxSize > 0) {
stream = stream.filter(it -> it.getWidth() <= maxSize && it.getHeight() <= maxSize);
}

Float targetAspectRatio = resolveAspectRatio(aspectRatio, characteristics);
if (targetAspectRatio != null) {
stream = stream.filter(it -> {
float ar = ((float) it.getWidth() / it.getHeight());
float arRatio = ar / targetAspectRatio;
// Accept if the aspect ratio is the target aspect ratio + or - 10%
return arRatio >= 0.9f && arRatio <= 1.1f;
});
}

Optional<android.util.Size> selected = stream.max((s1, s2) -> {
// Greater width is better
int cmp = Integer.compare(s1.getWidth(), s2.getWidth());
if (cmp != 0) {
return cmp;
}

if (targetAspectRatio != null) {
// Closer to the target aspect ratio is better
float ar1 = ((float) s1.getWidth() / s1.getHeight());
float arRatio1 = ar1 / targetAspectRatio;
float distance1 = Math.abs(1 - arRatio1);

float ar2 = ((float) s2.getWidth() / s2.getHeight());
float arRatio2 = ar2 / targetAspectRatio;
float distance2 = Math.abs(1 - arRatio2);

// Reverse the order because lower distance is better
return Float.compare(distance2, distance1);
}

// Greater height is better
return Integer.compare(s1.getHeight(), s2.getHeight());
});

if (selected.isPresent()) {
android.util.Size size = selected.get();
return new Size(size.getWidth(), size.getHeight());
}

// Not found
return null;
}

private static Float resolveAspectRatio(CameraAspectRatio ratio, CameraCharacteristics characteristics) {
if (ratio == null) {
return null;
}

if (ratio.isSensor()) {
Rect activeSize = characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
return (float) activeSize.width() / activeSize.height();
}

return ratio.getAspectRatio();
}

@Override
public void start(Surface surface) throws IOException {
try {
CameraCaptureSession session = createCaptureSession(cameraDevice, surface);
CaptureRequest.Builder requestBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
requestBuilder.addTarget(surface);
CaptureRequest request = requestBuilder.build();
setRepeatingRequest(session, request);
} catch (CameraAccessException | InterruptedException e) {
throw new IOException(e);
}
}

@Override
public void release() {
if (cameraDevice != null) {
cameraDevice.close();
}
if (cameraThread != null) {
cameraThread.quitSafely();
}
}

@Override
public Size getSize() {
return size;
}

@Override
public boolean setMaxSize(int maxSize) {
if (explicitSize != null) {
return false;
}

this.maxSize = maxSize;
try {
size = selectSize(cameraId, null, maxSize, aspectRatio);
return true;
} catch (CameraAccessException e) {
Ln.w("Could not select camera size", e);
return false;
}
}

@SuppressLint("MissingPermission")
@TargetApi(Build.VERSION_CODES.S)
private CameraDevice openCamera(String id) throws CameraAccessException, InterruptedException {
CompletableFuture<CameraDevice> future = new CompletableFuture<>();
ServiceManager.getCameraManager().openCamera(id, new CameraDevice.StateCallback() {
@Override
public void onOpened(CameraDevice camera) {
Ln.d("Camera opened successfully");
future.complete(camera);
}

@Override
public void onDisconnected(CameraDevice camera) {
Ln.w("Camera disconnected");
disconnected.set(true);
requestReset();
}

@Override
public void onError(CameraDevice camera, int error) {
int cameraAccessExceptionErrorCode;
switch (error) {
case CameraDevice.StateCallback.ERROR_CAMERA_IN_USE:
cameraAccessExceptionErrorCode = CameraAccessException.CAMERA_IN_USE;
break;
case CameraDevice.StateCallback.ERROR_MAX_CAMERAS_IN_USE:
cameraAccessExceptionErrorCode = CameraAccessException.MAX_CAMERAS_IN_USE;
break;
case CameraDevice.StateCallback.ERROR_CAMERA_DISABLED:
cameraAccessExceptionErrorCode = CameraAccessException.CAMERA_DISABLED;
break;
case CameraDevice.StateCallback.ERROR_CAMERA_DEVICE:
case CameraDevice.StateCallback.ERROR_CAMERA_SERVICE:
default:
cameraAccessExceptionErrorCode = CameraAccessException.CAMERA_ERROR;
break;
}
future.completeExceptionally(new CameraAccessException(cameraAccessExceptionErrorCode));
}
}, cameraHandler);

try {
return future.get();
} catch (ExecutionException e) {
throw (CameraAccessException) e.getCause();
}
}

@TargetApi(Build.VERSION_CODES.S)
private CameraCaptureSession createCaptureSession(CameraDevice camera, Surface surface) throws CameraAccessException, InterruptedException {
CompletableFuture<CameraCaptureSession> future = new CompletableFuture<>();
OutputConfiguration outputConfig = new OutputConfiguration(surface);
List<OutputConfiguration> outputs = Arrays.asList(outputConfig);
SessionConfiguration sessionConfig = new SessionConfiguration(SessionConfiguration.SESSION_REGULAR, outputs, cameraExecutor,
new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(CameraCaptureSession session) {
future.complete(session);
}

@Override
public void onConfigureFailed(CameraCaptureSession session) {
future.completeExceptionally(new CameraAccessException(CameraAccessException.CAMERA_ERROR));
}
});

camera.createCaptureSession(sessionConfig);

try {
return future.get();
} catch (ExecutionException e) {
throw (CameraAccessException) e.getCause();
}
}

@TargetApi(Build.VERSION_CODES.S)
private void setRepeatingRequest(CameraCaptureSession session, CaptureRequest request) throws CameraAccessException, InterruptedException {
CompletableFuture<Void> future = new CompletableFuture<>();
session.setRepeatingRequest(request, new CameraCaptureSession.CaptureCallback() {
@Override
public void onCaptureStarted(CameraCaptureSession session, CaptureRequest request, long timestamp, long frameNumber) {
future.complete(null);
}

@Override
public void onCaptureFailed(CameraCaptureSession session, CaptureRequest request, CaptureFailure failure) {
future.completeExceptionally(new CameraAccessException(CameraAccessException.CAMERA_ERROR));
}
}, cameraHandler);

try {
future.get();
} catch (ExecutionException e) {
throw (CameraAccessException) e.getCause();
}
}

@Override
public boolean isClosed() {
return disconnected.get();
}
}
31
server/src/main/java/com/genymobile/scrcpy/CameraFacing.java
Normal file
@ -0,0 +1,31 @@
package com.genymobile.scrcpy;

import android.hardware.camera2.CameraCharacteristics;

public enum CameraFacing {
FRONT("front", CameraCharacteristics.LENS_FACING_FRONT),
BACK("back", CameraCharacteristics.LENS_FACING_BACK),
EXTERNAL("external", CameraCharacteristics.LENS_FACING_EXTERNAL);

private final String name;
private final int value;

CameraFacing(String name, int value) {
this.name = name;
this.value = value;
}

int value() {
return value;
}

static CameraFacing findByName(String name) {
for (CameraFacing facing : CameraFacing.values()) {
if (name.equals(facing.name)) {
return facing;
}
}

return null;
}
}
@ -84,7 +84,8 @@ public class Controller implements AsyncProcessor {
}
}

public void start() {
@Override
public void start(TerminationListener listener) {
thread = new Thread(() -> {
try {
control();
@ -92,12 +93,14 @@ public class Controller implements AsyncProcessor {
// this is expected on close
} finally {
Ln.d("Controller stopped");
listener.onTerminated(true);
}
});
}, "control-recv");
thread.start();
sender.start();
}

@Override
public void stop() {
if (thread != null) {
thread.interrupt();
@ -105,6 +108,7 @@ public class Controller implements AsyncProcessor {
sender.stop();
}

@Override
public void join() throws InterruptedException {
if (thread != null) {
thread.join();
@ -373,8 +377,8 @@ public class Controller implements AsyncProcessor {

private void getClipboard(int copyKey) {
// On Android >= 7, press the COPY or CUT key if requested
if (copyKey != ControlMessage.COPY_KEY_NONE && Build.VERSION.SDK_INT >= 24 && device.supportsInputEvents()) {
int key = copyKey == ControlMessage.COPY_KEY_COPY ? 278 : 277;
if (copyKey != ControlMessage.COPY_KEY_NONE && Build.VERSION.SDK_INT >= Build.VERSION_CODES.N && device.supportsInputEvents()) {
int key = copyKey == ControlMessage.COPY_KEY_COPY ? KeyEvent.KEYCODE_COPY : KeyEvent.KEYCODE_CUT;
// Wait until the event is finished, to ensure that the clipboard text we read just after is the correct one
device.pressReleaseKeycode(key, Device.INJECT_MODE_WAIT_FOR_FINISH);
}
@ -397,8 +401,8 @@ public class Controller implements AsyncProcessor {
}

// On Android >= 7, also press the PASTE key if requested
if (paste && Build.VERSION.SDK_INT >= 24 && device.supportsInputEvents()) {
device.pressReleaseKeycode(279, Device.INJECT_MODE_ASYNC);
if (paste && Build.VERSION.SDK_INT >= Build.VERSION_CODES.N && device.supportsInputEvents()) {
device.pressReleaseKeycode(KeyEvent.KEYCODE_PASTE, Device.INJECT_MODE_ASYNC);
}

if (sequence != ControlMessage.SEQUENCE_INVALID) {
@ -41,7 +41,7 @@ public final class DesktopConnection implements Closeable {
controlInputStream = null;
controlOutputStream = null;
}
videoFd = videoSocket.getFileDescriptor();
videoFd = videoSocket != null ? videoSocket.getFileDescriptor() : null;
audioFd = audioSocket != null ? audioSocket.getFileDescriptor() : null;
}

@ -60,7 +60,8 @@ public final class DesktopConnection implements Closeable {
return SOCKET_NAME_PREFIX + String.format("_%08x", scid);
}

public static DesktopConnection open(int scid, boolean tunnelForward, boolean audio, boolean control, boolean sendDummyByte) throws IOException {
public static DesktopConnection open(int scid, boolean tunnelForward, boolean video, boolean audio, boolean control, boolean sendDummyByte)
throws IOException {
String socketName = getSocketName(scid);

LocalSocket videoSocket = null;
@ -68,24 +69,36 @@ public final class DesktopConnection implements Closeable {
LocalSocket controlSocket = null;
try {
if (tunnelForward) {
LocalServerSocket localServerSocket = new LocalServerSocket(socketName);
try {
videoSocket = localServerSocket.accept();
if (sendDummyByte) {
// send one byte so the client may read() to detect a connection error
videoSocket.getOutputStream().write(0);
try (LocalServerSocket localServerSocket = new LocalServerSocket(socketName)) {
if (video) {
videoSocket = localServerSocket.accept();
if (sendDummyByte) {
// send one byte so the client may read() to detect a connection error
videoSocket.getOutputStream().write(0);
sendDummyByte = false;
}
}
if (audio) {
audioSocket = localServerSocket.accept();
if (sendDummyByte) {
// send one byte so the client may read() to detect a connection error
audioSocket.getOutputStream().write(0);
sendDummyByte = false;
}
}
if (control) {
controlSocket = localServerSocket.accept();
if (sendDummyByte) {
// send one byte so the client may read() to detect a connection error
controlSocket.getOutputStream().write(0);
sendDummyByte = false;
}
}
} finally {
localServerSocket.close();
}
} else {
videoSocket = connect(socketName);
if (video) {
videoSocket = connect(socketName);
}
if (audio) {
audioSocket = connect(socketName);
}
@ -109,10 +122,22 @@ public final class DesktopConnection implements Closeable {
return new DesktopConnection(videoSocket, audioSocket, controlSocket);
}

private LocalSocket getFirstSocket() {
if (videoSocket != null) {
return videoSocket;
}
if (audioSocket != null) {
return audioSocket;
}
return controlSocket;
}

public void close() throws IOException {
videoSocket.shutdownInput();
videoSocket.shutdownOutput();
videoSocket.close();
if (videoSocket != null) {
videoSocket.shutdownInput();
videoSocket.shutdownOutput();
videoSocket.close();
}
if (audioSocket != null) {
audioSocket.shutdownInput();
audioSocket.shutdownOutput();
@ -133,7 +158,8 @@ public final class DesktopConnection implements Closeable {
System.arraycopy(deviceNameBytes, 0, buffer, 0, len);
// byte[] are always 0-initialized in java, no need to set '\0' explicitly

IO.writeFully(videoFd, buffer, 0, buffer.length);
FileDescriptor fd = getFirstSocket().getFileDescriptor();
IO.writeFully(fd, buffer, 0, buffer.length);
}

public FileDescriptor getVideoFd() {
@ -12,6 +12,7 @@ import android.os.Build;
import android.os.IBinder;
import android.os.SystemClock;
import android.view.IRotationWatcher;
import android.view.IDisplayFoldListener;
import android.view.InputDevice;
import android.view.InputEvent;
import android.view.KeyCharacterMap;
@ -35,6 +36,10 @@ public final class Device {
void onRotationChanged(int rotation);
}

public interface FoldListener {
void onFoldChanged(int displayId, boolean folded);
}

public interface ClipboardListener {
void onClipboardTextChanged(String text);
}
@ -46,6 +51,7 @@ public final class Device {

private ScreenInfo screenInfo;
private RotationListener rotationListener;
private FoldListener foldListener;
private ClipboardListener clipboardListener;
private final AtomicBoolean isSettingClipboard = new AtomicBoolean();

@ -93,6 +99,33 @@ public final class Device {
}
}, displayId);

if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
ServiceManager.getWindowManager().registerDisplayFoldListener(new IDisplayFoldListener.Stub() {
@Override
public void onDisplayFoldChanged(int displayId, boolean folded) {
if (Device.this.displayId != displayId) {
// Ignore events related to other display ids
return;
}

synchronized (Device.this) {
DisplayInfo displayInfo = ServiceManager.getDisplayManager().getDisplayInfo(displayId);
if (displayInfo == null) {
Ln.e("Display " + displayId + " not found\n" + LogUtils.buildDisplayListMessage());
return;
}

screenInfo = ScreenInfo.computeScreenInfo(displayInfo.getRotation(), displayInfo.getSize(), options.getCrop(),
options.getMaxSize(), options.getLockVideoOrientation());
// notify
if (foldListener != null) {
foldListener.onFoldChanged(displayId, folded);
}
}
}
});
}

if (options.getControl() && options.getClipboardAutosync()) {
// If control and autosync are enabled, synchronize Android clipboard to the computer automatically
ClipboardManager clipboardManager = ServiceManager.getClipboardManager();
@ -124,7 +157,7 @@ public final class Device {
}

// main display or any display on Android >= Q
supportsInputEvents = displayId == 0 || Build.VERSION.SDK_INT >= 29;
supportsInputEvents = displayId == 0 || Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q;
if (!supportsInputEvents) {
Ln.w("Input events are not supported for secondary displays before Android 10");
}
@ -173,7 +206,7 @@ public final class Device {
}

public static boolean supportsInputEvents(int displayId) {
return displayId == 0 || Build.VERSION.SDK_INT >= 29;
return displayId == 0 || Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q;
}

public boolean supportsInputEvents() {
@ -224,6 +257,10 @@ public final class Device {
this.rotationListener = rotationListener;
}

public synchronized void setFoldListener(FoldListener foldlistener) {
this.foldListener = foldlistener;
}

public synchronized void setClipboardListener(ClipboardListener clipboardListener) {
this.clipboardListener = clipboardListener;
}
@ -277,7 +314,7 @@ public final class Device {
* @param mode one of the {@code POWER_MODE_*} constants
*/
public static boolean setScreenPowerMode(int mode) {
if (Build.VERSION.SDK_INT >= 29) {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
// Change the power mode for all physical displays
long[] physicalDisplayIds = SurfaceControl.getPhysicalDisplayIds();
if (physicalDisplayIds == null) {
@ -60,7 +60,7 @@ public final class DeviceMessageSender {
} finally {
Ln.d("Device message sender stopped");
}
});
}, "control-send");
thread.start();
}

@ -2,11 +2,11 @@ package com.genymobile.scrcpy;

import android.annotation.TargetApi;
import android.content.AttributionSource;
import android.content.ContextWrapper;
import android.content.MutableContextWrapper;
import android.os.Build;
import android.os.Process;

public final class FakeContext extends ContextWrapper {
public final class FakeContext extends MutableContextWrapper {

public static final String PACKAGE_NAME = "com.android.shell";
public static final int ROOT_UID = 0; // Like android.os.Process.ROOT_UID, but before API 29
@ -26,13 +26,15 @@ public final class FakeContext extends ContextWrapper {
return PACKAGE_NAME;
}

@Override
public String getOpPackageName() {
return PACKAGE_NAME;
}

@TargetApi(31)
@TargetApi(Build.VERSION_CODES.S)
@Override
public AttributionSource getAttributionSource() {
AttributionSource.Builder builder = new AttributionSource.Builder(0);
AttributionSource.Builder builder = new AttributionSource.Builder(Process.SHELL_UID);
builder.setPackageName(PACKAGE_NAME);
return builder.build();
}
@ -0,0 +1,23 @@
package com.genymobile.scrcpy;

import android.os.Handler;

import java.util.concurrent.Executor;
import java.util.concurrent.RejectedExecutionException;

// Inspired from hidden android.os.HandlerExecutor

public class HandlerExecutor implements Executor {
private final Handler handler;

public HandlerExecutor(Handler handler) {
this.handler = handler;
}

@Override
public void execute(Runnable command) {
if (!handler.post(command)) {
throw new RejectedExecutionException(handler + " is shutting down");
}
}
}
@ -3,6 +3,13 @@ package com.genymobile.scrcpy;
import com.genymobile.scrcpy.wrappers.DisplayManager;
import com.genymobile.scrcpy.wrappers.ServiceManager;

import android.graphics.Rect;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.MediaCodec;

import java.util.List;

public final class LogUtils {
@ -47,7 +54,7 @@ public final class LogUtils {
builder.append("\n    (none)");
} else {
for (int id : displayIds) {
builder.append("\n    --display=").append(id).append(" (");
builder.append("\n    --display-id=").append(id).append(" (");
DisplayInfo displayInfo = displayManager.getDisplayInfo(id);
if (displayInfo != null) {
Size size = displayInfo.getSize();
@ -60,4 +67,50 @@ public final class LogUtils {
}
return builder.toString();
}

private static String getCameraFacingName(int facing) {
switch (facing) {
case CameraCharacteristics.LENS_FACING_FRONT:
return "front";
case CameraCharacteristics.LENS_FACING_BACK:
return "back";
case CameraCharacteristics.LENS_FACING_EXTERNAL:
return "external";
default:
return "unknown";
}
}

public static String buildCameraListMessage(boolean includeSizes) {
StringBuilder builder = new StringBuilder("List of cameras:");
CameraManager cameraManager = ServiceManager.getCameraManager();
try {
String[] cameraIds = cameraManager.getCameraIdList();
if (cameraIds == null || cameraIds.length == 0) {
builder.append("\n    (none)");
} else {
for (String id : cameraIds) {
builder.append("\n    --video-source=camera --camera-id=").append(id);
CameraCharacteristics characteristics = cameraManager.getCameraCharacteristics(id);
int facing = characteristics.get(CameraCharacteristics.LENS_FACING);
builder.append("    (").append(getCameraFacingName(facing)).append(", ");

Rect activeSize = characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
builder.append(activeSize.width()).append("x").append(activeSize.height());
builder.append(')');

if (includeSizes) {
StreamConfigurationMap configs = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
android.util.Size[] sizes = configs.getOutputSizes(MediaCodec.class);
for (android.util.Size size : sizes) {
builder.append("\n        - ").append(size.getWidth()).append('x').append(size.getHeight());
}
}
}
}
} catch (CameraAccessException e) {
builder.append("\n    (access denied)");
}
return builder.toString();
}
}
@ -3,15 +3,19 @@ package com.genymobile.scrcpy;
|
||||
import android.graphics.Rect;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Locale;
|
||||
|
||||
public class Options {
|
||||
|
||||
private Ln.Level logLevel = Ln.Level.DEBUG;
|
||||
private int scid = -1; // 31-bit non-negative value, or -1
|
||||
private boolean video = true;
|
||||
private boolean audio = true;
|
||||
private int maxSize;
|
||||
private VideoCodec videoCodec = VideoCodec.H264;
|
||||
private AudioCodec audioCodec = AudioCodec.OPUS;
|
||||
private VideoSource videoSource = VideoSource.DISPLAY;
|
||||
private AudioSource audioSource = AudioSource.OUTPUT;
|
||||
private int videoBitRate = 8000000;
|
||||
private int audioBitRate = 128000;
|
||||
private int maxFps;
|
||||
@ -20,6 +24,10 @@ public class Options {
|
||||
private Rect crop;
|
||||
private boolean control = true;
|
||||
private int displayId;
|
||||
private String cameraId;
|
||||
private Size cameraSize;
|
||||
private CameraFacing cameraFacing;
|
||||
private CameraAspectRatio cameraAspectRatio;
|
||||
private boolean showTouches;
|
||||
private boolean stayAwake;
|
||||
private List<CodecOption> videoCodecOptions;
|
||||
@ -35,6 +43,8 @@ public class Options {
|
||||
|
||||
private boolean listEncoders;
|
||||
private boolean listDisplays;
|
||||
private boolean listCameras;
|
||||
private boolean listCameraSizes;
|
||||
|
||||
// Options not used by the scrcpy client, but useful to use scrcpy-server directly
|
||||
private boolean sendDeviceMeta = true; // send device name and size
|
||||
@ -46,166 +56,110 @@ public class Options {
|
||||
return logLevel;
|
||||
}
|
||||
|
||||
public void setLogLevel(Ln.Level logLevel) {
|
||||
this.logLevel = logLevel;
|
||||
}
|
||||
|
||||
public int getScid() {
|
||||
return scid;
|
||||
}
|
||||
|
||||
public void setScid(int scid) {
|
||||
this.scid = scid;
|
||||
public boolean getVideo() {
|
||||
return video;
|
||||
}
|
||||
|
||||
public boolean getAudio() {
|
||||
return audio;
|
||||
}
|
||||
|
||||
public void setAudio(boolean audio) {
|
||||
this.audio = audio;
|
||||
}
|
||||
|
||||
public int getMaxSize() {
|
||||
return maxSize;
|
||||
}
|
||||
|
||||
public void setMaxSize(int maxSize) {
|
||||
this.maxSize = maxSize;
|
||||
}
|
||||
|
||||
public VideoCodec getVideoCodec() {
|
||||
return videoCodec;
|
||||
}
|
||||
|
||||
public void setVideoCodec(VideoCodec videoCodec) {
|
||||
this.videoCodec = videoCodec;
|
||||
}
|
||||
|
||||
public AudioCodec getAudioCodec() {
|
||||
return audioCodec;
|
||||
}
|
||||
|
||||
public void setAudioCodec(AudioCodec audioCodec) {
|
||||
this.audioCodec = audioCodec;
|
||||
public VideoSource getVideoSource() {
|
||||
return videoSource;
|
||||
}
|
||||
|
||||
public AudioSource getAudioSource() {
|
||||
return audioSource;
|
||||
}
|
||||
|
||||
public int getVideoBitRate() {
|
||||
return videoBitRate;
|
||||
}
|
||||
|
||||
public void setVideoBitRate(int videoBitRate) {
|
||||
this.videoBitRate = videoBitRate;
|
||||
}
|
||||
|
||||
public int getAudioBitRate() {
|
||||
return audioBitRate;
|
||||
}
|
||||
|
||||
public void setAudioBitRate(int audioBitRate) {
|
||||
this.audioBitRate = audioBitRate;
|
||||
}
|
||||
|
||||
public int getMaxFps() {
|
||||
return maxFps;
|
||||
}
|
||||
|
||||
public void setMaxFps(int maxFps) {
|
||||
this.maxFps = maxFps;
|
||||
}
|
||||
|
||||
public int getLockVideoOrientation() {
|
||||
return lockVideoOrientation;
|
||||
}
|
||||
|
||||
public void setLockVideoOrientation(int lockVideoOrientation) {
|
||||
this.lockVideoOrientation = lockVideoOrientation;
|
||||
}
|
||||
|
||||
public boolean isTunnelForward() {
|
||||
return tunnelForward;
|
||||
}
|
||||
|
||||
public void setTunnelForward(boolean tunnelForward) {
|
||||
this.tunnelForward = tunnelForward;
|
||||
}
|
||||
|
||||
public Rect getCrop() {
|
||||
return crop;
|
||||
}
|
||||
|
||||
public void setCrop(Rect crop) {
|
||||
this.crop = crop;
|
||||
}
|
||||
|
||||
public boolean getControl() {
|
||||
return control;
|
||||
}
|
||||
|
||||
public void setControl(boolean control) {
|
||||
this.control = control;
|
||||
}
|
||||
|
||||
public int getDisplayId() {
|
||||
return displayId;
|
||||
}
|
||||
|
||||
public void setDisplayId(int displayId) {
|
||||
this.displayId = displayId;
|
||||
public String getCameraId() {
|
||||
return cameraId;
|
||||
}
|
||||
|
||||
public Size getCameraSize() {
|
||||
return cameraSize;
|
||||
}
|
||||
|
||||
public CameraFacing getCameraFacing() {
|
||||
return cameraFacing;
|
||||
}
|
||||
|
||||
public CameraAspectRatio getCameraAspectRatio() {
|
||||
return cameraAspectRatio;
|
||||
}
|
||||
|
||||
public boolean getShowTouches() {
|
||||
return showTouches;
|
||||
}
|
||||
|
||||
public void setShowTouches(boolean showTouches) {
|
||||
this.showTouches = showTouches;
|
||||
}
|
||||
|
||||
public boolean getStayAwake() {
|
||||
return stayAwake;
|
||||
}
|
||||
|
||||
public void setStayAwake(boolean stayAwake) {
|
||||
this.stayAwake = stayAwake;
|
||||
}
|
||||
|
||||
public List<CodecOption> getVideoCodecOptions() {
|
||||
return videoCodecOptions;
|
||||
}
|
||||
|
||||
public void setVideoCodecOptions(List<CodecOption> videoCodecOptions) {
|
||||
this.videoCodecOptions = videoCodecOptions;
|
||||
}
|
||||
|
||||
public List<CodecOption> getAudioCodecOptions() {
|
||||
return audioCodecOptions;
|
||||
}
|
||||
|
||||
public void setAudioCodecOptions(List<CodecOption> audioCodecOptions) {
|
||||
this.audioCodecOptions = audioCodecOptions;
|
||||
}
|
||||
|
||||
public String getVideoEncoder() {
|
||||
return videoEncoder;
|
||||
}
|
||||
|
||||
public void setVideoEncoder(String videoEncoder) {
|
||||
this.videoEncoder = videoEncoder;
|
||||
}
|
||||
|
||||
public String getAudioEncoder() {
|
||||
return audioEncoder;
|
||||
}
|
||||
|
||||
public void setAudioEncoder(String audioEncoder) {
|
||||
this.audioEncoder = audioEncoder;
|
||||
}
|
||||
|
||||
public void setPowerOffScreenOnClose(boolean powerOffScreenOnClose) {
|
||||
this.powerOffScreenOnClose = powerOffScreenOnClose;
|
||||
}
|
||||
|
||||
public boolean getPowerOffScreenOnClose() {
|
||||
return this.powerOffScreenOnClose;
|
||||
}
|
||||
@ -214,79 +168,289 @@ public class Options {
|
||||
return clipboardAutosync;
|
||||
}
|
||||
|
||||
public void setClipboardAutosync(boolean clipboardAutosync) {
|
||||
this.clipboardAutosync = clipboardAutosync;
|
||||
}
|
||||
|
||||
public boolean getDownsizeOnError() {
|
||||
return downsizeOnError;
|
||||
}
|
||||
|
||||
public void setDownsizeOnError(boolean downsizeOnError) {
|
||||
this.downsizeOnError = downsizeOnError;
|
||||
}
|
||||
|
||||
public boolean getCleanup() {
|
||||
return cleanup;
|
||||
}
|
||||
|
||||
public void setCleanup(boolean cleanup) {
|
||||
this.cleanup = cleanup;
|
||||
}
|
||||
|
||||
public boolean getPowerOn() {
|
||||
return powerOn;
|
||||
}
|
||||
|
||||
public void setPowerOn(boolean powerOn) {
|
||||
this.powerOn = powerOn;
|
||||
public boolean getList() {
|
||||
return listEncoders || listDisplays || listCameras || listCameraSizes;
|
||||
}
|
||||
|
||||
public boolean getListEncoders() {
|
||||
return listEncoders;
|
||||
}
|
||||
|
||||
public void setListEncoders(boolean listEncoders) {
|
||||
this.listEncoders = listEncoders;
|
||||
}
|
||||
|
||||
public boolean getListDisplays() {
|
||||
return listDisplays;
|
||||
}
|
||||
|
||||
public void setListDisplays(boolean listDisplays) {
|
||||
this.listDisplays = listDisplays;
|
||||
public boolean getListCameras() {
|
||||
return listCameras;
|
||||
}
|
||||
|
||||
public boolean getListCameraSizes() {
|
||||
return listCameraSizes;
|
||||
}
|
||||
|
||||
public boolean getSendDeviceMeta() {
|
||||
return sendDeviceMeta;
|
||||
}
|
||||
|
||||
public void setSendDeviceMeta(boolean sendDeviceMeta) {
|
||||
this.sendDeviceMeta = sendDeviceMeta;
|
||||
}
|
||||
|
||||
public boolean getSendFrameMeta() {
|
||||
return sendFrameMeta;
|
||||
}
|
||||
|
||||
public void setSendFrameMeta(boolean sendFrameMeta) {
|
||||
this.sendFrameMeta = sendFrameMeta;
|
||||
}
|
||||
|
||||
public boolean getSendDummyByte() {
|
||||
return sendDummyByte;
|
||||
}
|
||||
|
||||
public void setSendDummyByte(boolean sendDummyByte) {
|
||||
this.sendDummyByte = sendDummyByte;
|
||||
}
|
||||
|
||||
public boolean getSendCodecMeta() {
|
||||
return sendCodecMeta;
|
||||
}
|
||||
|
||||
public void setSendCodecMeta(boolean sendCodecMeta) {
|
||||
this.sendCodecMeta = sendCodecMeta;
|
||||
@SuppressWarnings("MethodLength")
|
||||
public static Options parse(String... args) {
|
||||
if (args.length < 1) {
|
||||
throw new IllegalArgumentException("Missing client version");
|
||||
}
|
||||
|
||||
String clientVersion = args[0];
|
||||
if (!clientVersion.equals(BuildConfig.VERSION_NAME)) {
|
||||
throw new IllegalArgumentException(
|
||||
"The server version (" + BuildConfig.VERSION_NAME + ") does not match the client " + "(" + clientVersion + ")");
|
||||
}
|
||||
|
||||
Options options = new Options();
|
||||
|
||||
for (int i = 1; i < args.length; ++i) {
|
||||
String arg = args[i];
|
||||
int equalIndex = arg.indexOf('=');
|
||||
if (equalIndex == -1) {
|
||||
throw new IllegalArgumentException("Invalid key=value pair: \"" + arg + "\"");
|
||||
}
|
||||
String key = arg.substring(0, equalIndex);
|
||||
String value = arg.substring(equalIndex + 1);
|
||||
switch (key) {
|
||||
case "scid":
|
||||
int scid = Integer.parseInt(value, 0x10);
|
||||
if (scid < -1) {
|
||||
throw new IllegalArgumentException("scid may not be negative (except -1 for 'none'): " + scid);
|
||||
}
|
||||
options.scid = scid;
|
||||
break;
|
||||
case "log_level":
|
||||
options.logLevel = Ln.Level.valueOf(value.toUpperCase(Locale.ENGLISH));
|
||||
break;
|
||||
case "video":
|
||||
options.video = Boolean.parseBoolean(value);
|
||||
break;
|
||||
case "audio":
|
||||
options.audio = Boolean.parseBoolean(value);
|
||||
break;
|
||||
case "video_codec":
|
||||
VideoCodec videoCodec = VideoCodec.findByName(value);
|
||||
if (videoCodec == null) {
|
||||
throw new IllegalArgumentException("Video codec " + value + " not supported");
|
||||
}
|
||||
options.videoCodec = videoCodec;
|
||||
break;
|
||||
case "audio_codec":
|
||||
AudioCodec audioCodec = AudioCodec.findByName(value);
|
||||
if (audioCodec == null) {
|
||||
throw new IllegalArgumentException("Audio codec " + value + " not supported");
|
||||
}
|
||||
options.audioCodec = audioCodec;
|
||||
break;
|
||||
case "video_source":
|
||||
VideoSource videoSource = VideoSource.findByName(value);
|
||||
if (videoSource == null) {
|
||||
throw new IllegalArgumentException("Video source " + value + " not supported");
|
||||
}
|
||||
options.videoSource = videoSource;
|
||||
break;
|
||||
case "audio_source":
|
||||
AudioSource audioSource = AudioSource.findByName(value);
|
||||
if (audioSource == null) {
|
||||
throw new IllegalArgumentException("Audio source " + value + " not supported");
|
||||
}
|
||||
options.audioSource = audioSource;
|
||||
break;
|
||||
case "max_size":
|
||||
options.maxSize = Integer.parseInt(value) & ~7; // multiple of 8
|
||||
break;
|
||||
case "video_bit_rate":
|
||||
options.videoBitRate = Integer.parseInt(value);
|
||||
break;
|
||||
case "audio_bit_rate":
|
||||
options.audioBitRate = Integer.parseInt(value);
|
||||
break;
|
||||
case "max_fps":
|
||||
options.maxFps = Integer.parseInt(value);
|
||||
break;
|
||||
case "lock_video_orientation":
|
||||
options.lockVideoOrientation = Integer.parseInt(value);
|
||||
break;
|
||||
case "tunnel_forward":
|
||||
options.tunnelForward = Boolean.parseBoolean(value);
|
||||
break;
|
||||
case "crop":
|
||||
if (!value.isEmpty()) {
|
||||
options.crop = parseCrop(value);
|
||||
}
|
||||
break;
|
||||
case "control":
|
||||
options.control = Boolean.parseBoolean(value);
|
||||
break;
|
||||
case "display_id":
|
||||
options.displayId = Integer.parseInt(value);
|
||||
break;
|
||||
case "show_touches":
|
||||
options.showTouches = Boolean.parseBoolean(value);
|
||||
break;
|
||||
case "stay_awake":
|
||||
options.stayAwake = Boolean.parseBoolean(value);
|
||||
break;
|
||||
case "video_codec_options":
|
||||
options.videoCodecOptions = CodecOption.parse(value);
|
||||
break;
|
||||
case "audio_codec_options":
|
||||
options.audioCodecOptions = CodecOption.parse(value);
|
||||
break;
|
||||
case "video_encoder":
|
||||
if (!value.isEmpty()) {
|
||||
options.videoEncoder = value;
|
||||
}
|
||||
break;
|
||||
case "audio_encoder":
|
||||
if (!value.isEmpty()) {
|
||||
options.audioEncoder = value;
|
||||
}
|
||||
case "power_off_on_close":
|
||||
options.powerOffScreenOnClose = Boolean.parseBoolean(value);
|
||||
break;
|
||||
case "clipboard_autosync":
|
||||
options.clipboardAutosync = Boolean.parseBoolean(value);
|
||||
break;
|
||||
case "downsize_on_error":
|
||||
options.downsizeOnError = Boolean.parseBoolean(value);
|
||||
break;
|
||||
case "cleanup":
|
||||
options.cleanup = Boolean.parseBoolean(value);
|
||||
break;
|
||||
case "power_on":
|
||||
options.powerOn = Boolean.parseBoolean(value);
|
||||
break;
|
||||
case "list_encoders":
|
||||
options.listEncoders = Boolean.parseBoolean(value);
|
||||
break;
|
||||
case "list_displays":
|
||||
options.listDisplays = Boolean.parseBoolean(value);
|
||||
break;
|
||||
case "list_cameras":
|
||||
options.listCameras = Boolean.parseBoolean(value);
|
||||
break;
|
||||
case "list_camera_sizes":
|
||||
options.listCameraSizes = Boolean.parseBoolean(value);
|
||||
break;
|
||||
case "camera_id":
|
||||
if (!value.isEmpty()) {
|
||||
options.cameraId = value;
|
||||
}
|
||||
break;
|
||||
case "camera_size":
|
||||
if (!value.isEmpty()) {
|
||||
options.cameraSize = parseSize(value);
|
||||
}
|
||||
break;
|
||||
case "camera_facing":
|
||||
if (!value.isEmpty()) {
|
||||
CameraFacing facing = CameraFacing.findByName(value);
|
||||
if (facing == null) {
|
||||
throw new IllegalArgumentException("Camera facing " + value + " not supported");
|
||||
}
|
||||
options.cameraFacing = facing;
|
||||
}
|
||||
break;
|
||||
case "camera_ar":
|
||||
if (!value.isEmpty()) {
|
||||
options.cameraAspectRatio = parseCameraAspectRatio(value);
|
||||
}
|
||||
break;
|
||||
case "send_device_meta":
|
||||
options.sendDeviceMeta = Boolean.parseBoolean(value);
|
||||
break;
|
||||
case "send_frame_meta":
|
||||
options.sendFrameMeta = Boolean.parseBoolean(value);
|
||||
break;
|
||||
case "send_dummy_byte":
|
||||
options.sendDummyByte = Boolean.parseBoolean(value);
|
||||
break;
|
||||
case "send_codec_meta":
|
||||
options.sendCodecMeta = Boolean.parseBoolean(value);
|
||||
break;
|
||||
case "raw_stream":
|
||||
boolean rawStream = Boolean.parseBoolean(value);
|
||||
if (rawStream) {
|
||||
options.sendDeviceMeta = false;
|
||||
options.sendFrameMeta = false;
|
||||
options.sendDummyByte = false;
|
||||
options.sendCodecMeta = false;
|
||||
}
|
||||
break;
|
||||
default:
|
||||
Ln.w("Unknown server option: " + key);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
return options;
|
||||
}
|
||||
|
||||
private static Rect parseCrop(String crop) {
|
||||
// input format: "width:height:x:y"
|
||||
String[] tokens = crop.split(":");
|
||||
if (tokens.length != 4) {
|
||||
throw new IllegalArgumentException("Crop must contains 4 values separated by colons: \"" + crop + "\"");
|
||||
}
|
||||
int width = Integer.parseInt(tokens[0]);
|
||||
int height = Integer.parseInt(tokens[1]);
|
||||
int x = Integer.parseInt(tokens[2]);
|
||||
int y = Integer.parseInt(tokens[3]);
|
||||
return new Rect(x, y, x + width, y + height);
|
||||
}
|
||||
|
||||
private static Size parseSize(String size) {
|
||||
// input format: "<width>x<height>"
|
||||
String[] tokens = size.split("x");
|
||||
if (tokens.length != 2) {
|
||||
throw new IllegalArgumentException("Invalid size format (expected <width>x<height>): \"" + size + "\"");
|
||||
}
|
||||
int width = Integer.parseInt(tokens[0]);
|
||||
int height = Integer.parseInt(tokens[1]);
|
||||
return new Size(width, height);
|
||||
}
|
||||
|
||||
private static CameraAspectRatio parseCameraAspectRatio(String ar) {
|
||||
if ("sensor".equals(ar)) {
|
||||
return CameraAspectRatio.sensorAspectRatio();
|
||||
}
|
||||
|
||||
String[] tokens = ar.split(":");
|
||||
if (tokens.length == 2) {
|
||||
int w = Integer.parseInt(tokens[0]);
|
||||
int h = Integer.parseInt(tokens[1]);
|
||||
return CameraAspectRatio.fromFraction(w, h);
|
||||
}
|
||||
|
||||
float floatAr = Float.parseFloat(tokens[0]);
|
||||
return CameraAspectRatio.fromFloat(floatAr);
|
||||
}
|
||||
}
|
||||
|
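As a reading aid for the option parsing above, here is a hedged sketch of what an argument list for Options.parse() might look like. It is illustrative only: the scrcpy client builds this list itself when it starts the server, the first positional argument is presumably still the client version (as in the former Server.createOptions()), and the "front" camera facing value is an assumption about the CameraFacing names.

    // Hypothetical argument list, for illustration only (the real one is generated by the client).
    String[] args = {
            BuildConfig.VERSION_NAME,   // client version, expected to match the server version
            "log_level=debug",
            "show_touches=true",
            "camera_facing=front",      // assumed CameraFacing name; unknown values throw IllegalArgumentException
            "camera_size=1920x1080",    // parsed by parseSize() as "<width>x<height>"
            "camera_ar=16:9",           // parseCameraAspectRatio() also accepts "sensor" or a float such as "1.77"
            "raw_stream=true",          // clears sendDeviceMeta, sendFrameMeta, sendDummyByte and sendCodecMeta
    };
    Options options = Options.parse(args);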
@ -0,0 +1,84 @@
|
||||
package com.genymobile.scrcpy;
|
||||
|
||||
import com.genymobile.scrcpy.wrappers.SurfaceControl;
|
||||
|
||||
import android.graphics.Rect;
|
||||
import android.os.Build;
|
||||
import android.os.IBinder;
|
||||
import android.view.Surface;
|
||||
|
||||
public class ScreenCapture extends SurfaceCapture implements Device.RotationListener, Device.FoldListener {
|
||||
|
||||
private final Device device;
|
||||
private IBinder display;
|
||||
|
||||
public ScreenCapture(Device device) {
|
||||
this.device = device;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void init() {
|
||||
display = createDisplay();
|
||||
device.setRotationListener(this);
|
||||
device.setFoldListener(this);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void start(Surface surface) {
|
||||
ScreenInfo screenInfo = device.getScreenInfo();
|
||||
Rect contentRect = screenInfo.getContentRect();
|
||||
|
||||
// does not include the locked video orientation
|
||||
Rect unlockedVideoRect = screenInfo.getUnlockedVideoSize().toRect();
|
||||
int videoRotation = screenInfo.getVideoRotation();
|
||||
int layerStack = device.getLayerStack();
|
||||
setDisplaySurface(display, surface, videoRotation, contentRect, unlockedVideoRect, layerStack);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void release() {
|
||||
device.setRotationListener(null);
|
||||
device.setFoldListener(null);
|
||||
SurfaceControl.destroyDisplay(display);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Size getSize() {
|
||||
return device.getScreenInfo().getVideoSize();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean setMaxSize(int size) {
|
||||
device.setMaxSize(size);
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFoldChanged(int displayId, boolean folded) {
|
||||
requestReset();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onRotationChanged(int rotation) {
|
||||
requestReset();
|
||||
}
|
||||
|
||||
private static IBinder createDisplay() {
|
||||
// Since Android 12 (preview), secure displays can no longer be created with shell permissions.
|
||||
// On Android 12 preview, SDK_INT is still R (not S), but CODENAME is "S".
|
||||
boolean secure = Build.VERSION.SDK_INT < Build.VERSION_CODES.R || (Build.VERSION.SDK_INT == Build.VERSION_CODES.R && !"S".equals(
|
||||
Build.VERSION.CODENAME));
|
||||
return SurfaceControl.createDisplay("scrcpy", secure);
|
||||
}
|
||||
|
||||
private static void setDisplaySurface(IBinder display, Surface surface, int orientation, Rect deviceRect, Rect displayRect, int layerStack) {
|
||||
SurfaceControl.openTransaction();
|
||||
try {
|
||||
SurfaceControl.setDisplaySurface(display, surface);
|
||||
SurfaceControl.setDisplayProjection(display, orientation, deviceRect, displayRect);
|
||||
SurfaceControl.setDisplayLayerStack(display, layerStack);
|
||||
} finally {
|
||||
SurfaceControl.closeTransaction();
|
||||
}
|
||||
}
|
||||
}
|
@ -1,16 +1,43 @@
|
||||
package com.genymobile.scrcpy;
|
||||
|
||||
import android.graphics.Rect;
|
||||
import android.os.BatteryManager;
|
||||
import android.os.Build;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Locale;
|
||||
|
||||
public final class Server {
|
||||
|
||||
private static class Completion {
|
||||
private int running;
|
||||
private boolean fatalError;
|
||||
|
||||
Completion(int running) {
|
||||
this.running = running;
|
||||
}
|
||||
|
||||
synchronized void addCompleted(boolean fatalError) {
|
||||
--running;
|
||||
if (fatalError) {
|
||||
this.fatalError = true;
|
||||
}
|
||||
if (running == 0 || this.fatalError) {
|
||||
notify();
|
||||
}
|
||||
}
|
||||
|
||||
synchronized void await() {
|
||||
try {
|
||||
while (running > 0 && !fatalError) {
|
||||
wait();
|
||||
}
|
||||
} catch (InterruptedException e) {
|
||||
// ignore
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private Server() {
|
||||
// not instantiable
|
||||
}
|
||||
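The Completion helper above is a small latch: each asynchronous processor reports exactly once, and await() returns as soon as every processor has finished or one of them reports a fatal error. A minimal sketch of the same pattern (the worker bodies are invented for the example; the real callers are the TerminationListener lambdas passed to AsyncProcessor.start() further down):

    Completion completion = new Completion(2);

    new Thread(() -> {
        // ... some work that succeeds ...
        completion.addCompleted(false);  // normal completion
    }, "worker-1").start();

    new Thread(() -> {
        // ... some work that fails ...
        completion.addCompleted(true);   // fatal error: wakes await() immediately
    }, "worker-2").start();

    completion.await();  // returns when both workers are done, or on the first fatal error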
@ -60,7 +87,13 @@ public final class Server {
|
||||
}
|
||||
|
||||
private static void scrcpy(Options options) throws IOException, ConfigurationException {
|
||||
Ln.i("Device: " + Build.MANUFACTURER + " " + Build.MODEL + " (Android " + Build.VERSION.RELEASE + ")");
|
||||
Ln.i("Device: [" + Build.MANUFACTURER + "] " + Build.BRAND + " " + Build.MODEL + " (Android " + Build.VERSION.RELEASE + ")");
|
||||
|
||||
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.S && options.getVideoSource() == VideoSource.CAMERA) {
|
||||
Ln.e("Camera mirroring is not supported before Android 12");
|
||||
throw new ConfigurationException("Camera mirroring is not supported");
|
||||
}
|
||||
|
||||
final Device device = new Device(options);
|
||||
|
||||
Thread initThread = startInitThread(options);
|
||||
@ -68,33 +101,17 @@ public final class Server {
|
||||
int scid = options.getScid();
|
||||
boolean tunnelForward = options.isTunnelForward();
|
||||
boolean control = options.getControl();
|
||||
boolean video = options.getVideo();
|
||||
boolean audio = options.getAudio();
|
||||
boolean sendDummyByte = options.getSendDummyByte();
|
||||
|
||||
Workarounds.prepareMainLooper();
|
||||
|
||||
// Workarounds must be applied for Meizu phones:
|
||||
// - <https://github.com/Genymobile/scrcpy/issues/240>
|
||||
// - <https://github.com/Genymobile/scrcpy/issues/365>
|
||||
// - <https://github.com/Genymobile/scrcpy/issues/2656>
|
||||
//
|
||||
// But only apply when strictly necessary, since workarounds can cause other issues:
|
||||
// - <https://github.com/Genymobile/scrcpy/issues/940>
|
||||
// - <https://github.com/Genymobile/scrcpy/issues/994>
|
||||
if (Build.BRAND.equalsIgnoreCase("meizu")) {
|
||||
Workarounds.fillAppInfo();
|
||||
}
|
||||
|
||||
// Before Android 11, audio is not supported.
|
||||
// Since Android 12, we can properly set a context on the AudioRecord.
|
||||
// Only on Android 11 we must fill the application context for the AudioRecord to work.
|
||||
if (audio && Build.VERSION.SDK_INT == 30) {
|
||||
Workarounds.fillAppContext();
|
||||
}
|
||||
boolean camera = true;
|
||||
Workarounds.apply(audio, camera);
|
||||
|
||||
List<AsyncProcessor> asyncProcessors = new ArrayList<>();
|
||||
|
||||
try (DesktopConnection connection = DesktopConnection.open(scid, tunnelForward, audio, control, sendDummyByte)) {
|
||||
DesktopConnection connection = DesktopConnection.open(scid, tunnelForward, video, audio, control, sendDummyByte);
|
||||
try {
|
||||
if (options.getSendDeviceMeta()) {
|
||||
connection.sendDeviceMeta(Device.getDeviceName());
|
||||
}
|
||||
@ -107,38 +124,42 @@ public final class Server {
|
||||
|
||||
if (audio) {
|
||||
AudioCodec audioCodec = options.getAudioCodec();
|
||||
Streamer audioStreamer = new Streamer(connection.getAudioFd(), audioCodec, options.getSendCodecMeta(),
|
||||
options.getSendFrameMeta());
|
||||
AudioCapture audioCapture = new AudioCapture(options.getAudioSource());
|
||||
Streamer audioStreamer = new Streamer(connection.getAudioFd(), audioCodec, options.getSendCodecMeta(), options.getSendFrameMeta());
|
||||
AsyncProcessor audioRecorder;
|
||||
if (audioCodec == AudioCodec.RAW) {
|
||||
audioRecorder = new AudioRawRecorder(audioStreamer);
|
||||
audioRecorder = new AudioRawRecorder(audioCapture, audioStreamer);
|
||||
} else {
|
||||
audioRecorder = new AudioEncoder(audioStreamer, options.getAudioBitRate(), options.getAudioCodecOptions(),
|
||||
audioRecorder = new AudioEncoder(audioCapture, audioStreamer, options.getAudioBitRate(), options.getAudioCodecOptions(),
|
||||
options.getAudioEncoder());
|
||||
}
|
||||
asyncProcessors.add(audioRecorder);
|
||||
}
|
||||
|
||||
Streamer videoStreamer = new Streamer(connection.getVideoFd(), options.getVideoCodec(), options.getSendCodecMeta(),
|
||||
options.getSendFrameMeta());
|
||||
ScreenEncoder screenEncoder = new ScreenEncoder(device, videoStreamer, options.getVideoBitRate(), options.getMaxFps(),
|
||||
options.getVideoCodecOptions(), options.getVideoEncoder(), options.getDownsizeOnError());
|
||||
|
||||
for (AsyncProcessor asyncProcessor : asyncProcessors) {
|
||||
asyncProcessor.start();
|
||||
}
|
||||
|
||||
try {
|
||||
// synchronous
|
||||
screenEncoder.streamScreen();
|
||||
} catch (IOException e) {
|
||||
// Broken pipe is expected on close, because the socket is closed by the client
|
||||
if (!IO.isBrokenPipe(e)) {
|
||||
Ln.e("Video encoding error", e);
|
||||
if (video) {
|
||||
Streamer videoStreamer = new Streamer(connection.getVideoFd(), options.getVideoCodec(), options.getSendCodecMeta(),
|
||||
options.getSendFrameMeta());
|
||||
SurfaceCapture surfaceCapture;
|
||||
if (options.getVideoSource() == VideoSource.DISPLAY) {
|
||||
surfaceCapture = new ScreenCapture(device);
|
||||
} else {
|
||||
surfaceCapture = new CameraCapture(options.getCameraId(), options.getCameraFacing(), options.getCameraSize(),
|
||||
options.getMaxSize(), options.getCameraAspectRatio());
|
||||
}
|
||||
SurfaceEncoder surfaceEncoder = new SurfaceEncoder(surfaceCapture, videoStreamer, options.getVideoBitRate(), options.getMaxFps(),
|
||||
options.getVideoCodecOptions(), options.getVideoEncoder(), options.getDownsizeOnError());
|
||||
asyncProcessors.add(surfaceEncoder);
|
||||
}
|
||||
|
||||
Completion completion = new Completion(asyncProcessors.size());
|
||||
for (AsyncProcessor asyncProcessor : asyncProcessors) {
|
||||
asyncProcessor.start((fatalError) -> {
|
||||
completion.addCompleted(fatalError);
|
||||
});
|
||||
}
|
||||
|
||||
completion.await();
|
||||
} finally {
|
||||
Ln.d("Screen streaming stopped");
|
||||
initThread.interrupt();
|
||||
for (AsyncProcessor asyncProcessor : asyncProcessors) {
|
||||
asyncProcessor.stop();
|
||||
@ -152,216 +173,27 @@ public final class Server {
|
||||
} catch (InterruptedException e) {
|
||||
// ignore
|
||||
}
|
||||
|
||||
connection.close();
|
||||
}
|
||||
}
|
||||
|
||||
private static Thread startInitThread(final Options options) {
|
||||
Thread thread = new Thread(() -> initAndCleanUp(options));
|
||||
Thread thread = new Thread(() -> initAndCleanUp(options), "init-cleanup");
|
||||
thread.start();
|
||||
return thread;
|
||||
}
|
||||
|
||||
@SuppressWarnings("MethodLength")
|
||||
private static Options createOptions(String... args) {
|
||||
if (args.length < 1) {
|
||||
throw new IllegalArgumentException("Missing client version");
|
||||
}
|
||||
|
||||
String clientVersion = args[0];
|
||||
if (!clientVersion.equals(BuildConfig.VERSION_NAME)) {
|
||||
throw new IllegalArgumentException(
|
||||
"The server version (" + BuildConfig.VERSION_NAME + ") does not match the client " + "(" + clientVersion + ")");
|
||||
}
|
||||
|
||||
Options options = new Options();
|
||||
|
||||
for (int i = 1; i < args.length; ++i) {
|
||||
String arg = args[i];
|
||||
int equalIndex = arg.indexOf('=');
|
||||
if (equalIndex == -1) {
|
||||
throw new IllegalArgumentException("Invalid key=value pair: \"" + arg + "\"");
|
||||
}
|
||||
String key = arg.substring(0, equalIndex);
|
||||
String value = arg.substring(equalIndex + 1);
|
||||
switch (key) {
|
||||
case "scid":
|
||||
int scid = Integer.parseInt(value, 0x10);
|
||||
if (scid < -1) {
|
||||
throw new IllegalArgumentException("scid may not be negative (except -1 for 'none'): " + scid);
|
||||
}
|
||||
options.setScid(scid);
|
||||
break;
|
||||
case "log_level":
|
||||
Ln.Level level = Ln.Level.valueOf(value.toUpperCase(Locale.ENGLISH));
|
||||
options.setLogLevel(level);
|
||||
break;
|
||||
case "audio":
|
||||
boolean audio = Boolean.parseBoolean(value);
|
||||
options.setAudio(audio);
|
||||
break;
|
||||
case "video_codec":
|
||||
VideoCodec videoCodec = VideoCodec.findByName(value);
|
||||
if (videoCodec == null) {
|
||||
throw new IllegalArgumentException("Video codec " + value + " not supported");
|
||||
}
|
||||
options.setVideoCodec(videoCodec);
|
||||
break;
|
||||
case "audio_codec":
|
||||
AudioCodec audioCodec = AudioCodec.findByName(value);
|
||||
if (audioCodec == null) {
|
||||
throw new IllegalArgumentException("Audio codec " + value + " not supported");
|
||||
}
|
||||
options.setAudioCodec(audioCodec);
|
||||
break;
|
||||
case "max_size":
|
||||
int maxSize = Integer.parseInt(value) & ~7; // multiple of 8
|
||||
options.setMaxSize(maxSize);
|
||||
break;
|
||||
case "video_bit_rate":
|
||||
int videoBitRate = Integer.parseInt(value);
|
||||
options.setVideoBitRate(videoBitRate);
|
||||
break;
|
||||
case "audio_bit_rate":
|
||||
int audioBitRate = Integer.parseInt(value);
|
||||
options.setAudioBitRate(audioBitRate);
|
||||
break;
|
||||
case "max_fps":
|
||||
int maxFps = Integer.parseInt(value);
|
||||
options.setMaxFps(maxFps);
|
||||
break;
|
||||
case "lock_video_orientation":
|
||||
int lockVideoOrientation = Integer.parseInt(value);
|
||||
options.setLockVideoOrientation(lockVideoOrientation);
|
||||
break;
|
||||
case "tunnel_forward":
|
||||
boolean tunnelForward = Boolean.parseBoolean(value);
|
||||
options.setTunnelForward(tunnelForward);
|
||||
break;
|
||||
case "crop":
|
||||
Rect crop = parseCrop(value);
|
||||
options.setCrop(crop);
|
||||
break;
|
||||
case "control":
|
||||
boolean control = Boolean.parseBoolean(value);
|
||||
options.setControl(control);
|
||||
break;
|
||||
case "display_id":
|
||||
int displayId = Integer.parseInt(value);
|
||||
options.setDisplayId(displayId);
|
||||
break;
|
||||
case "show_touches":
|
||||
boolean showTouches = Boolean.parseBoolean(value);
|
||||
options.setShowTouches(showTouches);
|
||||
break;
|
||||
case "stay_awake":
|
||||
boolean stayAwake = Boolean.parseBoolean(value);
|
||||
options.setStayAwake(stayAwake);
|
||||
break;
|
||||
case "video_codec_options":
|
||||
List<CodecOption> videoCodecOptions = CodecOption.parse(value);
|
||||
options.setVideoCodecOptions(videoCodecOptions);
|
||||
break;
|
||||
case "audio_codec_options":
|
||||
List<CodecOption> audioCodecOptions = CodecOption.parse(value);
|
||||
options.setAudioCodecOptions(audioCodecOptions);
|
||||
break;
|
||||
case "video_encoder":
|
||||
if (!value.isEmpty()) {
|
||||
options.setVideoEncoder(value);
|
||||
}
|
||||
break;
|
||||
case "audio_encoder":
|
||||
if (!value.isEmpty()) {
|
||||
options.setAudioEncoder(value);
|
||||
}
|
||||
case "power_off_on_close":
|
||||
boolean powerOffScreenOnClose = Boolean.parseBoolean(value);
|
||||
options.setPowerOffScreenOnClose(powerOffScreenOnClose);
|
||||
break;
|
||||
case "clipboard_autosync":
|
||||
boolean clipboardAutosync = Boolean.parseBoolean(value);
|
||||
options.setClipboardAutosync(clipboardAutosync);
|
||||
break;
|
||||
case "downsize_on_error":
|
||||
boolean downsizeOnError = Boolean.parseBoolean(value);
|
||||
options.setDownsizeOnError(downsizeOnError);
|
||||
break;
|
||||
case "cleanup":
|
||||
boolean cleanup = Boolean.parseBoolean(value);
|
||||
options.setCleanup(cleanup);
|
||||
break;
|
||||
case "power_on":
|
||||
boolean powerOn = Boolean.parseBoolean(value);
|
||||
options.setPowerOn(powerOn);
|
||||
break;
|
||||
case "list_encoders":
|
||||
boolean listEncoders = Boolean.parseBoolean(value);
|
||||
options.setListEncoders(listEncoders);
|
||||
break;
|
||||
case "list_displays":
|
||||
boolean listDisplays = Boolean.parseBoolean(value);
|
||||
options.setListDisplays(listDisplays);
|
||||
break;
|
||||
case "send_device_meta":
|
||||
boolean sendDeviceMeta = Boolean.parseBoolean(value);
|
||||
options.setSendDeviceMeta(sendDeviceMeta);
|
||||
break;
|
||||
case "send_frame_meta":
|
||||
boolean sendFrameMeta = Boolean.parseBoolean(value);
|
||||
options.setSendFrameMeta(sendFrameMeta);
|
||||
break;
|
||||
case "send_dummy_byte":
|
||||
boolean sendDummyByte = Boolean.parseBoolean(value);
|
||||
options.setSendDummyByte(sendDummyByte);
|
||||
break;
|
||||
case "send_codec_meta":
|
||||
boolean sendCodecMeta = Boolean.parseBoolean(value);
|
||||
options.setSendCodecMeta(sendCodecMeta);
|
||||
break;
|
||||
case "raw_video_stream":
|
||||
boolean rawVideoStream = Boolean.parseBoolean(value);
|
||||
if (rawVideoStream) {
|
||||
options.setSendDeviceMeta(false);
|
||||
options.setSendFrameMeta(false);
|
||||
options.setSendDummyByte(false);
|
||||
options.setSendCodecMeta(false);
|
||||
}
|
||||
break;
|
||||
default:
|
||||
Ln.w("Unknown server option: " + key);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
return options;
|
||||
}
|
||||
|
||||
private static Rect parseCrop(String crop) {
|
||||
if (crop.isEmpty()) {
|
||||
return null;
|
||||
}
|
||||
// input format: "width:height:x:y"
|
||||
String[] tokens = crop.split(":");
|
||||
if (tokens.length != 4) {
|
||||
throw new IllegalArgumentException("Crop must contains 4 values separated by colons: \"" + crop + "\"");
|
||||
}
|
||||
int width = Integer.parseInt(tokens[0]);
|
||||
int height = Integer.parseInt(tokens[1]);
|
||||
int x = Integer.parseInt(tokens[2]);
|
||||
int y = Integer.parseInt(tokens[3]);
|
||||
return new Rect(x, y, x + width, y + height);
|
||||
}
|
||||
|
||||
public static void main(String... args) throws Exception {
|
||||
Thread.setDefaultUncaughtExceptionHandler((t, e) -> {
|
||||
Ln.e("Exception on thread " + t, e);
|
||||
});
|
||||
|
||||
Options options = createOptions(args);
|
||||
Options options = Options.parse(args);
|
||||
|
||||
Ln.initLogLevel(options.getLogLevel());
|
||||
|
||||
if (options.getListEncoders() || options.getListDisplays()) {
|
||||
if (options.getList()) {
|
||||
if (options.getCleanup()) {
|
||||
CleanUp.unlinkSelf();
|
||||
}
|
||||
@ -373,6 +205,10 @@ public final class Server {
|
||||
if (options.getListDisplays()) {
|
||||
Ln.i(LogUtils.buildDisplayListMessage());
|
||||
}
|
||||
if (options.getListCameras() || options.getListCameraSizes()) {
|
||||
Workarounds.apply(false, true);
|
||||
Ln.i(LogUtils.buildCameraListMessage(options.getListCameraSizes()));
|
||||
}
|
||||
// Just print the requested data, do not mirror
|
||||
return;
|
||||
}
|
||||
|
@ -34,7 +34,7 @@ public final class Settings {
|
||||
}
|
||||
|
||||
public static String getValue(String table, String key) throws SettingsException {
|
||||
if (Build.VERSION.SDK_INT <= 30) {
|
||||
if (Build.VERSION.SDK_INT <= Build.VERSION_CODES.R) {
|
||||
// on Android >= 12, it always fails: <https://github.com/Genymobile/scrcpy/issues/2788>
|
||||
try (ContentProvider provider = ServiceManager.getActivityManager().createSettingsProvider()) {
|
||||
return provider.getValue(table, key);
|
||||
@ -47,7 +47,7 @@ public final class Settings {
|
||||
}
|
||||
|
||||
public static void putValue(String table, String key, String value) throws SettingsException {
|
||||
if (Build.VERSION.SDK_INT <= 30) {
|
||||
if (Build.VERSION.SDK_INT <= Build.VERSION_CODES.R) {
|
||||
// on Android >= 12, it always fails: <https://github.com/Genymobile/scrcpy/issues/2788>
|
||||
try (ContentProvider provider = ServiceManager.getActivityManager().createSettingsProvider()) {
|
||||
provider.putValue(table, key, value);
|
||||
@ -60,7 +60,7 @@ public final class Settings {
|
||||
}
|
||||
|
||||
public static String getAndPutValue(String table, String key, String value) throws SettingsException {
|
||||
if (Build.VERSION.SDK_INT <= 30) {
|
||||
if (Build.VERSION.SDK_INT <= Build.VERSION_CODES.R) {
|
||||
// on Android >= 12, it always fails: <https://github.com/Genymobile/scrcpy/issues/2788>
|
||||
try (ContentProvider provider = ServiceManager.getActivityManager().createSettingsProvider()) {
|
||||
String oldValue = provider.getValue(table, key);
|
||||
|
@ -0,0 +1,71 @@
|
||||
package com.genymobile.scrcpy;
|
||||
|
||||
import android.view.Surface;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.concurrent.atomic.AtomicBoolean;
|
||||
|
||||
/**
|
||||
* A video source which can be rendered on a Surface for encoding.
|
||||
*/
|
||||
public abstract class SurfaceCapture {
|
||||
|
||||
private final AtomicBoolean resetCapture = new AtomicBoolean();
|
||||
|
||||
/**
|
||||
* Request the encoding session to be restarted, for example when the capture implementation detects that the video source size has changed
* (typically on device rotation).
|
||||
*/
|
||||
protected void requestReset() {
|
||||
resetCapture.set(true);
|
||||
}
|
||||
|
||||
/**
|
||||
* Consume the reset request (intended to be called by the encoder).
|
||||
*
|
||||
* @return {@code true} if a reset request was pending, {@code false} otherwise.
|
||||
*/
|
||||
public boolean consumeReset() {
|
||||
return resetCapture.getAndSet(false);
|
||||
}
|
||||
|
||||
/**
|
||||
* Called once before the capture starts.
|
||||
*/
|
||||
public abstract void init() throws IOException;
|
||||
|
||||
/**
|
||||
* Called after the capture ends (if and only if {@link #init()} has been called).
|
||||
*/
|
||||
public abstract void release();
|
||||
|
||||
/**
|
||||
* Start the capture to the target surface.
|
||||
*
|
||||
* @param surface the surface which will be encoded
|
||||
*/
|
||||
public abstract void start(Surface surface) throws IOException;
|
||||
|
||||
/**
|
||||
* Return the video size
|
||||
*
|
||||
* @return the video size
|
||||
*/
|
||||
public abstract Size getSize();
|
||||
|
||||
/**
|
||||
* Set the maximum capture size (set by the encoder if it does not support the current size).
|
||||
*
|
||||
* @param size Maximum size
|
||||
*/
|
||||
public abstract boolean setMaxSize(int size);
|
||||
|
||||
/**
|
||||
* Indicate if the capture has been closed internally.
|
||||
*
|
||||
* @return {@code true} if the capture is closed, {@code false} otherwise.
|
||||
*/
|
||||
public boolean isClosed() {
|
||||
return false;
|
||||
}
|
||||
}
|
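To make the contract of this abstract class concrete, here is a hedged sketch of a trivial SurfaceCapture implementation (not part of scrcpy; the class name and behavior are invented). ScreenCapture and CameraCapture follow the same lifecycle: init() once, then start(surface) for each encoding session (the encoder restarts a session whenever consumeReset() returns true), and finally release():

    // Hypothetical fixed-size capture, for illustration only.
    public class DummyCapture extends SurfaceCapture {

        private final Size size = new Size(1280, 720);

        @Override
        public void init() {
            // acquire the video source here (display, camera, ...)
        }

        @Override
        public void start(Surface surface) {
            // start rendering frames into the encoder input surface
        }

        @Override
        public void release() {
            // free the video source
        }

        @Override
        public Size getSize() {
            return size;
        }

        @Override
        public boolean setMaxSize(int maxSize) {
            // called by the encoder after a failure; return false if downsizing is not supported
            return false;
        }
    }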
@ -1,13 +1,9 @@
|
||||
package com.genymobile.scrcpy;
|
||||
|
||||
import com.genymobile.scrcpy.wrappers.SurfaceControl;
|
||||
|
||||
import android.graphics.Rect;
|
||||
import android.media.MediaCodec;
|
||||
import android.media.MediaCodecInfo;
|
||||
import android.media.MediaFormat;
|
||||
import android.os.Build;
|
||||
import android.os.IBinder;
|
||||
import android.os.Looper;
|
||||
import android.os.SystemClock;
|
||||
import android.view.Surface;
|
||||
|
||||
@ -16,7 +12,7 @@ import java.nio.ByteBuffer;
|
||||
import java.util.List;
|
||||
import java.util.concurrent.atomic.AtomicBoolean;
|
||||
|
||||
public class ScreenEncoder implements Device.RotationListener {
|
||||
public class SurfaceEncoder implements AsyncProcessor {
|
||||
|
||||
private static final int DEFAULT_I_FRAME_INTERVAL = 10; // seconds
|
||||
private static final int REPEAT_FRAME_DELAY_US = 100_000; // repeat after 100ms
|
||||
@ -26,9 +22,7 @@ public class ScreenEncoder implements Device.RotationListener {
|
||||
private static final int[] MAX_SIZE_FALLBACK = {2560, 1920, 1600, 1280, 1024, 800};
|
||||
private static final int MAX_CONSECUTIVE_ERRORS = 3;
|
||||
|
||||
private final AtomicBoolean rotationChanged = new AtomicBoolean();
|
||||
|
||||
private final Device device;
|
||||
private final SurfaceCapture capture;
|
||||
private final Streamer streamer;
|
||||
private final String encoderName;
|
||||
private final List<CodecOption> codecOptions;
|
||||
@ -39,9 +33,12 @@ public class ScreenEncoder implements Device.RotationListener {
|
||||
private boolean firstFrameSent;
|
||||
private int consecutiveErrors;
|
||||
|
||||
public ScreenEncoder(Device device, Streamer streamer, int videoBitRate, int maxFps, List<CodecOption> codecOptions, String encoderName,
|
||||
private Thread thread;
|
||||
private final AtomicBoolean stopped = new AtomicBoolean();
|
||||
|
||||
public SurfaceEncoder(SurfaceCapture capture, Streamer streamer, int videoBitRate, int maxFps, List<CodecOption> codecOptions, String encoderName,
|
||||
boolean downsizeOnError) {
|
||||
this.device = device;
|
||||
this.capture = capture;
|
||||
this.streamer = streamer;
|
||||
this.videoBitRate = videoBitRate;
|
||||
this.maxFps = maxFps;
|
||||
@ -50,45 +47,29 @@ public class ScreenEncoder implements Device.RotationListener {
|
||||
this.downsizeOnError = downsizeOnError;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onRotationChanged(int rotation) {
|
||||
rotationChanged.set(true);
|
||||
}
|
||||
|
||||
public boolean consumeRotationChange() {
|
||||
return rotationChanged.getAndSet(false);
|
||||
}
|
||||
|
||||
public void streamScreen() throws IOException, ConfigurationException {
|
||||
private void streamScreen() throws IOException, ConfigurationException {
|
||||
Codec codec = streamer.getCodec();
|
||||
MediaCodec mediaCodec = createMediaCodec(codec, encoderName);
|
||||
MediaFormat format = createFormat(codec.getMimeType(), videoBitRate, maxFps, codecOptions);
|
||||
IBinder display = createDisplay();
|
||||
device.setRotationListener(this);
|
||||
|
||||
streamer.writeVideoHeader(device.getScreenInfo().getVideoSize());
|
||||
capture.init();
|
||||
|
||||
boolean alive;
|
||||
try {
|
||||
do {
|
||||
ScreenInfo screenInfo = device.getScreenInfo();
|
||||
Rect contentRect = screenInfo.getContentRect();
|
||||
streamer.writeVideoHeader(capture.getSize());
|
||||
|
||||
// include the locked video orientation
|
||||
Rect videoRect = screenInfo.getVideoSize().toRect();
|
||||
format.setInteger(MediaFormat.KEY_WIDTH, videoRect.width());
|
||||
format.setInteger(MediaFormat.KEY_HEIGHT, videoRect.height());
|
||||
boolean alive;
|
||||
|
||||
do {
|
||||
Size size = capture.getSize();
|
||||
format.setInteger(MediaFormat.KEY_WIDTH, size.getWidth());
|
||||
format.setInteger(MediaFormat.KEY_HEIGHT, size.getHeight());
|
||||
|
||||
Surface surface = null;
|
||||
try {
|
||||
mediaCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
|
||||
surface = mediaCodec.createInputSurface();
|
||||
|
||||
// does not include the locked video orientation
|
||||
Rect unlockedVideoRect = screenInfo.getUnlockedVideoSize().toRect();
|
||||
int videoRotation = screenInfo.getVideoRotation();
|
||||
int layerStack = device.getLayerStack();
|
||||
setDisplaySurface(display, surface, videoRotation, contentRect, unlockedVideoRect, layerStack);
|
||||
capture.start(surface);
|
||||
|
||||
mediaCodec.start();
|
||||
|
||||
@ -97,7 +78,7 @@ public class ScreenEncoder implements Device.RotationListener {
|
||||
mediaCodec.stop();
|
||||
} catch (IllegalStateException | IllegalArgumentException e) {
|
||||
Ln.e("Encoding error: " + e.getClass().getName() + ": " + e.getMessage());
|
||||
if (!prepareRetry(device, screenInfo)) {
|
||||
if (!prepareRetry(size)) {
|
||||
throw e;
|
||||
}
|
||||
Ln.i("Retrying...");
|
||||
@ -111,12 +92,11 @@ public class ScreenEncoder implements Device.RotationListener {
|
||||
} while (alive);
|
||||
} finally {
|
||||
mediaCodec.release();
|
||||
device.setRotationListener(null);
|
||||
SurfaceControl.destroyDisplay(display);
|
||||
capture.release();
|
||||
}
|
||||
}
|
||||
|
||||
private boolean prepareRetry(Device device, ScreenInfo screenInfo) {
|
||||
private boolean prepareRetry(Size currentSize) {
|
||||
if (firstFrameSent) {
|
||||
++consecutiveErrors;
|
||||
if (consecutiveErrors >= MAX_CONSECUTIVE_ERRORS) {
|
||||
@ -136,16 +116,19 @@ public class ScreenEncoder implements Device.RotationListener {
|
||||
|
||||
// Downsizing on error is only enabled if an encoding failure occurs before the first frame (downsizing later could be surprising)
|
||||
|
||||
int newMaxSize = chooseMaxSizeFallback(screenInfo.getVideoSize());
|
||||
Ln.i("newMaxSize = " + newMaxSize);
|
||||
int newMaxSize = chooseMaxSizeFallback(currentSize);
|
||||
if (newMaxSize == 0) {
|
||||
// Must definitively fail
|
||||
return false;
|
||||
}
|
||||
|
||||
// Retry with a smaller device size
|
||||
boolean accepted = capture.setMaxSize(newMaxSize);
|
||||
if (!accepted) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Retry with a smaller size
|
||||
Ln.i("Retrying with -m" + newMaxSize + "...");
|
||||
device.setMaxSize(newMaxSize);
|
||||
return true;
|
||||
}
|
||||
|
||||
@ -163,12 +146,17 @@ public class ScreenEncoder implements Device.RotationListener {
|
||||
|
||||
private boolean encode(MediaCodec codec, Streamer streamer) throws IOException {
|
||||
boolean eof = false;
|
||||
boolean alive = true;
|
||||
MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
|
||||
|
||||
while (!consumeRotationChange() && !eof) {
|
||||
while (!capture.consumeReset() && !eof) {
|
||||
if (stopped.get()) {
|
||||
alive = false;
|
||||
break;
|
||||
}
|
||||
int outputBufferId = codec.dequeueOutputBuffer(bufferInfo, -1);
|
||||
try {
|
||||
if (consumeRotationChange()) {
|
||||
if (capture.consumeReset()) {
|
||||
// must restart encoding with new size
|
||||
break;
|
||||
}
|
||||
@ -193,7 +181,12 @@ public class ScreenEncoder implements Device.RotationListener {
|
||||
}
|
||||
}
|
||||
|
||||
return !eof;
|
||||
if (capture.isClosed()) {
|
||||
// The capture might have been closed internally (for example if the camera is disconnected)
|
||||
alive = false;
|
||||
}
|
||||
|
||||
return !eof && alive;
|
||||
}
|
||||
|
||||
private static MediaCodec createMediaCodec(Codec codec, String encoderName) throws IOException, ConfigurationException {
|
||||
@ -249,22 +242,41 @@ public class ScreenEncoder implements Device.RotationListener {
|
||||
return format;
|
||||
}
|
||||
|
||||
private static IBinder createDisplay() {
|
||||
// Since Android 12 (preview), secure displays could not be created with shell permissions anymore.
|
||||
// On Android 12 preview, SDK_INT is still R (not S), but CODENAME is "S".
|
||||
boolean secure = Build.VERSION.SDK_INT < 30 || (Build.VERSION.SDK_INT == 30 && !"S"
|
||||
.equals(Build.VERSION.CODENAME));
|
||||
return SurfaceControl.createDisplay("scrcpy", secure);
|
||||
@Override
|
||||
public void start(TerminationListener listener) {
|
||||
thread = new Thread(() -> {
|
||||
// Some devices (Meizu) deadlock if the video encoding thread has no Looper
|
||||
// <https://github.com/Genymobile/scrcpy/issues/4143>
|
||||
Looper.prepare();
|
||||
|
||||
try {
|
||||
streamScreen();
|
||||
} catch (ConfigurationException e) {
|
||||
// Do not print stack trace, a user-friendly error-message has already been logged
|
||||
} catch (IOException e) {
|
||||
// Broken pipe is expected on close, because the socket is closed by the client
|
||||
if (!IO.isBrokenPipe(e)) {
|
||||
Ln.e("Video encoding error", e);
|
||||
}
|
||||
} finally {
|
||||
Ln.d("Screen streaming stopped");
|
||||
listener.onTerminated(true);
|
||||
}
|
||||
}, "video");
|
||||
thread.start();
|
||||
}
|
||||
|
||||
private static void setDisplaySurface(IBinder display, Surface surface, int orientation, Rect deviceRect, Rect displayRect, int layerStack) {
|
||||
SurfaceControl.openTransaction();
|
||||
try {
|
||||
SurfaceControl.setDisplaySurface(display, surface);
|
||||
SurfaceControl.setDisplayProjection(display, orientation, deviceRect, displayRect);
|
||||
SurfaceControl.setDisplayLayerStack(display, layerStack);
|
||||
} finally {
|
||||
SurfaceControl.closeTransaction();
|
||||
@Override
|
||||
public void stop() {
|
||||
if (thread != null) {
|
||||
stopped.set(true);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void join() throws InterruptedException {
|
||||
if (thread != null) {
|
||||
thread.join();
|
||||
}
|
||||
}
|
||||
}
|
@ -7,7 +7,7 @@ public enum VideoCodec implements Codec {
|
||||
H264(0x68_32_36_34, "h264", MediaFormat.MIMETYPE_VIDEO_AVC),
|
||||
H265(0x68_32_36_35, "h265", MediaFormat.MIMETYPE_VIDEO_HEVC),
|
||||
@SuppressLint("InlinedApi") // introduced in API 21
|
||||
AV1(0x00_61_76_31, "av1", "video/av01");
|
||||
AV1(0x00_61_76_31, "av1", MediaFormat.MIMETYPE_VIDEO_AV1);
|
||||
|
||||
private final int id; // 4-byte ASCII representation of the name
|
||||
private final String name;
|
||||
|
22
server/src/main/java/com/genymobile/scrcpy/VideoSource.java
Normal file
@ -0,0 +1,22 @@
|
||||
package com.genymobile.scrcpy;
|
||||
|
||||
public enum VideoSource {
|
||||
DISPLAY("display"),
|
||||
CAMERA("camera");
|
||||
|
||||
private final String name;
|
||||
|
||||
VideoSource(String name) {
|
||||
this.name = name;
|
||||
}
|
||||
|
||||
static VideoSource findByName(String name) {
|
||||
for (VideoSource videoSource : VideoSource.values()) {
|
||||
if (name.equals(videoSource.name)) {
|
||||
return videoSource;
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
}
|
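Usage is straightforward: findByName() maps the lowercase value received from the client to the enum constant and returns null for unknown values, so the caller can report a proper error (illustrative snippet):

    VideoSource source = VideoSource.findByName("camera");    // -> VideoSource.CAMERA
    VideoSource unknown = VideoSource.findByName("webcam");   // -> null, the caller must handle it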
@ -1,13 +1,23 @@
|
||||
package com.genymobile.scrcpy;
|
||||
|
||||
import android.annotation.SuppressLint;
|
||||
import android.annotation.TargetApi;
|
||||
import android.app.Application;
|
||||
import android.content.AttributionSource;
|
||||
import android.content.Context;
|
||||
import android.content.ContextWrapper;
|
||||
import android.content.pm.ApplicationInfo;
|
||||
import android.media.AudioAttributes;
|
||||
import android.media.AudioManager;
|
||||
import android.media.AudioRecord;
|
||||
import android.os.Build;
|
||||
import android.os.Looper;
|
||||
import android.os.Parcel;
|
||||
|
||||
import java.lang.ref.WeakReference;
|
||||
import java.lang.reflect.Constructor;
|
||||
import java.lang.reflect.Field;
|
||||
import java.lang.reflect.Method;
|
||||
|
||||
public final class Workarounds {
|
||||
|
||||
@ -18,8 +28,60 @@ public final class Workarounds {
|
||||
// not instantiable
|
||||
}
|
||||
|
||||
public static void apply(boolean audio, boolean camera) {
|
||||
Workarounds.prepareMainLooper();
|
||||
|
||||
boolean mustFillAppInfo = false;
|
||||
boolean mustFillBaseContext = false;
|
||||
boolean mustFillAppContext = false;
|
||||
|
||||
if (Build.BRAND.equalsIgnoreCase("meizu")) {
|
||||
// Workarounds must be applied for Meizu phones:
|
||||
// - <https://github.com/Genymobile/scrcpy/issues/240>
|
||||
// - <https://github.com/Genymobile/scrcpy/issues/365>
|
||||
// - <https://github.com/Genymobile/scrcpy/issues/2656>
|
||||
//
|
||||
// But only apply when strictly necessary, since workarounds can cause other issues:
|
||||
// - <https://github.com/Genymobile/scrcpy/issues/940>
|
||||
// - <https://github.com/Genymobile/scrcpy/issues/994>
|
||||
mustFillAppInfo = true;
|
||||
} else if (Build.BRAND.equalsIgnoreCase("honor")) {
|
||||
// More workarounds must be applied for Honor devices:
|
||||
// - <https://github.com/Genymobile/scrcpy/issues/4015>
|
||||
//
|
||||
// The system context must not be set for all devices, because it would cause other problems:
|
||||
// - <https://github.com/Genymobile/scrcpy/issues/4015#issuecomment-1595382142>
|
||||
// - <https://github.com/Genymobile/scrcpy/issues/3805#issuecomment-1596148031>
|
||||
mustFillAppInfo = true;
|
||||
mustFillBaseContext = true;
|
||||
mustFillAppContext = true;
|
||||
}
|
||||
|
||||
if (audio && Build.VERSION.SDK_INT == Build.VERSION_CODES.R) {
|
||||
// Before Android 11, audio is not supported.
|
||||
// Since Android 12, we can properly set a context on the AudioRecord.
|
||||
// Only on Android 11 we must fill the application context for the AudioRecord to work.
|
||||
mustFillAppContext = true;
|
||||
}
|
||||
|
||||
if (camera) {
|
||||
mustFillAppInfo = true;
|
||||
mustFillBaseContext = true;
|
||||
}
|
||||
|
||||
if (mustFillAppInfo) {
|
||||
Workarounds.fillAppInfo();
|
||||
}
|
||||
if (mustFillBaseContext) {
|
||||
Workarounds.fillBaseContext();
|
||||
}
|
||||
if (mustFillAppContext) {
|
||||
Workarounds.fillAppContext();
|
||||
}
|
||||
}
|
||||
|
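To illustrate the decision table in apply() above (outcomes derived from the conditions shown; the actual behavior depends on the device at runtime):

    // On a Meizu phone running Android 11 with audio enabled:
    Workarounds.apply(/* audio = */ true, /* camera = */ false);
    // -> fillAppInfo() (Meizu) and fillAppContext() (audio on SDK 30 / Android 11)

    // On any device when the camera video source is used:
    Workarounds.apply(/* audio = */ false, /* camera = */ true);
    // -> fillAppInfo() and fillBaseContext(), plus brand-specific workarounds if applicable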
||||
@SuppressWarnings("deprecation")
|
||||
public static void prepareMainLooper() {
|
||||
private static void prepareMainLooper() {
|
||||
// Some devices internally create a Handler when creating an input Surface, causing an exception:
|
||||
// "Can't create handler inside thread that has not called Looper.prepare()"
|
||||
// <https://github.com/Genymobile/scrcpy/issues/240>
|
||||
@ -48,7 +110,7 @@ public final class Workarounds {
|
||||
}
|
||||
|
||||
@SuppressLint("PrivateApi,DiscouragedPrivateApi")
|
||||
public static void fillAppInfo() {
|
||||
private static void fillAppInfo() {
|
||||
try {
|
||||
fillActivityThread();
|
||||
|
||||
@ -77,7 +139,7 @@ public final class Workarounds {
|
||||
}
|
||||
|
||||
@SuppressLint("PrivateApi,DiscouragedPrivateApi")
|
||||
public static void fillAppContext() {
|
||||
private static void fillAppContext() {
|
||||
try {
|
||||
fillActivityThread();
|
||||
|
||||
@ -95,4 +157,153 @@ public final class Workarounds {
|
||||
Ln.d("Could not fill app context: " + throwable.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
private static void fillBaseContext() {
|
||||
try {
|
||||
fillActivityThread();
|
||||
|
||||
Method getSystemContextMethod = activityThreadClass.getDeclaredMethod("getSystemContext");
|
||||
Context context = (Context) getSystemContextMethod.invoke(activityThread);
|
||||
FakeContext.get().setBaseContext(context);
|
||||
} catch (Throwable throwable) {
|
||||
// this is a workaround, so failing is not an error
|
||||
Ln.d("Could not fill base context: " + throwable.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
@TargetApi(Build.VERSION_CODES.R)
|
||||
@SuppressLint("WrongConstant,MissingPermission,BlockedPrivateApi,SoonBlockedPrivateApi,DiscouragedPrivateApi")
|
||||
public static AudioRecord createAudioRecord(int source, int sampleRate, int channelConfig, int channels, int channelMask, int encoding) {
|
||||
// Vivo (and maybe some other third-party ROMs) modified `AudioRecord`'s constructor, requiring a `Context` from a real app environment.
//
// This method invokes the `AudioRecord(long nativeRecordInJavaObj)` constructor to create an empty `AudioRecord` instance, then uses
// reflection to initialize it the way the normal constructor (or the `AudioRecord.Builder.build()` method) does.
// As a result, the vendor-modified constructor code is never executed.
|
||||
try {
|
||||
// AudioRecord audioRecord = new AudioRecord(0L);
|
||||
Constructor<AudioRecord> audioRecordConstructor = AudioRecord.class.getDeclaredConstructor(long.class);
|
||||
audioRecordConstructor.setAccessible(true);
|
||||
AudioRecord audioRecord = audioRecordConstructor.newInstance(0L);
|
||||
|
||||
// audioRecord.mRecordingState = RECORDSTATE_STOPPED;
|
||||
Field mRecordingStateField = AudioRecord.class.getDeclaredField("mRecordingState");
|
||||
mRecordingStateField.setAccessible(true);
|
||||
mRecordingStateField.set(audioRecord, AudioRecord.RECORDSTATE_STOPPED);
|
||||
|
||||
Looper looper = Looper.myLooper();
|
||||
if (looper == null) {
|
||||
looper = Looper.getMainLooper();
|
||||
}
|
||||
|
||||
// audioRecord.mInitializationLooper = looper;
|
||||
Field mInitializationLooperField = AudioRecord.class.getDeclaredField("mInitializationLooper");
|
||||
mInitializationLooperField.setAccessible(true);
|
||||
mInitializationLooperField.set(audioRecord, looper);
|
||||
|
||||
// Create `AudioAttributes` with fixed capture preset
|
||||
int capturePreset = source;
|
||||
AudioAttributes.Builder audioAttributesBuilder = new AudioAttributes.Builder();
|
||||
Method setInternalCapturePresetMethod = AudioAttributes.Builder.class.getMethod("setInternalCapturePreset", int.class);
|
||||
setInternalCapturePresetMethod.invoke(audioAttributesBuilder, capturePreset);
|
||||
AudioAttributes attributes = audioAttributesBuilder.build();
|
||||
|
||||
// audioRecord.mAudioAttributes = attributes;
|
||||
Field mAudioAttributesField = AudioRecord.class.getDeclaredField("mAudioAttributes");
|
||||
mAudioAttributesField.setAccessible(true);
|
||||
mAudioAttributesField.set(audioRecord, attributes);
|
||||
|
||||
// audioRecord.audioParamCheck(capturePreset, sampleRate, encoding);
|
||||
Method audioParamCheckMethod = AudioRecord.class.getDeclaredMethod("audioParamCheck", int.class, int.class, int.class);
|
||||
audioParamCheckMethod.setAccessible(true);
|
||||
audioParamCheckMethod.invoke(audioRecord, capturePreset, sampleRate, encoding);
|
||||
|
||||
// audioRecord.mChannelCount = channels
|
||||
Field mChannelCountField = AudioRecord.class.getDeclaredField("mChannelCount");
|
||||
mChannelCountField.setAccessible(true);
|
||||
mChannelCountField.set(audioRecord, channels);
|
||||
|
||||
// audioRecord.mChannelMask = channelMask
|
||||
Field mChannelMaskField = AudioRecord.class.getDeclaredField("mChannelMask");
|
||||
mChannelMaskField.setAccessible(true);
|
||||
mChannelMaskField.set(audioRecord, channelMask);
|
||||
|
||||
int minBufferSize = AudioRecord.getMinBufferSize(sampleRate, channelConfig, encoding);
|
||||
int bufferSizeInBytes = minBufferSize * 8;
|
||||
|
||||
// audioRecord.audioBuffSizeCheck(bufferSizeInBytes)
|
||||
Method audioBuffSizeCheckMethod = AudioRecord.class.getDeclaredMethod("audioBuffSizeCheck", int.class);
|
||||
audioBuffSizeCheckMethod.setAccessible(true);
|
||||
audioBuffSizeCheckMethod.invoke(audioRecord, bufferSizeInBytes);
|
||||
|
||||
final int channelIndexMask = 0;
|
||||
|
||||
int[] sampleRateArray = new int[]{sampleRate};
|
||||
int[] session = new int[]{AudioManager.AUDIO_SESSION_ID_GENERATE};
|
||||
|
||||
int initResult;
|
||||
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.S) {
|
||||
// private native final int native_setup(Object audiorecord_this,
|
||||
// Object /*AudioAttributes*/ attributes,
|
||||
// int[] sampleRate, int channelMask, int channelIndexMask, int audioFormat,
|
||||
// int buffSizeInBytes, int[] sessionId, String opPackageName,
|
||||
// long nativeRecordInJavaObj);
|
||||
Method nativeSetupMethod = AudioRecord.class.getDeclaredMethod("native_setup", Object.class, Object.class, int[].class, int.class,
|
||||
int.class, int.class, int.class, int[].class, String.class, long.class);
|
||||
nativeSetupMethod.setAccessible(true);
|
||||
initResult = (int) nativeSetupMethod.invoke(audioRecord, new WeakReference<AudioRecord>(audioRecord), attributes, sampleRateArray,
|
||||
channelMask, channelIndexMask, audioRecord.getAudioFormat(), bufferSizeInBytes, session, FakeContext.get().getOpPackageName(),
|
||||
0L);
|
||||
} else {
|
||||
// Assume `context` is never `null`
|
||||
AttributionSource attributionSource = FakeContext.get().getAttributionSource();
|
||||
|
||||
// Assume `attributionSource.getPackageName()` is never null
|
||||
|
||||
// ScopedParcelState attributionSourceState = attributionSource.asScopedParcelState()
|
||||
Method asScopedParcelStateMethod = AttributionSource.class.getDeclaredMethod("asScopedParcelState");
|
||||
asScopedParcelStateMethod.setAccessible(true);
|
||||
|
||||
try (AutoCloseable attributionSourceState = (AutoCloseable) asScopedParcelStateMethod.invoke(attributionSource)) {
|
||||
Method getParcelMethod = attributionSourceState.getClass().getDeclaredMethod("getParcel");
|
||||
Parcel attributionSourceParcel = (Parcel) getParcelMethod.invoke(attributionSourceState);
|
||||
|
||||
// private native int native_setup(Object audiorecordThis,
|
||||
// Object /*AudioAttributes*/ attributes,
|
||||
// int[] sampleRate, int channelMask, int channelIndexMask, int audioFormat,
|
||||
// int buffSizeInBytes, int[] sessionId, @NonNull Parcel attributionSource,
|
||||
// long nativeRecordInJavaObj, int maxSharedAudioHistoryMs);
|
||||
Method nativeSetupMethod = AudioRecord.class.getDeclaredMethod("native_setup", Object.class, Object.class, int[].class, int.class,
|
||||
int.class, int.class, int.class, int[].class, Parcel.class, long.class, int.class);
|
||||
nativeSetupMethod.setAccessible(true);
|
||||
initResult = (int) nativeSetupMethod.invoke(audioRecord, new WeakReference<AudioRecord>(audioRecord), attributes, sampleRateArray,
|
||||
channelMask, channelIndexMask, audioRecord.getAudioFormat(), bufferSizeInBytes, session, attributionSourceParcel, 0L, 0);
|
||||
}
|
||||
}
|
||||
|
||||
if (initResult != AudioRecord.SUCCESS) {
|
||||
Ln.e("Error code " + initResult + " when initializing native AudioRecord object.");
|
||||
throw new RuntimeException("Cannot create AudioRecord");
|
||||
}
|
||||
|
||||
// mSampleRate = sampleRate[0]
|
||||
Field mSampleRateField = AudioRecord.class.getDeclaredField("mSampleRate");
|
||||
mSampleRateField.setAccessible(true);
|
||||
mSampleRateField.set(audioRecord, sampleRateArray[0]);
|
||||
|
||||
// audioRecord.mSessionId = session[0]
|
||||
Field mSessionIdField = AudioRecord.class.getDeclaredField("mSessionId");
|
||||
mSessionIdField.setAccessible(true);
|
||||
mSessionIdField.set(audioRecord, session[0]);
|
||||
|
||||
// audioRecord.mState = AudioRecord.STATE_INITIALIZED
|
||||
Field mStateField = AudioRecord.class.getDeclaredField("mState");
|
||||
mStateField.setAccessible(true);
|
||||
mStateField.set(audioRecord, AudioRecord.STATE_INITIALIZED);
|
||||
|
||||
return audioRecord;
|
||||
} catch (Exception e) {
|
||||
Ln.e("Failed to invoke AudioRecord.<init>.", e);
|
||||
throw new RuntimeException("Cannot create AudioRecord");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -17,7 +17,7 @@ import java.lang.reflect.InvocationTargetException;
|
||||
import java.lang.reflect.Method;
|
||||
|
||||
@SuppressLint("PrivateApi,DiscouragedPrivateApi")
|
||||
public class ActivityManager {
|
||||
public final class ActivityManager {
|
||||
|
||||
private final IInterface manager;
|
||||
private Method getContentProviderExternalMethod;
|
||||
@ -51,7 +51,7 @@ public class ActivityManager {
|
||||
return removeContentProviderExternalMethod;
|
||||
}
|
||||
|
||||
@TargetApi(29)
|
||||
@TargetApi(Build.VERSION_CODES.Q)
|
||||
private ContentProvider getContentProviderExternal(String name, IBinder token) {
|
||||
try {
|
||||
Method method = getGetContentProviderExternalMethod();
|
||||
|
@ -11,7 +11,7 @@ import android.os.IInterface;
|
||||
import java.lang.reflect.InvocationTargetException;
|
||||
import java.lang.reflect.Method;
|
||||
|
||||
public class ClipboardManager {
|
||||
public final class ClipboardManager {
|
||||
private final IInterface manager;
|
||||
private Method getPrimaryClipMethod;
|
||||
private Method setPrimaryClipMethod;
|
||||
@ -26,7 +26,7 @@ public class ClipboardManager {
|
||||
|
||||
private Method getGetPrimaryClipMethod() throws NoSuchMethodException {
|
||||
if (getPrimaryClipMethod == null) {
|
||||
if (Build.VERSION.SDK_INT < 29) {
|
||||
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.Q) {
|
||||
getPrimaryClipMethod = manager.getClass().getMethod("getPrimaryClip", String.class);
|
||||
} else {
|
||||
try {
|
||||
@ -37,8 +37,13 @@ public class ClipboardManager {
|
||||
getPrimaryClipMethod = manager.getClass().getMethod("getPrimaryClip", String.class, String.class, int.class);
|
||||
getMethodVersion = 1;
|
||||
} catch (NoSuchMethodException e2) {
|
||||
getPrimaryClipMethod = manager.getClass().getMethod("getPrimaryClip", String.class, String.class, int.class, int.class);
|
||||
getMethodVersion = 2;
|
||||
try {
|
||||
getPrimaryClipMethod = manager.getClass().getMethod("getPrimaryClip", String.class, String.class, int.class, int.class);
|
||||
getMethodVersion = 2;
|
||||
} catch (NoSuchMethodException e3) {
|
||||
getPrimaryClipMethod = manager.getClass().getMethod("getPrimaryClip", String.class, int.class, String.class);
|
||||
getMethodVersion = 3;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -48,7 +53,7 @@ public class ClipboardManager {
|
||||
|
||||
private Method getSetPrimaryClipMethod() throws NoSuchMethodException {
|
||||
if (setPrimaryClipMethod == null) {
|
||||
if (Build.VERSION.SDK_INT < 29) {
|
||||
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.Q) {
|
||||
setPrimaryClipMethod = manager.getClass().getMethod("setPrimaryClip", ClipData.class, String.class);
|
||||
} else {
|
||||
try {
|
||||
@ -71,7 +76,7 @@ public class ClipboardManager {
|
||||
|
||||
private static ClipData getPrimaryClip(Method method, int methodVersion, IInterface manager)
|
||||
throws InvocationTargetException, IllegalAccessException {
|
||||
if (Build.VERSION.SDK_INT < 29) {
|
||||
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.Q) {
|
||||
return (ClipData) method.invoke(manager, FakeContext.PACKAGE_NAME);
|
||||
}
|
||||
|
||||
@ -80,14 +85,16 @@ public class ClipboardManager {
|
||||
return (ClipData) method.invoke(manager, FakeContext.PACKAGE_NAME, FakeContext.ROOT_UID);
|
||||
case 1:
|
||||
return (ClipData) method.invoke(manager, FakeContext.PACKAGE_NAME, null, FakeContext.ROOT_UID);
|
||||
default:
|
||||
case 2:
|
||||
return (ClipData) method.invoke(manager, FakeContext.PACKAGE_NAME, null, FakeContext.ROOT_UID, 0);
|
||||
default:
|
||||
return (ClipData) method.invoke(manager, FakeContext.PACKAGE_NAME, FakeContext.ROOT_UID, null);
|
||||
}
|
||||
}
|
||||
|
||||
private static void setPrimaryClip(Method method, int methodVersion, IInterface manager, ClipData clipData)
|
||||
throws InvocationTargetException, IllegalAccessException {
|
||||
if (Build.VERSION.SDK_INT < 29) {
|
||||
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.Q) {
|
||||
method.invoke(manager, clipData, FakeContext.PACKAGE_NAME);
|
||||
return;
|
||||
}
|
||||
@ -133,7 +140,7 @@ public class ClipboardManager {
|
||||
|
||||
private static void addPrimaryClipChangedListener(Method method, int methodVersion, IInterface manager,
|
||||
IOnPrimaryClipChangedListener listener) throws InvocationTargetException, IllegalAccessException {
|
||||
if (Build.VERSION.SDK_INT < 29) {
|
||||
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.Q) {
|
||||
method.invoke(manager, listener, FakeContext.PACKAGE_NAME);
|
||||
return;
|
||||
}
|
||||
@ -153,7 +160,7 @@ public class ClipboardManager {
|
||||
|
||||
private Method getAddPrimaryClipChangedListener() throws NoSuchMethodException {
|
||||
if (addPrimaryClipChangedListener == null) {
|
||||
if (Build.VERSION.SDK_INT < 29) {
|
||||
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.Q) {
|
||||
addPrimaryClipChangedListener = manager.getClass()
|
||||
.getMethod("addPrimaryClipChangedListener", IOnPrimaryClipChangedListener.class, String.class);
|
||||
} else {
|
||||
|
@ -14,7 +14,7 @@ import java.io.Closeable;
|
||||
import java.lang.reflect.InvocationTargetException;
|
||||
import java.lang.reflect.Method;
|
||||
|
||||
public class ContentProvider implements Closeable {
|
||||
public final class ContentProvider implements Closeable {
|
||||
|
||||
public static final String TABLE_SYSTEM = "system";
|
||||
public static final String TABLE_SECURE = "secure";
|
||||
@ -54,7 +54,7 @@ public class ContentProvider implements Closeable {
|
||||
@SuppressLint("PrivateApi")
|
||||
private Method getCallMethod() throws NoSuchMethodException {
|
||||
if (callMethod == null) {
|
||||
if (Build.VERSION.SDK_INT >= 31) {
|
||||
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.S) {
|
||||
callMethod = provider.getClass().getMethod("call", AttributionSource.class, String.class, String.class, String.class, Bundle.class);
|
||||
callMethodVersion = 0;
|
||||
} else {
|
||||
@ -83,7 +83,7 @@ public class ContentProvider implements Closeable {
|
||||
Method method = getCallMethod();
|
||||
Object[] args;
|
||||
|
||||
if (Build.VERSION.SDK_INT >= 31 && callMethodVersion == 0) {
|
||||
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.S && callMethodVersion == 0) {
|
||||
args = new Object[]{FakeContext.get().getAttributionSource(), "settings", callMethod, arg, extras};
|
||||
} else {
|
||||
switch (callMethodVersion) {
|
||||
|
@ -14,13 +14,13 @@ public final class InputManager {
|
||||
public static final int INJECT_INPUT_EVENT_MODE_WAIT_FOR_RESULT = 1;
|
||||
public static final int INJECT_INPUT_EVENT_MODE_WAIT_FOR_FINISH = 2;
|
||||
|
||||
private final android.hardware.input.InputManager manager;
|
||||
private final Object manager;
|
||||
private Method injectInputEventMethod;
|
||||
|
||||
private static Method setDisplayIdMethod;
|
||||
private static Method setActionButtonMethod;
|
||||
|
||||
public InputManager(android.hardware.input.InputManager manager) {
|
||||
public InputManager(Object manager) {
|
||||
this.manager = manager;
|
||||
}
|
||||
|
||||
|
@ -1,9 +1,14 @@
|
||||
package com.genymobile.scrcpy.wrappers;
|
||||
|
||||
import com.genymobile.scrcpy.FakeContext;
|
||||
|
||||
import android.annotation.SuppressLint;
|
||||
import android.content.Context;
|
||||
import android.hardware.camera2.CameraManager;
|
||||
import android.os.IBinder;
|
||||
import android.os.IInterface;
|
||||
|
||||
import java.lang.reflect.Constructor;
|
||||
import java.lang.reflect.InvocationTargetException;
|
||||
import java.lang.reflect.Method;
|
||||
|
||||
@ -26,6 +31,7 @@ public final class ServiceManager {
|
||||
private static StatusBarManager statusBarManager;
|
||||
private static ClipboardManager clipboardManager;
|
||||
private static ActivityManager activityManager;
|
||||
private static CameraManager cameraManager;
|
||||
|
||||
private ServiceManager() {
|
||||
/* not instantiable */
|
||||
@ -62,11 +68,21 @@ public final class ServiceManager {
|
||||
return displayManager;
|
||||
}
|
||||
|
||||
public static Class<?> getInputManagerClass() {
|
||||
try {
|
||||
// Parts of the InputManager class have been moved to a new InputManagerGlobal class in Android 14 preview
|
||||
return Class.forName("android.hardware.input.InputManagerGlobal");
|
||||
} catch (ClassNotFoundException e) {
|
||||
return android.hardware.input.InputManager.class;
|
||||
}
|
||||
}
|
||||
|
||||
public static InputManager getInputManager() {
|
||||
if (inputManager == null) {
|
||||
try {
|
||||
Method getInstanceMethod = android.hardware.input.InputManager.class.getDeclaredMethod("getInstance");
|
||||
android.hardware.input.InputManager im = (android.hardware.input.InputManager) getInstanceMethod.invoke(null);
|
||||
Class<?> inputManagerClass = getInputManagerClass();
|
||||
Method getInstanceMethod = inputManagerClass.getDeclaredMethod("getInstance");
|
||||
Object im = getInstanceMethod.invoke(null);
|
||||
inputManager = new InputManager(im);
|
||||
} catch (NoSuchMethodException | IllegalAccessException | InvocationTargetException e) {
|
||||
throw new AssertionError(e);
|
||||
@ -119,4 +135,16 @@ public final class ServiceManager {
|
||||
|
||||
return activityManager;
|
||||
}
|
||||
|
||||
public static CameraManager getCameraManager() {
|
||||
if (cameraManager == null) {
|
||||
try {
|
||||
Constructor<CameraManager> ctor = CameraManager.class.getDeclaredConstructor(Context.class);
|
||||
cameraManager = ctor.newInstance(FakeContext.get());
|
||||
} catch (Exception e) {
|
||||
throw new AssertionError(e);
|
||||
}
|
||||
}
|
||||
return cameraManager;
|
||||
}
|
||||
}
|
||||
|
@ -7,7 +7,7 @@ import android.os.IInterface;
|
||||
import java.lang.reflect.InvocationTargetException;
|
||||
import java.lang.reflect.Method;
|
||||
|
||||
public class StatusBarManager {
|
||||
public final class StatusBarManager {
|
||||
|
||||
private final IInterface manager;
|
||||
private Method expandNotificationsPanelMethod;
|
||||
|
@ -90,7 +90,7 @@ public final class SurfaceControl {
|
||||
if (getBuiltInDisplayMethod == null) {
|
||||
// the method signature has changed in Android Q
|
||||
// <https://github.com/Genymobile/scrcpy/issues/586>
|
||||
if (Build.VERSION.SDK_INT < 29) {
|
||||
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.Q) {
|
||||
getBuiltInDisplayMethod = CLASS.getMethod("getBuiltInDisplay", int.class);
|
||||
} else {
|
||||
getBuiltInDisplayMethod = CLASS.getMethod("getInternalDisplayToken");
|
||||
@ -102,7 +102,7 @@ public final class SurfaceControl {
|
||||
public static IBinder getBuiltInDisplay() {
|
||||
try {
|
||||
Method method = getGetBuiltInDisplayMethod();
|
||||
if (Build.VERSION.SDK_INT < 29) {
|
||||
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.Q) {
|
||||
// call getBuiltInDisplay(0)
|
||||
return (IBinder) method.invoke(null, 0);
|
||||
}
|
||||
|
Some files were not shown because too many files have changed in this diff.