Compare commits
52 commits · android-fr ... fix_audio_

434033d2c3  e849b6dbb5  58f1057f01  379caf8551  2aec7b4c9d  fc52b24503  ff5ffc892f  360f2fea1e
24999d0d32  8e2c0d6407  9a2abba098  b2d860382f  4c4a03ebe1  798dfd240e  c4caa6b81d  1efbfe1175
751c09f47a  6ad46d70b8  f46758d1c5  e71f5358b3  a2c8910006  cab354102d  597d2ccc01  38900d7730
e926bf1fe8  6298ef095f  7d33798b40  d500550212  a166eee909  b11b363e8e  7321db6f28  d6bcde565f
98f4f4e68a  be86e14e05  8c650e53cd  e89e772c7c  feab87053a  751a3653a0  9c08eb79cb  92483fe11b
6928acdeac  0f3af2d20b  c083a7cc90  9eb6591913  9cfea347d0  ce064fb5e0  afcdfc7fd7  051b74c883
2e532afd2b  fdf465851c  669e9a8d1e  f77e1c474e
@@ -7,6 +7,7 @@ _scrcpy() {
--audio-codec=
--audio-codec-options=
--audio-encoder=
--audio-source=
--audio-output-buffer=
-b --video-bit-rate=
--crop=
@@ -15,9 +16,9 @@ _scrcpy() {
--display=
--display-buffer=
-e --select-tcpip
-f --fullscreen
--force-adb-forward
--forward-all-clicks
-f --fullscreen
-K --hid-keyboard
-h --help
--legacy-paste
@@ -25,33 +26,36 @@ _scrcpy() {
--list-encoders
--lock-video-orientation
--lock-video-orientation=
--max-fps=
-M --hid-mouse
-m --max-size=
-M --hid-mouse
--max-fps=
-n --no-control
-N --no-playback
--no-audio
--no-audio-playback
--no-cleanup
--no-clipboard-autosync
--no-downsize-on-error
-n --no-control
-N --no-display
--no-key-repeat
--no-mipmaps
--no-power-on
--no-video
--no-video-playback
--otg
-p --port=
--power-off-on-close
--prefer-text
--print-fps
--push-target=
--raw-key-events
-r --record=
--raw-key-events
--record-format=
--render-driver=
--require-audio
--rotation=
-s --serial=
--shortcut-mod=
-S --turn-screen-off
--shortcut-mod=
-t --show-touches
--tcpip
--tcpip=
@@ -59,8 +63,8 @@ _scrcpy() {
--tunnel-port=
--v4l2-buffer=
--v4l2-sink=
-V --verbosity=
-v --version
-V --verbosity=
--video-codec=
--video-codec-options=
--video-encoder=
@@ -83,6 +87,10 @@ _scrcpy() {
COMPREPLY=($(compgen -W 'opus aac raw' -- "$cur"))
return
;;
--audio-source)
COMPREPLY=($(compgen -W 'output mic' -- "$cur"))
return
;;
--lock-video-orientation)
COMPREPLY=($(compgen -W 'unlocked initial 0 1 2 3' -- "$cur"))
return
@@ -14,6 +14,7 @@ arguments=(
'--audio-codec=[Select the audio codec]:codec:(opus aac raw)'
'--audio-codec-options=[Set a list of comma-separated key\:type=value options for the device audio encoder]'
'--audio-encoder=[Use a specific MediaCodec audio encoder]'
'--audio-source=[Select the audio source]:source:(output mic)'
'--audio-output-buffer=[Configure the size of the SDL audio output buffer (in milliseconds)]'
{-b,--video-bit-rate=}'[Encode the video at the given bit-rate]'
'--crop=[\[width\:height\:x\:y\] Crop the device screen on the server]'
@@ -22,50 +23,53 @@ arguments=(
'--display=[Specify the display id to mirror]'
'--display-buffer=[Add a buffering delay \(in milliseconds\) before displaying]'
{-e,--select-tcpip}'[Use TCP/IP device]'
{-f,--fullscreen}'[Start in fullscreen]'
'--force-adb-forward[Do not attempt to use \"adb reverse\" to connect to the device]'
'--forward-all-clicks[Forward clicks to device]'
{-f,--fullscreen}'[Start in fullscreen]'
{-K,--hid-keyboard}'[Simulate a physical keyboard by using HID over AOAv2]'
{-h,--help}'[Print the help]'
'--legacy-paste[Inject computer clipboard text as a sequence of key events on Ctrl+v]'
'--list-displays[List displays available on the device]'
'--list-encoders[List video and audio encoders available on the device]'
'--lock-video-orientation=[Lock video orientation]:orientation:(unlocked initial 0 1 2 3)'
'--max-fps=[Limit the frame rate of screen capture]'
{-M,--hid-mouse}'[Simulate a physical mouse by using HID over AOAv2]'
{-m,--max-size=}'[Limit both the width and height of the video to value]'
{-M,--hid-mouse}'[Simulate a physical mouse by using HID over AOAv2]'
'--max-fps=[Limit the frame rate of screen capture]'
{-n,--no-control}'[Disable device control \(mirror the device in read only\)]'
{-N,--no-playback}'[Disable video and audio playback]'
'--no-audio[Disable audio forwarding]'
'--no-audio-playback[Disable audio playback]'
'--no-cleanup[Disable device cleanup actions on exit]'
'--no-clipboard-autosync[Disable automatic clipboard synchronization]'
'--no-downsize-on-error[Disable lowering definition on MediaCodec error]'
{-n,--no-control}'[Disable device control \(mirror the device in read only\)]'
{-N,--no-display}'[Do not display device \(during screen recording or when V4L2 sink is enabled\)]'
'--no-key-repeat[Do not forward repeated key events when a key is held down]'
'--no-mipmaps[Disable the generation of mipmaps]'
'--no-power-on[Do not power on the device on start]'
'--no-video[Disable video forwarding]'
'--no-video-playback[Disable video playback]'
'--otg[Run in OTG mode \(simulating physical keyboard and mouse\)]'
{-p,--port=}'[\[port\[\:port\]\] Set the TCP port \(range\) used by the client to listen]'
'--power-off-on-close[Turn the device screen off when closing scrcpy]'
'--prefer-text[Inject alpha characters and space as text events instead of key events]'
'--print-fps[Start FPS counter, to print frame logs to the console]'
'--push-target=[Set the target directory for pushing files to the device by drag and drop]'
'--raw-key-events[Inject key events for all input keys, and ignore text events]'
{-r,--record=}'[Record screen to file]:record file:_files'
'--raw-key-events[Inject key events for all input keys, and ignore text events]'
'--record-format=[Force recording format]:format:(mp4 mkv)'
'--render-driver=[Request SDL to use the given render driver]:driver name:(direct3d opengl opengles2 opengles metal software)'
'--require-audio=[Make scrcpy fail if audio is enabled but does not work]'
'--rotation=[Set the initial display rotation]:rotation values:(0 1 2 3)'
{-s,--serial=}'[The device serial number \(mandatory for multiple devices only\)]:serial:($("${ADB-adb}" devices | awk '\''$2 == "device" {print $1}'\''))'
'--shortcut-mod=[\[key1,key2+key3,...\] Specify the modifiers to use for scrcpy shortcuts]:shortcut mod:(lctrl rctrl lalt ralt lsuper rsuper)'
{-S,--turn-screen-off}'[Turn the device screen off immediately]'
'--shortcut-mod=[\[key1,key2+key3,...\] Specify the modifiers to use for scrcpy shortcuts]:shortcut mod:(lctrl rctrl lalt ralt lsuper rsuper)'
{-t,--show-touches}'[Show physical touches]'
'--tcpip[\(optional \[ip\:port\]\) Configure and connect the device over TCP/IP]'
'--tunnel-host=[Set the IP address of the adb tunnel to reach the scrcpy server]'
'--tunnel-port=[Set the TCP port of the adb tunnel to reach the scrcpy server]'
'--v4l2-buffer=[Add a buffering delay \(in milliseconds\) before pushing frames]'
'--v4l2-sink=[\[\/dev\/videoN\] Output to v4l2loopback device]'
{-V,--verbosity=}'[Set the log level]:verbosity:(verbose debug info warn error)'
{-v,--version}'[Print the version of scrcpy]'
{-V,--verbosity=}'[Set the log level]:verbosity:(verbose debug info warn error)'
'--video-codec=[Select the video codec]:codec:(h264 h265 av1)'
'--video-codec-options=[Set a list of comma-separated key\:type=value options for the device video encoder]'
'--video-encoder=[Use a specific MediaCodec video encoder]'
@@ -14,6 +14,7 @@ src = [
    'src/delay_buffer.c',
    'src/demuxer.c',
    'src/device_msg.c',
    'src/display.c',
    'src/icon.c',
    'src/file_pusher.c',
    'src/fps_counter.c',
@@ -6,11 +6,11 @@ cd "$DIR"
mkdir -p "$PREBUILT_DATA_DIR"
cd "$PREBUILT_DATA_DIR"

VERSION=6.0-scrcpy-2
VERSION=6.0-scrcpy-4
DEP_DIR="ffmpeg-$VERSION"

FILENAME="$DEP_DIR".7z
SHA256SUM=98ef97f8607c97a5c4f9c5a0a991b78f105d002a3619145011d16ffb92501b14
SHA256SUM=39274b321491ce83e76cab5d24e7cbe3f402d3ccf382f739b13be5651c146b60

if [[ -d "$DEP_DIR" ]]
then
app/scrcpy.1

@@ -33,14 +33,6 @@ Lower values decrease the latency, but increase the likelihood of buffer underrun
|
||||
|
||||
Default is 50.
|
||||
|
||||
.TP
|
||||
.BI "\-\-audio\-output\-buffer ms
|
||||
Configure the size of the SDL audio output buffer (in milliseconds).
|
||||
|
||||
If you get "robotic" audio playback, you should test with a higher value (10). Do not change this setting otherwise.
|
||||
|
||||
Default is 5.
|
||||
|
||||
.TP
|
||||
.BI "\-\-audio\-codec " name
|
||||
Select an audio codec (opus, aac or raw).
|
||||
@ -63,6 +55,20 @@ Use a specific MediaCodec audio encoder (depending on the codec provided by \fB\
|
||||
|
||||
The available encoders can be listed by \-\-list\-encoders.
|
||||
|
||||
.TP
|
||||
.BI "\-\-audio\-source " source
|
||||
Select the audio source (output or mic).
|
||||
|
||||
Default is output.
|
||||
|
||||
.TP
|
||||
.BI "\-\-audio\-output\-buffer ms
|
||||
Configure the size of the SDL audio output buffer (in milliseconds).
|
||||
|
||||
If you get "robotic" audio playback, you should test with a higher value (10). Do not change this setting otherwise.
|
||||
|
||||
Default is 5.
|
||||
|
||||
.TP
|
||||
.BI "\-b, \-\-video\-bit\-rate " value
|
||||
Encode the video at the given bit\-rate, expressed in bits/s. Unit suffixes are supported: '\fBK\fR' (x1000) and '\fBM\fR' (x1000000).
|
||||
@ -107,6 +113,10 @@ Use TCP/IP device (if there is exactly one, like adb -e).
|
||||
|
||||
Also see \fB\-d\fR (\fB\-\-select\-usb\fR).
|
||||
|
||||
.TP
|
||||
.B \-f, \-\-fullscreen
|
||||
Start in fullscreen.
|
||||
|
||||
.TP
|
||||
.B \-\-force\-adb\-forward
|
||||
Do not attempt to use "adb reverse" to connect to the device.
|
||||
@ -115,10 +125,6 @@ Do not attempt to use "adb reverse" to connect to the device.
|
||||
.B \-\-forward\-all\-clicks
|
||||
By default, right-click triggers BACK (or POWER on) and middle-click triggers HOME. This option disables these shortcuts and forwards the clicks to the device instead.
|
||||
|
||||
.TP
|
||||
.B \-f, \-\-fullscreen
|
||||
Start in fullscreen.
|
||||
|
||||
.TP
|
||||
.B \-h, \-\-help
|
||||
Print this help.
|
||||
@ -161,10 +167,6 @@ Default is "unlocked".
|
||||
|
||||
Passing the option without argument is equivalent to passing "initial".
|
||||
|
||||
.TP
|
||||
.BI "\-\-max\-fps " value
|
||||
Limit the framerate of screen capture (officially supported since Android 10, but may work on earlier versions).
|
||||
|
||||
.TP
|
||||
.BI "\-m, \-\-max\-size " value
|
||||
Limit both the width and height of the video to \fIvalue\fR. The other dimension is computed so that the device aspect\-ratio is preserved.
|
||||
@ -183,6 +185,26 @@ It may only work over USB.
|
||||
|
||||
Also see \fB\-\-hid\-keyboard\fR.
|
||||
|
||||
.TP
|
||||
.BI "\-\-max\-fps " value
|
||||
Limit the framerate of screen capture (officially supported since Android 10, but may work on earlier versions).
|
||||
|
||||
.TP
|
||||
.B \-n, \-\-no\-control
|
||||
Disable device control (mirror the device in read\-only).
|
||||
|
||||
.TP
|
||||
.B \-N, \-\-no\-playback
|
||||
Disable video and audio playback on the computer (equivalent to --no-video-playback --no-audio-playback).
|
||||
|
||||
.TP
|
||||
.B \-\-no\-audio
|
||||
Disable audio forwarding.
|
||||
|
||||
.TP
|
||||
.B \-\-no\-audio\-playback
|
||||
Disable audio playback on the computer.
|
||||
|
||||
.TP
|
||||
.B \-\-no\-cleanup
|
||||
By default, scrcpy removes the server binary from the device and restores the device state (show touches, stay awake and power mode) on exit.
|
||||
@ -201,14 +223,6 @@ By default, on MediaCodec error, scrcpy automatically tries again with a lower d
|
||||
|
||||
This option disables this behavior.
|
||||
|
||||
.TP
|
||||
.B \-n, \-\-no\-control
|
||||
Disable device control (mirror the device in read\-only).
|
||||
|
||||
.TP
|
||||
.B \-N, \-\-no\-display
|
||||
Do not display device (only when screen recording is enabled).
|
||||
|
||||
.TP
|
||||
.B \-\-no\-key\-repeat
|
||||
Do not forward repeated key events when a key is held down.
|
||||
@ -221,6 +235,14 @@ If the renderer is OpenGL 3.0+ or OpenGL ES 2.0+, then mipmaps are automatically
|
||||
.B \-\-no\-power\-on
|
||||
Do not power on the device on start.
|
||||
|
||||
.TP
|
||||
.B \-\-no\-video
|
||||
Disable video forwarding.
|
||||
|
||||
.TP
|
||||
.B \-\-no\-video\-playback
|
||||
Disable video playback on the computer.
|
||||
|
||||
.TP
|
||||
.B \-\-otg
|
||||
Run in OTG mode: simulate physical keyboard and mouse, as if the computer keyboard and mouse were plugged directly to the device via an OTG cable.
|
||||
@ -262,10 +284,6 @@ Set the target directory for pushing files to the device by drag & drop. It is p
|
||||
|
||||
Default is "/sdcard/Download/".
|
||||
|
||||
.TP
|
||||
.B \-\-raw\-key\-events
|
||||
Inject key events for all input keys, and ignore text events.
|
||||
|
||||
.TP
|
||||
.BI "\-r, \-\-record " file
|
||||
Record screen to
|
||||
@ -275,6 +293,10 @@ The format is determined by the
|
||||
.B \-\-record\-format
|
||||
option if set, or by the file extension (.mp4 or .mkv).
|
||||
|
||||
.TP
|
||||
.B \-\-raw\-key\-events
|
||||
Inject key events for all input keys, and ignore text events.
|
||||
|
||||
.TP
|
||||
.BI "\-\-record\-format " format
|
||||
Force recording format (either mp4 or mkv).
|
||||
@ -300,6 +322,10 @@ Set the initial display rotation. Possibles values are 0, 1, 2 and 3. Each incre
|
||||
.BI "\-s, \-\-serial " number
|
||||
The device serial number. Mandatory only if several devices are connected to adb.
|
||||
|
||||
.TP
|
||||
.B \-S, \-\-turn\-screen\-off
|
||||
Turn the device screen off immediately.
|
||||
|
||||
.TP
|
||||
.BI "\-\-shortcut\-mod " key\fR[+...]][,...]
|
||||
Specify the modifiers to use for scrcpy shortcuts. Possible keys are "lctrl", "rctrl", "lalt", "ralt", "lsuper" and "rsuper".
|
||||
@ -310,6 +336,12 @@ For example, to use either LCtrl+LAlt or LSuper for scrcpy shortcuts, pass "lctr
|
||||
|
||||
Default is "lalt,lsuper" (left-Alt or left-Super).
|
||||
|
||||
.TP
|
||||
.B \-t, \-\-show\-touches
|
||||
Enable "show touches" on start, restore the initial value on exit.
|
||||
|
||||
It only shows physical touches (not clicks from scrcpy).
|
||||
|
||||
.TP
|
||||
.BI "\-\-tcpip\fR[=\fIip\fR[:\fIport\fR]]
|
||||
Configure and reconnect the device over TCP/IP.
|
||||
@ -318,16 +350,6 @@ If a destination address is provided, then scrcpy connects to this address befor
|
||||
|
||||
If no destination address is provided, then scrcpy attempts to find the IP address and adb port of the current device (typically connected over USB), enables TCP/IP mode if necessary, then connects to this address before starting.
|
||||
|
||||
.TP
|
||||
.B \-S, \-\-turn\-screen\-off
|
||||
Turn the device screen off immediately.
|
||||
|
||||
.TP
|
||||
.B \-t, \-\-show\-touches
|
||||
Enable "show touches" on start, restore the initial value on exit.
|
||||
|
||||
It only shows physical touches (not clicks from scrcpy).
|
||||
|
||||
.TP
|
||||
.BI "\-\-tunnel\-host " ip
|
||||
Set the IP address of the adb tunnel to reach the scrcpy server. This option automatically enables --force-adb-forward.
|
||||
@ -340,6 +362,16 @@ Set the TCP port of the adb tunnel to reach the scrcpy server. This option autom
|
||||
|
||||
Default is 0 (not forced): the local port used for establishing the tunnel will be used.
|
||||
|
||||
.TP
|
||||
.B \-v, \-\-version
|
||||
Print the version of scrcpy.
|
||||
|
||||
.TP
|
||||
.BI "\-V, \-\-verbosity " value
|
||||
Set the log level ("verbose", "debug", "info", "warn" or "error").
|
||||
|
||||
Default is "info" for release builds, "debug" for debug builds.
|
||||
|
||||
.TP
|
||||
.BI "\-\-v4l2-sink " /dev/videoN
|
||||
Output to v4l2loopback device.
|
||||
@ -354,16 +386,6 @@ This option is similar to \fB\-\-display\-buffer\fR, but specific to V4L2 sink.
|
||||
|
||||
Default is 0 (no buffering).
|
||||
|
||||
.TP
|
||||
.BI "\-V, \-\-verbosity " value
|
||||
Set the log level ("verbose", "debug", "info", "warn" or "error").
|
||||
|
||||
Default is "info" for release builds, "debug" for debug builds.
|
||||
|
||||
.TP
|
||||
.B \-v, \-\-version
|
||||
Print the version of scrcpy.
|
||||
|
||||
.TP
|
||||
.BI "\-\-video\-codec " name
|
||||
Select a video codec (h264, h265 or av1).
|
||||
|
@@ -204,6 +204,7 @@ sc_adb_parse_device_ip(char *str) {
    while (str[idx_line] != '\0') {
        char *line = &str[idx_line];
        size_t len = strcspn(line, "\n");
        bool is_last_line = line[len] == '\0';

        // The same, but without any trailing '\r'
        size_t line_len = sc_str_remove_trailing_cr(line, len);
@@ -215,12 +216,12 @@ sc_adb_parse_device_ip(char *str) {
            return ip;
        }

        idx_line += len;

        if (str[idx_line] != '\0') {
            // The next line starts after the '\n'
            ++idx_line;
        if (is_last_line) {
            break;
        }

        // The next line starts after the '\n'
        idx_line += len + 1;
    }

    return NULL;
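The hunk above replaces the old "advance, then check for '\0'" scheme with an explicit is_last_line check, so the final line (which has no trailing '\n') is still visited before the loop exits. A minimal, self-contained sketch of the resulting control flow; visit_lines and the sample input are illustrative only, not part of scrcpy:

```c
#include <stdbool.h>
#include <stdio.h>
#include <string.h>

// Walk a '\n'-separated buffer, visiting every line exactly once,
// including a final line without a trailing newline.
static void
visit_lines(const char *str) {
    size_t idx_line = 0;
    while (str[idx_line] != '\0') {
        const char *line = &str[idx_line];
        size_t len = strcspn(line, "\n");
        bool is_last_line = line[len] == '\0';

        printf("line: %.*s\n", (int) len, line);

        if (is_last_line) {
            break;
        }

        // The next line starts after the '\n'
        idx_line += len + 1;
    }
}

int main(void) {
    // The last line is visited even though it has no trailing newline.
    visit_lines("first device line\nsecond device line");
    return 0;
}
```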
@@ -107,7 +107,7 @@ sc_audio_player_sdl_callback(void *userdata, uint8_t *stream, int len_int) {
        // latency.
        LOGD("[Audio] Buffer underflow, inserting silence: %" PRIu32 " samples",
             silence);
        memset(stream + read, 0, TO_BYTES(silence));
        memset(stream + TO_BYTES(read), 0, TO_BYTES(silence));

        if (ap->received) {
            // Inserting additional samples immediately increases buffering
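The one-line fix converts the offset from samples to bytes: `read` counts samples, while `stream` is a byte pointer, so the silence must start at the byte offset of the first unwritten sample. A small standalone sketch of the arithmetic; the 4-bytes-per-sample figure is only an assumption for illustration (in scrcpy it depends on the negotiated sample format and channel count):

```c
#include <stdint.h>
#include <stdio.h>
#include <string.h>

// Assumption for illustration only: 4 bytes per sample frame (stereo s16).
#define BYTES_PER_SAMPLE 4
#define TO_BYTES(samples) ((samples) * BYTES_PER_SAMPLE)

int main(void) {
    uint8_t stream[TO_BYTES(8)];
    memset(stream, 0xAA, sizeof(stream));  // pretend 8 samples were requested

    uint32_t read = 5;     // samples actually copied from the ring buffer
    uint32_t silence = 3;  // samples missing -> fill with zeros

    // Buggy version: offsets by 5 *bytes*, clobbering audio that was just
    // copied and leaving the real tail of the buffer untouched.
    // memset(stream + read, 0, TO_BYTES(silence));

    // Fixed version: offsets by 5 *samples* = 20 bytes.
    memset(stream + TO_BYTES(read), 0, TO_BYTES(silence));

    printf("silence starts at byte %u, length %u bytes\n",
           TO_BYTES(read), TO_BYTES(silence));
    return 0;
}
```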
app/src/cli.c
@ -72,6 +72,11 @@ enum {
|
||||
OPT_REQUIRE_AUDIO,
|
||||
OPT_AUDIO_BUFFER,
|
||||
OPT_AUDIO_OUTPUT_BUFFER,
|
||||
OPT_NO_DISPLAY,
|
||||
OPT_NO_VIDEO,
|
||||
OPT_NO_AUDIO_PLAYBACK,
|
||||
OPT_NO_VIDEO_PLAYBACK,
|
||||
OPT_AUDIO_SOURCE,
|
||||
};
|
||||
|
||||
struct sc_option {
|
||||
@ -130,16 +135,6 @@ static const struct sc_option options[] = {
|
||||
"likelyhood of buffer underrun (causing audio glitches).\n"
|
||||
"Default is 50.",
|
||||
},
|
||||
{
|
||||
.longopt_id = OPT_AUDIO_OUTPUT_BUFFER,
|
||||
.longopt = "audio-output-buffer",
|
||||
.argdesc = "ms",
|
||||
.text = "Configure the size of the SDL audio output buffer (in "
|
||||
"milliseconds).\n"
|
||||
"If you get \"robotic\" audio playback, you should test with "
|
||||
"a higher value (10). Do not change this setting otherwise.\n"
|
||||
"Default is 5.",
|
||||
},
|
||||
{
|
||||
.longopt_id = OPT_AUDIO_CODEC,
|
||||
.longopt = "audio-codec",
|
||||
@ -167,6 +162,23 @@ static const struct sc_option options[] = {
|
||||
"codec provided by --audio-codec).\n"
|
||||
"The available encoders can be listed by --list-encoders.",
|
||||
},
|
||||
{
|
||||
.longopt_id = OPT_AUDIO_SOURCE,
|
||||
.longopt = "audio-source",
|
||||
.argdesc = "source",
|
||||
.text = "Select the audio source (output or mic).\n"
|
||||
"Default is output.",
|
||||
},
|
||||
{
|
||||
.longopt_id = OPT_AUDIO_OUTPUT_BUFFER,
|
||||
.longopt = "audio-output-buffer",
|
||||
.argdesc = "ms",
|
||||
.text = "Configure the size of the SDL audio output buffer (in "
|
||||
"milliseconds).\n"
|
||||
"If you get \"robotic\" audio playback, you should test with "
|
||||
"a higher value (10). Do not change this setting otherwise.\n"
|
||||
"Default is 5.",
|
||||
},
|
||||
{
|
||||
.shortopt = 'b',
|
||||
.longopt = "video-bit-rate",
|
||||
@ -245,6 +257,11 @@ static const struct sc_option options[] = {
|
||||
.longopt = "encoder",
|
||||
.argdesc = "name",
|
||||
},
|
||||
{
|
||||
.shortopt = 'f',
|
||||
.longopt = "fullscreen",
|
||||
.text = "Start in fullscreen.",
|
||||
},
|
||||
{
|
||||
.longopt_id = OPT_FORCE_ADB_FORWARD,
|
||||
.longopt = "force-adb-forward",
|
||||
@ -258,11 +275,6 @@ static const struct sc_option options[] = {
|
||||
"middle-click triggers HOME. This option disables these "
|
||||
"shortcuts and forwards the clicks to the device instead.",
|
||||
},
|
||||
{
|
||||
.shortopt = 'f',
|
||||
.longopt = "fullscreen",
|
||||
.text = "Start in fullscreen.",
|
||||
},
|
||||
{
|
||||
.shortopt = 'K',
|
||||
.longopt = "hid-keyboard",
|
||||
@ -318,11 +330,13 @@ static const struct sc_option options[] = {
|
||||
"\"initial\".",
|
||||
},
|
||||
{
|
||||
.longopt_id = OPT_MAX_FPS,
|
||||
.longopt = "max-fps",
|
||||
.shortopt = 'm',
|
||||
.longopt = "max-size",
|
||||
.argdesc = "value",
|
||||
.text = "Limit the frame rate of screen capture (officially supported "
|
||||
"since Android 10, but may work on earlier versions).",
|
||||
.text = "Limit both the width and height of the video to value. The "
|
||||
"other dimension is computed so that the device aspect-ratio "
|
||||
"is preserved.\n"
|
||||
"Default is 0 (unlimited).",
|
||||
},
|
||||
{
|
||||
.shortopt = 'M',
|
||||
@ -336,19 +350,33 @@ static const struct sc_option options[] = {
|
||||
"Also see --hid-keyboard.",
|
||||
},
|
||||
{
|
||||
.shortopt = 'm',
|
||||
.longopt = "max-size",
|
||||
.longopt_id = OPT_MAX_FPS,
|
||||
.longopt = "max-fps",
|
||||
.argdesc = "value",
|
||||
.text = "Limit both the width and height of the video to value. The "
|
||||
"other dimension is computed so that the device aspect-ratio "
|
||||
"is preserved.\n"
|
||||
"Default is 0 (unlimited).",
|
||||
.text = "Limit the frame rate of screen capture (officially supported "
|
||||
"since Android 10, but may work on earlier versions).",
|
||||
},
|
||||
{
|
||||
.shortopt = 'n',
|
||||
.longopt = "no-control",
|
||||
.text = "Disable device control (mirror the device in read-only).",
|
||||
},
|
||||
{
|
||||
.shortopt = 'N',
|
||||
.longopt = "no-playback",
|
||||
.text = "Disable video and audio playback on the computer (equivalent "
|
||||
"to --no-video-playback --no-audio-playback).",
|
||||
},
|
||||
{
|
||||
.longopt_id = OPT_NO_AUDIO,
|
||||
.longopt = "no-audio",
|
||||
.text = "Disable audio forwarding.",
|
||||
},
|
||||
{
|
||||
.longopt_id = OPT_NO_AUDIO_PLAYBACK,
|
||||
.longopt = "no-audio-playback",
|
||||
.text = "Disable audio playback on the computer.",
|
||||
},
|
||||
{
|
||||
.longopt_id = OPT_NO_CLEANUP,
|
||||
.longopt = "no-cleanup",
|
||||
@ -374,15 +402,9 @@ static const struct sc_option options[] = {
|
||||
"This option disables this behavior.",
|
||||
},
|
||||
{
|
||||
.shortopt = 'n',
|
||||
.longopt = "no-control",
|
||||
.text = "Disable device control (mirror the device in read-only).",
|
||||
},
|
||||
{
|
||||
.shortopt = 'N',
|
||||
// deprecated
|
||||
.longopt_id = OPT_NO_DISPLAY,
|
||||
.longopt = "no-display",
|
||||
.text = "Do not display device (only when screen recording or V4L2 "
|
||||
"sink is enabled).",
|
||||
},
|
||||
{
|
||||
.longopt_id = OPT_NO_KEY_REPEAT,
|
||||
@ -401,6 +423,16 @@ static const struct sc_option options[] = {
|
||||
.longopt = "no-power-on",
|
||||
.text = "Do not power on the device on start.",
|
||||
},
|
||||
{
|
||||
.longopt_id = OPT_NO_VIDEO,
|
||||
.longopt = "no-video",
|
||||
.text = "Disable video forwarding.",
|
||||
},
|
||||
{
|
||||
.longopt_id = OPT_NO_VIDEO_PLAYBACK,
|
||||
.longopt = "no-video-playback",
|
||||
.text = "Disable video playback on the computer.",
|
||||
},
|
||||
{
|
||||
.longopt_id = OPT_OTG,
|
||||
.longopt = "otg",
|
||||
@ -452,11 +484,6 @@ static const struct sc_option options[] = {
|
||||
"drag & drop. It is passed as is to \"adb push\".\n"
|
||||
"Default is \"/sdcard/Download/\".",
|
||||
},
|
||||
{
|
||||
.longopt_id = OPT_RAW_KEY_EVENTS,
|
||||
.longopt = "raw-key-events",
|
||||
.text = "Inject key events for all input keys, and ignore text events."
|
||||
},
|
||||
{
|
||||
.shortopt = 'r',
|
||||
.longopt = "record",
|
||||
@ -465,6 +492,11 @@ static const struct sc_option options[] = {
|
||||
"The format is determined by the --record-format option if "
|
||||
"set, or by the file extension (.mp4 or .mkv).",
|
||||
},
|
||||
{
|
||||
.longopt_id = OPT_RAW_KEY_EVENTS,
|
||||
.longopt = "raw-key-events",
|
||||
.text = "Inject key events for all input keys, and ignore text events."
|
||||
},
|
||||
{
|
||||
.longopt_id = OPT_RECORD_FORMAT,
|
||||
.longopt = "record-format",
|
||||
@ -503,6 +535,11 @@ static const struct sc_option options[] = {
|
||||
.text = "The device serial number. Mandatory only if several devices "
|
||||
"are connected to adb.",
|
||||
},
|
||||
{
|
||||
.shortopt = 'S',
|
||||
.longopt = "turn-screen-off",
|
||||
.text = "Turn the device screen off immediately.",
|
||||
},
|
||||
{
|
||||
.longopt_id = OPT_SHORTCUT_MOD,
|
||||
.longopt = "shortcut-mod",
|
||||
@ -516,11 +553,6 @@ static const struct sc_option options[] = {
|
||||
"shortcuts, pass \"lctrl+lalt,lsuper\".\n"
|
||||
"Default is \"lalt,lsuper\" (left-Alt or left-Super).",
|
||||
},
|
||||
{
|
||||
.shortopt = 'S',
|
||||
.longopt = "turn-screen-off",
|
||||
.text = "Turn the device screen off immediately.",
|
||||
},
|
||||
{
|
||||
.shortopt = 't',
|
||||
.longopt = "show-touches",
|
||||
@ -561,6 +593,22 @@ static const struct sc_option options[] = {
|
||||
"Default is 0 (not forced): the local port used for "
|
||||
"establishing the tunnel will be used.",
|
||||
},
|
||||
{
|
||||
.shortopt = 'v',
|
||||
.longopt = "version",
|
||||
.text = "Print the version of scrcpy.",
|
||||
},
|
||||
{
|
||||
.shortopt = 'V',
|
||||
.longopt = "verbosity",
|
||||
.argdesc = "value",
|
||||
.text = "Set the log level (verbose, debug, info, warn or error).\n"
|
||||
#ifndef NDEBUG
|
||||
"Default is debug.",
|
||||
#else
|
||||
"Default is info.",
|
||||
#endif
|
||||
},
|
||||
{
|
||||
.longopt_id = OPT_V4L2_SINK,
|
||||
.longopt = "v4l2-sink",
|
||||
@ -581,22 +629,6 @@ static const struct sc_option options[] = {
|
||||
"Default is 0 (no buffering).\n"
|
||||
"This option is only available on Linux.",
|
||||
},
|
||||
{
|
||||
.shortopt = 'V',
|
||||
.longopt = "verbosity",
|
||||
.argdesc = "value",
|
||||
.text = "Set the log level (verbose, debug, info, warn or error).\n"
|
||||
#ifndef NDEBUG
|
||||
"Default is debug.",
|
||||
#else
|
||||
"Default is info.",
|
||||
#endif
|
||||
},
|
||||
{
|
||||
.shortopt = 'v',
|
||||
.longopt = "version",
|
||||
.text = "Print the version of scrcpy.",
|
||||
},
|
||||
{
|
||||
.longopt_id = OPT_VIDEO_CODEC,
|
||||
.longopt = "video-codec",
|
||||
@ -1467,18 +1499,39 @@ sc_parse_shortcut_mods(const char *s, struct sc_shortcut_mods *mods) {
|
||||
}
|
||||
#endif
|
||||
|
||||
static enum sc_record_format
|
||||
get_record_format(const char *name) {
|
||||
if (!strcmp(name, "mp4")) {
|
||||
return SC_RECORD_FORMAT_MP4;
|
||||
}
|
||||
if (!strcmp(name, "mkv")) {
|
||||
return SC_RECORD_FORMAT_MKV;
|
||||
}
|
||||
if (!strcmp(name, "m4a")) {
|
||||
return SC_RECORD_FORMAT_M4A;
|
||||
}
|
||||
if (!strcmp(name, "mka")) {
|
||||
return SC_RECORD_FORMAT_MKA;
|
||||
}
|
||||
if (!strcmp(name, "opus")) {
|
||||
return SC_RECORD_FORMAT_OPUS;
|
||||
}
|
||||
if (!strcmp(name, "aac")) {
|
||||
return SC_RECORD_FORMAT_AAC;
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
static bool
|
||||
parse_record_format(const char *optarg, enum sc_record_format *format) {
|
||||
if (!strcmp(optarg, "mp4")) {
|
||||
*format = SC_RECORD_FORMAT_MP4;
|
||||
return true;
|
||||
enum sc_record_format fmt = get_record_format(optarg);
|
||||
if (!fmt) {
|
||||
LOGE("Unsupported format: %s (expected mp4 or mkv)", optarg);
|
||||
return false;
|
||||
}
|
||||
if (!strcmp(optarg, "mkv")) {
|
||||
*format = SC_RECORD_FORMAT_MKV;
|
||||
return true;
|
||||
}
|
||||
LOGE("Unsupported format: %s (expected mp4 or mkv)", optarg);
|
||||
return false;
|
||||
|
||||
*format = fmt;
|
||||
return true;
|
||||
}
|
||||
|
||||
static bool
|
||||
@ -1498,18 +1551,13 @@ parse_port(const char *optarg, uint16_t *port) {
|
||||
|
||||
static enum sc_record_format
|
||||
guess_record_format(const char *filename) {
|
||||
size_t len = strlen(filename);
|
||||
if (len < 4) {
|
||||
const char *dot = strrchr(filename, '.');
|
||||
if (!dot) {
|
||||
return 0;
|
||||
}
|
||||
const char *ext = &filename[len - 4];
|
||||
if (!strcmp(ext, ".mp4")) {
|
||||
return SC_RECORD_FORMAT_MP4;
|
||||
}
|
||||
if (!strcmp(ext, ".mkv")) {
|
||||
return SC_RECORD_FORMAT_MKV;
|
||||
}
|
||||
return 0;
|
||||
|
||||
const char *ext = dot + 1;
|
||||
return get_record_format(ext);
|
||||
}
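With this refactor, parse_record_format and guess_record_format both funnel into get_record_format, and the extension is found with strrchr instead of assuming a fixed 4-character suffix, so longer extensions such as ".opus" now work. A standalone sketch of the extension-guessing path; the enum values and function bodies mirror the diff, while the surrounding scaffolding is illustrative only:

```c
#include <stdio.h>
#include <string.h>

enum sc_record_format {
    SC_RECORD_FORMAT_AUTO, // 0 doubles as "unknown"
    SC_RECORD_FORMAT_MP4,
    SC_RECORD_FORMAT_MKV,
    SC_RECORD_FORMAT_M4A,
    SC_RECORD_FORMAT_MKA,
    SC_RECORD_FORMAT_OPUS,
    SC_RECORD_FORMAT_AAC,
};

static enum sc_record_format
get_record_format(const char *name) {
    if (!strcmp(name, "mp4")) return SC_RECORD_FORMAT_MP4;
    if (!strcmp(name, "mkv")) return SC_RECORD_FORMAT_MKV;
    if (!strcmp(name, "m4a")) return SC_RECORD_FORMAT_M4A;
    if (!strcmp(name, "mka")) return SC_RECORD_FORMAT_MKA;
    if (!strcmp(name, "opus")) return SC_RECORD_FORMAT_OPUS;
    if (!strcmp(name, "aac")) return SC_RECORD_FORMAT_AAC;
    return 0;
}

static enum sc_record_format
guess_record_format(const char *filename) {
    // Use the last '.' so any extension length works (".mp4", ".opus", ...).
    const char *dot = strrchr(filename, '.');
    if (!dot) {
        return 0;
    }
    return get_record_format(dot + 1);
}

int main(void) {
    printf("%d\n", guess_record_format("capture.mkv")); // SC_RECORD_FORMAT_MKV
    printf("%d\n", guess_record_format("audio.opus"));  // SC_RECORD_FORMAT_OPUS
    printf("%d\n", guess_record_format("noextension")); // 0 (unknown)
    return 0;
}
```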
|
||||
|
||||
static bool
|
||||
@ -1548,6 +1596,22 @@ parse_audio_codec(const char *optarg, enum sc_codec *codec) {
|
||||
return false;
|
||||
}
|
||||
|
||||
static bool
|
||||
parse_audio_source(const char *optarg, enum sc_audio_source *source) {
|
||||
if (!strcmp(optarg, "mic")) {
|
||||
*source = SC_AUDIO_SOURCE_MIC;
|
||||
return true;
|
||||
}
|
||||
|
||||
if (!strcmp(optarg, "output")) {
|
||||
*source = SC_AUDIO_SOURCE_OUTPUT;
|
||||
return true;
|
||||
}
|
||||
|
||||
LOGE("Unsupported audio source: %s (expected output or mic)", optarg);
|
||||
return false;
|
||||
}
|
||||
|
||||
static bool
|
||||
parse_args_with_getopt(struct scrcpy_cli_args *args, int argc, char *argv[],
|
||||
const char *optstring, const struct option *longopts) {
|
||||
@ -1642,8 +1706,18 @@ parse_args_with_getopt(struct scrcpy_cli_args *args, int argc, char *argv[],
|
||||
case 'n':
|
||||
opts->control = false;
|
||||
break;
|
||||
case OPT_NO_DISPLAY:
|
||||
LOGW("--no-display is deprecated, use --no-playback instead.");
|
||||
// fall through
|
||||
case 'N':
|
||||
opts->display = false;
|
||||
opts->video_playback = false;
|
||||
opts->audio_playback = false;
|
||||
break;
|
||||
case OPT_NO_VIDEO_PLAYBACK:
|
||||
opts->video_playback = false;
|
||||
break;
|
||||
case OPT_NO_AUDIO_PLAYBACK:
|
||||
opts->audio_playback = false;
|
||||
break;
|
||||
case 'p':
|
||||
if (!parse_port_range(optarg, &opts->port_range)) {
|
||||
@ -1788,6 +1862,9 @@ parse_args_with_getopt(struct scrcpy_cli_args *args, int argc, char *argv[],
|
||||
case OPT_NO_DOWNSIZE_ON_ERROR:
|
||||
opts->downsize_on_error = false;
|
||||
break;
|
||||
case OPT_NO_VIDEO:
|
||||
opts->video = false;
|
||||
break;
|
||||
case OPT_NO_AUDIO:
|
||||
opts->audio = false;
|
||||
break;
|
||||
@ -1838,7 +1915,8 @@ parse_args_with_getopt(struct scrcpy_cli_args *args, int argc, char *argv[],
|
||||
}
|
||||
break;
|
||||
#else
|
||||
LOGE("V4L2 (--v4l2-buffer) is only available on Linux.");
|
||||
LOGE("V4L2 (--v4l2-buffer) is disabled (or unsupported on this "
|
||||
"platform).");
|
||||
return false;
|
||||
#endif
|
||||
case OPT_LIST_ENCODERS:
|
||||
@ -1861,6 +1939,11 @@ parse_args_with_getopt(struct scrcpy_cli_args *args, int argc, char *argv[],
|
||||
return false;
|
||||
}
|
||||
break;
|
||||
case OPT_AUDIO_SOURCE:
|
||||
if (!parse_audio_source(optarg, &opts->audio_source)) {
|
||||
return false;
|
||||
}
|
||||
break;
|
||||
default:
|
||||
// getopt prints the error message on stderr
|
||||
return false;
|
||||
@ -1889,14 +1972,52 @@ parse_args_with_getopt(struct scrcpy_cli_args *args, int argc, char *argv[],
|
||||
return false;
|
||||
}
|
||||
|
||||
bool otg = false;
|
||||
bool v4l2 = false;
|
||||
#ifdef HAVE_USB
|
||||
otg = opts->otg;
|
||||
#endif
|
||||
#ifdef HAVE_V4L2
|
||||
if (!opts->display && !opts->record_filename && !opts->v4l2_device) {
|
||||
LOGE("-N/--no-display requires either screen recording (-r/--record)"
|
||||
" or sink to v4l2loopback device (--v4l2-sink)");
|
||||
v4l2 = !!opts->v4l2_device;
|
||||
#endif
|
||||
|
||||
if (!opts->video) {
|
||||
opts->video_playback = false;
|
||||
}
|
||||
|
||||
if (!opts->audio) {
|
||||
opts->audio_playback = false;
|
||||
}
|
||||
|
||||
if (!opts->video_playback && !otg) {
|
||||
// If video playback and OTG are both disabled, then there is
// no way to control the device.
|
||||
opts->control = false;
|
||||
}
|
||||
|
||||
if (opts->video && !opts->video_playback && !opts->record_filename
|
||||
&& !v4l2) {
|
||||
LOGI("No video playback, no recording, no V4L2 sink: video disabled");
|
||||
opts->video = false;
|
||||
}
|
||||
|
||||
if (opts->audio && !opts->audio_playback && !opts->record_filename) {
|
||||
LOGI("No audio playback, no recording: audio disabled");
|
||||
opts->audio = false;
|
||||
}
|
||||
|
||||
if (!opts->video && !opts->audio && !otg) {
|
||||
LOGE("No video, no audio, no OTG: nothing to do");
|
||||
return false;
|
||||
}
|
||||
|
||||
if (opts->v4l2_device) {
|
||||
if (!opts->video && !otg) {
|
||||
// If video is disabled, then scrcpy must exit on audio failure.
|
||||
opts->require_audio = true;
|
||||
}
|
||||
|
||||
#ifdef HAVE_V4L2
|
||||
if (v4l2) {
|
||||
if (opts->lock_video_orientation ==
|
||||
SC_LOCK_VIDEO_ORIENTATION_UNLOCKED) {
|
||||
LOGI("Video orientation is locked for v4l2 sink. "
|
||||
@ -1914,18 +2035,8 @@ parse_args_with_getopt(struct scrcpy_cli_args *args, int argc, char *argv[],
|
||||
LOGE("V4L2 buffer value without V4L2 sink\n");
|
||||
return false;
|
||||
}
|
||||
#else
|
||||
if (!opts->display && !opts->record_filename) {
|
||||
LOGE("-N/--no-display requires screen recording (-r/--record)");
|
||||
return false;
|
||||
}
|
||||
#endif
|
||||
|
||||
if (opts->audio && !opts->display && !opts->record_filename) {
|
||||
LOGI("No display and no recording: audio disabled");
|
||||
opts->audio = false;
|
||||
}
|
||||
|
||||
if ((opts->tunnel_host || opts->tunnel_port) && !opts->force_adb_forward) {
|
||||
LOGI("Tunnel host/port is set, "
|
||||
"--force-adb-forward automatically enabled.");
|
||||
@ -1937,19 +2048,41 @@ parse_args_with_getopt(struct scrcpy_cli_args *args, int argc, char *argv[],
|
||||
return false;
|
||||
}
|
||||
|
||||
if (opts->record_filename && !opts->record_format) {
|
||||
opts->record_format = guess_record_format(opts->record_filename);
|
||||
if (opts->record_filename) {
|
||||
if (!opts->record_format) {
|
||||
LOGE("No format specified for \"%s\" "
|
||||
"(try with --record-format=mkv)",
|
||||
opts->record_filename);
|
||||
opts->record_format = guess_record_format(opts->record_filename);
|
||||
if (!opts->record_format) {
|
||||
LOGE("No format specified for \"%s\" "
|
||||
"(try with --record-format=mkv)",
|
||||
opts->record_filename);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
if (opts->audio_codec == SC_CODEC_RAW) {
|
||||
LOGW("Recording does not support RAW audio codec");
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
if (opts->record_filename && opts->audio_codec == SC_CODEC_RAW) {
|
||||
LOGW("Recording does not support RAW audio codec");
|
||||
return false;
|
||||
if (opts->video
|
||||
&& sc_record_format_is_audio_only(opts->record_format)) {
|
||||
LOGE("Audio container does not support video stream");
|
||||
return false;
|
||||
}
|
||||
|
||||
if (opts->record_format == SC_RECORD_FORMAT_OPUS
|
||||
&& opts->audio_codec != SC_CODEC_OPUS) {
|
||||
LOGE("Recording to OPUS file requires an OPUS audio stream "
|
||||
"(try with --audio-codec=opus)");
|
||||
return false;
|
||||
}
|
||||
|
||||
if (opts->record_format == SC_RECORD_FORMAT_AAC
|
||||
&& opts->audio_codec != SC_CODEC_AAC) {
|
||||
LOGE("Recording to AAC file requires an AAC audio stream "
|
||||
"(try with --audio-codec=aac)");
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
if (opts->audio_codec == SC_CODEC_RAW) {
|
||||
@ -1983,11 +2116,9 @@ parse_args_with_getopt(struct scrcpy_cli_args *args, int argc, char *argv[],
|
||||
}
|
||||
}
|
||||
|
||||
#ifdef HAVE_USB
|
||||
|
||||
# ifdef _WIN32
|
||||
if (!opts->otg && (opts->keyboard_input_mode == SC_KEYBOARD_INPUT_MODE_HID
|
||||
|| opts->mouse_input_mode == SC_MOUSE_INPUT_MODE_HID)) {
|
||||
if (!otg && (opts->keyboard_input_mode == SC_KEYBOARD_INPUT_MODE_HID
|
||||
|| opts->mouse_input_mode == SC_MOUSE_INPUT_MODE_HID)) {
|
||||
LOGE("On Windows, it is not possible to open a USB device already open "
|
||||
"by another process (like adb).");
|
||||
LOGE("Therefore, -K/--hid-keyboard and -M/--hid-mouse may only work in "
|
||||
@ -1996,7 +2127,7 @@ parse_args_with_getopt(struct scrcpy_cli_args *args, int argc, char *argv[],
|
||||
}
|
||||
# endif
|
||||
|
||||
if (opts->otg) {
|
||||
if (otg) {
|
||||
// OTG mode is compatible with only very few options.
|
||||
// Only report obvious errors.
|
||||
if (opts->record_filename) {
|
||||
@ -2023,14 +2154,11 @@ parse_args_with_getopt(struct scrcpy_cli_args *args, int argc, char *argv[],
|
||||
LOGE("OTG mode: could not select display");
|
||||
return false;
|
||||
}
|
||||
# ifdef HAVE_V4L2
|
||||
if (opts->v4l2_device) {
|
||||
if (v4l2) {
|
||||
LOGE("OTG mode: could not sink to V4L2 device");
|
||||
return false;
|
||||
}
|
||||
# endif
|
||||
}
|
||||
#endif
|
||||
|
||||
return true;
|
||||
}
|
||||
|
@@ -25,6 +25,12 @@
# define SCRCPY_LAVF_REQUIRES_REGISTER_ALL
#endif

// Not documented in ffmpeg/doc/APIchanges, but AV_CODEC_ID_AV1 has been added
// by FFmpeg commit d42809f9835a4e9e5c7c63210abb09ad0ef19cfb (included in tag
// n3.3).
#if LIBAVFORMAT_VERSION_INT >= AV_VERSION_INT(57, 89, 100)
# define SCRCPY_LAVC_HAS_AV1
#endif

// In ffmpeg/doc/APIchanges:
// 2018-01-28 - ea3672b7d6 - lavf 58.7.100 - avformat.h
@@ -33,7 +33,12 @@ sc_demuxer_to_avcodec_id(uint32_t codec_id) {
        case SC_CODEC_ID_H265:
            return AV_CODEC_ID_HEVC;
        case SC_CODEC_ID_AV1:
#ifdef SCRCPY_LAVC_HAS_AV1
            return AV_CODEC_ID_AV1;
#else
            LOGE("AV1 not supported by this FFmpeg version");
            return AV_CODEC_ID_NONE;
#endif
        case SC_CODEC_ID_OPUS:
            return AV_CODEC_ID_OPUS;
        case SC_CODEC_ID_AAC:
@@ -74,9 +79,8 @@ sc_demuxer_recv_video_size(struct sc_demuxer *demuxer, uint32_t *width,

static bool
sc_demuxer_recv_packet(struct sc_demuxer *demuxer, AVPacket *packet) {
    // The video stream contains raw packets, without time information. When we
    // record, we retrieve the timestamps separately, from a "meta" header
    // added by the server before each raw packet.
    // The video and audio streams contain a sequence of raw packets (as
    // provided by MediaCodec), each prefixed with a "meta" header.
    //
    // The "meta" header length is 12 bytes:
    // [. . . . . . . .|. . . .]. . . . . . . . . . . . . . . ...
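The ASCII diagram of the header is truncated here. Assuming the usual scrcpy packet-header layout (an 8-byte big-endian PTS field whose top bits carry config/key-frame flags, followed by a 4-byte big-endian payload length — this split is an assumption recalled from the scrcpy protocol, not stated in this excerpt), a minimal parsing sketch could look like this:

```c
#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>

// Assumed flag layout: config and key-frame flags in the top two bits of the
// 8-byte PTS field, the remaining bits holding the PTS itself.
#define SC_PACKET_FLAG_CONFIG    (UINT64_C(1) << 63)
#define SC_PACKET_FLAG_KEY_FRAME (UINT64_C(1) << 62)
#define SC_PACKET_PTS_MASK       (SC_PACKET_FLAG_KEY_FRAME - 1)

struct sc_packet_header {
    bool config;      // codec config packet (carries no real PTS)
    bool key_frame;
    uint64_t pts;
    uint32_t len;     // payload length following the 12-byte header
};

// Parse the assumed 12-byte header: 8-byte big-endian "PTS and flags"
// followed by a 4-byte big-endian payload length.
static void
parse_meta_header(const uint8_t h[12], struct sc_packet_header *out) {
    uint64_t pts_flags = 0;
    for (size_t i = 0; i < 8; ++i) {
        pts_flags = (pts_flags << 8) | h[i];
    }
    uint32_t len = 0;
    for (size_t i = 8; i < 12; ++i) {
        len = (len << 8) | h[i];
    }
    out->config = pts_flags & SC_PACKET_FLAG_CONFIG;
    out->key_frame = pts_flags & SC_PACKET_FLAG_KEY_FRAME;
    out->pts = pts_flags & SC_PACKET_PTS_MASK;
    out->len = len;
}
```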
app/src/display.c (new file, 285 lines)
@ -0,0 +1,285 @@
|
||||
#include "display.h"
|
||||
|
||||
#include <assert.h>
|
||||
|
||||
#include "util/log.h"
|
||||
|
||||
bool
|
||||
sc_display_init(struct sc_display *display, SDL_Window *window, bool mipmaps) {
|
||||
display->renderer =
|
||||
SDL_CreateRenderer(window, -1, SDL_RENDERER_ACCELERATED);
|
||||
if (!display->renderer) {
|
||||
LOGE("Could not create renderer: %s", SDL_GetError());
|
||||
return false;
|
||||
}
|
||||
|
||||
SDL_RendererInfo renderer_info;
|
||||
int r = SDL_GetRendererInfo(display->renderer, &renderer_info);
|
||||
const char *renderer_name = r ? NULL : renderer_info.name;
|
||||
LOGI("Renderer: %s", renderer_name ? renderer_name : "(unknown)");
|
||||
|
||||
display->mipmaps = false;
|
||||
|
||||
// starts with "opengl"
|
||||
bool use_opengl = renderer_name && !strncmp(renderer_name, "opengl", 6);
|
||||
if (use_opengl) {
|
||||
|
||||
#ifdef SC_DISPLAY_FORCE_OPENGL_CORE_PROFILE
|
||||
// Persuade macOS to give us something better than OpenGL 2.1.
|
||||
// If we create a Core Profile context, we get the best OpenGL version.
|
||||
SDL_GL_SetAttribute(SDL_GL_CONTEXT_PROFILE_MASK,
|
||||
SDL_GL_CONTEXT_PROFILE_CORE);
|
||||
|
||||
LOGD("Creating OpenGL Core Profile context");
|
||||
display->gl_context = SDL_GL_CreateContext(window);
|
||||
if (!display->gl_context) {
|
||||
LOGE("Could not create OpenGL context: %s", SDL_GetError());
|
||||
SDL_DestroyRenderer(display->renderer);
|
||||
return false;
|
||||
}
|
||||
#endif
|
||||
|
||||
struct sc_opengl *gl = &display->gl;
|
||||
sc_opengl_init(gl);
|
||||
|
||||
LOGI("OpenGL version: %s", gl->version);
|
||||
|
||||
if (mipmaps) {
|
||||
bool supports_mipmaps =
|
||||
sc_opengl_version_at_least(gl, 3, 0, /* OpenGL 3.0+ */
|
||||
2, 0 /* OpenGL ES 2.0+ */);
|
||||
if (supports_mipmaps) {
|
||||
LOGI("Trilinear filtering enabled");
|
||||
display->mipmaps = true;
|
||||
} else {
|
||||
LOGW("Trilinear filtering disabled "
|
||||
"(OpenGL 3.0+ or ES 2.0+ required");
|
||||
}
|
||||
} else {
|
||||
LOGI("Trilinear filtering disabled");
|
||||
}
|
||||
} else if (mipmaps) {
|
||||
LOGD("Trilinear filtering disabled (not an OpenGL renderer");
|
||||
}
|
||||
|
||||
display->pending.flags = 0;
|
||||
display->pending.frame = NULL;
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
void
|
||||
sc_display_destroy(struct sc_display *display) {
|
||||
if (display->pending.frame) {
|
||||
av_frame_free(&display->pending.frame);
|
||||
}
|
||||
#ifdef SC_DISPLAY_FORCE_OPENGL_CORE_PROFILE
|
||||
SDL_GL_DeleteContext(display->gl_context);
|
||||
#endif
|
||||
if (display->texture) {
|
||||
SDL_DestroyTexture(display->texture);
|
||||
}
|
||||
SDL_DestroyRenderer(display->renderer);
|
||||
}
|
||||
|
||||
static SDL_Texture *
|
||||
sc_display_create_texture(struct sc_display *display,
|
||||
struct sc_size size) {
|
||||
SDL_Renderer *renderer = display->renderer;
|
||||
SDL_Texture *texture = SDL_CreateTexture(renderer, SDL_PIXELFORMAT_YV12,
|
||||
SDL_TEXTUREACCESS_STREAMING,
|
||||
size.width, size.height);
|
||||
if (!texture) {
|
||||
LOGD("Could not create texture: %s", SDL_GetError());
|
||||
return NULL;
|
||||
}
|
||||
|
||||
if (display->mipmaps) {
|
||||
struct sc_opengl *gl = &display->gl;
|
||||
|
||||
SDL_GL_BindTexture(texture, NULL, NULL);
|
||||
|
||||
// Enable trilinear filtering for downscaling
|
||||
gl->TexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER,
|
||||
GL_LINEAR_MIPMAP_LINEAR);
|
||||
gl->TexParameterf(GL_TEXTURE_2D, GL_TEXTURE_LOD_BIAS, -1.f);
|
||||
|
||||
SDL_GL_UnbindTexture(texture);
|
||||
}
|
||||
|
||||
return texture;
|
||||
}
|
||||
|
||||
static inline void
|
||||
sc_display_set_pending_size(struct sc_display *display, struct sc_size size) {
|
||||
assert(!display->texture);
|
||||
display->pending.size = size;
|
||||
display->pending.flags |= SC_DISPLAY_PENDING_FLAG_SIZE;
|
||||
}
|
||||
|
||||
static bool
|
||||
sc_display_set_pending_frame(struct sc_display *display, const AVFrame *frame) {
|
||||
if (!display->pending.frame) {
|
||||
display->pending.frame = av_frame_alloc();
|
||||
if (!display->pending.frame) {
|
||||
LOG_OOM();
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
int r = av_frame_ref(display->pending.frame, frame);
|
||||
if (r) {
|
||||
LOGE("Could not ref frame: %d", r);
|
||||
return false;
|
||||
}
|
||||
|
||||
display->pending.flags |= SC_DISPLAY_PENDING_FLAG_FRAME;
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
static bool
|
||||
sc_display_apply_pending(struct sc_display *display) {
|
||||
if (display->pending.flags & SC_DISPLAY_PENDING_FLAG_SIZE) {
|
||||
assert(!display->texture);
|
||||
display->texture =
|
||||
sc_display_create_texture(display, display->pending.size);
|
||||
if (!display->texture) {
|
||||
return false;
|
||||
}
|
||||
|
||||
display->pending.flags &= ~SC_DISPLAY_PENDING_FLAG_SIZE;
|
||||
}
|
||||
|
||||
if (display->pending.flags & SC_DISPLAY_PENDING_FLAG_FRAME) {
|
||||
assert(display->pending.frame);
|
||||
bool ok = sc_display_update_texture(display, display->pending.frame);
|
||||
if (!ok) {
|
||||
return false;
|
||||
}
|
||||
|
||||
av_frame_unref(display->pending.frame);
|
||||
display->pending.flags &= ~SC_DISPLAY_PENDING_FLAG_FRAME;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
static bool
|
||||
sc_display_set_texture_size_internal(struct sc_display *display,
|
||||
struct sc_size size) {
|
||||
assert(size.width && size.height);
|
||||
|
||||
if (display->texture) {
|
||||
SDL_DestroyTexture(display->texture);
|
||||
}
|
||||
|
||||
display->texture = sc_display_create_texture(display, size);
|
||||
if (!display->texture) {
|
||||
return false;
|
||||
}
|
||||
|
||||
LOGI("Texture: %" PRIu16 "x%" PRIu16, size.width, size.height);
|
||||
return true;
|
||||
}
|
||||
|
||||
enum sc_display_result
|
||||
sc_display_set_texture_size(struct sc_display *display, struct sc_size size) {
|
||||
bool ok = sc_display_set_texture_size_internal(display, size);
|
||||
if (!ok) {
|
||||
sc_display_set_pending_size(display, size);
|
||||
return SC_DISPLAY_RESULT_PENDING;
|
||||
|
||||
}
|
||||
|
||||
return SC_DISPLAY_RESULT_OK;
|
||||
}
|
||||
|
||||
static bool
|
||||
sc_display_update_texture_internal(struct sc_display *display,
|
||||
const AVFrame *frame) {
|
||||
int ret = SDL_UpdateYUVTexture(display->texture, NULL,
|
||||
frame->data[0], frame->linesize[0],
|
||||
frame->data[1], frame->linesize[1],
|
||||
frame->data[2], frame->linesize[2]);
|
||||
if (ret) {
|
||||
LOGD("Could not update texture: %s", SDL_GetError());
|
||||
return false;
|
||||
}
|
||||
|
||||
if (display->mipmaps) {
|
||||
SDL_GL_BindTexture(display->texture, NULL, NULL);
|
||||
display->gl.GenerateMipmap(GL_TEXTURE_2D);
|
||||
SDL_GL_UnbindTexture(display->texture);
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
enum sc_display_result
|
||||
sc_display_update_texture(struct sc_display *display, const AVFrame *frame) {
|
||||
bool ok = sc_display_update_texture_internal(display, frame);
|
||||
if (!ok) {
|
||||
ok = sc_display_set_pending_frame(display, frame);
|
||||
if (!ok) {
|
||||
LOGE("Could not set pending frame");
|
||||
return SC_DISPLAY_RESULT_ERROR;
|
||||
}
|
||||
|
||||
return SC_DISPLAY_RESULT_PENDING;
|
||||
}
|
||||
|
||||
return SC_DISPLAY_RESULT_OK;
|
||||
}
|
||||
|
||||
enum sc_display_result
|
||||
sc_display_render(struct sc_display *display, const SDL_Rect *geometry,
|
||||
unsigned rotation) {
|
||||
SDL_RenderClear(display->renderer);
|
||||
|
||||
if (display->pending.flags) {
|
||||
bool ok = sc_display_apply_pending(display);
|
||||
if (!ok) {
|
||||
return SC_DISPLAY_RESULT_PENDING;
|
||||
}
|
||||
}
|
||||
|
||||
SDL_Renderer *renderer = display->renderer;
|
||||
SDL_Texture *texture = display->texture;
|
||||
|
||||
if (rotation == 0) {
|
||||
int ret = SDL_RenderCopy(renderer, texture, NULL, geometry);
|
||||
if (ret) {
|
||||
LOGE("Could not render texture: %s", SDL_GetError());
|
||||
return SC_DISPLAY_RESULT_ERROR;
|
||||
}
|
||||
} else {
|
||||
// rotation in RenderCopyEx() is clockwise, while screen->rotation is
|
||||
// counterclockwise (to be consistent with --lock-video-orientation)
|
||||
int cw_rotation = (4 - rotation) % 4;
|
||||
double angle = 90 * cw_rotation;
|
||||
|
||||
const SDL_Rect *dstrect = NULL;
|
||||
SDL_Rect rect;
|
||||
if (rotation & 1) {
|
||||
rect.x = geometry->x + (geometry->w - geometry->h) / 2;
|
||||
rect.y = geometry->y + (geometry->h - geometry->w) / 2;
|
||||
rect.w = geometry->h;
|
||||
rect.h = geometry->w;
|
||||
dstrect = ▭
|
||||
} else {
|
||||
assert(rotation == 2);
|
||||
dstrect = geometry;
|
||||
}
|
||||
|
||||
int ret = SDL_RenderCopyEx(renderer, texture, NULL, dstrect, angle,
|
||||
NULL, 0);
|
||||
if (ret) {
|
||||
LOGE("Could not render texture: %s", SDL_GetError());
|
||||
return SC_DISPLAY_RESULT_ERROR;
|
||||
}
|
||||
}
|
||||
|
||||
SDL_RenderPresent(display->renderer);
|
||||
return SC_DISPLAY_RESULT_OK;
|
||||
}
|
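sc_display_render() above maps the counterclockwise rotation value (0–3, consistent with --lock-video-orientation) to the clockwise angle that SDL_RenderCopyEx() expects, and swaps the destination rectangle for 90°/270° rotations. A tiny standalone sketch of just that mapping, with plain struct fields instead of SDL types (all names here are illustrative):

```c
#include <stdio.h>

struct rect { int x, y, w, h; };

// Map a counterclockwise rotation (0..3) to a clockwise angle and compute the
// destination rectangle: for odd rotations, width and height are swapped
// around the same center, as in the new sc_display_render().
static double
rotation_to_angle(unsigned rotation, const struct rect *geometry,
                  struct rect *dst) {
    unsigned cw_rotation = (4 - rotation) % 4;
    double angle = 90 * cw_rotation;

    if (rotation & 1) {
        dst->x = geometry->x + (geometry->w - geometry->h) / 2;
        dst->y = geometry->y + (geometry->h - geometry->w) / 2;
        dst->w = geometry->h;
        dst->h = geometry->w;
    } else {
        *dst = *geometry;
    }
    return angle;
}

int main(void) {
    struct rect geometry = {0, 0, 800, 360};
    struct rect dst;
    // rotation=1 (90° counterclockwise) -> 270° clockwise, 360x800 dstrect
    double angle = rotation_to_angle(1, &geometry, &dst);
    printf("angle=%g dst=%dx%d at (%d,%d)\n", angle, dst.w, dst.h, dst.x, dst.y);
    return 0;
}
```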
app/src/display.h (new file, 59 lines)

@@ -0,0 +1,59 @@
#ifndef SC_DISPLAY_H
#define SC_DISPLAY_H

#include "common.h"

#include <stdbool.h>
#include <libavformat/avformat.h>
#include <SDL2/SDL.h>

#include "coords.h"
#include "opengl.h"

#ifdef __APPLE__
# define SC_DISPLAY_FORCE_OPENGL_CORE_PROFILE
#endif

struct sc_display {
    SDL_Renderer *renderer;
    SDL_Texture *texture;

    struct sc_opengl gl;
#ifdef SC_DISPLAY_FORCE_OPENGL_CORE_PROFILE
    SDL_GLContext *gl_context;
#endif

    bool mipmaps;

    struct {
#define SC_DISPLAY_PENDING_FLAG_SIZE 1
#define SC_DISPLAY_PENDING_FLAG_FRAME 2
        int8_t flags;
        struct sc_size size;
        AVFrame *frame;
    } pending;
};

enum sc_display_result {
    SC_DISPLAY_RESULT_OK,
    SC_DISPLAY_RESULT_PENDING,
    SC_DISPLAY_RESULT_ERROR,
};

bool
sc_display_init(struct sc_display *display, SDL_Window *window, bool mipmaps);

void
sc_display_destroy(struct sc_display *display);

enum sc_display_result
sc_display_set_texture_size(struct sc_display *display, struct sc_size size);

enum sc_display_result
sc_display_update_texture(struct sc_display *display, const AVFrame *frame);

enum sc_display_result
sc_display_render(struct sc_display *display, const SDL_Rect *geometry,
                  unsigned rotation);

#endif
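The tri-state result lets callers distinguish a hard failure from a transiently failed operation that was stored as "pending" (in display->pending) and will be retried inside sc_display_render(). A hedged sketch of how a caller might react to each value; push_frame and the surrounding assumptions are illustrative, not taken from the diff:

```c
// Illustrative caller (assumes the display.h above is included): push a
// decoded frame to the display and decide whether the error is fatal.
// SC_DISPLAY_RESULT_PENDING is not an error: the frame is kept in
// display->pending and applied on the next successful render.
static bool
push_frame(struct sc_display *display, const AVFrame *frame) {
    enum sc_display_result res = sc_display_update_texture(display, frame);
    switch (res) {
        case SC_DISPLAY_RESULT_OK:
        case SC_DISPLAY_RESULT_PENDING:
            return true;   // keep going; pending work is retried later
        case SC_DISPLAY_RESULT_ERROR:
        default:
            return false;  // unrecoverable (e.g. out of memory)
    }
}
```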
@@ -797,7 +797,8 @@ sc_input_manager_process_file(struct sc_input_manager *im,
}

void
sc_input_manager_handle_event(struct sc_input_manager *im, SDL_Event *event) {
sc_input_manager_handle_event(struct sc_input_manager *im,
                              const SDL_Event *event) {
    bool control = im->controller;
    switch (event->type) {
        case SDL_TEXTINPUT:

@@ -61,6 +61,7 @@ sc_input_manager_init(struct sc_input_manager *im,
                      const struct sc_input_manager_params *params);

void
sc_input_manager_handle_event(struct sc_input_manager *im, SDL_Event *event);
sc_input_manager_handle_event(struct sc_input_manager *im,
                              const SDL_Event *event);

#endif
@@ -11,12 +11,10 @@ const struct scrcpy_options scrcpy_options_default = {
    .audio_codec_options = NULL,
    .video_encoder = NULL,
    .audio_encoder = NULL,
#ifdef HAVE_V4L2
    .v4l2_device = NULL,
#endif
    .log_level = SC_LOG_LEVEL_INFO,
    .video_codec = SC_CODEC_H264,
    .audio_codec = SC_CODEC_OPUS,
    .audio_source = SC_AUDIO_SOURCE_OUTPUT,
    .record_format = SC_RECORD_FORMAT_AUTO,
    .keyboard_input_mode = SC_KEYBOARD_INPUT_MODE_INJECT,
    .mouse_input_mode = SC_MOUSE_INPUT_MODE_INJECT,
@@ -42,9 +40,12 @@ const struct scrcpy_options scrcpy_options_default = {
    .window_height = 0,
    .display_id = 0,
    .display_buffer = 0,
    .v4l2_buffer = 0,
    .audio_buffer = SC_TICK_FROM_MS(50),
    .audio_output_buffer = SC_TICK_FROM_MS(5),
#ifdef HAVE_V4L2
    .v4l2_device = NULL,
    .v4l2_buffer = 0,
#endif
#ifdef HAVE_USB
    .otg = false,
#endif
@@ -52,7 +53,8 @@ const struct scrcpy_options scrcpy_options_default = {
    .fullscreen = false,
    .always_on_top = false,
    .control = true,
    .display = true,
    .video_playback = true,
    .audio_playback = true,
    .turn_screen_off = false,
    .key_inject_mode = SC_KEY_INJECT_MODE_MIXED,
    .window_borderless = false,
@@ -73,6 +75,7 @@ const struct scrcpy_options scrcpy_options_default = {
    .cleanup = true,
    .start_fps_counter = false,
    .power_on = true,
    .video = true,
    .audio = true,
    .require_audio = false,
    .list_encoders = false,
@@ -21,8 +21,20 @@ enum sc_record_format {
    SC_RECORD_FORMAT_AUTO,
    SC_RECORD_FORMAT_MP4,
    SC_RECORD_FORMAT_MKV,
    SC_RECORD_FORMAT_M4A,
    SC_RECORD_FORMAT_MKA,
    SC_RECORD_FORMAT_OPUS,
    SC_RECORD_FORMAT_AAC,
};

static inline bool
sc_record_format_is_audio_only(enum sc_record_format fmt) {
    return fmt == SC_RECORD_FORMAT_M4A
        || fmt == SC_RECORD_FORMAT_MKA
        || fmt == SC_RECORD_FORMAT_OPUS
        || fmt == SC_RECORD_FORMAT_AAC;
}

enum sc_codec {
    SC_CODEC_H264,
    SC_CODEC_H265,
@@ -32,6 +44,11 @@ enum sc_codec {
    SC_CODEC_RAW,
};

enum sc_audio_source {
    SC_AUDIO_SOURCE_OUTPUT,
    SC_AUDIO_SOURCE_MIC,
};

enum sc_lock_video_orientation {
    SC_LOCK_VIDEO_ORIENTATION_UNLOCKED = -1,
    // lock the current orientation when scrcpy starts
@@ -100,12 +117,10 @@ struct scrcpy_options {
    const char *audio_codec_options;
    const char *video_encoder;
    const char *audio_encoder;
#ifdef HAVE_V4L2
    const char *v4l2_device;
#endif
    enum sc_log_level log_level;
    enum sc_codec video_codec;
    enum sc_codec audio_codec;
    enum sc_audio_source audio_source;
    enum sc_record_format record_format;
    enum sc_keyboard_input_mode keyboard_input_mode;
    enum sc_mouse_input_mode mouse_input_mode;
@@ -125,9 +140,12 @@ struct scrcpy_options {
    uint16_t window_height;
    uint32_t display_id;
    sc_tick display_buffer;
    sc_tick v4l2_buffer;
    sc_tick audio_buffer;
    sc_tick audio_output_buffer;
#ifdef HAVE_V4L2
    const char *v4l2_device;
    sc_tick v4l2_buffer;
#endif
#ifdef HAVE_USB
    bool otg;
#endif
@@ -135,7 +153,8 @@ struct scrcpy_options {
    bool fullscreen;
    bool always_on_top;
    bool control;
    bool display;
    bool video_playback;
    bool audio_playback;
    bool turn_screen_off;
    enum sc_key_inject_mode key_inject_mode;
    bool window_borderless;
@@ -156,6 +175,7 @@ struct scrcpy_options {
    bool cleanup;
    bool start_fps_counter;
    bool power_on;
    bool video;
    bool audio;
    bool require_audio;
    bool list_encoders;
@ -60,9 +60,17 @@ sc_recorder_queue_clear(struct sc_recorder_queue *queue) {
|
||||
static const char *
|
||||
sc_recorder_get_format_name(enum sc_record_format format) {
|
||||
switch (format) {
|
||||
case SC_RECORD_FORMAT_MP4: return "mp4";
|
||||
case SC_RECORD_FORMAT_MKV: return "matroska";
|
||||
default: return NULL;
|
||||
case SC_RECORD_FORMAT_MP4:
|
||||
case SC_RECORD_FORMAT_M4A:
|
||||
case SC_RECORD_FORMAT_AAC:
|
||||
return "mp4";
|
||||
case SC_RECORD_FORMAT_MKV:
|
||||
case SC_RECORD_FORMAT_MKA:
|
||||
return "matroska";
|
||||
case SC_RECORD_FORMAT_OPUS:
|
||||
return "opus";
|
||||
default:
|
||||
return NULL;
|
||||
}
|
||||
}
|
||||
|
||||
@ -88,23 +96,29 @@ sc_recorder_rescale_packet(AVStream *stream, AVPacket *packet) {
|
||||
}
|
||||
|
||||
static bool
|
||||
sc_recorder_write_stream(struct sc_recorder *recorder, int stream_index,
|
||||
AVPacket *packet) {
|
||||
AVStream *stream = recorder->ctx->streams[stream_index];
|
||||
sc_recorder_write_stream(struct sc_recorder *recorder,
|
||||
struct sc_recorder_stream *st, AVPacket *packet) {
|
||||
AVStream *stream = recorder->ctx->streams[st->index];
|
||||
sc_recorder_rescale_packet(stream, packet);
|
||||
if (st->last_pts != AV_NOPTS_VALUE && packet->pts <= st->last_pts) {
|
||||
LOGW("Fixing PTS non monotonically increasing "
|
||||
"(%" PRIi64 " >= %" PRIi64 ")", st->last_pts, packet->pts);
|
||||
packet->pts = ++st->last_pts;
|
||||
packet->dts = packet->pts;
|
||||
} else {
|
||||
st->last_pts = packet->pts;
|
||||
}
|
||||
return av_interleaved_write_frame(recorder->ctx, packet) >= 0;
|
||||
}
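The reworked sc_recorder_write_stream() above tracks the last PTS per stream and rewrites any non-increasing timestamp to last_pts + 1, so muxing no longer fails on duplicate or backwards PTS. A small numeric sketch of that policy; the struct and sample values are illustrative only:

```c
#include <stdint.h>
#include <stdio.h>

#define NOPTS INT64_MIN  // stand-in for FFmpeg's AV_NOPTS_VALUE

struct stream_sketch {
    int64_t last_pts;
};

// Enforce strictly increasing PTS, mirroring the policy in the diff:
// a packet whose PTS is <= the previous one is rewritten to last_pts + 1.
static int64_t
fix_pts(struct stream_sketch *st, int64_t pts) {
    if (st->last_pts != NOPTS && pts <= st->last_pts) {
        pts = ++st->last_pts;
    } else {
        st->last_pts = pts;
    }
    return pts;
}

int main(void) {
    struct stream_sketch st = { .last_pts = NOPTS };
    int64_t in[] = {100, 100, 90, 200};
    for (int i = 0; i < 4; ++i) {
        printf("%lld -> %lld\n", (long long) in[i],
               (long long) fix_pts(&st, in[i]));
    }
    // Output: 100 -> 100, 100 -> 101, 90 -> 102, 200 -> 200
    return 0;
}
```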
|
||||
|
||||
static inline bool
|
||||
sc_recorder_write_video(struct sc_recorder *recorder, AVPacket *packet) {
|
||||
return sc_recorder_write_stream(recorder, recorder->video_stream_index,
|
||||
packet);
|
||||
return sc_recorder_write_stream(recorder, &recorder->video_stream, packet);
|
||||
}
|
||||
|
||||
static inline bool
|
||||
sc_recorder_write_audio(struct sc_recorder *recorder, AVPacket *packet) {
|
||||
return sc_recorder_write_stream(recorder, recorder->audio_stream_index,
|
||||
packet);
|
||||
return sc_recorder_write_stream(recorder, &recorder->audio_stream, packet);
|
||||
}
|
||||
|
||||
static bool
|
||||
@ -152,7 +166,7 @@ sc_recorder_close_output_file(struct sc_recorder *recorder) {
|
||||
|
||||
static inline bool
|
||||
sc_recorder_has_empty_queues(struct sc_recorder *recorder) {
|
||||
if (sc_vecdeque_is_empty(&recorder->video_queue)) {
|
||||
if (recorder->video && sc_vecdeque_is_empty(&recorder->video_queue)) {
|
||||
// The video queue is empty
|
||||
return true;
|
||||
}
|
||||
@ -170,13 +184,14 @@ static bool
|
||||
sc_recorder_process_header(struct sc_recorder *recorder) {
|
||||
sc_mutex_lock(&recorder->mutex);
|
||||
|
||||
while (!recorder->stopped && (!recorder->video_init
|
||||
|| !recorder->audio_init
|
||||
|| sc_recorder_has_empty_queues(recorder))) {
|
||||
sc_cond_wait(&recorder->stream_cond, &recorder->mutex);
|
||||
while (!recorder->stopped &&
|
||||
((recorder->video && !recorder->video_init)
|
||||
|| (recorder->audio && !recorder->audio_init)
|
||||
|| sc_recorder_has_empty_queues(recorder))) {
|
||||
sc_cond_wait(&recorder->cond, &recorder->mutex);
|
||||
}
|
||||
|
||||
if (sc_vecdeque_is_empty(&recorder->video_queue)) {
|
||||
if (recorder->video && sc_vecdeque_is_empty(&recorder->video_queue)) {
|
||||
assert(recorder->stopped);
|
||||
// If the recorder is stopped, don't process anything if there are not
|
||||
// at least video packets
|
||||
@ -184,7 +199,11 @@ sc_recorder_process_header(struct sc_recorder *recorder) {
|
||||
return false;
|
||||
}
|
||||
|
||||
AVPacket *video_pkt = sc_vecdeque_pop(&recorder->video_queue);
|
||||
AVPacket *video_pkt = NULL;
|
||||
if (!sc_vecdeque_is_empty(&recorder->video_queue)) {
|
||||
assert(recorder->video);
|
||||
video_pkt = sc_vecdeque_pop(&recorder->video_queue);
|
||||
}
|
||||
|
||||
AVPacket *audio_pkt = NULL;
|
||||
if (!sc_vecdeque_is_empty(&recorder->audio_queue)) {
|
||||
@ -196,17 +215,19 @@ sc_recorder_process_header(struct sc_recorder *recorder) {
|
||||
|
||||
int ret = false;
|
||||
|
||||
if (video_pkt->pts != AV_NOPTS_VALUE) {
|
||||
LOGE("The first video packet is not a config packet");
|
||||
goto end;
|
||||
}
|
||||
if (video_pkt) {
|
||||
if (video_pkt->pts != AV_NOPTS_VALUE) {
|
||||
LOGE("The first video packet is not a config packet");
|
||||
goto end;
|
||||
}
|
||||
|
||||
assert(recorder->video_stream_index >= 0);
|
||||
AVStream *video_stream =
|
||||
recorder->ctx->streams[recorder->video_stream_index];
|
||||
bool ok = sc_recorder_set_extradata(video_stream, video_pkt);
|
||||
if (!ok) {
|
||||
goto end;
|
||||
assert(recorder->video_stream.index >= 0);
|
||||
AVStream *video_stream =
|
||||
recorder->ctx->streams[recorder->video_stream.index];
|
||||
bool ok = sc_recorder_set_extradata(video_stream, video_pkt);
|
||||
if (!ok) {
|
||||
goto end;
|
||||
}
|
||||
}
|
||||
|
||||
if (audio_pkt) {
|
||||
@ -215,16 +236,16 @@ sc_recorder_process_header(struct sc_recorder *recorder) {
|
||||
goto end;
|
||||
}
|
||||
|
||||
assert(recorder->audio_stream_index >= 0);
|
||||
assert(recorder->audio_stream.index >= 0);
|
||||
AVStream *audio_stream =
|
||||
recorder->ctx->streams[recorder->audio_stream_index];
|
||||
ok = sc_recorder_set_extradata(audio_stream, audio_pkt);
|
||||
recorder->ctx->streams[recorder->audio_stream.index];
|
||||
bool ok = sc_recorder_set_extradata(audio_stream, audio_pkt);
|
||||
if (!ok) {
|
||||
goto end;
|
||||
}
|
||||
}
|
||||
|
||||
ok = avformat_write_header(recorder->ctx, NULL) >= 0;
|
||||
bool ok = avformat_write_header(recorder->ctx, NULL) >= 0;
|
||||
if (!ok) {
|
||||
LOGE("Failed to write header to %s", recorder->filename);
|
||||
goto end;
|
||||
@ -233,7 +254,9 @@ sc_recorder_process_header(struct sc_recorder *recorder) {
|
||||
ret = true;
|
||||
|
||||
end:
|
||||
av_packet_free(&video_pkt);
|
||||
if (video_pkt) {
|
||||
av_packet_free(&video_pkt);
|
||||
}
|
||||
if (audio_pkt) {
|
||||
av_packet_free(&audio_pkt);
|
||||
}
|
||||
@ -263,7 +286,8 @@ sc_recorder_process_packets(struct sc_recorder *recorder) {
|
||||
sc_mutex_lock(&recorder->mutex);
|
||||
|
||||
while (!recorder->stopped) {
|
||||
if (!video_pkt && !sc_vecdeque_is_empty(&recorder->video_queue)) {
|
||||
if (recorder->video && !video_pkt &&
|
||||
!sc_vecdeque_is_empty(&recorder->video_queue)) {
|
||||
// A new packet may be assigned to video_pkt and be processed
|
||||
break;
|
||||
}
|
||||
@ -272,12 +296,17 @@ sc_recorder_process_packets(struct sc_recorder *recorder) {
|
||||
// A new packet may be assigned to audio_pkt and be processed
|
||||
break;
|
||||
}
|
||||
sc_cond_wait(&recorder->queue_cond, &recorder->mutex);
|
||||
sc_cond_wait(&recorder->cond, &recorder->mutex);
|
||||
}
|
||||
|
||||
// If stopped is set, continue to process the remaining events (to
|
||||
// finish the recording) before actually stopping.
|
||||
|
||||
// If there is no video, then the video_queue will remain empty forever
|
||||
// and video_pkt will always be NULL.
|
||||
assert(recorder->video || (!video_pkt
|
||||
&& sc_vecdeque_is_empty(&recorder->video_queue)));
|
||||
|
||||
// If there is no audio, then the audio_queue will remain empty forever
|
||||
// and audio_pkt will always be NULL.
|
||||
assert(recorder->audio || (!audio_pkt
|
||||
@ -319,6 +348,9 @@ sc_recorder_process_packets(struct sc_recorder *recorder) {
|
||||
if (!recorder->audio) {
|
||||
assert(video_pkt);
|
||||
pts_origin = video_pkt->pts;
|
||||
} else if (!recorder->video) {
|
||||
assert(audio_pkt);
|
||||
pts_origin = audio_pkt->pts;
|
||||
} else if (video_pkt && audio_pkt) {
|
||||
pts_origin = MIN(video_pkt->pts, audio_pkt->pts);
|
||||
} else if (recorder->stopped) {
|
||||
@ -479,10 +511,10 @@ sc_recorder_video_packet_sink_open(struct sc_packet_sink *sink,
|
||||
return false;
|
||||
}
|
||||
|
||||
recorder->video_stream_index = stream->index;
|
||||
recorder->video_stream.index = stream->index;
|
||||
|
||||
recorder->video_init = true;
|
||||
sc_cond_signal(&recorder->stream_cond);
|
||||
sc_cond_signal(&recorder->cond);
|
||||
sc_mutex_unlock(&recorder->mutex);
|
||||
|
||||
return true;
|
||||
@ -497,7 +529,7 @@ sc_recorder_video_packet_sink_close(struct sc_packet_sink *sink) {
|
||||
sc_mutex_lock(&recorder->mutex);
|
||||
// EOS also stops the recorder
|
||||
recorder->stopped = true;
|
||||
sc_cond_signal(&recorder->queue_cond);
|
||||
sc_cond_signal(&recorder->cond);
|
||||
sc_mutex_unlock(&recorder->mutex);
|
||||
}
|
||||
|
||||
@ -523,7 +555,7 @@ sc_recorder_video_packet_sink_push(struct sc_packet_sink *sink,
|
||||
return false;
|
||||
}
|
||||
|
||||
rec->stream_index = recorder->video_stream_index;
|
||||
rec->stream_index = recorder->video_stream.index;
|
||||
|
||||
bool ok = sc_vecdeque_push(&recorder->video_queue, rec);
|
||||
if (!ok) {
|
||||
@ -532,7 +564,7 @@ sc_recorder_video_packet_sink_push(struct sc_packet_sink *sink,
|
||||
return false;
|
||||
}
|
||||
|
||||
sc_cond_signal(&recorder->queue_cond);
|
||||
sc_cond_signal(&recorder->cond);
|
||||
|
||||
sc_mutex_unlock(&recorder->mutex);
|
||||
return true;
|
||||
@ -560,10 +592,10 @@ sc_recorder_audio_packet_sink_open(struct sc_packet_sink *sink,
|
||||
return false;
|
||||
}
|
||||
|
||||
recorder->audio_stream_index = stream->index;
|
||||
recorder->audio_stream.index = stream->index;
|
||||
|
||||
recorder->audio_init = true;
|
||||
sc_cond_signal(&recorder->stream_cond);
|
||||
sc_cond_signal(&recorder->cond);
|
||||
sc_mutex_unlock(&recorder->mutex);
|
||||
|
||||
return true;
|
||||
@ -579,7 +611,7 @@ sc_recorder_audio_packet_sink_close(struct sc_packet_sink *sink) {
|
||||
sc_mutex_lock(&recorder->mutex);
|
||||
// EOS also stops the recorder
|
||||
recorder->stopped = true;
|
||||
sc_cond_signal(&recorder->queue_cond);
|
||||
sc_cond_signal(&recorder->cond);
|
||||
sc_mutex_unlock(&recorder->mutex);
|
||||
}
|
||||
|
||||
@ -606,7 +638,7 @@ sc_recorder_audio_packet_sink_push(struct sc_packet_sink *sink,
|
||||
return false;
|
||||
}
|
||||
|
||||
rec->stream_index = recorder->audio_stream_index;
|
||||
rec->stream_index = recorder->audio_stream.index;
|
||||
|
||||
bool ok = sc_vecdeque_push(&recorder->audio_queue, rec);
|
||||
if (!ok) {
|
||||
@ -615,7 +647,7 @@ sc_recorder_audio_packet_sink_push(struct sc_packet_sink *sink,
|
||||
return false;
|
||||
}
|
||||
|
||||
sc_cond_signal(&recorder->queue_cond);
|
||||
sc_cond_signal(&recorder->cond);
|
||||
|
||||
sc_mutex_unlock(&recorder->mutex);
|
||||
return true;
|
||||
@ -633,13 +665,19 @@ sc_recorder_audio_packet_sink_disable(struct sc_packet_sink *sink) {
|
||||
sc_mutex_lock(&recorder->mutex);
|
||||
recorder->audio = false;
|
||||
recorder->audio_init = true;
|
||||
sc_cond_signal(&recorder->stream_cond);
|
||||
sc_cond_signal(&recorder->cond);
|
||||
sc_mutex_unlock(&recorder->mutex);
|
||||
}
|
||||
|
||||
static void
|
||||
sc_recorder_stream_init(struct sc_recorder_stream *stream) {
|
||||
stream->index = -1;
|
||||
stream->last_pts = AV_NOPTS_VALUE;
|
||||
}
|
||||
|
||||
bool
|
||||
sc_recorder_init(struct sc_recorder *recorder, const char *filename,
|
||||
enum sc_record_format format, bool audio,
|
||||
enum sc_record_format format, bool video, bool audio,
|
||||
const struct sc_recorder_callbacks *cbs, void *cbs_userdata) {
|
||||
recorder->filename = strdup(filename);
|
||||
if (!recorder->filename) {
|
||||
@ -652,16 +690,13 @@ sc_recorder_init(struct sc_recorder *recorder, const char *filename,
|
||||
goto error_free_filename;
|
||||
}
|
||||
|
||||
ok = sc_cond_init(&recorder->queue_cond);
|
||||
ok = sc_cond_init(&recorder->cond);
|
||||
if (!ok) {
|
||||
goto error_mutex_destroy;
|
||||
}
|
||||
|
||||
ok = sc_cond_init(&recorder->stream_cond);
|
||||
if (!ok) {
|
||||
goto error_queue_cond_destroy;
|
||||
}
|
||||
|
||||
assert(video || audio);
|
||||
recorder->video = video;
|
||||
recorder->audio = audio;
|
||||
|
||||
sc_vecdeque_init(&recorder->video_queue);
|
||||
@ -671,8 +706,8 @@ sc_recorder_init(struct sc_recorder *recorder, const char *filename,
|
||||
recorder->video_init = false;
|
||||
recorder->audio_init = false;
|
||||
|
||||
recorder->video_stream_index = -1;
|
||||
recorder->audio_stream_index = -1;
|
||||
sc_recorder_stream_init(&recorder->video_stream);
|
||||
sc_recorder_stream_init(&recorder->audio_stream);
|
||||
|
||||
recorder->format = format;
|
||||
|
||||
@ -680,13 +715,15 @@ sc_recorder_init(struct sc_recorder *recorder, const char *filename,
|
||||
recorder->cbs = cbs;
|
||||
recorder->cbs_userdata = cbs_userdata;
|
||||
|
||||
static const struct sc_packet_sink_ops video_ops = {
|
||||
.open = sc_recorder_video_packet_sink_open,
|
||||
.close = sc_recorder_video_packet_sink_close,
|
||||
.push = sc_recorder_video_packet_sink_push,
|
||||
};
|
||||
if (video) {
|
||||
static const struct sc_packet_sink_ops video_ops = {
|
||||
.open = sc_recorder_video_packet_sink_open,
|
||||
.close = sc_recorder_video_packet_sink_close,
|
||||
.push = sc_recorder_video_packet_sink_push,
|
||||
};
|
||||
|
||||
recorder->video_packet_sink.ops = &video_ops;
|
||||
recorder->video_packet_sink.ops = &video_ops;
|
||||
}
|
||||
|
||||
if (audio) {
|
||||
static const struct sc_packet_sink_ops audio_ops = {
|
||||
@ -701,8 +738,6 @@ sc_recorder_init(struct sc_recorder *recorder, const char *filename,
|
||||
|
||||
return true;
|
||||
|
||||
error_queue_cond_destroy:
|
||||
sc_cond_destroy(&recorder->queue_cond);
|
||||
error_mutex_destroy:
|
||||
sc_mutex_destroy(&recorder->mutex);
|
||||
error_free_filename:
|
||||
@ -727,8 +762,7 @@ void
|
||||
sc_recorder_stop(struct sc_recorder *recorder) {
|
||||
sc_mutex_lock(&recorder->mutex);
|
||||
recorder->stopped = true;
|
||||
sc_cond_signal(&recorder->queue_cond);
|
||||
sc_cond_signal(&recorder->stream_cond);
|
||||
sc_cond_signal(&recorder->cond);
|
||||
sc_mutex_unlock(&recorder->mutex);
|
||||
}
|
||||
|
||||
@ -739,8 +773,7 @@ sc_recorder_join(struct sc_recorder *recorder) {
|
||||
|
||||
void
|
||||
sc_recorder_destroy(struct sc_recorder *recorder) {
|
||||
sc_cond_destroy(&recorder->stream_cond);
|
||||
sc_cond_destroy(&recorder->queue_cond);
|
||||
sc_cond_destroy(&recorder->cond);
|
||||
sc_mutex_destroy(&recorder->mutex);
|
||||
free(recorder->filename);
|
||||
}
|
||||
|
@ -14,6 +14,11 @@
|
||||
|
||||
struct sc_recorder_queue SC_VECDEQUE(AVPacket *);

struct sc_recorder_stream {
int index;
int64_t last_pts;
};

struct sc_recorder {
struct sc_packet_sink video_packet_sink;
struct sc_packet_sink audio_packet_sink;
@ -27,6 +32,7 @@ struct sc_recorder {
* may access it without data races.
*/
bool audio;
bool video;

char *filename;
enum sc_record_format format;
@ -34,19 +40,18 @@ struct sc_recorder {

sc_thread thread;
sc_mutex mutex;
sc_cond queue_cond;
sc_cond cond;
// set on sc_recorder_stop(), packet_sink close or recording failure
bool stopped;
struct sc_recorder_queue video_queue;
struct sc_recorder_queue audio_queue;

// wake up the recorder thread once the video or audio codec is known
sc_cond stream_cond;
bool video_init;
bool audio_init;

int video_stream_index;
int audio_stream_index;
struct sc_recorder_stream video_stream;
struct sc_recorder_stream audio_stream;

const struct sc_recorder_callbacks *cbs;
void *cbs_userdata;
@ -59,7 +64,7 @@ struct sc_recorder_callbacks {

bool
sc_recorder_init(struct sc_recorder *recorder, const char *filename,
enum sc_record_format format, bool audio,
enum sc_record_format format, bool video, bool audio,
const struct sc_recorder_callbacks *cbs, void *cbs_userdata);

bool
app/src/scrcpy.c (114 changed lines)
@ -137,7 +137,7 @@ sdl_set_hints(const char *render_driver) {
|
||||
}
|
||||
|
||||
static void
|
||||
sdl_configure(bool display, bool disable_screensaver) {
|
||||
sdl_configure(bool video_playback, bool disable_screensaver) {
|
||||
#ifdef _WIN32
|
||||
// Clean up properly on Ctrl+C on Windows
|
||||
bool ok = SetConsoleCtrlHandler(windows_ctrl_handler, TRUE);
|
||||
@ -146,7 +146,7 @@ sdl_configure(bool display, bool disable_screensaver) {
|
||||
}
|
||||
#endif // _WIN32
|
||||
|
||||
if (!display) {
|
||||
if (!video_playback) {
|
||||
return;
|
||||
}
|
||||
|
||||
@ -334,6 +334,7 @@ scrcpy(struct scrcpy_options *options) {
|
||||
.log_level = options->log_level,
|
||||
.video_codec = options->video_codec,
|
||||
.audio_codec = options->audio_codec,
|
||||
.audio_source = options->audio_source,
|
||||
.crop = options->crop,
|
||||
.port_range = options->port_range,
|
||||
.tunnel_host = options->tunnel_host,
|
||||
@ -345,6 +346,7 @@ scrcpy(struct scrcpy_options *options) {
|
||||
.lock_video_orientation = options->lock_video_orientation,
|
||||
.control = options->control,
|
||||
.display_id = options->display_id,
|
||||
.video = options->video,
|
||||
.audio = options->audio,
|
||||
.show_touches = options->show_touches,
|
||||
.stay_awake = options->stay_awake,
|
||||
@ -385,24 +387,26 @@ scrcpy(struct scrcpy_options *options) {
|
||||
goto end;
|
||||
}
|
||||
|
||||
if (options->display) {
|
||||
sdl_set_hints(options->render_driver);
|
||||
}
|
||||
// playback implies capture
|
||||
assert(!options->video_playback || options->video);
|
||||
assert(!options->audio_playback || options->audio);
|
||||
|
||||
// Initialize SDL video in addition if display is enabled
|
||||
if (options->display) {
|
||||
if (options->video_playback) {
|
||||
sdl_set_hints(options->render_driver);
|
||||
if (SDL_Init(SDL_INIT_VIDEO)) {
|
||||
LOGE("Could not initialize SDL video: %s", SDL_GetError());
|
||||
goto end;
|
||||
}
|
||||
}
|
||||
|
||||
if (options->audio && SDL_Init(SDL_INIT_AUDIO)) {
|
||||
if (options->audio_playback) {
|
||||
if (SDL_Init(SDL_INIT_AUDIO)) {
|
||||
LOGE("Could not initialize SDL audio: %s", SDL_GetError());
|
||||
goto end;
|
||||
}
|
||||
}
|
||||
|
||||
sdl_configure(options->display, options->disable_screensaver);
|
||||
sdl_configure(options->video_playback, options->disable_screensaver);
|
||||
|
||||
// Await for server without blocking Ctrl+C handling
|
||||
bool connected;
|
||||
@ -428,7 +432,9 @@ scrcpy(struct scrcpy_options *options) {
|
||||
|
||||
struct sc_file_pusher *fp = NULL;
|
||||
|
||||
if (options->display && options->control) {
|
||||
// control implies video playback
|
||||
assert(!options->control || options->video_playback);
|
||||
if (options->control) {
|
||||
if (!sc_file_pusher_init(&s->file_pusher, serial,
|
||||
options->push_target)) {
|
||||
goto end;
|
||||
@ -437,11 +443,13 @@ scrcpy(struct scrcpy_options *options) {
|
||||
file_pusher_initialized = true;
|
||||
}
|
||||
|
||||
static const struct sc_demuxer_callbacks video_demuxer_cbs = {
|
||||
.on_ended = sc_video_demuxer_on_ended,
|
||||
};
|
||||
sc_demuxer_init(&s->video_demuxer, "video", s->server.video_socket,
|
||||
&video_demuxer_cbs, NULL);
|
||||
if (options->video) {
|
||||
static const struct sc_demuxer_callbacks video_demuxer_cbs = {
|
||||
.on_ended = sc_video_demuxer_on_ended,
|
||||
};
|
||||
sc_demuxer_init(&s->video_demuxer, "video", s->server.video_socket,
|
||||
&video_demuxer_cbs, NULL);
|
||||
}
|
||||
|
||||
if (options->audio) {
|
||||
static const struct sc_demuxer_callbacks audio_demuxer_cbs = {
|
||||
@ -451,8 +459,8 @@ scrcpy(struct scrcpy_options *options) {
|
||||
&audio_demuxer_cbs, options);
|
||||
}
|
||||
|
||||
bool needs_video_decoder = options->display;
|
||||
bool needs_audio_decoder = options->audio && options->display;
|
||||
bool needs_video_decoder = options->video_playback;
|
||||
bool needs_audio_decoder = options->audio_playback;
|
||||
#ifdef HAVE_V4L2
|
||||
needs_video_decoder |= !!options->v4l2_device;
|
||||
#endif
|
||||
@ -472,8 +480,8 @@ scrcpy(struct scrcpy_options *options) {
|
||||
.on_ended = sc_recorder_on_ended,
|
||||
};
|
||||
if (!sc_recorder_init(&s->recorder, options->record_filename,
|
||||
options->record_format, options->audio,
|
||||
&recorder_cbs, NULL)) {
|
||||
options->record_format, options->video,
|
||||
options->audio, &recorder_cbs, NULL)) {
|
||||
goto end;
|
||||
}
|
||||
recorder_initialized = true;
|
||||
@ -483,8 +491,10 @@ scrcpy(struct scrcpy_options *options) {
|
||||
}
|
||||
recorder_started = true;
|
||||
|
||||
sc_packet_source_add_sink(&s->video_demuxer.packet_source,
|
||||
&s->recorder.video_packet_sink);
|
||||
if (options->video) {
|
||||
sc_packet_source_add_sink(&s->video_demuxer.packet_source,
|
||||
&s->recorder.video_packet_sink);
|
||||
}
|
||||
if (options->audio) {
|
||||
sc_packet_source_add_sink(&s->audio_demuxer.packet_source,
|
||||
&s->recorder.audio_packet_sink);
|
||||
@ -630,23 +640,12 @@ aoa_hid_end:
|
||||
}
|
||||
controller_started = true;
|
||||
controller = &s->controller;
|
||||
|
||||
if (options->turn_screen_off) {
|
||||
struct sc_control_msg msg;
|
||||
msg.type = SC_CONTROL_MSG_TYPE_SET_SCREEN_POWER_MODE;
|
||||
msg.set_screen_power_mode.mode = SC_SCREEN_POWER_MODE_OFF;
|
||||
|
||||
if (!sc_controller_push_msg(&s->controller, &msg)) {
|
||||
LOGW("Could not request 'set screen power mode'");
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
// There is a controller if and only if control is enabled
|
||||
assert(options->control == !!controller);
|
||||
|
||||
if (options->display) {
|
||||
if (options->video_playback) {
|
||||
const char *window_title =
|
||||
options->window_title ? options->window_title : info->device_name;
|
||||
|
||||
@ -672,11 +671,6 @@ aoa_hid_end:
|
||||
.start_fps_counter = options->start_fps_counter,
|
||||
};
|
||||
|
||||
if (!sc_screen_init(&s->screen, &screen_params)) {
|
||||
goto end;
|
||||
}
|
||||
screen_initialized = true;
|
||||
|
||||
struct sc_frame_source *src = &s->video_decoder.frame_source;
|
||||
if (options->display_buffer) {
|
||||
sc_delay_buffer_init(&s->display_buffer, options->display_buffer,
|
||||
@ -685,14 +679,19 @@ aoa_hid_end:
|
||||
src = &s->display_buffer.frame_source;
|
||||
}
|
||||
|
||||
sc_frame_source_add_sink(src, &s->screen.frame_sink);
|
||||
|
||||
if (options->audio) {
|
||||
sc_audio_player_init(&s->audio_player, options->audio_buffer,
|
||||
options->audio_output_buffer);
|
||||
sc_frame_source_add_sink(&s->audio_decoder.frame_source,
|
||||
&s->audio_player.frame_sink);
|
||||
if (!sc_screen_init(&s->screen, &screen_params)) {
|
||||
goto end;
|
||||
}
|
||||
screen_initialized = true;
|
||||
|
||||
sc_frame_source_add_sink(src, &s->screen.frame_sink);
|
||||
}
|
||||
|
||||
if (options->audio_playback) {
|
||||
sc_audio_player_init(&s->audio_player, options->audio_buffer,
|
||||
options->audio_output_buffer);
|
||||
sc_frame_source_add_sink(&s->audio_decoder.frame_source,
|
||||
&s->audio_player.frame_sink);
|
||||
}
|
||||
|
||||
#ifdef HAVE_V4L2
|
||||
@ -714,12 +713,15 @@ aoa_hid_end:
|
||||
}
|
||||
#endif
|
||||
|
||||
// now we consumed the header values, the socket receives the video stream
|
||||
// start the video demuxer
|
||||
if (!sc_demuxer_start(&s->video_demuxer)) {
|
||||
goto end;
|
||||
// Now that the header values have been consumed, the socket(s) will
|
||||
// receive the stream(s). Start the demuxer(s).
|
||||
|
||||
if (options->video) {
|
||||
if (!sc_demuxer_start(&s->video_demuxer)) {
|
||||
goto end;
|
||||
}
|
||||
video_demuxer_started = true;
|
||||
}
|
||||
video_demuxer_started = true;
|
||||
|
||||
if (options->audio) {
|
||||
if (!sc_demuxer_start(&s->audio_demuxer)) {
|
||||
@ -728,6 +730,18 @@ aoa_hid_end:
|
||||
audio_demuxer_started = true;
|
||||
}
|
||||
|
||||
// If the device screen is to be turned off, send the control message after
|
||||
// everything is set up
|
||||
if (options->control && options->turn_screen_off) {
|
||||
struct sc_control_msg msg;
|
||||
msg.type = SC_CONTROL_MSG_TYPE_SET_SCREEN_POWER_MODE;
|
||||
msg.set_screen_power_mode.mode = SC_SCREEN_POWER_MODE_OFF;
|
||||
|
||||
if (!sc_controller_push_msg(&s->controller, &msg)) {
|
||||
LOGW("Could not request 'set screen power mode'");
|
||||
}
|
||||
}
|
||||
|
||||
ret = event_loop(s);
|
||||
LOGD("quit...");
|
||||
|
||||
|
app/src/screen.c (195 changed lines)
@ -56,6 +56,7 @@ static void
|
||||
set_window_size(struct sc_screen *screen, struct sc_size new_size) {
|
||||
assert(!screen->fullscreen);
|
||||
assert(!screen->maximized);
|
||||
assert(!screen->minimized);
|
||||
SDL_SetWindowSize(screen->window, new_size.width, new_size.height);
|
||||
}
|
||||
|
||||
@ -239,35 +240,6 @@ sc_screen_update_content_rect(struct sc_screen *screen) {
|
||||
}
|
||||
}
|
||||
|
||||
static bool
|
||||
create_texture(struct sc_screen *screen) {
|
||||
SDL_Renderer *renderer = screen->renderer;
|
||||
struct sc_size size = screen->frame_size;
|
||||
SDL_Texture *texture = SDL_CreateTexture(renderer, SDL_PIXELFORMAT_YV12,
|
||||
SDL_TEXTUREACCESS_STREAMING,
|
||||
size.width, size.height);
|
||||
if (!texture) {
|
||||
LOGE("Could not create texture: %s", SDL_GetError());
|
||||
return false;
|
||||
}
|
||||
|
||||
if (screen->mipmaps) {
|
||||
struct sc_opengl *gl = &screen->gl;
|
||||
|
||||
SDL_GL_BindTexture(texture, NULL, NULL);
|
||||
|
||||
// Enable trilinear filtering for downscaling
|
||||
gl->TexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER,
|
||||
GL_LINEAR_MIPMAP_LINEAR);
|
||||
gl->TexParameterf(GL_TEXTURE_2D, GL_TEXTURE_LOD_BIAS, -1.f);
|
||||
|
||||
SDL_GL_UnbindTexture(texture);
|
||||
}
|
||||
|
||||
screen->texture = texture;
|
||||
return true;
|
||||
}
|
||||
|
||||
// render the texture to the renderer
|
||||
//
|
||||
// Set the update_content_rect flag if the window or content size may have
|
||||
@ -278,35 +250,11 @@ sc_screen_render(struct sc_screen *screen, bool update_content_rect) {
|
||||
sc_screen_update_content_rect(screen);
|
||||
}
|
||||
|
||||
SDL_RenderClear(screen->renderer);
|
||||
if (screen->rotation == 0) {
|
||||
SDL_RenderCopy(screen->renderer, screen->texture, NULL, &screen->rect);
|
||||
} else {
|
||||
// rotation in RenderCopyEx() is clockwise, while screen->rotation is
|
||||
// counterclockwise (to be consistent with --lock-video-orientation)
|
||||
int cw_rotation = (4 - screen->rotation) % 4;
|
||||
double angle = 90 * cw_rotation;
|
||||
|
||||
SDL_Rect *dstrect = NULL;
|
||||
SDL_Rect rect;
|
||||
if (screen->rotation & 1) {
|
||||
rect.x = screen->rect.x + (screen->rect.w - screen->rect.h) / 2;
|
||||
rect.y = screen->rect.y + (screen->rect.h - screen->rect.w) / 2;
|
||||
rect.w = screen->rect.h;
|
||||
rect.h = screen->rect.w;
|
||||
dstrect = ▭
|
||||
} else {
|
||||
assert(screen->rotation == 2);
|
||||
dstrect = &screen->rect;
|
||||
}
|
||||
|
||||
SDL_RenderCopyEx(screen->renderer, screen->texture, NULL, dstrect,
|
||||
angle, NULL, 0);
|
||||
}
|
||||
SDL_RenderPresent(screen->renderer);
|
||||
enum sc_display_result res =
|
||||
sc_display_render(&screen->display, &screen->rect, screen->rotation);
|
||||
(void) res; // any error already logged
|
||||
}
|
||||
|
||||
|
||||
#if defined(__APPLE__) || defined(__WINDOWS__)
|
||||
# define CONTINUOUS_RESIZING_WORKAROUND
|
||||
#endif
|
||||
@ -412,6 +360,7 @@ sc_screen_init(struct sc_screen *screen,
|
||||
screen->has_frame = false;
|
||||
screen->fullscreen = false;
|
||||
screen->maximized = false;
|
||||
screen->minimized = false;
|
||||
screen->mouse_capture_key_pressed = 0;
|
||||
|
||||
screen->req.x = params->window_x;
|
||||
@ -453,46 +402,11 @@ sc_screen_init(struct sc_screen *screen,
|
||||
goto error_destroy_fps_counter;
|
||||
}
|
||||
|
||||
screen->renderer = SDL_CreateRenderer(screen->window, -1,
|
||||
SDL_RENDERER_ACCELERATED);
|
||||
if (!screen->renderer) {
|
||||
LOGE("Could not create renderer: %s", SDL_GetError());
|
||||
ok = sc_display_init(&screen->display, screen->window, params->mipmaps);
|
||||
if (!ok) {
|
||||
goto error_destroy_window;
|
||||
}
|
||||
|
||||
SDL_RendererInfo renderer_info;
|
||||
int r = SDL_GetRendererInfo(screen->renderer, &renderer_info);
|
||||
const char *renderer_name = r ? NULL : renderer_info.name;
|
||||
LOGI("Renderer: %s", renderer_name ? renderer_name : "(unknown)");
|
||||
|
||||
screen->mipmaps = false;
|
||||
|
||||
// starts with "opengl"
|
||||
bool use_opengl = renderer_name && !strncmp(renderer_name, "opengl", 6);
|
||||
if (use_opengl) {
|
||||
struct sc_opengl *gl = &screen->gl;
|
||||
sc_opengl_init(gl);
|
||||
|
||||
LOGI("OpenGL version: %s", gl->version);
|
||||
|
||||
if (params->mipmaps) {
|
||||
bool supports_mipmaps =
|
||||
sc_opengl_version_at_least(gl, 3, 0, /* OpenGL 3.0+ */
|
||||
2, 0 /* OpenGL ES 2.0+ */);
|
||||
if (supports_mipmaps) {
|
||||
LOGI("Trilinear filtering enabled");
|
||||
screen->mipmaps = true;
|
||||
} else {
|
||||
LOGW("Trilinear filtering disabled "
|
||||
"(OpenGL 3.0+ or ES 2.0+ required)");
|
||||
}
|
||||
} else {
|
||||
LOGI("Trilinear filtering disabled");
|
||||
}
|
||||
} else if (params->mipmaps) {
|
||||
LOGD("Trilinear filtering disabled (not an OpenGL renderer)");
|
||||
}
|
||||
|
||||
SDL_Surface *icon = scrcpy_icon_load();
|
||||
if (icon) {
|
||||
SDL_SetWindowIcon(screen->window, icon);
|
||||
@ -504,7 +418,7 @@ sc_screen_init(struct sc_screen *screen,
|
||||
screen->frame = av_frame_alloc();
|
||||
if (!screen->frame) {
|
||||
LOG_OOM();
|
||||
goto error_destroy_renderer;
|
||||
goto error_destroy_display;
|
||||
}
|
||||
|
||||
struct sc_input_manager_params im_params = {
|
||||
@ -539,8 +453,8 @@ sc_screen_init(struct sc_screen *screen,
|
||||
|
||||
return true;
|
||||
|
||||
error_destroy_renderer:
|
||||
SDL_DestroyRenderer(screen->renderer);
|
||||
error_destroy_display:
|
||||
sc_display_destroy(&screen->display);
|
||||
error_destroy_window:
|
||||
SDL_DestroyWindow(screen->window);
|
||||
error_destroy_fps_counter:
|
||||
@ -596,11 +510,8 @@ sc_screen_destroy(struct sc_screen *screen) {
|
||||
#ifndef NDEBUG
|
||||
assert(!screen->open);
|
||||
#endif
|
||||
sc_display_destroy(&screen->display);
|
||||
av_frame_free(&screen->frame);
|
||||
if (screen->texture) {
|
||||
SDL_DestroyTexture(screen->texture);
|
||||
}
|
||||
SDL_DestroyRenderer(screen->renderer);
|
||||
SDL_DestroyWindow(screen->window);
|
||||
sc_fps_counter_destroy(&screen->fps_counter);
|
||||
sc_frame_buffer_destroy(&screen->fb);
|
||||
@ -622,11 +533,11 @@ resize_for_content(struct sc_screen *screen, struct sc_size old_content_size,
|
||||
|
||||
static void
|
||||
set_content_size(struct sc_screen *screen, struct sc_size new_content_size) {
|
||||
if (!screen->fullscreen && !screen->maximized) {
|
||||
if (!screen->fullscreen && !screen->maximized && !screen->minimized) {
|
||||
resize_for_content(screen, screen->content_size, new_content_size);
|
||||
} else if (!screen->resize_pending) {
|
||||
// Store the windowed size to be able to compute the optimal size once
|
||||
// fullscreen and maximized are disabled
|
||||
// fullscreen/maximized/minimized are disabled
|
||||
screen->windowed_content_size = screen->content_size;
|
||||
screen->resize_pending = true;
|
||||
}
|
||||
@ -638,6 +549,7 @@ static void
|
||||
apply_pending_resize(struct sc_screen *screen) {
|
||||
assert(!screen->fullscreen);
|
||||
assert(!screen->maximized);
|
||||
assert(!screen->minimized);
|
||||
if (screen->resize_pending) {
|
||||
resize_for_content(screen, screen->windowed_content_size,
|
||||
screen->content_size);
|
||||
@ -667,7 +579,6 @@ static bool
|
||||
sc_screen_init_size(struct sc_screen *screen) {
|
||||
// Before first frame
|
||||
assert(!screen->has_frame);
|
||||
assert(!screen->texture);
|
||||
|
||||
// The requested size is passed via screen->frame_size
|
||||
|
||||
@ -675,48 +586,29 @@ sc_screen_init_size(struct sc_screen *screen) {
|
||||
get_rotated_size(screen->frame_size, screen->rotation);
|
||||
screen->content_size = content_size;
|
||||
|
||||
LOGI("Initial texture: %" PRIu16 "x%" PRIu16,
|
||||
screen->frame_size.width, screen->frame_size.height);
|
||||
return create_texture(screen);
|
||||
enum sc_display_result res =
|
||||
sc_display_set_texture_size(&screen->display, screen->frame_size);
|
||||
return res != SC_DISPLAY_RESULT_ERROR;
|
||||
}
|
||||
|
||||
// recreate the texture and resize the window if the frame size has changed
|
||||
static bool
|
||||
static enum sc_display_result
|
||||
prepare_for_frame(struct sc_screen *screen, struct sc_size new_frame_size) {
|
||||
if (screen->frame_size.width != new_frame_size.width
|
||||
|| screen->frame_size.height != new_frame_size.height) {
|
||||
// frame dimension changed, destroy texture
|
||||
SDL_DestroyTexture(screen->texture);
|
||||
|
||||
screen->frame_size = new_frame_size;
|
||||
|
||||
struct sc_size new_content_size =
|
||||
get_rotated_size(new_frame_size, screen->rotation);
|
||||
set_content_size(screen, new_content_size);
|
||||
|
||||
sc_screen_update_content_rect(screen);
|
||||
|
||||
LOGI("New texture: %" PRIu16 "x%" PRIu16,
|
||||
screen->frame_size.width, screen->frame_size.height);
|
||||
return create_texture(screen);
|
||||
if (screen->frame_size.width == new_frame_size.width
|
||||
&& screen->frame_size.height == new_frame_size.height) {
|
||||
return SC_DISPLAY_RESULT_OK;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
// frame dimension changed
|
||||
screen->frame_size = new_frame_size;
|
||||
|
||||
// write the frame into the texture
|
||||
static void
|
||||
update_texture(struct sc_screen *screen, const AVFrame *frame) {
|
||||
SDL_UpdateYUVTexture(screen->texture, NULL,
|
||||
frame->data[0], frame->linesize[0],
|
||||
frame->data[1], frame->linesize[1],
|
||||
frame->data[2], frame->linesize[2]);
|
||||
struct sc_size new_content_size =
|
||||
get_rotated_size(new_frame_size, screen->rotation);
|
||||
set_content_size(screen, new_content_size);
|
||||
|
||||
if (screen->mipmaps) {
|
||||
SDL_GL_BindTexture(screen->texture, NULL, NULL);
|
||||
screen->gl.GenerateMipmap(GL_TEXTURE_2D);
|
||||
SDL_GL_UnbindTexture(screen->texture);
|
||||
}
|
||||
sc_screen_update_content_rect(screen);
|
||||
|
||||
return sc_display_set_texture_size(&screen->display, screen->frame_size);
|
||||
}
|
||||
|
||||
static bool
|
||||
@ -728,10 +620,23 @@ sc_screen_update_frame(struct sc_screen *screen) {
|
||||
sc_fps_counter_add_rendered_frame(&screen->fps_counter);
|
||||
|
||||
struct sc_size new_frame_size = {frame->width, frame->height};
|
||||
if (!prepare_for_frame(screen, new_frame_size)) {
|
||||
enum sc_display_result res = prepare_for_frame(screen, new_frame_size);
|
||||
if (res == SC_DISPLAY_RESULT_ERROR) {
|
||||
return false;
|
||||
}
|
||||
update_texture(screen, frame);
|
||||
if (res == SC_DISPLAY_RESULT_PENDING) {
|
||||
// Not an error, but do not continue
|
||||
return true;
|
||||
}
|
||||
|
||||
res = sc_display_update_texture(&screen->display, frame);
|
||||
if (res == SC_DISPLAY_RESULT_ERROR) {
|
||||
return false;
|
||||
}
|
||||
if (res == SC_DISPLAY_RESULT_PENDING) {
|
||||
// Not an error, but do not continue
|
||||
return true;
|
||||
}
|
||||
|
||||
if (!screen->has_frame) {
|
||||
screen->has_frame = true;
|
||||
@ -757,7 +662,7 @@ sc_screen_switch_fullscreen(struct sc_screen *screen) {
|
||||
}
|
||||
|
||||
screen->fullscreen = !screen->fullscreen;
|
||||
if (!screen->fullscreen && !screen->maximized) {
|
||||
if (!screen->fullscreen && !screen->maximized && !screen->minimized) {
|
||||
apply_pending_resize(screen);
|
||||
}
|
||||
|
||||
@ -767,7 +672,7 @@ sc_screen_switch_fullscreen(struct sc_screen *screen) {
|
||||
|
||||
void
|
||||
sc_screen_resize_to_fit(struct sc_screen *screen) {
|
||||
if (screen->fullscreen || screen->maximized) {
|
||||
if (screen->fullscreen || screen->maximized || screen->minimized) {
|
||||
return;
|
||||
}
|
||||
|
||||
@ -791,7 +696,7 @@ sc_screen_resize_to_fit(struct sc_screen *screen) {
|
||||
|
||||
void
|
||||
sc_screen_resize_to_pixel_perfect(struct sc_screen *screen) {
|
||||
if (screen->fullscreen) {
|
||||
if (screen->fullscreen || screen->minimized) {
|
||||
return;
|
||||
}
|
||||
|
||||
@ -812,7 +717,7 @@ sc_screen_is_mouse_capture_key(SDL_Keycode key) {
|
||||
}
|
||||
|
||||
bool
|
||||
sc_screen_handle_event(struct sc_screen *screen, SDL_Event *event) {
|
||||
sc_screen_handle_event(struct sc_screen *screen, const SDL_Event *event) {
|
||||
bool relative_mode = sc_screen_is_relative_mode(screen);
|
||||
|
||||
switch (event->type) {
|
||||
@ -848,6 +753,9 @@ sc_screen_handle_event(struct sc_screen *screen, SDL_Event *event) {
|
||||
case SDL_WINDOWEVENT_MAXIMIZED:
|
||||
screen->maximized = true;
|
||||
break;
|
||||
case SDL_WINDOWEVENT_MINIMIZED:
|
||||
screen->minimized = true;
|
||||
break;
|
||||
case SDL_WINDOWEVENT_RESTORED:
|
||||
if (screen->fullscreen) {
|
||||
// On Windows, in maximized+fullscreen, disabling
|
||||
@ -858,6 +766,7 @@ sc_screen_handle_event(struct sc_screen *screen, SDL_Event *event) {
|
||||
break;
|
||||
}
|
||||
screen->maximized = false;
|
||||
screen->minimized = false;
|
||||
apply_pending_resize(screen);
|
||||
sc_screen_render(screen, true);
|
||||
break;
|
||||
|
@ -9,6 +9,7 @@
|
||||
|
||||
#include "controller.h"
|
||||
#include "coords.h"
|
||||
#include "display.h"
|
||||
#include "fps_counter.h"
|
||||
#include "frame_buffer.h"
|
||||
#include "input_manager.h"
|
||||
@ -24,6 +25,7 @@ struct sc_screen {
|
||||
bool open; // track the open/close state to assert correct behavior
|
||||
#endif
|
||||
|
||||
struct sc_display display;
|
||||
struct sc_input_manager im;
|
||||
struct sc_frame_buffer fb;
|
||||
struct sc_fps_counter fps_counter;
|
||||
@ -39,9 +41,6 @@ struct sc_screen {
|
||||
} req;
|
||||
|
||||
SDL_Window *window;
|
||||
SDL_Renderer *renderer;
|
||||
SDL_Texture *texture;
|
||||
struct sc_opengl gl;
|
||||
struct sc_size frame_size;
|
||||
struct sc_size content_size; // rotated frame_size
|
||||
|
||||
@ -57,7 +56,7 @@ struct sc_screen {
|
||||
bool has_frame;
|
||||
bool fullscreen;
|
||||
bool maximized;
|
||||
bool mipmaps;
|
||||
bool minimized;
|
||||
|
||||
// To enable/disable mouse capture, a mouse capture key (LALT, LGUI or
|
||||
// RGUI) must be pressed. This variable tracks the pressed capture key.
|
||||
@ -137,7 +136,7 @@ sc_screen_set_rotation(struct sc_screen *screen, unsigned rotation);
|
||||
// react to SDL events
|
||||
// If this function returns false, scrcpy must exit with an error.
|
||||
bool
|
||||
sc_screen_handle_event(struct sc_screen *screen, SDL_Event *event);
|
||||
sc_screen_handle_event(struct sc_screen *screen, const SDL_Event *event);
|
||||
|
||||
// convert point from window coordinates to frame coordinates
|
||||
// x and y are expressed in pixels
|
||||
|
@ -226,12 +226,16 @@ execute_server(struct sc_server *server,
|
||||
ADD_PARAM("scid=%08x", params->scid);
|
||||
ADD_PARAM("log_level=%s", log_level_to_server_string(params->log_level));
|
||||
|
||||
if (!params->video) {
|
||||
ADD_PARAM("video=false");
|
||||
}
|
||||
if (params->video_bit_rate) {
|
||||
ADD_PARAM("video_bit_rate=%" PRIu32, params->video_bit_rate);
|
||||
}
|
||||
if (!params->audio) {
|
||||
ADD_PARAM("audio=false");
|
||||
} else if (params->audio_bit_rate) {
|
||||
}
|
||||
if (params->audio_bit_rate) {
|
||||
ADD_PARAM("audio_bit_rate=%" PRIu32, params->audio_bit_rate);
|
||||
}
|
||||
if (params->video_codec != SC_CODEC_H264) {
|
||||
@ -242,6 +246,10 @@ execute_server(struct sc_server *server,
|
||||
ADD_PARAM("audio_codec=%s",
|
||||
sc_server_get_codec_name(params->audio_codec));
|
||||
}
|
||||
if (params->audio_source != SC_AUDIO_SOURCE_OUTPUT) {
|
||||
assert(params->audio_source == SC_AUDIO_SOURCE_MIC);
|
||||
ADD_PARAM("audio_source=mic");
|
||||
}
|
||||
if (params->max_size) {
|
||||
ADD_PARAM("max_size=%" PRIu16, params->max_size);
|
||||
}
|
||||
@ -463,6 +471,7 @@ sc_server_connect_to(struct sc_server *server, struct sc_server_info *info) {
|
||||
const char *serial = server->serial;
|
||||
assert(serial);
|
||||
|
||||
bool video = server->params.video;
|
||||
bool audio = server->params.audio;
|
||||
bool control = server->params.control;
|
||||
|
||||
@ -470,9 +479,12 @@ sc_server_connect_to(struct sc_server *server, struct sc_server_info *info) {
|
||||
sc_socket audio_socket = SC_SOCKET_NONE;
|
||||
sc_socket control_socket = SC_SOCKET_NONE;
|
||||
if (!tunnel->forward) {
|
||||
video_socket = net_accept_intr(&server->intr, tunnel->server_socket);
|
||||
if (video_socket == SC_SOCKET_NONE) {
|
||||
goto fail;
|
||||
if (video) {
|
||||
video_socket =
|
||||
net_accept_intr(&server->intr, tunnel->server_socket);
|
||||
if (video_socket == SC_SOCKET_NONE) {
|
||||
goto fail;
|
||||
}
|
||||
}
|
||||
|
||||
if (audio) {
|
||||
@ -503,35 +515,45 @@ sc_server_connect_to(struct sc_server *server, struct sc_server_info *info) {
|
||||
|
||||
unsigned attempts = 100;
|
||||
sc_tick delay = SC_TICK_FROM_MS(100);
|
||||
video_socket = connect_to_server(server, attempts, delay, tunnel_host,
|
||||
tunnel_port);
|
||||
if (video_socket == SC_SOCKET_NONE) {
|
||||
sc_socket first_socket = connect_to_server(server, attempts, delay,
|
||||
tunnel_host, tunnel_port);
|
||||
if (first_socket == SC_SOCKET_NONE) {
|
||||
goto fail;
|
||||
}
|
||||
|
||||
if (video) {
|
||||
video_socket = first_socket;
|
||||
}
|
||||
|
||||
if (audio) {
|
||||
audio_socket = net_socket();
|
||||
if (audio_socket == SC_SOCKET_NONE) {
|
||||
goto fail;
|
||||
}
|
||||
bool ok = net_connect_intr(&server->intr, audio_socket, tunnel_host,
|
||||
tunnel_port);
|
||||
if (!ok) {
|
||||
goto fail;
|
||||
if (!video) {
|
||||
audio_socket = first_socket;
|
||||
} else {
|
||||
audio_socket = net_socket();
|
||||
if (audio_socket == SC_SOCKET_NONE) {
|
||||
goto fail;
|
||||
}
|
||||
bool ok = net_connect_intr(&server->intr, audio_socket, tunnel_host,
|
||||
tunnel_port);
|
||||
if (!ok) {
|
||||
goto fail;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (control) {
|
||||
// we know that the device is listening, we don't need several
|
||||
// attempts
|
||||
control_socket = net_socket();
|
||||
if (control_socket == SC_SOCKET_NONE) {
|
||||
goto fail;
|
||||
}
|
||||
bool ok = net_connect_intr(&server->intr, control_socket,
|
||||
tunnel_host, tunnel_port);
|
||||
if (!ok) {
|
||||
goto fail;
|
||||
if (!video && !audio) {
|
||||
control_socket = first_socket;
|
||||
} else {
|
||||
control_socket = net_socket();
|
||||
if (control_socket == SC_SOCKET_NONE) {
|
||||
goto fail;
|
||||
}
|
||||
bool ok = net_connect_intr(&server->intr, control_socket,
|
||||
tunnel_host, tunnel_port);
|
||||
if (!ok) {
|
||||
goto fail;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -540,13 +562,17 @@ sc_server_connect_to(struct sc_server *server, struct sc_server_info *info) {
|
||||
sc_adb_tunnel_close(tunnel, &server->intr, serial,
|
||||
server->device_socket_name);
|
||||
|
||||
sc_socket first_socket = video ? video_socket
|
||||
: audio ? audio_socket
|
||||
: control_socket;
|
||||
|
||||
// The sockets will be closed on stop if device_read_info() fails
|
||||
bool ok = device_read_info(&server->intr, video_socket, info);
|
||||
bool ok = device_read_info(&server->intr, first_socket, info);
|
||||
if (!ok) {
|
||||
goto fail;
|
||||
}
|
||||
|
||||
assert(video_socket != SC_SOCKET_NONE);
|
||||
assert(!video || video_socket != SC_SOCKET_NONE);
|
||||
assert(!audio || audio_socket != SC_SOCKET_NONE);
|
||||
assert(!control || control_socket != SC_SOCKET_NONE);
|
||||
|
||||
@ -930,8 +956,11 @@ run_server(void *data) {
|
||||
sc_mutex_unlock(&server->mutex);
|
||||
|
||||
// Interrupt sockets to wake up socket blocking calls on the server
|
||||
assert(server->video_socket != SC_SOCKET_NONE);
|
||||
net_interrupt(server->video_socket);
|
||||
|
||||
if (server->video_socket != SC_SOCKET_NONE) {
|
||||
// There is no video_socket if --no-video is set
|
||||
net_interrupt(server->video_socket);
|
||||
}
|
||||
|
||||
if (server->audio_socket != SC_SOCKET_NONE) {
|
||||
// There is no audio_socket if --no-audio is set
|
||||
|
@ -26,6 +26,7 @@ struct sc_server_params {
enum sc_log_level log_level;
enum sc_codec video_codec;
enum sc_codec audio_codec;
enum sc_audio_source audio_source;
const char *crop;
const char *video_codec_options;
const char *audio_codec_options;
@ -41,6 +42,7 @@ struct sc_server_params {
int8_t lock_video_orientation;
bool control;
uint32_t display_id;
bool video;
bool audio;
bool show_touches;
bool stay_awake;
@ -217,6 +217,18 @@ static void test_get_ip_multiline_second_ok(void) {
free(ip);
}

static void test_get_ip_multiline_second_ok_without_cr(void) {
char ip_route[] = "10.0.0.0/24 dev rmnet proto kernel scope link src "
"10.0.0.3\n"
"192.168.1.0/24 dev wlan0 proto kernel scope link src "
"192.168.1.3\n";

char *ip = sc_adb_parse_device_ip(ip_route);
assert(ip);
assert(!strcmp(ip, "192.168.1.3"));
free(ip);
}

static void test_get_ip_no_wlan(void) {
char ip_route[] = "192.168.1.0/24 dev rmnet proto kernel scope link src "
"192.168.12.34\r\r\n";
@ -259,6 +271,7 @@ int main(int argc, char *argv[]) {
test_get_ip_single_line_with_trailing_space();
test_get_ip_multiline_first_ok();
test_get_ip_multiline_second_ok();
test_get_ip_multiline_second_ok_without_cr();
test_get_ip_no_wlan();
test_get_ip_no_wlan_without_eol();
test_get_ip_truncated();
@ -53,7 +53,7 @@ static void test_options(void) {
"--max-size", "1024",
"--lock-video-orientation=2", // optional arguments require '='
// "--no-control" is not compatible with "--turn-screen-off"
// "--no-display" is not compatible with "--fulscreen"
// "--no-playback" is not compatible with "--fulscreen"
"--port", "1234:1236",
"--push-target", "/sdcard/Movies",
"--record", "file",
@ -108,8 +108,8 @@ static void test_options2(void) {
char *argv[] = {
"scrcpy",
"--no-control",
"--no-display",
"--record", "file.mp4", // cannot enable --no-display without recording
"--no-playback",
"--record", "file.mp4", // cannot enable --no-playback without recording
};

bool ok = scrcpy_parse_args(&args, ARRAY_LEN(argv), argv);
@ -117,7 +117,8 @@ static void test_options2(void) {

const struct scrcpy_options *opts = &args.opts;
assert(!opts->control);
assert(!opts->display);
assert(!opts->video_playback);
assert(!opts->audio_playback);
assert(!strcmp(opts->record_filename, "file.mp4"));
assert(opts->record_format == SC_RECORD_FORMAT_MP4);
}
@ -16,6 +16,6 @@ cpu = 'i686'
endian = 'little'

[properties]
prebuilt_ffmpeg = 'ffmpeg-6.0-scrcpy-2/win32'
prebuilt_ffmpeg = 'ffmpeg-6.0-scrcpy-4/win32'
prebuilt_sdl2 = 'SDL2-2.26.4/i686-w64-mingw32'
prebuilt_libusb = 'libusb-1.0.26/libusb-MinGW-Win32'
@ -16,6 +16,6 @@ cpu = 'x86_64'
endian = 'little'

[properties]
prebuilt_ffmpeg = 'ffmpeg-6.0-scrcpy-2/win64'
prebuilt_ffmpeg = 'ffmpeg-6.0-scrcpy-4/win64'
prebuilt_sdl2 = 'SDL2-2.26.4/x86_64-w64-mingw32'
prebuilt_libusb = 'libusb-1.0.26/libusb-MinGW-x64'
doc/audio.md (36 changed lines)
@ -24,6 +24,42 @@ To disable audio:
scrcpy --no-audio
```

To disable only the audio playback, see [no playback](video.md#no-playback).

## Audio only

To play audio only, disable the video:

```bash
scrcpy --no-video
# interrupt with Ctrl+C
```

Without video, the audio latency is typically not critical, so it might be
interesting to add [buffering](#buffering) to minimize glitches:

```
scrcpy --no-video --audio-buffer=200
```

## Source

By default, the device audio output is forwarded.

It is possible to capture the device microphone instead:

```
scrcpy --audio-source=mic
```

For example, to use the device as a dictaphone and record a capture directly on
the computer:

```
scrcpy --audio-source=mic --no-video --no-audio-playback --record=file.opus
```
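The recorded file then contains a single Opus audio stream; for example, if FFmpeg is installed on the computer, the capture can be checked afterwards with `ffplay file.opus` or inspected with `ffprobe file.opus`.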

## Codec

The audio codec can be selected. The possible values are `opus` (default), `aac`
@ -13,12 +13,18 @@ To record only the video:
scrcpy --no-audio --record=file.mp4
```

_It is currently not possible to record only the audio._

To disable mirroring while recording:
To record only the audio:

```bash
scrcpy --no-display --record=file.mp4
scrcpy --no-video --record=file.opus
scrcpy --no-video --audio-codec=aac --record=file.aac
# .m4a/.mp4 and .mka/.mkv are also supported for both opus and aac
```

To disable playback while recording:

```bash
scrcpy --no-playback --record=file.mp4
scrcpy -Nr file.mkv
# interrupt recording with Ctrl+C
```
@ -35,7 +35,7 @@ To start `scrcpy` using a v4l2 sink:

```bash
scrcpy --v4l2-sink=/dev/videoN
scrcpy --v4l2-sink=/dev/videoN --no-display # disable mirroring window
scrcpy --v4l2-sink=/dev/videoN --no-video-playback # disable playback window
```

(replace `N` with the device ID, check with `ls /dev/video*`)
doc/video.md (31 changed lines)
@ -159,17 +159,38 @@ scrcpy --display-buffer=50 --v4l2-buffer=300
```

## No display
## No playback

It is possible to capture an Android device without displaying a mirroring
window. This option is available if either [recording](recording.md) or
It is possible to capture an Android device without playing video or audio on
the computer. This option is useful when [recording](recording.md) or when
[v4l2](#video4linux) is enabled:

```bash
scrcpy --v4l2-sink=/dev/video2 --no-display
scrcpy --record=file.mkv --no-display
scrcpy --v4l2-sink=/dev/video2 --no-playback
scrcpy --record=file.mkv --no-playback
# interrupt with Ctrl+C
```

It is also possible to disable video and audio playback separately:

```bash
# Send video to V4L2 sink without playing it, but keep audio playback
scrcpy --v4l2-sink=/dev/video2 --no-video-playback

# Record both video and audio, but only play video
scrcpy --record=file.mkv --no-audio-playback
```

## No video

To disable video forwarding completely, so that only audio is forwarded:

```
scrcpy --no-video
```

## Video4Linux

See the dedicated [Video4Linux](v4l2.md) page.
release.mk (18 changed lines)
@ -94,11 +94,10 @@ dist-win32: build-server build-win32
cp app/data/scrcpy-noconsole.vbs "$(DIST)/$(WIN32_TARGET_DIR)"
cp app/data/icon.png "$(DIST)/$(WIN32_TARGET_DIR)"
cp app/data/open_a_terminal_here.bat "$(DIST)/$(WIN32_TARGET_DIR)"
cp app/prebuilt-deps/data/ffmpeg-6.0-scrcpy-2/win32/bin/avutil-58.dll "$(DIST)/$(WIN32_TARGET_DIR)/"
cp app/prebuilt-deps/data/ffmpeg-6.0-scrcpy-2/win32/bin/avcodec-60.dll "$(DIST)/$(WIN32_TARGET_DIR)/"
cp app/prebuilt-deps/data/ffmpeg-6.0-scrcpy-2/win32/bin/avformat-60.dll "$(DIST)/$(WIN32_TARGET_DIR)/"
cp app/prebuilt-deps/data/ffmpeg-6.0-scrcpy-2/win32/bin/swresample-4.dll "$(DIST)/$(WIN32_TARGET_DIR)/"
cp app/prebuilt-deps/data/ffmpeg-6.0-scrcpy-2/win32/bin/zlib1.dll "$(DIST)/$(WIN32_TARGET_DIR)/"
cp app/prebuilt-deps/data/ffmpeg-6.0-scrcpy-4/win32/bin/avutil-58.dll "$(DIST)/$(WIN32_TARGET_DIR)/"
cp app/prebuilt-deps/data/ffmpeg-6.0-scrcpy-4/win32/bin/avcodec-60.dll "$(DIST)/$(WIN32_TARGET_DIR)/"
cp app/prebuilt-deps/data/ffmpeg-6.0-scrcpy-4/win32/bin/avformat-60.dll "$(DIST)/$(WIN32_TARGET_DIR)/"
cp app/prebuilt-deps/data/ffmpeg-6.0-scrcpy-4/win32/bin/swresample-4.dll "$(DIST)/$(WIN32_TARGET_DIR)/"
cp app/prebuilt-deps/data/platform-tools-34.0.1/adb.exe "$(DIST)/$(WIN32_TARGET_DIR)/"
cp app/prebuilt-deps/data/platform-tools-34.0.1/AdbWinApi.dll "$(DIST)/$(WIN32_TARGET_DIR)/"
cp app/prebuilt-deps/data/platform-tools-34.0.1/AdbWinUsbApi.dll "$(DIST)/$(WIN32_TARGET_DIR)/"
@ -113,11 +112,10 @@ dist-win64: build-server build-win64
cp app/data/scrcpy-noconsole.vbs "$(DIST)/$(WIN64_TARGET_DIR)"
cp app/data/icon.png "$(DIST)/$(WIN64_TARGET_DIR)"
cp app/data/open_a_terminal_here.bat "$(DIST)/$(WIN64_TARGET_DIR)"
cp app/prebuilt-deps/data/ffmpeg-6.0-scrcpy-2/win64/bin/avutil-58.dll "$(DIST)/$(WIN64_TARGET_DIR)/"
cp app/prebuilt-deps/data/ffmpeg-6.0-scrcpy-2/win64/bin/avcodec-60.dll "$(DIST)/$(WIN64_TARGET_DIR)/"
cp app/prebuilt-deps/data/ffmpeg-6.0-scrcpy-2/win64/bin/avformat-60.dll "$(DIST)/$(WIN64_TARGET_DIR)/"
cp app/prebuilt-deps/data/ffmpeg-6.0-scrcpy-2/win64/bin/swresample-4.dll "$(DIST)/$(WIN64_TARGET_DIR)/"
cp app/prebuilt-deps/data/ffmpeg-6.0-scrcpy-2/win64/bin/zlib1.dll "$(DIST)/$(WIN64_TARGET_DIR)/"
cp app/prebuilt-deps/data/ffmpeg-6.0-scrcpy-4/win64/bin/avutil-58.dll "$(DIST)/$(WIN64_TARGET_DIR)/"
cp app/prebuilt-deps/data/ffmpeg-6.0-scrcpy-4/win64/bin/avcodec-60.dll "$(DIST)/$(WIN64_TARGET_DIR)/"
cp app/prebuilt-deps/data/ffmpeg-6.0-scrcpy-4/win64/bin/avformat-60.dll "$(DIST)/$(WIN64_TARGET_DIR)/"
cp app/prebuilt-deps/data/ffmpeg-6.0-scrcpy-4/win64/bin/swresample-4.dll "$(DIST)/$(WIN64_TARGET_DIR)/"
cp app/prebuilt-deps/data/platform-tools-34.0.1/adb.exe "$(DIST)/$(WIN64_TARGET_DIR)/"
cp app/prebuilt-deps/data/platform-tools-34.0.1/AdbWinApi.dll "$(DIST)/$(WIN64_TARGET_DIR)/"
cp app/prebuilt-deps/data/platform-tools-34.0.1/AdbWinUsbApi.dll "$(DIST)/$(WIN64_TARGET_DIR)/"
server/src/main/aidl/android/view/IDisplayFoldListener.aidl (new file, 26 lines)
@ -0,0 +1,26 @@
/*
* Copyright (C) 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package android.view;

/**
* {@hide}
*/
oneway interface IDisplayFoldListener
{
/** Called when the foldedness of a display changes */
void onDisplayFoldChanged(int displayId, boolean folded);
}
@ -1,7 +1,16 @@
package com.genymobile.scrcpy;

public interface AsyncProcessor {
void start();
interface TerminationListener {
/**
* Notify processor termination
*
* @param fatalError {@code true} if this must cause the termination of the whole scrcpy-server.
*/
void onTerminated(boolean fatalError);
}

void start(TerminationListener listener);
void stop();
void join() throws InterruptedException;
}
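The hunk above only defines the new `start(TerminationListener)` contract; the call sites are not part of this excerpt. As a rough, self-contained sketch (every name except `AsyncProcessor` and `TerminationListener` is invented for illustration and is not scrcpy code), a caller could stop all processors as soon as one of them terminates with a fatal error:

```java
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.CountDownLatch;

// Hypothetical demo of the TerminationListener contract shown above.
public class AsyncProcessorDemo {

    interface TerminationListener {
        void onTerminated(boolean fatalError);
    }

    interface AsyncProcessor {
        void start(TerminationListener listener);
        void stop();
        void join() throws InterruptedException;
    }

    // Stand-in processor that "works" for a while, then terminates normally
    static class DummyProcessor implements AsyncProcessor {
        private Thread thread;

        @Override
        public void start(TerminationListener listener) {
            thread = new Thread(() -> {
                boolean fatalError = false;
                try {
                    Thread.sleep(100); // pretend to capture/encode
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt();
                } finally {
                    listener.onTerminated(fatalError);
                }
            });
            thread.start();
        }

        @Override
        public void stop() {
            if (thread != null) {
                thread.interrupt();
            }
        }

        @Override
        public void join() throws InterruptedException {
            if (thread != null) {
                thread.join();
            }
        }
    }

    public static void main(String[] args) throws InterruptedException {
        List<AsyncProcessor> processors = Arrays.asList(new DummyProcessor(), new DummyProcessor());
        CountDownLatch done = new CountDownLatch(processors.size());

        for (AsyncProcessor p : processors) {
            p.start(fatalError -> {
                if (fatalError) {
                    // One processor failed: ask all the others to stop too
                    processors.forEach(AsyncProcessor::stop);
                }
                done.countDown();
            });
        }

        done.await(); // wait until every processor has reported termination
        for (AsyncProcessor p : processors) {
            p.join();
        }
    }
}
```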
@ -10,7 +10,6 @@ import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.AudioTimestamp;
import android.media.MediaCodec;
import android.media.MediaRecorder;
import android.os.Build;
import android.os.SystemClock;

@ -21,22 +20,29 @@ public final class AudioCapture {
public static final int SAMPLE_RATE = 48000;
public static final int CHANNEL_CONFIG = AudioFormat.CHANNEL_IN_STEREO;
public static final int CHANNELS = 2;
public static final int FORMAT = AudioFormat.ENCODING_PCM_16BIT;
public static final int CHANNEL_MASK = AudioFormat.CHANNEL_IN_LEFT | AudioFormat.CHANNEL_IN_RIGHT;
public static final int ENCODING = AudioFormat.ENCODING_PCM_16BIT;
public static final int BYTES_PER_SAMPLE = 2;

private final int audioSource;

private AudioRecord recorder;

private final AudioTimestamp timestamp = new AudioTimestamp();
private long previousPts = 0;
private long nextPts = 0;

public AudioCapture(AudioSource audioSource) {
this.audioSource = audioSource.value();
}

public static int millisToBytes(int millis) {
return SAMPLE_RATE * CHANNELS * BYTES_PER_SAMPLE * millis / 1000;
}

private static AudioFormat createAudioFormat() {
AudioFormat.Builder builder = new AudioFormat.Builder();
builder.setEncoding(FORMAT);
builder.setEncoding(ENCODING);
builder.setSampleRate(SAMPLE_RATE);
builder.setChannelMask(CHANNEL_CONFIG);
return builder.build();
@ -44,15 +50,15 @@ public final class AudioCapture {

@TargetApi(Build.VERSION_CODES.M)
@SuppressLint({"WrongConstant", "MissingPermission"})
private static AudioRecord createAudioRecord() {
private static AudioRecord createAudioRecord(int audioSource) {
AudioRecord.Builder builder = new AudioRecord.Builder();
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.S) {
// On older APIs, Workarounds.fillAppInfo() must be called beforehand
builder.setContext(FakeContext.get());
}
builder.setAudioSource(MediaRecorder.AudioSource.REMOTE_SUBMIX);
builder.setAudioSource(audioSource);
builder.setAudioFormat(createAudioFormat());
int minBufferSize = AudioRecord.getMinBufferSize(SAMPLE_RATE, CHANNEL_CONFIG, FORMAT);
int minBufferSize = AudioRecord.getMinBufferSize(SAMPLE_RATE, CHANNEL_CONFIG, ENCODING);
// This buffer size does not impact latency
builder.setBufferSizeInBytes(8 * minBufferSize);
return builder.build();
@ -86,8 +92,8 @@ public final class AudioCapture {
} catch (UnsupportedOperationException e) {
if (attempts == 0) {
Ln.e("Failed to start audio capture");
Ln.e("On Android 11, audio capture must be started in the foreground, make sure that the device is unlocked when starting " +
"scrcpy.");
Ln.e("On Android 11, audio capture must be started in the foreground, make sure that the device is unlocked when starting "
+ "scrcpy.");
throw new AudioCaptureForegroundException();
} else {
Ln.d("Failed to start audio capture, retrying...");
@ -97,7 +103,14 @@ public final class AudioCapture {
}

private void startRecording() {
recorder = createAudioRecord();
try {
recorder = createAudioRecord(audioSource);
} catch (NullPointerException e) {
// Creating an AudioRecord using an AudioRecord.Builder does not work on Vivo phones:
// - <https://github.com/Genymobile/scrcpy/issues/3805>
// - <https://github.com/Genymobile/scrcpy/pull/3862>
recorder = Workarounds.createAudioRecord(audioSource, SAMPLE_RATE, CHANNEL_CONFIG, CHANNELS, CHANNEL_MASK, ENCODING);
}
recorder.startRecording();
}
@ -40,6 +40,7 @@ public final class AudioEncoder implements AsyncProcessor {
|
||||
private static final int READ_MS = 5; // milliseconds
|
||||
private static final int READ_SIZE = AudioCapture.millisToBytes(READ_MS);
|
||||
|
||||
private final AudioCapture capture;
|
||||
private final Streamer streamer;
|
||||
private final int bitRate;
|
||||
private final List<CodecOption> codecOptions;
|
||||
@ -58,7 +59,8 @@ public final class AudioEncoder implements AsyncProcessor {
|
||||
|
||||
private boolean ended;
|
||||
|
||||
public AudioEncoder(Streamer streamer, int bitRate, List<CodecOption> codecOptions, String encoderName) {
|
||||
public AudioEncoder(AudioCapture capture, Streamer streamer, int bitRate, List<CodecOption> codecOptions, String encoderName) {
|
||||
this.capture = capture;
|
||||
this.streamer = streamer;
|
||||
this.bitRate = bitRate;
|
||||
this.codecOptions = codecOptions;
|
||||
@ -114,21 +116,29 @@ public final class AudioEncoder implements AsyncProcessor {
|
||||
}
|
||||
}
|
||||
|
||||
public void start() {
|
||||
@Override
|
||||
public void start(TerminationListener listener) {
|
||||
thread = new Thread(() -> {
|
||||
boolean fatalError = false;
|
||||
try {
|
||||
encode();
|
||||
} catch (ConfigurationException | AudioCaptureForegroundException e) {
|
||||
} catch (ConfigurationException e) {
|
||||
// Do not print stack trace, a user-friendly error-message has already been logged
|
||||
fatalError = true;
|
||||
} catch (AudioCaptureForegroundException e) {
|
||||
// Do not print stack trace, a user-friendly error-message has already been logged
|
||||
} catch (IOException e) {
|
||||
Ln.e("Audio encoding error", e);
|
||||
fatalError = true;
|
||||
} finally {
|
||||
Ln.d("Audio encoder stopped");
|
||||
listener.onTerminated(fatalError);
|
||||
}
|
||||
});
|
||||
thread.start();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void stop() {
|
||||
if (thread != null) {
|
||||
// Just wake up the blocking wait from the thread, so that it properly releases all its resources and terminates
|
||||
@ -136,6 +146,7 @@ public final class AudioEncoder implements AsyncProcessor {
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void join() throws InterruptedException {
|
||||
if (thread != null) {
|
||||
thread.join();
|
||||
@ -166,7 +177,6 @@ public final class AudioEncoder implements AsyncProcessor {
|
||||
}
|
||||
|
||||
MediaCodec mediaCodec = null;
|
||||
AudioCapture capture = new AudioCapture();
|
||||
|
||||
boolean mediaCodecStarted = false;
|
||||
try {
|
||||
@ -183,10 +193,9 @@ public final class AudioEncoder implements AsyncProcessor {
|
||||
capture.start();
|
||||
|
||||
final MediaCodec mediaCodecRef = mediaCodec;
|
||||
final AudioCapture captureRef = capture;
|
||||
inputThread = new Thread(() -> {
|
||||
try {
|
||||
inputThread(mediaCodecRef, captureRef);
|
||||
inputThread(mediaCodecRef, capture);
|
||||
} catch (IOException | InterruptedException e) {
|
||||
Ln.e("Audio capture error", e);
|
||||
} finally {
|
||||
|
@ -1,12 +1,14 @@
|
||||
package com.genymobile.scrcpy;
|
||||
|
||||
import android.media.MediaCodec;
|
||||
import android.os.Build;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.nio.ByteBuffer;
|
||||
|
||||
public final class AudioRawRecorder implements AsyncProcessor {
|
||||
|
||||
private final AudioCapture capture;
|
||||
private final Streamer streamer;
|
||||
|
||||
private Thread thread;
|
||||
@ -14,15 +16,21 @@ public final class AudioRawRecorder implements AsyncProcessor {
|
||||
private static final int READ_MS = 5; // milliseconds
|
||||
private static final int READ_SIZE = AudioCapture.millisToBytes(READ_MS);
|
||||
|
||||
public AudioRawRecorder(Streamer streamer) {
|
||||
public AudioRawRecorder(AudioCapture capture, Streamer streamer) {
|
||||
this.capture = capture;
|
||||
this.streamer = streamer;
|
||||
}
|
||||
|
||||
private void record() throws IOException, AudioCaptureForegroundException {
|
||||
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.R) {
|
||||
Ln.w("Audio disabled: it is not supported before Android 11");
|
||||
streamer.writeDisableStream(false);
|
||||
return;
|
||||
}
|
||||
|
||||
final ByteBuffer buffer = ByteBuffer.allocateDirect(READ_SIZE);
|
||||
final MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
|
||||
|
||||
AudioCapture capture = new AudioCapture();
|
||||
try {
|
||||
capture.start();
|
||||
|
||||
@ -46,27 +54,33 @@ public final class AudioRawRecorder implements AsyncProcessor {
|
||||
}
|
||||
}
|
||||
|
||||
public void start() {
|
||||
@Override
|
||||
public void start(TerminationListener listener) {
|
||||
thread = new Thread(() -> {
|
||||
boolean fatalError = false;
|
||||
try {
|
||||
record();
|
||||
} catch (AudioCaptureForegroundException e) {
|
||||
// Do not print stack trace, a user-friendly error-message has already been logged
|
||||
} catch (IOException e) {
|
||||
Ln.e("Audio recording error", e);
|
||||
fatalError = true;
|
||||
} finally {
|
||||
Ln.d("Audio recorder stopped");
|
||||
listener.onTerminated(fatalError);
|
||||
}
|
||||
});
|
||||
thread.start();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void stop() {
|
||||
if (thread != null) {
|
||||
thread.interrupt();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void join() throws InterruptedException {
|
||||
if (thread != null) {
|
||||
thread.join();
|
||||
|
30
server/src/main/java/com/genymobile/scrcpy/AudioSource.java
Normal file
@ -0,0 +1,30 @@
|
||||
package com.genymobile.scrcpy;
|
||||
|
||||
import android.media.MediaRecorder;
|
||||
|
||||
public enum AudioSource {
|
||||
OUTPUT("output", MediaRecorder.AudioSource.REMOTE_SUBMIX),
|
||||
MIC("mic", MediaRecorder.AudioSource.MIC);
|
||||
|
||||
private final String name;
|
||||
private final int value;
|
||||
|
||||
AudioSource(String name, int value) {
|
||||
this.name = name;
|
||||
this.value = value;
|
||||
}
|
||||
|
||||
int value() {
|
||||
return value;
|
||||
}
|
||||
|
||||
static AudioSource findByName(String name) {
|
||||
for (AudioSource audioSource : AudioSource.values()) {
|
||||
if (name.equals(audioSource.name)) {
|
||||
return audioSource;
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
}
|
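This new enum is the server-side counterpart of the new `audio_source` option: `findByName()` maps the string sent by the client to the `MediaRecorder.AudioSource` constant that `AudioCapture` hands to `AudioRecord`. For example:

AudioSource source = AudioSource.findByName("mic");
// source.value() == MediaRecorder.AudioSource.MIC
// AudioSource.findByName("something-else") returns null, which Options.parse() rejects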
@ -84,7 +84,8 @@ public class Controller implements AsyncProcessor {
|
||||
}
|
||||
}
|
||||
|
||||
public void start() {
|
||||
@Override
|
||||
public void start(TerminationListener listener) {
|
||||
thread = new Thread(() -> {
|
||||
try {
|
||||
control();
|
||||
@ -92,12 +93,14 @@ public class Controller implements AsyncProcessor {
|
||||
// this is expected on close
|
||||
} finally {
|
||||
Ln.d("Controller stopped");
|
||||
listener.onTerminated(true);
|
||||
}
|
||||
});
|
||||
thread.start();
|
||||
sender.start();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void stop() {
|
||||
if (thread != null) {
|
||||
thread.interrupt();
|
||||
@ -105,6 +108,7 @@ public class Controller implements AsyncProcessor {
|
||||
sender.stop();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void join() throws InterruptedException {
|
||||
if (thread != null) {
|
||||
thread.join();
|
||||
|
@ -41,7 +41,7 @@ public final class DesktopConnection implements Closeable {
|
||||
controlInputStream = null;
|
||||
controlOutputStream = null;
|
||||
}
|
||||
videoFd = videoSocket.getFileDescriptor();
|
||||
videoFd = videoSocket != null ? videoSocket.getFileDescriptor() : null;
|
||||
audioFd = audioSocket != null ? audioSocket.getFileDescriptor() : null;
|
||||
}
|
||||
|
||||
@ -60,29 +60,43 @@ public final class DesktopConnection implements Closeable {
|
||||
return SOCKET_NAME_PREFIX + String.format("_%08x", scid);
|
||||
}
|
||||
|
||||
public static DesktopConnection open(int scid, boolean tunnelForward, boolean audio, boolean control, boolean sendDummyByte) throws IOException {
|
||||
public static DesktopConnection open(int scid, boolean tunnelForward, boolean video, boolean audio, boolean control, boolean sendDummyByte)
|
||||
throws IOException {
|
||||
String socketName = getSocketName(scid);
|
||||
|
||||
LocalSocket firstSocket = null;
|
||||
|
||||
LocalSocket videoSocket = null;
|
||||
LocalSocket audioSocket = null;
|
||||
LocalSocket controlSocket = null;
|
||||
try {
|
||||
if (tunnelForward) {
|
||||
try (LocalServerSocket localServerSocket = new LocalServerSocket(socketName)) {
|
||||
videoSocket = localServerSocket.accept();
|
||||
if (sendDummyByte) {
|
||||
// send one byte so the client may read() to detect a connection error
|
||||
videoSocket.getOutputStream().write(0);
|
||||
if (video) {
|
||||
videoSocket = localServerSocket.accept();
|
||||
firstSocket = videoSocket;
|
||||
}
|
||||
if (audio) {
|
||||
audioSocket = localServerSocket.accept();
|
||||
if (firstSocket == null) {
|
||||
firstSocket = audioSocket;
|
||||
}
|
||||
}
|
||||
if (control) {
|
||||
controlSocket = localServerSocket.accept();
|
||||
if (firstSocket == null) {
|
||||
firstSocket = controlSocket;
|
||||
}
|
||||
}
|
||||
if (sendDummyByte) {
|
||||
// send one byte so the client may read() to detect a connection error
|
||||
firstSocket.getOutputStream().write(0);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
videoSocket = connect(socketName);
|
||||
if (video) {
|
||||
videoSocket = connect(socketName);
|
||||
}
|
||||
if (audio) {
|
||||
audioSocket = connect(socketName);
|
||||
}
|
||||
@ -106,10 +120,22 @@ public final class DesktopConnection implements Closeable {
|
||||
return new DesktopConnection(videoSocket, audioSocket, controlSocket);
|
||||
}
|
||||
|
||||
private LocalSocket getFirstSocket() {
|
||||
if (videoSocket != null) {
|
||||
return videoSocket;
|
||||
}
|
||||
if (audioSocket != null) {
|
||||
return audioSocket;
|
||||
}
|
||||
return controlSocket;
|
||||
}
|
||||
|
||||
public void close() throws IOException {
|
||||
videoSocket.shutdownInput();
|
||||
videoSocket.shutdownOutput();
|
||||
videoSocket.close();
|
||||
if (videoSocket != null) {
|
||||
videoSocket.shutdownInput();
|
||||
videoSocket.shutdownOutput();
|
||||
videoSocket.close();
|
||||
}
|
||||
if (audioSocket != null) {
|
||||
audioSocket.shutdownInput();
|
||||
audioSocket.shutdownOutput();
|
||||
@ -130,7 +156,8 @@ public final class DesktopConnection implements Closeable {
|
||||
System.arraycopy(deviceNameBytes, 0, buffer, 0, len);
|
||||
// byte[] are always 0-initialized in java, no need to set '\0' explicitly
|
||||
|
||||
IO.writeFully(videoFd, buffer, 0, buffer.length);
|
||||
FileDescriptor fd = getFirstSocket().getFileDescriptor();
|
||||
IO.writeFully(fd, buffer, 0, buffer.length);
|
||||
}
|
||||
|
||||
public FileDescriptor getVideoFd() {
|
||||
|
@ -12,6 +12,7 @@ import android.os.Build;
|
||||
import android.os.IBinder;
|
||||
import android.os.SystemClock;
|
||||
import android.view.IRotationWatcher;
|
||||
import android.view.IDisplayFoldListener;
|
||||
import android.view.InputDevice;
|
||||
import android.view.InputEvent;
|
||||
import android.view.KeyCharacterMap;
|
||||
@ -35,6 +36,10 @@ public final class Device {
|
||||
void onRotationChanged(int rotation);
|
||||
}
|
||||
|
||||
public interface FoldListener {
|
||||
void onFoldChanged(int displayId, boolean folded);
|
||||
}
|
||||
|
||||
public interface ClipboardListener {
|
||||
void onClipboardTextChanged(String text);
|
||||
}
|
||||
@ -46,6 +51,7 @@ public final class Device {
|
||||
|
||||
private ScreenInfo screenInfo;
|
||||
private RotationListener rotationListener;
|
||||
private FoldListener foldListener;
|
||||
private ClipboardListener clipboardListener;
|
||||
private final AtomicBoolean isSettingClipboard = new AtomicBoolean();
|
||||
|
||||
@ -93,6 +99,26 @@ public final class Device {
|
||||
}
|
||||
}, displayId);
|
||||
|
||||
ServiceManager.getWindowManager().registerDisplayFoldListener(new IDisplayFoldListener.Stub() {
|
||||
@Override
|
||||
public void onDisplayFoldChanged(int displayId, boolean folded) {
|
||||
synchronized (Device.this) {
|
||||
DisplayInfo displayInfo = ServiceManager.getDisplayManager().getDisplayInfo(displayId);
|
||||
if (displayInfo == null) {
|
||||
Ln.e("Display " + displayId + " not found\n" + LogUtils.buildDisplayListMessage());
|
||||
return;
|
||||
}
|
||||
|
||||
screenInfo = ScreenInfo.computeScreenInfo(displayInfo.getRotation(), displayInfo.getSize(), options.getCrop(),
|
||||
options.getMaxSize(), options.getLockVideoOrientation());
|
||||
// notify
|
||||
if (foldListener != null) {
|
||||
foldListener.onFoldChanged(displayId, folded);
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
if (options.getControl() && options.getClipboardAutosync()) {
|
||||
// If control and autosync are enabled, synchronize Android clipboard to the computer automatically
|
||||
ClipboardManager clipboardManager = ServiceManager.getClipboardManager();
|
||||
@ -224,6 +250,10 @@ public final class Device {
|
||||
this.rotationListener = rotationListener;
|
||||
}
|
||||
|
||||
public synchronized void setFoldListener(FoldListener foldlistener) {
|
||||
this.foldListener = foldlistener;
|
||||
}
|
||||
|
||||
public synchronized void setClipboardListener(ClipboardListener clipboardListener) {
|
||||
this.clipboardListener = clipboardListener;
|
||||
}
|
||||
|
@ -3,15 +3,18 @@ package com.genymobile.scrcpy;
|
||||
import android.graphics.Rect;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Locale;
|
||||
|
||||
public class Options {
|
||||
|
||||
private Ln.Level logLevel = Ln.Level.DEBUG;
|
||||
private int scid = -1; // 31-bit non-negative value, or -1
|
||||
private boolean video = true;
|
||||
private boolean audio = true;
|
||||
private int maxSize;
|
||||
private VideoCodec videoCodec = VideoCodec.H264;
|
||||
private AudioCodec audioCodec = AudioCodec.OPUS;
|
||||
private AudioSource audioSource = AudioSource.OUTPUT;
|
||||
private int videoBitRate = 8000000;
|
||||
private int audioBitRate = 128000;
|
||||
private int maxFps;
|
||||
@ -46,166 +49,90 @@ public class Options {
|
||||
return logLevel;
|
||||
}
|
||||
|
||||
public void setLogLevel(Ln.Level logLevel) {
|
||||
this.logLevel = logLevel;
|
||||
}
|
||||
|
||||
public int getScid() {
|
||||
return scid;
|
||||
}
|
||||
|
||||
public void setScid(int scid) {
|
||||
this.scid = scid;
|
||||
public boolean getVideo() {
|
||||
return video;
|
||||
}
|
||||
|
||||
public boolean getAudio() {
|
||||
return audio;
|
||||
}
|
||||
|
||||
public void setAudio(boolean audio) {
|
||||
this.audio = audio;
|
||||
}
|
||||
|
||||
public int getMaxSize() {
|
||||
return maxSize;
|
||||
}
|
||||
|
||||
public void setMaxSize(int maxSize) {
|
||||
this.maxSize = maxSize;
|
||||
}
|
||||
|
||||
public VideoCodec getVideoCodec() {
|
||||
return videoCodec;
|
||||
}
|
||||
|
||||
public void setVideoCodec(VideoCodec videoCodec) {
|
||||
this.videoCodec = videoCodec;
|
||||
}
|
||||
|
||||
public AudioCodec getAudioCodec() {
|
||||
return audioCodec;
|
||||
}
|
||||
|
||||
public void setAudioCodec(AudioCodec audioCodec) {
|
||||
this.audioCodec = audioCodec;
|
||||
public AudioSource getAudioSource() {
|
||||
return audioSource;
|
||||
}
|
||||
|
||||
public int getVideoBitRate() {
|
||||
return videoBitRate;
|
||||
}
|
||||
|
||||
public void setVideoBitRate(int videoBitRate) {
|
||||
this.videoBitRate = videoBitRate;
|
||||
}
|
||||
|
||||
public int getAudioBitRate() {
|
||||
return audioBitRate;
|
||||
}
|
||||
|
||||
public void setAudioBitRate(int audioBitRate) {
|
||||
this.audioBitRate = audioBitRate;
|
||||
}
|
||||
|
||||
public int getMaxFps() {
|
||||
return maxFps;
|
||||
}
|
||||
|
||||
public void setMaxFps(int maxFps) {
|
||||
this.maxFps = maxFps;
|
||||
}
|
||||
|
||||
public int getLockVideoOrientation() {
|
||||
return lockVideoOrientation;
|
||||
}
|
||||
|
||||
public void setLockVideoOrientation(int lockVideoOrientation) {
|
||||
this.lockVideoOrientation = lockVideoOrientation;
|
||||
}
|
||||
|
||||
public boolean isTunnelForward() {
|
||||
return tunnelForward;
|
||||
}
|
||||
|
||||
public void setTunnelForward(boolean tunnelForward) {
|
||||
this.tunnelForward = tunnelForward;
|
||||
}
|
||||
|
||||
public Rect getCrop() {
|
||||
return crop;
|
||||
}
|
||||
|
||||
public void setCrop(Rect crop) {
|
||||
this.crop = crop;
|
||||
}
|
||||
|
||||
public boolean getControl() {
|
||||
return control;
|
||||
}
|
||||
|
||||
public void setControl(boolean control) {
|
||||
this.control = control;
|
||||
}
|
||||
|
||||
public int getDisplayId() {
|
||||
return displayId;
|
||||
}
|
||||
|
||||
public void setDisplayId(int displayId) {
|
||||
this.displayId = displayId;
|
||||
}
|
||||
|
||||
public boolean getShowTouches() {
|
||||
return showTouches;
|
||||
}
|
||||
|
||||
public void setShowTouches(boolean showTouches) {
|
||||
this.showTouches = showTouches;
|
||||
}
|
||||
|
||||
public boolean getStayAwake() {
|
||||
return stayAwake;
|
||||
}
|
||||
|
||||
public void setStayAwake(boolean stayAwake) {
|
||||
this.stayAwake = stayAwake;
|
||||
}
|
||||
|
||||
public List<CodecOption> getVideoCodecOptions() {
|
||||
return videoCodecOptions;
|
||||
}
|
||||
|
||||
public void setVideoCodecOptions(List<CodecOption> videoCodecOptions) {
|
||||
this.videoCodecOptions = videoCodecOptions;
|
||||
}
|
||||
|
||||
public List<CodecOption> getAudioCodecOptions() {
|
||||
return audioCodecOptions;
|
||||
}
|
||||
|
||||
public void setAudioCodecOptions(List<CodecOption> audioCodecOptions) {
|
||||
this.audioCodecOptions = audioCodecOptions;
|
||||
}
|
||||
|
||||
public String getVideoEncoder() {
|
||||
return videoEncoder;
|
||||
}
|
||||
|
||||
public void setVideoEncoder(String videoEncoder) {
|
||||
this.videoEncoder = videoEncoder;
|
||||
}
|
||||
|
||||
public String getAudioEncoder() {
|
||||
return audioEncoder;
|
||||
}
|
||||
|
||||
public void setAudioEncoder(String audioEncoder) {
|
||||
this.audioEncoder = audioEncoder;
|
||||
}
|
||||
|
||||
public void setPowerOffScreenOnClose(boolean powerOffScreenOnClose) {
|
||||
this.powerOffScreenOnClose = powerOffScreenOnClose;
|
||||
}
|
||||
|
||||
public boolean getPowerOffScreenOnClose() {
|
||||
return this.powerOffScreenOnClose;
|
||||
}
|
||||
@ -214,79 +141,214 @@ public class Options {
|
||||
return clipboardAutosync;
|
||||
}
|
||||
|
||||
public void setClipboardAutosync(boolean clipboardAutosync) {
|
||||
this.clipboardAutosync = clipboardAutosync;
|
||||
}
|
||||
|
||||
public boolean getDownsizeOnError() {
|
||||
return downsizeOnError;
|
||||
}
|
||||
|
||||
public void setDownsizeOnError(boolean downsizeOnError) {
|
||||
this.downsizeOnError = downsizeOnError;
|
||||
}
|
||||
|
||||
public boolean getCleanup() {
|
||||
return cleanup;
|
||||
}
|
||||
|
||||
public void setCleanup(boolean cleanup) {
|
||||
this.cleanup = cleanup;
|
||||
}
|
||||
|
||||
public boolean getPowerOn() {
|
||||
return powerOn;
|
||||
}
|
||||
|
||||
public void setPowerOn(boolean powerOn) {
|
||||
this.powerOn = powerOn;
|
||||
}
|
||||
|
||||
public boolean getListEncoders() {
|
||||
return listEncoders;
|
||||
}
|
||||
|
||||
public void setListEncoders(boolean listEncoders) {
|
||||
this.listEncoders = listEncoders;
|
||||
}
|
||||
|
||||
public boolean getListDisplays() {
|
||||
return listDisplays;
|
||||
}
|
||||
|
||||
public void setListDisplays(boolean listDisplays) {
|
||||
this.listDisplays = listDisplays;
|
||||
}
|
||||
|
||||
public boolean getSendDeviceMeta() {
|
||||
return sendDeviceMeta;
|
||||
}
|
||||
|
||||
public void setSendDeviceMeta(boolean sendDeviceMeta) {
|
||||
this.sendDeviceMeta = sendDeviceMeta;
|
||||
}
|
||||
|
||||
public boolean getSendFrameMeta() {
|
||||
return sendFrameMeta;
|
||||
}
|
||||
|
||||
public void setSendFrameMeta(boolean sendFrameMeta) {
|
||||
this.sendFrameMeta = sendFrameMeta;
|
||||
}
|
||||
|
||||
public boolean getSendDummyByte() {
|
||||
return sendDummyByte;
|
||||
}
|
||||
|
||||
public void setSendDummyByte(boolean sendDummyByte) {
|
||||
this.sendDummyByte = sendDummyByte;
|
||||
}
|
||||
|
||||
public boolean getSendCodecMeta() {
|
||||
return sendCodecMeta;
|
||||
}
|
||||
|
||||
public void setSendCodecMeta(boolean sendCodecMeta) {
|
||||
this.sendCodecMeta = sendCodecMeta;
|
||||
@SuppressWarnings("MethodLength")
|
||||
public static Options parse(String... args) {
|
||||
if (args.length < 1) {
|
||||
throw new IllegalArgumentException("Missing client version");
|
||||
}
|
||||
|
||||
String clientVersion = args[0];
|
||||
if (!clientVersion.equals(BuildConfig.VERSION_NAME)) {
|
||||
throw new IllegalArgumentException(
|
||||
"The server version (" + BuildConfig.VERSION_NAME + ") does not match the client " + "(" + clientVersion + ")");
|
||||
}
|
||||
|
||||
Options options = new Options();
|
||||
|
||||
for (int i = 1; i < args.length; ++i) {
|
||||
String arg = args[i];
|
||||
int equalIndex = arg.indexOf('=');
|
||||
if (equalIndex == -1) {
|
||||
throw new IllegalArgumentException("Invalid key=value pair: \"" + arg + "\"");
|
||||
}
|
||||
String key = arg.substring(0, equalIndex);
|
||||
String value = arg.substring(equalIndex + 1);
|
||||
switch (key) {
|
||||
case "scid":
|
||||
int scid = Integer.parseInt(value, 0x10);
|
||||
if (scid < -1) {
|
||||
throw new IllegalArgumentException("scid may not be negative (except -1 for 'none'): " + scid);
|
||||
}
|
||||
options.scid = scid;
|
||||
break;
|
||||
case "log_level":
|
||||
options.logLevel = Ln.Level.valueOf(value.toUpperCase(Locale.ENGLISH));
|
||||
break;
|
||||
case "video":
|
||||
options.video = Boolean.parseBoolean(value);
|
||||
break;
|
||||
case "audio":
|
||||
options.audio = Boolean.parseBoolean(value);
|
||||
break;
|
||||
case "video_codec":
|
||||
VideoCodec videoCodec = VideoCodec.findByName(value);
|
||||
if (videoCodec == null) {
|
||||
throw new IllegalArgumentException("Video codec " + value + " not supported");
|
||||
}
|
||||
options.videoCodec = videoCodec;
|
||||
break;
|
||||
case "audio_codec":
|
||||
AudioCodec audioCodec = AudioCodec.findByName(value);
|
||||
if (audioCodec == null) {
|
||||
throw new IllegalArgumentException("Audio codec " + value + " not supported");
|
||||
}
|
||||
options.audioCodec = audioCodec;
|
||||
break;
|
||||
case "audio_source":
|
||||
AudioSource audioSource = AudioSource.findByName(value);
|
||||
if (audioSource == null) {
|
||||
throw new IllegalArgumentException("Audio source " + value + " not supported");
|
||||
}
|
||||
options.audioSource = audioSource;
|
||||
break;
|
||||
case "max_size":
|
||||
options.maxSize = Integer.parseInt(value) & ~7; // multiple of 8
|
||||
break;
|
||||
case "video_bit_rate":
|
||||
options.videoBitRate = Integer.parseInt(value);
|
||||
break;
|
||||
case "audio_bit_rate":
|
||||
options.audioBitRate = Integer.parseInt(value);
|
||||
break;
|
||||
case "max_fps":
|
||||
options.maxFps = Integer.parseInt(value);
|
||||
break;
|
||||
case "lock_video_orientation":
|
||||
options.lockVideoOrientation = Integer.parseInt(value);
|
||||
break;
|
||||
case "tunnel_forward":
|
||||
options.tunnelForward = Boolean.parseBoolean(value);
|
||||
break;
|
||||
case "crop":
|
||||
options.crop = parseCrop(value);
|
||||
break;
|
||||
case "control":
|
||||
options.control = Boolean.parseBoolean(value);
|
||||
break;
|
||||
case "display_id":
|
||||
options.displayId = Integer.parseInt(value);
|
||||
break;
|
||||
case "show_touches":
|
||||
options.showTouches = Boolean.parseBoolean(value);
|
||||
break;
|
||||
case "stay_awake":
|
||||
options.stayAwake = Boolean.parseBoolean(value);
|
||||
break;
|
||||
case "video_codec_options":
|
||||
options.videoCodecOptions = CodecOption.parse(value);
|
||||
break;
|
||||
case "audio_codec_options":
|
||||
options.audioCodecOptions = CodecOption.parse(value);
|
||||
break;
|
||||
case "video_encoder":
|
||||
if (!value.isEmpty()) {
|
||||
options.videoEncoder = value;
|
||||
}
|
||||
break;
|
||||
case "audio_encoder":
|
||||
if (!value.isEmpty()) {
|
||||
options.audioEncoder = value;
|
||||
}
break;
|
||||
case "power_off_on_close":
|
||||
options.powerOffScreenOnClose = Boolean.parseBoolean(value);
|
||||
break;
|
||||
case "clipboard_autosync":
|
||||
options.clipboardAutosync = Boolean.parseBoolean(value);
|
||||
break;
|
||||
case "downsize_on_error":
|
||||
options.downsizeOnError = Boolean.parseBoolean(value);
|
||||
break;
|
||||
case "cleanup":
|
||||
options.cleanup = Boolean.parseBoolean(value);
|
||||
break;
|
||||
case "power_on":
|
||||
options.powerOn = Boolean.parseBoolean(value);
|
||||
break;
|
||||
case "list_encoders":
|
||||
options.listEncoders = Boolean.parseBoolean(value);
|
||||
break;
|
||||
case "list_displays":
|
||||
options.listDisplays = Boolean.parseBoolean(value);
|
||||
break;
|
||||
case "send_device_meta":
|
||||
options.sendDeviceMeta = Boolean.parseBoolean(value);
|
||||
break;
|
||||
case "send_frame_meta":
|
||||
options.sendFrameMeta = Boolean.parseBoolean(value);
|
||||
break;
|
||||
case "send_dummy_byte":
|
||||
options.sendDummyByte = Boolean.parseBoolean(value);
|
||||
break;
|
||||
case "send_codec_meta":
|
||||
options.sendCodecMeta = Boolean.parseBoolean(value);
|
||||
break;
|
||||
case "raw_video_stream":
|
||||
boolean rawVideoStream = Boolean.parseBoolean(value);
|
||||
if (rawVideoStream) {
|
||||
options.sendDeviceMeta = false;
|
||||
options.sendFrameMeta = false;
|
||||
options.sendDummyByte = false;
|
||||
options.sendCodecMeta = false;
|
||||
}
|
||||
break;
|
||||
default:
|
||||
Ln.w("Unknown server option: " + key);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
return options;
|
||||
}
|
||||
|
||||
private static Rect parseCrop(String crop) {
|
||||
if (crop.isEmpty()) {
|
||||
return null;
|
||||
}
|
||||
// input format: "width:height:x:y"
|
||||
String[] tokens = crop.split(":");
|
||||
if (tokens.length != 4) {
|
||||
throw new IllegalArgumentException("Crop must contains 4 values separated by colons: \"" + crop + "\"");
|
||||
}
|
||||
int width = Integer.parseInt(tokens[0]);
|
||||
int height = Integer.parseInt(tokens[1]);
|
||||
int x = Integer.parseInt(tokens[2]);
|
||||
int y = Integer.parseInt(tokens[3]);
|
||||
return new Rect(x, y, x + width, y + height);
|
||||
}
|
||||
}
|
||||
|
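Since `Options.parse()` now owns all of the server-side argument parsing (moved out of `Server.createOptions()` below), a quick illustration of how it consumes the key=value arguments, including the new `audio_source` key; the values are examples only, and the first argument must match `BuildConfig.VERSION_NAME`:

Options options = Options.parse(BuildConfig.VERSION_NAME, "audio_source=mic", "video=false", "crop=1224:1440:0:0");
// options.getAudioSource() == AudioSource.MIC
// options.getVideo() == false
// options.getCrop() equals new Rect(0, 0, 1224, 1440)   ("width:height:x:y" -> Rect(x, y, x + width, y + height))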
@ -16,7 +16,7 @@ import java.nio.ByteBuffer;
|
||||
import java.util.List;
|
||||
import java.util.concurrent.atomic.AtomicBoolean;
|
||||
|
||||
public class ScreenEncoder implements Device.RotationListener {
|
||||
public class ScreenEncoder implements Device.RotationListener, Device.FoldListener, AsyncProcessor {
|
||||
|
||||
private static final int DEFAULT_I_FRAME_INTERVAL = 10; // seconds
|
||||
private static final int REPEAT_FRAME_DELAY_US = 100_000; // repeat after 100ms
|
||||
@ -26,7 +26,7 @@ public class ScreenEncoder implements Device.RotationListener {
|
||||
private static final int[] MAX_SIZE_FALLBACK = {2560, 1920, 1600, 1280, 1024, 800};
|
||||
private static final int MAX_CONSECUTIVE_ERRORS = 3;
|
||||
|
||||
private final AtomicBoolean rotationChanged = new AtomicBoolean();
|
||||
private final AtomicBoolean resetCapture = new AtomicBoolean();
|
||||
|
||||
private final Device device;
|
||||
private final Streamer streamer;
|
||||
@ -39,6 +39,9 @@ public class ScreenEncoder implements Device.RotationListener {
|
||||
private boolean firstFrameSent;
|
||||
private int consecutiveErrors;
|
||||
|
||||
private Thread thread;
|
||||
private final AtomicBoolean stopped = new AtomicBoolean();
|
||||
|
||||
public ScreenEncoder(Device device, Streamer streamer, int videoBitRate, int maxFps, List<CodecOption> codecOptions, String encoderName,
|
||||
boolean downsizeOnError) {
|
||||
this.device = device;
|
||||
@ -50,21 +53,27 @@ public class ScreenEncoder implements Device.RotationListener {
|
||||
this.downsizeOnError = downsizeOnError;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFoldChanged(int displayId, boolean folded) {
|
||||
resetCapture.set(true);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onRotationChanged(int rotation) {
|
||||
rotationChanged.set(true);
|
||||
resetCapture.set(true);
|
||||
}
|
||||
|
||||
public boolean consumeRotationChange() {
|
||||
return rotationChanged.getAndSet(false);
|
||||
private boolean consumeResetCapture() {
|
||||
return resetCapture.getAndSet(false);
|
||||
}
|
||||
|
||||
public void streamScreen() throws IOException, ConfigurationException {
|
||||
private void streamScreen() throws IOException, ConfigurationException {
|
||||
Codec codec = streamer.getCodec();
|
||||
MediaCodec mediaCodec = createMediaCodec(codec, encoderName);
|
||||
MediaFormat format = createFormat(codec.getMimeType(), videoBitRate, maxFps, codecOptions);
|
||||
IBinder display = createDisplay();
|
||||
device.setRotationListener(this);
|
||||
device.setFoldListener(this);
|
||||
|
||||
streamer.writeVideoHeader(device.getScreenInfo().getVideoSize());
|
||||
|
||||
@ -112,6 +121,7 @@ public class ScreenEncoder implements Device.RotationListener {
|
||||
} finally {
|
||||
mediaCodec.release();
|
||||
device.setRotationListener(null);
|
||||
device.setFoldListener(null);
|
||||
SurfaceControl.destroyDisplay(display);
|
||||
}
|
||||
}
|
||||
@ -163,12 +173,17 @@ public class ScreenEncoder implements Device.RotationListener {
|
||||
|
||||
private boolean encode(MediaCodec codec, Streamer streamer) throws IOException {
|
||||
boolean eof = false;
|
||||
boolean alive = true;
|
||||
MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
|
||||
|
||||
while (!consumeRotationChange() && !eof) {
|
||||
while (!consumeResetCapture() && !eof) {
|
||||
if (stopped.get()) {
|
||||
alive = false;
|
||||
break;
|
||||
}
|
||||
int outputBufferId = codec.dequeueOutputBuffer(bufferInfo, -1);
|
||||
try {
|
||||
if (consumeRotationChange()) {
|
||||
if (consumeResetCapture()) {
|
||||
// must restart encoding with new size
|
||||
break;
|
||||
}
|
||||
@ -193,7 +208,7 @@ public class ScreenEncoder implements Device.RotationListener {
|
||||
}
|
||||
}
|
||||
|
||||
return !eof;
|
||||
return !eof && alive;
|
||||
}
|
||||
|
||||
private static MediaCodec createMediaCodec(Codec codec, String encoderName) throws IOException, ConfigurationException {
|
||||
@ -267,4 +282,38 @@ public class ScreenEncoder implements Device.RotationListener {
|
||||
SurfaceControl.closeTransaction();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void start(TerminationListener listener) {
|
||||
thread = new Thread(() -> {
|
||||
try {
|
||||
streamScreen();
|
||||
} catch (ConfigurationException e) {
|
||||
// Do not print stack trace, a user-friendly error-message has already been logged
|
||||
} catch (IOException e) {
|
||||
// Broken pipe is expected on close, because the socket is closed by the client
|
||||
if (!IO.isBrokenPipe(e)) {
|
||||
Ln.e("Video encoding error", e);
|
||||
}
|
||||
} finally {
|
||||
Ln.d("Screen streaming stopped");
|
||||
listener.onTerminated(true);
|
||||
}
|
||||
});
|
||||
thread.start();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void stop() {
|
||||
if (thread != null) {
|
||||
stopped.set(true);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void join() throws InterruptedException {
|
||||
if (thread != null) {
|
||||
thread.join();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
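`ScreenEncoder` now implements `AsyncProcessor` itself, so stopping it is cooperative: `stop()` only sets the `stopped` flag, the encoding thread observes it on its next loop iteration, `encode()` returns false, and the surrounding capture loop exits. The same pattern in isolation (illustrative, not scrcpy code):

import java.util.concurrent.atomic.AtomicBoolean;

final class StoppableWorker {

    private final AtomicBoolean stopped = new AtomicBoolean();

    void runLoop() {
        while (!stopped.get()) {
            // process one buffer, then re-check the flag
        }
    }

    void stop() {
        stopped.set(true); // observed at the next iteration; nothing is forcibly interrupted
    }
}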
@ -1,16 +1,43 @@
|
||||
package com.genymobile.scrcpy;
|
||||
|
||||
import android.graphics.Rect;
|
||||
import android.os.BatteryManager;
|
||||
import android.os.Build;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Locale;
|
||||
|
||||
public final class Server {
|
||||
|
||||
private static class Completion {
|
||||
private int running;
|
||||
private boolean fatalError;
|
||||
|
||||
Completion(int running) {
|
||||
this.running = running;
|
||||
}
|
||||
|
||||
synchronized void addCompleted(boolean fatalError) {
|
||||
--running;
|
||||
if (fatalError) {
|
||||
this.fatalError = true;
|
||||
}
|
||||
if (running == 0 || this.fatalError) {
|
||||
notify();
|
||||
}
|
||||
}
|
||||
|
||||
synchronized void await() {
|
||||
try {
|
||||
while (running > 0 && !fatalError) {
|
||||
wait();
|
||||
}
|
||||
} catch (InterruptedException e) {
|
||||
// ignore
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private Server() {
|
||||
// not instantiable
|
||||
}
|
||||
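The `Completion` helper above replaces the old model where the main thread ran the screen encoder synchronously: the main thread now simply waits on this latch, each processor's `TerminationListener` decrements it, and it is released early as soon as any processor reports a fatal error. Its intended use, mirroring the code further below:

Completion completion = new Completion(asyncProcessors.size());
for (AsyncProcessor processor : asyncProcessors) {
    processor.start(fatalError -> completion.addCompleted(fatalError));
}
completion.await(); // returns when every processor has ended, or as soon as one fails fatally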
@ -68,6 +95,7 @@ public final class Server {
|
||||
int scid = options.getScid();
|
||||
boolean tunnelForward = options.isTunnelForward();
|
||||
boolean control = options.getControl();
|
||||
boolean video = options.getVideo();
|
||||
boolean audio = options.getAudio();
|
||||
boolean sendDummyByte = options.getSendDummyByte();
|
||||
|
||||
@ -94,7 +122,8 @@ public final class Server {
|
||||
|
||||
List<AsyncProcessor> asyncProcessors = new ArrayList<>();
|
||||
|
||||
try (DesktopConnection connection = DesktopConnection.open(scid, tunnelForward, audio, control, sendDummyByte)) {
|
||||
DesktopConnection connection = DesktopConnection.open(scid, tunnelForward, video, audio, control, sendDummyByte);
|
||||
try {
|
||||
if (options.getSendDeviceMeta()) {
|
||||
connection.sendDeviceMeta(Device.getDeviceName());
|
||||
}
|
||||
@ -107,38 +136,36 @@ public final class Server {
|
||||
|
||||
if (audio) {
|
||||
AudioCodec audioCodec = options.getAudioCodec();
|
||||
AudioCapture audioCapture = new AudioCapture(options.getAudioSource());
|
||||
Streamer audioStreamer = new Streamer(connection.getAudioFd(), audioCodec, options.getSendCodecMeta(),
|
||||
options.getSendFrameMeta());
|
||||
AsyncProcessor audioRecorder;
|
||||
if (audioCodec == AudioCodec.RAW) {
|
||||
audioRecorder = new AudioRawRecorder(audioStreamer);
|
||||
audioRecorder = new AudioRawRecorder(audioCapture, audioStreamer);
|
||||
} else {
|
||||
audioRecorder = new AudioEncoder(audioStreamer, options.getAudioBitRate(), options.getAudioCodecOptions(),
|
||||
audioRecorder = new AudioEncoder(audioCapture, audioStreamer, options.getAudioBitRate(), options.getAudioCodecOptions(),
|
||||
options.getAudioEncoder());
|
||||
}
|
||||
asyncProcessors.add(audioRecorder);
|
||||
}
|
||||
|
||||
Streamer videoStreamer = new Streamer(connection.getVideoFd(), options.getVideoCodec(), options.getSendCodecMeta(),
|
||||
options.getSendFrameMeta());
|
||||
ScreenEncoder screenEncoder = new ScreenEncoder(device, videoStreamer, options.getVideoBitRate(), options.getMaxFps(),
|
||||
options.getVideoCodecOptions(), options.getVideoEncoder(), options.getDownsizeOnError());
|
||||
if (video) {
|
||||
Streamer videoStreamer = new Streamer(connection.getVideoFd(), options.getVideoCodec(), options.getSendCodecMeta(),
|
||||
options.getSendFrameMeta());
|
||||
ScreenEncoder screenEncoder = new ScreenEncoder(device, videoStreamer, options.getVideoBitRate(), options.getMaxFps(),
|
||||
options.getVideoCodecOptions(), options.getVideoEncoder(), options.getDownsizeOnError());
|
||||
asyncProcessors.add(screenEncoder);
|
||||
}
|
||||
|
||||
Completion completion = new Completion(asyncProcessors.size());
|
||||
for (AsyncProcessor asyncProcessor : asyncProcessors) {
|
||||
asyncProcessor.start();
|
||||
asyncProcessor.start((fatalError) -> {
|
||||
completion.addCompleted(fatalError);
|
||||
});
|
||||
}
|
||||
|
||||
try {
|
||||
// synchronous
|
||||
screenEncoder.streamScreen();
|
||||
} catch (IOException e) {
|
||||
// Broken pipe is expected on close, because the socket is closed by the client
|
||||
if (!IO.isBrokenPipe(e)) {
|
||||
Ln.e("Video encoding error", e);
|
||||
}
|
||||
}
|
||||
completion.await();
|
||||
} finally {
|
||||
Ln.d("Screen streaming stopped");
|
||||
initThread.interrupt();
|
||||
for (AsyncProcessor asyncProcessor : asyncProcessors) {
|
||||
asyncProcessor.stop();
|
||||
@ -152,6 +179,8 @@ public final class Server {
|
||||
} catch (InterruptedException e) {
|
||||
// ignore
|
||||
}
|
||||
|
||||
connection.close();
|
||||
}
|
||||
}
|
||||
|
||||
@ -161,203 +190,12 @@ public final class Server {
|
||||
return thread;
|
||||
}
|
||||
|
||||
@SuppressWarnings("MethodLength")
|
||||
private static Options createOptions(String... args) {
|
||||
if (args.length < 1) {
|
||||
throw new IllegalArgumentException("Missing client version");
|
||||
}
|
||||
|
||||
String clientVersion = args[0];
|
||||
if (!clientVersion.equals(BuildConfig.VERSION_NAME)) {
|
||||
throw new IllegalArgumentException(
|
||||
"The server version (" + BuildConfig.VERSION_NAME + ") does not match the client " + "(" + clientVersion + ")");
|
||||
}
|
||||
|
||||
Options options = new Options();
|
||||
|
||||
for (int i = 1; i < args.length; ++i) {
|
||||
String arg = args[i];
|
||||
int equalIndex = arg.indexOf('=');
|
||||
if (equalIndex == -1) {
|
||||
throw new IllegalArgumentException("Invalid key=value pair: \"" + arg + "\"");
|
||||
}
|
||||
String key = arg.substring(0, equalIndex);
|
||||
String value = arg.substring(equalIndex + 1);
|
||||
switch (key) {
|
||||
case "scid":
|
||||
int scid = Integer.parseInt(value, 0x10);
|
||||
if (scid < -1) {
|
||||
throw new IllegalArgumentException("scid may not be negative (except -1 for 'none'): " + scid);
|
||||
}
|
||||
options.setScid(scid);
|
||||
break;
|
||||
case "log_level":
|
||||
Ln.Level level = Ln.Level.valueOf(value.toUpperCase(Locale.ENGLISH));
|
||||
options.setLogLevel(level);
|
||||
break;
|
||||
case "audio":
|
||||
boolean audio = Boolean.parseBoolean(value);
|
||||
options.setAudio(audio);
|
||||
break;
|
||||
case "video_codec":
|
||||
VideoCodec videoCodec = VideoCodec.findByName(value);
|
||||
if (videoCodec == null) {
|
||||
throw new IllegalArgumentException("Video codec " + value + " not supported");
|
||||
}
|
||||
options.setVideoCodec(videoCodec);
|
||||
break;
|
||||
case "audio_codec":
|
||||
AudioCodec audioCodec = AudioCodec.findByName(value);
|
||||
if (audioCodec == null) {
|
||||
throw new IllegalArgumentException("Audio codec " + value + " not supported");
|
||||
}
|
||||
options.setAudioCodec(audioCodec);
|
||||
break;
|
||||
case "max_size":
|
||||
int maxSize = Integer.parseInt(value) & ~7; // multiple of 8
|
||||
options.setMaxSize(maxSize);
|
||||
break;
|
||||
case "video_bit_rate":
|
||||
int videoBitRate = Integer.parseInt(value);
|
||||
options.setVideoBitRate(videoBitRate);
|
||||
break;
|
||||
case "audio_bit_rate":
|
||||
int audioBitRate = Integer.parseInt(value);
|
||||
options.setAudioBitRate(audioBitRate);
|
||||
break;
|
||||
case "max_fps":
|
||||
int maxFps = Integer.parseInt(value);
|
||||
options.setMaxFps(maxFps);
|
||||
break;
|
||||
case "lock_video_orientation":
|
||||
int lockVideoOrientation = Integer.parseInt(value);
|
||||
options.setLockVideoOrientation(lockVideoOrientation);
|
||||
break;
|
||||
case "tunnel_forward":
|
||||
boolean tunnelForward = Boolean.parseBoolean(value);
|
||||
options.setTunnelForward(tunnelForward);
|
||||
break;
|
||||
case "crop":
|
||||
Rect crop = parseCrop(value);
|
||||
options.setCrop(crop);
|
||||
break;
|
||||
case "control":
|
||||
boolean control = Boolean.parseBoolean(value);
|
||||
options.setControl(control);
|
||||
break;
|
||||
case "display_id":
|
||||
int displayId = Integer.parseInt(value);
|
||||
options.setDisplayId(displayId);
|
||||
break;
|
||||
case "show_touches":
|
||||
boolean showTouches = Boolean.parseBoolean(value);
|
||||
options.setShowTouches(showTouches);
|
||||
break;
|
||||
case "stay_awake":
|
||||
boolean stayAwake = Boolean.parseBoolean(value);
|
||||
options.setStayAwake(stayAwake);
|
||||
break;
|
||||
case "video_codec_options":
|
||||
List<CodecOption> videoCodecOptions = CodecOption.parse(value);
|
||||
options.setVideoCodecOptions(videoCodecOptions);
|
||||
break;
|
||||
case "audio_codec_options":
|
||||
List<CodecOption> audioCodecOptions = CodecOption.parse(value);
|
||||
options.setAudioCodecOptions(audioCodecOptions);
|
||||
break;
|
||||
case "video_encoder":
|
||||
if (!value.isEmpty()) {
|
||||
options.setVideoEncoder(value);
|
||||
}
|
||||
break;
|
||||
case "audio_encoder":
|
||||
if (!value.isEmpty()) {
|
||||
options.setAudioEncoder(value);
|
||||
}
|
||||
case "power_off_on_close":
|
||||
boolean powerOffScreenOnClose = Boolean.parseBoolean(value);
|
||||
options.setPowerOffScreenOnClose(powerOffScreenOnClose);
|
||||
break;
|
||||
case "clipboard_autosync":
|
||||
boolean clipboardAutosync = Boolean.parseBoolean(value);
|
||||
options.setClipboardAutosync(clipboardAutosync);
|
||||
break;
|
||||
case "downsize_on_error":
|
||||
boolean downsizeOnError = Boolean.parseBoolean(value);
|
||||
options.setDownsizeOnError(downsizeOnError);
|
||||
break;
|
||||
case "cleanup":
|
||||
boolean cleanup = Boolean.parseBoolean(value);
|
||||
options.setCleanup(cleanup);
|
||||
break;
|
||||
case "power_on":
|
||||
boolean powerOn = Boolean.parseBoolean(value);
|
||||
options.setPowerOn(powerOn);
|
||||
break;
|
||||
case "list_encoders":
|
||||
boolean listEncoders = Boolean.parseBoolean(value);
|
||||
options.setListEncoders(listEncoders);
|
||||
break;
|
||||
case "list_displays":
|
||||
boolean listDisplays = Boolean.parseBoolean(value);
|
||||
options.setListDisplays(listDisplays);
|
||||
break;
|
||||
case "send_device_meta":
|
||||
boolean sendDeviceMeta = Boolean.parseBoolean(value);
|
||||
options.setSendDeviceMeta(sendDeviceMeta);
|
||||
break;
|
||||
case "send_frame_meta":
|
||||
boolean sendFrameMeta = Boolean.parseBoolean(value);
|
||||
options.setSendFrameMeta(sendFrameMeta);
|
||||
break;
|
||||
case "send_dummy_byte":
|
||||
boolean sendDummyByte = Boolean.parseBoolean(value);
|
||||
options.setSendDummyByte(sendDummyByte);
|
||||
break;
|
||||
case "send_codec_meta":
|
||||
boolean sendCodecMeta = Boolean.parseBoolean(value);
|
||||
options.setSendCodecMeta(sendCodecMeta);
|
||||
break;
|
||||
case "raw_video_stream":
|
||||
boolean rawVideoStream = Boolean.parseBoolean(value);
|
||||
if (rawVideoStream) {
|
||||
options.setSendDeviceMeta(false);
|
||||
options.setSendFrameMeta(false);
|
||||
options.setSendDummyByte(false);
|
||||
options.setSendCodecMeta(false);
|
||||
}
|
||||
break;
|
||||
default:
|
||||
Ln.w("Unknown server option: " + key);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
return options;
|
||||
}
|
||||
|
||||
private static Rect parseCrop(String crop) {
|
||||
if (crop.isEmpty()) {
|
||||
return null;
|
||||
}
|
||||
// input format: "width:height:x:y"
|
||||
String[] tokens = crop.split(":");
|
||||
if (tokens.length != 4) {
|
||||
throw new IllegalArgumentException("Crop must contains 4 values separated by colons: \"" + crop + "\"");
|
||||
}
|
||||
int width = Integer.parseInt(tokens[0]);
|
||||
int height = Integer.parseInt(tokens[1]);
|
||||
int x = Integer.parseInt(tokens[2]);
|
||||
int y = Integer.parseInt(tokens[3]);
|
||||
return new Rect(x, y, x + width, y + height);
|
||||
}
|
||||
|
||||
public static void main(String... args) throws Exception {
|
||||
Thread.setDefaultUncaughtExceptionHandler((t, e) -> {
|
||||
Ln.e("Exception on thread " + t, e);
|
||||
});
|
||||
|
||||
Options options = createOptions(args);
|
||||
Options options = Options.parse(args);
|
||||
|
||||
Ln.initLogLevel(options.getLogLevel());
|
||||
|
||||
|
@ -1,13 +1,22 @@
|
||||
package com.genymobile.scrcpy;
|
||||
|
||||
import android.annotation.SuppressLint;
|
||||
import android.annotation.TargetApi;
|
||||
import android.app.Application;
|
||||
import android.content.AttributionSource;
|
||||
import android.content.ContextWrapper;
|
||||
import android.content.pm.ApplicationInfo;
|
||||
import android.media.AudioAttributes;
|
||||
import android.media.AudioManager;
|
||||
import android.media.AudioRecord;
|
||||
import android.os.Build;
|
||||
import android.os.Looper;
|
||||
import android.os.Parcel;
|
||||
|
||||
import java.lang.ref.WeakReference;
|
||||
import java.lang.reflect.Constructor;
|
||||
import java.lang.reflect.Field;
|
||||
import java.lang.reflect.Method;
|
||||
|
||||
public final class Workarounds {
|
||||
|
||||
@ -95,4 +104,140 @@ public final class Workarounds {
|
||||
Ln.d("Could not fill app context: " + throwable.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
@TargetApi(Build.VERSION_CODES.R)
|
||||
@SuppressLint({"WrongConstant", "MissingPermission", "BlockedPrivateApi", "SoonBlockedPrivateApi"})
|
||||
public static AudioRecord createAudioRecord(int source, int sampleRate, int channelConfig, int channels, int channelMask, int encoding) {
|
||||
// Vivo (and maybe some other third-party ROMs) modified `AudioRecord`'s constructor, requiring a `Context` from a real app environment.
|
||||
//
|
||||
// This method invokes the `AudioRecord(long nativeRecordInJavaObj)` constructor to create an empty `AudioRecord` instance, then uses
|
||||
// reflection to initialize it the way the normal constructor (or `AudioRecord.Builder.build()`) does.
|
||||
// As a result, the modified constructor code is never executed.
|
||||
try {
|
||||
// AudioRecord audioRecord = new AudioRecord(0L);
|
||||
Constructor<AudioRecord> audioRecordConstructor = AudioRecord.class.getDeclaredConstructor(long.class);
|
||||
audioRecordConstructor.setAccessible(true);
|
||||
AudioRecord audioRecord = audioRecordConstructor.newInstance(0L);
|
||||
|
||||
// audioRecord.mRecordingState = RECORDSTATE_STOPPED;
|
||||
Field mRecordingStateField = AudioRecord.class.getDeclaredField("mRecordingState");
|
||||
mRecordingStateField.setAccessible(true);
|
||||
mRecordingStateField.set(audioRecord, AudioRecord.RECORDSTATE_STOPPED);
|
||||
|
||||
Looper looper = Looper.myLooper();
|
||||
if (looper == null) {
|
||||
looper = Looper.getMainLooper();
|
||||
}
|
||||
|
||||
// audioRecord.mInitializationLooper = looper;
|
||||
Field mInitializationLooperField = AudioRecord.class.getDeclaredField("mInitializationLooper");
|
||||
mInitializationLooperField.setAccessible(true);
|
||||
mInitializationLooperField.set(audioRecord, looper);
|
||||
|
||||
// Create `AudioAttributes` with fixed capture preset
|
||||
int capturePreset = source;
|
||||
AudioAttributes.Builder audioAttributesBuilder = new AudioAttributes.Builder();
|
||||
Method setInternalCapturePresetMethod = AudioAttributes.Builder.class.getMethod("setInternalCapturePreset", int.class);
|
||||
setInternalCapturePresetMethod.invoke(audioAttributesBuilder, capturePreset);
|
||||
AudioAttributes attributes = audioAttributesBuilder.build();
|
||||
|
||||
// audioRecord.mAudioAttributes = attributes;
|
||||
Field mAudioAttributesField = AudioRecord.class.getDeclaredField("mAudioAttributes");
|
||||
mAudioAttributesField.setAccessible(true);
|
||||
mAudioAttributesField.set(audioRecord, attributes);
|
||||
|
||||
// audioRecord.audioParamCheck(capturePreset, sampleRate, encoding);
|
||||
Method audioParamCheckMethod = AudioRecord.class.getDeclaredMethod("audioParamCheck", int.class, int.class, int.class);
|
||||
audioParamCheckMethod.setAccessible(true);
|
||||
audioParamCheckMethod.invoke(audioRecord, capturePreset, sampleRate, encoding);
|
||||
|
||||
// audioRecord.mChannelCount = channels
|
||||
Field mChannelCountField = AudioRecord.class.getDeclaredField("mChannelCount");
|
||||
mChannelCountField.setAccessible(true);
|
||||
mChannelCountField.set(audioRecord, channels);
|
||||
|
||||
// audioRecord.mChannelMask = channelMask
|
||||
Field mChannelMaskField = AudioRecord.class.getDeclaredField("mChannelMask");
|
||||
mChannelMaskField.setAccessible(true);
|
||||
mChannelMaskField.set(audioRecord, channelMask);
|
||||
|
||||
int minBufferSize = AudioRecord.getMinBufferSize(sampleRate, channelConfig, encoding);
|
||||
int bufferSizeInBytes = minBufferSize * 8;
|
||||
|
||||
// audioRecord.audioBuffSizeCheck(bufferSizeInBytes)
|
||||
Method audioBuffSizeCheckMethod = AudioRecord.class.getDeclaredMethod("audioBuffSizeCheck", int.class);
|
||||
audioBuffSizeCheckMethod.setAccessible(true);
|
||||
audioBuffSizeCheckMethod.invoke(audioRecord, bufferSizeInBytes);
|
||||
|
||||
final int channelIndexMask = 0;
|
||||
|
||||
int[] sampleRateArray = new int[]{sampleRate};
|
||||
int[] session = new int[]{AudioManager.AUDIO_SESSION_ID_GENERATE};
|
||||
|
||||
int initResult;
|
||||
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.S) {
|
||||
// private native final int native_setup(Object audiorecord_this,
|
||||
// Object /*AudioAttributes*/ attributes,
|
||||
// int[] sampleRate, int channelMask, int channelIndexMask, int audioFormat,
|
||||
// int buffSizeInBytes, int[] sessionId, String opPackageName,
|
||||
// long nativeRecordInJavaObj);
|
||||
Method nativeSetupMethod = AudioRecord.class.getDeclaredMethod("native_setup", Object.class, Object.class, int[].class, int.class,
|
||||
int.class, int.class, int.class, int[].class, String.class, long.class);
|
||||
nativeSetupMethod.setAccessible(true);
|
||||
initResult = (int) nativeSetupMethod.invoke(audioRecord, new WeakReference<AudioRecord>(audioRecord), attributes, sampleRateArray,
|
||||
channelMask, channelIndexMask, audioRecord.getAudioFormat(), bufferSizeInBytes, session, FakeContext.get().getOpPackageName(),
|
||||
0L);
|
||||
} else {
|
||||
// Assume `context` is never `null`
|
||||
AttributionSource attributionSource = FakeContext.get().getAttributionSource();
|
||||
|
||||
// Assume `attributionSource.getPackageName()` is never null
|
||||
|
||||
// ScopedParcelState attributionSourceState = attributionSource.asScopedParcelState()
|
||||
Method asScopedParcelStateMethod = AttributionSource.class.getDeclaredMethod("asScopedParcelState");
|
||||
asScopedParcelStateMethod.setAccessible(true);
|
||||
|
||||
try (AutoCloseable attributionSourceState = (AutoCloseable) asScopedParcelStateMethod.invoke(attributionSource)) {
|
||||
Method getParcelMethod = attributionSourceState.getClass().getDeclaredMethod("getParcel");
|
||||
Parcel attributionSourceParcel = (Parcel) getParcelMethod.invoke(attributionSourceState);
|
||||
|
||||
// private native int native_setup(Object audiorecordThis,
|
||||
// Object /*AudioAttributes*/ attributes,
|
||||
// int[] sampleRate, int channelMask, int channelIndexMask, int audioFormat,
|
||||
// int buffSizeInBytes, int[] sessionId, @NonNull Parcel attributionSource,
|
||||
// long nativeRecordInJavaObj, int maxSharedAudioHistoryMs);
|
||||
Method nativeSetupMethod = AudioRecord.class.getDeclaredMethod("native_setup", Object.class, Object.class, int[].class, int.class,
|
||||
int.class, int.class, int.class, int[].class, Parcel.class, long.class, int.class);
|
||||
nativeSetupMethod.setAccessible(true);
|
||||
initResult = (int) nativeSetupMethod.invoke(audioRecord, new WeakReference<AudioRecord>(audioRecord), attributes, sampleRateArray,
|
||||
channelMask, channelIndexMask, audioRecord.getAudioFormat(), bufferSizeInBytes, session, attributionSourceParcel, 0L, 0);
|
||||
}
|
||||
}
|
||||
|
||||
if (initResult != AudioRecord.SUCCESS) {
|
||||
Ln.e("Error code " + initResult + " when initializing native AudioRecord object.");
|
||||
throw new RuntimeException("Cannot create AudioRecord");
|
||||
}
|
||||
|
||||
// mSampleRate = sampleRate[0]
|
||||
Field mSampleRateField = AudioRecord.class.getDeclaredField("mSampleRate");
|
||||
mSampleRateField.setAccessible(true);
|
||||
mSampleRateField.set(audioRecord, sampleRateArray[0]);
|
||||
|
||||
// audioRecord.mSessionId = session[0]
|
||||
Field mSessionIdField = AudioRecord.class.getDeclaredField("mSessionId");
|
||||
mSessionIdField.setAccessible(true);
|
||||
mSessionIdField.set(audioRecord, session[0]);
|
||||
|
||||
// audioRecord.mState = AudioRecord.STATE_INITIALIZED
|
||||
Field mStateField = AudioRecord.class.getDeclaredField("mState");
|
||||
mStateField.setAccessible(true);
|
||||
mStateField.set(audioRecord, AudioRecord.STATE_INITIALIZED);
|
||||
|
||||
return audioRecord;
|
||||
} catch (Exception e) {
|
||||
Ln.e("Failed to invoke AudioRecord.<init>.", e);
|
||||
throw new RuntimeException("Cannot create AudioRecord");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
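For context, this reflective constructor is only reached from the `NullPointerException` fallback in `AudioCapture.startRecording()` shown earlier; condensed, the call site looks like this:

try {
    recorder = createAudioRecord(audioSource); // normal AudioRecord.Builder path
} catch (NullPointerException e) {
    // Some ROMs (e.g. Vivo) patched the AudioRecord constructor, so build one via hidden APIs instead
    recorder = Workarounds.createAudioRecord(audioSource, SAMPLE_RATE, CHANNEL_CONFIG, CHANNELS, CHANNEL_MASK, ENCODING);
}

Note that every field and method accessed here (`mRecordingState`, `audioParamCheck`, `native_setup`, ...) is a hidden Android API, so this path may break on future Android releases; it is only a last-resort fallback.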
@ -37,8 +37,13 @@ public class ClipboardManager {
|
||||
getPrimaryClipMethod = manager.getClass().getMethod("getPrimaryClip", String.class, String.class, int.class);
|
||||
getMethodVersion = 1;
|
||||
} catch (NoSuchMethodException e2) {
|
||||
getPrimaryClipMethod = manager.getClass().getMethod("getPrimaryClip", String.class, String.class, int.class, int.class);
|
||||
getMethodVersion = 2;
|
||||
try {
|
||||
getPrimaryClipMethod = manager.getClass().getMethod("getPrimaryClip", String.class, String.class, int.class, int.class);
|
||||
getMethodVersion = 2;
|
||||
} catch (NoSuchMethodException e3) {
|
||||
getPrimaryClipMethod = manager.getClass().getMethod("getPrimaryClip", String.class, int.class, String.class);
|
||||
getMethodVersion = 3;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -80,8 +85,10 @@ public class ClipboardManager {
|
||||
return (ClipData) method.invoke(manager, FakeContext.PACKAGE_NAME, FakeContext.ROOT_UID);
|
||||
case 1:
|
||||
return (ClipData) method.invoke(manager, FakeContext.PACKAGE_NAME, null, FakeContext.ROOT_UID);
|
||||
default:
|
||||
case 2:
|
||||
return (ClipData) method.invoke(manager, FakeContext.PACKAGE_NAME, null, FakeContext.ROOT_UID, 0);
|
||||
default:
|
||||
return (ClipData) method.invoke(manager, FakeContext.PACKAGE_NAME, FakeContext.ROOT_UID, null);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -4,6 +4,7 @@ import com.genymobile.scrcpy.Ln;
|
||||
|
||||
import android.os.IInterface;
|
||||
import android.view.IRotationWatcher;
|
||||
import android.view.IDisplayFoldListener;
|
||||
|
||||
import java.lang.reflect.InvocationTargetException;
|
||||
import java.lang.reflect.Method;
|
||||
@ -108,4 +109,13 @@ public final class WindowManager {
|
||||
throw new AssertionError(e);
|
||||
}
|
||||
}
|
||||
|
||||
public void registerDisplayFoldListener(IDisplayFoldListener foldListener) {
|
||||
try {
|
||||
Class<?> cls = manager.getClass();
|
||||
cls.getMethod("registerDisplayFoldListener", IDisplayFoldListener.class).invoke(manager, foldListener);
|
||||
} catch (Exception e) {
|
||||
throw new AssertionError(e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|