Compare commits
16 Commits
audio_outp...window_dis

Commits:
dd5dc9c3c2
f93a5a68c6
669e9a8d1e
f77e1c474e
2f9396e24a
0ebb3df69c
2fff9b9edf
57f879d68a
3626d90004
02f4ff7534
a3871130cc
53cb5635cf
d7841664f4
39544f34b4
4755b97908
cba2501254
@@ -3,9 +3,11 @@ _scrcpy() {
     local opts="
         --always-on-top
         --audio-bit-rate=
+        --audio-buffer=
         --audio-codec=
         --audio-codec-options=
         --audio-encoder=
+        --audio-output-buffer=
         -b --video-bit-rate=
         --crop=
         -d --select-usb
@@ -115,20 +117,26 @@ _scrcpy() {
             COMPREPLY=($(compgen -W "$("${ADB:-adb}" devices | awk '$2 == "device" {print $1}')" -- ${cur}))
             return
             ;;
-        -b|--video-bit-rate \
-        |--codec-options \
+        --audio-bit-rate \
+        |--audio-buffer \
+        |-b|--video-bit-rate \
+        |--audio-codec-options \
+        |--audio-encoder \
+        |--audio-output-buffer \
         |--crop \
         |--display \
         |--display-buffer \
-        |--encoder \
         |--max-fps \
         |-m|--max-size \
         |-p|--port \
         |--push-target \
+        |--rotation \
         |--tunnel-host \
         |--tunnel-port \
        |--v4l2-buffer \
         |--v4l2-sink \
+        |--video-codec-options \
+        |--video-encoder \
         |--tcpip \
         |--window-*)
            # Option accepting an argument, but nothing to auto-complete
@@ -5,7 +5,7 @@ Comment=Display and control your Android device
 # For some users, the PATH or ADB environment variables are set from the shell
 # startup file, like .bashrc or .zshrc… Run an interactive shell to get
 # environment correctly initialized.
-Exec=/bin/bash --norc --noprofile -i -c "\"\\$SHELL\" -i -c scrcpy || read -p 'Press any key to quit...'"
+Exec=/bin/bash --norc --noprofile -i -c "\"\\$SHELL\" -i -c scrcpy || read -p 'Press Enter to quit...'"
 Icon=scrcpy
 Terminal=true
 Type=Application
@@ -10,9 +10,11 @@ local arguments
 arguments=(
     '--always-on-top[Make scrcpy window always on top \(above other windows\)]'
     '--audio-bit-rate=[Encode the audio at the given bit-rate]'
+    '--audio-buffer=[Configure the audio buffering delay (in milliseconds)]'
     '--audio-codec=[Select the audio codec]:codec:(opus aac raw)'
     '--audio-codec-options=[Set a list of comma-separated key\:type=value options for the device audio encoder]'
     '--audio-encoder=[Use a specific MediaCodec audio encoder]'
+    '--audio-output-buffer=[Configure the size of the SDL audio output buffer (in milliseconds)]'
     {-b,--video-bit-rate=}'[Encode the video at the given bit-rate]'
     '--crop=[\[width\:height\:x\:y\] Crop the device screen on the server]'
     {-d,--select-usb}'[Use USB device]'
@@ -14,6 +14,7 @@ src = [
     'src/delay_buffer.c',
     'src/demuxer.c',
     'src/device_msg.c',
+    'src/display.c',
     'src/icon.c',
     'src/file_pusher.c',
     'src/fps_counter.c',
@@ -277,10 +278,6 @@ if get_option('buildtype') == 'debug'
             'src/util/strbuf.c',
             'src/util/term.c',
         ]],
-        ['test_clock', [
-            'tests/test_clock.c',
-            'src/clock.c',
-        ]],
         ['test_control_msg_serialize', [
             'tests/test_control_msg_serialize.c',
             'src/control_msg.c',
@@ -310,7 +307,8 @@ if get_option('buildtype') == 'debug'
     ]
 
     foreach t : tests
-        exe = executable(t[0], t[1],
+        sources = t[1] + ['src/compat.c']
+        exe = executable(t[0], sources,
                          include_directories: src_dir,
                          dependencies: dependencies,
                          c_args: ['-DSDL_MAIN_HANDLED', '-DSC_TEST'])
@@ -33,6 +33,14 @@ Lower values decrease the latency, but increase the likelyhood of buffer underru
 
 Default is 50.
 
+.TP
+.BI "\-\-audio\-output\-buffer " ms
+Configure the size of the SDL audio output buffer (in milliseconds).
+
+If you get "robotic" audio playback, you should test with a higher value (10). Do not change this setting otherwise.
+
+Default is 5.
+
 .TP
 .BI "\-\-audio\-codec " name
 Select an audio codec (opus, aac or raw).
@@ -204,6 +204,7 @@ sc_adb_parse_device_ip(char *str) {
     while (str[idx_line] != '\0') {
         char *line = &str[idx_line];
         size_t len = strcspn(line, "\n");
+        bool is_last_line = line[len] == '\0';
 
         // The same, but without any trailing '\r'
         size_t line_len = sc_str_remove_trailing_cr(line, len);
@@ -215,12 +216,12 @@ sc_adb_parse_device_ip(char *str) {
             return ip;
         }
 
-        idx_line += len;
-        if (str[idx_line] != '\0') {
-            // The next line starts after the '\n'
-            ++idx_line;
+        if (is_last_line) {
+            break;
         }
 
+        // The next line starts after the '\n'
+        idx_line += len + 1;
     }
 
     return NULL;
@@ -59,8 +59,6 @@
 #define SC_AV_SAMPLE_FMT AV_SAMPLE_FMT_FLT
 #define SC_SDL_SAMPLE_FMT AUDIO_F32
 
-#define SC_AUDIO_OUTPUT_BUFFER_MS 5
-
 #define TO_BYTES(SAMPLES) sc_audiobuf_to_bytes(&ap->buf, (SAMPLES))
 #define TO_SAMPLES(BYTES) sc_audiobuf_to_samples(&ap->buf, (BYTES))
 
@@ -230,8 +228,8 @@ sc_audio_player_frame_sink_push(struct sc_frame_sink *sink,
 
     if (played) {
         uint32_t max_buffered_samples = ap->target_buffering
-                + 12 * SC_AUDIO_OUTPUT_BUFFER_MS * ap->sample_rate / 1000
+                + 12 * ap->output_buffer
                 + ap->target_buffering / 10;
         if (buffered_samples > max_buffered_samples) {
             uint32_t skip_samples = buffered_samples - max_buffered_samples;
             sc_audiobuf_skip(&ap->buf, skip_samples);
@@ -246,7 +244,7 @@ sc_audio_player_frame_sink_push(struct sc_frame_sink *sink,
         // max_initial_buffering samples, this would cause unnecessary delay
         // (and glitches to compensate) on start.
         uint32_t max_initial_buffering = ap->target_buffering
-                + 2 * SC_AUDIO_OUTPUT_BUFFER_MS * ap->sample_rate / 1000;
+                + 2 * ap->output_buffer;
         if (buffered_samples > max_initial_buffering) {
             uint32_t skip_samples = buffered_samples - max_initial_buffering;
             sc_audiobuf_skip(&ap->buf, skip_samples);
@@ -333,11 +331,28 @@ sc_audio_player_frame_sink_open(struct sc_frame_sink *sink,
     unsigned nb_channels = tmp;
 #endif
 
+    assert(ctx->sample_rate > 0);
+    assert(!av_sample_fmt_is_planar(SC_AV_SAMPLE_FMT));
+    int out_bytes_per_sample = av_get_bytes_per_sample(SC_AV_SAMPLE_FMT);
+    assert(out_bytes_per_sample > 0);
+
+    ap->sample_rate = ctx->sample_rate;
+    ap->nb_channels = nb_channels;
+    ap->out_bytes_per_sample = out_bytes_per_sample;
+
+    ap->target_buffering = ap->target_buffering_delay * ap->sample_rate
+                         / SC_TICK_FREQ;
+
+    uint64_t aout_samples = ap->output_buffer_duration * ap->sample_rate
+                          / SC_TICK_FREQ;
+    assert(aout_samples <= 0xFFFF);
+    ap->output_buffer = (uint16_t) aout_samples;
+
     SDL_AudioSpec desired = {
         .freq = ctx->sample_rate,
         .format = SC_SDL_SAMPLE_FMT,
         .channels = nb_channels,
-        .samples = SC_AUDIO_OUTPUT_BUFFER_MS * ctx->sample_rate / 1000,
+        .samples = aout_samples,
         .callback = sc_audio_player_sdl_callback,
         .userdata = ap,
     };
@@ -356,11 +371,6 @@ sc_audio_player_frame_sink_open(struct sc_frame_sink *sink,
     }
     ap->swr_ctx = swr_ctx;
 
-    assert(ctx->sample_rate > 0);
-    assert(!av_sample_fmt_is_planar(SC_AV_SAMPLE_FMT));
-    int out_bytes_per_sample = av_get_bytes_per_sample(SC_AV_SAMPLE_FMT);
-    assert(out_bytes_per_sample > 0);
-
 #ifdef SCRCPY_LAVU_HAS_CHLAYOUT
     av_opt_set_chlayout(swr_ctx, "in_chlayout", &ctx->ch_layout, 0);
     av_opt_set_chlayout(swr_ctx, "out_chlayout", &ctx->ch_layout, 0);
@@ -383,13 +393,6 @@ sc_audio_player_frame_sink_open(struct sc_frame_sink *sink,
         goto error_free_swr_ctx;
     }
 
-    ap->sample_rate = ctx->sample_rate;
-    ap->nb_channels = nb_channels;
-    ap->out_bytes_per_sample = out_bytes_per_sample;
-
-    ap->target_buffering = ap->target_buffering_delay * ap->sample_rate
-                         / SC_TICK_FREQ;
-
     // Use a ring-buffer of the target buffering size plus 1 second between the
     // producer and the consumer. It's too big on purpose, to guarantee that
     // the producer and the consumer will be able to access it in parallel
@@ -458,8 +461,10 @@ sc_audio_player_frame_sink_close(struct sc_frame_sink *sink) {
 }
 
 void
-sc_audio_player_init(struct sc_audio_player *ap, sc_tick target_buffering) {
+sc_audio_player_init(struct sc_audio_player *ap, sc_tick target_buffering,
+                     sc_tick output_buffer_duration) {
     ap->target_buffering_delay = target_buffering;
+    ap->output_buffer_duration = output_buffer_duration;
 
     static const struct sc_frame_sink_ops ops = {
         .open = sc_audio_player_frame_sink_open,
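For reference, the open() hunk above converts the two configured durations into sample counts. The standalone sketch below reproduces that arithmetic; it is illustrative only and assumes sc_tick counts microseconds (SC_TICK_FREQ == 1000000) and a typical 48 kHz stream:

#include <stdint.h>
#include <stdio.h>

#define SC_TICK_FREQ 1000000                      // assumed: ticks per second (microseconds)
#define SC_TICK_FROM_MS(ms) ((int64_t) (ms) * 1000)

int main(void) {
    int64_t target_buffering_delay = SC_TICK_FROM_MS(50); // --audio-buffer default
    int64_t output_buffer_duration = SC_TICK_FROM_MS(5);  // --audio-output-buffer default
    int64_t sample_rate = 48000;                           // assumed sample rate

    uint32_t target_buffering = target_buffering_delay * sample_rate / SC_TICK_FREQ;
    uint64_t aout_samples = output_buffer_duration * sample_rate / SC_TICK_FREQ;

    // SDL_AudioSpec.samples is a Uint16, hence the assert(aout_samples <= 0xFFFF)
    printf("target buffering = %u samples\n", (unsigned) target_buffering);   // 2400
    printf("SDL output buffer = %u samples\n", (unsigned) aout_samples);      // 240
    return 0;
}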
@@ -27,6 +27,10 @@ struct sc_audio_player {
     sc_tick target_buffering_delay;
     uint32_t target_buffering; // in samples
 
+    // SDL audio output buffer size.
+    sc_tick output_buffer_duration;
+    uint16_t output_buffer;
+
     // Audio buffer to communicate between the receiver and the SDL audio
     // callback (protected by SDL_AudioDeviceLock())
     struct sc_audiobuf buf;
@@ -80,6 +84,7 @@ struct sc_audio_player_callbacks {
 };
 
 void
-sc_audio_player_init(struct sc_audio_player *ap, sc_tick target_buffering);
+sc_audio_player_init(struct sc_audio_player *ap, sc_tick target_buffering,
+                     sc_tick audio_output_buffer);
 
 #endif
@@ -71,6 +71,7 @@ enum {
     OPT_LIST_DISPLAYS,
     OPT_REQUIRE_AUDIO,
     OPT_AUDIO_BUFFER,
+    OPT_AUDIO_OUTPUT_BUFFER,
 };
 
 struct sc_option {
@@ -129,6 +130,16 @@ static const struct sc_option options[] = {
                 "likelyhood of buffer underrun (causing audio glitches).\n"
                 "Default is 50.",
     },
+    {
+        .longopt_id = OPT_AUDIO_OUTPUT_BUFFER,
+        .longopt = "audio-output-buffer",
+        .argdesc = "ms",
+        .text = "Configure the size of the SDL audio output buffer (in "
+                "milliseconds).\n"
+                "If you get \"robotic\" audio playback, you should test with "
+                "a higher value (10). Do not change this setting otherwise.\n"
+                "Default is 5.",
+    },
     {
         .longopt_id = OPT_AUDIO_CODEC,
         .longopt = "audio-codec",
@@ -1204,6 +1215,19 @@ parse_buffering_time(const char *s, sc_tick *tick) {
     return true;
 }
 
+static bool
+parse_audio_output_buffer(const char *s, sc_tick *tick) {
+    long value;
+    bool ok = parse_integer_arg(s, &value, false, 0, 1000,
+                                "audio output buffer");
+    if (!ok) {
+        return false;
+    }
+
+    *tick = SC_TICK_FROM_MS(value);
+    return true;
+}
+
 static bool
 parse_lock_video_orientation(const char *s,
                              enum sc_lock_video_orientation *lock_mode) {
@@ -1831,6 +1855,12 @@ parse_args_with_getopt(struct scrcpy_cli_args *args, int argc, char *argv[],
                     return false;
                 }
                 break;
+            case OPT_AUDIO_OUTPUT_BUFFER:
+                if (!parse_audio_output_buffer(optarg,
+                                               &opts->audio_output_buffer)) {
+                    return false;
+                }
+                break;
             default:
                 // getopt prints the error message on stderr
                 return false;
app/src/clock.c

@@ -1,116 +1,36 @@
 #include "clock.h"
 
+#include <assert.h>
+
 #include "util/log.h"
 
 #define SC_CLOCK_NDEBUG // comment to debug
 
+#define SC_CLOCK_RANGE 32
+
 void
 sc_clock_init(struct sc_clock *clock) {
-    clock->count = 0;
-    clock->head = 0;
-    clock->left_sum.system = 0;
-    clock->left_sum.stream = 0;
-    clock->right_sum.system = 0;
-    clock->right_sum.stream = 0;
-}
-
-// Estimate the affine function f(stream) = slope * stream + offset
-static void
-sc_clock_estimate(struct sc_clock *clock,
-                  double *out_slope, sc_tick *out_offset) {
-    assert(clock->count);
-
-    if (clock->count == 1) {
-        // If there is only 1 point, we can't compute a slope. Assume it is 1.
-        struct sc_clock_point *single_point = &clock->right_sum;
-        *out_slope = 1;
-        *out_offset = single_point->system - single_point->stream;
-        return;
-    }
-
-    struct sc_clock_point left_avg = {
-        .system = clock->left_sum.system / (clock->count / 2),
-        .stream = clock->left_sum.stream / (clock->count / 2),
-    };
-    struct sc_clock_point right_avg = {
-        .system = clock->right_sum.system / ((clock->count + 1) / 2),
-        .stream = clock->right_sum.stream / ((clock->count + 1) / 2),
-    };
-
-    double slope = (double) (right_avg.system - left_avg.system)
-                 / (right_avg.stream - left_avg.stream);
-
-    if (clock->count < SC_CLOCK_RANGE) {
-        /* The first frames are typically received and decoded with more delay
-         * than the others, causing a wrong slope estimation on start. To
-         * compensate, assume an initial slope of 1, then progressively use the
-         * estimated slope. */
-        slope = (clock->count * slope + (SC_CLOCK_RANGE - clock->count))
-              / SC_CLOCK_RANGE;
-    }
-
-    struct sc_clock_point global_avg = {
-        .system = (clock->left_sum.system + clock->right_sum.system)
-                / clock->count,
-        .stream = (clock->left_sum.stream + clock->right_sum.stream)
-                / clock->count,
-    };
-
-    sc_tick offset = global_avg.system - (sc_tick) (global_avg.stream * slope);
-
-    *out_slope = slope;
-    *out_offset = offset;
+    clock->range = 0;
+    clock->offset = 0;
 }
 
 void
 sc_clock_update(struct sc_clock *clock, sc_tick system, sc_tick stream) {
-    struct sc_clock_point *point = &clock->points[clock->head];
-
-    if (clock->count == SC_CLOCK_RANGE || clock->count & 1) {
-        // One point passes from the right sum to the left sum
-
-        unsigned mid;
-        if (clock->count == SC_CLOCK_RANGE) {
-            mid = (clock->head + SC_CLOCK_RANGE / 2) % SC_CLOCK_RANGE;
-        } else {
-            // Only for the first frames
-            mid = clock->count / 2;
-        }
-
-        struct sc_clock_point *mid_point = &clock->points[mid];
-        clock->left_sum.system += mid_point->system;
-        clock->left_sum.stream += mid_point->stream;
-        clock->right_sum.system -= mid_point->system;
-        clock->right_sum.stream -= mid_point->stream;
+    if (clock->range < SC_CLOCK_RANGE) {
+        ++clock->range;
     }
 
-    if (clock->count == SC_CLOCK_RANGE) {
-        // The current point overwrites the previous value in the circular
-        // array, update the left sum accordingly
-        clock->left_sum.system -= point->system;
-        clock->left_sum.stream -= point->stream;
-    } else {
-        ++clock->count;
-    }
-
-    point->system = system;
-    point->stream = stream;
-
-    clock->right_sum.system += system;
-    clock->right_sum.stream += stream;
-
-    clock->head = (clock->head + 1) % SC_CLOCK_RANGE;
-
-    // Update estimation
-    sc_clock_estimate(clock, &clock->slope, &clock->offset);
+    sc_tick offset = system - stream;
+    clock->offset = ((clock->range - 1) * clock->offset + offset)
+                  / clock->range;
 
 #ifndef SC_CLOCK_NDEBUG
-    LOGD("Clock estimation: %f * pts + %" PRItick, clock->slope, clock->offset);
+    LOGD("Clock estimation: pts + %" PRItick, clock->offset);
 #endif
 }
 
 sc_tick
 sc_clock_to_system_time(struct sc_clock *clock, sc_tick stream) {
-    assert(clock->count); // sc_clock_update() must have been called
-    return (sc_tick) (stream * clock->slope) + clock->offset;
+    assert(clock->range); // sc_clock_update() must have been called
+    return stream + clock->offset;
 }
@@ -3,13 +3,8 @@
 
 #include "common.h"
 
-#include <assert.h>
-
 #include "util/tick.h"
 
-#define SC_CLOCK_RANGE 32
-static_assert(!(SC_CLOCK_RANGE & 1), "SC_CLOCK_RANGE must be even");
-
 struct sc_clock_point {
     sc_tick system;
     sc_tick stream;
@@ -21,40 +16,18 @@ struct sc_clock_point {
  *
  *     f(stream) = slope * stream + offset
  *
- * To that end, it stores the SC_CLOCK_RANGE last clock points (the timestamps
- * of a frame expressed both in stream time and system time) in a circular
- * array.
+ * Theoretically, the slope encodes the drift between the device clock and the
+ * computer clock. It is expected to be very close to 1.
  *
- * To estimate the slope, it splits the last SC_CLOCK_RANGE points into two
- * sets of SC_CLOCK_RANGE/2 points, and computes their centroid ("average
- * point"). The slope of the estimated affine function is that of the line
- * passing through these two points.
+ * Since the clock is used to estimate very close points in the future (which
+ * are reestimated on every clock update, see delay_buffer), the error caused
+ * by clock drift is totally negligible, so it is better to assume that the
+ * slope is 1 than to estimate it (the estimation error would be larger).
  *
- * To estimate the offset, it computes the centroid of all the SC_CLOCK_RANGE
- * points. The resulting affine function passes by this centroid.
- *
- * With a circular array, the rolling sums (and average) are quick to compute.
- * In practice, the estimation is stable and the evolution is smooth.
+ * Therefore, only the offset is estimated.
  */
 struct sc_clock {
-    // Circular array
-    struct sc_clock_point points[SC_CLOCK_RANGE];
-
-    // Number of points in the array (count <= SC_CLOCK_RANGE)
-    unsigned count;
-
-    // Index of the next point to write
-    unsigned head;
-
-    // Sum of the first count/2 points
-    struct sc_clock_point left_sum;
-
-    // Sum of the last (count+1)/2 points
-    struct sc_clock_point right_sum;
-
-    // Estimated slope and offset
-    // (computed on sc_clock_update(), used by sc_clock_to_system_time())
-    double slope;
+    unsigned range;
     sc_tick offset;
 };
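In short, the reworked clock assumes a slope of 1 and keeps only a running average of the per-point offset over at most SC_CLOCK_RANGE points. A minimal standalone sketch of that behaviour (simplified types; not the actual scrcpy code):

#include <assert.h>
#include <stdint.h>

#define SC_CLOCK_RANGE 32

struct clock {
    unsigned range;  // number of points taken into account (<= SC_CLOCK_RANGE)
    int64_t offset;  // estimated (system - stream)
};

static void
clock_update(struct clock *c, int64_t system, int64_t stream) {
    if (c->range < SC_CLOCK_RANGE) {
        ++c->range;
    }
    // running average of the offsets, matching the new sc_clock_update()
    int64_t offset = system - stream;
    c->offset = ((c->range - 1) * c->offset + offset) / c->range;
}

static int64_t
clock_to_system_time(const struct clock *c, int64_t stream) {
    assert(c->range); // clock_update() must have been called
    return stream + c->offset;
}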
@@ -194,7 +194,7 @@ sc_delay_buffer_frame_sink_push(struct sc_frame_sink *sink,
     sc_clock_update(&db->clock, sc_tick_now(), pts);
     sc_cond_signal(&db->wait_cond);
 
-    if (db->first_frame_asap && db->clock.count == 1) {
+    if (db->first_frame_asap && db->clock.range == 1) {
         sc_mutex_unlock(&db->mutex);
         return sc_frame_source_sinks_push(&db->frame_source, frame);
     }
app/src/display.c (new file)

@@ -0,0 +1,166 @@
+#include "display.h"
+
+#include <assert.h>
+
+#include "util/log.h"
+
+bool
+sc_display_init(struct sc_display *display, SDL_Window *window, bool mipmaps) {
+    display->renderer =
+        SDL_CreateRenderer(window, -1, SDL_RENDERER_ACCELERATED);
+    if (!display->renderer) {
+        LOGE("Could not create renderer: %s", SDL_GetError());
+        return false;
+    }
+
+    SDL_RendererInfo renderer_info;
+    int r = SDL_GetRendererInfo(display->renderer, &renderer_info);
+    const char *renderer_name = r ? NULL : renderer_info.name;
+    LOGI("Renderer: %s", renderer_name ? renderer_name : "(unknown)");
+
+    display->mipmaps = false;
+
+    // starts with "opengl"
+    bool use_opengl = renderer_name && !strncmp(renderer_name, "opengl", 6);
+    if (use_opengl) {
+        struct sc_opengl *gl = &display->gl;
+        sc_opengl_init(gl);
+
+        LOGI("OpenGL version: %s", gl->version);
+
+        if (mipmaps) {
+            bool supports_mipmaps =
+                sc_opengl_version_at_least(gl, 3, 0, /* OpenGL 3.0+ */
+                                               2, 0  /* OpenGL ES 2.0+ */);
+            if (supports_mipmaps) {
+                LOGI("Trilinear filtering enabled");
+                display->mipmaps = true;
+            } else {
+                LOGW("Trilinear filtering disabled "
+                     "(OpenGL 3.0+ or ES 2.0+ required)");
+            }
+        } else {
+            LOGI("Trilinear filtering disabled");
+        }
+    } else if (mipmaps) {
+        LOGD("Trilinear filtering disabled (not an OpenGL renderer)");
+    }
+
+    return true;
+}
+
+void
+sc_display_destroy(struct sc_display *display) {
+    if (display->texture) {
+        SDL_DestroyTexture(display->texture);
+    }
+    SDL_DestroyRenderer(display->renderer);
+}
+
+static SDL_Texture *
+sc_display_create_texture(struct sc_display *display,
+                          struct sc_size size) {
+    SDL_Renderer *renderer = display->renderer;
+    SDL_Texture *texture = SDL_CreateTexture(renderer, SDL_PIXELFORMAT_YV12,
+                                             SDL_TEXTUREACCESS_STREAMING,
+                                             size.width, size.height);
+    if (!texture) {
+        LOGE("Could not create texture: %s", SDL_GetError());
+        return NULL;
+    }
+
+    if (display->mipmaps) {
+        struct sc_opengl *gl = &display->gl;
+
+        SDL_GL_BindTexture(texture, NULL, NULL);
+
+        // Enable trilinear filtering for downscaling
+        gl->TexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER,
+                          GL_LINEAR_MIPMAP_LINEAR);
+        gl->TexParameterf(GL_TEXTURE_2D, GL_TEXTURE_LOD_BIAS, -1.f);
+
+        SDL_GL_UnbindTexture(texture);
+    }
+
+    return texture;
+}
+
+bool
+sc_display_set_texture_size(struct sc_display *display, struct sc_size size) {
+    if (display->texture) {
+        SDL_DestroyTexture(display->texture);
+    }
+
+    display->texture = sc_display_create_texture(display, size);
+    if (!display->texture) {
+        return false;
+    }
+
+    LOGI("Texture: %" PRIu16 "x%" PRIu16, size.width, size.height);
+    return true;
+}
+
+bool
+sc_display_update_texture(struct sc_display *display, const AVFrame *frame) {
+    int ret = SDL_UpdateYUVTexture(display->texture, NULL,
+                                   frame->data[0], frame->linesize[0],
+                                   frame->data[1], frame->linesize[1],
+                                   frame->data[2], frame->linesize[2]);
+    if (ret) {
+        LOGE("Could not update texture: %s", SDL_GetError());
+        return false;
+    }
+
+    if (display->mipmaps) {
+        SDL_GL_BindTexture(display->texture, NULL, NULL);
+        display->gl.GenerateMipmap(GL_TEXTURE_2D);
+        SDL_GL_UnbindTexture(display->texture);
+    }
+
+    return true;
+}
+
+bool
+sc_display_render(struct sc_display *display, const SDL_Rect *geometry,
+                  unsigned rotation) {
+    SDL_RenderClear(display->renderer);
+
+    SDL_Renderer *renderer = display->renderer;
+    SDL_Texture *texture = display->texture;
+
+    if (rotation == 0) {
+        int ret = SDL_RenderCopy(renderer, texture, NULL, geometry);
+        if (ret) {
+            LOGE("Could not render texture: %s", SDL_GetError());
+            return false;
+        }
+    } else {
+        // rotation in RenderCopyEx() is clockwise, while screen->rotation is
+        // counterclockwise (to be consistent with --lock-video-orientation)
+        int cw_rotation = (4 - rotation) % 4;
+        double angle = 90 * cw_rotation;
+
+        const SDL_Rect *dstrect = NULL;
+        SDL_Rect rect;
+        if (rotation & 1) {
+            rect.x = geometry->x + (geometry->w - geometry->h) / 2;
+            rect.y = geometry->y + (geometry->h - geometry->w) / 2;
+            rect.w = geometry->h;
+            rect.h = geometry->w;
+            dstrect = &rect;
+        } else {
+            assert(rotation == 2);
+            dstrect = geometry;
+        }
+
+        int ret = SDL_RenderCopyEx(renderer, texture, NULL, dstrect, angle,
+                                   NULL, 0);
+        if (ret) {
+            LOGE("Could not render texture: %s", SDL_GetError());
+            return false;
+        }
+    }
+
+    SDL_RenderPresent(display->renderer);
+    return true;
+}
app/src/display.h (new file)

@@ -0,0 +1,37 @@
+#ifndef SC_DISPLAY_H
+#define SC_DISPLAY_H
+
+#include "common.h"
+
+#include <stdbool.h>
+#include <libavformat/avformat.h>
+#include <SDL2/SDL.h>
+
+#include "coords.h"
+#include "opengl.h"
+
+struct sc_display {
+    SDL_Renderer *renderer;
+    SDL_Texture *texture;
+
+    struct sc_opengl gl;
+    bool mipmaps;
+};
+
+bool
+sc_display_init(struct sc_display *display, SDL_Window *window, bool mipmaps);
+
+void
+sc_display_destroy(struct sc_display *display);
+
+bool
+sc_display_set_texture_size(struct sc_display *display, struct sc_size size);
+
+bool
+sc_display_update_texture(struct sc_display *display, const AVFrame *frame);
+
+bool
+sc_display_render(struct sc_display *display, const SDL_Rect *geometry,
+                  unsigned rotation);
+
+#endif
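Taken together, the new header describes a small rendering façade extracted from screen.c. The sketch below illustrates the intended call sequence, mirroring how screen.c uses it elsewhere in this diff; the wrapper functions (setup/draw/teardown) are hypothetical names, not part of the change:

#include "display.h"

static struct sc_display display;

// One-time initialization: create the renderer and the initial texture
bool setup(SDL_Window *window, struct sc_size frame_size, bool mipmaps) {
    return sc_display_init(&display, window, mipmaps)
        && sc_display_set_texture_size(&display, frame_size);
}

// Called for each decoded frame (and sc_display_set_texture_size() again
// whenever the frame size changes)
bool draw(const AVFrame *frame, const SDL_Rect *geometry, unsigned rotation) {
    return sc_display_update_texture(&display, frame)
        && sc_display_render(&display, geometry, rotation);
}

void teardown(void) {
    sc_display_destroy(&display);
}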
@@ -797,7 +797,8 @@ sc_input_manager_process_file(struct sc_input_manager *im,
 }
 
 void
-sc_input_manager_handle_event(struct sc_input_manager *im, SDL_Event *event) {
+sc_input_manager_handle_event(struct sc_input_manager *im,
+                              const SDL_Event *event) {
     bool control = im->controller;
     switch (event->type) {
         case SDL_TEXTINPUT:
@@ -61,6 +61,7 @@ sc_input_manager_init(struct sc_input_manager *im,
                       const struct sc_input_manager_params *params);
 
 void
-sc_input_manager_handle_event(struct sc_input_manager *im, SDL_Event *event);
+sc_input_manager_handle_event(struct sc_input_manager *im,
+                              const SDL_Event *event);
 
 #endif
@@ -44,6 +44,7 @@ const struct scrcpy_options scrcpy_options_default = {
     .display_buffer = 0,
     .v4l2_buffer = 0,
     .audio_buffer = SC_TICK_FROM_MS(50),
+    .audio_output_buffer = SC_TICK_FROM_MS(5),
 #ifdef HAVE_USB
     .otg = false,
 #endif
@@ -127,6 +127,7 @@ struct scrcpy_options {
     sc_tick display_buffer;
     sc_tick v4l2_buffer;
     sc_tick audio_buffer;
+    sc_tick audio_output_buffer;
 #ifdef HAVE_USB
     bool otg;
 #endif
@@ -688,7 +688,8 @@ aoa_hid_end:
         sc_frame_source_add_sink(src, &s->screen.frame_sink);
 
         if (options->audio) {
-            sc_audio_player_init(&s->audio_player, options->audio_buffer);
+            sc_audio_player_init(&s->audio_player, options->audio_buffer,
+                                 options->audio_output_buffer);
             sc_frame_source_add_sink(&s->audio_decoder.frame_source,
                                      &s->audio_player.frame_sink);
         }
app/src/screen.c

@@ -239,35 +239,6 @@ sc_screen_update_content_rect(struct sc_screen *screen) {
     }
 }
 
-static bool
-create_texture(struct sc_screen *screen) {
-    SDL_Renderer *renderer = screen->renderer;
-    struct sc_size size = screen->frame_size;
-    SDL_Texture *texture = SDL_CreateTexture(renderer, SDL_PIXELFORMAT_YV12,
-                                             SDL_TEXTUREACCESS_STREAMING,
-                                             size.width, size.height);
-    if (!texture) {
-        LOGE("Could not create texture: %s", SDL_GetError());
-        return false;
-    }
-
-    if (screen->mipmaps) {
-        struct sc_opengl *gl = &screen->gl;
-
-        SDL_GL_BindTexture(texture, NULL, NULL);
-
-        // Enable trilinear filtering for downscaling
-        gl->TexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER,
-                          GL_LINEAR_MIPMAP_LINEAR);
-        gl->TexParameterf(GL_TEXTURE_2D, GL_TEXTURE_LOD_BIAS, -1.f);
-
-        SDL_GL_UnbindTexture(texture);
-    }
-
-    screen->texture = texture;
-    return true;
-}
-
 // render the texture to the renderer
 //
 // Set the update_content_rect flag if the window or content size may have
@@ -278,35 +249,11 @@ sc_screen_render(struct sc_screen *screen, bool update_content_rect) {
         sc_screen_update_content_rect(screen);
     }
 
-    SDL_RenderClear(screen->renderer);
-    if (screen->rotation == 0) {
-        SDL_RenderCopy(screen->renderer, screen->texture, NULL, &screen->rect);
-    } else {
-        // rotation in RenderCopyEx() is clockwise, while screen->rotation is
-        // counterclockwise (to be consistent with --lock-video-orientation)
-        int cw_rotation = (4 - screen->rotation) % 4;
-        double angle = 90 * cw_rotation;
-
-        SDL_Rect *dstrect = NULL;
-        SDL_Rect rect;
-        if (screen->rotation & 1) {
-            rect.x = screen->rect.x + (screen->rect.w - screen->rect.h) / 2;
-            rect.y = screen->rect.y + (screen->rect.h - screen->rect.w) / 2;
-            rect.w = screen->rect.h;
-            rect.h = screen->rect.w;
-            dstrect = &rect;
-        } else {
-            assert(screen->rotation == 2);
-            dstrect = &screen->rect;
-        }
-
-        SDL_RenderCopyEx(screen->renderer, screen->texture, NULL, dstrect,
-                         angle, NULL, 0);
-    }
-    SDL_RenderPresent(screen->renderer);
+    bool ok = sc_display_render(&screen->display, &screen->rect,
+                                screen->rotation);
+    (void) ok; // error already logged
 }
-
 
 #if defined(__APPLE__) || defined(__WINDOWS__)
 # define CONTINUOUS_RESIZING_WORKAROUND
 #endif
@@ -453,46 +400,11 @@ sc_screen_init(struct sc_screen *screen,
         goto error_destroy_fps_counter;
     }
 
-    screen->renderer = SDL_CreateRenderer(screen->window, -1,
-                                          SDL_RENDERER_ACCELERATED);
-    if (!screen->renderer) {
-        LOGE("Could not create renderer: %s", SDL_GetError());
+    ok = sc_display_init(&screen->display, screen->window, params->mipmaps);
+    if (!ok) {
         goto error_destroy_window;
     }
 
-    SDL_RendererInfo renderer_info;
-    int r = SDL_GetRendererInfo(screen->renderer, &renderer_info);
-    const char *renderer_name = r ? NULL : renderer_info.name;
-    LOGI("Renderer: %s", renderer_name ? renderer_name : "(unknown)");
-
-    screen->mipmaps = false;
-
-    // starts with "opengl"
-    bool use_opengl = renderer_name && !strncmp(renderer_name, "opengl", 6);
-    if (use_opengl) {
-        struct sc_opengl *gl = &screen->gl;
-        sc_opengl_init(gl);
-
-        LOGI("OpenGL version: %s", gl->version);
-
-        if (params->mipmaps) {
-            bool supports_mipmaps =
-                sc_opengl_version_at_least(gl, 3, 0, /* OpenGL 3.0+ */
-                                               2, 0  /* OpenGL ES 2.0+ */);
-            if (supports_mipmaps) {
-                LOGI("Trilinear filtering enabled");
-                screen->mipmaps = true;
-            } else {
-                LOGW("Trilinear filtering disabled "
-                     "(OpenGL 3.0+ or ES 2.0+ required)");
-            }
-        } else {
-            LOGI("Trilinear filtering disabled");
-        }
-    } else if (params->mipmaps) {
-        LOGD("Trilinear filtering disabled (not an OpenGL renderer)");
-    }
-
     SDL_Surface *icon = scrcpy_icon_load();
     if (icon) {
         SDL_SetWindowIcon(screen->window, icon);
@@ -504,7 +416,7 @@ sc_screen_init(struct sc_screen *screen,
     screen->frame = av_frame_alloc();
     if (!screen->frame) {
         LOG_OOM();
-        goto error_destroy_renderer;
+        goto error_destroy_display;
     }
 
     struct sc_input_manager_params im_params = {
@@ -539,8 +451,8 @@ sc_screen_init(struct sc_screen *screen,
 
     return true;
 
-error_destroy_renderer:
-    SDL_DestroyRenderer(screen->renderer);
+error_destroy_display:
+    sc_display_destroy(&screen->display);
 error_destroy_window:
     SDL_DestroyWindow(screen->window);
 error_destroy_fps_counter:
@@ -596,11 +508,8 @@ sc_screen_destroy(struct sc_screen *screen) {
 #ifndef NDEBUG
     assert(!screen->open);
 #endif
+    sc_display_destroy(&screen->display);
     av_frame_free(&screen->frame);
-    if (screen->texture) {
-        SDL_DestroyTexture(screen->texture);
-    }
-    SDL_DestroyRenderer(screen->renderer);
     SDL_DestroyWindow(screen->window);
     sc_fps_counter_destroy(&screen->fps_counter);
     sc_frame_buffer_destroy(&screen->fb);
@@ -667,7 +576,6 @@ static bool
 sc_screen_init_size(struct sc_screen *screen) {
     // Before first frame
     assert(!screen->has_frame);
-    assert(!screen->texture);
 
     // The requested size is passed via screen->frame_size
 
@@ -675,48 +583,27 @@ sc_screen_init_size(struct sc_screen *screen) {
         get_rotated_size(screen->frame_size, screen->rotation);
     screen->content_size = content_size;
 
-    LOGI("Initial texture: %" PRIu16 "x%" PRIu16,
-         screen->frame_size.width, screen->frame_size.height);
-    return create_texture(screen);
+    return sc_display_set_texture_size(&screen->display, screen->frame_size);
 }
 
 // recreate the texture and resize the window if the frame size has changed
 static bool
 prepare_for_frame(struct sc_screen *screen, struct sc_size new_frame_size) {
-    if (screen->frame_size.width != new_frame_size.width
-            || screen->frame_size.height != new_frame_size.height) {
-        // frame dimension changed, destroy texture
-        SDL_DestroyTexture(screen->texture);
-
-        screen->frame_size = new_frame_size;
-
-        struct sc_size new_content_size =
-            get_rotated_size(new_frame_size, screen->rotation);
-        set_content_size(screen, new_content_size);
-
-        sc_screen_update_content_rect(screen);
-
-        LOGI("New texture: %" PRIu16 "x%" PRIu16,
-             screen->frame_size.width, screen->frame_size.height);
-        return create_texture(screen);
+    if (screen->frame_size.width == new_frame_size.width
+            && screen->frame_size.height == new_frame_size.height) {
+        return true;
     }
 
-    return true;
-}
+    // frame dimension changed
+    screen->frame_size = new_frame_size;
 
-// write the frame into the texture
-static void
-update_texture(struct sc_screen *screen, const AVFrame *frame) {
-    SDL_UpdateYUVTexture(screen->texture, NULL,
-                         frame->data[0], frame->linesize[0],
-                         frame->data[1], frame->linesize[1],
-                         frame->data[2], frame->linesize[2]);
-
-    if (screen->mipmaps) {
-        SDL_GL_BindTexture(screen->texture, NULL, NULL);
-        screen->gl.GenerateMipmap(GL_TEXTURE_2D);
-        SDL_GL_UnbindTexture(screen->texture);
-    }
+    struct sc_size new_content_size =
+        get_rotated_size(new_frame_size, screen->rotation);
+    set_content_size(screen, new_content_size);
+    sc_screen_update_content_rect(screen);
+    return sc_display_set_texture_size(&screen->display, screen->frame_size);
 }
 
 static bool
@@ -731,7 +618,10 @@ sc_screen_update_frame(struct sc_screen *screen) {
     if (!prepare_for_frame(screen, new_frame_size)) {
         return false;
     }
-    update_texture(screen, frame);
+
+    if (!sc_display_update_texture(&screen->display, frame)) {
+        return false;
+    }
 
     if (!screen->has_frame) {
         screen->has_frame = true;
@@ -812,7 +702,7 @@ sc_screen_is_mouse_capture_key(SDL_Keycode key) {
 }
 
 bool
-sc_screen_handle_event(struct sc_screen *screen, SDL_Event *event) {
+sc_screen_handle_event(struct sc_screen *screen, const SDL_Event *event) {
     bool relative_mode = sc_screen_is_relative_mode(screen);
 
     switch (event->type) {
@@ -9,6 +9,7 @@
 
 #include "controller.h"
 #include "coords.h"
+#include "display.h"
 #include "fps_counter.h"
 #include "frame_buffer.h"
 #include "input_manager.h"
@@ -24,6 +25,7 @@ struct sc_screen {
     bool open; // track the open/close state to assert correct behavior
 #endif
 
+    struct sc_display display;
     struct sc_input_manager im;
     struct sc_frame_buffer fb;
     struct sc_fps_counter fps_counter;
@@ -39,9 +41,6 @@ struct sc_screen {
     } req;
 
     SDL_Window *window;
-    SDL_Renderer *renderer;
-    SDL_Texture *texture;
-    struct sc_opengl gl;
     struct sc_size frame_size;
     struct sc_size content_size; // rotated frame_size
 
@@ -57,7 +56,6 @@ struct sc_screen {
     bool has_frame;
     bool fullscreen;
     bool maximized;
-    bool mipmaps;
 
     // To enable/disable mouse capture, a mouse capture key (LALT, LGUI or
     // RGUI) must be pressed. This variable tracks the pressed capture key.
@@ -137,7 +135,7 @@ sc_screen_set_rotation(struct sc_screen *screen, unsigned rotation);
 // react to SDL events
 // If this function returns false, scrcpy must exit with an error.
 bool
-sc_screen_handle_event(struct sc_screen *screen, SDL_Event *event);
+sc_screen_handle_event(struct sc_screen *screen, const SDL_Event *event);
 
 // convert point from window coordinates to frame coordinates
 // x and y are expressed in pixels
@@ -217,6 +217,18 @@ static void test_get_ip_multiline_second_ok(void) {
     free(ip);
 }
 
+static void test_get_ip_multiline_second_ok_without_cr(void) {
+    char ip_route[] = "10.0.0.0/24 dev rmnet proto kernel scope link src "
+                      "10.0.0.3\n"
+                      "192.168.1.0/24 dev wlan0 proto kernel scope link src "
+                      "192.168.1.3\n";
+
+    char *ip = sc_adb_parse_device_ip(ip_route);
+    assert(ip);
+    assert(!strcmp(ip, "192.168.1.3"));
+    free(ip);
+}
+
 static void test_get_ip_no_wlan(void) {
     char ip_route[] = "192.168.1.0/24 dev rmnet proto kernel scope link src "
                       "192.168.12.34\r\r\n";
@@ -259,6 +271,7 @@ int main(int argc, char *argv[]) {
     test_get_ip_single_line_with_trailing_space();
     test_get_ip_multiline_first_ok();
     test_get_ip_multiline_second_ok();
+    test_get_ip_multiline_second_ok_without_cr();
     test_get_ip_no_wlan();
     test_get_ip_no_wlan_without_eol();
     test_get_ip_truncated();
tests/test_clock.c (deleted)

@@ -1,79 +0,0 @@
-#include "common.h"
-
-#include <assert.h>
-
-#include "clock.h"
-
-void test_small_rolling_sum(void) {
-    struct sc_clock clock;
-    sc_clock_init(&clock);
-
-    assert(clock.count == 0);
-    assert(clock.left_sum.system == 0);
-    assert(clock.left_sum.stream == 0);
-    assert(clock.right_sum.system == 0);
-    assert(clock.right_sum.stream == 0);
-
-    sc_clock_update(&clock, 2, 3);
-    assert(clock.count == 1);
-    assert(clock.left_sum.system == 0);
-    assert(clock.left_sum.stream == 0);
-    assert(clock.right_sum.system == 2);
-    assert(clock.right_sum.stream == 3);
-
-    sc_clock_update(&clock, 10, 20);
-    assert(clock.count == 2);
-    assert(clock.left_sum.system == 2);
-    assert(clock.left_sum.stream == 3);
-    assert(clock.right_sum.system == 10);
-    assert(clock.right_sum.stream == 20);
-
-    sc_clock_update(&clock, 40, 80);
-    assert(clock.count == 3);
-    assert(clock.left_sum.system == 2);
-    assert(clock.left_sum.stream == 3);
-    assert(clock.right_sum.system == 50);
-    assert(clock.right_sum.stream == 100);
-
-    sc_clock_update(&clock, 400, 800);
-    assert(clock.count == 4);
-    assert(clock.left_sum.system == 12);
-    assert(clock.left_sum.stream == 23);
-    assert(clock.right_sum.system == 440);
-    assert(clock.right_sum.stream == 880);
-}
-
-void test_large_rolling_sum(void) {
-    const unsigned half_range = SC_CLOCK_RANGE / 2;
-
-    struct sc_clock clock1;
-    sc_clock_init(&clock1);
-    for (unsigned i = 0; i < 5 * half_range; ++i) {
-        sc_clock_update(&clock1, i, 2 * i + 1);
-    }
-
-    struct sc_clock clock2;
-    sc_clock_init(&clock2);
-    for (unsigned i = 3 * half_range; i < 5 * half_range; ++i) {
-        sc_clock_update(&clock2, i, 2 * i + 1);
-    }
-
-    assert(clock1.count == SC_CLOCK_RANGE);
-    assert(clock2.count == SC_CLOCK_RANGE);
-
-    // The values before the last SC_CLOCK_RANGE points in clock1 should have
-    // no impact
-    assert(clock1.left_sum.system == clock2.left_sum.system);
-    assert(clock1.left_sum.stream == clock2.left_sum.stream);
-    assert(clock1.right_sum.system == clock2.right_sum.system);
-    assert(clock1.right_sum.stream == clock2.right_sum.stream);
-}
-
-int main(int argc, char *argv[]) {
-    (void) argc;
-    (void) argv;
-
-    test_small_rolling_sum();
-    test_large_rolling_sum();
-    return 0;
-};
doc/audio.md

@@ -88,3 +88,14 @@ avoid glitches and smooth the playback:
 ```
 scrcpy --display-buffer=200 --audio-buffer=200
 ```
+
+It is also possible to configure another audio buffer (the audio output buffer),
+by default set to 5ms. Don't change it, unless you get some [robotic and glitchy
+sound][#3793]:
+
+```bash
+# Only if absolutely necessary
+scrcpy --audio-output-buffer=10
+```
+
+[#3793]: https://github.com/Genymobile/scrcpy/issues/3793
@@ -59,45 +59,58 @@ public final class AudioCapture {
     }
 
     private static void startWorkaroundAndroid11() {
-        if (Build.VERSION.SDK_INT == Build.VERSION_CODES.R) {
-            // Android 11 requires Apps to be at foreground to record audio.
-            // Normally, each App has its own user ID, so Android checks whether the requesting App has the user ID that's at the foreground.
-            // But scrcpy server is NOT an App, it's a Java application started from Android shell, so it has the same user ID (2000) with Android
-            // shell ("com.android.shell").
-            // If there is an Activity from Android shell running at foreground, then the permission system will believe scrcpy is also in the
-            // foreground.
-            Intent intent = new Intent(Intent.ACTION_MAIN);
-            intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
-            intent.addCategory(Intent.CATEGORY_LAUNCHER);
-            intent.setComponent(new ComponentName(FakeContext.PACKAGE_NAME, "com.android.shell.HeapDumpActivity"));
-            ServiceManager.getActivityManager().startActivityAsUserWithFeature(intent);
-
-            // Wait for activity to start
-            SystemClock.sleep(150);
-        }
+        // Android 11 requires Apps to be at foreground to record audio.
+        // Normally, each App has its own user ID, so Android checks whether the requesting App has the user ID that's at the foreground.
+        // But scrcpy server is NOT an App, it's a Java application started from Android shell, so it has the same user ID (2000) with Android
+        // shell ("com.android.shell").
+        // If there is an Activity from Android shell running at foreground, then the permission system will believe scrcpy is also in the
+        // foreground.
+        Intent intent = new Intent(Intent.ACTION_MAIN);
+        intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
+        intent.addCategory(Intent.CATEGORY_LAUNCHER);
+        intent.setComponent(new ComponentName(FakeContext.PACKAGE_NAME, "com.android.shell.HeapDumpActivity"));
+        ServiceManager.getActivityManager().startActivityAsUserWithFeature(intent);
     }
 
     private static void stopWorkaroundAndroid11() {
-        if (Build.VERSION.SDK_INT == Build.VERSION_CODES.R) {
-            ServiceManager.getActivityManager().forceStopPackage(FakeContext.PACKAGE_NAME);
-        }
+        ServiceManager.getActivityManager().forceStopPackage(FakeContext.PACKAGE_NAME);
+    }
+
+    private void tryStartRecording(int attempts, int delayMs) throws AudioCaptureForegroundException {
+        while (attempts-- > 0) {
+            // Wait for activity to start
+            SystemClock.sleep(delayMs);
+            try {
+                startRecording();
+                return; // it worked
+            } catch (UnsupportedOperationException e) {
+                if (attempts == 0) {
+                    Ln.e("Failed to start audio capture");
+                    Ln.e("On Android 11, audio capture must be started in the foreground, make sure that the device is unlocked when starting " +
+                            "scrcpy.");
+                    throw new AudioCaptureForegroundException();
+                } else {
+                    Ln.d("Failed to start audio capture, retrying...");
+                }
+            }
+        }
+    }
+
+    private void startRecording() {
+        recorder = createAudioRecord();
+        recorder.startRecording();
     }
 
     public void start() throws AudioCaptureForegroundException {
-        startWorkaroundAndroid11();
-        try {
-            recorder = createAudioRecord();
-            recorder.startRecording();
-        } catch (UnsupportedOperationException e) {
-            if (Build.VERSION.SDK_INT == Build.VERSION_CODES.R) {
-                Ln.e("Failed to start audio capture");
-                Ln.e("On Android 11, it is only possible to capture in foreground, make sure that the device is unlocked when starting scrcpy.");
-                throw new AudioCaptureForegroundException();
-            }
-            throw e;
-        } finally {
-            stopWorkaroundAndroid11();
+        if (Build.VERSION.SDK_INT == Build.VERSION_CODES.R) {
+            startWorkaroundAndroid11();
+            try {
+                tryStartRecording(3, 100);
+            } finally {
+                stopWorkaroundAndroid11();
+            }
+        } else {
+            startRecording();
         }
     }
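The hunk above changes the Android 11 strategy from a single fixed 150 ms sleep to up to three attempts spaced 100 ms apart while the shell activity is in the foreground, and AudioCaptureForegroundException now means the capture could not be started in the foreground at all. As a minimal sketch (not code from this branch, the helper name is hypothetical), a consumer of AudioCapture could treat that exception as "continue without audio":

```java
// Hypothetical caller: disable audio forwarding on failure, keep mirroring video.
static boolean startAudioOrFallBack(AudioCapture capture) {
    try {
        capture.start();
        return true;  // audio capture started (possibly after retries on Android 11)
    } catch (AudioCaptureForegroundException e) {
        // Already logged by AudioCapture; continue without audio forwarding.
        return false;
    }
}
```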
@@ -271,13 +271,22 @@ public final class AudioEncoder implements AsyncProcessor {
             try {
                 return MediaCodec.createByCodecName(encoderName);
             } catch (IllegalArgumentException e) {
-                Ln.e("Encoder '" + encoderName + "' for " + codec.getName() + " not found\n" + LogUtils.buildAudioEncoderListMessage());
+                Ln.e("Audio encoder '" + encoderName + "' for " + codec.getName() + " not found\n" + LogUtils.buildAudioEncoderListMessage());
                 throw new ConfigurationException("Unknown encoder: " + encoderName);
+            } catch (IOException e) {
+                Ln.e("Could not create audio encoder '" + encoderName + "' for " + codec.getName() + "\n" + LogUtils.buildAudioEncoderListMessage());
+                throw e;
             }
         }
-        MediaCodec mediaCodec = MediaCodec.createEncoderByType(codec.getMimeType());
-        Ln.d("Using audio encoder: '" + mediaCodec.getName() + "'");
-        return mediaCodec;
+
+        try {
+            MediaCodec mediaCodec = MediaCodec.createEncoderByType(codec.getMimeType());
+            Ln.d("Using audio encoder: '" + mediaCodec.getName() + "'");
+            return mediaCodec;
+        } catch (IOException | IllegalArgumentException e) {
+            Ln.e("Could not create default audio encoder for " + codec.getName() + "\n" + LogUtils.buildAudioEncoderListMessage());
+            throw e;
+        }
     }
 
     private class EncoderCallback extends MediaCodec.Callback {
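Both code paths of the hunk above now log the list of available encoders when MediaCodec creation fails. For context, a list like the one produced by LogUtils.buildAudioEncoderListMessage() can be derived from the public MediaCodecList API; the following sketch is an illustration only, not the project's implementation:

```java
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;

// Illustration: print the names of encoders supporting a given MIME type.
static void printEncoders(String mimeType) {
    MediaCodecInfo[] infos = new MediaCodecList(MediaCodecList.REGULAR_CODECS).getCodecInfos();
    for (MediaCodecInfo info : infos) {
        if (!info.isEncoder()) {
            continue;
        }
        for (String type : info.getSupportedTypes()) {
            if (type.equalsIgnoreCase(mimeType)) {
                System.out.println(info.getName());
            }
        }
    }
}
```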
@@ -288,10 +288,7 @@ public final class Device {
         boolean allOk = true;
         for (long physicalDisplayId : physicalDisplayIds) {
             IBinder binder = SurfaceControl.getPhysicalDisplayToken(physicalDisplayId);
-            boolean ok = SurfaceControl.setDisplayPowerMode(binder, mode);
-            if (!ok) {
-                allOk = false;
-            }
+            allOk &= SurfaceControl.setDisplayPowerMode(binder, mode);
         }
         return allOk;
     }
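The hunk above folds the removed if block into a compound assignment. Since `&=` always evaluates its right-hand side (unlike `&&`), setDisplayPowerMode is still attempted on every remaining display after a failure, which preserves the previous behavior. A small sketch with a hypothetical setMode() stand-in:

```java
// Equivalence sketch; setMode() is a hypothetical stand-in for
// SurfaceControl.setDisplayPowerMode(binder, mode).
final class PowerModeSketch {
    static boolean setModeOnAll(long[] displayIds) {
        boolean allOk = true;
        for (long id : displayIds) {
            allOk &= setMode(id); // same as: if (!setMode(id)) allOk = false;
        }
        return allOk;
    }

    private static boolean setMode(long displayId) {
        return displayId >= 0; // placeholder result
    }
}
```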
@@ -38,4 +38,10 @@ public final class FakeContext extends ContextWrapper {
         builder.setPackageName(PACKAGE_NAME);
         return builder.build();
     }
+
+    // @Override to be added on SDK upgrade for Android 14
+    @SuppressWarnings("unused")
+    public int getDeviceId() {
+        return 0;
+    }
 }
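Regarding the hunk above: Context#getDeviceId() is part of Android 14 (API 34), where android.content.Context.DEVICE_ID_DEFAULT is 0, so returning 0 corresponds to the default (non-virtual) device. Once the build targets SDK 34, the method is expected to look roughly like this (an assumption based on the comment in the hunk, not part of this diff):

```java
// Sketch of the same method after the SDK upgrade mentioned in the hunk.
@Override
public int getDeviceId() {
    return Context.DEVICE_ID_DEFAULT; // 0, the default (non-virtual) device
}
```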
@@ -202,13 +202,22 @@ public class ScreenEncoder implements Device.RotationListener {
             try {
                 return MediaCodec.createByCodecName(encoderName);
             } catch (IllegalArgumentException e) {
-                Ln.e("Encoder '" + encoderName + "' for " + codec.getName() + " not found\n" + LogUtils.buildVideoEncoderListMessage());
+                Ln.e("Video encoder '" + encoderName + "' for " + codec.getName() + " not found\n" + LogUtils.buildVideoEncoderListMessage());
                 throw new ConfigurationException("Unknown encoder: " + encoderName);
+            } catch (IOException e) {
+                Ln.e("Could not create video encoder '" + encoderName + "' for " + codec.getName() + "\n" + LogUtils.buildVideoEncoderListMessage());
+                throw e;
             }
         }
-        MediaCodec mediaCodec = MediaCodec.createEncoderByType(codec.getMimeType());
-        Ln.d("Using encoder: '" + mediaCodec.getName() + "'");
-        return mediaCodec;
+
+        try {
+            MediaCodec mediaCodec = MediaCodec.createEncoderByType(codec.getMimeType());
+            Ln.d("Using video encoder: '" + mediaCodec.getName() + "'");
+            return mediaCodec;
+        } catch (IOException | IllegalArgumentException e) {
+            Ln.e("Could not create default video encoder for " + codec.getName() + "\n" + LogUtils.buildVideoEncoderListMessage());
+            throw e;
+        }
     }
 
     private static MediaFormat createFormat(String videoMimeType, int bitRate, int maxFps, List<CodecOption> codecOptions) {
@@ -16,9 +16,9 @@ public class ClipboardManager {
     private Method getPrimaryClipMethod;
     private Method setPrimaryClipMethod;
     private Method addPrimaryClipChangedListener;
-    private boolean alternativeGetMethod;
-    private boolean alternativeSetMethod;
-    private boolean alternativeAddListenerMethod;
+    private int getMethodVersion;
+    private int setMethodVersion;
+    private int addListenerMethodVersion;
 
     public ClipboardManager(IInterface manager) {
         this.manager = manager;
@@ -31,9 +31,20 @@ public class ClipboardManager {
             } else {
                 try {
                     getPrimaryClipMethod = manager.getClass().getMethod("getPrimaryClip", String.class, int.class);
-                } catch (NoSuchMethodException e) {
-                    getPrimaryClipMethod = manager.getClass().getMethod("getPrimaryClip", String.class, String.class, int.class);
-                    alternativeGetMethod = true;
+                    getMethodVersion = 0;
+                } catch (NoSuchMethodException e1) {
+                    try {
+                        getPrimaryClipMethod = manager.getClass().getMethod("getPrimaryClip", String.class, String.class, int.class);
+                        getMethodVersion = 1;
+                    } catch (NoSuchMethodException e2) {
+                        try {
+                            getPrimaryClipMethod = manager.getClass().getMethod("getPrimaryClip", String.class, String.class, int.class, int.class);
+                            getMethodVersion = 2;
+                        } catch (NoSuchMethodException e3) {
+                            getPrimaryClipMethod = manager.getClass().getMethod("getPrimaryClip", String.class, int.class, String.class);
+                            getMethodVersion = 3;
+                        }
+                    }
                 }
             }
         }
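The nested try/catch above probes four known getPrimaryClip signatures and records which one matched as a small integer version. The same probing can also be written as a data-driven loop; the sketch below is an illustration of the idea only, not code from this branch:

```java
import java.lang.reflect.Method;

// Illustration: the candidate parameter lists mirror method versions 0-3 from the hunk above.
static Method findGetPrimaryClip(Class<?> managerClass) throws NoSuchMethodException {
    Class<?>[][] candidates = {
            {String.class, int.class},                           // version 0
            {String.class, String.class, int.class},             // version 1
            {String.class, String.class, int.class, int.class},  // version 2
            {String.class, int.class, String.class},             // version 3
    };
    NoSuchMethodException last = null;
    for (Class<?>[] params : candidates) {
        try {
            return managerClass.getMethod("getPrimaryClip", params);
        } catch (NoSuchMethodException e) {
            last = e; // try the next known signature
        }
    }
    throw last;
}
```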
@@ -47,41 +58,64 @@ public class ClipboardManager {
             } else {
                 try {
                     setPrimaryClipMethod = manager.getClass().getMethod("setPrimaryClip", ClipData.class, String.class, int.class);
-                } catch (NoSuchMethodException e) {
-                    setPrimaryClipMethod = manager.getClass().getMethod("setPrimaryClip", ClipData.class, String.class, String.class, int.class);
-                    alternativeSetMethod = true;
+                    setMethodVersion = 0;
+                } catch (NoSuchMethodException e1) {
+                    try {
+                        setPrimaryClipMethod = manager.getClass().getMethod("setPrimaryClip", ClipData.class, String.class, String.class, int.class);
+                        setMethodVersion = 1;
+                    } catch (NoSuchMethodException e2) {
+                        setPrimaryClipMethod = manager.getClass()
+                                .getMethod("setPrimaryClip", ClipData.class, String.class, String.class, int.class, int.class);
+                        setMethodVersion = 2;
+                    }
                 }
             }
         }
         return setPrimaryClipMethod;
     }
 
-    private static ClipData getPrimaryClip(Method method, boolean alternativeMethod, IInterface manager)
+    private static ClipData getPrimaryClip(Method method, int methodVersion, IInterface manager)
             throws InvocationTargetException, IllegalAccessException {
         if (Build.VERSION.SDK_INT < Build.VERSION_CODES.Q) {
             return (ClipData) method.invoke(manager, FakeContext.PACKAGE_NAME);
         }
-        if (alternativeMethod) {
-            return (ClipData) method.invoke(manager, FakeContext.PACKAGE_NAME, null, FakeContext.ROOT_UID);
+
+        switch (methodVersion) {
+            case 0:
+                return (ClipData) method.invoke(manager, FakeContext.PACKAGE_NAME, FakeContext.ROOT_UID);
+            case 1:
+                return (ClipData) method.invoke(manager, FakeContext.PACKAGE_NAME, null, FakeContext.ROOT_UID);
+            case 2:
+                return (ClipData) method.invoke(manager, FakeContext.PACKAGE_NAME, null, FakeContext.ROOT_UID, 0);
+            default:
+                return (ClipData) method.invoke(manager, FakeContext.PACKAGE_NAME, FakeContext.ROOT_UID, null);
         }
-        return (ClipData) method.invoke(manager, FakeContext.PACKAGE_NAME, FakeContext.ROOT_UID);
     }
 
-    private static void setPrimaryClip(Method method, boolean alternativeMethod, IInterface manager, ClipData clipData)
+    private static void setPrimaryClip(Method method, int methodVersion, IInterface manager, ClipData clipData)
             throws InvocationTargetException, IllegalAccessException {
         if (Build.VERSION.SDK_INT < Build.VERSION_CODES.Q) {
             method.invoke(manager, clipData, FakeContext.PACKAGE_NAME);
-        } else if (alternativeMethod) {
-            method.invoke(manager, clipData, FakeContext.PACKAGE_NAME, null, FakeContext.ROOT_UID);
-        } else {
-            method.invoke(manager, clipData, FakeContext.PACKAGE_NAME, FakeContext.ROOT_UID);
+            return;
+        }
+
+        switch (methodVersion) {
+            case 0:
+                method.invoke(manager, clipData, FakeContext.PACKAGE_NAME, FakeContext.ROOT_UID);
+                break;
+            case 1:
+                method.invoke(manager, clipData, FakeContext.PACKAGE_NAME, null, FakeContext.ROOT_UID);
+                break;
+            default:
+                method.invoke(manager, clipData, FakeContext.PACKAGE_NAME, null, FakeContext.ROOT_UID, 0);
+                break;
         }
     }
 
     public CharSequence getText() {
         try {
             Method method = getGetPrimaryClipMethod();
-            ClipData clipData = getPrimaryClip(method, alternativeGetMethod, manager);
+            ClipData clipData = getPrimaryClip(method, getMethodVersion, manager);
             if (clipData == null || clipData.getItemCount() == 0) {
                 return null;
             }
@@ -96,7 +130,7 @@ public class ClipboardManager {
         try {
             Method method = getSetPrimaryClipMethod();
             ClipData clipData = ClipData.newPlainText(null, text);
-            setPrimaryClip(method, alternativeSetMethod, manager, clipData);
+            setPrimaryClip(method, setMethodVersion, manager, clipData);
             return true;
         } catch (InvocationTargetException | IllegalAccessException | NoSuchMethodException e) {
             Ln.e("Could not invoke method", e);
@@ -104,14 +138,23 @@ public class ClipboardManager {
         }
     }
 
-    private static void addPrimaryClipChangedListener(Method method, boolean alternativeMethod, IInterface manager,
+    private static void addPrimaryClipChangedListener(Method method, int methodVersion, IInterface manager,
             IOnPrimaryClipChangedListener listener) throws InvocationTargetException, IllegalAccessException {
         if (Build.VERSION.SDK_INT < Build.VERSION_CODES.Q) {
             method.invoke(manager, listener, FakeContext.PACKAGE_NAME);
-        } else if (alternativeMethod) {
-            method.invoke(manager, listener, FakeContext.PACKAGE_NAME, null, FakeContext.ROOT_UID);
-        } else {
-            method.invoke(manager, listener, FakeContext.PACKAGE_NAME, FakeContext.ROOT_UID);
+            return;
+        }
+
+        switch (methodVersion) {
+            case 0:
+                method.invoke(manager, listener, FakeContext.PACKAGE_NAME, FakeContext.ROOT_UID);
+                break;
+            case 1:
+                method.invoke(manager, listener, FakeContext.PACKAGE_NAME, null, FakeContext.ROOT_UID);
+                break;
+            default:
+                method.invoke(manager, listener, FakeContext.PACKAGE_NAME, null, FakeContext.ROOT_UID, 0);
+                break;
         }
     }
 
@@ -124,10 +167,19 @@ public class ClipboardManager {
             try {
                 addPrimaryClipChangedListener = manager.getClass()
                         .getMethod("addPrimaryClipChangedListener", IOnPrimaryClipChangedListener.class, String.class, int.class);
-            } catch (NoSuchMethodException e) {
-                addPrimaryClipChangedListener = manager.getClass()
-                        .getMethod("addPrimaryClipChangedListener", IOnPrimaryClipChangedListener.class, String.class, String.class, int.class);
-                alternativeAddListenerMethod = true;
+                addListenerMethodVersion = 0;
+            } catch (NoSuchMethodException e1) {
+                try {
+                    addPrimaryClipChangedListener = manager.getClass()
+                            .getMethod("addPrimaryClipChangedListener", IOnPrimaryClipChangedListener.class, String.class, String.class,
+                                    int.class);
+                    addListenerMethodVersion = 1;
+                } catch (NoSuchMethodException e2) {
+                    addPrimaryClipChangedListener = manager.getClass()
+                            .getMethod("addPrimaryClipChangedListener", IOnPrimaryClipChangedListener.class, String.class, String.class,
+                                    int.class, int.class);
+                    addListenerMethodVersion = 2;
+                }
             }
         }
     }
@@ -137,7 +189,7 @@ public class ClipboardManager {
     public boolean addPrimaryClipChangedListener(IOnPrimaryClipChangedListener listener) {
         try {
             Method method = getAddPrimaryClipChangedListener();
-            addPrimaryClipChangedListener(method, alternativeAddListenerMethod, manager, listener);
+            addPrimaryClipChangedListener(method, addListenerMethodVersion, manager, listener);
             return true;
         } catch (InvocationTargetException | IllegalAccessException | NoSuchMethodException e) {
             Ln.e("Could not invoke method", e);