Compare commits: codec_mism...display_ch

45 commits (SHA1):
3174ce2d5a
05c80a036f
db241065d1
85302acf5e
d2989920c1
6ff3d9b571
1ea229fc2c
49c7d1037f
57b91b1fee
49cf48614e
699c5c7efb
c20aae5331
7dc21e87f9
cf3ae53e97
5c1482559a
68476b6d28
7b3dd595b4
a46150f753
3acffaae57
e33be3d288
c15df01171
09741bc805
afbaf59abb
5b10650f22
0d8014be52
064670ab4c
ff9fb5994d
a36de26969
281fcc7052
65fc53eace
a6f74d72f5
e724ff4349
79014143b9
c0a6432967
ec602a0334
7a9ea5c66f
d92b7a6024
0bb3955b95
62776fb261
10f60054ac
42fb947780
2e7a15a998
a7e61fb871
0cc6f6aa09
f69ac40534
@@ -33,6 +33,7 @@ _scrcpy() {
        --keyboard=
        --kill-adb-on-close
        --legacy-paste
        --list-apps
        --list-camera-sizes
        --list-cameras
        --list-displays
@@ -46,6 +47,8 @@ _scrcpy() {
        --mouse-bind=
        -n --no-control
        -N --no-playback
        --new-display
        --new-display=
        --no-audio
        --no-audio-playback
        --no-cleanup
@@ -76,6 +79,7 @@ _scrcpy() {
        -s --serial=
        -S --turn-screen-off
        --shortcut-mod=
        --start-app=
        -t --show-touches
        --tcpip
        --tcpip=
@@ -40,6 +40,7 @@ arguments=(
        '--keyboard=[Set the keyboard input mode]:mode:(disabled sdk uhid aoa)'
        '--kill-adb-on-close[Kill adb when scrcpy terminates]'
        '--legacy-paste[Inject computer clipboard text as a sequence of key events on Ctrl+v]'
        '--list-apps[List Android apps installed on the device]'
        '--list-camera-sizes[List the valid camera capture sizes]'
        '--list-cameras[List cameras available on the device]'
        '--list-displays[List displays available on the device]'
@@ -52,6 +53,7 @@ arguments=(
        '--mouse-bind=[Configure bindings of secondary clicks]'
        {-n,--no-control}'[Disable device control \(mirror the device in read only\)]'
        {-N,--no-playback}'[Disable video and audio playback]'
        '--new-display=[Create a new display]'
        '--no-audio[Disable audio forwarding]'
        '--no-audio-playback[Disable audio playback]'
        '--no-cleanup[Disable device cleanup actions on exit]'
@@ -80,6 +82,7 @@ arguments=(
        {-s,--serial=}'[The device serial number \(mandatory for multiple devices only\)]:serial:($("${ADB-adb}" devices | awk '\''$2 == "device" {print $1}'\''))'
        {-S,--turn-screen-off}'[Turn the device screen off immediately]'
        '--shortcut-mod=[\[key1,key2+key3,...\] Specify the modifiers to use for scrcpy shortcuts]:shortcut mod:(lctrl rctrl lalt ralt lsuper rsuper)'
        '--start-app=[Start an Android app]'
        {-t,--show-touches}'[Show physical touches]'
        '--tcpip[\(optional \[ip\:port\]\) Configure and connect the device over TCP/IP]'
        '--time-limit=[Set the maximum mirroring time, in seconds]'
@@ -5,6 +5,7 @@ src = [
    'src/adb/adb_parser.c',
    'src/adb/adb_tunnel.c',
    'src/audio_player.c',
    'src/audio_regulator.c',
    'src/cli.c',
    'src/clock.c',
    'src/compat.c',
@@ -22,6 +23,7 @@ src = [
    'src/frame_buffer.c',
    'src/input_manager.c',
    'src/keyboard_sdk.c',
    'src/mouse_capture.c',
    'src/mouse_sdk.c',
    'src/opengl.c',
    'src/options.c',
app/scrcpy.1 (37 changed lines)

@@ -227,6 +227,10 @@ Inject computer clipboard text as a sequence of key events on Ctrl+v (like MOD+S
This is a workaround for some devices not behaving as expected when setting the device clipboard programmatically.

.TP
.B \-\-list\-apps
List Android apps installed on the device.

.TP
.B \-\-list\-camera\-sizes
List the valid camera capture sizes.
@@ -314,6 +318,17 @@ Disable device control (mirror the device in read\-only).
.B \-N, \-\-no\-playback
Disable video and audio playback on the computer (equivalent to \fB\-\-no\-video\-playback \-\-no\-audio\-playback\fR).

.TP
\fB\-\-new\-display\fR[=[\fIwidth\fRx\fIheight\fR][/\fIdpi\fR]]
Create a new display with the specified resolution and density. If not provided, they default to the main display dimensions and DPI, and \fB\-\-max\-size\fR is considered.

Examples:

\-\-new\-display=1920x1080
\-\-new\-display=1920x1080/420
\-\-new\-display # default screen size and density
\-\-new\-display=240 # default screen size and 240 dpi

.TP
.B \-\-no\-audio
Disable audio forwarding.
@@ -478,6 +493,22 @@ For example, to use either LCtrl or LSuper for scrcpy shortcuts, pass "lctrl,lsu

Default is "lalt,lsuper" (left-Alt or left-Super).

.TP
.BI "\-\-start\-app " name
Start an Android app, by its exact package name.

Add a '?' prefix to select an app whose name starts with the given name, case-insensitive (it may take some time to retrieve the app names on the device):

scrcpy --start-app=?firefox

Add a '+' prefix to force-stop before starting the app:

scrcpy --new-display --start-app=+org.mozilla.firefox

Both prefixes can be used, in that order:

scrcpy --start-app=+?firefox

.TP
.B \-t, \-\-show\-touches
Enable "show touches" on start, restore the initial value on exit.
@@ -727,7 +758,11 @@ Pinch-to-zoom and rotate from the center of the screen

.TP
.B Shift+click-and-move
Tilt (slide vertically with two fingers)
Tilt vertically (slide with 2 fingers)

.TP
.B Ctrl+Shift+click-and-move
Tilt horizontally (slide with 2 fingers)

.TP
.B Drag & drop APK file
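
The '?' and '+' prefixes documented for --start-app above compose in a fixed order: '+' first (force-stop), then '?' (case-insensitive "starts with" lookup). As a rough illustration only (this is not scrcpy's actual parser, and the helper name is made up), the argument could be split like this:

#include <stdbool.h>

// Hypothetical helper: interpret a --start-app argument as documented above.
// "+?firefox" -> force_stop = true, search_by_name = true, name = "firefox"
const char *
parse_start_app_arg(const char *arg, bool *force_stop, bool *search_by_name) {
    *force_stop = false;
    *search_by_name = false;
    if (*arg == '+') {
        *force_stop = true;     // force-stop the app before starting it
        ++arg;
    }
    if (*arg == '?') {
        *search_by_name = true; // match by case-insensitive name prefix
        ++arg;
    }
    return arg; // exact package name, or a name prefix if search_by_name
}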
@@ -1,138 +1,23 @@
#include "audio_player.h"

#include <libavcodec/avcodec.h>
#include <libavutil/opt.h>

#include "util/log.h"

//#define SC_AUDIO_PLAYER_DEBUG // uncomment to debug

/**
 * Real-time audio player with configurable latency
 *
 * As input, the player regularly receives AVFrames of decoded audio samples.
 * As output, an SDL callback regularly requests audio samples to be played.
 * In the middle, an audio buffer stores the samples produced but not consumed
 * yet.
 *
 * The goal of the player is to feed the audio output with a latency as low as
 * possible while avoiding buffer underrun (i.e. not being able to provide
 * samples when requested).
 *
 * The player aims to feed the audio output with as little latency as possible
 * while avoiding buffer underrun. To achieve this, it attempts to maintain the
 * average buffering (the number of samples present in the buffer) around a
 * target value. If this target buffering is too low, then buffer underrun will
 * occur frequently. If it is too high, then latency will become unacceptable.
 * This target value is configured using the scrcpy option --audio-buffer.
 *
 * The player cannot adjust the sample input rate (it receives samples produced
 * in real-time) or the sample output rate (it must provide samples as
 * requested by the audio output callback). Therefore, it may only apply
 * compensation by resampling (converting _m_ input samples to _n_ output
 * samples).
 *
 * The compensation itself is applied by libswresample (FFmpeg). It is
 * configured using swr_set_compensation(). An important work for the player
 * is to estimate the compensation value regularly and apply it.
 *
 * The estimated buffering level is the result of averaging the "natural"
 * buffering (samples are produced and consumed by blocks, so it must be
 * smoothed), and making instant adjustments resulting of its own actions
 * (explicit compensation and silence insertion on underflow), which are not
 * smoothed.
 *
 * Buffer underflow events can occur when packets arrive too late. In that case,
 * the player inserts silence. Once the packets finally arrive (late), one
 * strategy could be to drop the samples that were replaced by silence, in
 * order to keep a minimal latency. However, dropping samples in case of buffer
 * underflow is inadvisable, as it would temporarily increase the underflow
 * even more and cause very noticeable audio glitches.
 *
 * Therefore, the player doesn't drop any sample on underflow. The compensation
 * mechanism will absorb the delay introduced by the inserted silence.
 */

/** Downcast frame_sink to sc_audio_player */
#define DOWNCAST(SINK) container_of(SINK, struct sc_audio_player, frame_sink)

#define SC_AV_SAMPLE_FMT AV_SAMPLE_FMT_FLT
#define SC_SDL_SAMPLE_FMT AUDIO_F32

#define TO_BYTES(SAMPLES) sc_audiobuf_to_bytes(&ap->buf, (SAMPLES))
#define TO_SAMPLES(BYTES) sc_audiobuf_to_samples(&ap->buf, (BYTES))

static void SDLCALL
|
||||
sc_audio_player_sdl_callback(void *userdata, uint8_t *stream, int len_int) {
|
||||
struct sc_audio_player *ap = userdata;
|
||||
|
||||
// This callback is called with the lock used by SDL_LockAudioDevice()
|
||||
|
||||
assert(len_int > 0);
|
||||
size_t len = len_int;
|
||||
uint32_t count = TO_SAMPLES(len);
|
||||
|
||||
#ifdef SC_AUDIO_PLAYER_DEBUG
|
||||
LOGD("[Audio] SDL callback requests %" PRIu32 " samples", count);
|
||||
#endif
|
||||
assert(len % ap->audioreg.sample_size == 0);
|
||||
uint32_t out_samples = len / ap->audioreg.sample_size;
|
||||
|
||||
bool played = atomic_load_explicit(&ap->played, memory_order_relaxed);
|
||||
if (!played) {
|
||||
uint32_t buffered_samples = sc_audiobuf_can_read(&ap->buf);
|
||||
// Wait until the buffer is filled up to at least target_buffering
|
||||
// before playing
|
||||
if (buffered_samples < ap->target_buffering) {
|
||||
LOGV("[Audio] Inserting initial buffering silence: %" PRIu32
|
||||
" samples", count);
|
||||
// Delay playback starting to reach the target buffering. Fill the
|
||||
// whole buffer with silence (len is small compared to the
|
||||
// arbitrary margin value).
|
||||
memset(stream, 0, len);
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
uint32_t read = sc_audiobuf_read(&ap->buf, stream, count);
|
||||
|
||||
if (read < count) {
|
||||
uint32_t silence = count - read;
|
||||
// Insert silence. In theory, the inserted silent samples replace the
|
||||
// missing real samples, which will arrive later, so they should be
|
||||
// dropped to keep the latency minimal. However, this would cause very
|
||||
// audible glitches, so let the clock compensation restore the target
|
||||
// latency.
|
||||
LOGD("[Audio] Buffer underflow, inserting silence: %" PRIu32 " samples",
|
||||
silence);
|
||||
memset(stream + TO_BYTES(read), 0, TO_BYTES(silence));
|
||||
|
||||
bool received = atomic_load_explicit(&ap->received,
|
||||
memory_order_relaxed);
|
||||
if (received) {
|
||||
// Inserting additional samples immediately increases buffering
|
||||
atomic_fetch_add_explicit(&ap->underflow, silence,
|
||||
memory_order_relaxed);
|
||||
}
|
||||
}
|
||||
|
||||
atomic_store_explicit(&ap->played, true, memory_order_relaxed);
|
||||
}
|
||||
|
||||
static uint8_t *
|
||||
sc_audio_player_get_swr_buf(struct sc_audio_player *ap, uint32_t min_samples) {
|
||||
size_t min_buf_size = TO_BYTES(min_samples);
|
||||
if (min_buf_size > ap->swr_buf_alloc_size) {
|
||||
size_t new_size = min_buf_size + 4096;
|
||||
uint8_t *buf = realloc(ap->swr_buf, new_size);
|
||||
if (!buf) {
|
||||
LOG_OOM();
|
||||
// Could not realloc to the requested size
|
||||
return NULL;
|
||||
}
|
||||
ap->swr_buf = buf;
|
||||
ap->swr_buf_alloc_size = new_size;
|
||||
}
|
||||
|
||||
return ap->swr_buf;
|
||||
sc_audio_regulator_pull(&ap->audioreg, stream, out_samples);
|
||||
}
|
||||
|
||||
static bool
|
||||
@ -140,209 +25,21 @@ sc_audio_player_frame_sink_push(struct sc_frame_sink *sink,
|
||||
const AVFrame *frame) {
|
||||
struct sc_audio_player *ap = DOWNCAST(sink);
|
||||
|
||||
SwrContext *swr_ctx = ap->swr_ctx;
|
||||
|
||||
int64_t swr_delay = swr_get_delay(swr_ctx, ap->sample_rate);
|
||||
// No need to av_rescale_rnd(), input and output sample rates are the same.
|
||||
// Add more space (256) for clock compensation.
|
||||
int dst_nb_samples = swr_delay + frame->nb_samples + 256;
|
||||
|
||||
uint8_t *swr_buf = sc_audio_player_get_swr_buf(ap, dst_nb_samples);
|
||||
if (!swr_buf) {
|
||||
return false;
|
||||
}
|
||||
|
||||
int ret = swr_convert(swr_ctx, &swr_buf, dst_nb_samples,
|
||||
(const uint8_t **) frame->data, frame->nb_samples);
|
||||
if (ret < 0) {
|
||||
LOGE("Resampling failed: %d", ret);
|
||||
return false;
|
||||
}
|
||||
|
||||
// swr_convert() returns the number of samples which would have been
|
||||
// written if the buffer was big enough.
|
||||
uint32_t samples = MIN(ret, dst_nb_samples);
|
||||
#ifdef SC_AUDIO_PLAYER_DEBUG
|
||||
LOGD("[Audio] %" PRIu32 " samples written to buffer", samples);
|
||||
#endif
|
||||
|
||||
uint32_t cap = sc_audiobuf_capacity(&ap->buf);
|
||||
if (samples > cap) {
|
||||
// Very very unlikely: a single resampled frame should never
|
||||
// exceed the audio buffer size (or something is very wrong).
|
||||
// Ignore the first bytes in swr_buf to avoid memory corruption anyway.
|
||||
swr_buf += TO_BYTES(samples - cap);
|
||||
samples = cap;
|
||||
}
|
||||
|
||||
uint32_t skipped_samples = 0;
|
||||
|
||||
uint32_t written = sc_audiobuf_write(&ap->buf, swr_buf, samples);
|
||||
if (written < samples) {
|
||||
uint32_t remaining = samples - written;
|
||||
|
||||
// All samples that could be written without locking have been written,
|
||||
// now we need to lock to drop/consume old samples
|
||||
SDL_LockAudioDevice(ap->device);
|
||||
|
||||
// Retry with the lock
|
||||
written += sc_audiobuf_write(&ap->buf,
|
||||
swr_buf + TO_BYTES(written),
|
||||
remaining);
|
||||
if (written < samples) {
|
||||
remaining = samples - written;
|
||||
// Still insufficient, drop old samples to make space
|
||||
skipped_samples = sc_audiobuf_read(&ap->buf, NULL, remaining);
|
||||
assert(skipped_samples == remaining);
|
||||
}
|
||||
|
||||
SDL_UnlockAudioDevice(ap->device);
|
||||
|
||||
if (written < samples) {
|
||||
// Now there is enough space
|
||||
uint32_t w = sc_audiobuf_write(&ap->buf,
|
||||
swr_buf + TO_BYTES(written),
|
||||
remaining);
|
||||
assert(w == remaining);
|
||||
(void) w;
|
||||
}
|
||||
}
|
||||
|
||||
uint32_t underflow = 0;
|
||||
uint32_t max_buffered_samples;
|
||||
bool played = atomic_load_explicit(&ap->played, memory_order_relaxed);
|
||||
if (played) {
|
||||
underflow = atomic_exchange_explicit(&ap->underflow, 0,
|
||||
memory_order_relaxed);
|
||||
|
||||
max_buffered_samples = ap->target_buffering
|
||||
+ 12 * ap->output_buffer
|
||||
+ ap->target_buffering / 10;
|
||||
} else {
|
||||
// SDL playback not started yet, do not accumulate more than
|
||||
// max_initial_buffering samples, this would cause unnecessary delay
|
||||
// (and glitches to compensate) on start.
|
||||
max_buffered_samples = ap->target_buffering + 2 * ap->output_buffer;
|
||||
}
|
||||
|
||||
uint32_t can_read = sc_audiobuf_can_read(&ap->buf);
|
||||
if (can_read > max_buffered_samples) {
|
||||
uint32_t skip_samples = 0;
|
||||
|
||||
SDL_LockAudioDevice(ap->device);
|
||||
can_read = sc_audiobuf_can_read(&ap->buf);
|
||||
if (can_read > max_buffered_samples) {
|
||||
skip_samples = can_read - max_buffered_samples;
|
||||
uint32_t r = sc_audiobuf_read(&ap->buf, NULL, skip_samples);
|
||||
assert(r == skip_samples);
|
||||
(void) r;
|
||||
skipped_samples += skip_samples;
|
||||
}
|
||||
SDL_UnlockAudioDevice(ap->device);
|
||||
|
||||
if (skip_samples) {
|
||||
if (played) {
|
||||
LOGD("[Audio] Buffering threshold exceeded, skipping %" PRIu32
|
||||
" samples", skip_samples);
|
||||
#ifdef SC_AUDIO_PLAYER_DEBUG
|
||||
} else {
|
||||
LOGD("[Audio] Playback not started, skipping %" PRIu32
|
||||
" samples", skip_samples);
|
||||
#endif
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
atomic_store_explicit(&ap->received, true, memory_order_relaxed);
|
||||
if (!played) {
|
||||
// Nothing more to do
|
||||
return true;
|
||||
}
|
||||
|
||||
// Number of samples added (or removed, if negative) for compensation
|
||||
int32_t instant_compensation = (int32_t) written - frame->nb_samples;
|
||||
// Inserting silence instantly increases buffering
|
||||
int32_t inserted_silence = (int32_t) underflow;
|
||||
// Dropping input samples instantly decreases buffering
|
||||
int32_t dropped = (int32_t) skipped_samples;
|
||||
|
||||
// The compensation must apply instantly, it must not be smoothed
|
||||
ap->avg_buffering.avg += instant_compensation + inserted_silence - dropped;
|
||||
if (ap->avg_buffering.avg < 0) {
|
||||
// Since dropping samples instantly reduces buffering, the difference
|
||||
// is applied immediately to the average value, assuming that the delay
|
||||
// between the producer and the consumer will be caught up.
|
||||
//
|
||||
// However, when this assumption is not valid, the average buffering
|
||||
// may decrease indefinitely. Prevent it to become negative to limit
|
||||
// the consequences.
|
||||
ap->avg_buffering.avg = 0;
|
||||
}
|
||||
|
||||
// However, the buffering level must be smoothed
|
||||
sc_average_push(&ap->avg_buffering, can_read);
|
||||
|
||||
#ifdef SC_AUDIO_PLAYER_DEBUG
|
||||
LOGD("[Audio] can_read=%" PRIu32 " avg_buffering=%f",
|
||||
can_read, sc_average_get(&ap->avg_buffering));
|
||||
#endif
|
||||
|
||||
ap->samples_since_resync += written;
|
||||
if (ap->samples_since_resync >= ap->sample_rate) {
|
||||
// Recompute compensation every second
|
||||
ap->samples_since_resync = 0;
|
||||
|
||||
float avg = sc_average_get(&ap->avg_buffering);
|
||||
int diff = ap->target_buffering - avg;
|
||||
|
||||
// Enable compensation when the difference exceeds +/- 4ms.
|
||||
// Disable compensation when the difference is lower than +/- 1ms.
|
||||
int threshold = ap->compensation != 0
|
||||
? ap->sample_rate / 1000 /* 1ms */
|
||||
: ap->sample_rate * 4 / 1000; /* 4ms */
|
||||
|
||||
if (abs(diff) < threshold) {
|
||||
// Do not compensate for small values, the error is just noise
|
||||
diff = 0;
|
||||
} else if (diff < 0 && can_read < ap->target_buffering) {
|
||||
// Do not accelerate if the instant buffering level is below the
|
||||
// target, this would increase underflow
|
||||
diff = 0;
|
||||
}
|
||||
// Compensate the diff over 4 seconds (but will be recomputed after 1
|
||||
// second)
|
||||
int distance = 4 * ap->sample_rate;
|
||||
// Limit compensation rate to 2%
|
||||
int abs_max_diff = distance / 50;
|
||||
diff = CLAMP(diff, -abs_max_diff, abs_max_diff);
|
||||
LOGV("[Audio] Buffering: target=%" PRIu32 " avg=%f cur=%" PRIu32
|
||||
" compensation=%d", ap->target_buffering, avg, can_read, diff);
|
||||
|
||||
if (diff != ap->compensation) {
|
||||
int ret = swr_set_compensation(swr_ctx, diff, distance);
|
||||
if (ret < 0) {
|
||||
LOGW("Resampling compensation failed: %d", ret);
|
||||
// not fatal
|
||||
} else {
|
||||
ap->compensation = diff;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
return sc_audio_regulator_push(&ap->audioreg, frame);
|
||||
}
|
||||
|
||||
static bool
|
||||
sc_audio_player_frame_sink_open(struct sc_frame_sink *sink,
|
||||
const AVCodecContext *ctx) {
|
||||
struct sc_audio_player *ap = DOWNCAST(sink);
|
||||
|
||||
#ifdef SCRCPY_LAVU_HAS_CHLAYOUT
|
||||
assert(ctx->ch_layout.nb_channels > 0);
|
||||
unsigned nb_channels = ctx->ch_layout.nb_channels;
|
||||
assert(ctx->ch_layout.nb_channels > 0 && ctx->ch_layout.nb_channels < 256);
|
||||
uint8_t nb_channels = ctx->ch_layout.nb_channels;
|
||||
#else
|
||||
int tmp = av_get_channel_layout_nb_channels(ctx->channel_layout);
|
||||
assert(tmp > 0);
|
||||
unsigned nb_channels = tmp;
|
||||
assert(tmp > 0 && tmp < 256);
|
||||
uint8_t nb_channels = tmp;
|
||||
#endif
|
||||
|
||||
assert(ctx->sample_rate > 0);
|
||||
@ -350,17 +47,19 @@ sc_audio_player_frame_sink_open(struct sc_frame_sink *sink,
|
||||
int out_bytes_per_sample = av_get_bytes_per_sample(SC_AV_SAMPLE_FMT);
|
||||
assert(out_bytes_per_sample > 0);
|
||||
|
||||
ap->sample_rate = ctx->sample_rate;
|
||||
ap->nb_channels = nb_channels;
|
||||
ap->out_bytes_per_sample = out_bytes_per_sample;
|
||||
uint32_t target_buffering_samples =
|
||||
ap->target_buffering_delay * ctx->sample_rate / SC_TICK_FREQ;
|
||||
|
||||
ap->target_buffering = ap->target_buffering_delay * ap->sample_rate
|
||||
/ SC_TICK_FREQ;
|
||||
size_t sample_size = nb_channels * out_bytes_per_sample;
|
||||
bool ok = sc_audio_regulator_init(&ap->audioreg, sample_size, ctx,
|
||||
target_buffering_samples);
|
||||
if (!ok) {
|
||||
return false;
|
||||
}
|
||||
|
||||
uint64_t aout_samples = ap->output_buffer_duration * ap->sample_rate
|
||||
uint64_t aout_samples = ap->output_buffer_duration * ctx->sample_rate
|
||||
/ SC_TICK_FREQ;
|
||||
assert(aout_samples <= 0xFFFF);
|
||||
ap->output_buffer = (uint16_t) aout_samples;
|
||||
|
||||
SDL_AudioSpec desired = {
|
||||
.freq = ctx->sample_rate,
|
||||
@ -375,69 +74,10 @@ sc_audio_player_frame_sink_open(struct sc_frame_sink *sink,
|
||||
ap->device = SDL_OpenAudioDevice(NULL, 0, &desired, &obtained, 0);
|
||||
if (!ap->device) {
|
||||
LOGE("Could not open audio device: %s", SDL_GetError());
|
||||
sc_audio_regulator_destroy(&ap->audioreg);
|
||||
return false;
|
||||
}
|
||||
|
||||
SwrContext *swr_ctx = swr_alloc();
|
||||
if (!swr_ctx) {
|
||||
LOG_OOM();
|
||||
goto error_close_audio_device;
|
||||
}
|
||||
ap->swr_ctx = swr_ctx;
|
||||
|
||||
#ifdef SCRCPY_LAVU_HAS_CHLAYOUT
|
||||
av_opt_set_chlayout(swr_ctx, "in_chlayout", &ctx->ch_layout, 0);
|
||||
av_opt_set_chlayout(swr_ctx, "out_chlayout", &ctx->ch_layout, 0);
|
||||
#else
|
||||
av_opt_set_channel_layout(swr_ctx, "in_channel_layout",
|
||||
ctx->channel_layout, 0);
|
||||
av_opt_set_channel_layout(swr_ctx, "out_channel_layout",
|
||||
ctx->channel_layout, 0);
|
||||
#endif
|
||||
|
||||
av_opt_set_int(swr_ctx, "in_sample_rate", ctx->sample_rate, 0);
|
||||
av_opt_set_int(swr_ctx, "out_sample_rate", ctx->sample_rate, 0);
|
||||
|
||||
av_opt_set_sample_fmt(swr_ctx, "in_sample_fmt", ctx->sample_fmt, 0);
|
||||
av_opt_set_sample_fmt(swr_ctx, "out_sample_fmt", SC_AV_SAMPLE_FMT, 0);
|
||||
|
||||
int ret = swr_init(swr_ctx);
|
||||
if (ret) {
|
||||
LOGE("Failed to initialize the resampling context");
|
||||
goto error_free_swr_ctx;
|
||||
}
|
||||
|
||||
// Use a ring-buffer of the target buffering size plus 1 second between the
|
||||
// producer and the consumer. It's too big on purpose, to guarantee that
|
||||
// the producer and the consumer will be able to access it in parallel
|
||||
// without locking.
|
||||
uint32_t audiobuf_samples = ap->target_buffering + ap->sample_rate;
|
||||
|
||||
size_t sample_size = ap->nb_channels * ap->out_bytes_per_sample;
|
||||
bool ok = sc_audiobuf_init(&ap->buf, sample_size, audiobuf_samples);
|
||||
if (!ok) {
|
||||
goto error_free_swr_ctx;
|
||||
}
|
||||
|
||||
size_t initial_swr_buf_size = TO_BYTES(4096);
|
||||
ap->swr_buf = malloc(initial_swr_buf_size);
|
||||
if (!ap->swr_buf) {
|
||||
LOG_OOM();
|
||||
goto error_destroy_audiobuf;
|
||||
}
|
||||
ap->swr_buf_alloc_size = initial_swr_buf_size;
|
||||
|
||||
// Samples are produced and consumed by blocks, so the buffering must be
|
||||
// smoothed to get a relatively stable value.
|
||||
sc_average_init(&ap->avg_buffering, 128);
|
||||
ap->samples_since_resync = 0;
|
||||
|
||||
ap->received = false;
|
||||
atomic_init(&ap->played, false);
|
||||
atomic_init(&ap->received, false);
|
||||
atomic_init(&ap->underflow, 0);
|
||||
ap->compensation = 0;
|
||||
|
||||
// The thread calling open() is the thread calling push(), which fills the
|
||||
// audio buffer consumed by the SDL audio thread.
|
||||
ok = sc_thread_set_priority(SC_THREAD_PRIORITY_TIME_CRITICAL);
|
||||
@ -449,15 +89,6 @@ sc_audio_player_frame_sink_open(struct sc_frame_sink *sink,
|
||||
SDL_PauseAudioDevice(ap->device, 0);
|
||||
|
||||
return true;
|
||||
|
||||
error_destroy_audiobuf:
|
||||
sc_audiobuf_destroy(&ap->buf);
|
||||
error_free_swr_ctx:
|
||||
swr_free(&ap->swr_ctx);
|
||||
error_close_audio_device:
|
||||
SDL_CloseAudioDevice(ap->device);
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
static void
|
||||
@ -468,9 +99,7 @@ sc_audio_player_frame_sink_close(struct sc_frame_sink *sink) {
|
||||
SDL_PauseAudioDevice(ap->device, 1);
|
||||
SDL_CloseAudioDevice(ap->device);
|
||||
|
||||
free(ap->swr_buf);
|
||||
sc_audiobuf_destroy(&ap->buf);
|
||||
swr_free(&ap->swr_ctx);
|
||||
sc_audio_regulator_destroy(&ap->audioreg);
|
||||
}
|
||||
|
||||
void
|
||||
|
@@ -5,76 +5,27 @@

#include <stdatomic.h>
#include <stdbool.h>
#include <libavformat/avformat.h>
#include <libswresample/swresample.h>
#include <SDL2/SDL.h>

#include "audio_regulator.h"
#include "trait/frame_sink.h"
#include "util/audiobuf.h"
#include "util/average.h"
#include "util/thread.h"
#include "util/tick.h"

struct sc_audio_player {
    struct sc_frame_sink frame_sink;

    SDL_AudioDeviceID device;

    // The target buffering between the producer and the consumer. This value
    // is directly use for compensation.
    // Since audio capture and/or encoding on the device typically produce
    // blocks of 960 samples (20ms) or 1024 samples (~21.3ms), this target
    // value should be higher.
    sc_tick target_buffering_delay;
    uint32_t target_buffering; // in samples

    // SDL audio output buffer size.
    // SDL audio output buffer size
    sc_tick output_buffer_duration;
    uint16_t output_buffer;

    // Audio buffer to communicate between the receiver and the SDL audio
    // callback
    struct sc_audiobuf buf;

    // Resampler (only used from the receiver thread)
    struct SwrContext *swr_ctx;

    // The sample rate is the same for input and output
    unsigned sample_rate;
    // The number of channels is the same for input and output
    unsigned nb_channels;
    // The number of bytes per sample for a single channel
    size_t out_bytes_per_sample;

    // Target buffer for resampling (only used by the receiver thread)
    uint8_t *swr_buf;
    size_t swr_buf_alloc_size;

    // Number of buffered samples (may be negative on underflow) (only used by
    // the receiver thread)
    struct sc_average avg_buffering;
    // Count the number of samples to trigger a compensation update regularly
    // (only used by the receiver thread)
    uint32_t samples_since_resync;

    // Number of silence samples inserted since the last received packet
    atomic_uint_least32_t underflow;

    // Current applied compensation value (only used by the receiver thread)
    int compensation;

    // Set to true the first time a sample is received
    atomic_bool received;

    // Set to true the first time the SDL callback is called
    atomic_bool played;

    const struct sc_audio_player_callbacks *cbs;
    void *cbs_userdata;
};

struct sc_audio_player_callbacks {
    void (*on_ended)(struct sc_audio_player *ap, bool success, void *userdata);
    SDL_AudioDeviceID device;
    struct sc_audio_regulator audioreg;
};

void
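
For scale, the delay-to-sample conversion implied by target_buffering_delay and target_buffering above is target_buffering = target_buffering_delay * sample_rate / SC_TICK_FREQ, as computed when the frame sink is opened. A worked example with assumed values (50 ms of buffering at 48 kHz, and assuming SC_TICK_FREQ is 1000000, i.e. ticks are microseconds):

// Illustration only, values assumed.
sc_tick target_buffering_delay = 50000;  // 50 ms expressed in ticks (us)
uint32_t sample_rate = 48000;
uint32_t target_buffering =
        target_buffering_delay * sample_rate / SC_TICK_FREQ;  // = 2400 samples
// 2400 samples is comfortably above the 960/1024-sample device blocks
// mentioned in the comment above, as that comment requires.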
app/src/audio_regulator.c (new file, 415 lines)

@@ -0,0 +1,415 @@
#include "audio_regulator.h"

#include <libavcodec/avcodec.h>
#include <libavutil/opt.h>

#include "util/log.h"

//#define SC_AUDIO_REGULATOR_DEBUG // uncomment to debug

/**
 * Real-time audio regulator with configurable latency
 *
 * As input, the regulator regularly receives AVFrames of decoded audio samples.
 * As output, the audio player regularly requests audio samples to be played.
 * In the middle, an audio buffer stores the samples produced but not consumed
 * yet.
 *
 * The goal of the regulator is to feed the audio player with a latency as low
 * as possible while avoiding buffer underrun (i.e. not being able to provide
 * samples when requested).
 *
 * To achieve this, it attempts to maintain the average buffering (the number
 * of samples present in the buffer) around a target value. If this target
 * buffering is too low, then buffer underrun will occur frequently. If it is
 * too high, then latency will become unacceptable. This target value is
 * configured using the scrcpy option --audio-buffer.
 *
 * The regulator cannot adjust the sample input rate (it receives samples
 * produced in real-time) or the sample output rate (it must provide samples as
 * requested by the audio player). Therefore, it may only apply compensation by
 * resampling (converting _m_ input samples to _n_ output samples).
 *
 * The compensation itself is applied by libswresample (FFmpeg). It is
 * configured using swr_set_compensation(). An important work for the regulator
 * is to estimate the compensation value regularly and apply it.
 *
 * The estimated buffering level is the result of averaging the "natural"
 * buffering (samples are produced and consumed by blocks, so it must be
 * smoothed), and making instant adjustments resulting of its own actions
 * (explicit compensation and silence insertion on underflow), which are not
 * smoothed.
 *
 * Buffer underflow events can occur when packets arrive too late. In that case,
 * the regulator inserts silence. Once the packets finally arrive (late), one
 * strategy could be to drop the samples that were replaced by silence, in
 * order to keep a minimal latency. However, dropping samples in case of buffer
 * underflow is inadvisable, as it would temporarily increase the underflow
 * even more and cause very noticeable audio glitches.
 *
 * Therefore, the regulator doesn't drop any sample on underflow. The
 * compensation mechanism will absorb the delay introduced by the inserted
 * silence.
 */

#define TO_BYTES(SAMPLES) sc_audiobuf_to_bytes(&ar->buf, (SAMPLES))
#define TO_SAMPLES(BYTES) sc_audiobuf_to_samples(&ar->buf, (BYTES))

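A worked example of the compensation estimate described above, with assumed values (48 kHz output and a 2400-sample target, i.e. 50 ms), mirroring the once-per-second computation implemented later in this file:

// Assumed values, for illustration only.
uint32_t sample_rate = 48000;
uint32_t target_buffering = 2400;        // 50 ms at 48 kHz
float avg = 2900;                        // smoothed buffering estimate (samples)

int diff = target_buffering - avg;       // -500: 500 samples too many buffered
int threshold = sample_rate * 4 / 1000;  // 192 samples (4 ms), since no
                                         // compensation is currently applied
// |diff| >= threshold, so compensation is (re)enabled
int distance = 4 * sample_rate;          // spread over 4 s = 192000 samples
int abs_max_diff = distance / 50;        // 2% rate limit = 3840 samples
// diff stays within the limit, so swr_set_compensation(swr_ctx, -500, 192000)
// makes the resampler output about 500 fewer samples than it receives over
// the next 4 seconds, draining the buffer back toward the 2400-sample target.
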
void
|
||||
sc_audio_regulator_pull(struct sc_audio_regulator *ar, uint8_t *out,
|
||||
uint32_t out_samples) {
|
||||
#ifdef SC_AUDIO_REGULATOR_DEBUG
|
||||
LOGD("[Audio] Audio regulator pulls %" PRIu32 " samples", out_samples);
|
||||
#endif
|
||||
|
||||
// A lock is necessary in the rare case where the producer needs to drop
|
||||
// samples already pushed (when the buffer is full)
|
||||
sc_mutex_lock(&ar->mutex);
|
||||
|
||||
bool played = atomic_load_explicit(&ar->played, memory_order_relaxed);
|
||||
if (!played) {
|
||||
uint32_t buffered_samples = sc_audiobuf_can_read(&ar->buf);
|
||||
// Wait until the buffer is filled up to at least target_buffering
|
||||
// before playing
|
||||
if (buffered_samples < ar->target_buffering) {
|
||||
LOGV("[Audio] Inserting initial buffering silence: %" PRIu32
|
||||
" samples", out_samples);
|
||||
// Delay playback starting to reach the target buffering. Fill the
|
||||
// whole buffer with silence (len is small compared to the
|
||||
// arbitrary margin value).
|
||||
memset(out, 0, out_samples * ar->sample_size);
|
||||
sc_mutex_unlock(&ar->mutex);
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
uint32_t read = sc_audiobuf_read(&ar->buf, out, out_samples);
|
||||
|
||||
sc_mutex_unlock(&ar->mutex);
|
||||
|
||||
if (read < out_samples) {
|
||||
uint32_t silence = out_samples - read;
|
||||
// Insert silence. In theory, the inserted silent samples replace the
|
||||
// missing real samples, which will arrive later, so they should be
|
||||
// dropped to keep the latency minimal. However, this would cause very
|
||||
// audible glitches, so let the clock compensation restore the target
|
||||
// latency.
|
||||
LOGD("[Audio] Buffer underflow, inserting silence: %" PRIu32 " samples",
|
||||
silence);
|
||||
memset(out + TO_BYTES(read), 0, TO_BYTES(silence));
|
||||
|
||||
bool received = atomic_load_explicit(&ar->received,
|
||||
memory_order_relaxed);
|
||||
if (received) {
|
||||
// Inserting additional samples immediately increases buffering
|
||||
atomic_fetch_add_explicit(&ar->underflow, silence,
|
||||
memory_order_relaxed);
|
||||
}
|
||||
}
|
||||
|
||||
atomic_store_explicit(&ar->played, true, memory_order_relaxed);
|
||||
}
|
||||
|
||||
static uint8_t *
|
||||
sc_audio_regulator_get_swr_buf(struct sc_audio_regulator *ar,
|
||||
uint32_t min_samples) {
|
||||
size_t min_buf_size = TO_BYTES(min_samples);
|
||||
if (min_buf_size > ar->swr_buf_alloc_size) {
|
||||
size_t new_size = min_buf_size + 4096;
|
||||
uint8_t *buf = realloc(ar->swr_buf, new_size);
|
||||
if (!buf) {
|
||||
LOG_OOM();
|
||||
// Could not realloc to the requested size
|
||||
return NULL;
|
||||
}
|
||||
ar->swr_buf = buf;
|
||||
ar->swr_buf_alloc_size = new_size;
|
||||
}
|
||||
|
||||
return ar->swr_buf;
|
||||
}
|
||||
|
||||
bool
|
||||
sc_audio_regulator_push(struct sc_audio_regulator *ar, const AVFrame *frame) {
|
||||
SwrContext *swr_ctx = ar->swr_ctx;
|
||||
|
||||
int64_t swr_delay = swr_get_delay(swr_ctx, ar->sample_rate);
|
||||
// No need to av_rescale_rnd(), input and output sample rates are the same.
|
||||
// Add more space (256) for clock compensation.
|
||||
int dst_nb_samples = swr_delay + frame->nb_samples + 256;
|
||||
|
||||
uint8_t *swr_buf = sc_audio_regulator_get_swr_buf(ar, dst_nb_samples);
|
||||
if (!swr_buf) {
|
||||
return false;
|
||||
}
|
||||
|
||||
int ret = swr_convert(swr_ctx, &swr_buf, dst_nb_samples,
|
||||
(const uint8_t **) frame->data, frame->nb_samples);
|
||||
if (ret < 0) {
|
||||
LOGE("Resampling failed: %d", ret);
|
||||
return false;
|
||||
}
|
||||
|
||||
// swr_convert() returns the number of samples which would have been
|
||||
// written if the buffer was big enough.
|
||||
uint32_t samples = MIN(ret, dst_nb_samples);
|
||||
#ifdef SC_AUDIO_REGULATOR_DEBUG
|
||||
LOGD("[Audio] %" PRIu32 " samples written to buffer", samples);
|
||||
#endif
|
||||
|
||||
uint32_t cap = sc_audiobuf_capacity(&ar->buf);
|
||||
if (samples > cap) {
|
||||
// Very very unlikely: a single resampled frame should never
|
||||
// exceed the audio buffer size (or something is very wrong).
|
||||
// Ignore the first bytes in swr_buf to avoid memory corruption anyway.
|
||||
swr_buf += TO_BYTES(samples - cap);
|
||||
samples = cap;
|
||||
}
|
||||
|
||||
uint32_t skipped_samples = 0;
|
||||
|
||||
uint32_t written = sc_audiobuf_write(&ar->buf, swr_buf, samples);
|
||||
if (written < samples) {
|
||||
uint32_t remaining = samples - written;
|
||||
|
||||
// All samples that could be written without locking have been written,
|
||||
// now we need to lock to drop/consume old samples
|
||||
sc_mutex_lock(&ar->mutex);
|
||||
|
||||
// Retry with the lock
|
||||
written += sc_audiobuf_write(&ar->buf,
|
||||
swr_buf + TO_BYTES(written),
|
||||
remaining);
|
||||
if (written < samples) {
|
||||
remaining = samples - written;
|
||||
// Still insufficient, drop old samples to make space
|
||||
skipped_samples = sc_audiobuf_read(&ar->buf, NULL, remaining);
|
||||
assert(skipped_samples == remaining);
|
||||
}
|
||||
|
||||
sc_mutex_unlock(&ar->mutex);
|
||||
|
||||
if (written < samples) {
|
||||
// Now there is enough space
|
||||
uint32_t w = sc_audiobuf_write(&ar->buf,
|
||||
swr_buf + TO_BYTES(written),
|
||||
remaining);
|
||||
assert(w == remaining);
|
||||
(void) w;
|
||||
}
|
||||
}
|
||||
|
||||
uint32_t underflow = 0;
|
||||
uint32_t max_buffered_samples;
|
||||
bool played = atomic_load_explicit(&ar->played, memory_order_relaxed);
|
||||
if (played) {
|
||||
underflow = atomic_exchange_explicit(&ar->underflow, 0,
|
||||
memory_order_relaxed);
|
||||
|
||||
max_buffered_samples = ar->target_buffering * 11 / 10
|
||||
+ 60 * ar->sample_rate / 1000 /* 60 ms */;
|
||||
} else {
|
||||
// Playback not started yet, do not accumulate more than
|
||||
// max_initial_buffering samples, this would cause unnecessary delay
|
||||
// (and glitches to compensate) on start.
|
||||
max_buffered_samples = ar->target_buffering
|
||||
+ 10 * ar->sample_rate / 1000 /* 10 ms */;
|
||||
}
|
||||
|
||||
uint32_t can_read = sc_audiobuf_can_read(&ar->buf);
|
||||
if (can_read > max_buffered_samples) {
|
||||
uint32_t skip_samples = 0;
|
||||
|
||||
sc_mutex_lock(&ar->mutex);
|
||||
can_read = sc_audiobuf_can_read(&ar->buf);
|
||||
if (can_read > max_buffered_samples) {
|
||||
skip_samples = can_read - max_buffered_samples;
|
||||
uint32_t r = sc_audiobuf_read(&ar->buf, NULL, skip_samples);
|
||||
assert(r == skip_samples);
|
||||
(void) r;
|
||||
skipped_samples += skip_samples;
|
||||
}
|
||||
sc_mutex_unlock(&ar->mutex);
|
||||
|
||||
if (skip_samples) {
|
||||
if (played) {
|
||||
LOGD("[Audio] Buffering threshold exceeded, skipping %" PRIu32
|
||||
" samples", skip_samples);
|
||||
#ifdef SC_AUDIO_REGULATOR_DEBUG
|
||||
} else {
|
||||
LOGD("[Audio] Playback not started, skipping %" PRIu32
|
||||
" samples", skip_samples);
|
||||
#endif
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
atomic_store_explicit(&ar->received, true, memory_order_relaxed);
|
||||
if (!played) {
|
||||
// Nothing more to do
|
||||
return true;
|
||||
}
|
||||
|
||||
// Number of samples added (or removed, if negative) for compensation
|
||||
int32_t instant_compensation = (int32_t) written - frame->nb_samples;
|
||||
// Inserting silence instantly increases buffering
|
||||
int32_t inserted_silence = (int32_t) underflow;
|
||||
// Dropping input samples instantly decreases buffering
|
||||
int32_t dropped = (int32_t) skipped_samples;
|
||||
|
||||
// The compensation must apply instantly, it must not be smoothed
|
||||
ar->avg_buffering.avg += instant_compensation + inserted_silence - dropped;
|
||||
if (ar->avg_buffering.avg < 0) {
|
||||
// Since dropping samples instantly reduces buffering, the difference
|
||||
// is applied immediately to the average value, assuming that the delay
|
||||
// between the producer and the consumer will be caught up.
|
||||
//
|
||||
// However, when this assumption is not valid, the average buffering
|
||||
// may decrease indefinitely. Prevent it to become negative to limit
|
||||
// the consequences.
|
||||
ar->avg_buffering.avg = 0;
|
||||
}
|
||||
|
||||
// However, the buffering level must be smoothed
|
||||
sc_average_push(&ar->avg_buffering, can_read);
|
||||
|
||||
#ifdef SC_AUDIO_REGULATOR_DEBUG
|
||||
LOGD("[Audio] can_read=%" PRIu32 " avg_buffering=%f",
|
||||
can_read, sc_average_get(&ar->avg_buffering));
|
||||
#endif
|
||||
|
||||
ar->samples_since_resync += written;
|
||||
if (ar->samples_since_resync >= ar->sample_rate) {
|
||||
// Recompute compensation every second
|
||||
ar->samples_since_resync = 0;
|
||||
|
||||
float avg = sc_average_get(&ar->avg_buffering);
|
||||
int diff = ar->target_buffering - avg;
|
||||
|
||||
// Enable compensation when the difference exceeds +/- 4ms.
|
||||
// Disable compensation when the difference is lower than +/- 1ms.
|
||||
int threshold = ar->compensation != 0
|
||||
? ar->sample_rate / 1000 /* 1ms */
|
||||
: ar->sample_rate * 4 / 1000; /* 4ms */
|
||||
|
||||
if (abs(diff) < threshold) {
|
||||
// Do not compensate for small values, the error is just noise
|
||||
diff = 0;
|
||||
} else if (diff < 0 && can_read < ar->target_buffering) {
|
||||
// Do not accelerate if the instant buffering level is below the
|
||||
// target, this would increase underflow
|
||||
diff = 0;
|
||||
}
|
||||
// Compensate the diff over 4 seconds (but will be recomputed after 1
|
||||
// second)
|
||||
int distance = 4 * ar->sample_rate;
|
||||
// Limit compensation rate to 2%
|
||||
int abs_max_diff = distance / 50;
|
||||
diff = CLAMP(diff, -abs_max_diff, abs_max_diff);
|
||||
LOGV("[Audio] Buffering: target=%" PRIu32 " avg=%f cur=%" PRIu32
|
||||
" compensation=%d", ar->target_buffering, avg, can_read, diff);
|
||||
|
||||
if (diff != ar->compensation) {
|
||||
int ret = swr_set_compensation(swr_ctx, diff, distance);
|
||||
if (ret < 0) {
|
||||
LOGW("Resampling compensation failed: %d", ret);
|
||||
// not fatal
|
||||
} else {
|
||||
ar->compensation = diff;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
bool
|
||||
sc_audio_regulator_init(struct sc_audio_regulator *ar, size_t sample_size,
|
||||
const AVCodecContext *ctx, uint32_t target_buffering) {
|
||||
SwrContext *swr_ctx = swr_alloc();
|
||||
if (!swr_ctx) {
|
||||
LOG_OOM();
|
||||
return false;
|
||||
}
|
||||
ar->swr_ctx = swr_ctx;
|
||||
|
||||
#ifdef SCRCPY_LAVU_HAS_CHLAYOUT
|
||||
av_opt_set_chlayout(swr_ctx, "in_chlayout", &ctx->ch_layout, 0);
|
||||
av_opt_set_chlayout(swr_ctx, "out_chlayout", &ctx->ch_layout, 0);
|
||||
#else
|
||||
av_opt_set_channel_layout(swr_ctx, "in_channel_layout",
|
||||
ctx->channel_layout, 0);
|
||||
av_opt_set_channel_layout(swr_ctx, "out_channel_layout",
|
||||
ctx->channel_layout, 0);
|
||||
#endif
|
||||
|
||||
av_opt_set_int(swr_ctx, "in_sample_rate", ctx->sample_rate, 0);
|
||||
av_opt_set_int(swr_ctx, "out_sample_rate", ctx->sample_rate, 0);
|
||||
|
||||
av_opt_set_sample_fmt(swr_ctx, "in_sample_fmt", ctx->sample_fmt, 0);
|
||||
av_opt_set_sample_fmt(swr_ctx, "out_sample_fmt", SC_AV_SAMPLE_FMT, 0);
|
||||
|
||||
int ret = swr_init(swr_ctx);
|
||||
if (ret) {
|
||||
LOGE("Failed to initialize the resampling context");
|
||||
goto error_free_swr_ctx;
|
||||
}
|
||||
|
||||
bool ok = sc_mutex_init(&ar->mutex);
|
||||
if (!ok) {
|
||||
goto error_free_swr_ctx;
|
||||
}
|
||||
|
||||
ar->target_buffering = target_buffering;
|
||||
ar->sample_size = sample_size;
|
||||
ar->sample_rate = ctx->sample_rate;
|
||||
|
||||
// Use a ring-buffer of the target buffering size plus 1 second between the
|
||||
// producer and the consumer. It's too big on purpose, to guarantee that
|
||||
// the producer and the consumer will be able to access it in parallel
|
||||
// without locking.
|
||||
uint32_t audiobuf_samples = target_buffering + ar->sample_rate;
|
||||
|
||||
ok = sc_audiobuf_init(&ar->buf, sample_size, audiobuf_samples);
|
||||
if (!ok) {
|
||||
goto error_destroy_mutex;
|
||||
}
|
||||
|
||||
size_t initial_swr_buf_size = TO_BYTES(4096);
|
||||
ar->swr_buf = malloc(initial_swr_buf_size);
|
||||
if (!ar->swr_buf) {
|
||||
LOG_OOM();
|
||||
goto error_destroy_audiobuf;
|
||||
}
|
||||
ar->swr_buf_alloc_size = initial_swr_buf_size;
|
||||
|
||||
// Samples are produced and consumed by blocks, so the buffering must be
|
||||
// smoothed to get a relatively stable value.
|
||||
sc_average_init(&ar->avg_buffering, 128);
|
||||
ar->samples_since_resync = 0;
|
||||
|
||||
ar->received = false;
|
||||
atomic_init(&ar->played, false);
|
||||
atomic_init(&ar->received, false);
|
||||
atomic_init(&ar->underflow, 0);
|
||||
ar->compensation = 0;
|
||||
|
||||
return true;
|
||||
|
||||
error_destroy_audiobuf:
|
||||
sc_audiobuf_destroy(&ar->buf);
|
||||
error_destroy_mutex:
|
||||
sc_mutex_destroy(&ar->mutex);
|
||||
error_free_swr_ctx:
|
||||
swr_free(&ar->swr_ctx);
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
void
|
||||
sc_audio_regulator_destroy(struct sc_audio_regulator *ar) {
|
||||
free(ar->swr_buf);
|
||||
sc_audiobuf_destroy(&ar->buf);
|
||||
sc_mutex_destroy(&ar->mutex);
|
||||
swr_free(&ar->swr_ctx);
|
||||
}
|
app/src/audio_regulator.h (new file, 71 lines)

@@ -0,0 +1,71 @@
#ifndef SC_AUDIO_REGULATOR_H
#define SC_AUDIO_REGULATOR_H

#include "common.h"

#include <stdatomic.h>
#include <stdbool.h>
#include <libavcodec/avcodec.h>
#include <libswresample/swresample.h>
#include "util/audiobuf.h"
#include "util/average.h"
#include "util/thread.h"

#define SC_AV_SAMPLE_FMT AV_SAMPLE_FMT_FLT

struct sc_audio_regulator {
    sc_mutex mutex;

    // Target buffering between the producer and the consumer (in samples)
    uint32_t target_buffering;

    // Audio buffer to communicate between the receiver and the player
    struct sc_audiobuf buf;

    // Resampler (only used from the receiver thread)
    struct SwrContext *swr_ctx;

    // The sample rate is the same for input and output
    uint32_t sample_rate;
    // The number of bytes per sample (for all channels)
    size_t sample_size;

    // Target buffer for resampling (only used by the receiver thread)
    uint8_t *swr_buf;
    size_t swr_buf_alloc_size;

    // Number of buffered samples (may be negative on underflow) (only used by
    // the receiver thread)
    struct sc_average avg_buffering;
    // Count the number of samples to trigger a compensation update regularly
    // (only used by the receiver thread)
    uint32_t samples_since_resync;

    // Number of silence samples inserted since the last received packet
    atomic_uint_least32_t underflow;

    // Current applied compensation value (only used by the receiver thread)
    int compensation;

    // Set to true the first time a sample is received
    atomic_bool received;

    // Set to true the first time samples are pulled by the player
    atomic_bool played;
};

bool
sc_audio_regulator_init(struct sc_audio_regulator *ar, size_t sample_size,
                        const AVCodecContext *ctx, uint32_t target_buffering);

void
sc_audio_regulator_destroy(struct sc_audio_regulator *ar);

bool
sc_audio_regulator_push(struct sc_audio_regulator *ar, const AVFrame *frame);

void
sc_audio_regulator_pull(struct sc_audio_regulator *ar, uint8_t *out,
                        uint32_t samples);

#endif
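
A minimal usage sketch of this API (error handling is omitted; codec_ctx, frame, out and out_samples are assumed to exist, and the buffering value is illustrative):

// Producer/consumer lifecycle of the regulator declared above.
struct sc_audio_regulator ar;
size_t sample_size = 2 * sizeof(float);  // stereo, AV_SAMPLE_FMT_FLT
if (sc_audio_regulator_init(&ar, sample_size, codec_ctx, 2400)) {
    // Receiver/decoder thread: push every decoded frame.
    sc_audio_regulator_push(&ar, frame);

    // Audio output callback: pull exactly as many samples as requested.
    sc_audio_regulator_pull(&ar, out, out_samples);

    sc_audio_regulator_destroy(&ar);
}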
@@ -102,6 +102,9 @@ enum {
    OPT_NO_MOUSE_HOVER,
    OPT_AUDIO_DUP,
    OPT_GAMEPAD,
    OPT_NEW_DISPLAY,
    OPT_LIST_APPS,
    OPT_START_APP,
};

struct sc_option {
@@ -442,6 +445,11 @@ static const struct sc_option options[] = {
                "This is a workaround for some devices not behaving as "
                "expected when setting the device clipboard programmatically.",
    },
    {
        .longopt_id = OPT_LIST_APPS,
        .longopt = "list-apps",
        .text = "List Android apps installed on the device.",
    },
    {
        .longopt_id = OPT_LIST_CAMERAS,
        .longopt = "list-cameras",
@@ -557,6 +565,20 @@ static const struct sc_option options[] = {
        .text = "Disable video and audio playback on the computer (equivalent "
                "to --no-video-playback --no-audio-playback).",
    },
    {
        .longopt_id = OPT_NEW_DISPLAY,
        .longopt = "new-display",
        .argdesc = "[<width>x<height>][/<dpi>]",
        .optional_arg = true,
        .text = "Create a new display with the specified resolution and "
                "density. If not provided, they default to the main display "
                "dimensions and DPI, and --max-size is considered.\n"
                "Examples:\n"
                " --new-display=1920x1080\n"
                " --new-display=1920x1080/420 # force 420 dpi\n"
                " --new-display # default screen size and density\n"
                " --new-display=240 # default screen size and 240 dpi",
    },
    {
        .longopt_id = OPT_NO_AUDIO,
        .longopt = "no-audio",
@@ -784,6 +806,20 @@ static const struct sc_option options[] = {
                "shortcuts, pass \"lctrl,lsuper\".\n"
                "Default is \"lalt,lsuper\" (left-Alt or left-Super).",
    },
    {
        .longopt_id = OPT_START_APP,
        .longopt = "start-app",
        .argdesc = "name",
        .text = "Start an Android app, by its exact package name.\n"
                "Add a '?' prefix to select an app whose name starts with the "
                "given name, case-insensitive (it may take some time to "
                "retrieve the app names on the device):\n"
                " scrcpy --start-app=?firefox\n"
                "Add a '+' prefix to force-stop before starting the app:\n"
                " scrcpy --new-display --start-app=+org.mozilla.firefox\n"
                "Both prefixes can be used, in that order:\n"
                " scrcpy --start-app=+?firefox",
    },
    {
        .shortopt = 't',
        .longopt = "show-touches",
@@ -1072,7 +1108,11 @@ static const struct sc_shortcut shortcuts[] = {
    },
    {
        .shortcuts = { "Shift+click-and-move" },
        .text = "Tilt (slide vertically with two fingers)",
        .text = "Tilt vertically (slide with 2 fingers)",
    },
    {
        .shortcuts = { "Ctrl+Shift+click-and-move" },
        .text = "Tilt horizontally (slide with 2 fingers)",
    },
    {
        .shortcuts = { "Drag & drop APK file" },
@@ -2591,6 +2631,9 @@ parse_args_with_getopt(struct scrcpy_cli_args *args, int argc, char *argv[],
        case OPT_LIST_CAMERA_SIZES:
            opts->list |= SC_OPTION_LIST_CAMERA_SIZES;
            break;
        case OPT_LIST_APPS:
            opts->list |= SC_OPTION_LIST_APPS;
            break;
        case OPT_REQUIRE_AUDIO:
            opts->require_audio = true;
            break;
@@ -2664,6 +2707,12 @@ parse_args_with_getopt(struct scrcpy_cli_args *args, int argc, char *argv[],
                return false;
            }
            break;
        case OPT_NEW_DISPLAY:
            opts->new_display = optarg ? optarg : "auto";
            break;
        case OPT_START_APP:
            opts->start_app = optarg;
            break;
        default:
            // getopt prints the error message on stderr
            return false;
@@ -2914,6 +2963,11 @@ parse_args_with_getopt(struct scrcpy_cli_args *args, int argc, char *argv[],
            return false;
        }

        if (opts->new_display) {
            LOGE("--new-display is only available with --video-source=display");
            return false;
        }

        if (opts->camera_id && opts->camera_facing != SC_CAMERA_FACING_ANY) {
            LOGE("Cannot specify both --camera-id and --camera-facing");
            return false;
@@ -2950,6 +3004,11 @@ parse_args_with_getopt(struct scrcpy_cli_args *args, int argc, char *argv[],
        return false;
    }

    if (opts->display_id != 0 && opts->new_display) {
        LOGE("Cannot specify both --display-id and --new-display");
        return false;
    }

    if (opts->audio && opts->audio_source == SC_AUDIO_SOURCE_AUTO) {
        // Select the audio source according to the video source
        if (opts->video_source == SC_VIDEO_SOURCE_DISPLAY) {
@@ -3082,6 +3141,10 @@ parse_args_with_getopt(struct scrcpy_cli_args *args, int argc, char *argv[],
            LOGE("Cannot request power off on close if control is disabled");
            return false;
        }
        if (opts->start_app) {
            LOGE("Cannot start an Android app if control is disabled");
            return false;
        }
    }

# ifdef _WIN32
@@ -183,6 +183,10 @@ sc_control_msg_serialize(const struct sc_control_msg *msg, uint8_t *buf) {
        case SC_CONTROL_MSG_TYPE_UHID_DESTROY:
            sc_write16be(&buf[1], msg->uhid_destroy.id);
            return 3;
        case SC_CONTROL_MSG_TYPE_START_APP: {
            size_t len = write_string_tiny(&buf[1], msg->start_app.name, 255);
            return 1 + len;
        }
        case SC_CONTROL_MSG_TYPE_EXPAND_NOTIFICATION_PANEL:
        case SC_CONTROL_MSG_TYPE_EXPAND_SETTINGS_PANEL:
        case SC_CONTROL_MSG_TYPE_COLLAPSE_PANELS:
@@ -308,6 +312,9 @@ sc_control_msg_log(const struct sc_control_msg *msg) {
        case SC_CONTROL_MSG_TYPE_OPEN_HARD_KEYBOARD_SETTINGS:
            LOG_CMSG("open hard keyboard settings");
            break;
        case SC_CONTROL_MSG_TYPE_START_APP:
            LOG_CMSG("start app \"%s\"", msg->start_app.name);
            break;
        default:
            LOG_CMSG("unknown type: %u", (unsigned) msg->type);
            break;
@@ -333,6 +340,9 @@ sc_control_msg_destroy(struct sc_control_msg *msg) {
        case SC_CONTROL_MSG_TYPE_SET_CLIPBOARD:
            free(msg->set_clipboard.text);
            break;
        case SC_CONTROL_MSG_TYPE_START_APP:
            free(msg->start_app.name);
            break;
        default:
            // do nothing
            break;
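
For reference, a possible serialized form of the new start-app message, assuming write_string_tiny() emits a 1-byte length prefix followed by the raw bytes (suggested by the 255-byte cap above, but the helper itself is not shown in this diff):

// Hypothetical wire layout for sc_control_msg_serialize() with "+?firefox":
//
//   buf[0]      SC_CONTROL_MSG_TYPE_START_APP       (1 byte: message type)
//   buf[1]      9                                    (name length)
//   buf[2..10]  '+' '?' 'f' 'i' 'r' 'e' 'f' 'o' 'x'  (name bytes)
//
// so the function would return 1 + 10 = 11 bytes in total.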
@@ -41,6 +41,7 @@ enum sc_control_msg_type {
    SC_CONTROL_MSG_TYPE_UHID_INPUT,
    SC_CONTROL_MSG_TYPE_UHID_DESTROY,
    SC_CONTROL_MSG_TYPE_OPEN_HARD_KEYBOARD_SETTINGS,
    SC_CONTROL_MSG_TYPE_START_APP,
};

enum sc_screen_power_mode {
@@ -110,6 +111,9 @@ struct sc_control_msg {
        struct {
            uint16_t id;
        } uhid_destroy;
        struct {
            char *name;
        } start_app;
    };
};
@@ -5,7 +5,7 @@

#include <stdbool.h>
#include <stdint.h>
#include <SDL_events.h>
#include <SDL2/SDL_events.h>

enum {
    SC_EVENT_NEW_FRAME = SDL_USEREVENT,
@ -5,53 +5,9 @@
|
||||
|
||||
#include "input_events.h"
|
||||
#include "screen.h"
|
||||
#include "shortcut_mod.h"
|
||||
#include "util/log.h"
|
||||
|
||||
#define SC_SDL_SHORTCUT_MODS_MASK (KMOD_CTRL | KMOD_ALT | KMOD_GUI)
|
||||
|
||||
static inline uint16_t
|
||||
to_sdl_mod(uint8_t shortcut_mod) {
|
||||
uint16_t sdl_mod = 0;
|
||||
if (shortcut_mod & SC_SHORTCUT_MOD_LCTRL) {
|
||||
sdl_mod |= KMOD_LCTRL;
|
||||
}
|
||||
if (shortcut_mod & SC_SHORTCUT_MOD_RCTRL) {
|
||||
sdl_mod |= KMOD_RCTRL;
|
||||
}
|
||||
if (shortcut_mod & SC_SHORTCUT_MOD_LALT) {
|
||||
sdl_mod |= KMOD_LALT;
|
||||
}
|
||||
if (shortcut_mod & SC_SHORTCUT_MOD_RALT) {
|
||||
sdl_mod |= KMOD_RALT;
|
||||
}
|
||||
if (shortcut_mod & SC_SHORTCUT_MOD_LSUPER) {
|
||||
sdl_mod |= KMOD_LGUI;
|
||||
}
|
||||
if (shortcut_mod & SC_SHORTCUT_MOD_RSUPER) {
|
||||
sdl_mod |= KMOD_RGUI;
|
||||
}
|
||||
return sdl_mod;
|
||||
}
|
||||
|
||||
static bool
|
||||
is_shortcut_mod(struct sc_input_manager *im, uint16_t sdl_mod) {
|
||||
// keep only the relevant modifier keys
|
||||
sdl_mod &= SC_SDL_SHORTCUT_MODS_MASK;
|
||||
|
||||
// at least one shortcut mod pressed?
|
||||
return sdl_mod & im->sdl_shortcut_mods;
|
||||
}
|
||||
|
||||
static bool
|
||||
is_shortcut_key(struct sc_input_manager *im, SDL_Keycode keycode) {
|
||||
return (im->sdl_shortcut_mods & KMOD_LCTRL && keycode == SDLK_LCTRL)
|
||||
|| (im->sdl_shortcut_mods & KMOD_RCTRL && keycode == SDLK_RCTRL)
|
||||
|| (im->sdl_shortcut_mods & KMOD_LALT && keycode == SDLK_LALT)
|
||||
|| (im->sdl_shortcut_mods & KMOD_RALT && keycode == SDLK_RALT)
|
||||
|| (im->sdl_shortcut_mods & KMOD_LGUI && keycode == SDLK_LGUI)
|
||||
|| (im->sdl_shortcut_mods & KMOD_RGUI && keycode == SDLK_RGUI);
|
||||
}
|
||||
|
||||
void
|
||||
sc_input_manager_init(struct sc_input_manager *im,
|
||||
const struct sc_input_manager_params *params) {
|
||||
@ -73,7 +29,7 @@ sc_input_manager_init(struct sc_input_manager *im,
|
||||
im->legacy_paste = params->legacy_paste;
|
||||
im->clipboard_autosync = params->clipboard_autosync;
|
||||
|
||||
im->sdl_shortcut_mods = to_sdl_mod(params->shortcut_mods);
|
||||
im->sdl_shortcut_mods = sc_shortcut_mods_to_sdl(params->shortcut_mods);
|
||||
|
||||
im->vfinger_down = false;
|
||||
im->vfinger_invert_x = false;
|
||||
@ -346,7 +302,8 @@ sc_input_manager_process_text_input(struct sc_input_manager *im,
|
||||
return;
|
||||
}
|
||||
|
||||
if (is_shortcut_mod(im, SDL_GetModState())) {
|
||||
if (sc_shortcut_mods_is_shortcut_mod(im->sdl_shortcut_mods,
|
||||
SDL_GetModState())) {
|
||||
// A shortcut must never generate text events
|
||||
return;
|
||||
}
|
||||
@ -413,8 +370,9 @@ sc_input_manager_process_key(struct sc_input_manager *im,
// press/release is a modifier key.
// The second condition is necessary to ignore the release of the modifier
// key (because in this case mod is 0).
bool is_shortcut = is_shortcut_mod(im, mod)
|| is_shortcut_key(im, sdl_keycode);
uint16_t mods = im->sdl_shortcut_mods;
bool is_shortcut = sc_shortcut_mods_is_shortcut_mod(mods, mod)
|| sc_shortcut_mods_is_shortcut_key(mods, sdl_keycode);
if (down && !repeat) {
if (sdl_keycode == im->last_keycode && mod == im->last_mod) {
@ -536,7 +494,7 @@ sc_input_manager_process_key(struct sc_input_manager *im,
return;
case SDLK_f:
if (video && !shift && !repeat && down) {
sc_screen_switch_fullscreen(im->screen);
sc_screen_toggle_fullscreen(im->screen);
}
return;
case SDLK_w:
@ -836,7 +794,7 @@ sc_input_manager_process_mouse_button(struct sc_input_manager *im,
}
bool change_vfinger = event->button == SDL_BUTTON_LEFT &&
((down && !im->vfinger_down && (ctrl_pressed ^ shift_pressed)) ||
((down && !im->vfinger_down && (ctrl_pressed || shift_pressed)) ||
(!down && im->vfinger_down));
bool use_finger = im->vfinger_down || change_vfinger;
@ -868,16 +826,28 @@ sc_input_manager_process_mouse_button(struct sc_input_manager *im,
// In other words, the center of the rotation/scaling is the center of the
// screen.
//
// To simulate a tilt gesture (a vertical slide with two fingers), Shift
// can be used instead of Ctrl. The "virtual finger" has a position
// To simulate a vertical tilt gesture (a vertical slide with two fingers),
// Shift can be used instead of Ctrl. The "virtual finger" has a position
// inverted with respect to the vertical axis of symmetry in the middle of
// the screen.
//
// To simulate a horizontal tilt gesture (a horizontal slide with two
// fingers), Ctrl+Shift can be used. The "virtual finger" has a position
// inverted with respect to the horizontal axis of symmetry in the middle
// of the screen. It is expected to be less frequently used, that's why the
// one-mod shortcuts are assigned to rotation and vertical tilt.
if (change_vfinger) {
struct sc_point mouse =
sc_screen_convert_window_to_frame_coords(im->screen, event->x,
event->y);
if (down) {
im->vfinger_invert_x = ctrl_pressed || shift_pressed;
// Ctrl Shift invert_x invert_y
// ---- ----- ==> -------- --------
// 0 0 0 0 -
// 0 1 1 0 vertical tilt
// 1 0 1 1 rotate
// 1 1 0 1 horizontal tilt
im->vfinger_invert_x = ctrl_pressed ^ shift_pressed;
im->vfinger_invert_y = ctrl_pressed;
}
struct sc_point vfinger = inverse_point(mouse, im->screen->frame_size,
@ -45,6 +45,10 @@ convert_keycode(enum sc_keycode from, enum android_keycode *to, uint16_t mod,
{SC_KEYCODE_RCTRL, AKEYCODE_CTRL_RIGHT},
{SC_KEYCODE_LSHIFT, AKEYCODE_SHIFT_LEFT},
{SC_KEYCODE_RSHIFT, AKEYCODE_SHIFT_RIGHT},
{SC_KEYCODE_LALT, AKEYCODE_ALT_LEFT},
{SC_KEYCODE_RALT, AKEYCODE_ALT_RIGHT},
{SC_KEYCODE_LGUI, AKEYCODE_META_LEFT},
{SC_KEYCODE_RGUI, AKEYCODE_META_RIGHT},
};
// Numpad navigation keys.
@ -166,11 +170,7 @@ convert_keycode(enum sc_keycode from, enum android_keycode *to, uint16_t mod,
return false;
}
if (mod & (SC_MOD_LALT | SC_MOD_RALT | SC_MOD_LGUI | SC_MOD_RGUI)) {
return false;
}
// if ALT and META are not pressed, also handle letters and space
// Handle letters and space
entry = SC_INTMAP_FIND_ENTRY(alphaspace_keys, from);
if (entry) {
*to = entry->value;
123
app/src/mouse_capture.c
Normal file
@ -0,0 +1,123 @@
|
||||
#include "mouse_capture.h"
|
||||
|
||||
#include "shortcut_mod.h"
|
||||
#include "util/log.h"
|
||||
|
||||
void
|
||||
sc_mouse_capture_init(struct sc_mouse_capture *mc, SDL_Window *window,
|
||||
uint8_t shortcut_mods) {
|
||||
mc->window = window;
|
||||
mc->sdl_mouse_capture_keys = sc_shortcut_mods_to_sdl(shortcut_mods);
|
||||
mc->mouse_capture_key_pressed = SDLK_UNKNOWN;
|
||||
}
|
||||
|
||||
static inline bool
|
||||
sc_mouse_capture_is_capture_key(struct sc_mouse_capture *mc, SDL_Keycode key) {
|
||||
return sc_shortcut_mods_is_shortcut_key(mc->sdl_mouse_capture_keys, key);
|
||||
}
|
||||
|
||||
bool
|
||||
sc_mouse_capture_handle_event(struct sc_mouse_capture *mc,
|
||||
const SDL_Event *event) {
|
||||
switch (event->type) {
|
||||
case SDL_WINDOWEVENT:
|
||||
if (event->window.event == SDL_WINDOWEVENT_FOCUS_LOST) {
|
||||
sc_mouse_capture_set_active(mc, false);
|
||||
return true;
|
||||
}
|
||||
break;
|
||||
case SDL_KEYDOWN: {
|
||||
SDL_Keycode key = event->key.keysym.sym;
|
||||
if (sc_mouse_capture_is_capture_key(mc, key)) {
|
||||
if (!mc->mouse_capture_key_pressed) {
|
||||
mc->mouse_capture_key_pressed = key;
|
||||
} else {
|
||||
// Another mouse capture key has been pressed, cancel
|
||||
// mouse (un)capture
|
||||
mc->mouse_capture_key_pressed = 0;
|
||||
}
|
||||
// Mouse capture keys are never forwarded to the device
|
||||
return true;
|
||||
}
|
||||
break;
|
||||
}
|
||||
case SDL_KEYUP: {
|
||||
SDL_Keycode key = event->key.keysym.sym;
|
||||
SDL_Keycode cap = mc->mouse_capture_key_pressed;
|
||||
mc->mouse_capture_key_pressed = 0;
|
||||
if (sc_mouse_capture_is_capture_key(mc, key)) {
|
||||
if (key == cap) {
|
||||
// A mouse capture key has been pressed then released:
|
||||
// toggle the capture mouse mode
|
||||
sc_mouse_capture_toggle(mc);
|
||||
}
|
||||
// Mouse capture keys are never forwarded to the device
|
||||
return true;
|
||||
}
|
||||
break;
|
||||
}
|
||||
case SDL_MOUSEWHEEL:
|
||||
case SDL_MOUSEMOTION:
|
||||
case SDL_MOUSEBUTTONDOWN:
|
||||
if (!sc_mouse_capture_is_active(mc)) {
|
||||
// The mouse will be captured on SDL_MOUSEBUTTONUP, so consume
|
||||
// the event
|
||||
return true;
|
||||
}
|
||||
break;
|
||||
case SDL_MOUSEBUTTONUP:
|
||||
if (!sc_mouse_capture_is_active(mc)) {
|
||||
sc_mouse_capture_set_active(mc, true);
|
||||
return true;
|
||||
}
|
||||
break;
|
||||
case SDL_FINGERMOTION:
|
||||
case SDL_FINGERDOWN:
|
||||
case SDL_FINGERUP:
|
||||
// Touch events are not compatible with relative mode
|
||||
// (coordinates are not relative), so consume the event
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
void
|
||||
sc_mouse_capture_set_active(struct sc_mouse_capture *mc, bool capture) {
|
||||
#ifdef __APPLE__
|
||||
// Workaround for SDL bug on macOS:
|
||||
// <https://github.com/libsdl-org/SDL/issues/5340>
|
||||
if (capture) {
|
||||
int mouse_x, mouse_y;
|
||||
SDL_GetGlobalMouseState(&mouse_x, &mouse_y);
|
||||
|
||||
int x, y, w, h;
|
||||
SDL_GetWindowPosition(mc->window, &x, &y);
|
||||
SDL_GetWindowSize(mc->window, &w, &h);
|
||||
|
||||
bool outside_window = mouse_x < x || mouse_x >= x + w
|
||||
|| mouse_y < y || mouse_y >= y + h;
|
||||
if (outside_window) {
|
||||
SDL_WarpMouseInWindow(mc->window, w / 2, h / 2);
|
||||
}
|
||||
}
|
||||
#else
|
||||
(void) mc;
|
||||
#endif
|
||||
if (SDL_SetRelativeMouseMode(capture)) {
|
||||
LOGE("Could not set relative mouse mode to %s: %s",
|
||||
capture ? "true" : "false", SDL_GetError());
|
||||
}
|
||||
}
|
||||
|
||||
bool
|
||||
sc_mouse_capture_is_active(struct sc_mouse_capture *mc) {
|
||||
(void) mc;
|
||||
return SDL_GetRelativeMouseMode();
|
||||
}
|
||||
|
||||
void
|
||||
sc_mouse_capture_toggle(struct sc_mouse_capture *mc) {
|
||||
bool new_value = !sc_mouse_capture_is_active(mc);
|
||||
sc_mouse_capture_set_active(mc, new_value);
|
||||
}
|
38
app/src/mouse_capture.h
Normal file
@ -0,0 +1,38 @@
#ifndef SC_MOUSE_CAPTURE_H
#define SC_MOUSE_CAPTURE_H

#include "common.h"

#include <stdbool.h>

#include <SDL2/SDL.h>

struct sc_mouse_capture {
    SDL_Window *window;
    uint16_t sdl_mouse_capture_keys;

    // To enable/disable mouse capture, a mouse capture key (LALT, LGUI or
    // RGUI) must be pressed. This variable tracks the pressed capture key.
    SDL_Keycode mouse_capture_key_pressed;
};

void
sc_mouse_capture_init(struct sc_mouse_capture *mc, SDL_Window *window,
                      uint8_t shortcut_mods);

void
sc_mouse_capture_set_active(struct sc_mouse_capture *mc, bool capture);

bool
sc_mouse_capture_is_active(struct sc_mouse_capture *mc);

void
sc_mouse_capture_toggle(struct sc_mouse_capture *mc);

// Return true if it consumed the event
bool
sc_mouse_capture_handle_event(struct sc_mouse_capture *mc,
                              const SDL_Event *event);

#endif
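The header above is consumed by `screen.c` and `screen_otg.c` further down in this diff. As a rough usage sketch (the window setup and event loop below are illustrative, not taken from scrcpy), the intended flow is: initialize once with the window and the shortcut-modifier bitmask, optionally activate capture on start, then let `sc_mouse_capture_handle_event()` filter events before normal input handling:

```c
#include <stdbool.h>
#include <SDL2/SDL.h>

#include "mouse_capture.h"
#include "options.h" // assumed to provide enum sc_shortcut_mod (SC_SHORTCUT_MOD_LALT, ...)

// Illustrative event loop driving the new mouse capture helper
static void
run_with_mouse_capture(SDL_Window *window) {
    struct sc_mouse_capture mc;
    // LAlt acts as the capture key here; any shortcut-mod bitmask may be passed
    sc_mouse_capture_init(&mc, window, SC_SHORTCUT_MOD_LALT);

    // In relative mouse mode, capture is typically enabled immediately
    sc_mouse_capture_set_active(&mc, true);

    SDL_Event event;
    bool running = true;
    while (running && SDL_WaitEvent(&event)) {
        if (event.type == SDL_QUIT) {
            running = false;
        } else if (sc_mouse_capture_handle_event(&mc, &event)) {
            // The event toggled or guarded mouse capture; it must not be
            // forwarded to the device
            continue;
        }
        // ... otherwise forward the event to the input manager as usual ...
    }
}
```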
@ -103,6 +103,8 @@ const struct scrcpy_options scrcpy_options_default = {
.window = true,
.mouse_hover = true,
.audio_dup = false,
.new_display = NULL,
.start_app = NULL,
};
enum sc_orientation
@ -304,10 +304,13 @@ struct scrcpy_options {
#define SC_OPTION_LIST_DISPLAYS 0x2
#define SC_OPTION_LIST_CAMERAS 0x4
#define SC_OPTION_LIST_CAMERA_SIZES 0x8
#define SC_OPTION_LIST_APPS 0x10
uint8_t list;
bool window;
bool mouse_hover;
bool audio_dup;
const char *new_display; // [<width>x<height>][/<dpi>] parsed by the server
const char *start_app;
};
extern const struct scrcpy_options scrcpy_options_default;
@ -431,6 +431,7 @@ scrcpy(struct scrcpy_options *options) {
.lock_video_orientation = options->lock_video_orientation,
.control = options->control,
.display_id = options->display_id,
.new_display = options->new_display,
.video = options->video,
.audio = options->audio,
.audio_dup = options->audio_dup,
@ -906,6 +907,25 @@ aoa_complete:
init_sdl_gamepads();
}
if (options->control && options->start_app) {
assert(controller);
char *name = strdup(options->start_app);
if (!name) {
LOG_OOM();
goto end;
}
struct sc_control_msg msg;
msg.type = SC_CONTROL_MSG_TYPE_START_APP;
msg.start_app.name = name;
if (!sc_controller_push_msg(controller, &msg)) {
LOGW("Could not request start app '%s'", name);
free(name);
}
}
ret = event_loop(s);
terminate_event_loop();
LOGD("quit...");
127
app/src/screen.c
@ -162,47 +162,6 @@ sc_screen_is_relative_mode(struct sc_screen *screen) {
|
||||
return screen->im.mp && screen->im.mp->relative_mode;
|
||||
}
|
||||
|
||||
static void
|
||||
sc_screen_set_mouse_capture(struct sc_screen *screen, bool capture) {
|
||||
#ifdef __APPLE__
|
||||
// Workaround for SDL bug on macOS:
|
||||
// <https://github.com/libsdl-org/SDL/issues/5340>
|
||||
if (capture) {
|
||||
int mouse_x, mouse_y;
|
||||
SDL_GetGlobalMouseState(&mouse_x, &mouse_y);
|
||||
|
||||
int x, y, w, h;
|
||||
SDL_GetWindowPosition(screen->window, &x, &y);
|
||||
SDL_GetWindowSize(screen->window, &w, &h);
|
||||
|
||||
bool outside_window = mouse_x < x || mouse_x >= x + w
|
||||
|| mouse_y < y || mouse_y >= y + h;
|
||||
if (outside_window) {
|
||||
SDL_WarpMouseInWindow(screen->window, w / 2, h / 2);
|
||||
}
|
||||
}
|
||||
#else
|
||||
(void) screen;
|
||||
#endif
|
||||
if (SDL_SetRelativeMouseMode(capture)) {
|
||||
LOGE("Could not set relative mouse mode to %s: %s",
|
||||
capture ? "true" : "false", SDL_GetError());
|
||||
}
|
||||
}
|
||||
|
||||
static inline bool
|
||||
sc_screen_get_mouse_capture(struct sc_screen *screen) {
|
||||
(void) screen;
|
||||
return SDL_GetRelativeMouseMode();
|
||||
}
|
||||
|
||||
static inline void
|
||||
sc_screen_toggle_mouse_capture(struct sc_screen *screen) {
|
||||
(void) screen;
|
||||
bool new_value = !sc_screen_get_mouse_capture(screen);
|
||||
sc_screen_set_mouse_capture(screen, new_value);
|
||||
}
|
||||
|
||||
static void
|
||||
sc_screen_update_content_rect(struct sc_screen *screen) {
|
||||
assert(screen->video);
|
||||
@ -371,7 +330,6 @@ sc_screen_init(struct sc_screen *screen,
|
||||
screen->fullscreen = false;
|
||||
screen->maximized = false;
|
||||
screen->minimized = false;
|
||||
screen->mouse_capture_key_pressed = 0;
|
||||
screen->paused = false;
|
||||
screen->resume_frame = NULL;
|
||||
screen->orientation = SC_ORIENTATION_0;
|
||||
@ -486,6 +444,9 @@ sc_screen_init(struct sc_screen *screen,
|
||||
|
||||
sc_input_manager_init(&screen->im, &im_params);
|
||||
|
||||
// Initialize even if not used for simplicity
|
||||
sc_mouse_capture_init(&screen->mc, screen->window, params->shortcut_mods);
|
||||
|
||||
#ifdef CONTINUOUS_RESIZING_WORKAROUND
|
||||
if (screen->video) {
|
||||
SDL_AddEventWatch(event_watcher, screen);
|
||||
@ -506,7 +467,7 @@ sc_screen_init(struct sc_screen *screen,
|
||||
|
||||
if (!screen->video && sc_screen_is_relative_mode(screen)) {
|
||||
// Capture mouse immediately if video mirroring is disabled
|
||||
sc_screen_set_mouse_capture(screen, true);
|
||||
sc_mouse_capture_set_active(&screen->mc, true);
|
||||
}
|
||||
|
||||
return true;
|
||||
@ -538,7 +499,7 @@ sc_screen_show_initial_window(struct sc_screen *screen) {
|
||||
SDL_SetWindowPosition(screen->window, x, y);
|
||||
|
||||
if (screen->req.fullscreen) {
|
||||
sc_screen_switch_fullscreen(screen);
|
||||
sc_screen_toggle_fullscreen(screen);
|
||||
}
|
||||
|
||||
if (screen->req.start_fps_counter) {
|
||||
@ -713,7 +674,7 @@ sc_screen_apply_frame(struct sc_screen *screen) {
|
||||
|
||||
if (sc_screen_is_relative_mode(screen)) {
|
||||
// Capture mouse on start
|
||||
sc_screen_set_mouse_capture(screen, true);
|
||||
sc_mouse_capture_set_active(&screen->mc, true);
|
||||
}
|
||||
}
|
||||
|
||||
@ -774,7 +735,7 @@ sc_screen_set_paused(struct sc_screen *screen, bool paused) {
|
||||
}
|
||||
|
||||
void
|
||||
sc_screen_switch_fullscreen(struct sc_screen *screen) {
|
||||
sc_screen_toggle_fullscreen(struct sc_screen *screen) {
|
||||
assert(screen->video);
|
||||
|
||||
uint32_t new_mode = screen->fullscreen ? 0 : SDL_WINDOW_FULLSCREEN_DESKTOP;
|
||||
@ -837,15 +798,8 @@ sc_screen_resize_to_pixel_perfect(struct sc_screen *screen) {
|
||||
content_size.height);
|
||||
}
|
||||
|
||||
static inline bool
|
||||
sc_screen_is_mouse_capture_key(SDL_Keycode key) {
|
||||
return key == SDLK_LALT || key == SDLK_LGUI || key == SDLK_RGUI;
|
||||
}
|
||||
|
||||
bool
|
||||
sc_screen_handle_event(struct sc_screen *screen, const SDL_Event *event) {
|
||||
bool relative_mode = sc_screen_is_relative_mode(screen);
|
||||
|
||||
switch (event->type) {
|
||||
case SC_EVENT_SCREEN_INIT_SIZE: {
|
||||
// The initial size is passed via screen->frame_size
|
||||
@ -903,69 +857,14 @@ sc_screen_handle_event(struct sc_screen *screen, const SDL_Event *event) {
|
||||
apply_pending_resize(screen);
|
||||
sc_screen_render(screen, true);
|
||||
break;
|
||||
case SDL_WINDOWEVENT_FOCUS_LOST:
|
||||
if (relative_mode) {
|
||||
sc_screen_set_mouse_capture(screen, false);
|
||||
}
|
||||
break;
|
||||
}
|
||||
return true;
|
||||
case SDL_KEYDOWN:
|
||||
if (relative_mode) {
|
||||
SDL_Keycode key = event->key.keysym.sym;
|
||||
if (sc_screen_is_mouse_capture_key(key)) {
|
||||
if (!screen->mouse_capture_key_pressed) {
|
||||
screen->mouse_capture_key_pressed = key;
|
||||
} else {
|
||||
// Another mouse capture key has been pressed, cancel
|
||||
// mouse (un)capture
|
||||
screen->mouse_capture_key_pressed = 0;
|
||||
}
|
||||
// Mouse capture keys are never forwarded to the device
|
||||
return true;
|
||||
}
|
||||
}
|
||||
break;
|
||||
case SDL_KEYUP:
|
||||
if (relative_mode) {
|
||||
SDL_Keycode key = event->key.keysym.sym;
|
||||
SDL_Keycode cap = screen->mouse_capture_key_pressed;
|
||||
screen->mouse_capture_key_pressed = 0;
|
||||
if (sc_screen_is_mouse_capture_key(key)) {
|
||||
if (key == cap) {
|
||||
// A mouse capture key has been pressed then released:
|
||||
// toggle the capture mouse mode
|
||||
sc_screen_toggle_mouse_capture(screen);
|
||||
}
|
||||
// Mouse capture keys are never forwarded to the device
|
||||
return true;
|
||||
}
|
||||
}
|
||||
break;
|
||||
case SDL_MOUSEWHEEL:
|
||||
case SDL_MOUSEMOTION:
|
||||
case SDL_MOUSEBUTTONDOWN:
|
||||
if (relative_mode && !sc_screen_get_mouse_capture(screen)) {
|
||||
// Do not forward to input manager, the mouse will be captured
|
||||
// on SDL_MOUSEBUTTONUP
|
||||
return true;
|
||||
}
|
||||
break;
|
||||
case SDL_FINGERMOTION:
|
||||
case SDL_FINGERDOWN:
|
||||
case SDL_FINGERUP:
|
||||
if (relative_mode) {
|
||||
// Touch events are not compatible with relative mode
|
||||
// (coordinates are not relative)
|
||||
return true;
|
||||
}
|
||||
break;
|
||||
case SDL_MOUSEBUTTONUP:
|
||||
if (relative_mode && !sc_screen_get_mouse_capture(screen)) {
|
||||
sc_screen_set_mouse_capture(screen, true);
|
||||
return true;
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
if (sc_screen_is_relative_mode(screen)
|
||||
&& sc_mouse_capture_handle_event(&screen->mc, event)) {
|
||||
// The mouse capture handler consumed the event
|
||||
return true;
|
||||
}
|
||||
|
||||
sc_input_manager_handle_event(&screen->im, event);
|
||||
|
@ -13,6 +13,7 @@
|
||||
#include "fps_counter.h"
|
||||
#include "frame_buffer.h"
|
||||
#include "input_manager.h"
|
||||
#include "mouse_capture.h"
|
||||
#include "opengl.h"
|
||||
#include "options.h"
|
||||
#include "trait/key_processor.h"
|
||||
@ -30,6 +31,7 @@ struct sc_screen {
|
||||
|
||||
struct sc_display display;
|
||||
struct sc_input_manager im;
|
||||
struct sc_mouse_capture mc; // only used in mouse relative mode
|
||||
struct sc_frame_buffer fb;
|
||||
struct sc_fps_counter fps_counter;
|
||||
|
||||
@ -61,10 +63,6 @@ struct sc_screen {
|
||||
bool maximized;
|
||||
bool minimized;
|
||||
|
||||
// To enable/disable mouse capture, a mouse capture key (LALT, LGUI or
|
||||
// RGUI) must be pressed. This variable tracks the pressed capture key.
|
||||
SDL_Keycode mouse_capture_key_pressed;
|
||||
|
||||
AVFrame *frame;
|
||||
|
||||
bool paused;
|
||||
@ -126,9 +124,9 @@ sc_screen_destroy(struct sc_screen *screen);
|
||||
void
|
||||
sc_screen_hide_window(struct sc_screen *screen);
|
||||
|
||||
// switch the fullscreen mode
|
||||
// toggle the fullscreen mode
|
||||
void
|
||||
sc_screen_switch_fullscreen(struct sc_screen *screen);
|
||||
sc_screen_toggle_fullscreen(struct sc_screen *screen);
|
||||
|
||||
// resize window to optimal size (remove black borders)
|
||||
void
|
||||
|
@ -66,56 +66,6 @@ get_server_path(void) {
|
||||
return server_path;
|
||||
}
|
||||
|
||||
static void
|
||||
sc_server_params_destroy(struct sc_server_params *params) {
|
||||
// The server stores a copy of the params provided by the user
|
||||
free((char *) params->req_serial);
|
||||
free((char *) params->crop);
|
||||
free((char *) params->video_codec_options);
|
||||
free((char *) params->audio_codec_options);
|
||||
free((char *) params->video_encoder);
|
||||
free((char *) params->audio_encoder);
|
||||
free((char *) params->tcpip_dst);
|
||||
free((char *) params->camera_id);
|
||||
free((char *) params->camera_ar);
|
||||
}
|
||||
|
||||
static bool
|
||||
sc_server_params_copy(struct sc_server_params *dst,
|
||||
const struct sc_server_params *src) {
|
||||
*dst = *src;
|
||||
|
||||
// The params reference user-allocated memory, so we must copy them to
|
||||
// handle them from another thread
|
||||
|
||||
#define COPY(FIELD) do { \
|
||||
dst->FIELD = NULL; \
|
||||
if (src->FIELD) { \
|
||||
dst->FIELD = strdup(src->FIELD); \
|
||||
if (!dst->FIELD) { \
|
||||
goto error; \
|
||||
} \
|
||||
} \
|
||||
} while(0)
|
||||
|
||||
COPY(req_serial);
|
||||
COPY(crop);
|
||||
COPY(video_codec_options);
|
||||
COPY(audio_codec_options);
|
||||
COPY(video_encoder);
|
||||
COPY(audio_encoder);
|
||||
COPY(tcpip_dst);
|
||||
COPY(camera_id);
|
||||
COPY(camera_ar);
|
||||
#undef COPY
|
||||
|
||||
return true;
|
||||
|
||||
error:
|
||||
sc_server_params_destroy(dst);
|
||||
return false;
|
||||
}
|
||||
|
||||
static bool
|
||||
push_server(struct sc_intr *intr, const char *serial) {
|
||||
char *server_path = get_server_path();
|
||||
@ -405,6 +355,10 @@ execute_server(struct sc_server *server,
|
||||
// By default, power_on is true
|
||||
ADD_PARAM("power_on=false");
|
||||
}
|
||||
if (params->new_display) {
|
||||
VALIDATE_STRING(params->new_display);
|
||||
ADD_PARAM("new_display=%s", params->new_display);
|
||||
}
|
||||
if (params->list & SC_OPTION_LIST_ENCODERS) {
|
||||
ADD_PARAM("list_encoders=true");
|
||||
}
|
||||
@ -417,6 +371,9 @@ execute_server(struct sc_server *server,
|
||||
if (params->list & SC_OPTION_LIST_CAMERA_SIZES) {
|
||||
ADD_PARAM("list_camera_sizes=true");
|
||||
}
|
||||
if (params->list & SC_OPTION_LIST_APPS) {
|
||||
ADD_PARAM("list_apps=true");
|
||||
}
|
||||
|
||||
#undef ADD_PARAM
|
||||
|
||||
@ -499,22 +456,18 @@ connect_to_server(struct sc_server *server, unsigned attempts, sc_tick delay,
|
||||
bool
|
||||
sc_server_init(struct sc_server *server, const struct sc_server_params *params,
|
||||
const struct sc_server_callbacks *cbs, void *cbs_userdata) {
|
||||
bool ok = sc_server_params_copy(&server->params, params);
|
||||
if (!ok) {
|
||||
LOG_OOM();
|
||||
return false;
|
||||
}
|
||||
// The allocated data in params (const char *) must remain valid until the
|
||||
// end of the program
|
||||
server->params = *params;
|
||||
|
||||
ok = sc_mutex_init(&server->mutex);
|
||||
bool ok = sc_mutex_init(&server->mutex);
|
||||
if (!ok) {
|
||||
sc_server_params_destroy(&server->params);
|
||||
return false;
|
||||
}
|
||||
|
||||
ok = sc_cond_init(&server->cond_stopped);
|
||||
if (!ok) {
|
||||
sc_mutex_destroy(&server->mutex);
|
||||
sc_server_params_destroy(&server->params);
|
||||
return false;
|
||||
}
|
||||
|
||||
@ -522,7 +475,6 @@ sc_server_init(struct sc_server *server, const struct sc_server_params *params,
|
||||
if (!ok) {
|
||||
sc_cond_destroy(&server->cond_stopped);
|
||||
sc_mutex_destroy(&server->mutex);
|
||||
sc_server_params_destroy(&server->params);
|
||||
return false;
|
||||
}
|
||||
|
||||
@ -1161,7 +1113,6 @@ sc_server_destroy(struct sc_server *server) {
|
||||
|
||||
free(server->serial);
|
||||
free(server->device_socket_name);
|
||||
sc_server_params_destroy(&server->params);
|
||||
sc_intr_destroy(&server->intr);
|
||||
sc_cond_destroy(&server->cond_stopped);
|
||||
sc_mutex_destroy(&server->mutex);
|
||||
|
@ -48,6 +48,7 @@ struct sc_server_params {
int8_t lock_video_orientation;
bool control;
uint32_t display_id;
const char *new_display;
bool video;
bool audio;
bool audio_dup;
60
app/src/shortcut_mod.h
Normal file
@ -0,0 +1,60 @@
|
||||
#ifndef SC_SHORTCUT_MOD_H
|
||||
#define SC_SHORTCUT_MOD_H
|
||||
|
||||
#include "common.h"
|
||||
|
||||
#include <stdbool.h>
|
||||
#include <stdint.h>
|
||||
#include <SDL2/SDL_keycode.h>
|
||||
|
||||
#include "options.h"
|
||||
|
||||
#define SC_SDL_SHORTCUT_MODS_MASK (KMOD_CTRL | KMOD_ALT | KMOD_GUI)
|
||||
|
||||
// input: OR of enum sc_shortcut_mod
|
||||
// output: OR of SDL_Keymod
|
||||
static inline uint16_t
|
||||
sc_shortcut_mods_to_sdl(uint8_t shortcut_mods) {
|
||||
uint16_t sdl_mod = 0;
|
||||
if (shortcut_mods & SC_SHORTCUT_MOD_LCTRL) {
|
||||
sdl_mod |= KMOD_LCTRL;
|
||||
}
|
||||
if (shortcut_mods & SC_SHORTCUT_MOD_RCTRL) {
|
||||
sdl_mod |= KMOD_RCTRL;
|
||||
}
|
||||
if (shortcut_mods & SC_SHORTCUT_MOD_LALT) {
|
||||
sdl_mod |= KMOD_LALT;
|
||||
}
|
||||
if (shortcut_mods & SC_SHORTCUT_MOD_RALT) {
|
||||
sdl_mod |= KMOD_RALT;
|
||||
}
|
||||
if (shortcut_mods & SC_SHORTCUT_MOD_LSUPER) {
|
||||
sdl_mod |= KMOD_LGUI;
|
||||
}
|
||||
if (shortcut_mods & SC_SHORTCUT_MOD_RSUPER) {
|
||||
sdl_mod |= KMOD_RGUI;
|
||||
}
|
||||
return sdl_mod;
|
||||
}
|
||||
|
||||
static inline bool
|
||||
sc_shortcut_mods_is_shortcut_mod(uint16_t sdl_shortcut_mods, uint16_t sdl_mod) {
|
||||
// sdl_shortcut_mods must be within the mask
|
||||
assert(!(sdl_shortcut_mods & ~SC_SDL_SHORTCUT_MODS_MASK));
|
||||
|
||||
// at least one shortcut mod pressed?
|
||||
return sdl_mod & sdl_shortcut_mods;
|
||||
}
|
||||
|
||||
static inline bool
|
||||
sc_shortcut_mods_is_shortcut_key(uint16_t sdl_shortcut_mods,
|
||||
SDL_Keycode keycode) {
|
||||
return (sdl_shortcut_mods & KMOD_LCTRL && keycode == SDLK_LCTRL)
|
||||
|| (sdl_shortcut_mods & KMOD_RCTRL && keycode == SDLK_RCTRL)
|
||||
|| (sdl_shortcut_mods & KMOD_LALT && keycode == SDLK_LALT)
|
||||
|| (sdl_shortcut_mods & KMOD_RALT && keycode == SDLK_RALT)
|
||||
|| (sdl_shortcut_mods & KMOD_LGUI && keycode == SDLK_LGUI)
|
||||
|| (sdl_shortcut_mods & KMOD_RGUI && keycode == SDLK_RGUI);
|
||||
}
|
||||
|
||||
#endif
|
@ -1,6 +1,8 @@
#ifndef SC_AOA_HID_H
#define SC_AOA_HID_H
#include "common.h"
#include <stdint.h>
#include <stdbool.h>
@ -185,6 +185,7 @@ scrcpy_otg(struct scrcpy_options *options) {
.window_width = options->window_width,
.window_height = options->window_height,
.window_borderless = options->window_borderless,
.shortcut_mods = options->shortcut_mods,
};
ok = sc_screen_otg_init(&s->screen_otg, &params);
@ -4,47 +4,6 @@
|
||||
#include "options.h"
|
||||
#include "util/log.h"
|
||||
|
||||
static void
|
||||
sc_screen_otg_set_mouse_capture(struct sc_screen_otg *screen, bool capture) {
|
||||
#ifdef __APPLE__
|
||||
// Workaround for SDL bug on macOS:
|
||||
// <https://github.com/libsdl-org/SDL/issues/5340>
|
||||
if (capture) {
|
||||
int mouse_x, mouse_y;
|
||||
SDL_GetGlobalMouseState(&mouse_x, &mouse_y);
|
||||
|
||||
int x, y, w, h;
|
||||
SDL_GetWindowPosition(screen->window, &x, &y);
|
||||
SDL_GetWindowSize(screen->window, &w, &h);
|
||||
|
||||
bool outside_window = mouse_x < x || mouse_x >= x + w
|
||||
|| mouse_y < y || mouse_y >= y + h;
|
||||
if (outside_window) {
|
||||
SDL_WarpMouseInWindow(screen->window, w / 2, h / 2);
|
||||
}
|
||||
}
|
||||
#else
|
||||
(void) screen;
|
||||
#endif
|
||||
if (SDL_SetRelativeMouseMode(capture)) {
|
||||
LOGE("Could not set relative mouse mode to %s: %s",
|
||||
capture ? "true" : "false", SDL_GetError());
|
||||
}
|
||||
}
|
||||
|
||||
static inline bool
|
||||
sc_screen_otg_get_mouse_capture(struct sc_screen_otg *screen) {
|
||||
(void) screen;
|
||||
return SDL_GetRelativeMouseMode();
|
||||
}
|
||||
|
||||
static inline void
|
||||
sc_screen_otg_toggle_mouse_capture(struct sc_screen_otg *screen) {
|
||||
(void) screen;
|
||||
bool new_value = !sc_screen_otg_get_mouse_capture(screen);
|
||||
sc_screen_otg_set_mouse_capture(screen, new_value);
|
||||
}
|
||||
|
||||
static void
|
||||
sc_screen_otg_render(struct sc_screen_otg *screen) {
|
||||
SDL_RenderClear(screen->renderer);
|
||||
@ -61,8 +20,6 @@ sc_screen_otg_init(struct sc_screen_otg *screen,
|
||||
screen->mouse = params->mouse;
|
||||
screen->gamepad = params->gamepad;
|
||||
|
||||
screen->mouse_capture_key_pressed = 0;
|
||||
|
||||
const char *title = params->window_title;
|
||||
assert(title);
|
||||
|
||||
@ -113,9 +70,11 @@ sc_screen_otg_init(struct sc_screen_otg *screen,
|
||||
LOGW("Could not load icon");
|
||||
}
|
||||
|
||||
sc_mouse_capture_init(&screen->mc, screen->window, params->shortcut_mods);
|
||||
|
||||
if (screen->mouse) {
|
||||
// Capture mouse on start
|
||||
sc_screen_otg_set_mouse_capture(screen, true);
|
||||
sc_mouse_capture_set_active(&screen->mc, true);
|
||||
}
|
||||
|
||||
return true;
|
||||
@ -137,11 +96,6 @@ sc_screen_otg_destroy(struct sc_screen_otg *screen) {
|
||||
SDL_DestroyWindow(screen->window);
|
||||
}
|
||||
|
||||
static inline bool
|
||||
sc_screen_otg_is_mouse_capture_key(SDL_Keycode key) {
|
||||
return key == SDLK_LALT || key == SDLK_LGUI || key == SDLK_RGUI;
|
||||
}
|
||||
|
||||
static void
|
||||
sc_screen_otg_process_key(struct sc_screen_otg *screen,
|
||||
const SDL_KeyboardEvent *event) {
|
||||
@ -298,80 +252,46 @@ sc_screen_otg_process_gamepad_button(struct sc_screen_otg *screen,
|
||||
|
||||
void
|
||||
sc_screen_otg_handle_event(struct sc_screen_otg *screen, SDL_Event *event) {
|
||||
if (sc_mouse_capture_handle_event(&screen->mc, event)) {
|
||||
// The mouse capture handler consumed the event
|
||||
return;
|
||||
}
|
||||
|
||||
switch (event->type) {
|
||||
case SDL_WINDOWEVENT:
|
||||
switch (event->window.event) {
|
||||
case SDL_WINDOWEVENT_EXPOSED:
|
||||
sc_screen_otg_render(screen);
|
||||
break;
|
||||
case SDL_WINDOWEVENT_FOCUS_LOST:
|
||||
if (screen->mouse) {
|
||||
sc_screen_otg_set_mouse_capture(screen, false);
|
||||
}
|
||||
break;
|
||||
}
|
||||
return;
|
||||
case SDL_KEYDOWN:
|
||||
if (screen->mouse) {
|
||||
SDL_Keycode key = event->key.keysym.sym;
|
||||
if (sc_screen_otg_is_mouse_capture_key(key)) {
|
||||
if (!screen->mouse_capture_key_pressed) {
|
||||
screen->mouse_capture_key_pressed = key;
|
||||
} else {
|
||||
// Another mouse capture key has been pressed, cancel
|
||||
// mouse (un)capture
|
||||
screen->mouse_capture_key_pressed = 0;
|
||||
}
|
||||
// Mouse capture keys are never forwarded to the device
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
if (screen->keyboard) {
|
||||
sc_screen_otg_process_key(screen, &event->key);
|
||||
}
|
||||
break;
|
||||
case SDL_KEYUP:
|
||||
if (screen->mouse) {
|
||||
SDL_Keycode key = event->key.keysym.sym;
|
||||
SDL_Keycode cap = screen->mouse_capture_key_pressed;
|
||||
screen->mouse_capture_key_pressed = 0;
|
||||
if (sc_screen_otg_is_mouse_capture_key(key)) {
|
||||
if (key == cap) {
|
||||
// A mouse capture key has been pressed then released:
|
||||
// toggle the capture mouse mode
|
||||
sc_screen_otg_toggle_mouse_capture(screen);
|
||||
}
|
||||
// Mouse capture keys are never forwarded to the device
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
if (screen->keyboard) {
|
||||
sc_screen_otg_process_key(screen, &event->key);
|
||||
}
|
||||
break;
|
||||
case SDL_MOUSEMOTION:
|
||||
if (screen->mouse && sc_screen_otg_get_mouse_capture(screen)) {
|
||||
if (screen->mouse) {
|
||||
sc_screen_otg_process_mouse_motion(screen, &event->motion);
|
||||
}
|
||||
break;
|
||||
case SDL_MOUSEBUTTONDOWN:
|
||||
if (screen->mouse && sc_screen_otg_get_mouse_capture(screen)) {
|
||||
if (screen->mouse) {
|
||||
sc_screen_otg_process_mouse_button(screen, &event->button);
|
||||
}
|
||||
break;
|
||||
case SDL_MOUSEBUTTONUP:
|
||||
if (screen->mouse) {
|
||||
if (sc_screen_otg_get_mouse_capture(screen)) {
|
||||
sc_screen_otg_process_mouse_button(screen, &event->button);
|
||||
} else {
|
||||
sc_screen_otg_set_mouse_capture(screen, true);
|
||||
}
|
||||
sc_screen_otg_process_mouse_button(screen, &event->button);
|
||||
}
|
||||
break;
|
||||
case SDL_MOUSEWHEEL:
|
||||
if (screen->mouse && sc_screen_otg_get_mouse_capture(screen)) {
|
||||
if (screen->mouse) {
|
||||
sc_screen_otg_process_mouse_wheel(screen, &event->wheel);
|
||||
}
|
||||
break;
|
||||
|
@ -8,6 +8,7 @@
|
||||
|
||||
#include "keyboard_aoa.h"
|
||||
#include "mouse_aoa.h"
|
||||
#include "mouse_capture.h"
|
||||
#include "gamepad_aoa.h"
|
||||
|
||||
struct sc_screen_otg {
|
||||
@ -19,8 +20,7 @@ struct sc_screen_otg {
|
||||
SDL_Renderer *renderer;
|
||||
SDL_Texture *texture;
|
||||
|
||||
// See equivalent mechanism in screen.h
|
||||
SDL_Keycode mouse_capture_key_pressed;
|
||||
struct sc_mouse_capture mc;
|
||||
};
|
||||
|
||||
struct sc_screen_otg_params {
|
||||
@ -35,6 +35,7 @@ struct sc_screen_otg_params {
|
||||
uint16_t window_width;
|
||||
uint16_t window_height;
|
||||
bool window_borderless;
|
||||
uint8_t shortcut_mods; // OR of enum sc_shortcut_mod values
|
||||
};
|
||||
|
||||
bool
|
||||
|
@ -10,14 +10,14 @@ typedef int64_t sc_tick;
#define SC_TICK_FREQ 1000000 // microsecond
// To be adapted if SC_TICK_FREQ changes
#define SC_TICK_TO_NS(tick) ((tick) * 1000)
#define SC_TICK_TO_US(tick) (tick)
#define SC_TICK_TO_MS(tick) ((tick) / 1000)
#define SC_TICK_TO_SEC(tick) ((tick) / 1000000)
#define SC_TICK_FROM_NS(ns) ((ns) / 1000)
#define SC_TICK_FROM_US(us) (us)
#define SC_TICK_FROM_MS(ms) ((ms) * 1000)
#define SC_TICK_FROM_SEC(sec) ((sec) * 1000000)
#define SC_TICK_TO_NS(tick) ((sc_tick) (tick) * 1000)
#define SC_TICK_TO_US(tick) ((sc_tick) tick)
#define SC_TICK_TO_MS(tick) ((sc_tick) (tick) / 1000)
#define SC_TICK_TO_SEC(tick) ((sc_tick) (tick) / 1000000)
#define SC_TICK_FROM_NS(ns) ((sc_tick) (ns) / 1000)
#define SC_TICK_FROM_US(us) ((sc_tick) us)
#define SC_TICK_FROM_MS(ms) ((sc_tick) (ms) * 1000)
#define SC_TICK_FROM_SEC(sec) ((sc_tick) (sec) * 1000000)
sc_tick
sc_tick_now(void);
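The added `(sc_tick)` casts matter because the arithmetic in the old macros was performed in the type of the argument. A minimal standalone illustration (the values below are made up): with a 32-bit `int` input, the old `SC_TICK_FROM_SEC()` multiplied in `int` and could overflow before the result was widened to `sc_tick`.

```c
#include <inttypes.h>
#include <stdio.h>

typedef int64_t sc_tick;

// Old form: the multiplication happens in the type of `sec` (here, 32-bit int)
#define SC_TICK_FROM_SEC_OLD(sec) ((sec) * 1000000)
// New form: widen to sc_tick before multiplying
#define SC_TICK_FROM_SEC_NEW(sec) ((sc_tick) (sec) * 1000000)

int main(void) {
    int sec = 5000; // 5000 s = 5e9 microseconds, which does not fit in a 32-bit int
    sc_tick bad = SC_TICK_FROM_SEC_OLD(sec);  // signed overflow before widening
    sc_tick good = SC_TICK_FROM_SEC_NEW(sec); // 5000000000
    printf("old: %" PRId64 ", new: %" PRId64 "\n", bad, good);
    return 0;
}
```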
@ -62,6 +62,7 @@ void
sc_timeout_stop(struct sc_timeout *timeout) {
sc_mutex_lock(&timeout->mutex);
timeout->stopped = true;
sc_cond_signal(&timeout->cond);
sc_mutex_unlock(&timeout->mutex);
}
@ -7,7 +7,7 @@ buildscript {
mavenCentral()
}
dependencies {
classpath 'com.android.tools.build:gradle:8.3.0'
classpath 'com.android.tools.build:gradle:8.7.1'
// NOTE: Do not place your application dependencies here; they belong
// in the individual module build.gradle files
@ -94,14 +94,18 @@ the content (if supported by the app) relative to the center of the screen.
https://github.com/Genymobile/scrcpy/assets/543275/26c4a920-9805-43f1-8d4c-608752d04767

To simulate a tilt gesture: <kbd>Shift</kbd>+_click-and-move-up-or-down_.
To simulate a vertical tilt gesture: <kbd>Shift</kbd>+_click-and-move-up-or-down_.

https://github.com/Genymobile/scrcpy/assets/543275/1e252341-4a90-4b29-9d11-9153b324669f

Similarly, to simulate a horizontal tilt gesture:
<kbd>Ctrl</kbd>+<kbd>Shift</kbd>+_click-and-move-left-or-right_.

Technically, _scrcpy_ generates additional touch events from a "virtual finger"
at a location inverted through the center of the screen. When pressing
<kbd>Ctrl</kbd> the _x_ and _y_ coordinates are inverted. Using <kbd>Shift</kbd>
only inverts _x_.
only inverts _x_, whereas using <kbd>Ctrl</kbd>+<kbd>Shift</kbd> only inverts
_y_.

This only works for the default mouse mode (`--mouse=sdk`).
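A small sketch of the inversion described above (the type and function names here are illustrative, not the actual scrcpy helpers): the virtual finger mirrors the real one through the screen center, per axis.

```c
#include <stdbool.h>
#include <stdint.h>

struct point { int32_t x; int32_t y; };
struct size { uint16_t width; uint16_t height; };

// Mirror the physical finger through the screen center, per axis:
//   Ctrl       -> invert x and y (pinch-to-zoom/rotate around the center)
//   Shift      -> invert x only  (vertical tilt)
//   Ctrl+Shift -> invert y only  (horizontal tilt)
static struct point
virtual_finger(struct point finger, struct size screen,
               bool invert_x, bool invert_y) {
    struct point vfinger = finger;
    if (invert_x) {
        vfinger.x = screen.width - finger.x;
    }
    if (invert_y) {
        vfinger.y = screen.height - finger.y;
    }
    return vfinger;
}
```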
@ -53,7 +53,8 @@ _<kbd>[Super]</kbd> is typically the <kbd>Windows</kbd> or <kbd>Cmd</kbd> key._
| Open keyboard settings (HID keyboard only) | <kbd>MOD</kbd>+<kbd>k</kbd>
| Enable/disable FPS counter (on stdout) | <kbd>MOD</kbd>+<kbd>i</kbd>
| Pinch-to-zoom/rotate | <kbd>Ctrl</kbd>+_click-and-move_
| Tilt (slide vertically with 2 fingers) | <kbd>Shift</kbd>+_click-and-move_
| Tilt vertically (slide with 2 fingers) | <kbd>Shift</kbd>+_click-and-move_
| Tilt horizontally (slide with 2 fingers) | <kbd>Ctrl</kbd>+<kbd>Shift</kbd>+_click-and-move_
| Drag & drop APK file | Install APK from computer
| Drag & drop non-APK file | [Push file to device](control.md#push-file-to-device)
4
gradle/wrapper/gradle-wrapper.properties
vendored
@ -1,5 +1,7 @@
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-8.4-bin.zip
distributionUrl=https\://services.gradle.org/distributions/gradle-8.9-bin.zip
# https://gradle.org/release-checksums/
distributionSha256Sum=d725d707bfabd4dfdc958c624003b3c80accc03f7037b5122c4b1d0ef15cecab
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
@ -2,11 +2,11 @@ apply plugin: 'com.android.application'
android {
namespace 'com.genymobile.scrcpy'
compileSdk 34
compileSdk 35
defaultConfig {
applicationId "com.genymobile.scrcpy"
minSdkVersion 21
targetSdkVersion 34
targetSdkVersion 35
versionCode 20700
versionName "2.7"
testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner"
@ -14,8 +14,8 @@ set -e
|
||||
SCRCPY_DEBUG=false
|
||||
SCRCPY_VERSION_NAME=2.7
|
||||
|
||||
PLATFORM=${ANDROID_PLATFORM:-34}
|
||||
BUILD_TOOLS=${ANDROID_BUILD_TOOLS:-34.0.0}
|
||||
PLATFORM=${ANDROID_PLATFORM:-35}
|
||||
BUILD_TOOLS=${ANDROID_BUILD_TOOLS:-35.0.0}
|
||||
BUILD_TOOLS_DIR="$ANDROID_HOME/build-tools/$BUILD_TOOLS"
|
||||
|
||||
BUILD_DIR="$(realpath ${BUILD_DIR:-build_manual})"
|
||||
@ -45,10 +45,10 @@ EOF
|
||||
|
||||
echo "Generating java from aidl..."
|
||||
cd "$SERVER_DIR/src/main/aidl"
|
||||
"$BUILD_TOOLS_DIR/aidl" -o"$GEN_DIR" android/view/IRotationWatcher.aidl
|
||||
"$BUILD_TOOLS_DIR/aidl" -o"$GEN_DIR" \
|
||||
"$BUILD_TOOLS_DIR/aidl" -o"$GEN_DIR" -I. android/view/IRotationWatcher.aidl
|
||||
"$BUILD_TOOLS_DIR/aidl" -o"$GEN_DIR" -I. \
|
||||
android/content/IOnPrimaryClipChangedListener.aidl
|
||||
"$BUILD_TOOLS_DIR/aidl" -o"$GEN_DIR" android/view/IDisplayFoldListener.aidl
|
||||
"$BUILD_TOOLS_DIR/aidl" -o"$GEN_DIR" -I. android/view/IDisplayFoldListener.aidl
|
||||
|
||||
SRC=( \
|
||||
com/genymobile/scrcpy/*.java \
|
||||
|
@ -0,0 +1,32 @@
|
||||
package com.genymobile.scrcpy;
|
||||
|
||||
import android.os.Build;
|
||||
|
||||
/**
|
||||
* Android version code constants, done right.
|
||||
* <p/>
|
||||
* <a href="https://apilevels.com/">API levels</a>
|
||||
*/
|
||||
public final class AndroidVersions {
|
||||
|
||||
private AndroidVersions() {
|
||||
// not instantiable
|
||||
}
|
||||
|
||||
public static final int API_21_ANDROID_5_0 = Build.VERSION_CODES.LOLLIPOP;
|
||||
public static final int API_22_ANDROID_5_1 = Build.VERSION_CODES.LOLLIPOP_MR1;
|
||||
public static final int API_23_ANDROID_6_0 = Build.VERSION_CODES.M;
|
||||
public static final int API_24_ANDROID_7_0 = Build.VERSION_CODES.N;
|
||||
public static final int API_25_ANDROID_7_1 = Build.VERSION_CODES.N_MR1;
|
||||
public static final int API_26_ANDROID_8_0 = Build.VERSION_CODES.O;
|
||||
public static final int API_27_ANDROID_8_1 = Build.VERSION_CODES.O_MR1;
|
||||
public static final int API_28_ANDROID_9 = Build.VERSION_CODES.P;
|
||||
public static final int API_29_ANDROID_10 = Build.VERSION_CODES.Q;
|
||||
public static final int API_30_ANDROID_11 = Build.VERSION_CODES.R;
|
||||
public static final int API_31_ANDROID_12 = Build.VERSION_CODES.S;
|
||||
public static final int API_32_ANDROID_12L = Build.VERSION_CODES.S_V2;
|
||||
public static final int API_33_ANDROID_13 = Build.VERSION_CODES.TIRAMISU;
|
||||
public static final int API_34_ANDROID_14 = Build.VERSION_CODES.UPSIDE_DOWN_CAKE;
|
||||
public static final int API_35_ANDROID_15 = Build.VERSION_CODES.VANILLA_ICE_CREAM;
|
||||
|
||||
}
|
@ -139,8 +139,10 @@ public final class CleanUp {
|
||||
|
||||
if (Device.isScreenOn()) {
|
||||
if (powerOffScreen) {
|
||||
Ln.i("Power off screen");
|
||||
Device.powerOffScreen(displayId);
|
||||
if (displayId != Device.DISPLAY_ID_NONE) {
|
||||
Ln.i("Power off screen");
|
||||
Device.powerOffScreen(displayId);
|
||||
}
|
||||
} else if (restoreNormalPowerMode) {
|
||||
Ln.i("Restoring normal power mode");
|
||||
Device.setScreenPowerMode(Device.POWER_MODE_NORMAL);
|
||||
|
@ -4,7 +4,6 @@ import android.annotation.TargetApi;
|
||||
import android.content.AttributionSource;
|
||||
import android.content.Context;
|
||||
import android.content.ContextWrapper;
|
||||
import android.os.Build;
|
||||
import android.os.Process;
|
||||
|
||||
public final class FakeContext extends ContextWrapper {
|
||||
@ -32,7 +31,7 @@ public final class FakeContext extends ContextWrapper {
|
||||
return PACKAGE_NAME;
|
||||
}
|
||||
|
||||
@TargetApi(Build.VERSION_CODES.S)
|
||||
@TargetApi(AndroidVersions.API_31_ANDROID_12)
|
||||
@Override
|
||||
public AttributionSource getAttributionSource() {
|
||||
AttributionSource.Builder builder = new AttributionSource.Builder(Process.SHELL_UID);
|
||||
|
@ -2,6 +2,7 @@ package com.genymobile.scrcpy;
|
||||
|
||||
import com.genymobile.scrcpy.audio.AudioCodec;
|
||||
import com.genymobile.scrcpy.audio.AudioSource;
|
||||
import com.genymobile.scrcpy.device.NewDisplay;
|
||||
import com.genymobile.scrcpy.device.Size;
|
||||
import com.genymobile.scrcpy.util.CodecOption;
|
||||
import com.genymobile.scrcpy.util.Ln;
|
||||
@ -54,10 +55,13 @@ public class Options {
|
||||
private boolean cleanup = true;
|
||||
private boolean powerOn = true;
|
||||
|
||||
private NewDisplay newDisplay;
|
||||
|
||||
private boolean listEncoders;
|
||||
private boolean listDisplays;
|
||||
private boolean listCameras;
|
||||
private boolean listCameraSizes;
|
||||
private boolean listApps;
|
||||
|
||||
// Options not used by the scrcpy client, but useful to use scrcpy-server directly
|
||||
private boolean sendDeviceMeta = true; // send device name and size
|
||||
@ -205,8 +209,12 @@ public class Options {
|
||||
return powerOn;
|
||||
}
|
||||
|
||||
public NewDisplay getNewDisplay() {
|
||||
return newDisplay;
|
||||
}
|
||||
|
||||
public boolean getList() {
|
||||
return listEncoders || listDisplays || listCameras || listCameraSizes;
|
||||
return listEncoders || listDisplays || listCameras || listCameraSizes || listApps;
|
||||
}
|
||||
|
||||
public boolean getListEncoders() {
|
||||
@ -225,6 +233,10 @@ public class Options {
|
||||
return listCameraSizes;
|
||||
}
|
||||
|
||||
public boolean getListApps() {
|
||||
return listApps;
|
||||
}
|
||||
|
||||
public boolean getSendDeviceMeta() {
|
||||
return sendDeviceMeta;
|
||||
}
|
||||
@ -388,6 +400,9 @@ public class Options {
|
||||
case "list_camera_sizes":
|
||||
options.listCameraSizes = Boolean.parseBoolean(value);
|
||||
break;
|
||||
case "list_apps":
|
||||
options.listApps = Boolean.parseBoolean(value);
|
||||
break;
|
||||
case "camera_id":
|
||||
if (!value.isEmpty()) {
|
||||
options.cameraId = value;
|
||||
@ -418,6 +433,9 @@ public class Options {
|
||||
case "camera_high_speed":
|
||||
options.cameraHighSpeed = Boolean.parseBoolean(value);
|
||||
break;
|
||||
case "new_display":
|
||||
options.newDisplay = parseNewDisplay(value);
|
||||
break;
|
||||
case "send_device_meta":
|
||||
options.sendDeviceMeta = Boolean.parseBoolean(value);
|
||||
break;
|
||||
@ -475,6 +493,9 @@ public class Options {
|
||||
}
|
||||
int width = Integer.parseInt(tokens[0]);
|
||||
int height = Integer.parseInt(tokens[1]);
|
||||
if (width <= 0 || height <= 0) {
|
||||
throw new IllegalArgumentException("Invalid non-positive size dimension: \"" + size + "\"");
|
||||
}
|
||||
return new Size(width, height);
|
||||
}
|
||||
|
||||
@ -501,4 +522,32 @@ public class Options {
|
||||
throw new IllegalArgumentException("Invalid float value for " + key + ": \"" + value + "\"");
|
||||
}
|
||||
}
|
||||
|
||||
private static NewDisplay parseNewDisplay(String newDisplay) {
|
||||
// input format: "auto", or "<width>x<height>", or "<width>x<height>/<dpi>"
|
||||
if ("auto".equals(newDisplay)) {
|
||||
return new NewDisplay();
|
||||
}
|
||||
|
||||
String[] tokens = newDisplay.split("/");
|
||||
|
||||
Size size;
|
||||
if (!tokens[0].isEmpty()) {
|
||||
size = parseSize(tokens[0]);
|
||||
} else {
|
||||
size = null;
|
||||
}
|
||||
|
||||
int dpi;
|
||||
if (tokens.length >= 2) {
|
||||
dpi = Integer.parseInt(tokens[1]);
|
||||
if (dpi <= 0) {
|
||||
throw new IllegalArgumentException("Invalid non-positive dpi: " + tokens[1]);
|
||||
}
|
||||
} else {
|
||||
dpi = 0;
|
||||
}
|
||||
|
||||
return new NewDisplay(size, dpi);
|
||||
}
|
||||
}
|
||||
|
@ -9,16 +9,17 @@ import com.genymobile.scrcpy.audio.AudioRawRecorder;
|
||||
import com.genymobile.scrcpy.audio.AudioSource;
|
||||
import com.genymobile.scrcpy.control.ControlChannel;
|
||||
import com.genymobile.scrcpy.control.Controller;
|
||||
import com.genymobile.scrcpy.control.DeviceMessage;
|
||||
import com.genymobile.scrcpy.device.ConfigurationException;
|
||||
import com.genymobile.scrcpy.device.DesktopConnection;
|
||||
import com.genymobile.scrcpy.device.Device;
|
||||
import com.genymobile.scrcpy.device.NewDisplay;
|
||||
import com.genymobile.scrcpy.device.Streamer;
|
||||
import com.genymobile.scrcpy.util.Ln;
|
||||
import com.genymobile.scrcpy.util.LogUtils;
|
||||
import com.genymobile.scrcpy.util.Settings;
|
||||
import com.genymobile.scrcpy.util.SettingsException;
|
||||
import com.genymobile.scrcpy.video.CameraCapture;
|
||||
import com.genymobile.scrcpy.video.NewDisplayCapture;
|
||||
import com.genymobile.scrcpy.video.ScreenCapture;
|
||||
import com.genymobile.scrcpy.video.SurfaceCapture;
|
||||
import com.genymobile.scrcpy.video.SurfaceEncoder;
|
||||
@ -121,7 +122,7 @@ public final class Server {
|
||||
}
|
||||
|
||||
private static void scrcpy(Options options) throws IOException, ConfigurationException {
|
||||
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.S && options.getVideoSource() == VideoSource.CAMERA) {
|
||||
if (Build.VERSION.SDK_INT < AndroidVersions.API_31_ANDROID_12 && options.getVideoSource() == VideoSource.CAMERA) {
|
||||
Ln.e("Camera mirroring is not supported before Android 12");
|
||||
throw new ConfigurationException("Camera mirroring is not supported");
|
||||
}
|
||||
@ -129,8 +130,11 @@ public final class Server {
|
||||
CleanUp cleanUp = null;
|
||||
Thread initThread = null;
|
||||
|
||||
NewDisplay newDisplay = options.getNewDisplay();
|
||||
int displayId = newDisplay == null ? options.getDisplayId() : Device.DISPLAY_ID_NONE;
|
||||
|
||||
if (options.getCleanup()) {
|
||||
cleanUp = CleanUp.configure(options.getDisplayId());
|
||||
cleanUp = CleanUp.configure(displayId);
|
||||
initThread = startInitThread(options, cleanUp);
|
||||
}
|
||||
|
||||
@ -140,9 +144,6 @@ public final class Server {
|
||||
boolean video = options.getVideo();
|
||||
boolean audio = options.getAudio();
|
||||
boolean sendDummyByte = options.getSendDummyByte();
|
||||
boolean camera = video && options.getVideoSource() == VideoSource.CAMERA;
|
||||
|
||||
final Device device = camera ? null : new Device(options);
|
||||
|
||||
Workarounds.apply();
|
||||
|
||||
@ -154,13 +155,11 @@ public final class Server {
|
||||
connection.sendDeviceMeta(Device.getDeviceName());
|
||||
}
|
||||
|
||||
Controller controller = null;
|
||||
|
||||
if (control) {
|
||||
ControlChannel controlChannel = connection.getControlChannel();
|
||||
Controller controller = new Controller(device, controlChannel, cleanUp, options.getClipboardAutosync(), options.getPowerOn());
|
||||
device.setClipboardListener(text -> {
|
||||
DeviceMessage msg = DeviceMessage.createClipboard(text);
|
||||
controller.getSender().send(msg);
|
||||
});
|
||||
controller = new Controller(displayId, controlChannel, cleanUp, options.getClipboardAutosync(), options.getPowerOn());
|
||||
asyncProcessors.add(controller);
|
||||
}
|
||||
|
||||
@ -190,7 +189,13 @@ public final class Server {
|
||||
options.getSendFrameMeta());
|
||||
SurfaceCapture surfaceCapture;
|
||||
if (options.getVideoSource() == VideoSource.DISPLAY) {
|
||||
surfaceCapture = new ScreenCapture(device);
|
||||
if (newDisplay != null) {
|
||||
surfaceCapture = new NewDisplayCapture(controller, newDisplay, options.getMaxSize());
|
||||
} else {
|
||||
assert displayId != Device.DISPLAY_ID_NONE;
|
||||
surfaceCapture = new ScreenCapture(controller, displayId, options.getMaxSize(), options.getCrop(),
|
||||
options.getLockVideoOrientation());
|
||||
}
|
||||
} else {
|
||||
surfaceCapture = new CameraCapture(options.getCameraId(), options.getCameraFacing(), options.getCameraSize(),
|
||||
options.getMaxSize(), options.getCameraAspectRatio(), options.getCameraFps(), options.getCameraHighSpeed());
|
||||
@ -282,6 +287,11 @@ public final class Server {
|
||||
Workarounds.apply();
|
||||
Ln.i(LogUtils.buildCameraListMessage(options.getListCameraSizes()));
|
||||
}
|
||||
if (options.getListApps()) {
|
||||
Workarounds.apply();
|
||||
Ln.i("Processing Android apps... (this may take some time)");
|
||||
Ln.i(LogUtils.buildAppListMessage());
|
||||
}
|
||||
// Just print the requested data, do not mirror
|
||||
return;
|
||||
}
|
||||
|
@ -52,7 +52,7 @@ public final class Workarounds {
|
||||
}
|
||||
|
||||
public static void apply() {
|
||||
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.S) {
|
||||
if (Build.VERSION.SDK_INT >= AndroidVersions.API_31_ANDROID_12) {
|
||||
// On some Samsung devices, DisplayManagerGlobal.getDisplayInfoLocked() calls ActivityThread.currentActivityThread().getConfiguration(),
|
||||
// which requires a non-null ConfigurationController.
|
||||
// ConfigurationController was introduced in Android 12, so do not attempt to set it on lower versions.
|
||||
@ -155,7 +155,7 @@ public final class Workarounds {
|
||||
}
|
||||
}
|
||||
|
||||
@TargetApi(Build.VERSION_CODES.R)
|
||||
@TargetApi(AndroidVersions.API_30_ANDROID_11)
|
||||
@SuppressLint("WrongConstant,MissingPermission")
|
||||
public static AudioRecord createAudioRecord(int source, int sampleRate, int channelConfig, int channels, int channelMask, int encoding) throws
|
||||
AudioCaptureException {
|
||||
@ -226,7 +226,7 @@ public final class Workarounds {
|
||||
int[] session = new int[]{AudioManager.AUDIO_SESSION_ID_GENERATE};
|
||||
|
||||
int initResult;
|
||||
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.S) {
|
||||
if (Build.VERSION.SDK_INT < AndroidVersions.API_31_ANDROID_12) {
|
||||
// private native final int native_setup(Object audiorecord_this,
|
||||
// Object /*AudioAttributes*/ attributes,
|
||||
// int[] sampleRate, int channelMask, int channelIndexMask, int audioFormat,
|
||||
@ -252,7 +252,7 @@ public final class Workarounds {
|
||||
Method getParcelMethod = attributionSourceState.getClass().getDeclaredMethod("getParcel");
|
||||
Parcel attributionSourceParcel = (Parcel) getParcelMethod.invoke(attributionSourceState);
|
||||
|
||||
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.UPSIDE_DOWN_CAKE) {
|
||||
if (Build.VERSION.SDK_INT < AndroidVersions.API_34_ANDROID_14) {
|
||||
// private native int native_setup(Object audiorecordThis,
|
||||
// Object /*AudioAttributes*/ attributes,
|
||||
// int[] sampleRate, int channelMask, int channelIndexMask, int audioFormat,
|
||||
|
@ -1,5 +1,6 @@
|
||||
package com.genymobile.scrcpy.audio;
|
||||
|
||||
import com.genymobile.scrcpy.AndroidVersions;
|
||||
import com.genymobile.scrcpy.FakeContext;
|
||||
import com.genymobile.scrcpy.Workarounds;
|
||||
import com.genymobile.scrcpy.util.Ln;
|
||||
@ -45,11 +46,11 @@ public class AudioDirectCapture implements AudioCapture {
|
||||
}
|
||||
}
|
||||
|
||||
@TargetApi(Build.VERSION_CODES.M)
|
||||
@TargetApi(AndroidVersions.API_23_ANDROID_6_0)
|
||||
@SuppressLint({"WrongConstant", "MissingPermission"})
|
||||
private static AudioRecord createAudioRecord(int audioSource) {
|
||||
AudioRecord.Builder builder = new AudioRecord.Builder();
|
||||
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.S) {
|
||||
if (Build.VERSION.SDK_INT >= AndroidVersions.API_31_ANDROID_12) {
|
||||
// On older APIs, Workarounds.fillAppInfo() must be called beforehand
|
||||
builder.setContext(FakeContext.get());
|
||||
}
|
||||
@ -117,7 +118,7 @@ public class AudioDirectCapture implements AudioCapture {
|
||||
|
||||
@Override
|
||||
public void checkCompatibility() throws AudioCaptureException {
|
||||
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.R) {
|
||||
if (Build.VERSION.SDK_INT < AndroidVersions.API_30_ANDROID_11) {
|
||||
Ln.w("Audio disabled: it is not supported before Android 11");
|
||||
throw new AudioCaptureException();
|
||||
}
|
||||
@ -125,7 +126,7 @@ public class AudioDirectCapture implements AudioCapture {
|
||||
|
||||
@Override
|
||||
public void start() throws AudioCaptureException {
|
||||
if (Build.VERSION.SDK_INT == Build.VERSION_CODES.R) {
|
||||
if (Build.VERSION.SDK_INT == AndroidVersions.API_30_ANDROID_11) {
|
||||
startWorkaroundAndroid11();
|
||||
try {
|
||||
tryStartRecording(5, 100);
|
||||
@ -146,7 +147,7 @@ public class AudioDirectCapture implements AudioCapture {
|
||||
}
|
||||
|
||||
@Override
|
||||
@TargetApi(Build.VERSION_CODES.N)
|
||||
@TargetApi(AndroidVersions.API_24_ANDROID_7_0)
|
||||
public int read(ByteBuffer outDirectBuffer, MediaCodec.BufferInfo outBufferInfo) {
|
||||
return reader.read(outDirectBuffer, outBufferInfo);
|
||||
}
|
||||
|
@ -1,14 +1,15 @@
|
||||
package com.genymobile.scrcpy.audio;
|
||||
|
||||
import com.genymobile.scrcpy.AndroidVersions;
|
||||
import com.genymobile.scrcpy.AsyncProcessor;
|
||||
import com.genymobile.scrcpy.device.ConfigurationException;
|
||||
import com.genymobile.scrcpy.device.Streamer;
|
||||
import com.genymobile.scrcpy.util.Codec;
|
||||
import com.genymobile.scrcpy.util.CodecOption;
|
||||
import com.genymobile.scrcpy.util.CodecUtils;
|
||||
import com.genymobile.scrcpy.device.ConfigurationException;
|
||||
import com.genymobile.scrcpy.util.IO;
|
||||
import com.genymobile.scrcpy.util.Ln;
|
||||
import com.genymobile.scrcpy.util.LogUtils;
|
||||
import com.genymobile.scrcpy.device.Streamer;
|
||||
|
||||
import android.annotation.TargetApi;
|
||||
import android.media.MediaCodec;
|
||||
@ -93,7 +94,7 @@ public final class AudioEncoder implements AsyncProcessor {
|
||||
return format;
|
||||
}
|
||||
|
||||
@TargetApi(Build.VERSION_CODES.N)
|
||||
@TargetApi(AndroidVersions.API_24_ANDROID_7_0)
|
||||
private void inputThread(MediaCodec mediaCodec, AudioCapture capture) throws IOException, InterruptedException {
|
||||
final MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
|
||||
|
||||
@ -175,9 +176,9 @@ public final class AudioEncoder implements AsyncProcessor {
|
||||
}
|
||||
}
|
||||
|
||||
@TargetApi(Build.VERSION_CODES.M)
|
||||
@TargetApi(AndroidVersions.API_23_ANDROID_6_0)
|
||||
private void encode() throws IOException, ConfigurationException, AudioCaptureException {
|
||||
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.R) {
|
||||
if (Build.VERSION.SDK_INT < AndroidVersions.API_30_ANDROID_11) {
|
||||
Ln.w("Audio disabled: it is not supported before Android 11");
|
||||
streamer.writeDisableStream(false);
|
||||
return;
|
||||
@ -287,7 +288,13 @@ public final class AudioEncoder implements AsyncProcessor {
|
||||
if (encoderName != null) {
|
||||
Ln.d("Creating audio encoder by name: '" + encoderName + "'");
|
||||
try {
|
||||
return MediaCodec.createByCodecName(encoderName);
|
||||
MediaCodec mediaCodec = MediaCodec.createByCodecName(encoderName);
|
||||
String mimeType = Codec.getMimeType(mediaCodec);
|
||||
if (!codec.getMimeType().equals(mimeType)) {
|
||||
Ln.e("Audio encoder type for \"" + encoderName + "\" (" + mimeType + ") does not match codec type (" + codec.getMimeType() + ")");
|
||||
throw new ConfigurationException("Incorrect encoder type: " + encoderName);
|
||||
}
|
||||
return mediaCodec;
|
||||
} catch (IllegalArgumentException e) {
|
||||
Ln.e("Audio encoder '" + encoderName + "' for " + codec.getName() + " not found\n" + LogUtils.buildAudioEncoderListMessage());
|
||||
throw new ConfigurationException("Unknown encoder: " + encoderName);
|
||||
@ -308,7 +315,7 @@ public final class AudioEncoder implements AsyncProcessor {
|
||||
}
|
||||
|
||||
private final class EncoderCallback extends MediaCodec.Callback {
|
||||
@TargetApi(Build.VERSION_CODES.N)
|
||||
@TargetApi(AndroidVersions.API_24_ANDROID_7_0)
|
||||
@Override
|
||||
public void onInputBufferAvailable(MediaCodec codec, int index) {
|
||||
try {
|
||||
|
@ -1,5 +1,6 @@
|
||||
package com.genymobile.scrcpy.audio;
|
||||
|
||||
import com.genymobile.scrcpy.AndroidVersions;
|
||||
import com.genymobile.scrcpy.FakeContext;
|
||||
import com.genymobile.scrcpy.util.Ln;
|
||||
|
||||
@ -108,7 +109,7 @@ public final class AudioPlaybackCapture implements AudioCapture {
|
||||
|
||||
@Override
|
||||
public void checkCompatibility() throws AudioCaptureException {
|
||||
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.TIRAMISU) {
|
||||
if (Build.VERSION.SDK_INT < AndroidVersions.API_33_ANDROID_13) {
|
||||
Ln.w("Audio disabled: audio playback capture source not supported before Android 13");
|
||||
throw new AudioCaptureException();
|
||||
}
|
||||
@ -130,7 +131,7 @@ public final class AudioPlaybackCapture implements AudioCapture {
|
||||
}
|
||||
|
||||
@Override
|
||||
@TargetApi(Build.VERSION_CODES.N)
|
||||
@TargetApi(AndroidVersions.API_24_ANDROID_7_0)
|
||||
public int read(ByteBuffer outDirectBuffer, MediaCodec.BufferInfo outBufferInfo) {
|
||||
return reader.read(outDirectBuffer, outBufferInfo);
|
||||
}
|
||||
|
@@ -1,9 +1,10 @@
package com.genymobile.scrcpy.audio;

import com.genymobile.scrcpy.AndroidVersions;
import com.genymobile.scrcpy.AsyncProcessor;
import com.genymobile.scrcpy.device.Streamer;
import com.genymobile.scrcpy.util.IO;
import com.genymobile.scrcpy.util.Ln;
import com.genymobile.scrcpy.device.Streamer;

import android.media.MediaCodec;
import android.os.Build;
@@ -24,7 +25,7 @@ public final class AudioRawRecorder implements AsyncProcessor {
}

private void record() throws IOException, AudioCaptureException {
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.R) {
if (Build.VERSION.SDK_INT < AndroidVersions.API_30_ANDROID_11) {
Ln.w("Audio disabled: it is not supported before Android 11");
streamer.writeDisableStream(false);
return;
@@ -1,12 +1,12 @@
package com.genymobile.scrcpy.audio;

import com.genymobile.scrcpy.AndroidVersions;
import com.genymobile.scrcpy.util.Ln;

import android.annotation.TargetApi;
import android.media.AudioRecord;
import android.media.AudioTimestamp;
import android.media.MediaCodec;
import android.os.Build;

import java.nio.ByteBuffer;

@@ -26,7 +26,7 @@ public class AudioRecordReader {
this.recorder = recorder;
}

@TargetApi(Build.VERSION_CODES.N)
@TargetApi(AndroidVersions.API_24_ANDROID_7_0)
public int read(ByteBuffer outDirectBuffer, MediaCodec.BufferInfo outBufferInfo) {
int r = recorder.read(outDirectBuffer, AudioConfig.MAX_READ_SIZE);
if (r <= 0) {
@@ -23,6 +23,7 @@ public final class ControlMessage {
public static final int TYPE_UHID_INPUT = 13;
public static final int TYPE_UHID_DESTROY = 14;
public static final int TYPE_OPEN_HARD_KEYBOARD_SETTINGS = 15;
public static final int TYPE_START_APP = 16;

public static final long SEQUENCE_INVALID = 0;

@@ -155,6 +156,13 @@ public final class ControlMessage {
return msg;
}

public static ControlMessage createStartApp(String name) {
ControlMessage msg = new ControlMessage();
msg.type = TYPE_START_APP;
msg.text = name;
return msg;
}

public int getType() {
return type;
}
@@ -1,7 +1,7 @@
package com.genymobile.scrcpy.control;

import com.genymobile.scrcpy.util.Binary;
import com.genymobile.scrcpy.device.Position;
import com.genymobile.scrcpy.util.Binary;

import java.io.BufferedInputStream;
import java.io.DataInputStream;
@@ -53,6 +53,8 @@ public class ControlMessageReader {
return parseUhidInput();
case ControlMessage.TYPE_UHID_DESTROY:
return parseUhidDestroy();
case ControlMessage.TYPE_START_APP:
return parseStartApp();
default:
throw new ControlProtocolException("Unknown event type: " + type);
}
@@ -155,6 +157,11 @@ public class ControlMessageReader {
return ControlMessage.createUhidDestroy(id);
}

private ControlMessage parseStartApp() throws IOException {
String name = parseString(1);
return ControlMessage.createStartApp(name);
}

private Position parsePosition() throws IOException {
int x = dis.readInt();
int y = dis.readInt();
@@ -1,14 +1,20 @@
package com.genymobile.scrcpy.control;

import com.genymobile.scrcpy.AndroidVersions;
import com.genymobile.scrcpy.AsyncProcessor;
import com.genymobile.scrcpy.CleanUp;
import com.genymobile.scrcpy.device.Device;
import com.genymobile.scrcpy.util.Ln;
import com.genymobile.scrcpy.device.DeviceApp;
import com.genymobile.scrcpy.device.Point;
import com.genymobile.scrcpy.device.Position;
import com.genymobile.scrcpy.util.Ln;
import com.genymobile.scrcpy.util.LogUtils;
import com.genymobile.scrcpy.video.VirtualDisplayListener;
import com.genymobile.scrcpy.wrappers.ClipboardManager;
import com.genymobile.scrcpy.wrappers.InputManager;
import com.genymobile.scrcpy.wrappers.ServiceManager;

import android.content.IOnPrimaryClipChangedListener;
import android.content.Intent;
import android.os.Build;
import android.os.SystemClock;
@@ -18,11 +24,40 @@ import android.view.KeyEvent;
import android.view.MotionEvent;

import java.io.IOException;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;

public class Controller implements AsyncProcessor {
public class Controller implements AsyncProcessor, VirtualDisplayListener {

/*
* For event injection, there are two display ids:
* - the displayId passed to the constructor (which comes from --display-id passed by the client, 0 for the main display);
* - the virtualDisplayId used for mirroring, notified by the capture instance via the VirtualDisplayListener interface.
*
* (In case the ScreenCapture uses the "SurfaceControl API", then both ids are equals, but this is an implementation detail.)
*
* In order to make events work correctly in all cases:
* - virtualDisplayId must be used for events relative to the display (mouse and touch events with coordinates);
* - displayId must be used for other events (like key events).
*
* If a new separate virtual display is created (using --new-display), then displayId == Device.DISPLAY_ID_NONE. In that case, all events are
* sent to the virtual display id.
*/

private static final class DisplayData {
private final int virtualDisplayId;
private final PositionMapper positionMapper;

private DisplayData(int virtualDisplayId, PositionMapper positionMapper) {
this.virtualDisplayId = virtualDisplayId;
this.positionMapper = positionMapper;
}
}

private static final int DEFAULT_DEVICE_ID = 0;

@@ -30,12 +65,14 @@ public class Controller implements AsyncProcessor {
private static final int POINTER_ID_MOUSE = -1;

private static final ScheduledExecutorService EXECUTOR = Executors.newSingleThreadScheduledExecutor();
private ExecutorService startAppExecutor;

private Thread thread;

private UhidManager uhidManager;

private final Device device;
private final int displayId;
private final boolean supportsInputEvents;
private final ControlChannel controlChannel;
private final CleanUp cleanUp;
private final DeviceMessageSender sender;
@@ -44,6 +81,11 @@ public class Controller implements AsyncProcessor {

private final KeyCharacterMap charMap = KeyCharacterMap.load(KeyCharacterMap.VIRTUAL_KEYBOARD);

private final AtomicBoolean isSettingClipboard = new AtomicBoolean();

private final AtomicReference<DisplayData> displayData = new AtomicReference<>();
private final Object displayDataAvailable = new Object(); // condition variable

private long lastTouchDown;
private final PointersState pointersState = new PointersState();
private final MotionEvent.PointerProperties[] pointerProperties = new MotionEvent.PointerProperties[PointersState.MAX_POINTERS];
@@ -51,14 +93,54 @@ public class Controller implements AsyncProcessor {

private boolean keepPowerModeOff;

public Controller(Device device, ControlChannel controlChannel, CleanUp cleanUp, boolean clipboardAutosync, boolean powerOn) {
this.device = device;
public Controller(int displayId, ControlChannel controlChannel, CleanUp cleanUp, boolean clipboardAutosync, boolean powerOn) {
this.displayId = displayId;
this.controlChannel = controlChannel;
this.cleanUp = cleanUp;
this.clipboardAutosync = clipboardAutosync;
this.powerOn = powerOn;
initPointers();
sender = new DeviceMessageSender(controlChannel);

// main display or any display on Android >= Q
supportsInputEvents = Device.supportsInputEvents(displayId);
if (!supportsInputEvents) {
Ln.w("Input events are not supported for secondary displays before Android 10");
}

if (clipboardAutosync) {
// If control and autosync are enabled, synchronize Android clipboard to the computer automatically
ClipboardManager clipboardManager = ServiceManager.getClipboardManager();
if (clipboardManager != null) {
clipboardManager.addPrimaryClipChangedListener(new IOnPrimaryClipChangedListener.Stub() {
@Override
public void dispatchPrimaryClipChanged() {
if (isSettingClipboard.get()) {
// This is a notification for the change we are currently applying, ignore it
return;
}
String text = Device.getClipboardText();
if (text != null) {
DeviceMessage msg = DeviceMessage.createClipboard(text);
sender.send(msg);
}
}
});
} else {
Ln.w("No clipboard manager, copy-paste between device and computer will not work");
}
}
}

@Override
public void onNewVirtualDisplay(int virtualDisplayId, PositionMapper positionMapper) {
DisplayData data = new DisplayData(virtualDisplayId, positionMapper);
DisplayData old = this.displayData.getAndSet(data);
if (old == null) {
synchronized (displayDataAvailable) {
displayDataAvailable.notify();
}
}
}

private UhidManager getUhidManager() {
@@ -84,8 +166,8 @@ public class Controller implements AsyncProcessor {

private void control() throws IOException {
// on start, power on the device
if (powerOn && !Device.isScreenOn()) {
device.pressReleaseKeycode(KeyEvent.KEYCODE_POWER, Device.INJECT_MODE_ASYNC);
if (powerOn && displayId != Device.DISPLAY_ID_NONE && !Device.isScreenOn()) {
Device.pressReleaseKeycode(KeyEvent.KEYCODE_POWER, displayId, Device.INJECT_MODE_ASYNC);

// dirty hack
// After POWER is injected, the device is powered on asynchronously.
@@ -138,10 +220,6 @@ public class Controller implements AsyncProcessor {
sender.join();
}

public DeviceMessageSender getSender() {
return sender;
}

private boolean handleEvent() throws IOException {
ControlMessage msg;
try {
@@ -153,27 +231,27 @@ public class Controller implements AsyncProcessor {

switch (msg.getType()) {
case ControlMessage.TYPE_INJECT_KEYCODE:
if (device.supportsInputEvents()) {
if (supportsInputEvents) {
injectKeycode(msg.getAction(), msg.getKeycode(), msg.getRepeat(), msg.getMetaState());
}
break;
case ControlMessage.TYPE_INJECT_TEXT:
if (device.supportsInputEvents()) {
if (supportsInputEvents) {
injectText(msg.getText());
}
break;
case ControlMessage.TYPE_INJECT_TOUCH_EVENT:
if (device.supportsInputEvents()) {
if (supportsInputEvents) {
injectTouch(msg.getAction(), msg.getPointerId(), msg.getPosition(), msg.getPressure(), msg.getActionButton(), msg.getButtons());
}
break;
case ControlMessage.TYPE_INJECT_SCROLL_EVENT:
if (device.supportsInputEvents()) {
if (supportsInputEvents) {
injectScroll(msg.getPosition(), msg.getHScroll(), msg.getVScroll(), msg.getButtons());
}
break;
case ControlMessage.TYPE_BACK_OR_SCREEN_ON:
if (device.supportsInputEvents()) {
if (supportsInputEvents) {
pressBackOrTurnScreenOn(msg.getAction());
}
break;
@@ -193,7 +271,7 @@ public class Controller implements AsyncProcessor {
setClipboard(msg.getText(), msg.getPaste(), msg.getSequence());
break;
case ControlMessage.TYPE_SET_SCREEN_POWER_MODE:
if (device.supportsInputEvents()) {
if (supportsInputEvents) {
int mode = msg.getAction();
boolean setPowerModeOk = Device.setScreenPowerMode(mode);
if (setPowerModeOk) {
@@ -207,7 +285,7 @@ public class Controller implements AsyncProcessor {
}
break;
case ControlMessage.TYPE_ROTATE_DEVICE:
device.rotateDevice();
Device.rotateDevice(getActionDisplayId());
break;
case ControlMessage.TYPE_UHID_CREATE:
getUhidManager().open(msg.getId(), msg.getText(), msg.getData());
@@ -221,6 +299,9 @@ public class Controller implements AsyncProcessor {
case ControlMessage.TYPE_OPEN_HARD_KEYBOARD_SETTINGS:
openHardKeyboardSettings();
break;
case ControlMessage.TYPE_START_APP:
startAppAsync(msg.getText());
break;
default:
// do nothing
}
@@ -232,7 +313,7 @@ public class Controller implements AsyncProcessor {
if (keepPowerModeOff && action == KeyEvent.ACTION_UP && (keycode == KeyEvent.KEYCODE_POWER || keycode == KeyEvent.KEYCODE_WAKEUP)) {
schedulePowerModeOff();
}
return device.injectKeyEvent(action, keycode, repeat, metaState, Device.INJECT_MODE_ASYNC);
return injectKeyEvent(action, keycode, repeat, metaState, Device.INJECT_MODE_ASYNC);
}

private boolean injectChar(char c) {
@@ -242,8 +323,10 @@ public class Controller implements AsyncProcessor {
if (events == null) {
return false;
}

int actionDisplayId = getActionDisplayId();
for (KeyEvent event : events) {
if (!device.injectEvent(event, Device.INJECT_MODE_ASYNC)) {
if (!Device.injectEvent(event, actionDisplayId, Device.INJECT_MODE_ASYNC)) {
return false;
}
}
@@ -265,7 +348,12 @@ public class Controller implements AsyncProcessor {
private boolean injectTouch(int action, long pointerId, Position position, float pressure, int actionButton, int buttons) {
long now = SystemClock.uptimeMillis();

Point point = device.getPhysicalPoint(position);
// it hides the field on purpose, to read it from the atomic once
@SuppressWarnings("checkstyle:HiddenField")
DisplayData displayData = this.displayData.get();
assert displayData != null : "Cannot receive a touch event without a display";

Point point = displayData.positionMapper.map(position);
if (point == null) {
Ln.w("Ignore touch event, it was generated for a different device size");
return false;
@@ -318,13 +406,13 @@ public class Controller implements AsyncProcessor {
*
* Otherwise, Chrome does not work properly: <https://github.com/Genymobile/scrcpy/issues/3635>
*/
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M && source == InputDevice.SOURCE_MOUSE) {
if (Build.VERSION.SDK_INT >= AndroidVersions.API_23_ANDROID_6_0 && source == InputDevice.SOURCE_MOUSE) {
if (action == MotionEvent.ACTION_DOWN) {
if (actionButton == buttons) {
// First button pressed: ACTION_DOWN
MotionEvent downEvent = MotionEvent.obtain(lastTouchDown, now, MotionEvent.ACTION_DOWN, pointerCount, pointerProperties,
pointerCoords, 0, buttons, 1f, 1f, DEFAULT_DEVICE_ID, 0, source, 0);
if (!device.injectEvent(downEvent, Device.INJECT_MODE_ASYNC)) {
if (!Device.injectEvent(downEvent, displayData.virtualDisplayId, Device.INJECT_MODE_ASYNC)) {
return false;
}
}
@@ -335,7 +423,7 @@ public class Controller implements AsyncProcessor {
if (!InputManager.setActionButton(pressEvent, actionButton)) {
return false;
}
if (!device.injectEvent(pressEvent, Device.INJECT_MODE_ASYNC)) {
if (!Device.injectEvent(pressEvent, displayData.virtualDisplayId, Device.INJECT_MODE_ASYNC)) {
return false;
}

@@ -349,7 +437,7 @@ public class Controller implements AsyncProcessor {
if (!InputManager.setActionButton(releaseEvent, actionButton)) {
return false;
}
if (!device.injectEvent(releaseEvent, Device.INJECT_MODE_ASYNC)) {
if (!Device.injectEvent(releaseEvent, displayData.virtualDisplayId, Device.INJECT_MODE_ASYNC)) {
return false;
}

@@ -357,7 +445,7 @@ public class Controller implements AsyncProcessor {
// Last button released: ACTION_UP
MotionEvent upEvent = MotionEvent.obtain(lastTouchDown, now, MotionEvent.ACTION_UP, pointerCount, pointerProperties,
pointerCoords, 0, buttons, 1f, 1f, DEFAULT_DEVICE_ID, 0, source, 0);
if (!device.injectEvent(upEvent, Device.INJECT_MODE_ASYNC)) {
if (!Device.injectEvent(upEvent, displayData.virtualDisplayId, Device.INJECT_MODE_ASYNC)) {
return false;
}
}
@@ -368,14 +456,20 @@ public class Controller implements AsyncProcessor {

MotionEvent event = MotionEvent.obtain(lastTouchDown, now, action, pointerCount, pointerProperties, pointerCoords, 0, buttons, 1f, 1f,
DEFAULT_DEVICE_ID, 0, source, 0);
return device.injectEvent(event, Device.INJECT_MODE_ASYNC);
return Device.injectEvent(event, displayData.virtualDisplayId, Device.INJECT_MODE_ASYNC);
}

private boolean injectScroll(Position position, float hScroll, float vScroll, int buttons) {
long now = SystemClock.uptimeMillis();
Point point = device.getPhysicalPoint(position);

// it hides the field on purpose, to read it from the atomic once
@SuppressWarnings("checkstyle:HiddenField")
DisplayData displayData = this.displayData.get();
assert displayData != null : "Cannot receive a scroll event without a display";

Point point = displayData.positionMapper.map(position);
if (point == null) {
// ignore event
Ln.w("Ignore scroll event, it was generated for a different device size");
return false;
}

@@ -390,7 +484,7 @@ public class Controller implements AsyncProcessor {

MotionEvent event = MotionEvent.obtain(lastTouchDown, now, MotionEvent.ACTION_SCROLL, 1, pointerProperties, pointerCoords, 0, buttons, 1f, 1f,
DEFAULT_DEVICE_ID, 0, InputDevice.SOURCE_MOUSE, 0);
return device.injectEvent(event, Device.INJECT_MODE_ASYNC);
return Device.injectEvent(event, displayData.virtualDisplayId, Device.INJECT_MODE_ASYNC);
}

/**
@@ -405,7 +499,7 @@ public class Controller implements AsyncProcessor {

private boolean pressBackOrTurnScreenOn(int action) {
if (Device.isScreenOn()) {
return device.injectKeyEvent(action, KeyEvent.KEYCODE_BACK, 0, 0, Device.INJECT_MODE_ASYNC);
return injectKeyEvent(action, KeyEvent.KEYCODE_BACK, 0, 0, Device.INJECT_MODE_ASYNC);
}

// Screen is off
@@ -418,15 +512,15 @@ public class Controller implements AsyncProcessor {
if (keepPowerModeOff) {
schedulePowerModeOff();
}
return device.pressReleaseKeycode(KeyEvent.KEYCODE_POWER, Device.INJECT_MODE_ASYNC);
return pressReleaseKeycode(KeyEvent.KEYCODE_POWER, Device.INJECT_MODE_ASYNC);
}

private void getClipboard(int copyKey) {
// On Android >= 7, press the COPY or CUT key if requested
if (copyKey != ControlMessage.COPY_KEY_NONE && Build.VERSION.SDK_INT >= Build.VERSION_CODES.N && device.supportsInputEvents()) {
if (copyKey != ControlMessage.COPY_KEY_NONE && Build.VERSION.SDK_INT >= AndroidVersions.API_24_ANDROID_7_0 && supportsInputEvents) {
int key = copyKey == ControlMessage.COPY_KEY_COPY ? KeyEvent.KEYCODE_COPY : KeyEvent.KEYCODE_CUT;
// Wait until the event is finished, to ensure that the clipboard text we read just after is the correct one
device.pressReleaseKeycode(key, Device.INJECT_MODE_WAIT_FOR_FINISH);
pressReleaseKeycode(key, Device.INJECT_MODE_WAIT_FOR_FINISH);
}

// If clipboard autosync is enabled, then the device clipboard is synchronized to the computer clipboard whenever it changes, in
@@ -442,14 +536,16 @@ public class Controller implements AsyncProcessor {
}

private boolean setClipboard(String text, boolean paste, long sequence) {
boolean ok = device.setClipboardText(text);
isSettingClipboard.set(true);
boolean ok = Device.setClipboardText(text);
isSettingClipboard.set(false);
if (ok) {
Ln.i("Device clipboard set");
}

// On Android >= 7, also press the PASTE key if requested
if (paste && Build.VERSION.SDK_INT >= Build.VERSION_CODES.N && device.supportsInputEvents()) {
device.pressReleaseKeycode(KeyEvent.KEYCODE_PASTE, Device.INJECT_MODE_ASYNC);
if (paste && Build.VERSION.SDK_INT >= AndroidVersions.API_24_ANDROID_7_0 && supportsInputEvents) {
pressReleaseKeycode(KeyEvent.KEYCODE_PASTE, Device.INJECT_MODE_ASYNC);
}

if (sequence != ControlMessage.SEQUENCE_INVALID) {
@@ -465,4 +561,118 @@ public class Controller implements AsyncProcessor {
Intent intent = new Intent("android.settings.HARD_KEYBOARD_SETTINGS");
ServiceManager.getActivityManager().startActivity(intent);
}

private boolean injectKeyEvent(int action, int keyCode, int repeat, int metaState, int injectMode) {
return Device.injectKeyEvent(action, keyCode, repeat, metaState, getActionDisplayId(), injectMode);
}

private boolean pressReleaseKeycode(int keyCode, int injectMode) {
return Device.pressReleaseKeycode(keyCode, getActionDisplayId(), injectMode);
}

private int getActionDisplayId() {
if (displayId != Device.DISPLAY_ID_NONE) {
// Real screen mirrored, use the source display id
return displayId;
}

// Virtual display created by --new-display, use the virtualDisplayId
DisplayData data = displayData.get();
if (data == null) {
// If no virtual display id is initialized yet, use the main display id
return 0;
}

return data.virtualDisplayId;
}

private void startAppAsync(String name) {
if (startAppExecutor == null) {
startAppExecutor = Executors.newSingleThreadExecutor();
}

// Listing and selecting the app may take a lot of time
startAppExecutor.submit(() -> startApp(name));
}

private void startApp(String name) {
boolean forceStopBeforeStart = name.startsWith("+");
if (forceStopBeforeStart) {
name = name.substring(1);
}

DeviceApp app;
boolean searchByName = name.startsWith("?");
if (searchByName) {
name = name.substring(1);

Ln.i("Processing Android apps... (this may take some time)");
List<DeviceApp> apps = Device.findByName(name);
if (apps.isEmpty()) {
Ln.w("No app found for name \"" + name + "\"");
return;
}

if (apps.size() > 1) {
String title = "No unique app found for name \"" + name + "\":";
Ln.w(LogUtils.buildAppListMessage(title, apps));
return;
}

app = apps.get(0);
} else {
app = Device.findByPackageName(name);
if (app == null) {
Ln.w("No app found for package \"" + name + "\"");
return;
}
}

int startAppDisplayId = getStartAppDisplayId();
if (startAppDisplayId == Device.DISPLAY_ID_NONE) {
Ln.e("No known display id to start app \"" + name + "\"");
return;
}

Ln.i("Starting app \"" + app.getName() + "\" [" + app.getPackageName() + "] on display " + startAppDisplayId + "...");
Device.startApp(app.getPackageName(), startAppDisplayId, forceStopBeforeStart);
}

private int getStartAppDisplayId() {
if (displayId != Device.DISPLAY_ID_NONE) {
return displayId;
}

// Mirroring a new virtual display id (using --new-display-id feature)
try {
// Wait for at most 1 second until a virtual display id is known
DisplayData data = waitDisplayData(1000);
if (data != null) {
return data.virtualDisplayId;
}
} catch (InterruptedException e) {
// do nothing
}

// No display id available
return Device.DISPLAY_ID_NONE;
}

private DisplayData waitDisplayData(long timeoutMillis) throws InterruptedException {
long deadline = System.currentTimeMillis() + timeoutMillis;

synchronized (displayDataAvailable) {
DisplayData data = displayData.get();
while (data == null) {
long timeout = deadline - System.currentTimeMillis();
if (timeout < 0) {
return null;
}
displayDataAvailable.wait(timeout);
data = displayData.get();
}

return data;
}
}
}
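The startApp() logic above interprets two optional prefixes on the app name sent by the client: a leading "+" forces the app to be stopped before it is (re)started, and a leading "?" switches from exact package-name lookup to a case-insensitive label search. A small stand-alone sketch of that prefix handling (the class name and field layout here are illustrative, not part of the diff; the order matters, "+" is stripped before "?"):

final class StartAppRequest {
    final String name;
    final boolean forceStopBeforeStart;
    final boolean searchByName;

    private StartAppRequest(String name, boolean forceStopBeforeStart, boolean searchByName) {
        this.name = name;
        this.forceStopBeforeStart = forceStopBeforeStart;
        this.searchByName = searchByName;
    }

    // Mirrors the prefix parsing in Controller.startApp(): "+" = force-stop first, "?" = search by label.
    static StartAppRequest parse(String raw) {
        boolean forceStop = raw.startsWith("+");
        if (forceStop) {
            raw = raw.substring(1);
        }
        boolean byName = raw.startsWith("?");
        if (byName) {
            raw = raw.substring(1);
        }
        return new StartAppRequest(raw, forceStop, byName);
    }
}

For example, parse("+?settings") yields forceStopBeforeStart == true, searchByName == true and name == "settings", which matches how scrcpy's --start-app value is documented to combine both prefixes.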
@@ -0,0 +1,46 @@
package com.genymobile.scrcpy.control;

import com.genymobile.scrcpy.device.Point;
import com.genymobile.scrcpy.device.Position;
import com.genymobile.scrcpy.device.Size;
import com.genymobile.scrcpy.video.ScreenInfo;

import android.graphics.Rect;

public final class PositionMapper {

private final Size videoSize;
private final Rect contentRect;
private final int coordsRotation;

public PositionMapper(Size videoSize, Rect contentRect, int videoRotation) {
this.videoSize = videoSize;
this.contentRect = contentRect;
this.coordsRotation = reverseRotation(videoRotation);
}

public static PositionMapper from(ScreenInfo screenInfo) {
return new PositionMapper(screenInfo.getUnlockedVideoSize(), screenInfo.getContentRect(), screenInfo.getVideoRotation());
}

private static int reverseRotation(int rotation) {
return (4 - rotation) % 4;
}

public Point map(Position position) {
// reverse the video rotation to apply the events
Position devicePosition = position.rotate(coordsRotation);

Size clientVideoSize = devicePosition.getScreenSize();
if (!videoSize.equals(clientVideoSize)) {
// The client sends a click relative to a video with wrong dimensions,
// the device may have been rotated since the event was generated, so ignore the event
return null;
}

Point point = devicePosition.getPoint();
int convertedX = contentRect.left + point.getX() * contentRect.width() / videoSize.getWidth();
int convertedY = contentRect.top + point.getY() * contentRect.height() / videoSize.getHeight();
return new Point(convertedX, convertedY);
}
}
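As a quick sanity check of the arithmetic in map(), here is a worked example with made-up numbers (it assumes Position exposes an (x, y, screenWidth, screenHeight) constructor, which is not shown in this excerpt):

import android.graphics.Rect;

import com.genymobile.scrcpy.device.Point;
import com.genymobile.scrcpy.device.Position;
import com.genymobile.scrcpy.device.Size;

final class PositionMapperExample {
    static Point example() {
        // Illustrative values: a 1080x1920 video taken from a content rect offset 240px vertically, no rotation.
        PositionMapper mapper = new PositionMapper(new Size(1080, 1920), new Rect(0, 240, 1080, 2160), 0);
        // The client click is relative to the same 1080x1920 video size, so it is accepted, then scaled and offset:
        // x = 0 + 540 * 1080 / 1080 = 540, y = 240 + 960 * 1920 / 1920 = 1200.
        return mapper.map(new Position(540, 960, 1080, 1920));
    }
}

If the client had reported a different screen size (for instance after a rotation), videoSize.equals(clientVideoSize) would fail and map() would return null, which is exactly the case the Controller logs and drops.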
@@ -1,5 +1,6 @@
package com.genymobile.scrcpy.control;

import com.genymobile.scrcpy.AndroidVersions;
import com.genymobile.scrcpy.util.Ln;
import com.genymobile.scrcpy.util.StringUtils;

@@ -38,7 +39,7 @@ public final class UhidManager {

public UhidManager(DeviceMessageSender sender) {
this.sender = sender;
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
if (Build.VERSION.SDK_INT >= AndroidVersions.API_23_ANDROID_6_0) {
HandlerThread thread = new HandlerThread("UHidManager");
thread.start();
queue = thread.getLooper().getQueue();
@@ -71,7 +72,7 @@ public final class UhidManager {
}

private void registerUhidListener(int id, FileDescriptor fd) {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
if (Build.VERSION.SDK_INT >= AndroidVersions.API_23_ANDROID_6_0) {
queue.addOnFileDescriptorEventListener(fd, MessageQueue.OnFileDescriptorEventListener.EVENT_INPUT, (fd2, events) -> {
try {
buffer.clear();
@@ -97,7 +98,7 @@ public final class UhidManager {
}

private void unregisterUhidListener(FileDescriptor fd) {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
if (Build.VERSION.SDK_INT >= AndroidVersions.API_23_ANDROID_6_0) {
queue.removeOnFileDescriptorEventListener(fd);
}
}
@@ -1,9 +1,9 @@
package com.genymobile.scrcpy.device;

import com.genymobile.scrcpy.Options;
import com.genymobile.scrcpy.AndroidVersions;
import com.genymobile.scrcpy.FakeContext;
import com.genymobile.scrcpy.util.Ln;
import com.genymobile.scrcpy.util.LogUtils;
import com.genymobile.scrcpy.video.ScreenInfo;
import com.genymobile.scrcpy.wrappers.ActivityManager;
import com.genymobile.scrcpy.wrappers.ClipboardManager;
import com.genymobile.scrcpy.wrappers.DisplayControl;
import com.genymobile.scrcpy.wrappers.InputManager;
@@ -11,22 +11,28 @@ import com.genymobile.scrcpy.wrappers.ServiceManager;
import com.genymobile.scrcpy.wrappers.SurfaceControl;
import com.genymobile.scrcpy.wrappers.WindowManager;

import android.content.IOnPrimaryClipChangedListener;
import android.graphics.Rect;
import android.annotation.SuppressLint;
import android.content.Intent;
import android.app.ActivityOptions;
import android.content.pm.ApplicationInfo;
import android.content.pm.PackageManager;
import android.os.Build;
import android.os.Bundle;
import android.os.IBinder;
import android.os.SystemClock;
import android.view.IDisplayFoldListener;
import android.view.IRotationWatcher;
import android.view.InputDevice;
import android.view.InputEvent;
import android.view.KeyCharacterMap;
import android.view.KeyEvent;

import java.util.concurrent.atomic.AtomicBoolean;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;

public final class Device {

public static final int DISPLAY_ID_NONE = -1;

public static final int POWER_MODE_OFF = SurfaceControl.POWER_MODE_OFF;
public static final int POWER_MODE_NORMAL = SurfaceControl.POWER_MODE_NORMAL;

@@ -37,177 +43,8 @@ public final class Device {
public static final int LOCK_VIDEO_ORIENTATION_UNLOCKED = -1;
public static final int LOCK_VIDEO_ORIENTATION_INITIAL = -2;

public interface RotationListener {
void onRotationChanged(int rotation);
}

public interface FoldListener {
void onFoldChanged(int displayId, boolean folded);
}

public interface ClipboardListener {
void onClipboardTextChanged(String text);
}

private final Rect crop;
private int maxSize;
private final int lockVideoOrientation;

private Size deviceSize;
private ScreenInfo screenInfo;
private RotationListener rotationListener;
private FoldListener foldListener;
private ClipboardListener clipboardListener;
private final AtomicBoolean isSettingClipboard = new AtomicBoolean();

/**
* Logical display identifier
*/
private final int displayId;

/**
* The surface flinger layer stack associated with this logical display
*/
private final int layerStack;

private final boolean supportsInputEvents;

public Device(Options options) throws ConfigurationException {
displayId = options.getDisplayId();
DisplayInfo displayInfo = ServiceManager.getDisplayManager().getDisplayInfo(displayId);
if (displayInfo == null) {
Ln.e("Display " + displayId + " not found\n" + LogUtils.buildDisplayListMessage());
throw new ConfigurationException("Unknown display id: " + displayId);
}

int displayInfoFlags = displayInfo.getFlags();

deviceSize = displayInfo.getSize();
crop = options.getCrop();
maxSize = options.getMaxSize();
lockVideoOrientation = options.getLockVideoOrientation();

screenInfo = ScreenInfo.computeScreenInfo(displayInfo.getRotation(), deviceSize, crop, maxSize, lockVideoOrientation);
layerStack = displayInfo.getLayerStack();

ServiceManager.getWindowManager().registerRotationWatcher(new IRotationWatcher.Stub() {
@Override
public void onRotationChanged(int rotation) {
synchronized (Device.this) {
screenInfo = screenInfo.withDeviceRotation(rotation);

// notify
if (rotationListener != null) {
rotationListener.onRotationChanged(rotation);
}
}
}
}, displayId);

if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
ServiceManager.getWindowManager().registerDisplayFoldListener(new IDisplayFoldListener.Stub() {
@Override
public void onDisplayFoldChanged(int displayId, boolean folded) {
if (Device.this.displayId != displayId) {
// Ignore events related to other display ids
return;
}

synchronized (Device.this) {
DisplayInfo displayInfo = ServiceManager.getDisplayManager().getDisplayInfo(displayId);
if (displayInfo == null) {
Ln.e("Display " + displayId + " not found\n" + LogUtils.buildDisplayListMessage());
return;
}

deviceSize = displayInfo.getSize();
screenInfo = ScreenInfo.computeScreenInfo(displayInfo.getRotation(), deviceSize, crop, maxSize, lockVideoOrientation);
// notify
if (foldListener != null) {
foldListener.onFoldChanged(displayId, folded);
}
}
}
});
}

if (options.getControl() && options.getClipboardAutosync()) {
// If control and autosync are enabled, synchronize Android clipboard to the computer automatically
ClipboardManager clipboardManager = ServiceManager.getClipboardManager();
if (clipboardManager != null) {
clipboardManager.addPrimaryClipChangedListener(new IOnPrimaryClipChangedListener.Stub() {
@Override
public void dispatchPrimaryClipChanged() {
if (isSettingClipboard.get()) {
// This is a notification for the change we are currently applying, ignore it
return;
}
synchronized (Device.this) {
if (clipboardListener != null) {
String text = getClipboardText();
if (text != null) {
clipboardListener.onClipboardTextChanged(text);
}
}
}
}
});
} else {
Ln.w("No clipboard manager, copy-paste between device and computer will not work");
}
}

if ((displayInfoFlags & DisplayInfo.FLAG_SUPPORTS_PROTECTED_BUFFERS) == 0) {
Ln.w("Display doesn't have FLAG_SUPPORTS_PROTECTED_BUFFERS flag, mirroring can be restricted");
}

// main display or any display on Android >= Q
supportsInputEvents = displayId == 0 || Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q;
if (!supportsInputEvents) {
Ln.w("Input events are not supported for secondary displays before Android 10");
}
}

public int getDisplayId() {
return displayId;
}

public synchronized void setMaxSize(int newMaxSize) {
maxSize = newMaxSize;
screenInfo = ScreenInfo.computeScreenInfo(screenInfo.getReverseVideoRotation(), deviceSize, crop, newMaxSize, lockVideoOrientation);
}

public synchronized ScreenInfo getScreenInfo() {
return screenInfo;
}

public int getLayerStack() {
return layerStack;
}

public Point getPhysicalPoint(Position position) {
// it hides the field on purpose, to read it with a lock
@SuppressWarnings("checkstyle:HiddenField")
ScreenInfo screenInfo = getScreenInfo(); // read with synchronization

// ignore the locked video orientation, the events will apply in coordinates considered in the physical device orientation
Size unlockedVideoSize = screenInfo.getUnlockedVideoSize();

int reverseVideoRotation = screenInfo.getReverseVideoRotation();
// reverse the video rotation to apply the events
Position devicePosition = position.rotate(reverseVideoRotation);

Size clientVideoSize = devicePosition.getScreenSize();
if (!unlockedVideoSize.equals(clientVideoSize)) {
// The client sends a click relative to a video with wrong dimensions,
// the device may have been rotated since the event was generated, so ignore the event
return null;
}
Rect contentRect = screenInfo.getContentRect();
Point point = devicePosition.getPoint();
int convertedX = contentRect.left + point.getX() * contentRect.width() / unlockedVideoSize.getWidth();
int convertedY = contentRect.top + point.getY() * contentRect.height() / unlockedVideoSize.getHeight();
return new Point(convertedX, convertedY);
private Device() {
// not instantiable
}

public static String getDeviceName() {
@@ -215,11 +52,7 @@ public final class Device {
}

public static boolean supportsInputEvents(int displayId) {
return displayId == 0 || Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q;
}

public boolean supportsInputEvents() {
return supportsInputEvents;
return displayId == 0 || Build.VERSION.SDK_INT >= AndroidVersions.API_29_ANDROID_10;
}

public static boolean injectEvent(InputEvent inputEvent, int displayId, int injectMode) {
@@ -234,10 +67,6 @@ public final class Device {
return ServiceManager.getInputManager().injectInputEvent(inputEvent, injectMode);
}

public boolean injectEvent(InputEvent event, int injectMode) {
return injectEvent(event, displayId, injectMode);
}

public static boolean injectKeyEvent(int action, int keyCode, int repeat, int metaState, int displayId, int injectMode) {
long now = SystemClock.uptimeMillis();
KeyEvent event = new KeyEvent(now, now, action, keyCode, repeat, metaState, KeyCharacterMap.VIRTUAL_KEYBOARD, 0, 0,
@@ -245,35 +74,15 @@ public final class Device {
return injectEvent(event, displayId, injectMode);
}

public boolean injectKeyEvent(int action, int keyCode, int repeat, int metaState, int injectMode) {
return injectKeyEvent(action, keyCode, repeat, metaState, displayId, injectMode);
}

public static boolean pressReleaseKeycode(int keyCode, int displayId, int injectMode) {
return injectKeyEvent(KeyEvent.ACTION_DOWN, keyCode, 0, 0, displayId, injectMode)
&& injectKeyEvent(KeyEvent.ACTION_UP, keyCode, 0, 0, displayId, injectMode);
}

public boolean pressReleaseKeycode(int keyCode, int injectMode) {
return pressReleaseKeycode(keyCode, displayId, injectMode);
}

public static boolean isScreenOn() {
return ServiceManager.getPowerManager().isScreenOn();
}

public synchronized void setRotationListener(RotationListener rotationListener) {
this.rotationListener = rotationListener;
}

public synchronized void setFoldListener(FoldListener foldlistener) {
this.foldListener = foldlistener;
}

public synchronized void setClipboardListener(ClipboardListener clipboardListener) {
this.clipboardListener = clipboardListener;
}

public static void expandNotificationPanel() {
ServiceManager.getStatusBarManager().expandNotificationsPanel();
}
@@ -298,7 +107,7 @@ public final class Device {
return s.toString();
}

public boolean setClipboardText(String text) {
public static boolean setClipboardText(String text) {
ClipboardManager clipboardManager = ServiceManager.getClipboardManager();
if (clipboardManager == null) {
return false;
@@ -313,20 +122,17 @@ public final class Device {
return false;
}

isSettingClipboard.set(true);
boolean ok = clipboardManager.setText(text);
isSettingClipboard.set(false);
return ok;
return clipboardManager.setText(text);
}

/**
* @param mode one of the {@code POWER_MODE_*} constants
*/
public static boolean setScreenPowerMode(int mode) {
boolean applyToMultiPhysicalDisplays = Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q;
boolean applyToMultiPhysicalDisplays = Build.VERSION.SDK_INT >= AndroidVersions.API_29_ANDROID_10;

if (applyToMultiPhysicalDisplays
&& Build.VERSION.SDK_INT >= Build.VERSION_CODES.UPSIDE_DOWN_CAKE
&& Build.VERSION.SDK_INT >= AndroidVersions.API_34_ANDROID_14
&& Build.BRAND.equalsIgnoreCase("honor")
&& SurfaceControl.hasGetBuildInDisplayMethod()) {
// Workaround for Honor devices with Android 14:
@@ -338,7 +144,7 @@ public final class Device {
if (applyToMultiPhysicalDisplays) {
// On Android 14, these internal methods have been moved to DisplayControl
boolean useDisplayControl =
Build.VERSION.SDK_INT >= Build.VERSION_CODES.UPSIDE_DOWN_CAKE && !SurfaceControl.hasGetPhysicalDisplayIdsMethod();
Build.VERSION.SDK_INT >= AndroidVersions.API_34_ANDROID_14 && !SurfaceControl.hasGetPhysicalDisplayIdsMethod();

// Change the power mode for all physical displays
long[] physicalDisplayIds = useDisplayControl ? DisplayControl.getPhysicalDisplayIds() : SurfaceControl.getPhysicalDisplayIds();
@@ -366,6 +172,8 @@ public final class Device {
}

public static boolean powerOffScreen(int displayId) {
assert displayId != DISPLAY_ID_NONE;

if (!isScreenOn()) {
return true;
}
@@ -375,7 +183,9 @@ public final class Device {
/**
* Disable auto-rotation (if enabled), set the screen rotation and re-enable auto-rotation (if it was enabled).
*/
public void rotateDevice() {
public static void rotateDevice(int displayId) {
assert displayId != DISPLAY_ID_NONE;

WindowManager wm = ServiceManager.getWindowManager();

boolean accelerometerRotation = !wm.isRotationFrozen(displayId);
@@ -394,6 +204,8 @@ public final class Device {
}

private static int getCurrentRotation(int displayId) {
assert displayId != DISPLAY_ID_NONE;

if (displayId == 0) {
return ServiceManager.getWindowManager().getRotation();
}
@@ -401,4 +213,98 @@ public final class Device {
DisplayInfo displayInfo = ServiceManager.getDisplayManager().getDisplayInfo(displayId);
return displayInfo.getRotation();
}

public static List<DeviceApp> listApps() {
List<DeviceApp> apps = new ArrayList<>();
PackageManager pm = FakeContext.get().getPackageManager();
for (ApplicationInfo appInfo : getLaunchableApps(pm)) {
apps.add(toApp(pm, appInfo));
}

return apps;
}

@SuppressLint("QueryPermissionsNeeded")
private static List<ApplicationInfo> getLaunchableApps(PackageManager pm) {
List<ApplicationInfo> result = new ArrayList<>();
for (ApplicationInfo appInfo : pm.getInstalledApplications(PackageManager.GET_META_DATA)) {
if (getLaunchIntent(pm, appInfo.packageName) != null) {
result.add(appInfo);
}
}

return result;
}

public static Intent getLaunchIntent(PackageManager pm, String packageName) {
Intent launchIntent = pm.getLaunchIntentForPackage(packageName);
if (launchIntent != null) {
return launchIntent;
}

return pm.getLeanbackLaunchIntentForPackage(packageName);
}

private static DeviceApp toApp(PackageManager pm, ApplicationInfo appInfo) {
String name = pm.getApplicationLabel(appInfo).toString();
boolean system = (appInfo.flags & ApplicationInfo.FLAG_SYSTEM) != 0;
return new DeviceApp(appInfo.packageName, name, system, appInfo.enabled);
}

@SuppressLint("QueryPermissionsNeeded")
public static DeviceApp findByPackageName(String packageName) {
PackageManager pm = FakeContext.get().getPackageManager();
// No need to filter by "launchable" apps, an error will be reported on start if the app is not launchable
for (ApplicationInfo appInfo : pm.getInstalledApplications(PackageManager.GET_META_DATA)) {
if (packageName.equals(appInfo.packageName)) {
return toApp(pm, appInfo);
}
}

return null;
}

@SuppressLint("QueryPermissionsNeeded")
public static List<DeviceApp> findByName(String searchName) {
List<DeviceApp> result = new ArrayList<>();
searchName = searchName.toLowerCase(Locale.getDefault());

PackageManager pm = FakeContext.get().getPackageManager();
for (ApplicationInfo appInfo : getLaunchableApps(pm)) {
if (appInfo.enabled) {
String name = pm.getApplicationLabel(appInfo).toString();
if (name.toLowerCase(Locale.getDefault()).startsWith(searchName)) {
boolean system = (appInfo.flags & ApplicationInfo.FLAG_SYSTEM) != 0;
result.add(new DeviceApp(appInfo.packageName, name, system, appInfo.enabled));
}
}
}

return result;
}

public static void startApp(String packageName, int displayId, boolean forceStop) {
PackageManager pm = FakeContext.get().getPackageManager();

Intent launchIntent = getLaunchIntent(pm, packageName);
if (launchIntent == null) {
Ln.w("Cannot create launch intent for app " + packageName);
return;
}

launchIntent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);

Bundle options = null;
if (Build.VERSION.SDK_INT >= AndroidVersions.API_26_ANDROID_8_0) {
ActivityOptions launchOptions = ActivityOptions.makeBasic();
launchOptions.setLaunchDisplayId(displayId);
options = launchOptions.toBundle();
}

ActivityManager am = ServiceManager.getActivityManager();
if (forceStop) {
am.forceStopPackage(packageName);
}
am.startActivity(launchIntent, options);
}
}
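Device.startApp() above routes the launch intent to the mirrored or newly created display via ActivityOptions.setLaunchDisplayId(), available since Android 8.0. Stripped of scrcpy's FakeContext and ActivityManager wrappers, the same idea with only the public framework API looks roughly like this (a sketch for an ordinary privileged context, not the scrcpy code path):

import android.app.ActivityOptions;
import android.content.Context;
import android.content.Intent;
import android.os.Build;
import android.os.Bundle;

final class DisplayLaunch {
    // Minimal sketch: start a package's launcher activity on a given display id.
    static void launchOnDisplay(Context context, String packageName, int displayId) {
        Intent intent = context.getPackageManager().getLaunchIntentForPackage(packageName);
        if (intent == null) {
            return; // app is not launchable
        }
        intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);

        Bundle options = null;
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
            ActivityOptions launchOptions = ActivityOptions.makeBasic();
            // Launching on a non-default display generally requires system-level privileges,
            // which is why scrcpy performs this through its shell-based ActivityManager wrapper.
            launchOptions.setLaunchDisplayId(displayId);
            options = launchOptions.toBundle();
        }
        context.startActivity(intent, options);
    }
}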
@@ -0,0 +1,32 @@
package com.genymobile.scrcpy.device;

public final class DeviceApp {

private final String packageName;
private final String name;
private final boolean system;
private final boolean enabled;

public DeviceApp(String packageName, String name, boolean system, boolean enabled) {
this.packageName = packageName;
this.name = name;
this.system = system;
this.enabled = enabled;
}

public String getPackageName() {
return packageName;
}

public String getName() {
return name;
}

public boolean isSystem() {
return system;
}

public boolean isEnabled() {
return enabled;
}
}
@@ -6,15 +6,17 @@ public final class DisplayInfo {
private final int rotation;
private final int layerStack;
private final int flags;
private final int dpi;

public static final int FLAG_SUPPORTS_PROTECTED_BUFFERS = 0x00000001;

public DisplayInfo(int displayId, Size size, int rotation, int layerStack, int flags) {
public DisplayInfo(int displayId, Size size, int rotation, int layerStack, int flags, int dpi) {
this.displayId = displayId;
this.size = size;
this.rotation = rotation;
this.layerStack = layerStack;
this.flags = flags;
this.dpi = dpi;
}

public int getDisplayId() {
@@ -36,5 +38,9 @@ public final class DisplayInfo {
public int getFlags() {
return flags;
}

public int getDpi() {
return dpi;
}
}
@@ -0,0 +1,31 @@
package com.genymobile.scrcpy.device;

public final class NewDisplay {
private Size size;
private int dpi;

public NewDisplay() {
// Auto size and dpi
}

public NewDisplay(Size size, int dpi) {
this.size = size;
this.dpi = dpi;
}

public Size getSize() {
return size;
}

public int getDpi() {
return dpi;
}

public boolean hasExplicitSize() {
return size != null;
}

public boolean hasExplicitDpi() {
return dpi != 0;
}
}
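NewDisplay uses a null size and a dpi of 0 as "auto" sentinels, so a consumer can fall back to the mirrored display's current values when nothing explicit was requested. The code that actually creates the virtual display is not part of this excerpt; the following is only a hypothetical sketch of that fallback, with illustrative names:

import com.genymobile.scrcpy.device.NewDisplay;
import com.genymobile.scrcpy.device.Size;

final class NewDisplayDefaults {
    // Prefer the values given with --new-display, otherwise fall back to the current display's size/dpi.
    static Size effectiveSize(NewDisplay newDisplay, Size fallbackSize) {
        return newDisplay.hasExplicitSize() ? newDisplay.getSize() : fallbackSize;
    }

    static int effectiveDpi(NewDisplay newDisplay, int fallbackDpi) {
        return newDisplay.hasExplicitDpi() ? newDisplay.getDpi() : fallbackDpi;
    }
}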
@@ -21,6 +21,10 @@ public final class Size {
return height;
}

public int getMax() {
return Math.max(width, height);
}

public Size rotate() {
return new Size(height, width);
}
@@ -1,5 +1,7 @@
package com.genymobile.scrcpy.util;

import android.media.MediaCodec;

public interface Codec {

enum Type {
@@ -14,4 +16,9 @@ public interface Codec {
String getName();

String getMimeType();

static String getMimeType(MediaCodec codec) {
String[] types = codec.getCodecInfo().getSupportedTypes();
return types.length > 0 ? types[0] : null;
}
}
@@ -1,7 +1,9 @@
package com.genymobile.scrcpy.util;

import com.genymobile.scrcpy.AndroidVersions;
import com.genymobile.scrcpy.BuildConfig;

import android.os.Build;
import android.system.ErrnoException;
import android.system.Os;
import android.system.OsConstants;
@@ -17,23 +19,38 @@ public final class IO {
// not instantiable
}

public static void writeFully(FileDescriptor fd, ByteBuffer from) throws IOException {
// ByteBuffer position is not updated as expected by Os.write() on old Android versions, so
// count the remaining bytes manually.
// See <https://github.com/Genymobile/scrcpy/issues/291>.
int remaining = from.remaining();
while (remaining > 0) {
private static int write(FileDescriptor fd, ByteBuffer from) throws IOException {
while (true) {
try {
int w = Os.write(fd, from);
return Os.write(fd, from);
} catch (ErrnoException e) {
if (e.errno != OsConstants.EINTR) {
throw new IOException(e);
}
}
}
}

public static void writeFully(FileDescriptor fd, ByteBuffer from) throws IOException {
if (Build.VERSION.SDK_INT >= AndroidVersions.API_23_ANDROID_6_0) {
while (from.hasRemaining()) {
write(fd, from);
}
} else {
// ByteBuffer position is not updated as expected by Os.write() on old Android versions, so
// handle the position and the remaining bytes manually.
// See <https://github.com/Genymobile/scrcpy/issues/291>.
int position = from.position();
int remaining = from.remaining();
while (remaining > 0) {
int w = write(fd, from);
if (BuildConfig.DEBUG && w < 0) {
// w should not be negative, since an exception is thrown on error
throw new AssertionError("Os.write() returned a negative value (" + w + ")");
}
remaining -= w;
} catch (ErrnoException e) {
if (e.errno != OsConstants.EINTR) {
throw new IOException(e);
}
}
position += w;
from.position(position);
}
}
}
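The rewritten IO.writeFully() above separates two concerns: Os.write() is retried while it fails with EINTR, and only on pre-Android-6 devices is the ByteBuffer position advanced by hand (older Os.write() implementations did not update it, see issue #291 referenced in the code). A condensed sketch of the same two ideas in isolation (class and method names here are illustrative, not the scrcpy class itself):

import android.system.ErrnoException;
import android.system.Os;
import android.system.OsConstants;

import java.io.FileDescriptor;
import java.io.IOException;
import java.nio.ByteBuffer;

final class RetryingWrite {
    // Retry Os.write() while it is interrupted by a signal (EINTR); any other errno becomes an IOException.
    static int writeRetrying(FileDescriptor fd, ByteBuffer from) throws IOException {
        while (true) {
            try {
                return Os.write(fd, from);
            } catch (ErrnoException e) {
                if (e.errno != OsConstants.EINTR) {
                    throw new IOException(e);
                }
            }
        }
    }

    // Fallback loop for old Android versions where Os.write() does not advance the buffer position.
    static void writeFullyLegacy(FileDescriptor fd, ByteBuffer from) throws IOException {
        int position = from.position();
        int remaining = from.remaining();
        while (remaining > 0) {
            int w = writeRetrying(fd, from);
            remaining -= w;
            position += w;
            from.position(position); // advance manually, since the old Os.write() does not
        }
    }
}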
@@ -1,10 +1,13 @@
package com.genymobile.scrcpy.util;

import com.genymobile.scrcpy.device.Device;
import com.genymobile.scrcpy.device.DeviceApp;
import com.genymobile.scrcpy.device.DisplayInfo;
import com.genymobile.scrcpy.device.Size;
import com.genymobile.scrcpy.wrappers.DisplayManager;
import com.genymobile.scrcpy.wrappers.ServiceManager;

import android.annotation.SuppressLint;
import android.graphics.Rect;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCharacteristics;
@@ -13,7 +16,9 @@ import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.MediaCodec;
import android.util.Range;

import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.SortedSet;
import java.util.TreeSet;

@@ -31,7 +36,7 @@ public final class LogUtils {
} else {
for (CodecUtils.DeviceEncoder encoder : videoEncoders) {
builder.append("\n --video-codec=").append(encoder.getCodec().getName());
builder.append(" --video-encoder='").append(encoder.getInfo().getName()).append("'");
builder.append(" --video-encoder=").append(encoder.getInfo().getName());
}
}
return builder.toString();
@@ -45,7 +50,7 @@ public final class LogUtils {
} else {
for (CodecUtils.DeviceEncoder encoder : audioEncoders) {
builder.append("\n --audio-codec=").append(encoder.getCodec().getName());
builder.append(" --audio-encoder='").append(encoder.getInfo().getName()).append("'");
builder.append(" --audio-encoder=").append(encoder.getInfo().getName());
}
}
return builder.toString();
@@ -154,4 +159,59 @@ public final class LogUtils {
}
return set;
}

public static String buildAppListMessage() {
List<DeviceApp> apps = Device.listApps();
return buildAppListMessage("List of apps:", apps);
}

@SuppressLint("QueryPermissionsNeeded")
public static String buildAppListMessage(String title, List<DeviceApp> apps) {
StringBuilder builder = new StringBuilder(title);

// Sort by:
// 1. system flag (system apps are before non-system apps)
// 2. name
// 3. package name
Collections.sort(apps, (thisApp, otherApp) -> {
// System apps first
int cmp = -Boolean.compare(thisApp.isSystem(), otherApp.isSystem());
if (cmp != 0) {
return cmp;
}

cmp = Objects.compare(thisApp.getName(), otherApp.getName(), String::compareTo);
if (cmp != 0) {
return cmp;
}

return Objects.compare(thisApp.getPackageName(), otherApp.getPackageName(), String::compareTo);
});

final int column = 30;
for (DeviceApp app : apps) {
String name = app.getName();
int padding = column - name.length();
builder.append("\n ");
if (app.isSystem()) {
builder.append("* ");
} else {
builder.append("- ");
}
builder.append(name);
if (padding > 0) {
builder.append(String.format("%" + padding + "s", " "));
} else {
builder.append("\n ").append(String.format("%" + column + "s", " "));
}
builder.append(" [").append(app.getPackageName()).append(']');
if (!app.isEnabled()) {
builder.append(" (disabled)");
}
}

return builder.toString();
}
}
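To make the formatting in buildAppListMessage() concrete: each label is padded to a 30-character column with String.format("%Ns", " "), system apps get a "*" marker, user apps a "-", the package name follows in brackets, and disabled apps get a "(disabled)" suffix (a label longer than the column wraps the package name onto an indented continuation line). With hypothetical entries, the --list-apps output produced from this message would look roughly like:

 * Settings                       [com.android.settings]
 - Firefox                        [org.mozilla.firefox] (disabled)

For example, a 8-character label leaves padding = 22, so the format string becomes "%22s" applied to a single space, i.e. 22 spaces between the label and the bracketed package name.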
@ -1,5 +1,6 @@
|
||||
package com.genymobile.scrcpy.util;
|
||||
|
||||
import com.genymobile.scrcpy.AndroidVersions;
|
||||
import com.genymobile.scrcpy.wrappers.ContentProvider;
|
||||
import com.genymobile.scrcpy.wrappers.ServiceManager;
|
||||
|
||||
@ -34,7 +35,7 @@ public final class Settings {
|
||||
}
|
||||
|
||||
public static String getValue(String table, String key) throws SettingsException {
|
||||
if (Build.VERSION.SDK_INT <= Build.VERSION_CODES.R) {
|
||||
if (Build.VERSION.SDK_INT <= AndroidVersions.API_30_ANDROID_11) {
|
||||
// on Android >= 12, it always fails: <https://github.com/Genymobile/scrcpy/issues/2788>
|
||||
try (ContentProvider provider = ServiceManager.getActivityManager().createSettingsProvider()) {
|
||||
return provider.getValue(table, key);
|
||||
@ -47,7 +48,7 @@ public final class Settings {
|
||||
}
|
||||
|
||||
public static void putValue(String table, String key, String value) throws SettingsException {
|
||||
if (Build.VERSION.SDK_INT <= Build.VERSION_CODES.R) {
|
||||
if (Build.VERSION.SDK_INT <= AndroidVersions.API_30_ANDROID_11) {
|
||||
// on Android >= 12, it always fails: <https://github.com/Genymobile/scrcpy/issues/2788>
|
||||
try (ContentProvider provider = ServiceManager.getActivityManager().createSettingsProvider()) {
|
||||
provider.putValue(table, key, value);
|
||||
@ -60,7 +61,7 @@ public final class Settings {
|
||||
}
|
||||
|
||||
public static String getAndPutValue(String table, String key, String value) throws SettingsException {
|
||||
if (Build.VERSION.SDK_INT <= Build.VERSION_CODES.R) {
|
||||
if (Build.VERSION.SDK_INT <= AndroidVersions.API_30_ANDROID_11) {
|
||||
// on Android >= 12, it always fails: <https://github.com/Genymobile/scrcpy/issues/2788>
|
||||
try (ContentProvider provider = ServiceManager.getActivityManager().createSettingsProvider()) {
|
||||
String oldValue = provider.getValue(table, key);
|
||||
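These hunks only touch the ContentProvider path, which (per the comments) always fails on Android 12 and later, so a fallback is required there. A self-contained sketch of one possible fallback, shelling out to the `settings` command; this is an illustration under that assumption, not the project's actual fallback code:

    // Hypothetical fallback for Android >= 12: query a setting via the `settings` shell command.
    // Requires java.io.BufferedReader, java.io.InputStreamReader, java.io.IOException.
    private static String getValueFallback(String table, String key) throws IOException, InterruptedException {
        Process process = new ProcessBuilder("settings", "get", table, key).redirectErrorStream(true).start();
        try (BufferedReader reader = new BufferedReader(new InputStreamReader(process.getInputStream()))) {
            String line = reader.readLine(); // first output line holds the value (or "null" if unset)
            process.waitFor();
            return line;
        }
    }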
|
@ -1,8 +1,9 @@
|
||||
package com.genymobile.scrcpy.video;
|
||||
|
||||
import com.genymobile.scrcpy.AndroidVersions;
|
||||
import com.genymobile.scrcpy.device.Size;
|
||||
import com.genymobile.scrcpy.util.HandlerExecutor;
|
||||
import com.genymobile.scrcpy.util.Ln;
|
||||
import com.genymobile.scrcpy.device.Size;
|
||||
import com.genymobile.scrcpy.wrappers.ServiceManager;
|
||||
|
||||
import android.annotation.SuppressLint;
|
||||
@ -20,7 +21,6 @@ import android.hardware.camera2.params.OutputConfiguration;
|
||||
import android.hardware.camera2.params.SessionConfiguration;
|
||||
import android.hardware.camera2.params.StreamConfigurationMap;
|
||||
import android.media.MediaCodec;
|
||||
import android.os.Build;
|
||||
import android.os.Handler;
|
||||
import android.os.HandlerThread;
|
||||
import android.util.Range;
|
||||
@ -118,7 +118,7 @@ public class CameraCapture extends SurfaceCapture {
|
||||
return null;
|
||||
}
|
||||
|
||||
@TargetApi(Build.VERSION_CODES.N)
|
||||
@TargetApi(AndroidVersions.API_24_ANDROID_7_0)
|
||||
private static Size selectSize(String cameraId, Size explicitSize, int maxSize, CameraAspectRatio aspectRatio, boolean highSpeed)
|
||||
throws CameraAccessException {
|
||||
if (explicitSize != null) {
|
||||
@ -242,7 +242,7 @@ public class CameraCapture extends SurfaceCapture {
|
||||
}
|
||||
|
||||
@SuppressLint("MissingPermission")
|
||||
@TargetApi(Build.VERSION_CODES.S)
|
||||
@TargetApi(AndroidVersions.API_31_ANDROID_12)
|
||||
private CameraDevice openCamera(String id) throws CameraAccessException, InterruptedException {
|
||||
CompletableFuture<CameraDevice> future = new CompletableFuture<>();
|
||||
ServiceManager.getCameraManager().openCamera(id, new CameraDevice.StateCallback() {
|
||||
@ -289,7 +289,7 @@ public class CameraCapture extends SurfaceCapture {
|
||||
}
|
||||
}
|
||||
|
||||
@TargetApi(Build.VERSION_CODES.S)
|
||||
@TargetApi(AndroidVersions.API_31_ANDROID_12)
|
||||
private CameraCaptureSession createCaptureSession(CameraDevice camera, Surface surface) throws CameraAccessException, InterruptedException {
|
||||
CompletableFuture<CameraCaptureSession> future = new CompletableFuture<>();
|
||||
OutputConfiguration outputConfig = new OutputConfiguration(surface);
|
||||
@ -328,7 +328,7 @@ public class CameraCapture extends SurfaceCapture {
|
||||
return requestBuilder.build();
|
||||
}
|
||||
|
||||
@TargetApi(Build.VERSION_CODES.S)
|
||||
@TargetApi(AndroidVersions.API_31_ANDROID_12)
|
||||
private void setRepeatingRequest(CameraCaptureSession session, CaptureRequest request) throws CameraAccessException, InterruptedException {
|
||||
CameraCaptureSession.CaptureCallback callback = new CameraCaptureSession.CaptureCallback() {
|
||||
@Override
|
||||
|
@ -0,0 +1,146 @@
|
||||
package com.genymobile.scrcpy.video;
|
||||
|
||||
import com.genymobile.scrcpy.AndroidVersions;
|
||||
import com.genymobile.scrcpy.control.PositionMapper;
|
||||
import com.genymobile.scrcpy.device.DisplayInfo;
|
||||
import com.genymobile.scrcpy.device.NewDisplay;
|
||||
import com.genymobile.scrcpy.device.Size;
|
||||
import com.genymobile.scrcpy.util.Ln;
|
||||
import com.genymobile.scrcpy.wrappers.ServiceManager;
|
||||
|
||||
import android.graphics.Rect;
|
||||
import android.hardware.display.DisplayManager;
|
||||
import android.hardware.display.VirtualDisplay;
|
||||
import android.os.Build;
|
||||
import android.view.Surface;
|
||||
|
||||
public class NewDisplayCapture extends SurfaceCapture {
|
||||
|
||||
// Internal fields copied from android.hardware.display.DisplayManager
|
||||
private static final int VIRTUAL_DISPLAY_FLAG_SUPPORTS_TOUCH = 1 << 6;
|
||||
private static final int VIRTUAL_DISPLAY_FLAG_ROTATES_WITH_CONTENT = 1 << 7;
|
||||
private static final int VIRTUAL_DISPLAY_FLAG_DESTROY_CONTENT_ON_REMOVAL = 1 << 8;
|
||||
private static final int VIRTUAL_DISPLAY_FLAG_SHOULD_SHOW_SYSTEM_DECORATIONS = 1 << 9;
|
||||
private static final int VIRTUAL_DISPLAY_FLAG_TRUSTED = 1 << 10;
|
||||
private static final int VIRTUAL_DISPLAY_FLAG_OWN_DISPLAY_GROUP = 1 << 11;
|
||||
private static final int VIRTUAL_DISPLAY_FLAG_ALWAYS_UNLOCKED = 1 << 12;
|
||||
private static final int VIRTUAL_DISPLAY_FLAG_TOUCH_FEEDBACK_DISABLED = 1 << 13;
|
||||
private static final int VIRTUAL_DISPLAY_FLAG_OWN_FOCUS = 1 << 14;
|
||||
private static final int VIRTUAL_DISPLAY_FLAG_DEVICE_DISPLAY_GROUP = 1 << 15;
|
||||
|
||||
private final VirtualDisplayListener vdListener;
|
||||
private final NewDisplay newDisplay;
|
||||
|
||||
private Size mainDisplaySize;
|
||||
private int mainDisplayDpi;
|
||||
private int maxSize; // only used if newDisplay.getSize() == null
|
||||
|
||||
private VirtualDisplay virtualDisplay;
|
||||
private Size size;
|
||||
private int dpi;
|
||||
|
||||
public NewDisplayCapture(VirtualDisplayListener vdListener, NewDisplay newDisplay, int maxSize) {
|
||||
this.vdListener = vdListener;
|
||||
this.newDisplay = newDisplay;
|
||||
this.maxSize = maxSize;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void init() {
|
||||
size = newDisplay.getSize();
|
||||
dpi = newDisplay.getDpi();
|
||||
if (size == null || dpi == 0) {
|
||||
DisplayInfo displayInfo = ServiceManager.getDisplayManager().getDisplayInfo(0);
|
||||
if (displayInfo != null) {
|
||||
mainDisplaySize = displayInfo.getSize();
|
||||
mainDisplayDpi = displayInfo.getDpi();
|
||||
} else {
|
||||
Ln.w("Main display not found, fallback to 1920x1080 240dpi");
|
||||
mainDisplaySize = new Size(1920, 1080);
|
||||
mainDisplayDpi = 240;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void prepare() {
|
||||
if (!newDisplay.hasExplicitSize()) {
|
||||
size = ScreenInfo.computeVideoSize(mainDisplaySize.getWidth(), mainDisplaySize.getHeight(), maxSize);
|
||||
}
|
||||
if (!newDisplay.hasExplicitDpi()) {
|
||||
dpi = scaleDpi(mainDisplaySize, mainDisplayDpi, size);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void start(Surface surface) {
|
||||
if (virtualDisplay != null) {
|
||||
virtualDisplay.release();
|
||||
virtualDisplay = null;
|
||||
}
|
||||
|
||||
int virtualDisplayId;
|
||||
try {
|
||||
int flags = DisplayManager.VIRTUAL_DISPLAY_FLAG_PUBLIC
|
||||
| DisplayManager.VIRTUAL_DISPLAY_FLAG_OWN_CONTENT_ONLY
|
||||
| VIRTUAL_DISPLAY_FLAG_SUPPORTS_TOUCH
|
||||
| VIRTUAL_DISPLAY_FLAG_ROTATES_WITH_CONTENT
|
||||
| VIRTUAL_DISPLAY_FLAG_DESTROY_CONTENT_ON_REMOVAL
|
||||
| VIRTUAL_DISPLAY_FLAG_SHOULD_SHOW_SYSTEM_DECORATIONS;
|
||||
if (Build.VERSION.SDK_INT >= AndroidVersions.API_33_ANDROID_13) {
|
||||
flags |= VIRTUAL_DISPLAY_FLAG_TRUSTED
|
||||
| VIRTUAL_DISPLAY_FLAG_OWN_DISPLAY_GROUP
|
||||
| VIRTUAL_DISPLAY_FLAG_ALWAYS_UNLOCKED
|
||||
| VIRTUAL_DISPLAY_FLAG_TOUCH_FEEDBACK_DISABLED;
|
||||
if (Build.VERSION.SDK_INT >= AndroidVersions.API_34_ANDROID_14) {
|
||||
flags |= VIRTUAL_DISPLAY_FLAG_OWN_FOCUS
|
||||
| VIRTUAL_DISPLAY_FLAG_DEVICE_DISPLAY_GROUP;
|
||||
}
|
||||
}
|
||||
virtualDisplay = ServiceManager.getDisplayManager()
|
||||
.createNewVirtualDisplay("scrcpy", size.getWidth(), size.getHeight(), dpi, surface, flags);
|
||||
virtualDisplayId = virtualDisplay.getDisplay().getDisplayId();
|
||||
Ln.i("New display id: " + virtualDisplayId);
|
||||
} catch (Exception e) {
|
||||
Ln.e("Could not create display", e);
|
||||
throw new AssertionError("Could not create display");
|
||||
}
|
||||
|
||||
if (vdListener != null) {
|
||||
virtualDisplayId = virtualDisplay.getDisplay().getDisplayId();
|
||||
Rect contentRect = new Rect(0, 0, size.getWidth(), size.getHeight());
|
||||
PositionMapper positionMapper = new PositionMapper(size, contentRect, 0);
|
||||
vdListener.onNewVirtualDisplay(virtualDisplayId, positionMapper);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void release() {
|
||||
if (virtualDisplay != null) {
|
||||
virtualDisplay.release();
|
||||
virtualDisplay = null;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public synchronized Size getSize() {
|
||||
return size;
|
||||
}
|
||||
|
||||
@Override
|
||||
public synchronized boolean setMaxSize(int newMaxSize) {
|
||||
if (newDisplay.hasExplicitSize()) {
|
||||
// Cannot retry with a different size if the display size was explicitly provided
|
||||
return false;
|
||||
}
|
||||
|
||||
maxSize = newMaxSize;
|
||||
return true;
|
||||
}
|
||||
|
||||
private static int scaleDpi(Size initialSize, int initialDpi, Size size) {
|
||||
int den = initialSize.getMax();
|
||||
int num = size.getMax();
|
||||
return initialDpi * num / den;
|
||||
}
|
||||
}
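As a quick sanity check of scaleDpi(): with a 1920x1080 main display at 240 dpi and a computed video size capped at 1280, the larger sides are 1920 and 1280, so the scaled density is 240 * 1280 / 1920 = 160 dpi. In code form (values illustrative):

    Size initial = new Size(1920, 1080);
    Size scaled = new Size(1280, 720);
    int dpi = scaleDpi(initial, 240, scaled); // 240 * 1280 / 1920 = 160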
|
@ -1,43 +1,97 @@
|
||||
package com.genymobile.scrcpy.video;
|
||||
|
||||
import com.genymobile.scrcpy.device.Device;
|
||||
import com.genymobile.scrcpy.util.Ln;
|
||||
import com.genymobile.scrcpy.AndroidVersions;
|
||||
import com.genymobile.scrcpy.control.PositionMapper;
|
||||
import com.genymobile.scrcpy.device.DisplayInfo;
|
||||
import com.genymobile.scrcpy.device.Size;
|
||||
import com.genymobile.scrcpy.util.Ln;
|
||||
import com.genymobile.scrcpy.util.LogUtils;
|
||||
import com.genymobile.scrcpy.wrappers.DisplayManager;
|
||||
import com.genymobile.scrcpy.wrappers.ServiceManager;
|
||||
import com.genymobile.scrcpy.wrappers.SurfaceControl;
|
||||
|
||||
import android.graphics.Rect;
|
||||
import android.hardware.display.VirtualDisplay;
|
||||
import android.os.Build;
|
||||
import android.os.Handler;
|
||||
import android.os.HandlerThread;
|
||||
import android.os.IBinder;
|
||||
import android.view.IDisplayFoldListener;
|
||||
import android.view.IRotationWatcher;
|
||||
import android.view.Surface;
|
||||
|
||||
public class ScreenCapture extends SurfaceCapture implements Device.RotationListener, Device.FoldListener {
|
||||
public class ScreenCapture extends SurfaceCapture {
|
||||
|
||||
private final VirtualDisplayListener vdListener;
|
||||
private final int displayId;
|
||||
private int maxSize;
|
||||
private final Rect crop;
|
||||
private final int lockVideoOrientation;
|
||||
|
||||
private DisplayInfo displayInfo;
|
||||
private ScreenInfo screenInfo;
|
||||
|
||||
private final Device device;
|
||||
private IBinder display;
|
||||
private VirtualDisplay virtualDisplay;
|
||||
|
||||
public ScreenCapture(Device device) {
|
||||
this.device = device;
|
||||
private DisplayManager.DisplayListenerHandle displayListenerHandle;
|
||||
private HandlerThread handlerThread;
|
||||
|
||||
// On Android 14, the DisplayListener may be broken (it never sends events). This is fixed in recent Android 14 upgrades, but we can't really know.
|
||||
// So register a RotationWatcher and a DisplayFoldListener as a fallback, until we receive the first event from DisplayListener (which proves
|
||||
// that it works).
|
||||
private boolean displayListenerWorks; // only accessed from the display listener thread
|
||||
private IRotationWatcher rotationWatcher;
|
||||
private IDisplayFoldListener displayFoldListener;
|
||||
|
||||
public ScreenCapture(VirtualDisplayListener vdListener, int displayId, int maxSize, Rect crop, int lockVideoOrientation) {
|
||||
this.vdListener = vdListener;
|
||||
this.displayId = displayId;
|
||||
this.maxSize = maxSize;
|
||||
this.crop = crop;
|
||||
this.lockVideoOrientation = lockVideoOrientation;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void init() {
|
||||
device.setRotationListener(this);
|
||||
device.setFoldListener(this);
|
||||
if (Build.VERSION.SDK_INT == AndroidVersions.API_34_ANDROID_14) {
|
||||
registerDisplayListenerFallbacks();
|
||||
}
|
||||
|
||||
handlerThread = new HandlerThread("DisplayListener");
|
||||
handlerThread.start();
|
||||
Handler handler = new Handler(handlerThread.getLooper());
|
||||
displayListenerHandle = ServiceManager.getDisplayManager().registerDisplayListener(displayId -> {
|
||||
if (Build.VERSION.SDK_INT == AndroidVersions.API_34_ANDROID_14) {
|
||||
if (!displayListenerWorks) {
|
||||
// On the first display listener event, we know it works, so we can unregister the fallbacks
|
||||
displayListenerWorks = true;
|
||||
unregisterDisplayListenerFallbacks();
|
||||
}
|
||||
}
|
||||
if (this.displayId == displayId) {
|
||||
requestReset();
|
||||
}
|
||||
}, handler);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void prepare() {
|
||||
displayInfo = ServiceManager.getDisplayManager().getDisplayInfo(displayId);
|
||||
if (displayInfo == null) {
|
||||
Ln.e("Display " + displayId + " not found\n" + LogUtils.buildDisplayListMessage());
|
||||
throw new AssertionError("Display " + display + " not found");
|
||||
}
|
||||
|
||||
if ((displayInfo.getFlags() & DisplayInfo.FLAG_SUPPORTS_PROTECTED_BUFFERS) == 0) {
|
||||
Ln.w("Display doesn't have FLAG_SUPPORTS_PROTECTED_BUFFERS flag, mirroring can be restricted");
|
||||
}
|
||||
|
||||
screenInfo = ScreenInfo.computeScreenInfo(displayInfo.getRotation(), displayInfo.getSize(), crop, maxSize, lockVideoOrientation);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void start(Surface surface) {
|
||||
ScreenInfo screenInfo = device.getScreenInfo();
|
||||
Rect contentRect = screenInfo.getContentRect();
|
||||
|
||||
// does not include the locked video orientation
|
||||
Rect unlockedVideoRect = screenInfo.getUnlockedVideoSize().toRect();
|
||||
int videoRotation = screenInfo.getVideoRotation();
|
||||
int layerStack = device.getLayerStack();
|
||||
|
||||
if (display != null) {
|
||||
SurfaceControl.destroyDisplay(display);
|
||||
display = null;
|
||||
@ -47,15 +101,32 @@ public class ScreenCapture extends SurfaceCapture implements Device.RotationList
|
||||
virtualDisplay = null;
|
||||
}
|
||||
|
||||
Size displaySize = screenInfo.getVideoSize();
|
||||
|
||||
int virtualDisplayId;
|
||||
PositionMapper positionMapper;
|
||||
try {
|
||||
Rect videoRect = screenInfo.getVideoSize().toRect();
|
||||
virtualDisplay = ServiceManager.getDisplayManager()
|
||||
.createVirtualDisplay("scrcpy", videoRect.width(), videoRect.height(), device.getDisplayId(), surface);
|
||||
.createVirtualDisplay("scrcpy", displaySize.getWidth(), displaySize.getHeight(), displayId, surface);
|
||||
virtualDisplayId = virtualDisplay.getDisplay().getDisplayId();
|
||||
Rect contentRect = new Rect(0, 0, displaySize.getWidth(), displaySize.getHeight());
|
||||
// The positions are relative to the virtual display, not to the original display
|
||||
positionMapper = new PositionMapper(displaySize, contentRect, 0);
|
||||
Ln.d("Display: using DisplayManager API");
|
||||
} catch (Exception displayManagerException) {
|
||||
try {
|
||||
display = createDisplay();
|
||||
|
||||
Rect contentRect = screenInfo.getContentRect();
|
||||
|
||||
// does not include the locked video orientation
|
||||
Rect unlockedVideoRect = screenInfo.getUnlockedVideoSize().toRect();
|
||||
int videoRotation = screenInfo.getVideoRotation();
|
||||
int layerStack = displayInfo.getLayerStack();
|
||||
|
||||
setDisplaySurface(display, surface, videoRotation, contentRect, unlockedVideoRect, layerStack);
|
||||
virtualDisplayId = displayId;
|
||||
positionMapper = PositionMapper.from(screenInfo);
|
||||
Ln.d("Display: using SurfaceControl API");
|
||||
} catch (Exception surfaceControlException) {
|
||||
Ln.e("Could not create display using DisplayManager", displayManagerException);
|
||||
@ -63,12 +134,19 @@ public class ScreenCapture extends SurfaceCapture implements Device.RotationList
|
||||
throw new AssertionError("Could not create display");
|
||||
}
|
||||
}
|
||||
|
||||
if (vdListener != null) {
|
||||
vdListener.onNewVirtualDisplay(virtualDisplayId, positionMapper);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void release() {
|
||||
device.setRotationListener(null);
|
||||
device.setFoldListener(null);
|
||||
if (Build.VERSION.SDK_INT == AndroidVersions.API_34_ANDROID_14) {
|
||||
unregisterDisplayListenerFallbacks();
|
||||
}
|
||||
handlerThread.quitSafely();
|
||||
ServiceManager.getDisplayManager().unregisterDisplayListener(displayListenerHandle);
|
||||
if (display != null) {
|
||||
SurfaceControl.destroyDisplay(display);
|
||||
display = null;
|
||||
@ -81,30 +159,20 @@ public class ScreenCapture extends SurfaceCapture implements Device.RotationList
|
||||
|
||||
@Override
|
||||
public Size getSize() {
|
||||
return device.getScreenInfo().getVideoSize();
|
||||
return screenInfo.getVideoSize();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean setMaxSize(int maxSize) {
|
||||
device.setMaxSize(maxSize);
|
||||
public boolean setMaxSize(int newMaxSize) {
|
||||
maxSize = newMaxSize;
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFoldChanged(int displayId, boolean folded) {
|
||||
requestReset();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onRotationChanged(int rotation) {
|
||||
requestReset();
|
||||
}
|
||||
|
||||
private static IBinder createDisplay() throws Exception {
|
||||
// Since Android 12 (preview), secure displays can no longer be created with shell permissions.
|
||||
// On Android 12 preview, SDK_INT is still R (not S), but CODENAME is "S".
|
||||
boolean secure = Build.VERSION.SDK_INT < Build.VERSION_CODES.R || (Build.VERSION.SDK_INT == Build.VERSION_CODES.R && !"S".equals(
|
||||
Build.VERSION.CODENAME));
|
||||
boolean secure = Build.VERSION.SDK_INT < AndroidVersions.API_30_ANDROID_11 || (Build.VERSION.SDK_INT == AndroidVersions.API_30_ANDROID_11
|
||||
&& !"S".equals(Build.VERSION.CODENAME));
|
||||
return SurfaceControl.createDisplay("scrcpy", secure);
|
||||
}
|
||||
|
||||
@ -118,4 +186,45 @@ public class ScreenCapture extends SurfaceCapture implements Device.RotationList
|
||||
SurfaceControl.closeTransaction();
|
||||
}
|
||||
}
|
||||
|
||||
private void registerDisplayListenerFallbacks() {
|
||||
if (displayId == 0) {
|
||||
rotationWatcher = new IRotationWatcher.Stub() {
|
||||
@Override
|
||||
public void onRotationChanged(int rotation) {
|
||||
Ln.i("=== rotation");
|
||||
requestReset();
|
||||
}
|
||||
};
|
||||
ServiceManager.getWindowManager().registerRotationWatcher(rotationWatcher, displayId);
|
||||
}
|
||||
|
||||
// Build.VERSION.SDK_INT >= AndroidVersions.API_29_ANDROID_10 (but implied by == API_34_ANDROID_14)
|
||||
displayFoldListener = new IDisplayFoldListener.Stub() {
|
||||
@Override
|
||||
public void onDisplayFoldChanged(int displayId, boolean folded) {
|
||||
if (ScreenCapture.this.displayId != displayId) {
|
||||
// Ignore events related to other display ids
|
||||
return;
|
||||
}
|
||||
|
||||
requestReset();
|
||||
}
|
||||
};
|
||||
ServiceManager.getWindowManager().registerDisplayFoldListener(displayFoldListener);
|
||||
}
|
||||
|
||||
private void unregisterDisplayListenerFallbacks() {
|
||||
synchronized (this) {
|
||||
if (rotationWatcher != null) {
|
||||
ServiceManager.getWindowManager().unregisterRotationWatcher(rotationWatcher);
|
||||
rotationWatcher = null;
|
||||
}
|
||||
if (displayFoldListener != null) {
|
||||
// Build.VERSION.SDK_INT >= AndroidVersions.API_29_ANDROID_10 (but implied by == API_34_ANDROID_14)
|
||||
ServiceManager.getWindowManager().unregisterDisplayFoldListener(displayFoldListener);
|
||||
displayFoldListener = null;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -2,8 +2,8 @@ package com.genymobile.scrcpy.video;
|
||||
|
||||
import com.genymobile.scrcpy.BuildConfig;
|
||||
import com.genymobile.scrcpy.device.Device;
|
||||
import com.genymobile.scrcpy.util.Ln;
|
||||
import com.genymobile.scrcpy.device.Size;
|
||||
import com.genymobile.scrcpy.util.Ln;
|
||||
|
||||
import android.graphics.Rect;
|
||||
|
||||
@ -112,7 +112,7 @@ public final class ScreenInfo {
|
||||
return rect.width() + ":" + rect.height() + ":" + rect.left + ":" + rect.top;
|
||||
}
|
||||
|
||||
private static Size computeVideoSize(int w, int h, int maxSize) {
|
||||
public static Size computeVideoSize(int w, int h, int maxSize) {
|
||||
// Compute the video size and the padding of the content inside this video.
|
||||
// Principle:
|
||||
// - scale down the greater side of the screen to maxSize (if necessary);
|
||||
|
@ -1,5 +1,6 @@
|
||||
package com.genymobile.scrcpy.video;
|
||||
|
||||
import com.genymobile.scrcpy.device.ConfigurationException;
|
||||
import com.genymobile.scrcpy.device.Size;
|
||||
|
||||
import android.view.Surface;
|
||||
@ -32,15 +33,22 @@ public abstract class SurfaceCapture {
|
||||
}
|
||||
|
||||
/**
|
||||
* Called once before the capture starts.
|
||||
* Called once before the first capture starts.
|
||||
*/
|
||||
public abstract void init() throws IOException;
|
||||
public abstract void init() throws ConfigurationException, IOException;
|
||||
|
||||
/**
|
||||
* Called after the capture ends (if and only if {@link #init()} has been called).
|
||||
* Called after the last capture ends (if and only if {@link #init()} has been called).
|
||||
*/
|
||||
public abstract void release();
|
||||
|
||||
/**
|
||||
* Called once before each capture starts, before {@link #getSize()}.
|
||||
*/
|
||||
public void prepare() {
|
||||
// empty by default
|
||||
}
|
||||
|
||||
/**
|
||||
* Start the capture to the target surface.
|
||||
*
|
||||
|
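Taken together with the SurfaceEncoder changes further down, the intended lifecycle is: init() once, prepare() before each capture (so getSize() may change after a reset), start() on the target surface, then release() once at the end. A condensed sketch of a consumer driving this API (the encoding step and the surface variable are placeholders, not project code):

    capture.init();                     // once, before the first capture
    try {
        boolean alive;
        do {
            capture.prepare();          // before each capture; may recompute the size
            Size size = capture.getSize();
            // configure the codec for `size` and obtain its input surface, then:
            capture.start(surface);
            alive = encodeUntilReset(); // placeholder for the actual encoding loop
        } while (alive);
    } finally {
        capture.release();              // once, after the last capture
    }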
@ -1,15 +1,16 @@
|
||||
package com.genymobile.scrcpy.video;
|
||||
|
||||
import com.genymobile.scrcpy.AndroidVersions;
|
||||
import com.genymobile.scrcpy.AsyncProcessor;
|
||||
import com.genymobile.scrcpy.device.ConfigurationException;
|
||||
import com.genymobile.scrcpy.device.Size;
|
||||
import com.genymobile.scrcpy.device.Streamer;
|
||||
import com.genymobile.scrcpy.util.Codec;
|
||||
import com.genymobile.scrcpy.util.CodecOption;
|
||||
import com.genymobile.scrcpy.util.CodecUtils;
|
||||
import com.genymobile.scrcpy.device.ConfigurationException;
|
||||
import com.genymobile.scrcpy.util.IO;
|
||||
import com.genymobile.scrcpy.util.Ln;
|
||||
import com.genymobile.scrcpy.util.LogUtils;
|
||||
import com.genymobile.scrcpy.device.Size;
|
||||
import com.genymobile.scrcpy.device.Streamer;
|
||||
|
||||
import android.media.MediaCodec;
|
||||
import android.media.MediaCodecInfo;
|
||||
@ -67,12 +68,17 @@ public class SurfaceEncoder implements AsyncProcessor {
|
||||
capture.init();
|
||||
|
||||
try {
|
||||
streamer.writeVideoHeader(capture.getSize());
|
||||
|
||||
boolean alive;
|
||||
boolean headerWritten = false;
|
||||
|
||||
do {
|
||||
capture.prepare();
|
||||
Size size = capture.getSize();
|
||||
if (!headerWritten) {
|
||||
streamer.writeVideoHeader(size);
|
||||
headerWritten = true;
|
||||
}
|
||||
|
||||
format.setInteger(MediaFormat.KEY_WIDTH, size.getWidth());
|
||||
format.setInteger(MediaFormat.KEY_HEIGHT, size.getHeight());
|
||||
|
||||
@ -205,7 +211,13 @@ public class SurfaceEncoder implements AsyncProcessor {
|
||||
if (encoderName != null) {
|
||||
Ln.d("Creating encoder by name: '" + encoderName + "'");
|
||||
try {
|
||||
return MediaCodec.createByCodecName(encoderName);
|
||||
MediaCodec mediaCodec = MediaCodec.createByCodecName(encoderName);
|
||||
String mimeType = Codec.getMimeType(mediaCodec);
|
||||
if (!codec.getMimeType().equals(mimeType)) {
|
||||
Ln.e("Video encoder type for \"" + encoderName + "\" (" + mimeType + ") does not match codec type (" + codec.getMimeType() + ")");
|
||||
throw new ConfigurationException("Incorrect encoder type: " + encoderName);
|
||||
}
|
||||
return mediaCodec;
|
||||
} catch (IllegalArgumentException e) {
|
||||
Ln.e("Video encoder '" + encoderName + "' for " + codec.getName() + " not found\n" + LogUtils.buildVideoEncoderListMessage());
|
||||
throw new ConfigurationException("Unknown encoder: " + encoderName);
|
||||
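The mismatch check above compares the requested codec's MIME type with the one declared by the encoder selected by name. For reference, a sketch showing how the encoder names available for a given MIME type can be enumerated with the public MediaCodecList API (independent of the project's LogUtils helper):

    // Uses android.media.MediaCodecList and android.media.MediaCodecInfo.
    static void printEncodersFor(String mimeType) {
        for (MediaCodecInfo info : new MediaCodecList(MediaCodecList.REGULAR_CODECS).getCodecInfos()) {
            if (!info.isEncoder()) {
                continue;
            }
            for (String type : info.getSupportedTypes()) {
                if (type.equalsIgnoreCase(mimeType)) {
                    System.out.println(info.getName());
                }
            }
        }
    }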
@ -232,7 +244,7 @@ public class SurfaceEncoder implements AsyncProcessor {
|
||||
// must be present to configure the encoder, but does not impact the actual frame rate, which is variable
|
||||
format.setInteger(MediaFormat.KEY_FRAME_RATE, 60);
|
||||
format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
|
||||
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) {
|
||||
if (Build.VERSION.SDK_INT >= AndroidVersions.API_24_ANDROID_7_0) {
|
||||
format.setInteger(MediaFormat.KEY_COLOR_RANGE, MediaFormat.COLOR_RANGE_LIMITED);
|
||||
}
|
||||
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, DEFAULT_I_FRAME_INTERVAL);
|
||||
|
@ -0,0 +1,7 @@
|
||||
package com.genymobile.scrcpy.video;
|
||||
|
||||
import com.genymobile.scrcpy.control.PositionMapper;
|
||||
|
||||
public interface VirtualDisplayListener {
|
||||
void onNewVirtualDisplay(int displayId, PositionMapper positionMapper);
|
||||
}
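Both ScreenCapture and NewDisplayCapture invoke this callback from start(), once the virtual display id and its PositionMapper are known, so that injected coordinates can be translated. Since the interface has a single method, a trivial logging implementation can be written as a lambda (illustration only):

    VirtualDisplayListener listener = (displayId, positionMapper) ->
            Ln.i("Mirroring virtual display " + displayId);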
|
@ -1,5 +1,6 @@
|
||||
package com.genymobile.scrcpy.wrappers;
|
||||
|
||||
import com.genymobile.scrcpy.AndroidVersions;
|
||||
import com.genymobile.scrcpy.FakeContext;
|
||||
import com.genymobile.scrcpy.util.Ln;
|
||||
|
||||
@ -7,7 +8,6 @@ import android.annotation.SuppressLint;
|
||||
import android.annotation.TargetApi;
|
||||
import android.content.Intent;
|
||||
import android.os.Binder;
|
||||
import android.os.Build;
|
||||
import android.os.Bundle;
|
||||
import android.os.IBinder;
|
||||
import android.os.IInterface;
|
||||
@ -63,7 +63,7 @@ public final class ActivityManager {
|
||||
return removeContentProviderExternalMethod;
|
||||
}
|
||||
|
||||
@TargetApi(Build.VERSION_CODES.Q)
|
||||
@TargetApi(AndroidVersions.API_29_ANDROID_10)
|
||||
private ContentProvider getContentProviderExternal(String name, IBinder token) {
|
||||
try {
|
||||
Method method = getGetContentProviderExternalMethod();
|
||||
@ -118,8 +118,12 @@ public final class ActivityManager {
|
||||
return startActivityAsUserMethod;
|
||||
}
|
||||
|
||||
@SuppressWarnings("ConstantConditions")
|
||||
public int startActivity(Intent intent) {
|
||||
return startActivity(intent, null);
|
||||
}
|
||||
|
||||
@SuppressWarnings("ConstantConditions")
|
||||
public int startActivity(Intent intent, Bundle options) {
|
||||
try {
|
||||
Method method = getStartActivityAsUserMethod();
|
||||
return (int) method.invoke(
|
||||
@ -133,7 +137,7 @@ public final class ActivityManager {
|
||||
/* requestCode */ 0,
|
||||
/* startFlags */ 0,
|
||||
/* profilerInfo */ null,
|
||||
/* bOptions */ null,
|
||||
/* bOptions */ options,
|
||||
/* userId */ /* UserHandle.USER_CURRENT */ -2);
|
||||
} catch (Throwable e) {
|
||||
Ln.e("Could not invoke method", e);
|
||||
|
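The new Bundle parameter allows passing ActivityOptions when starting an activity. A plausible use, consistent with the new-display feature elsewhere in this changeset, is launching an app on a specific display; a hedged sketch (assumes `intent` already resolves to the app's launcher activity and `virtualDisplayId` is known; uses android.app.ActivityOptions):

    Bundle options = ActivityOptions.makeBasic()
            .setLaunchDisplayId(virtualDisplayId) // hypothetical id obtained from the capture
            .toBundle();
    ServiceManager.getActivityManager().startActivity(intent, options);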
@ -1,5 +1,6 @@
|
||||
package com.genymobile.scrcpy.wrappers;
|
||||
|
||||
import com.genymobile.scrcpy.AndroidVersions;
|
||||
import com.genymobile.scrcpy.FakeContext;
|
||||
import com.genymobile.scrcpy.util.Ln;
|
||||
|
||||
@ -36,7 +37,7 @@ public final class ClipboardManager {
|
||||
|
||||
private Method getGetPrimaryClipMethod() throws NoSuchMethodException {
|
||||
if (getPrimaryClipMethod == null) {
|
||||
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.Q) {
|
||||
if (Build.VERSION.SDK_INT < AndroidVersions.API_29_ANDROID_10) {
|
||||
getPrimaryClipMethod = manager.getClass().getMethod("getPrimaryClip", String.class);
|
||||
return getPrimaryClipMethod;
|
||||
}
|
||||
@ -99,7 +100,7 @@ public final class ClipboardManager {
|
||||
|
||||
private Method getSetPrimaryClipMethod() throws NoSuchMethodException {
|
||||
if (setPrimaryClipMethod == null) {
|
||||
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.Q) {
|
||||
if (Build.VERSION.SDK_INT < AndroidVersions.API_29_ANDROID_10) {
|
||||
setPrimaryClipMethod = manager.getClass().getMethod("setPrimaryClip", ClipData.class, String.class);
|
||||
return setPrimaryClipMethod;
|
||||
}
|
||||
@ -137,7 +138,7 @@ public final class ClipboardManager {
|
||||
}
|
||||
|
||||
private static ClipData getPrimaryClip(Method method, int methodVersion, IInterface manager) throws ReflectiveOperationException {
|
||||
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.Q) {
|
||||
if (Build.VERSION.SDK_INT < AndroidVersions.API_29_ANDROID_10) {
|
||||
return (ClipData) method.invoke(manager, FakeContext.PACKAGE_NAME);
|
||||
}
|
||||
|
||||
@ -161,7 +162,7 @@ public final class ClipboardManager {
|
||||
}
|
||||
|
||||
private static void setPrimaryClip(Method method, int methodVersion, IInterface manager, ClipData clipData) throws ReflectiveOperationException {
|
||||
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.Q) {
|
||||
if (Build.VERSION.SDK_INT < AndroidVersions.API_29_ANDROID_10) {
|
||||
method.invoke(manager, clipData, FakeContext.PACKAGE_NAME);
|
||||
return;
|
||||
}
|
||||
@ -210,7 +211,7 @@ public final class ClipboardManager {
|
||||
|
||||
private static void addPrimaryClipChangedListener(Method method, int methodVersion, IInterface manager, IOnPrimaryClipChangedListener listener)
|
||||
throws ReflectiveOperationException {
|
||||
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.Q) {
|
||||
if (Build.VERSION.SDK_INT < AndroidVersions.API_29_ANDROID_10) {
|
||||
method.invoke(manager, listener, FakeContext.PACKAGE_NAME);
|
||||
return;
|
||||
}
|
||||
@ -230,7 +231,7 @@ public final class ClipboardManager {
|
||||
|
||||
private Method getAddPrimaryClipChangedListener() throws NoSuchMethodException {
|
||||
if (addPrimaryClipChangedListener == null) {
|
||||
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.Q) {
|
||||
if (Build.VERSION.SDK_INT < AndroidVersions.API_29_ANDROID_10) {
|
||||
addPrimaryClipChangedListener = manager.getClass()
|
||||
.getMethod("addPrimaryClipChangedListener", IOnPrimaryClipChangedListener.class, String.class);
|
||||
} else {
|
||||
|
@ -1,5 +1,6 @@
|
||||
package com.genymobile.scrcpy.wrappers;
|
||||
|
||||
import com.genymobile.scrcpy.AndroidVersions;
|
||||
import com.genymobile.scrcpy.FakeContext;
|
||||
import com.genymobile.scrcpy.util.Ln;
|
||||
import com.genymobile.scrcpy.util.SettingsException;
|
||||
@ -51,7 +52,7 @@ public final class ContentProvider implements Closeable {
|
||||
@SuppressLint("PrivateApi")
|
||||
private Method getCallMethod() throws NoSuchMethodException {
|
||||
if (callMethod == null) {
|
||||
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.S) {
|
||||
if (Build.VERSION.SDK_INT >= AndroidVersions.API_31_ANDROID_12) {
|
||||
callMethod = provider.getClass().getMethod("call", AttributionSource.class, String.class, String.class, String.class, Bundle.class);
|
||||
callMethodVersion = 0;
|
||||
} else {
|
||||
@ -79,7 +80,7 @@ public final class ContentProvider implements Closeable {
|
||||
Method method = getCallMethod();
|
||||
Object[] args;
|
||||
|
||||
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.S && callMethodVersion == 0) {
|
||||
if (Build.VERSION.SDK_INT >= AndroidVersions.API_31_ANDROID_12 && callMethodVersion == 0) {
|
||||
args = new Object[]{FakeContext.get().getAttributionSource(), "settings", callMethod, arg, extras};
|
||||
} else {
|
||||
switch (callMethodVersion) {
|
||||
|
@ -1,16 +1,16 @@
|
||||
package com.genymobile.scrcpy.wrappers;
|
||||
|
||||
import com.genymobile.scrcpy.AndroidVersions;
|
||||
import com.genymobile.scrcpy.util.Ln;
|
||||
|
||||
import android.annotation.SuppressLint;
|
||||
import android.annotation.TargetApi;
|
||||
import android.os.Build;
|
||||
import android.os.IBinder;
|
||||
|
||||
import java.lang.reflect.Method;
|
||||
|
||||
@SuppressLint({"PrivateApi", "SoonBlockedPrivateApi", "BlockedPrivateApi"})
|
||||
@TargetApi(Build.VERSION_CODES.UPSIDE_DOWN_CAKE)
|
||||
@TargetApi(AndroidVersions.API_34_ANDROID_14)
|
||||
public final class DisplayControl {
|
||||
|
||||
private static final Class<?> CLASS;
|
||||
|
@ -1,22 +1,48 @@
|
||||
package com.genymobile.scrcpy.wrappers;
|
||||
|
||||
import com.genymobile.scrcpy.util.Command;
|
||||
import com.genymobile.scrcpy.FakeContext;
|
||||
import com.genymobile.scrcpy.device.DisplayInfo;
|
||||
import com.genymobile.scrcpy.util.Ln;
|
||||
import com.genymobile.scrcpy.device.Size;
|
||||
import com.genymobile.scrcpy.util.Command;
|
||||
import com.genymobile.scrcpy.util.Ln;
|
||||
|
||||
import android.annotation.SuppressLint;
|
||||
import android.content.Context;
|
||||
import android.hardware.display.VirtualDisplay;
|
||||
import android.os.Handler;
|
||||
import android.view.Display;
|
||||
import android.view.Surface;
|
||||
|
||||
import java.lang.reflect.Constructor;
|
||||
import java.lang.reflect.Field;
|
||||
import java.lang.reflect.Method;
|
||||
import java.lang.reflect.Proxy;
|
||||
import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
@SuppressLint("PrivateApi,DiscouragedPrivateApi")
|
||||
public final class DisplayManager {
|
||||
|
||||
// android.hardware.display.DisplayManager.EVENT_FLAG_DISPLAY_CHANGED
|
||||
public static final long EVENT_FLAG_DISPLAY_CHANGED = 1L << 2;
|
||||
|
||||
public interface DisplayListener {
|
||||
/**
|
||||
* Called whenever the properties of a logical {@link android.view.Display},
|
||||
* such as size and density, have changed.
|
||||
*
|
||||
* @param displayId The id of the logical display that changed.
|
||||
*/
|
||||
void onDisplayChanged(int displayId);
|
||||
}
|
||||
|
||||
public static final class DisplayListenerHandle {
|
||||
private final Object displayListenerProxy;
|
||||
private DisplayListenerHandle(Object displayListenerProxy) {
|
||||
this.displayListenerProxy = displayListenerProxy;
|
||||
}
|
||||
}
|
||||
|
||||
private final Object manager; // instance of hidden class android.hardware.display.DisplayManagerGlobal
|
||||
private Method createVirtualDisplayMethod;
|
||||
|
||||
@ -39,7 +65,7 @@ public final class DisplayManager {
|
||||
public static DisplayInfo parseDisplayInfo(String dumpsysDisplayOutput, int displayId) {
|
||||
Pattern regex = Pattern.compile(
|
||||
"^ mOverrideDisplayInfo=DisplayInfo\\{\".*?, displayId " + displayId + ".*?(, FLAG_.*)?, real ([0-9]+) x ([0-9]+).*?, "
|
||||
+ "rotation ([0-9]+).*?, layerStack ([0-9]+)",
|
||||
+ "rotation ([0-9]+).*?, density ([0-9]+).*?, layerStack ([0-9]+)",
|
||||
Pattern.MULTILINE);
|
||||
Matcher m = regex.matcher(dumpsysDisplayOutput);
|
||||
if (!m.find()) {
|
||||
@ -49,9 +75,10 @@ public final class DisplayManager {
|
||||
int width = Integer.parseInt(m.group(2));
|
||||
int height = Integer.parseInt(m.group(3));
|
||||
int rotation = Integer.parseInt(m.group(4));
|
||||
int layerStack = Integer.parseInt(m.group(5));
|
||||
int density = Integer.parseInt(m.group(5));
|
||||
int layerStack = Integer.parseInt(m.group(6));
|
||||
|
||||
return new DisplayInfo(displayId, new Size(width, height), rotation, layerStack, flags);
|
||||
return new DisplayInfo(displayId, new Size(width, height), rotation, layerStack, flags, density);
|
||||
}
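For orientation, the six capture groups now expected by this pattern (flags, width, height, rotation, density, layerStack) would match a dumpsys display line roughly of the following shape; field order and the elided fields vary across Android versions, so this is only an illustration, not output captured from a real device:

    mOverrideDisplayInfo=DisplayInfo{"Built-in Screen", displayId 0, FLAG_SECURE, real 1080 x 2280, ..., rotation 0, ..., density 440, ..., layerStack 0, ...}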
|
||||
|
||||
private static DisplayInfo getDisplayInfoFromDumpsysDisplay(int displayId) {
|
||||
@ -98,7 +125,8 @@ public final class DisplayManager {
|
||||
int rotation = cls.getDeclaredField("rotation").getInt(displayInfo);
|
||||
int layerStack = cls.getDeclaredField("layerStack").getInt(displayInfo);
|
||||
int flags = cls.getDeclaredField("flags").getInt(displayInfo);
|
||||
return new DisplayInfo(displayId, new Size(width, height), rotation, layerStack, flags);
|
||||
int dpi = cls.getDeclaredField("logicalDensityDpi").getInt(displayInfo);
|
||||
return new DisplayInfo(displayId, new Size(width, height), rotation, layerStack, flags, dpi);
|
||||
} catch (ReflectiveOperationException e) {
|
||||
throw new AssertionError(e);
|
||||
}
|
||||
@ -124,4 +152,58 @@ public final class DisplayManager {
|
||||
Method method = getCreateVirtualDisplayMethod();
|
||||
return (VirtualDisplay) method.invoke(null, name, width, height, displayIdToMirror, surface);
|
||||
}
|
||||
|
||||
public VirtualDisplay createNewVirtualDisplay(String name, int width, int height, int dpi, Surface surface, int flags) throws Exception {
|
||||
Constructor<android.hardware.display.DisplayManager> ctor = android.hardware.display.DisplayManager.class.getDeclaredConstructor(
|
||||
Context.class);
|
||||
ctor.setAccessible(true);
|
||||
android.hardware.display.DisplayManager dm = ctor.newInstance(FakeContext.get());
|
||||
return dm.createVirtualDisplay(name, width, height, dpi, surface, flags);
|
||||
}
|
||||
|
||||
public DisplayListenerHandle registerDisplayListener(DisplayListener listener, Handler handler) {
|
||||
try {
|
||||
Class<?> displayListenerClass = Class.forName("android.hardware.display.DisplayManager$DisplayListener");
|
||||
Object displayListenerProxy = Proxy.newProxyInstance(
|
||||
ClassLoader.getSystemClassLoader(),
|
||||
new Class[] {displayListenerClass},
|
||||
(proxy, method, args) -> {
|
||||
if ("onDisplayChanged".equals(method.getName())) {
|
||||
listener.onDisplayChanged((int) args[0]);
|
||||
}
|
||||
return null;
|
||||
});
|
||||
try {
|
||||
manager.getClass()
|
||||
.getMethod("registerDisplayListener", displayListenerClass, Handler.class, long.class, String.class)
|
||||
.invoke(manager, displayListenerProxy, handler, EVENT_FLAG_DISPLAY_CHANGED, FakeContext.PACKAGE_NAME);
|
||||
} catch (NoSuchMethodException e) {
|
||||
try {
|
||||
manager.getClass()
|
||||
.getMethod("registerDisplayListener", displayListenerClass, Handler.class, long.class)
|
||||
.invoke(manager, displayListenerProxy, handler, EVENT_FLAG_DISPLAY_CHANGED);
|
||||
} catch (NoSuchMethodException e2) {
|
||||
manager.getClass()
|
||||
.getMethod("registerDisplayListener", displayListenerClass, Handler.class)
|
||||
.invoke(manager, displayListenerProxy, handler);
|
||||
}
|
||||
}
|
||||
|
||||
return new DisplayListenerHandle(displayListenerProxy);
|
||||
} catch (Exception e) {
|
||||
// Rotation and screen size won't be updated, not a fatal error
|
||||
Ln.e("Could not register display listener", e);
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
public void unregisterDisplayListener(DisplayListenerHandle listener) {
|
||||
try {
|
||||
Class<?> displayListenerClass = Class.forName("android.hardware.display.DisplayManager$DisplayListener");
|
||||
manager.getClass().getMethod("unregisterDisplayListener", displayListenerClass).invoke(manager, listener.displayListenerProxy);
|
||||
} catch (Exception e) {
|
||||
Ln.e("Could not unregister display listener", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -2,8 +2,6 @@ package com.genymobile.scrcpy.wrappers;
|
||||
|
||||
import com.genymobile.scrcpy.util.Ln;
|
||||
|
||||
import android.annotation.SuppressLint;
|
||||
import android.os.Build;
|
||||
import android.os.IInterface;
|
||||
|
||||
import java.lang.reflect.Method;
|
||||
@ -23,9 +21,7 @@ public final class PowerManager {
|
||||
|
||||
private Method getIsScreenOnMethod() throws NoSuchMethodException {
|
||||
if (isScreenOnMethod == null) {
|
||||
@SuppressLint("ObsoleteSdkInt") // we may lower minSdkVersion in the future
|
||||
String methodName = Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT_WATCH ? "isInteractive" : "isScreenOn";
|
||||
isScreenOnMethod = manager.getClass().getMethod(methodName);
|
||||
isScreenOnMethod = manager.getClass().getMethod("isInteractive");
|
||||
}
|
||||
return isScreenOnMethod;
|
||||
}
|
||||
|
@ -1,5 +1,6 @@
|
||||
package com.genymobile.scrcpy.wrappers;
|
||||
|
||||
import com.genymobile.scrcpy.AndroidVersions;
|
||||
import com.genymobile.scrcpy.util.Ln;
|
||||
|
||||
import android.annotation.SuppressLint;
|
||||
@ -83,9 +84,9 @@ public final class SurfaceControl {
|
||||
|
||||
private static Method getGetBuiltInDisplayMethod() throws NoSuchMethodException {
|
||||
if (getBuiltInDisplayMethod == null) {
|
||||
// the method signature has changed in Android Q
|
||||
// the method signature has changed in Android 10
|
||||
// <https://github.com/Genymobile/scrcpy/issues/586>
|
||||
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.Q) {
|
||||
if (Build.VERSION.SDK_INT < AndroidVersions.API_29_ANDROID_10) {
|
||||
getBuiltInDisplayMethod = CLASS.getMethod("getBuiltInDisplay", int.class);
|
||||
} else {
|
||||
getBuiltInDisplayMethod = CLASS.getMethod("getInternalDisplayToken");
|
||||
@ -106,7 +107,7 @@ public final class SurfaceControl {
|
||||
public static IBinder getBuiltInDisplay() {
|
||||
try {
|
||||
Method method = getGetBuiltInDisplayMethod();
|
||||
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.Q) {
|
||||
if (Build.VERSION.SDK_INT < AndroidVersions.API_29_ANDROID_10) {
|
||||
// call getBuiltInDisplay(0)
|
||||
return (IBinder) method.invoke(null, 0);
|
||||
}
|
||||
|
@ -1,5 +1,6 @@
|
||||
package com.genymobile.scrcpy.wrappers;
|
||||
|
||||
import com.genymobile.scrcpy.AndroidVersions;
|
||||
import com.genymobile.scrcpy.util.Ln;
|
||||
|
||||
import android.annotation.TargetApi;
|
||||
@ -200,13 +201,29 @@ public final class WindowManager {
|
||||
}
|
||||
}
|
||||
|
||||
@TargetApi(29)
|
||||
public void unregisterRotationWatcher(IRotationWatcher rotationWatcher) {
|
||||
try {
|
||||
manager.getClass().getMethod("removeRotationWatcher", IRotationWatcher.class).invoke(manager, rotationWatcher);
|
||||
} catch (Exception e) {
|
||||
Ln.e("Could not unregister rotation watcher", e);
|
||||
}
|
||||
}
|
||||
|
||||
@TargetApi(AndroidVersions.API_29_ANDROID_10)
|
||||
public void registerDisplayFoldListener(IDisplayFoldListener foldListener) {
|
||||
try {
|
||||
Class<?> cls = manager.getClass();
|
||||
cls.getMethod("registerDisplayFoldListener", IDisplayFoldListener.class).invoke(manager, foldListener);
|
||||
manager.getClass().getMethod("registerDisplayFoldListener", IDisplayFoldListener.class).invoke(manager, foldListener);
|
||||
} catch (Exception e) {
|
||||
Ln.e("Could not register display fold listener", e);
|
||||
}
|
||||
}
|
||||
|
||||
@TargetApi(AndroidVersions.API_29_ANDROID_10)
|
||||
public void unregisterDisplayFoldListener(IDisplayFoldListener foldListener) {
|
||||
try {
|
||||
manager.getClass().getMethod("unregisterDisplayFoldListener", IDisplayFoldListener.class).invoke(manager, foldListener);
|
||||
} catch (Exception e) {
|
||||
Ln.e("Could not unregister display fold listener", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -399,6 +399,27 @@ public class ControlMessageReaderTest {
|
||||
Assert.assertEquals(-1, bis.read()); // EOS
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testParseStartApp() throws IOException {
|
||||
byte[] name = "firefox".getBytes(StandardCharsets.UTF_8);
|
||||
|
||||
ByteArrayOutputStream bos = new ByteArrayOutputStream();
|
||||
DataOutputStream dos = new DataOutputStream(bos);
|
||||
dos.writeByte(ControlMessage.TYPE_START_APP);
|
||||
dos.writeByte(name.length);
|
||||
dos.write(name);
|
||||
byte[] packet = bos.toByteArray();
|
||||
|
||||
ByteArrayInputStream bis = new ByteArrayInputStream(packet);
|
||||
ControlMessageReader reader = new ControlMessageReader(bis);
|
||||
|
||||
ControlMessage event = reader.read();
|
||||
Assert.assertEquals(ControlMessage.TYPE_START_APP, event.getType());
|
||||
Assert.assertEquals("firefox", event.getText());
|
||||
|
||||
Assert.assertEquals(-1, bis.read()); // EOS
|
||||
}
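In other words, the serialized control message exercised here is one type byte (TYPE_START_APP), one length byte, then the UTF-8 bytes of the app name:

    [TYPE_START_APP][length = 7]['f']['i']['r']['e']['f']['o']['x']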
|
||||
|
||||
@Test
|
||||
public void testMultiEvents() throws IOException {
|
||||
ByteArrayOutputStream bos = new ByteArrayOutputStream();
|
||||
Block a user