Compare commits
22 commits (`fix_audio_`…`develop`):

b47e3ec018, 09009c2aa7, fb21bbf763, 0f1afff7a6, 48a00fb481, 3b7e2ca9c8,
5bd7514871, d3c2955fb9, 5042f8de93, 7536f95d1c, 6832e8d629, 28313631e5,
fdbc9397a7, a3cdf1a6b8, b16d4d1835, b8d43866d2, 2d79aeb117, 888a5aae7d,
323ea2f1d9, 9ca554ca41, 9d3c656414, 379caf8551
@@ -19,8 +19,9 @@ _scrcpy() {
-f --fullscreen
--force-adb-forward
--forward-all-clicks
-K --hid-keyboard
-h --help
--kill-adb-on-close
-K --hid-keyboard
--legacy-paste
--list-displays
--list-encoders
@@ -59,6 +60,7 @@ _scrcpy() {
-t --show-touches
--tcpip
--tcpip=
--time-limit=
--tunnel-host=
--tunnel-port=
--v4l2-buffer=
@@ -26,8 +26,9 @@ arguments=(
{-f,--fullscreen}'[Start in fullscreen]'
'--force-adb-forward[Do not attempt to use \"adb reverse\" to connect to the device]'
'--forward-all-clicks[Forward clicks to device]'
{-K,--hid-keyboard}'[Simulate a physical keyboard by using HID over AOAv2]'
{-h,--help}'[Print the help]'
'--kill-adb-on-close[Kill adb when scrcpy terminates]'
{-K,--hid-keyboard}'[Simulate a physical keyboard by using HID over AOAv2]'
'--legacy-paste[Inject computer clipboard text as a sequence of key events on Ctrl+v]'
'--list-displays[List displays available on the device]'
'--list-encoders[List video and audio encoders available on the device]'
@@ -64,6 +65,7 @@ arguments=(
'--shortcut-mod=[\[key1,key2+key3,...\] Specify the modifiers to use for scrcpy shortcuts]:shortcut mod:(lctrl rctrl lalt ralt lsuper rsuper)'
{-t,--show-touches}'[Show physical touches]'
'--tcpip[\(optional \[ip\:port\]\) Configure and connect the device over TCP/IP]'
'--time-limit=[Set the maximum mirroring time, in seconds]'
'--tunnel-host=[Set the IP address of the adb tunnel to reach the scrcpy server]'
'--tunnel-port=[Set the TCP port of the adb tunnel to reach the scrcpy server]'
'--v4l2-buffer=[Add a buffering delay \(in milliseconds\) before pushing frames]'
@@ -51,6 +51,7 @@ src = [
    'src/util/term.c',
    'src/util/thread.c',
    'src/util/tick.c',
    'src/util/timeout.c',
]

conf = configuration_data()
@@ -6,10 +6,10 @@ cd "$DIR"
mkdir -p "$PREBUILT_DATA_DIR"
cd "$PREBUILT_DATA_DIR"

DEP_DIR=SDL2-2.26.4
DEP_DIR=SDL2-2.28.0

FILENAME=SDL2-devel-2.26.4-mingw.tar.gz
SHA256SUM=fe899c8642caac2f180b1ee6f786857ddcaa0adc1fa82474312b09dd47d74712
FILENAME=SDL2-devel-2.28.0-mingw.tar.gz
SHA256SUM=b91ce59eeacd4a9db403f976fd2337d9360b21ada374124417d716065c380e20

if [[ -d "$DEP_DIR" ]]
then
@@ -129,6 +129,10 @@ By default, right-click triggers BACK (or POWER on) and middle-click triggers HO
.B \-h, \-\-help
Print this help.

.TP
.B \-\-kill\-adb\-on\-close
Kill adb when scrcpy terminates.

.TP
.B \-K, \-\-hid\-keyboard
Simulate a physical keyboard by using HID over AOAv2.
@@ -350,6 +354,10 @@ If a destination address is provided, then scrcpy connects to this address befor

If no destination address is provided, then scrcpy attempts to find the IP address and adb port of the current device (typically connected over USB), enables TCP/IP mode if necessary, then connects to this address before starting.

.TP
.BI "\-\-time\-limit " seconds
Set the maximum mirroring time, in seconds.

.TP
.BI "\-\-tunnel\-host " ip
Set the IP address of the adb tunnel to reach the scrcpy server. This option automatically enables --force-adb-forward.
@@ -77,6 +77,8 @@ enum {
OPT_NO_AUDIO_PLAYBACK,
OPT_NO_VIDEO_PLAYBACK,
OPT_AUDIO_SOURCE,
OPT_KILL_ADB_ON_CLOSE,
OPT_TIME_LIMIT,
};

struct sc_option {
@@ -275,6 +277,16 @@ static const struct sc_option options[] = {
"middle-click triggers HOME. This option disables these "
"shortcuts and forwards the clicks to the device instead.",
},
{
.shortopt = 'h',
.longopt = "help",
.text = "Print this help.",
},
{
.longopt_id = OPT_KILL_ADB_ON_CLOSE,
.longopt = "kill-adb-on-close",
.text = "Kill adb when scrcpy terminates.",
},
{
.shortopt = 'K',
.longopt = "hid-keyboard",
@@ -292,11 +304,6 @@ static const struct sc_option options[] = {
"is enabled (or a physical keyboard is connected).\n"
"Also see --hid-mouse.",
},
{
.shortopt = 'h',
.longopt = "help",
.text = "Print this help.",
},
{
.longopt_id = OPT_LEGACY_PASTE,
.longopt = "legacy-paste",
@@ -574,6 +581,12 @@ static const struct sc_option options[] = {
"connected over USB), enables TCP/IP mode, then connects to "
"this address before starting.",
},
{
.longopt_id = OPT_TIME_LIMIT,
.longopt = "time-limit",
.argdesc = "seconds",
.text = "Set the maximum mirroring time, in seconds.",
},
{
.longopt_id = OPT_TUNNEL_HOST,
.longopt = "tunnel-host",
@@ -1612,6 +1625,18 @@ parse_audio_source(const char *optarg, enum sc_audio_source *source) {
return false;
}

static bool
parse_time_limit(const char *s, sc_tick *tick) {
long value;
bool ok = parse_integer_arg(s, &value, false, 0, 0x7FFFFFFF, "time limit");
if (!ok) {
return false;
}

*tick = SC_TICK_FROM_SEC(value);
return true;
}

static bool
parse_args_with_getopt(struct scrcpy_cli_args *args, int argc, char *argv[],
const char *optstring, const struct option *longopts) {
@@ -1944,6 +1969,14 @@ parse_args_with_getopt(struct scrcpy_cli_args *args, int argc, char *argv[],
return false;
}
break;
case OPT_KILL_ADB_ON_CLOSE:
opts->kill_adb_on_close = true;
break;
case OPT_TIME_LIMIT:
if (!parse_time_limit(optarg, &opts->time_limit)) {
return false;
}
break;
default:
// getopt prints the error message on stderr
return false;
@@ -6,3 +6,4 @@
#define SC_EVENT_DEMUXER_ERROR (SDL_USEREVENT + 5)
#define SC_EVENT_RECORDER_ERROR (SDL_USEREVENT + 6)
#define SC_EVENT_SCREEN_INIT_SIZE (SDL_USEREVENT + 7)
#define SC_EVENT_TIME_LIMIT_REACHED (SDL_USEREVENT + 8)
@@ -42,6 +42,7 @@ const struct scrcpy_options scrcpy_options_default = {
.display_buffer = 0,
.audio_buffer = SC_TICK_FROM_MS(50),
.audio_output_buffer = SC_TICK_FROM_MS(5),
.time_limit = 0,
#ifdef HAVE_V4L2
.v4l2_device = NULL,
.v4l2_buffer = 0,
@@ -80,4 +81,5 @@ const struct scrcpy_options scrcpy_options_default = {
.require_audio = false,
.list_encoders = false,
.list_displays = false,
.kill_adb_on_close = false,
};
@@ -142,6 +142,7 @@ struct scrcpy_options {
sc_tick display_buffer;
sc_tick audio_buffer;
sc_tick audio_output_buffer;
sc_tick time_limit;
#ifdef HAVE_V4L2
const char *v4l2_device;
sc_tick v4l2_buffer;
@@ -180,6 +181,7 @@ struct scrcpy_options {
bool require_audio;
bool list_encoders;
bool list_displays;
bool kill_adb_on_close;
};

extern const struct scrcpy_options scrcpy_options_default;
@@ -96,23 +96,30 @@ sc_recorder_rescale_packet(AVStream *stream, AVPacket *packet) {
}

static bool
sc_recorder_write_stream(struct sc_recorder *recorder, int stream_index,
AVPacket *packet) {
AVStream *stream = recorder->ctx->streams[stream_index];
sc_recorder_write_stream(struct sc_recorder *recorder,
struct sc_recorder_stream *st, AVPacket *packet) {
AVStream *stream = recorder->ctx->streams[st->index];
sc_recorder_rescale_packet(stream, packet);
if (st->last_pts != AV_NOPTS_VALUE && packet->pts <= st->last_pts) {
LOGW("Fixing PTS non monotonically increasing in stream %d "
"(%" PRIi64 " >= %" PRIi64 ")",
st->index, st->last_pts, packet->pts);
packet->pts = ++st->last_pts;
packet->dts = packet->pts;
} else {
st->last_pts = packet->pts;
}
return av_interleaved_write_frame(recorder->ctx, packet) >= 0;
}

static inline bool
sc_recorder_write_video(struct sc_recorder *recorder, AVPacket *packet) {
return sc_recorder_write_stream(recorder, recorder->video_stream_index,
packet);
return sc_recorder_write_stream(recorder, &recorder->video_stream, packet);
}

static inline bool
sc_recorder_write_audio(struct sc_recorder *recorder, AVPacket *packet) {
return sc_recorder_write_stream(recorder, recorder->audio_stream_index,
packet);
return sc_recorder_write_stream(recorder, &recorder->audio_stream, packet);
}

static bool
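Because the removed and added lines are interleaved above without `-`/`+` markers, here is the new `sc_recorder_write_stream()` reassembled from the lines of this hunk (the indentation is assumed, it is not part of the diff): the recorder now keeps the last PTS per stream and rewrites any non-increasing PTS before muxing.

```c
// Reassembled from the hunk above (indentation assumed).
static bool
sc_recorder_write_stream(struct sc_recorder *recorder,
                         struct sc_recorder_stream *st, AVPacket *packet) {
    AVStream *stream = recorder->ctx->streams[st->index];
    sc_recorder_rescale_packet(stream, packet);
    if (st->last_pts != AV_NOPTS_VALUE && packet->pts <= st->last_pts) {
        LOGW("Fixing PTS non monotonically increasing in stream %d "
             "(%" PRIi64 " >= %" PRIi64 ")",
             st->index, st->last_pts, packet->pts);
        // Force a strictly increasing PTS (and a matching DTS)
        packet->pts = ++st->last_pts;
        packet->dts = packet->pts;
    } else {
        st->last_pts = packet->pts;
    }
    return av_interleaved_write_frame(recorder->ctx, packet) >= 0;
}
```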
@@ -178,10 +185,11 @@ static bool
sc_recorder_process_header(struct sc_recorder *recorder) {
sc_mutex_lock(&recorder->mutex);

while (!recorder->stopped && (!recorder->video_init
|| !recorder->audio_init
|| sc_recorder_has_empty_queues(recorder))) {
sc_cond_wait(&recorder->stream_cond, &recorder->mutex);
while (!recorder->stopped &&
((recorder->video && !recorder->video_init)
|| (recorder->audio && !recorder->audio_init)
|| sc_recorder_has_empty_queues(recorder))) {
sc_cond_wait(&recorder->cond, &recorder->mutex);
}

if (recorder->video && sc_vecdeque_is_empty(&recorder->video_queue)) {
@@ -214,9 +222,9 @@ sc_recorder_process_header(struct sc_recorder *recorder) {
goto end;
}

assert(recorder->video_stream_index >= 0);
assert(recorder->video_stream.index >= 0);
AVStream *video_stream =
recorder->ctx->streams[recorder->video_stream_index];
recorder->ctx->streams[recorder->video_stream.index];
bool ok = sc_recorder_set_extradata(video_stream, video_pkt);
if (!ok) {
goto end;
@@ -229,9 +237,9 @@ sc_recorder_process_header(struct sc_recorder *recorder) {
goto end;
}

assert(recorder->audio_stream_index >= 0);
assert(recorder->audio_stream.index >= 0);
AVStream *audio_stream =
recorder->ctx->streams[recorder->audio_stream_index];
recorder->ctx->streams[recorder->audio_stream.index];
bool ok = sc_recorder_set_extradata(audio_stream, audio_pkt);
if (!ok) {
goto end;
@@ -289,7 +297,7 @@ sc_recorder_process_packets(struct sc_recorder *recorder) {
// A new packet may be assigned to audio_pkt and be processed
break;
}
sc_cond_wait(&recorder->queue_cond, &recorder->mutex);
sc_cond_wait(&recorder->cond, &recorder->mutex);
}

// If stopped is set, continue to process the remaining events (to
@@ -504,10 +512,10 @@ sc_recorder_video_packet_sink_open(struct sc_packet_sink *sink,
return false;
}

recorder->video_stream_index = stream->index;
recorder->video_stream.index = stream->index;

recorder->video_init = true;
sc_cond_signal(&recorder->stream_cond);
sc_cond_signal(&recorder->cond);
sc_mutex_unlock(&recorder->mutex);

return true;
@@ -522,7 +530,7 @@ sc_recorder_video_packet_sink_close(struct sc_packet_sink *sink) {
sc_mutex_lock(&recorder->mutex);
// EOS also stops the recorder
recorder->stopped = true;
sc_cond_signal(&recorder->queue_cond);
sc_cond_signal(&recorder->cond);
sc_mutex_unlock(&recorder->mutex);
}

@@ -548,7 +556,7 @@ sc_recorder_video_packet_sink_push(struct sc_packet_sink *sink,
return false;
}

rec->stream_index = recorder->video_stream_index;
rec->stream_index = recorder->video_stream.index;

bool ok = sc_vecdeque_push(&recorder->video_queue, rec);
if (!ok) {
@@ -557,7 +565,7 @@ sc_recorder_video_packet_sink_push(struct sc_packet_sink *sink,
return false;
}

sc_cond_signal(&recorder->queue_cond);
sc_cond_signal(&recorder->cond);

sc_mutex_unlock(&recorder->mutex);
return true;
@@ -585,10 +593,10 @@ sc_recorder_audio_packet_sink_open(struct sc_packet_sink *sink,
return false;
}

recorder->audio_stream_index = stream->index;
recorder->audio_stream.index = stream->index;

recorder->audio_init = true;
sc_cond_signal(&recorder->stream_cond);
sc_cond_signal(&recorder->cond);
sc_mutex_unlock(&recorder->mutex);

return true;
@@ -604,7 +612,7 @@ sc_recorder_audio_packet_sink_close(struct sc_packet_sink *sink) {
sc_mutex_lock(&recorder->mutex);
// EOS also stops the recorder
recorder->stopped = true;
sc_cond_signal(&recorder->queue_cond);
sc_cond_signal(&recorder->cond);
sc_mutex_unlock(&recorder->mutex);
}

@@ -631,7 +639,7 @@ sc_recorder_audio_packet_sink_push(struct sc_packet_sink *sink,
return false;
}

rec->stream_index = recorder->audio_stream_index;
rec->stream_index = recorder->audio_stream.index;

bool ok = sc_vecdeque_push(&recorder->audio_queue, rec);
if (!ok) {
@@ -640,7 +648,7 @@ sc_recorder_audio_packet_sink_push(struct sc_packet_sink *sink,
return false;
}

sc_cond_signal(&recorder->queue_cond);
sc_cond_signal(&recorder->cond);

sc_mutex_unlock(&recorder->mutex);
return true;
@@ -658,10 +666,16 @@ sc_recorder_audio_packet_sink_disable(struct sc_packet_sink *sink) {
sc_mutex_lock(&recorder->mutex);
recorder->audio = false;
recorder->audio_init = true;
sc_cond_signal(&recorder->stream_cond);
sc_cond_signal(&recorder->cond);
sc_mutex_unlock(&recorder->mutex);
}

static void
sc_recorder_stream_init(struct sc_recorder_stream *stream) {
stream->index = -1;
stream->last_pts = AV_NOPTS_VALUE;
}

bool
sc_recorder_init(struct sc_recorder *recorder, const char *filename,
enum sc_record_format format, bool video, bool audio,
@@ -677,16 +691,11 @@ sc_recorder_init(struct sc_recorder *recorder, const char *filename,
goto error_free_filename;
}

ok = sc_cond_init(&recorder->queue_cond);
ok = sc_cond_init(&recorder->cond);
if (!ok) {
goto error_mutex_destroy;
}

ok = sc_cond_init(&recorder->stream_cond);
if (!ok) {
goto error_queue_cond_destroy;
}

assert(video || audio);
recorder->video = video;
recorder->audio = audio;
@@ -698,8 +707,8 @@ sc_recorder_init(struct sc_recorder *recorder, const char *filename,
recorder->video_init = false;
recorder->audio_init = false;

recorder->video_stream_index = -1;
recorder->audio_stream_index = -1;
sc_recorder_stream_init(&recorder->video_stream);
sc_recorder_stream_init(&recorder->audio_stream);

recorder->format = format;

@@ -730,8 +739,6 @@ sc_recorder_init(struct sc_recorder *recorder, const char *filename,

return true;

error_queue_cond_destroy:
sc_cond_destroy(&recorder->queue_cond);
error_mutex_destroy:
sc_mutex_destroy(&recorder->mutex);
error_free_filename:
@@ -756,8 +763,7 @@ void
sc_recorder_stop(struct sc_recorder *recorder) {
sc_mutex_lock(&recorder->mutex);
recorder->stopped = true;
sc_cond_signal(&recorder->queue_cond);
sc_cond_signal(&recorder->stream_cond);
sc_cond_signal(&recorder->cond);
sc_mutex_unlock(&recorder->mutex);
}

@@ -768,8 +774,7 @@ sc_recorder_join(struct sc_recorder *recorder) {

void
sc_recorder_destroy(struct sc_recorder *recorder) {
sc_cond_destroy(&recorder->stream_cond);
sc_cond_destroy(&recorder->queue_cond);
sc_cond_destroy(&recorder->cond);
sc_mutex_destroy(&recorder->mutex);
free(recorder->filename);
}
@@ -14,6 +14,11 @@

struct sc_recorder_queue SC_VECDEQUE(AVPacket *);

struct sc_recorder_stream {
int index;
int64_t last_pts;
};

struct sc_recorder {
struct sc_packet_sink video_packet_sink;
struct sc_packet_sink audio_packet_sink;
@@ -35,19 +40,18 @@ struct sc_recorder {

sc_thread thread;
sc_mutex mutex;
sc_cond queue_cond;
sc_cond cond;
// set on sc_recorder_stop(), packet_sink close or recording failure
bool stopped;
struct sc_recorder_queue video_queue;
struct sc_recorder_queue audio_queue;

// wake up the recorder thread once the video or audio codec is known
sc_cond stream_cond;
bool video_init;
bool audio_init;

int video_stream_index;
int audio_stream_index;
struct sc_recorder_stream video_stream;
struct sc_recorder_stream audio_stream;

const struct sc_recorder_callbacks *cbs;
void *cbs_userdata;
@@ -35,6 +35,7 @@
#include "util/log.h"
#include "util/net.h"
#include "util/rand.h"
#include "util/timeout.h"
#ifdef HAVE_V4L2
# include "v4l2_sink.h"
#endif
@@ -73,6 +74,7 @@ struct scrcpy {
struct sc_hid_mouse mouse_hid;
#endif
};
struct sc_timeout timeout;
};

static inline void
@@ -171,6 +173,9 @@ event_loop(struct scrcpy *s) {
case SC_EVENT_RECORDER_ERROR:
LOGE("Recorder error");
return SCRCPY_EXIT_FAILURE;
case SC_EVENT_TIME_LIMIT_REACHED:
LOGI("Time limit reached");
return SCRCPY_EXIT_SUCCESS;
case SDL_QUIT:
LOGD("User requested to quit");
return SCRCPY_EXIT_SUCCESS;
@@ -280,6 +285,14 @@ sc_server_on_disconnected(struct sc_server *server, void *userdata) {
// event
}

static void
sc_timeout_on_timeout(struct sc_timeout *timeout, void *userdata) {
(void) timeout;
(void) userdata;

PUSH_EVENT(SC_EVENT_TIME_LIMIT_REACHED);
}

// Generate a scrcpy id to differentiate multiple running scrcpy instances
static uint32_t
scrcpy_generate_scid() {
@@ -321,6 +334,8 @@ scrcpy(struct scrcpy_options *options) {
bool controller_initialized = false;
bool controller_started = false;
bool screen_initialized = false;
bool timeout_initialized = false;
bool timeout_started = false;

struct sc_acksync *acksync = NULL;

@@ -364,6 +379,7 @@ scrcpy(struct scrcpy_options *options) {
.power_on = options->power_on,
.list_encoders = options->list_encoders,
.list_displays = options->list_displays,
.kill_adb_on_close = options->kill_adb_on_close,
};

static const struct sc_server_callbacks cbs = {
@@ -742,6 +758,27 @@ aoa_hid_end:
}
}

if (options->time_limit) {
bool ok = sc_timeout_init(&s->timeout);
if (!ok) {
goto end;
}

timeout_initialized = true;

sc_tick deadline = sc_tick_now() + options->time_limit;
static const struct sc_timeout_callbacks cbs = {
.on_timeout = sc_timeout_on_timeout,
};

ok = sc_timeout_start(&s->timeout, deadline, &cbs, NULL);
if (!ok) {
goto end;
}

timeout_started = true;
}

ret = event_loop(s);
LOGD("quit...");

@@ -750,6 +787,10 @@ aoa_hid_end:

sc_screen_hide_window(&s->screen);

end:
if (timeout_started) {
sc_timeout_stop(&s->timeout);
}

// The demuxer is not stopped explicitly, because it will stop by itself on
// end-of-stream
#ifdef HAVE_USB
@@ -785,6 +826,13 @@ end:
sc_server_stop(&s->server);
}

if (timeout_started) {
sc_timeout_join(&s->timeout);
}
if (timeout_initialized) {
sc_timeout_destroy(&s->timeout);
}

// now that the sockets are shutdown, the demuxer and controller are
// interrupted, we can join them
if (video_demuxer_started) {
@@ -794,6 +794,15 @@ sc_server_configure_tcpip_unknown_address(struct sc_server *server,
return sc_server_connect_to_tcpip(server, ip_port);
}

static void
sc_server_kill_adb_if_requested(struct sc_server *server) {
if (server->params.kill_adb_on_close) {
LOGI("Killing adb server...");
unsigned flags = SC_ADB_NO_STDOUT | SC_ADB_NO_STDERR | SC_ADB_NO_LOGERR;
sc_adb_kill_server(&server->intr, flags);
}
}

static int
run_server(void *data) {
struct sc_server *server = data;
@@ -805,7 +814,7 @@ run_server(void *data) {
// is parsed, so it is not output)
bool ok = sc_adb_start_server(&server->intr, 0);
if (!ok) {
LOGE("Could not start adb daemon");
LOGE("Could not start adb server");
goto error_connection_failed;
}

@@ -993,9 +1002,12 @@ run_server(void *data) {

sc_process_close(pid);

sc_server_kill_adb_if_requested(server);

return 0;

error_connection_failed:
sc_server_kill_adb_if_requested(server);
server->cbs->on_connection_failed(server, server->cbs_userdata);
return -1;
}
@@ -58,6 +58,7 @@ struct sc_server_params {
bool power_on;
bool list_encoders;
bool list_displays;
bool kill_adb_on_close;
};

struct sc_server {
@@ -83,7 +83,7 @@ scrcpy_otg(struct scrcpy_options *options) {
#ifdef _WIN32
// On Windows, only one process could open a USB device
// <https://github.com/Genymobile/scrcpy/issues/2773>
LOGI("Killing adb daemon (if any)...");
LOGI("Killing adb server (if any)...");
unsigned flags = SC_ADB_NO_STDOUT | SC_ADB_NO_STDERR | SC_ADB_NO_LOGERR;
// uninterruptible (intr == NULL), but in practice it's very quick
sc_adb_kill_server(NULL, flags);
app/src/util/timeout.c (new file, 77 lines)

@@ -0,0 +1,77 @@
#include "timeout.h"

#include <assert.h>

#include "log.h"

bool
sc_timeout_init(struct sc_timeout *timeout) {
    bool ok = sc_mutex_init(&timeout->mutex);
    if (!ok) {
        return false;
    }

    ok = sc_cond_init(&timeout->cond);
    if (!ok) {
        return false;
    }

    timeout->stopped = false;

    return true;
}

static int
run_timeout(void *data) {
    struct sc_timeout *timeout = data;
    sc_tick deadline = timeout->deadline;

    sc_mutex_lock(&timeout->mutex);
    bool timed_out = false;
    while (!timeout->stopped && !timed_out) {
        timed_out = !sc_cond_timedwait(&timeout->cond, &timeout->mutex,
                                       deadline);
    }
    sc_mutex_unlock(&timeout->mutex);

    timeout->cbs->on_timeout(timeout, timeout->cbs_userdata);

    return 0;
}

bool
sc_timeout_start(struct sc_timeout *timeout, sc_tick deadline,
                 const struct sc_timeout_callbacks *cbs, void *cbs_userdata) {
    bool ok = sc_thread_create(&timeout->thread, run_timeout, "scrcpy-timeout",
                               timeout);
    if (!ok) {
        LOGE("Timeout: could not start thread");
        return false;
    }

    timeout->deadline = deadline;

    assert(cbs && cbs->on_timeout);
    timeout->cbs = cbs;
    timeout->cbs_userdata = cbs_userdata;

    return true;
}

void
sc_timeout_stop(struct sc_timeout *timeout) {
    sc_mutex_lock(&timeout->mutex);
    timeout->stopped = true;
    sc_mutex_unlock(&timeout->mutex);
}

void
sc_timeout_join(struct sc_timeout *timeout) {
    sc_thread_join(&timeout->thread, NULL);
}

void
sc_timeout_destroy(struct sc_timeout *timeout) {
    sc_mutex_destroy(&timeout->mutex);
    sc_cond_destroy(&timeout->cond);
}
app/src/util/timeout.h (new file, 43 lines)

@@ -0,0 +1,43 @@
#ifndef SC_TIMEOUT_H
#define SC_TIMEOUT_H

#include "common.h"

#include <stdbool.h>

#include "thread.h"
#include "tick.h"

struct sc_timeout {
    sc_thread thread;
    sc_tick deadline;

    sc_mutex mutex;
    sc_cond cond;
    bool stopped;

    const struct sc_timeout_callbacks *cbs;
    void *cbs_userdata;
};

struct sc_timeout_callbacks {
    void (*on_timeout)(struct sc_timeout *timeout, void *userdata);
};

bool
sc_timeout_init(struct sc_timeout *timeout);

void
sc_timeout_destroy(struct sc_timeout *timeout);

bool
sc_timeout_start(struct sc_timeout *timeout, sc_tick deadline,
                 const struct sc_timeout_callbacks *cbs, void *cbs_userdata);

void
sc_timeout_stop(struct sc_timeout *timeout);

void
sc_timeout_join(struct sc_timeout *timeout);

#endif
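The new `sc_timeout` helper spawns a thread that waits until a deadline (or until it is stopped) and then invokes a callback. A minimal usage sketch, mirroring the init/start/stop/join/destroy sequence that `scrcpy.c` uses in this diff — the `start_time_limit()` wrapper and its logging callback are illustrative, not part of the change:

```c
// Sketch only: same sequence as scrcpy.c in this diff. scrcpy's real callback
// pushes an SC_EVENT_TIME_LIMIT_REACHED event to the SDL event loop instead of
// logging.
static void
on_timeout(struct sc_timeout *timeout, void *userdata) {
    (void) timeout;
    (void) userdata;
    LOGI("Time limit reached");
}

static bool
start_time_limit(struct sc_timeout *timeout, sc_tick time_limit) {
    if (!sc_timeout_init(timeout)) {
        return false;
    }

    sc_tick deadline = sc_tick_now() + time_limit;
    static const struct sc_timeout_callbacks cbs = {
        .on_timeout = on_timeout,
    };
    if (!sc_timeout_start(timeout, deadline, &cbs, NULL)) {
        sc_timeout_destroy(timeout);
        return false;
    }
    return true;
}

// On shutdown (only if the timeout was started):
//     sc_timeout_stop(timeout);
//     sc_timeout_join(timeout);
//     sc_timeout_destroy(timeout);
```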
@@ -17,5 +17,5 @@ endian = 'little'

[properties]
prebuilt_ffmpeg = 'ffmpeg-6.0-scrcpy-4/win32'
prebuilt_sdl2 = 'SDL2-2.26.4/i686-w64-mingw32'
prebuilt_sdl2 = 'SDL2-2.28.0/i686-w64-mingw32'
prebuilt_libusb = 'libusb-1.0.26/libusb-MinGW-Win32'
@@ -17,5 +17,5 @@ endian = 'little'

[properties]
prebuilt_ffmpeg = 'ffmpeg-6.0-scrcpy-4/win64'
prebuilt_sdl2 = 'SDL2-2.26.4/x86_64-w64-mingw32'
prebuilt_sdl2 = 'SDL2-2.28.0/x86_64-w64-mingw32'
prebuilt_libusb = 'libusb-1.0.26/libusb-MinGW-x64'
@@ -56,7 +56,7 @@ For example, to use the device as a dictaphone and record a capture directly on
the computer:

```
scrcpy --audio-source=mic --no-video --no-audio-playback --record=file.opus
scrcpy --audio-source=mic --no-video --no-playback --record=file.opus
```
doc/develop.md

@@ -9,16 +9,52 @@ This application is composed of two parts:
The client is responsible to push the server to the device and start its
execution.

Once the client and the server are connected to each other, the server initially
sends device information (name and initial screen dimensions), then starts to
send a raw H.264 video stream of the device screen. The client decodes the video
frames, and display them as soon as possible, without buffering, to minimize
latency. The client is not aware of the device rotation (which is handled by the
server), it just knows the dimensions of the video frames.
The client and the server establish communication using separate sockets for
video, audio and controls. Any of them may be disabled (but not all), so
there are 1, 2 or 3 socket(s).

The client captures relevant keyboard and mouse events, that it transmits to the
server, which injects them to the device.
The server initially sends the device name on the first socket (it is used for
the scrcpy window title), then each socket is used for its own purpose. All
reads and writes are performed from a dedicated thread for each socket, both on
the client and on the server.

If video is enabled, then the server sends a raw video stream (H.264 by default)
of the device screen, with some additional headers for each packet. The client
decodes the video frames, and displays them as soon as possible, without
buffering (unless `--display-buffer=delay` is specified) to minimize latency.
The client is not aware of the device rotation (which is handled by the server),
it just knows the dimensions of the video frames it receives.

Similarly, if audio is enabled, then the server sends a raw audio stream (OPUS
by default) of the device audio output (or the microphone if
`--audio-source=mic` is specified), with some additional headers for each
packet. The client decodes the stream, attempts to keep a minimal latency by
maintaining an average buffering. The [blog post][scrcpy2] of the scrcpy v2.0
release gives more details about the audio feature.

If control is enabled, then the client captures relevant keyboard and mouse
events, that it transmits to the server, which injects them to the device. This
is the only socket which is used in both direction: input events are sent from
the client to the device, and when the device clipboard changes, the new content
is sent from the device to the client to support seamless copy-paste.

[scrcpy2]: https://blog.rom1v.com/2023/03/scrcpy-2-0-with-audio/

Note that the client-server roles are expressed at the application level:

- the server _serves_ video and audio streams, and handle requests from the
  client,
- the client _controls_ the device through the server.

However, by default (when `--force-adb-forward` is not set), the roles are
reversed at the network level:

- the client opens a server socket and listen on a port before starting the
  server,
- the server connects to the client.

This role inversion guarantees that the connection will not fail due to race
conditions without polling.


## Server
@@ -32,15 +68,14 @@ The server is a Java application (with a [`public static void main(String...
args)`][main] method), compiled against the Android framework, and executed as
`shell` on the Android device.

[main]: https://github.com/Genymobile/scrcpy/blob/ffe0417228fb78ab45b7ee4e202fc06fc8875bf3/server/src/main/java/com/genymobile/scrcpy/Server.java#L123
[main]: https://github.com/Genymobile/scrcpy/blob/a3cdf1a6b86ea22786e1f7d09b9c202feabc6949/server/src/main/java/com/genymobile/scrcpy/Server.java#L193

To run such a Java application, the classes must be [_dexed_][dex] (typically,
to `classes.dex`). If `my.package.MainClass` is the main class, compiled to
`classes.dex`, pushed to the device in `/data/local/tmp`, then it can be run
with:

    adb shell CLASSPATH=/data/local/tmp/classes.dex \
        app_process / my.package.MainClass
    adb shell CLASSPATH=/data/local/tmp/classes.dex app_process / my.package.MainClass

_The path `/data/local/tmp` is a good candidate to push the server, since it's
readable and writable by `shell`, but not world-writable, so a malicious
@@ -49,7 +84,7 @@ application may not replace the server just before the client executes it._

Instead of a raw _dex_ file, `app_process` accepts a _jar_ containing
`classes.dex` (e.g. an [APK]). For simplicity, and to benefit from the gradle
build system, the server is built to an (unsigned) APK (renamed to
`scrcpy-server`).
`scrcpy-server.jar`).

[dex]: https://en.wikipedia.org/wiki/Dalvik_(software)
[apk]: https://en.wikipedia.org/wiki/Android_application_package
@@ -65,42 +100,77 @@ They can be called using reflection though. The communication with hidden
components is provided by [_wrappers_ classes][wrappers] and [aidl].

[hidden]: https://stackoverflow.com/a/31908373/1987178
[wrappers]: https://github.com/Genymobile/scrcpy/tree/ffe0417228fb78ab45b7ee4e202fc06fc8875bf3/server/src/main/java/com/genymobile/scrcpy/wrappers
[aidl]: https://github.com/Genymobile/scrcpy/tree/ffe0417228fb78ab45b7ee4e202fc06fc8875bf3/server/src/main/aidl/android/view
[wrappers]: https://github.com/Genymobile/scrcpy/tree/master/server/src/main/java/com/genymobile/scrcpy/wrappers
[aidl]: https://github.com/Genymobile/scrcpy/tree/master/server/src/main/aidl


### Threading

The server uses 3 threads:
### Execution

- the **main** thread, encoding and streaming the video to the client;
- the **controller** thread, listening for _control messages_ (typically,
  keyboard and mouse events) from the client;
- the **receiver** thread (managed by the controller), sending _device messages_
  to the clients (currently, it is only used to send the device clipboard
  content).
The server is started by the client basically by executing the following
commands:

Since the video encoding is typically hardware, there would be no benefit in
encoding and streaming in two different threads.
```bash
adb push scrcpy-server /data/local/tmp/scrcpy-server.jar
adb forward tcp:27183 localabstract:scrcpy
adb shell CLASSPATH=/data/local/tmp/scrcpy-server.jar app_process / com.genymobile.scrcpy.Server 2.1
```

The first argument (`2.1` in the example) is the client scrcpy version. The
server fails if the client and the server do not have the exact same version.
The protocol between the client and the server may change from version to
version (see [protocol](#protocol) below), and there is no backward or forward
compatibility (there is no point to use different client and server versions).
This check allows to detect misconfiguration (running an older or newer server
by mistake).

It is followed by any number of arguments, in the form of `key=value` pairs.
Their order is irrelevant. The possible keys and associated value types can be
found in the [server][server-options] and [client][client-options] code.

[server-options]: https://github.com/Genymobile/scrcpy/blob/a3cdf1a6b86ea22786e1f7d09b9c202feabc6949/server/src/main/java/com/genymobile/scrcpy/Options.java#L181
[client-options]: https://github.com/Genymobile/scrcpy/blob/a3cdf1a6b86ea22786e1f7d09b9c202feabc6949/app/src/server.c#L226

For example, if we execute `scrcpy -m1920 --no-audio`, then the server
execution will look like this:

```bash
# scid is a random number to identify different clients running on the same device
adb shell CLASSPATH=/data/local/tmp/scrcpy-server.jar app_process / com.genymobile.scrcpy.Server 2.1 scid=12345678 log_level=info audio=false max_size=1920
```

### Components

When executed, its [`main()`][main] method is executed (on the "main" thread).
It parses the arguments, establishes the connection with the client and starts
the other "components":
- the **video** streamer: it captures the video screen and send encoded video
  packets on the _video_ socket (from the _video_ thread).
- the **audio** streamer: it uses several threads to capture raw packets,
  submits them to encoding and retrieve encoded packets, which it sends on the
  _audio_ socket.
- the **controller**: it receives _control messages_ (typically input events)
  on the _control_ socket from one thread, and sends _device messages_ (e.g. to
  transmit the device clipboard content to the client) on the same _control
  socket_ from another thread. Thus, the _control_ socket is used in both
  directions (contrary to the _video_ and _audio_ sockets).


### Screen video encoding

The encoding is managed by [`ScreenEncoder`].

The video is encoded using the [`MediaCodec`] API. The codec takes its input
from a [surface] associated to the display, and writes the resulting H.264
stream to the provided output stream (the socket connected to the client).
The video is encoded using the [`MediaCodec`] API. The codec encodes the content
of a `Surface` associated to the display, and writes the encoding packets to the
client (on the _video_ socket).

[`ScreenEncoder`]: https://github.com/Genymobile/scrcpy/blob/ffe0417228fb78ab45b7ee4e202fc06fc8875bf3/server/src/main/java/com/genymobile/scrcpy/ScreenEncoder.java
[`ScreenEncoder`]: https://github.com/Genymobile/scrcpy/blob/a3cdf1a6b86ea22786e1f7d09b9c202feabc6949/server/src/main/java/com/genymobile/scrcpy/ScreenEncoder.java
[`MediaCodec`]: https://developer.android.com/reference/android/media/MediaCodec.html
[surface]: https://github.com/Genymobile/scrcpy/blob/ffe0417228fb78ab45b7ee4e202fc06fc8875bf3/server/src/main/java/com/genymobile/scrcpy/ScreenEncoder.java#L68-L69

On device [rotation], the codec, surface and display are reinitialized, and a
new video stream is produced.
On device rotation (or folding), the encoding session is [reset] and restarted.

New frames are produced only when changes occur on the surface. This is good
because it avoids to send unnecessary frames, but there are drawbacks:
New frames are produced only when changes occur on the surface. This avoids to
send unnecessary frames, but by default there might be drawbacks:

- it does not send any frame on start if the device screen does not change,
- after fast motion changes, the last frame may have poor quality.
@@ -108,11 +178,24 @@ because it avoids to send unnecessary frames, but there are drawbacks:
Both problems are [solved][repeat] by the flag
[`KEY_REPEAT_PREVIOUS_FRAME_AFTER`][repeat-flag].

[reset]: https://github.com/Genymobile/scrcpy/blob/a3cdf1a6b86ea22786e1f7d09b9c202feabc6949/server/src/main/java/com/genymobile/scrcpy/ScreenEncoder.java#L179
[rotation]: https://github.com/Genymobile/scrcpy/blob/ffe0417228fb78ab45b7ee4e202fc06fc8875bf3/server/src/main/java/com/genymobile/scrcpy/ScreenEncoder.java#L90
[repeat]: https://github.com/Genymobile/scrcpy/blob/ffe0417228fb78ab45b7ee4e202fc06fc8875bf3/server/src/main/java/com/genymobile/scrcpy/ScreenEncoder.java#L147-L148
[repeat]: https://github.com/Genymobile/scrcpy/blob/a3cdf1a6b86ea22786e1f7d09b9c202feabc6949/server/src/main/java/com/genymobile/scrcpy/ScreenEncoder.java#L246-L247
[repeat-flag]: https://developer.android.com/reference/android/media/MediaFormat.html#KEY_REPEAT_PREVIOUS_FRAME_AFTER


### Audio encoding

Similarly, the audio is [captured] using an [`AudioRecord`], and [encoded] using
the [`MediaCodec`] asynchronous API.

More details are available on the [blog post][scrcpy2] introducing the audio feature.

[captured]: https://github.com/Genymobile/scrcpy/blob/a3cdf1a6b86ea22786e1f7d09b9c202feabc6949/server/src/main/java/com/genymobile/scrcpy/AudioCapture.java
[encoded]: https://github.com/Genymobile/scrcpy/blob/a3cdf1a6b86ea22786e1f7d09b9c202feabc6949/server/src/main/java/com/genymobile/scrcpy/AudioEncoder.java
[`AudioRecord`]: https://developer.android.com/reference/android/media/AudioRecord


### Input events injection

_Control messages_ are received from the client by the [`Controller`] (run in a
@@ -124,13 +207,13 @@ separate thread). There are several types of input events:
- other commands (e.g. to switch the screen on or to copy the clipboard).

Some of them need to inject input events to the system. To do so, they use the
_hidden_ method [`InputManager.injectInputEvent`] (exposed by our
_hidden_ method [`InputManager.injectInputEvent()`] (exposed by the
[`InputManager` wrapper][inject-wrapper]).

[`Controller`]: https://github.com/Genymobile/scrcpy/blob/ffe0417228fb78ab45b7ee4e202fc06fc8875bf3/server/src/main/java/com/genymobile/scrcpy/Controller.java#L81
[`Controller`]: https://github.com/Genymobile/scrcpy/blob/a3cdf1a6b86ea22786e1f7d09b9c202feabc6949/server/src/main/java/com/genymobile/scrcpy/Controller.java
[`KeyEvent`]: https://developer.android.com/reference/android/view/KeyEvent.html
[`MotionEvent`]: https://developer.android.com/reference/android/view/MotionEvent.html
[`InputManager.injectInputEvent`]: https://android.googlesource.com/platform/frameworks/base/+/oreo-release/core/java/android/hardware/input/InputManager.java#857
[`InputManager.injectInputEvent()`]: https://github.com/Genymobile/scrcpy/blob/a3cdf1a6b86ea22786e1f7d09b9c202feabc6949/server/src/main/java/com/genymobile/scrcpy/wrappers/InputManager.java#L34
[inject-wrapper]: https://github.com/Genymobile/scrcpy/blob/ffe0417228fb78ab45b7ee4e202fc06fc8875bf3/server/src/main/java/com/genymobile/scrcpy/wrappers/InputManager.java#L27

@@ -140,126 +223,222 @@ _hidden_ method [`InputManager.injectInputEvent`] (exposed by our
The client relies on [SDL], which provides cross-platform API for UI, input
events, threading, etc.

The video stream is decoded by [libav] (FFmpeg).
The video and audio streams are decoded by [FFmpeg].

[SDL]: https://www.libsdl.org
[libav]: https://www.libav.org/
[ffmpeg]: https://ffmpeg.org/


### Initialization

On startup, in addition to _libav_ and _SDL_ initialization, the client must
push and start the server on the device, and open two sockets (one for the video
stream, one for control) so that they may communicate.
The client parses the command line arguments, then [runs one of two code
paths][run]:
- scrcpy in "normal" mode ([`scrcpy.c`])
- scrcpy in [OTG mode](hid-otg.md) ([`scrcpy_otg.c`])

Note that the client-server roles are expressed at the application level:
[run]: https://github.com/Genymobile/scrcpy/blob/a3cdf1a6b86ea22786e1f7d09b9c202feabc6949/app/src/main.c#L81-L82
[`scrcpy.c`]: https://github.com/Genymobile/scrcpy/blob/a3cdf1a6b86ea22786e1f7d09b9c202feabc6949/app/src/scrcpy.c#L292-L293
[`scrcpy_otg.c`]: https://github.com/Genymobile/scrcpy/blob/a3cdf1a6b86ea22786e1f7d09b9c202feabc6949/app/src/usb/scrcpy_otg.c#L51-L52

- the server _serves_ video stream and handle requests from the client,
- the client _controls_ the device through the server.
In the remaining of this document, we assume that the "normal" mode is used
(read the code for the OTG mode).

However, the roles are reversed at the network level:

- the client opens a server socket and listen on a port before starting the
  server,
- the server connects to the client.

This role inversion guarantees that the connection will not fail due to race
conditions, and avoids polling.

_(Note that over TCP/IP, the roles are not reversed, due to a bug in `adb
reverse`. See commit [1038bad] and [issue #5].)_

Once the server is connected, it sends the device information (name and initial
screen dimensions). Thus, the client may init the window and renderer, before
the first frame is available.

To minimize startup time, SDL initialization is performed while listening for
the connection from the server (see commit [90a46b4]).

[1038bad]: https://github.com/Genymobile/scrcpy/commit/1038bad3850f18717a048a4d5c0f8110e54ee172
[issue #5]: https://github.com/Genymobile/scrcpy/issues/5
[90a46b4]: https://github.com/Genymobile/scrcpy/commit/90a46b4c45637d083e877020d85ade52a9a5fa8e
On startup, the client:
- opens the _video_, _audio_ and _control_ sockets;
- pushes and starts the server on the device;
- initializes its components (demuxers, decoders, recorder…).


### Threading
### Video and audio streams

The client uses 4 threads:

- the **main** thread, executing the SDL event loop,
- the **stream** thread, receiving the video and used for decoding and
  recording,
- the **controller** thread, sending _control messages_ to the server,
- the **receiver** thread (managed by the controller), receiving _device
  messages_ from the server.

In addition, another thread can be started if necessary to handle APK
installation or file push requests (via drag&drop on the main window) or to
print the framerate regularly in the console.


### Stream

The video [stream] is received from the socket (connected to the server on the
device) in a separate thread.

If a [decoder] is present (i.e. `--no-display` is not set), then it uses _libav_
to decode the H.264 stream from the socket, and notifies the main thread when a
new frame is available.

There are two [frames][video_buffer] simultaneously in memory:
- the **decoding** frame, written by the decoder from the decoder thread,
- the **rendering** frame, rendered in a texture from the main thread.

When a new decoded frame is available, the decoder _swaps_ the decoding and
rendering frame (with proper synchronization). Thus, it immediately starts
to decode a new frame while the main thread renders the last one.

If a [recorder] is present (i.e. `--record` is enabled), then it muxes the raw
H.264 packet to the output video file.

[stream]: https://github.com/Genymobile/scrcpy/blob/ffe0417228fb78ab45b7ee4e202fc06fc8875bf3/app/src/stream.h
[decoder]: https://github.com/Genymobile/scrcpy/blob/ffe0417228fb78ab45b7ee4e202fc06fc8875bf3/app/src/decoder.h
[video_buffer]: https://github.com/Genymobile/scrcpy/blob/ffe0417228fb78ab45b7ee4e202fc06fc8875bf3/app/src/video_buffer.h
[recorder]: https://github.com/Genymobile/scrcpy/blob/ffe0417228fb78ab45b7ee4e202fc06fc8875bf3/app/src/recorder.h
Depending on the arguments passed to `scrcpy`, several components may be used.
Here is an overview of the video and audio components:

```
                                +----------+      +----------+
                           ---> | decoder  | ---> |  screen  |
              +---------+ /     +----------+      +----------+
 socket ---> | stream  | ----
              +---------+ \     +----------+
                           ---> | recorder |
                                +----------+

                                                V4L2 sink
                                               /
                                       decoder
                                      /        \
        VIDEO -------------> demuxer            display
                                      \
                                       recorder
                                      /
        AUDIO -------------> demuxer
                                      \
                                       decoder --- audio player
```

The _demuxer_ is responsible to extract video and audio packets (read some
header, split the video stream into packets at correct boundaries, etc.).

The demuxed packets may be sent to a _decoder_ (one per stream, to produce
frames) and to a recorder (receiving both video and audio stream to record a
single file). The packets are encoded on the device (by `MediaCodec`), but when
recording, they are _muxed_ (asynchronously) into a container (MKV or MP4) on
the client side.

Video frames are sent to the screen/display to be rendered in the scrcpy window.
They may also be sent to a [V4L2 sink](v4l2.md).

Audio "frames" (an array of decoded samples) are sent to the audio player.


### Controller

The [controller] is responsible to send _control messages_ to the device. It
The _controller_ is responsible to send _control messages_ to the device. It
runs in a separate thread, to avoid I/O on the main thread.

On SDL event, received on the main thread, the [input manager][inputmanager]
creates appropriate [_control messages_][controlmsg]. It is responsible to
convert SDL events to Android events (using [convert]). It pushes the _control
messages_ to a queue hold by the controller. On its own thread, the controller
takes messages from the queue, that it serializes and sends to the client.

[controller]: https://github.com/Genymobile/scrcpy/blob/ffe0417228fb78ab45b7ee4e202fc06fc8875bf3/app/src/controller.h
[controlmsg]: https://github.com/Genymobile/scrcpy/blob/ffe0417228fb78ab45b7ee4e202fc06fc8875bf3/app/src/control_msg.h
[inputmanager]: https://github.com/Genymobile/scrcpy/blob/ffe0417228fb78ab45b7ee4e202fc06fc8875bf3/app/src/input_manager.h
[convert]: https://github.com/Genymobile/scrcpy/blob/ffe0417228fb78ab45b7ee4e202fc06fc8875bf3/app/src/convert.h
On SDL event, received on the main thread, the _input manager_ creates
appropriate _control messages_. It is responsible to convert SDL events to
Android events. It then pushes the _control messages_ to a queue hold by the
controller. On its own thread, the controller takes messages from the queue,
that it serializes and sends to the client.


### UI and event loop
## Protocol

Initialization, input events and rendering are all [managed][scrcpy] in the main
thread.
The protocol between the client and the server must be considered _internal_: it
may (and will) change at any time for any reason. Everything may change (the
number of sockets, the order in which the sockets must be opened, the data
format on the wire…) from version to version. A client must always be run with a
matching server version.

Events are handled in the [event loop], which either updates the [screen] or
delegates to the [input manager][inputmanager].
This section documents the current protocol in scrcpy v2.1.

[scrcpy]: https://github.com/Genymobile/scrcpy/blob/ffe0417228fb78ab45b7ee4e202fc06fc8875bf3/app/src/scrcpy.c
[event loop]: https://github.com/Genymobile/scrcpy/blob/ffe0417228fb78ab45b7ee4e202fc06fc8875bf3/app/src/scrcpy.c#L201
[screen]: https://github.com/Genymobile/scrcpy/blob/ffe0417228fb78ab45b7ee4e202fc06fc8875bf3/app/src/screen.h
### Connection

Firstly, the client sets up an adb tunnel:

```bash
# By default, a reverse redirection: the computer listens, the device connects
adb reverse localabstract:scrcpy_<SCID> tcp:27183

# As a fallback (or if --force-adb forward is set), a forward redirection:
# the device listens, the computer connects
adb forward tcp:27183 localabstract:scrcpy_<SCID>
```

(`<SCID>` is a 31-bit random number, so that it does not fail when several
scrcpy instances start "at the same time" for the same device.)

Then, up to 3 sockets are opened, in that order:
- a _video_ socket
- an _audio_ socket
- a _control_ socket

Each one may be disabled (respectively by `--no-video`, `--no-audio` and
`--no-control`, directly or indirectly). For example, if `--no-audio` is set,
then the _video_ socket is opened first, then the _control_ socket.

On the _first_ socket opened (whichever it is), if the tunnel is _forward_, then
a [dummy byte] is sent from the device to the client. This allows to detect a
connection error (the client connection does not fail as long as there is an adb
forward redirection, even if nothing is listening on the device side).

Still on this _first_ socket, the device sends some [metadata][device meta] to
the client (currently only the device name, used as the window title, but there
might be other fields in the future).

[dummy byte]: https://github.com/Genymobile/scrcpy/blob/a3cdf1a6b86ea22786e1f7d09b9c202feabc6949/server/src/main/java/com/genymobile/scrcpy/DesktopConnection.java#L93
[device meta]: https://github.com/Genymobile/scrcpy/blob/a3cdf1a6b86ea22786e1f7d09b9c202feabc6949/server/src/main/java/com/genymobile/scrcpy/DesktopConnection.java#L151

You can read the [client][client-connection] and [server][server-connection]
code for more details.

[client-connection]: https://github.com/Genymobile/scrcpy/blob/a3cdf1a6b86ea22786e1f7d09b9c202feabc6949/app/src/server.c#L465-L466
[server-connection]: https://github.com/Genymobile/scrcpy/blob/a3cdf1a6b86ea22786e1f7d09b9c202feabc6949/server/src/main/java/com/genymobile/scrcpy/DesktopConnection.java#L63

Then each socket is used for its intended purpose.
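To make the dummy-byte mechanism concrete, here is a hedged sketch of how a custom client could check that the server is really listening behind a _forward_ tunnel (POSIX sockets; the port 27183 and the single-byte read come from the description above, everything else is illustrative):

```c
#include <stdio.h>
#include <unistd.h>
#include <arpa/inet.h>
#include <sys/socket.h>
#include <sys/types.h>

// Connect to the adb-forwarded port and read the dummy byte sent by the
// device on forward tunnels. The connect() itself succeeds as soon as the
// "adb forward" redirection exists, so only this read tells us whether a
// server is actually listening on the device side.
int main(void) {
    int fd = socket(AF_INET, SOCK_STREAM, 0);
    if (fd < 0) {
        perror("socket");
        return 1;
    }

    struct sockaddr_in addr = {0};
    addr.sin_family = AF_INET;
    addr.sin_port = htons(27183);
    inet_pton(AF_INET, "127.0.0.1", &addr.sin_addr);

    if (connect(fd, (struct sockaddr *) &addr, sizeof(addr)) < 0) {
        perror("connect");
        close(fd);
        return 1;
    }

    unsigned char dummy;
    ssize_t r = recv(fd, &dummy, 1, 0);
    if (r <= 0) {
        // Connection closed or reset: nothing was listening on the device
        fprintf(stderr, "scrcpy server not reachable\n");
        close(fd);
        return 1;
    }

    printf("tunnel established (dummy byte received)\n");
    close(fd);
    return 0;
}
```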
### Video and audio

On the _video_ and _audio_ sockets, the device first sends some [codec
metadata]:
- On the _video_ socket, 12 bytes:
  - the codec id (`u32`) (H264, H265 or AV1)
  - the initial video width (`u32`)
  - the initial video height (`u32`)
- On the _audio_ socket, 4 bytes:
  - the codec id (`u32`) (OPUS, AAC or RAW)

[codec metadata]: https://github.com/Genymobile/scrcpy/blob/a3cdf1a6b86ea22786e1f7d09b9c202feabc6949/server/src/main/java/com/genymobile/scrcpy/Streamer.java#L33-L51

Then each packet produced by `MediaCodec` is sent, prefixed by a 12-byte [frame
header]:
- config packet flag (`u1`)
- key frame flag (`u1`)
- PTS (`u62`)
- packet size (`u32`)

Here is a schema describing the frame header:

```
    [. . . . . . . .|. . . .]. . . . . . . . . . . . . . . ...
     <-------------> <-----> <-----------------------------...
           PTS        packet        raw packet
                        size
    <--------------------->
          frame header

The most significant bits of the PTS are used for packet flags:

   byte 7   byte 6   byte 5   byte 4   byte 3   byte 2   byte 1   byte 0
  CK...... ........ ........ ........ ........ ........ ........ ........
  ^^<------------------------------------------------------------------->
  ||                                  PTS
  | `- key frame
   `-- config packet
```

[frame header]: https://github.com/Genymobile/scrcpy/blob/a3cdf1a6b86ea22786e1f7d09b9c202feabc6949/server/src/main/java/com/genymobile/scrcpy/Streamer.java#L83
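As an illustration of the layout above, here is a small sketch decoding a 12-byte frame header; the field widths come from the list above, and big-endian byte order is assumed (the text does not state it explicitly). The type and helper names are made up for the example:

```c
#include <stdbool.h>
#include <stdint.h>

// Decoded view of the 12-byte frame header described above.
struct frame_header {
    bool config;     // C flag: config packet (codec-specific data, no PTS)
    bool key_frame;  // K flag
    uint64_t pts;    // 62-bit PTS
    uint32_t len;    // raw packet size
};

static uint64_t
read_u64_be(const uint8_t *p) {
    uint64_t v = 0;
    for (int i = 0; i < 8; ++i) {
        v = (v << 8) | p[i];  // first byte on the wire is the most significant
    }
    return v;
}

static uint32_t
read_u32_be(const uint8_t *p) {
    return ((uint32_t) p[0] << 24) | ((uint32_t) p[1] << 16)
         | ((uint32_t) p[2] << 8) | p[3];
}

static struct frame_header
parse_frame_header(const uint8_t header[12]) {
    uint64_t pts_and_flags = read_u64_be(header);
    struct frame_header h = {
        .config = (pts_and_flags & (UINT64_C(1) << 63)) != 0,    // C bit
        .key_frame = (pts_and_flags & (UINT64_C(1) << 62)) != 0, // K bit
        .pts = pts_and_flags & ((UINT64_C(1) << 62) - 1),        // low 62 bits
        .len = read_u32_be(header + 8),
    };
    return h;
}
```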
||||
|
||||
### Controls
|
||||
|
||||
Controls messages are sent via a custom binary protocol.
|
||||
|
||||
The only documentation for this protocol is the set of unit tests on both sides:
|
||||
- `ControlMessage` (from client to device): [serialization](https://github.com/Genymobile/scrcpy/blob/master/app/tests/test_control_msg_serialize.c) | [deserialization](https://github.com/Genymobile/scrcpy/blob/master/server/src/test/java/com/genymobile/scrcpy/ControlMessageReaderTest.java)
|
||||
- `DeviceMessage` (from device to client) [serialization](https://github.com/Genymobile/scrcpy/blob/master/server/src/test/java/com/genymobile/scrcpy/DeviceMessageWriterTest.java) | [deserialization](https://github.com/Genymobile/scrcpy/blob/master/app/tests/test_device_msg_deserialize.c)
|
||||
|
||||
|
||||

## Standalone server

Although the server is designed to work for the scrcpy client, it can be used
with any client which uses the same protocol.

For simplicity, some [server-specific options] have been added to produce raw
streams easily:
 - `send_device_meta=false`: disable the device metadata (in practice, the
   device name) sent on the _first_ socket
 - `send_frame_meta=false`: disable the 12-byte header for each packet
 - `send_dummy_byte=false`: disable the dummy byte sent on forward connections
 - `send_codec_meta=false`: disable the codec information (and initial device
   size for video)
 - `raw_stream=true`: disable all the above

[server-specific options]: https://github.com/Genymobile/scrcpy/blob/a3cdf1a6b86ea22786e1f7d09b9c202feabc6949/server/src/main/java/com/genymobile/scrcpy/Options.java#L309-L329

Concretely, here is how to expose a raw H.264 stream on a TCP socket:

```bash
adb push scrcpy-server-v2.1 /data/local/tmp/scrcpy-server-manual.jar
adb forward tcp:1234 localabstract:scrcpy
adb shell CLASSPATH=/data/local/tmp/scrcpy-server-manual.jar \
    app_process / com.genymobile.scrcpy.Server 2.1 \
    tunnel_forward=true audio=false control=false cleanup=false \
    raw_stream=true max_size=1920
```

As soon as a client connects over TCP on port 1234, the device will start
streaming the video. For example, VLC can play the video (although you will
experience a very high latency, more details [here][vlc-0latency]):

```
vlc -Idummy --demux=h264 --network-caching=0 tcp://localhost:1234
```

[vlc-0latency]: https://code.videolan.org/rom1v/vlc/-/merge_requests/20
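
Instead of VLC, a few lines of code are enough to consume the stream. The sketch below (hypothetical class name, plain Java sockets) simply dumps the raw H.264 stream produced by the command above to a file:

```java
import java.io.FileOutputStream;
import java.io.InputStream;
import java.net.Socket;

// Hypothetical example: dump the raw stream exposed on tcp:1234 to a file.
public final class RawStreamDumper {
    public static void main(String[] args) throws Exception {
        try (Socket socket = new Socket("localhost", 1234);
             FileOutputStream out = new FileOutputStream("video.h264")) {
            InputStream in = socket.getInputStream();
            byte[] buffer = new byte[64 * 1024];
            int read;
            // With raw_stream=true there are no headers: everything received is encoded video.
            while ((read = in.read(buffer)) != -1) {
                out.write(buffer, 0, read);
            }
        }
    }
}
```

The resulting file is a raw H.264 elementary stream, which can then be played or remuxed by any tool that accepts that format.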

## Hack

@ -17,24 +17,19 @@ To record only the audio:

```bash
scrcpy --no-video --record=file.opus
scrcpy --no-video --audio-codec=aac --record-file=file.aac
scrcpy --no-video --audio-codec=aac --record=file.aac
# .m4a/.mp4 and .mka/.mkv are also supported for both opus and aac
```

To disable playback while recording:

```bash
scrcpy --no-playback --record=file.mp4
scrcpy -Nr file.mkv
# interrupt recording with Ctrl+C
```

Timestamps are captured on the device, so [packet delay variation] does not
impact the recorded file, which is always clean (provided you use `--record`,
of course, and do not capture the scrcpy window and audio output on the
computer).

[packet delay variation]: https://en.wikipedia.org/wiki/Packet_delay_variation

## Format

The video and audio streams are encoded on the device, but are muxed on the
client side. Two formats (containers) are supported:
 - Matroska (`.mkv`)

@ -48,3 +43,36 @@ needs not end with `.mkv` or `.mp4`):

```
scrcpy --record=file --record-format=mkv
```

## No playback

To disable playback while recording:

```bash
scrcpy --no-playback --record=file.mp4
scrcpy -Nr file.mkv
# interrupt recording with Ctrl+C
```

It is also possible to disable video and audio playback separately:

```bash
# Record both video and audio, but only play video
scrcpy --record=file.mkv --no-audio-playback
```

## Time limit

To limit the recording time:

```bash
scrcpy --record=file.mkv --time-limit=20  # in seconds
```

The `--time-limit` option is not limited to recording: it also impacts simple
mirroring:

```
scrcpy --time-limit=20
```

@ -101,7 +101,7 @@ dist-win32: build-server build-win32
cp app/prebuilt-deps/data/platform-tools-34.0.1/adb.exe "$(DIST)/$(WIN32_TARGET_DIR)/"
cp app/prebuilt-deps/data/platform-tools-34.0.1/AdbWinApi.dll "$(DIST)/$(WIN32_TARGET_DIR)/"
cp app/prebuilt-deps/data/platform-tools-34.0.1/AdbWinUsbApi.dll "$(DIST)/$(WIN32_TARGET_DIR)/"
cp app/prebuilt-deps/data/SDL2-2.26.4/i686-w64-mingw32/bin/SDL2.dll "$(DIST)/$(WIN32_TARGET_DIR)/"
cp app/prebuilt-deps/data/SDL2-2.28.0/i686-w64-mingw32/bin/SDL2.dll "$(DIST)/$(WIN32_TARGET_DIR)/"
cp app/prebuilt-deps/data/libusb-1.0.26/libusb-MinGW-Win32/bin/msys-usb-1.0.dll "$(DIST)/$(WIN32_TARGET_DIR)/"

dist-win64: build-server build-win64
@ -119,7 +119,7 @@ dist-win64: build-server build-win64
cp app/prebuilt-deps/data/platform-tools-34.0.1/adb.exe "$(DIST)/$(WIN64_TARGET_DIR)/"
cp app/prebuilt-deps/data/platform-tools-34.0.1/AdbWinApi.dll "$(DIST)/$(WIN64_TARGET_DIR)/"
cp app/prebuilt-deps/data/platform-tools-34.0.1/AdbWinUsbApi.dll "$(DIST)/$(WIN64_TARGET_DIR)/"
cp app/prebuilt-deps/data/SDL2-2.26.4/x86_64-w64-mingw32/bin/SDL2.dll "$(DIST)/$(WIN64_TARGET_DIR)/"
cp app/prebuilt-deps/data/SDL2-2.28.0/x86_64-w64-mingw32/bin/SDL2.dll "$(DIST)/$(WIN64_TARGET_DIR)/"
cp app/prebuilt-deps/data/libusb-1.0.26/libusb-MinGW-x64/bin/msys-usb-1.0.dll "$(DIST)/$(WIN64_TARGET_DIR)/"

zip-win32: dist-win32

@ -134,7 +134,7 @@ public final class AudioEncoder implements AsyncProcessor {
Ln.d("Audio encoder stopped");
listener.onTerminated(fatalError);
}
});
}, "audio-encoder");
thread.start();
}

@ -183,7 +183,7 @@ public final class AudioEncoder implements AsyncProcessor {
Codec codec = streamer.getCodec();
mediaCodec = createMediaCodec(codec, encoderName);

mediaCodecThread = new HandlerThread("AudioEncoder");
mediaCodecThread = new HandlerThread("media-codec");
mediaCodecThread.start();

MediaFormat format = createFormat(codec.getMimeType(), bitRate, codecOptions);
@ -201,7 +201,7 @@ public final class AudioEncoder implements AsyncProcessor {
} finally {
end();
}
});
}, "audio-in");

outputThread = new Thread(() -> {
try {
@ -216,7 +216,7 @@ public final class AudioEncoder implements AsyncProcessor {
} finally {
end();
}
});
}, "audio-out");

mediaCodec.start();
mediaCodecStarted = true;

@ -69,7 +69,7 @@ public final class AudioRawRecorder implements AsyncProcessor {
Ln.d("Audio recorder stopped");
listener.onTerminated(fatalError);
}
});
}, "audio-raw");
thread.start();
}

@ -95,7 +95,7 @@ public class Controller implements AsyncProcessor {
Ln.d("Controller stopped");
listener.onTerminated(true);
}
});
}, "control-recv");
thread.start();
sender.start();
}

@ -60,7 +60,7 @@ public final class DeviceMessageSender {
} finally {
Ln.d("Device message sender stopped");
}
});
}, "control-send");
thread.start();
}

@ -2,11 +2,11 @@ package com.genymobile.scrcpy;

import android.annotation.TargetApi;
import android.content.AttributionSource;
import android.content.ContextWrapper;
import android.content.MutableContextWrapper;
import android.os.Build;
import android.os.Process;

public final class FakeContext extends ContextWrapper {
public final class FakeContext extends MutableContextWrapper {

public static final String PACKAGE_NAME = "com.android.shell";
public static final int ROOT_UID = 0; // Like android.os.Process.ROOT_UID, but before API 29

@ -318,9 +318,9 @@ public class Options {
case "send_codec_meta":
options.sendCodecMeta = Boolean.parseBoolean(value);
break;
case "raw_video_stream":
boolean rawVideoStream = Boolean.parseBoolean(value);
if (rawVideoStream) {
case "raw_stream":
boolean rawStream = Boolean.parseBoolean(value);
if (rawStream) {
options.sendDeviceMeta = false;
options.sendFrameMeta = false;
options.sendDummyByte = false;

@ -299,7 +299,7 @@ public class ScreenEncoder implements Device.RotationListener, Device.FoldListen
Ln.d("Screen streaming stopped");
listener.onTerminated(true);
}
});
}, "video");
thread.start();
}

@ -87,7 +87,7 @@ public final class Server {
}

private static void scrcpy(Options options) throws IOException, ConfigurationException {
Ln.i("Device: " + Build.MANUFACTURER + " " + Build.MODEL + " (Android " + Build.VERSION.RELEASE + ")");
Ln.i("Device: [" + Build.MANUFACTURER + "] " + Build.BRAND + " " + Build.MODEL + " (Android " + Build.VERSION.RELEASE + ")");
final Device device = new Device(options);

Thread initThread = startInitThread(options);

@ -99,26 +99,7 @@ public final class Server {
boolean audio = options.getAudio();
boolean sendDummyByte = options.getSendDummyByte();

Workarounds.prepareMainLooper();

// Workarounds must be applied for Meizu phones:
// - <https://github.com/Genymobile/scrcpy/issues/240>
// - <https://github.com/Genymobile/scrcpy/issues/365>
// - <https://github.com/Genymobile/scrcpy/issues/2656>
//
// But only apply when strictly necessary, since workarounds can cause other issues:
// - <https://github.com/Genymobile/scrcpy/issues/940>
// - <https://github.com/Genymobile/scrcpy/issues/994>
if (Build.BRAND.equalsIgnoreCase("meizu")) {
Workarounds.fillAppInfo();
}

// Before Android 11, audio is not supported.
// Since Android 12, we can properly set a context on the AudioRecord.
// Only on Android 11 we must fill the application context for the AudioRecord to work.
if (audio && Build.VERSION.SDK_INT == Build.VERSION_CODES.R) {
Workarounds.fillAppContext();
}
Workarounds.apply(audio);

List<AsyncProcessor> asyncProcessors = new ArrayList<>();

@ -137,8 +118,7 @@ public final class Server {
if (audio) {
AudioCodec audioCodec = options.getAudioCodec();
AudioCapture audioCapture = new AudioCapture(options.getAudioSource());
Streamer audioStreamer = new Streamer(connection.getAudioFd(), audioCodec, options.getSendCodecMeta(),
options.getSendFrameMeta());
Streamer audioStreamer = new Streamer(connection.getAudioFd(), audioCodec, options.getSendCodecMeta(), options.getSendFrameMeta());
AsyncProcessor audioRecorder;
if (audioCodec == AudioCodec.RAW) {
audioRecorder = new AudioRawRecorder(audioCapture, audioStreamer);

@ -185,7 +165,7 @@ public final class Server {
}

private static Thread startInitThread(final Options options) {
Thread thread = new Thread(() -> initAndCleanUp(options));
Thread thread = new Thread(() -> initAndCleanUp(options), "init-cleanup");
thread.start();
return thread;
}

@ -4,6 +4,7 @@ import android.annotation.SuppressLint;
import android.annotation.TargetApi;
import android.app.Application;
import android.content.AttributionSource;
import android.content.Context;
import android.content.ContextWrapper;
import android.content.pm.ApplicationInfo;
import android.media.AudioAttributes;

@ -27,8 +28,56 @@ public final class Workarounds {
// not instantiable
}

public static void apply(boolean audio) {
Workarounds.prepareMainLooper();

boolean mustFillAppInfo = false;
boolean mustFillBaseContext = false;
boolean mustFillAppContext = false;

if (Build.BRAND.equalsIgnoreCase("meizu")) {
// Workarounds must be applied for Meizu phones:
// - <https://github.com/Genymobile/scrcpy/issues/240>
// - <https://github.com/Genymobile/scrcpy/issues/365>
// - <https://github.com/Genymobile/scrcpy/issues/2656>
//
// But only apply when strictly necessary, since workarounds can cause other issues:
// - <https://github.com/Genymobile/scrcpy/issues/940>
// - <https://github.com/Genymobile/scrcpy/issues/994>
mustFillAppInfo = true;
} else if (Build.BRAND.equalsIgnoreCase("honor")) {
// More workarounds must be applied for Honor devices:
// - <https://github.com/Genymobile/scrcpy/issues/4015>
//
// The system context must not be set for all devices, because it would cause other problems:
// - <https://github.com/Genymobile/scrcpy/issues/4015#issuecomment-1595382142>
// - <https://github.com/Genymobile/scrcpy/issues/3805#issuecomment-1596148031>
mustFillAppInfo = true;
mustFillBaseContext = true;
mustFillAppContext = true;
}

if (audio && Build.VERSION.SDK_INT == Build.VERSION_CODES.R) {
// Before Android 11, audio is not supported.
// Since Android 12, we can properly set a context on the AudioRecord.
// Only on Android 11 we must fill the application context for the AudioRecord to work.
mustFillAppContext = true;
}

if (mustFillAppInfo) {
Workarounds.fillAppInfo();
}
if (mustFillBaseContext) {
Workarounds.fillBaseContext();
}
if (mustFillAppContext) {
Workarounds.fillAppContext();
}
}

@SuppressWarnings("deprecation")
public static void prepareMainLooper() {
private static void prepareMainLooper() {
// Some devices internally create a Handler when creating an input Surface, causing an exception:
// "Can't create handler inside thread that has not called Looper.prepare()"
// <https://github.com/Genymobile/scrcpy/issues/240>

@ -57,7 +106,7 @@ public final class Workarounds {
}

@SuppressLint("PrivateApi,DiscouragedPrivateApi")
public static void fillAppInfo() {
private static void fillAppInfo() {
try {
fillActivityThread();

@ -86,7 +135,7 @@ public final class Workarounds {
}

@SuppressLint("PrivateApi,DiscouragedPrivateApi")
public static void fillAppContext() {
private static void fillAppContext() {
try {
fillActivityThread();

@ -105,8 +154,21 @@ public final class Workarounds {
}
}

public static void fillBaseContext() {
try {
fillActivityThread();

Method getSystemContextMethod = activityThreadClass.getDeclaredMethod("getSystemContext");
Context context = (Context) getSystemContextMethod.invoke(activityThread);
FakeContext.get().setBaseContext(context);
} catch (Throwable throwable) {
// this is a workaround, so failing is not an error
Ln.d("Could not fill base context: " + throwable.getMessage());
}
}

@TargetApi(Build.VERSION_CODES.R)
@SuppressLint({"WrongConstant", "MissingPermission", "BlockedPrivateApi", "SoonBlockedPrivateApi"})
@SuppressLint("WrongConstant,MissingPermission,BlockedPrivateApi,SoonBlockedPrivateApi,DiscouragedPrivateApi")
public static AudioRecord createAudioRecord(int source, int sampleRate, int channelConfig, int channels, int channelMask, int encoding) {
// Vivo (and maybe some other third-party ROMs) modified `AudioRecord`'s constructor, requiring `Context`s from real App environment.
//

@ -14,13 +14,13 @@ public final class InputManager {
public static final int INJECT_INPUT_EVENT_MODE_WAIT_FOR_RESULT = 1;
public static final int INJECT_INPUT_EVENT_MODE_WAIT_FOR_FINISH = 2;

private final android.hardware.input.InputManager manager;
private final Object manager;
private Method injectInputEventMethod;

private static Method setDisplayIdMethod;
private static Method setActionButtonMethod;

public InputManager(android.hardware.input.InputManager manager) {
public InputManager(Object manager) {
this.manager = manager;
}

@ -62,11 +62,21 @@ public final class ServiceManager {
return displayManager;
}

public static Class<?> getInputManagerClass() {
try {
// Parts of the InputManager class have been moved to a new InputManagerGlobal class in Android 14 preview
return Class.forName("android.hardware.input.InputManagerGlobal");
} catch (ClassNotFoundException e) {
return android.hardware.input.InputManager.class;
}
}

public static InputManager getInputManager() {
if (inputManager == null) {
try {
Method getInstanceMethod = android.hardware.input.InputManager.class.getDeclaredMethod("getInstance");
android.hardware.input.InputManager im = (android.hardware.input.InputManager) getInstanceMethod.invoke(null);
Class<?> inputManagerClass = getInputManagerClass();
Method getInstanceMethod = inputManagerClass.getDeclaredMethod("getInstance");
Object im = getInstanceMethod.invoke(null);
inputManager = new InputManager(im);
} catch (NoSuchMethodException | IllegalAccessException | InvocationTargetException e) {
throw new AssertionError(e);

@ -12,7 +12,6 @@ import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;


public class ControlMessageReaderTest {

@Test