Compare commits

...

9 Commits

Author SHA1 Message Date
e3942b9a78 Fail fast for camera mirroring on Android 11 and older
Co-authored-by: Romain Vimont <rom@rom1v.com>
Signed-off-by: Romain Vimont <rom@rom1v.com>
2023-10-28 00:45:52 +02:00
c0e1a4152f Support camera size selection using -m and --camera-ar
In addition to --camera-size to specify an explicit size, make it
possible to select the camera size automatically, respecting the maximum
size (already used for display mirroring) and an aspect ratio.

For example, "scrcpy --video-source=camera" followed by:
 - (no additional arguments)
    : mirrors at the maximum size, any aspect ratio
 - -m1920
    : only consider valid sizes having both dimensions not above 1920
 - --camera-ar=4:3
    : only consider valid sizes having an aspect ratio of 4:3 (+/- 10%)
 - -m2048 --camera-ar=1.6
    : only consider valid sizes having both dimensions not above 2048
      and an aspect ratio of 1.6 (+/- 10%)

Co-authored-by: Simon Chan <1330321+yume-chan@users.noreply.github.com>
2023-10-28 00:45:52 +02:00
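The selection rule described above is implemented in CameraCapture.selectSize() further down in this diff. As a standalone illustration (a minimal sketch with made-up names, not scrcpy code), the acceptance test for a candidate size boils down to:

    // Keep a size if both dimensions fit under the -m bound (0 = no limit) and
    // its aspect ratio is within +/- 10% of the --camera-ar target (0 = any).
    static boolean acceptSize(int width, int height, int maxSize, float targetAr) {
        if (maxSize > 0 && (width > maxSize || height > maxSize)) {
            return false;
        }
        if (targetAr > 0) {
            float ratio = ((float) width / height) / targetAr;
            return ratio >= 0.9f && ratio <= 1.1f;
        }
        return true;
    }

For example, with --camera-ar=4:3 (target ~1.333), 1920x1440 (ratio 1.333) is kept while 1920x1080 (ratio 1.778) is filtered out; among the accepted sizes, the largest one is then selected.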
5834657dae Add --camera-facing
Add an option to select the camera by its lens facing (any, front, back
or external).

Co-authored-by: Romain Vimont <rom@rom1v.com>
Signed-off-by: Romain Vimont <rom@rom1v.com>
2023-10-28 00:10:15 +02:00
5f30a68297 Make camera id optional
If no camera id is provided, use the first camera available.
2023-10-28 00:10:15 +02:00
8735b388da Handle camera disconnection
Stop mirroring on camera disconnection.
2023-10-28 00:09:51 +02:00
8e9fa773c5 Automatically select audio source
If --audio-source is not specified, select the default value
according to the video source:
 - for display mirroring, use device audio by default;
 - for camera mirroring, use microphone by default.
2023-10-28 00:08:27 +02:00
75e58b3101 Add camera mirroring
Add --video-source=camera, and related options:
 - --camera-id=ID: select the camera (ids are listed by --list-cameras);
 - --camera-size=WIDTHxHEIGHT: select the capture size.

Co-authored-by: Romain Vimont <rom@rom1v.com>
Signed-off-by: Romain Vimont <rom@rom1v.com>
2023-10-28 00:08:25 +02:00
548858bfdb Add --list-camera-sizes
2023-10-28 00:07:25 +02:00
5ed63314a5 Add --list-cameras
Add an option to list the device cameras.

Co-authored-by: Romain Vimont <rom@rom1v.com>
Signed-off-by: Romain Vimont <rom@rom1v.com>
2023-10-28 00:06:15 +02:00
22 changed files with 981 additions and 16 deletions

View File

@ -10,6 +10,10 @@ _scrcpy() {
--audio-source=
--audio-output-buffer=
-b --video-bit-rate=
--camera-ar=
--camera-id=
--camera-facing=
--camera-size=
--crop=
-d --select-usb
--disable-screensaver
@ -23,6 +27,8 @@ _scrcpy() {
--kill-adb-on-close
-K --hid-keyboard
--legacy-paste
--list-camera-sizes
--list-cameras
--list-displays
--list-encoders
--lock-video-orientation
@ -72,6 +78,7 @@ _scrcpy() {
--video-codec=
--video-codec-options=
--video-encoder=
--video-source=
-w --stay-awake
--window-borderless
--window-title=
@ -91,10 +98,18 @@ _scrcpy() {
COMPREPLY=($(compgen -W 'opus aac raw' -- "$cur"))
return
;;
--video-source)
COMPREPLY=($(compgen -W 'display camera' -- "$cur"))
return
;;
--audio-source)
COMPREPLY=($(compgen -W 'output mic' -- "$cur"))
return
;;
--camera-facing)
COMPREPLY=($(compgen -W 'front back external' -- "$cur"))
return
;;
--lock-video-orientation)
COMPREPLY=($(compgen -W 'unlocked initial 0 1 2 3' -- "$cur"))
return
@ -139,6 +154,9 @@ _scrcpy() {
|--audio-codec-options \
|--audio-encoder \
|--audio-output-buffer \
|--camera-ar \
|--camera-id \
|--camera-size \
|--crop \
|--display-id \
|--display-buffer \

View File

@ -17,6 +17,10 @@ arguments=(
'--audio-source=[Select the audio source]:source:(output mic)'
'--audio-output-buffer=[Configure the size of the SDL audio output buffer (in milliseconds)]'
{-b,--video-bit-rate=}'[Encode the video at the given bit-rate]'
'--camera-ar=[Select the camera size by its aspect ratio]'
'--camera-id=[Specify the camera id to mirror]'
'--camera-facing=[Select the device camera by its facing direction]:facing:(front back external)'
'--camera-size=[Specify an explicit camera capture size]'
'--crop=[\[width\:height\:x\:y\] Crop the device screen on the server]'
{-d,--select-usb}'[Use USB device]'
'--disable-screensaver[Disable screensaver while scrcpy is running]'
@ -30,6 +34,8 @@ arguments=(
'--kill-adb-on-close[Kill adb when scrcpy terminates]'
{-K,--hid-keyboard}'[Simulate a physical keyboard by using HID over AOAv2]'
'--legacy-paste[Inject computer clipboard text as a sequence of key events on Ctrl+v]'
'--list-camera-sizes[List the valid camera capture sizes]'
'--list-cameras[List cameras available on the device]'
'--list-displays[List displays available on the device]'
'--list-encoders[List video and audio encoders available on the device]'
'--lock-video-orientation=[Lock video orientation]:orientation:(unlocked initial 0 1 2 3)'
@ -76,6 +82,7 @@ arguments=(
'--video-codec=[Select the video codec]:codec:(h264 h265 av1)'
'--video-codec-options=[Set a list of comma-separated key\:type=value options for the device video encoder]'
'--video-encoder=[Use a specific MediaCodec video encoder]'
'--video-source=[Select the video source]:source:(display camera)'
{-w,--stay-awake}'[Keep the device on while scrcpy is running, when the device is plugged in]'
'--window-borderless[Disable window decorations \(display borderless window\)]'
'--window-title=[Set a custom window title]'

View File

@ -75,6 +75,28 @@ Encode the video at the given bit rate, expressed in bits/s. Unit suffixes are s
Default is 8M (8000000).
.TP
.BI "\-\-camera\-ar " ar
Select the camera size by its aspect ratio (+/- 10%).
Possible values are "sensor" (use the camera sensor aspect ratio), "<num>:<den>" (e.g. "4:3") and "<value>" (e.g. "1.6").
.TP
.BI "\-\-camera\-id " id
Specify the device camera id to mirror.
The available camera ids can be listed by \-\-list\-cameras.
.TP
.BI "\-\-camera\-facing " facing
Select the device camera by its facing direction.
Possible values are "front", "back" and "external".
.TP
.BI "\-\-camera\-size " width\fRx\fIheight
Specify an explicit camera capture size.
.TP
.BI "\-\-crop " width\fR:\fIheight\fR:\fIx\fR:\fIy
Crop the device screen on the server.
@ -155,6 +177,12 @@ Inject computer clipboard text as a sequence of key events on Ctrl+v (like MOD+S
This is a workaround for some devices not behaving as expected when setting the device clipboard programmatically.
.TP
.B \-\-list\-camera\-sizes
List the valid camera capture sizes.
.TP
.B \-\-list\-cameras
List cameras available on the device.
.TP
.B \-\-list\-encoders
List video and audio encoders available on the device.
@ -426,6 +454,14 @@ Use a specific MediaCodec video encoder (depending on the codec provided by \fB\
The available encoders can be listed by \-\-list\-encoders.
.TP
.BI "\-\-video\-source " source
Select the video source (display or camera).
Camera mirroring requires Android 12+.
Default is display.
.TP
.B \-w, \-\-stay-awake
Keep the device on while scrcpy is running, when the device is plugged in.

View File

@ -77,10 +77,17 @@ enum {
OPT_NO_VIDEO,
OPT_NO_AUDIO_PLAYBACK,
OPT_NO_VIDEO_PLAYBACK,
OPT_VIDEO_SOURCE,
OPT_AUDIO_SOURCE,
OPT_KILL_ADB_ON_CLOSE,
OPT_TIME_LIMIT,
OPT_PAUSE_ON_EXIT,
OPT_LIST_CAMERAS,
OPT_LIST_CAMERA_SIZES,
OPT_CAMERA_ID,
OPT_CAMERA_SIZE,
OPT_CAMERA_FACING,
OPT_CAMERA_AR,
};
struct sc_option {
@ -197,6 +204,36 @@ static const struct sc_option options[] = {
.longopt = "bit-rate",
.argdesc = "value",
},
{
.longopt_id = OPT_CAMERA_AR,
.longopt = "camera-ar",
.argdesc = "ar",
.text = "Select the camera size by its aspect ratio (+/- 10%).\n"
"Possible values are \"sensor\" (use the camera sensor aspect "
"ratio), \"<num>:<den>\" (e.g. \"4:3\") or \"<value>\" (e.g. "
"\"1.6\")."
},
{
.longopt_id = OPT_CAMERA_ID,
.longopt = "camera-id",
.argdesc = "id",
.text = "Specify the device camera id to mirror.\n"
"The available camera ids can be listed by:\n"
" scrcpy --list-cameras",
},
{
.longopt_id = OPT_CAMERA_FACING,
.longopt = "camera-facing",
.argdesc = "facing",
.text = "Select the device camera by its facing direction.\n"
"Possible values are \"front\", \"back\" and \"external\".",
},
{
.longopt_id = OPT_CAMERA_SIZE,
.longopt = "camera-size",
.argdesc = "<width>x<height>",
.text = "Specify an explicit camera capture size.",
},
{
// Not really deprecated (--codec has never been released), but without
// declaring an explicit --codec option, getopt_long() partial matching
@ -320,6 +357,16 @@ static const struct sc_option options[] = {
"This is a workaround for some devices not behaving as "
"expected when setting the device clipboard programmatically.",
},
{
.longopt_id = OPT_LIST_CAMERAS,
.longopt = "list-cameras",
.text = "List device cameras.",
},
{
.longopt_id = OPT_LIST_CAMERA_SIZES,
.longopt = "list-camera-sizes",
.text = "List the valid camera capture sizes.",
},
{
.longopt_id = OPT_LIST_DISPLAYS,
.longopt = "list-displays",
@ -691,6 +738,14 @@ static const struct sc_option options[] = {
"codec provided by --video-codec).\n"
"The available encoders can be listed by --list-encoders.",
},
{
.longopt_id = OPT_VIDEO_SOURCE,
.longopt = "video-source",
.argdesc = "source",
.text = "Select the video source (display or camera).\n"
"Camera mirroring requires Android 12+.\n"
"Default is display.",
},
{
.shortopt = 'w',
.longopt = "stay-awake",
@ -1631,6 +1686,22 @@ parse_audio_codec(const char *optarg, enum sc_codec *codec) {
return false;
}
static bool
parse_video_source(const char *optarg, enum sc_video_source *source) {
if (!strcmp(optarg, "display")) {
*source = SC_VIDEO_SOURCE_DISPLAY;
return true;
}
if (!strcmp(optarg, "camera")) {
*source = SC_VIDEO_SOURCE_CAMERA;
return true;
}
LOGE("Unsupported video source: %s (expected display or camera)", optarg);
return false;
}
static bool
parse_audio_source(const char *optarg, enum sc_audio_source *source) {
if (!strcmp(optarg, "mic")) {
@ -1647,6 +1718,34 @@ parse_audio_source(const char *optarg, enum sc_audio_source *source) {
return false;
}
static bool
parse_camera_facing(const char *optarg, enum sc_camera_facing *facing) {
if (!strcmp(optarg, "front")) {
*facing = SC_CAMERA_FACING_FRONT;
return true;
}
if (!strcmp(optarg, "back")) {
*facing = SC_CAMERA_FACING_BACK;
return true;
}
if (!strcmp(optarg, "external")) {
*facing = SC_CAMERA_FACING_EXTERNAL;
return true;
}
if (*optarg == '\0') {
// Empty string is a valid value (equivalent to not passing the option)
*facing = SC_CAMERA_FACING_ANY;
return true;
}
LOGE("Unsupported camera facing: %s (expected front, back or external)",
optarg);
return false;
}
static bool
parse_time_limit(const char *s, sc_tick *tick) {
long value;
@ -1998,6 +2097,12 @@ parse_args_with_getopt(struct scrcpy_cli_args *args, int argc, char *argv[],
case OPT_LIST_DISPLAYS:
opts->list |= SC_OPTION_LIST_DISPLAYS;
break;
case OPT_LIST_CAMERAS:
opts->list |= SC_OPTION_LIST_CAMERAS;
break;
case OPT_LIST_CAMERA_SIZES:
opts->list |= SC_OPTION_LIST_CAMERA_SIZES;
break;
case OPT_REQUIRE_AUDIO:
opts->require_audio = true;
break;
@ -2012,6 +2117,11 @@ parse_args_with_getopt(struct scrcpy_cli_args *args, int argc, char *argv[],
return false;
}
break;
case OPT_VIDEO_SOURCE:
if (!parse_video_source(optarg, &opts->video_source)) {
return false;
}
break;
case OPT_AUDIO_SOURCE:
if (!parse_audio_source(optarg, &opts->audio_source)) {
return false;
@ -2030,6 +2140,20 @@ parse_args_with_getopt(struct scrcpy_cli_args *args, int argc, char *argv[],
return false;
}
break;
case OPT_CAMERA_AR:
opts->camera_ar = optarg;
break;
case OPT_CAMERA_ID:
opts->camera_id = optarg;
break;
case OPT_CAMERA_SIZE:
opts->camera_size = optarg;
break;
case OPT_CAMERA_FACING:
if (!parse_camera_facing(optarg, &opts->camera_facing)) {
return false;
}
break;
default:
// getopt prints the error message on stderr
return false;
@ -2123,6 +2247,57 @@ parse_args_with_getopt(struct scrcpy_cli_args *args, int argc, char *argv[],
opts->force_adb_forward = true;
}
if (opts->video_source == SC_VIDEO_SOURCE_CAMERA) {
if (opts->display_id) {
LOGE("--display-id is only available with --video-source=display");
return false;
}
if (opts->lock_video_orientation !=
SC_LOCK_VIDEO_ORIENTATION_UNLOCKED) {
LOGE("--lock-video-orientation is not supported for camera");
return false;
}
if (opts->camera_id && opts->camera_facing != SC_CAMERA_FACING_ANY) {
LOGE("Could not specify both --camera-id and --camera-facing");
return false;
}
if (opts->camera_size) {
if (opts->max_size) {
LOGE("Could not specify both --camera-size and -m/--max-size");
return false;
}
if (opts->camera_ar) {
LOGE("Could not specify both --camera-size and --camera-ar");
return false;
}
}
if (opts->control) {
LOGI("Camera video source: control disabled");
opts->control = false;
}
} else if (opts->camera_id
|| opts->camera_ar
|| opts->camera_facing != SC_CAMERA_FACING_ANY
|| opts->camera_size) {
LOGE("Camera options are only available with --video-source=camera");
return false;
}
if (opts->audio && opts->audio_source == SC_AUDIO_SOURCE_AUTO) {
// Select the audio source according to the video source
if (opts->video_source == SC_VIDEO_SOURCE_DISPLAY) {
opts->audio_source = SC_AUDIO_SOURCE_OUTPUT;
} else {
opts->audio_source = SC_AUDIO_SOURCE_MIC;
LOGI("Camera video source: microphone audio source selected");
}
}
if (opts->record_format && !opts->record_filename) {
LOGE("Record format specified without recording");
return false;

View File

@ -11,13 +11,18 @@ const struct scrcpy_options scrcpy_options_default = {
.audio_codec_options = NULL,
.video_encoder = NULL,
.audio_encoder = NULL,
.camera_id = NULL,
.camera_size = NULL,
.camera_ar = NULL,
.log_level = SC_LOG_LEVEL_INFO,
.video_codec = SC_CODEC_H264,
.audio_codec = SC_CODEC_OPUS,
.audio_source = SC_AUDIO_SOURCE_OUTPUT,
.video_source = SC_VIDEO_SOURCE_DISPLAY,
.audio_source = SC_AUDIO_SOURCE_AUTO,
.record_format = SC_RECORD_FORMAT_AUTO,
.keyboard_input_mode = SC_KEYBOARD_INPUT_MODE_INJECT,
.mouse_input_mode = SC_MOUSE_INPUT_MODE_INJECT,
.camera_facing = SC_CAMERA_FACING_ANY,
.port_range = {
.first = DEFAULT_LOCAL_PORT_RANGE_FIRST,
.last = DEFAULT_LOCAL_PORT_RANGE_LAST,

View File

@ -44,11 +44,24 @@ enum sc_codec {
SC_CODEC_RAW,
};
enum sc_video_source {
SC_VIDEO_SOURCE_DISPLAY,
SC_VIDEO_SOURCE_CAMERA,
};
enum sc_audio_source {
SC_AUDIO_SOURCE_AUTO, // OUTPUT for video DISPLAY, MIC for video CAMERA
SC_AUDIO_SOURCE_OUTPUT,
SC_AUDIO_SOURCE_MIC,
};
enum sc_camera_facing {
SC_CAMERA_FACING_ANY,
SC_CAMERA_FACING_FRONT,
SC_CAMERA_FACING_BACK,
SC_CAMERA_FACING_EXTERNAL,
};
enum sc_lock_video_orientation {
SC_LOCK_VIDEO_ORIENTATION_UNLOCKED = -1,
// lock the current orientation when scrcpy starts
@ -117,13 +130,18 @@ struct scrcpy_options {
const char *audio_codec_options;
const char *video_encoder;
const char *audio_encoder;
const char *camera_id;
const char *camera_size;
const char *camera_ar;
enum sc_log_level log_level;
enum sc_codec video_codec;
enum sc_codec audio_codec;
enum sc_video_source video_source;
enum sc_audio_source audio_source;
enum sc_record_format record_format;
enum sc_keyboard_input_mode keyboard_input_mode;
enum sc_mouse_input_mode mouse_input_mode;
enum sc_camera_facing camera_facing;
struct sc_port_range port_range;
uint32_t tunnel_host;
uint16_t tunnel_port;
@ -182,6 +200,8 @@ struct scrcpy_options {
bool kill_adb_on_close;
#define SC_OPTION_LIST_ENCODERS 0x1
#define SC_OPTION_LIST_DISPLAYS 0x2
#define SC_OPTION_LIST_CAMERAS 0x4
#define SC_OPTION_LIST_CAMERA_SIZES 0x8
uint8_t list;
};

View File

@ -351,7 +351,9 @@ scrcpy(struct scrcpy_options *options) {
.log_level = options->log_level,
.video_codec = options->video_codec,
.audio_codec = options->audio_codec,
.video_source = options->video_source,
.audio_source = options->audio_source,
.camera_facing = options->camera_facing,
.crop = options->crop,
.port_range = options->port_range,
.tunnel_host = options->tunnel_host,
@ -371,6 +373,9 @@ scrcpy(struct scrcpy_options *options) {
.audio_codec_options = options->audio_codec_options,
.video_encoder = options->video_encoder,
.audio_encoder = options->audio_encoder,
.camera_id = options->camera_id,
.camera_size = options->camera_size,
.camera_ar = options->camera_ar,
.force_adb_forward = options->force_adb_forward,
.power_off_on_close = options->power_off_on_close,
.clipboard_autosync = options->clipboard_autosync,

View File

@ -76,6 +76,8 @@ sc_server_params_destroy(struct sc_server_params *params) {
free((char *) params->video_encoder);
free((char *) params->audio_encoder);
free((char *) params->tcpip_dst);
free((char *) params->camera_id);
free((char *) params->camera_ar);
}
static bool
@ -103,6 +105,8 @@ sc_server_params_copy(struct sc_server_params *dst,
COPY(video_encoder);
COPY(audio_encoder);
COPY(tcpip_dst);
COPY(camera_id);
COPY(camera_ar);
#undef COPY
return true;
@ -181,6 +185,20 @@ sc_server_get_codec_name(enum sc_codec codec) {
}
}
static const char *
sc_server_get_camera_facing_name(enum sc_camera_facing camera_facing) {
switch (camera_facing) {
case SC_CAMERA_FACING_FRONT:
return "front";
case SC_CAMERA_FACING_BACK:
return "back";
case SC_CAMERA_FACING_EXTERNAL:
return "external";
default:
return NULL;
}
}
static sc_pid
execute_server(struct sc_server *server,
const struct sc_server_params *params) {
@ -247,8 +265,11 @@ execute_server(struct sc_server *server,
ADD_PARAM("audio_codec=%s",
sc_server_get_codec_name(params->audio_codec));
}
if (params->audio_source != SC_AUDIO_SOURCE_OUTPUT) {
assert(params->audio_source == SC_AUDIO_SOURCE_MIC);
if (params->video_source != SC_VIDEO_SOURCE_DISPLAY) {
assert(params->video_source == SC_VIDEO_SOURCE_CAMERA);
ADD_PARAM("video_source=camera");
}
if (params->audio_source == SC_AUDIO_SOURCE_MIC) {
ADD_PARAM("audio_source=mic");
}
if (params->max_size) {
@ -274,6 +295,19 @@ execute_server(struct sc_server *server,
if (params->display_id) {
ADD_PARAM("display_id=%" PRIu32, params->display_id);
}
if (params->camera_id) {
ADD_PARAM("camera_id=%s", params->camera_id);
}
if (params->camera_size) {
ADD_PARAM("camera_size=%s", params->camera_size);
}
if (params->camera_facing != SC_CAMERA_FACING_ANY) {
ADD_PARAM("camera_facing=%s",
sc_server_get_camera_facing_name(params->camera_facing));
}
if (params->camera_ar) {
ADD_PARAM("camera_ar=%s", params->camera_ar);
}
if (params->show_touches) {
ADD_PARAM("show_touches=true");
}
@ -317,6 +351,12 @@ execute_server(struct sc_server *server,
if (params->list & SC_OPTION_LIST_DISPLAYS) {
ADD_PARAM("list_displays=true");
}
if (params->list & SC_OPTION_LIST_CAMERAS) {
ADD_PARAM("list_cameras=true");
}
if (params->list & SC_OPTION_LIST_CAMERA_SIZES) {
ADD_PARAM("list_camera_sizes=true");
}
#undef ADD_PARAM

View File

@ -26,12 +26,17 @@ struct sc_server_params {
enum sc_log_level log_level;
enum sc_codec video_codec;
enum sc_codec audio_codec;
enum sc_video_source video_source;
enum sc_audio_source audio_source;
enum sc_camera_facing camera_facing;
const char *crop;
const char *video_codec_options;
const char *audio_codec_options;
const char *video_encoder;
const char *audio_encoder;
const char *camera_id;
const char *camera_size;
const char *camera_ar;
struct sc_port_range port_range;
uint32_t tunnel_host;
uint16_t tunnel_port;

View File

@ -0,0 +1,37 @@
package com.genymobile.scrcpy;
public final class CameraAspectRatio {
private static final float SENSOR = -1;
private float ar;
private CameraAspectRatio(float ar) {
this.ar = ar;
}
public static CameraAspectRatio fromFloat(float ar) {
if (ar < 0) {
throw new IllegalArgumentException("Invalid aspect ratio: " + ar);
}
return new CameraAspectRatio(ar);
}
public static CameraAspectRatio fromFraction(int w, int h) {
if (w <= 0 || h <= 0) {
throw new IllegalArgumentException("Invalid aspect ratio: " + w + ":" + h);
}
return new CameraAspectRatio((float) w / h);
}
public static CameraAspectRatio sensorAspectRatio() {
return new CameraAspectRatio(SENSOR);
}
public boolean isSensor() {
return ar == SENSOR;
}
public float getAspectRatio() {
return ar;
}
}
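For reference, a short sketch of how this value class is used (illustrative statements, not part of the diff):

    CameraAspectRatio fourThirds = CameraAspectRatio.fromFraction(4, 3); // ~1.333f
    CameraAspectRatio custom = CameraAspectRatio.fromFloat(1.6f);
    CameraAspectRatio sensor = CameraAspectRatio.sensorAspectRatio();
    // fromFloat() rejects negative values, so isSensor() is true only for
    // sensorAspectRatio(); CameraCapture.resolveAspectRatio() later replaces it
    // with the aspect ratio of the sensor's active array.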

View File

@ -0,0 +1,330 @@
package com.genymobile.scrcpy;
import com.genymobile.scrcpy.wrappers.ServiceManager;
import android.annotation.SuppressLint;
import android.annotation.TargetApi;
import android.graphics.Rect;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CaptureFailure;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.params.OutputConfiguration;
import android.hardware.camera2.params.SessionConfiguration;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.MediaCodec;
import android.os.Build;
import android.os.Handler;
import android.os.HandlerThread;
import android.view.Surface;
import java.io.IOException;
import java.util.Arrays;
import java.util.Comparator;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executor;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.stream.Stream;
public class CameraCapture extends SurfaceCapture {
private final String explicitCameraId;
private final CameraFacing cameraFacing;
private final Size explicitSize;
private int maxSize;
private final CameraAspectRatio aspectRatio;
private String cameraId;
private Size size;
private HandlerThread cameraThread;
private Handler cameraHandler;
private CameraDevice cameraDevice;
private Executor cameraExecutor;
private final AtomicBoolean disconnected = new AtomicBoolean();
public CameraCapture(String explicitCameraId, CameraFacing cameraFacing, Size explicitSize, int maxSize, CameraAspectRatio aspectRatio) {
this.explicitCameraId = explicitCameraId;
this.cameraFacing = cameraFacing;
this.explicitSize = explicitSize;
this.maxSize = maxSize;
this.aspectRatio = aspectRatio;
}
@Override
public void init() throws IOException {
cameraThread = new HandlerThread("camera");
cameraThread.start();
cameraHandler = new Handler(cameraThread.getLooper());
cameraExecutor = new HandlerExecutor(cameraHandler);
try {
cameraId = selectCamera(explicitCameraId, cameraFacing);
if (cameraId == null) {
throw new IOException("No matching camera found");
}
size = selectSize(cameraId, explicitSize, maxSize, aspectRatio);
if (size == null) {
throw new IOException("Could not select camera size");
}
Ln.i("Using camera '" + cameraId + "'");
cameraDevice = openCamera(cameraId);
} catch (CameraAccessException | InterruptedException e) {
throw new IOException(e);
}
}
private static String selectCamera(String explicitCameraId, CameraFacing cameraFacing) throws CameraAccessException {
if (explicitCameraId != null) {
return explicitCameraId;
}
CameraManager cameraManager = ServiceManager.getCameraManager();
String[] cameraIds = cameraManager.getCameraIdList();
if (cameraFacing == null) {
// Use the first one
return cameraIds.length > 0 ? cameraIds[0] : null;
}
for (String cameraId : cameraIds) {
CameraCharacteristics characteristics = cameraManager.getCameraCharacteristics(cameraId);
int facing = characteristics.get(CameraCharacteristics.LENS_FACING);
if (cameraFacing.value() == facing) {
return cameraId;
}
}
// Not found
return null;
}
@TargetApi(Build.VERSION_CODES.N)
private static Size selectSize(String cameraId, Size explicitSize, int maxSize, CameraAspectRatio aspectRatio) throws CameraAccessException {
if (explicitSize != null) {
return explicitSize;
}
CameraManager cameraManager = ServiceManager.getCameraManager();
CameraCharacteristics characteristics = cameraManager.getCameraCharacteristics(cameraId);
StreamConfigurationMap configs = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
android.util.Size[] sizes = configs.getOutputSizes(MediaCodec.class);
Stream<android.util.Size> stream = Arrays.stream(sizes);
if (maxSize > 0) {
stream = stream.filter(it -> it.getWidth() <= maxSize && it.getHeight() <= maxSize);
}
Float targetAspectRatio = resolveAspectRatio(aspectRatio, characteristics);
if (targetAspectRatio != null) {
stream = stream.filter(it -> {
float ar = ((float) it.getWidth() / it.getHeight());
float arRatio = ar / targetAspectRatio;
// Accept if the aspect ratio is the target aspect ratio + or - 10%
return arRatio >= 0.9f && arRatio <= 1.1f;
});
}
Optional<android.util.Size> selected = stream.max((s1, s2) -> {
// Greater width is better
int cmp = Integer.compare(s1.getWidth(), s2.getWidth());
if (cmp != 0) {
return cmp;
}
if (targetAspectRatio != null) {
// Closer to the target aspect ratio is better
float ar1 = ((float) s1.getWidth() / s1.getHeight());
float arRatio1 = ar1 / targetAspectRatio;
float distance1 = Math.abs(1 - arRatio1);
float ar2 = ((float) s2.getWidth() / s2.getHeight());
float arRatio2 = ar2 / targetAspectRatio;
float distance2 = Math.abs(1 - arRatio2);
// Reverse the order because lower distance is better
return Float.compare(distance2, distance1);
}
// Greater height is better
return Integer.compare(s1.getHeight(), s2.getHeight());
});
if (selected.isPresent()) {
android.util.Size size = selected.get();
return new Size(size.getWidth(), size.getHeight());
}
// Not found
return null;
}
private static Float resolveAspectRatio(CameraAspectRatio ratio, CameraCharacteristics characteristics) {
if (ratio == null) {
return null;
}
if (ratio.isSensor()) {
Rect activeSize = characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
return (float) activeSize.width() / activeSize.height();
}
return ratio.getAspectRatio();
}
@Override
public void start(Surface surface) throws IOException {
try {
CameraCaptureSession session = createCaptureSession(cameraDevice, surface);
CaptureRequest.Builder requestBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
requestBuilder.addTarget(surface);
CaptureRequest request = requestBuilder.build();
setRepeatingRequest(session, request);
} catch (CameraAccessException | InterruptedException e) {
throw new IOException(e);
}
}
@Override
public void release() {
if (cameraDevice != null) {
cameraDevice.close();
}
if (cameraThread != null) {
cameraThread.quitSafely();
}
}
@Override
public Size getSize() {
return size;
}
@Override
public boolean setMaxSize(int maxSize) {
if (explicitSize != null) {
return false;
}
this.maxSize = maxSize;
try {
size = selectSize(cameraId, null, maxSize, aspectRatio);
return true;
} catch (CameraAccessException e) {
Ln.w("Could not select camera size", e);
return false;
}
}
@SuppressLint("MissingPermission")
@TargetApi(Build.VERSION_CODES.S)
private CameraDevice openCamera(String id) throws CameraAccessException, InterruptedException {
CompletableFuture<CameraDevice> future = new CompletableFuture<>();
ServiceManager.getCameraManager().openCamera(id, new CameraDevice.StateCallback() {
@Override
public void onOpened(CameraDevice camera) {
Ln.d("Camera opened successfully");
future.complete(camera);
}
@Override
public void onDisconnected(CameraDevice camera) {
Ln.w("Camera disconnected");
disconnected.set(true);
requestReset();
}
@Override
public void onError(CameraDevice camera, int error) {
int cameraAccessExceptionErrorCode;
switch (error) {
case CameraDevice.StateCallback.ERROR_CAMERA_IN_USE:
cameraAccessExceptionErrorCode = CameraAccessException.CAMERA_IN_USE;
break;
case CameraDevice.StateCallback.ERROR_MAX_CAMERAS_IN_USE:
cameraAccessExceptionErrorCode = CameraAccessException.MAX_CAMERAS_IN_USE;
break;
case CameraDevice.StateCallback.ERROR_CAMERA_DISABLED:
cameraAccessExceptionErrorCode = CameraAccessException.CAMERA_DISABLED;
break;
case CameraDevice.StateCallback.ERROR_CAMERA_DEVICE:
case CameraDevice.StateCallback.ERROR_CAMERA_SERVICE:
default:
cameraAccessExceptionErrorCode = CameraAccessException.CAMERA_ERROR;
break;
}
future.completeExceptionally(new CameraAccessException(cameraAccessExceptionErrorCode));
}
}, cameraHandler);
try {
return future.get();
} catch (ExecutionException e) {
throw (CameraAccessException) e.getCause();
}
}
@TargetApi(Build.VERSION_CODES.S)
private CameraCaptureSession createCaptureSession(CameraDevice camera, Surface surface) throws CameraAccessException, InterruptedException {
CompletableFuture<CameraCaptureSession> future = new CompletableFuture<>();
OutputConfiguration outputConfig = new OutputConfiguration(surface);
List<OutputConfiguration> outputs = Arrays.asList(outputConfig);
SessionConfiguration sessionConfig = new SessionConfiguration(SessionConfiguration.SESSION_REGULAR, outputs, cameraExecutor,
new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(CameraCaptureSession session) {
future.complete(session);
}
@Override
public void onConfigureFailed(CameraCaptureSession session) {
future.completeExceptionally(new CameraAccessException(CameraAccessException.CAMERA_ERROR));
}
});
camera.createCaptureSession(sessionConfig);
try {
return future.get();
} catch (ExecutionException e) {
throw (CameraAccessException) e.getCause();
}
}
@TargetApi(Build.VERSION_CODES.S)
private void setRepeatingRequest(CameraCaptureSession session, CaptureRequest request) throws CameraAccessException, InterruptedException {
CompletableFuture<Void> future = new CompletableFuture<>();
session.setRepeatingRequest(request, new CameraCaptureSession.CaptureCallback() {
@Override
public void onCaptureStarted(CameraCaptureSession session, CaptureRequest request, long timestamp, long frameNumber) {
future.complete(null);
}
@Override
public void onCaptureFailed(CameraCaptureSession session, CaptureRequest request, CaptureFailure failure) {
future.completeExceptionally(new CameraAccessException(CameraAccessException.CAMERA_ERROR));
}
}, cameraHandler);
try {
future.get();
} catch (ExecutionException e) {
throw (CameraAccessException) e.getCause();
}
}
@Override
public boolean isClosed() {
return disconnected.get();
}
}
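A note on the three @TargetApi(S) helpers above (openCamera, createCaptureSession, setRepeatingRequest): each bridges the callback-based camera2 API to a blocking call by completing a CompletableFuture from the callback and waiting on it. A self-contained sketch of that pattern, with hypothetical names:

    import java.util.concurrent.CompletableFuture;
    import java.util.concurrent.ExecutionException;

    // The async callback completes the future; the calling thread blocks on get()
    // and rethrows the original cause, as the helpers above do with CameraAccessException.
    final class CallbackBridgeSketch {
        interface AsyncOp<T> {
            void start(CompletableFuture<T> future); // completes the future from a callback
        }

        static <T> T await(AsyncOp<T> op) throws Exception {
            CompletableFuture<T> future = new CompletableFuture<>();
            op.start(future);
            try {
                return future.get(); // block until complete() or completeExceptionally()
            } catch (ExecutionException e) {
                throw (Exception) e.getCause(); // rethrow the failure reported by the callback
            }
        }
    }

Camera disconnection, by contrast, stays asynchronous: onDisconnected() only sets the disconnected flag and calls requestReset(), and SurfaceEncoder later observes it through isClosed().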

View File

@ -0,0 +1,31 @@
package com.genymobile.scrcpy;
import android.hardware.camera2.CameraCharacteristics;
public enum CameraFacing {
FRONT("front", CameraCharacteristics.LENS_FACING_FRONT),
BACK("back", CameraCharacteristics.LENS_FACING_BACK),
EXTERNAL("external", CameraCharacteristics.LENS_FACING_EXTERNAL);
private final String name;
private final int value;
CameraFacing(String name, int value) {
this.name = name;
this.value = value;
}
int value() {
return value;
}
static CameraFacing findByName(String name) {
for (CameraFacing facing : CameraFacing.values()) {
if (name.equals(facing.name)) {
return facing;
}
}
return null;
}
}

View File

@ -0,0 +1,23 @@
package com.genymobile.scrcpy;
import android.os.Handler;
import java.util.concurrent.Executor;
import java.util.concurrent.RejectedExecutionException;
// Inspired by the hidden android.os.HandlerExecutor
public class HandlerExecutor implements Executor {
private final Handler handler;
public HandlerExecutor(Handler handler) {
this.handler = handler;
}
@Override
public void execute(Runnable command) {
if (!handler.post(command)) {
throw new RejectedExecutionException(handler + " is shutting down");
}
}
}
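This executor exists because SessionConfiguration (used by CameraCapture.createCaptureSession above) takes an Executor rather than a Handler, while the camera callbacks should keep running on the dedicated camera thread. The wiring in CameraCapture.init() amounts to:

    // Dedicated "camera" HandlerThread, a Handler bound to its Looper, and this
    // wrapper so that Executor-based callbacks are posted to the same thread.
    HandlerThread cameraThread = new HandlerThread("camera");
    cameraThread.start();
    Handler cameraHandler = new Handler(cameraThread.getLooper());
    Executor cameraExecutor = new HandlerExecutor(cameraHandler);

Once the thread has quit, execute() throws RejectedExecutionException instead of silently dropping the task.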

View File

@ -3,6 +3,13 @@ package com.genymobile.scrcpy;
import com.genymobile.scrcpy.wrappers.DisplayManager;
import com.genymobile.scrcpy.wrappers.ServiceManager;
import android.graphics.Rect;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.MediaCodec;
import java.util.List;
public final class LogUtils {
@ -60,4 +67,50 @@ public final class LogUtils {
}
return builder.toString();
}
private static String getCameraFacingName(int facing) {
switch (facing) {
case CameraCharacteristics.LENS_FACING_FRONT:
return "front";
case CameraCharacteristics.LENS_FACING_BACK:
return "back";
case CameraCharacteristics.LENS_FACING_EXTERNAL:
return "external";
default:
return "unknown";
}
}
public static String buildCameraListMessage(boolean includeSizes) {
StringBuilder builder = new StringBuilder("List of cameras:");
CameraManager cameraManager = ServiceManager.getCameraManager();
try {
String[] cameraIds = cameraManager.getCameraIdList();
if (cameraIds == null || cameraIds.length == 0) {
builder.append("\n (none)");
} else {
for (String id : cameraIds) {
builder.append("\n --video-source=camera --camera-id=").append(id);
CameraCharacteristics characteristics = cameraManager.getCameraCharacteristics(id);
int facing = characteristics.get(CameraCharacteristics.LENS_FACING);
builder.append(" (").append(getCameraFacingName(facing)).append(", ");
Rect activeSize = characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
builder.append(activeSize.width()).append("x").append(activeSize.height());
builder.append(')');
if (includeSizes) {
StreamConfigurationMap configs = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
android.util.Size[] sizes = configs.getOutputSizes(MediaCodec.class);
for (android.util.Size size : sizes) {
builder.append("\n - ").append(size.getWidth()).append('x').append(size.getHeight());
}
}
}
}
} catch (CameraAccessException e) {
builder.append("\n (access denied)");
}
return builder.toString();
}
}
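Given the string building above, scrcpy --list-cameras prints one line per camera, and --list-camera-sizes additionally lists the valid capture sizes. Roughly (the ids, facings and sizes below are hypothetical, not from a real device):

    List of cameras:
        --video-source=camera --camera-id=0 (back, 4000x3000)
            - 1920x1080
            - 1280x720
        --video-source=camera --camera-id=1 (front, 3264x2448)
            - 1920x1080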

View File

@ -14,6 +14,7 @@ public class Options {
private int maxSize;
private VideoCodec videoCodec = VideoCodec.H264;
private AudioCodec audioCodec = AudioCodec.OPUS;
private VideoSource videoSource = VideoSource.DISPLAY;
private AudioSource audioSource = AudioSource.OUTPUT;
private int videoBitRate = 8000000;
private int audioBitRate = 128000;
@ -23,6 +24,10 @@ public class Options {
private Rect crop;
private boolean control = true;
private int displayId;
private String cameraId;
private Size cameraSize;
private CameraFacing cameraFacing;
private CameraAspectRatio cameraAspectRatio;
private boolean showTouches;
private boolean stayAwake;
private List<CodecOption> videoCodecOptions;
@ -38,6 +43,8 @@ public class Options {
private boolean listEncoders;
private boolean listDisplays;
private boolean listCameras;
private boolean listCameraSizes;
// Options not used by the scrcpy client, but useful to use scrcpy-server directly
private boolean sendDeviceMeta = true; // send device name and size
@ -73,6 +80,10 @@ public class Options {
return audioCodec;
}
public VideoSource getVideoSource() {
return videoSource;
}
public AudioSource getAudioSource() {
return audioSource;
}
@ -109,6 +120,22 @@ public class Options {
return displayId;
}
public String getCameraId() {
return cameraId;
}
public Size getCameraSize() {
return cameraSize;
}
public CameraFacing getCameraFacing() {
return cameraFacing;
}
public CameraAspectRatio getCameraAspectRatio() {
return cameraAspectRatio;
}
public boolean getShowTouches() {
return showTouches;
}
@ -154,7 +181,7 @@ public class Options {
}
public boolean getList() {
return listEncoders || listDisplays;
return listEncoders || listDisplays || listCameras || listCameraSizes;
}
public boolean getListEncoders() {
@ -165,6 +192,14 @@ public class Options {
return listDisplays;
}
public boolean getListCameras() {
return listCameras;
}
public boolean getListCameraSizes() {
return listCameraSizes;
}
public boolean getSendDeviceMeta() {
return sendDeviceMeta;
}
@ -234,6 +269,13 @@ public class Options {
}
options.audioCodec = audioCodec;
break;
case "video_source":
VideoSource videoSource = VideoSource.findByName(value);
if (videoSource == null) {
throw new IllegalArgumentException("Video source " + value + " not supported");
}
options.videoSource = videoSource;
break;
case "audio_source":
AudioSource audioSource = AudioSource.findByName(value);
if (audioSource == null) {
@ -312,6 +354,36 @@ public class Options {
case "list_displays":
options.listDisplays = Boolean.parseBoolean(value);
break;
case "list_cameras":
options.listCameras = Boolean.parseBoolean(value);
break;
case "list_camera_sizes":
options.listCameraSizes = Boolean.parseBoolean(value);
break;
case "camera_id":
if (!value.isEmpty()) {
options.cameraId = value;
}
break;
case "camera_size":
if (!value.isEmpty()) {
options.cameraSize = parseSize(value);
}
break;
case "camera_facing":
if (!value.isEmpty()) {
CameraFacing facing = CameraFacing.findByName(value);
if (facing == null) {
throw new IllegalArgumentException("Camera facing " + value + " not supported");
}
options.cameraFacing = facing;
}
break;
case "camera_ar":
if (!value.isEmpty()) {
options.cameraAspectRatio = parseCameraAspectRatio(value);
}
break;
case "send_device_meta":
options.sendDeviceMeta = Boolean.parseBoolean(value);
break;
@ -354,4 +426,31 @@ public class Options {
int y = Integer.parseInt(tokens[3]);
return new Rect(x, y, x + width, y + height);
}
private static Size parseSize(String size) {
// input format: "<width>x<height>"
String[] tokens = size.split("x");
if (tokens.length != 2) {
throw new IllegalArgumentException("Invalid size format (expected <width>x<height>): \"" + size + "\"");
}
int width = Integer.parseInt(tokens[0]);
int height = Integer.parseInt(tokens[1]);
return new Size(width, height);
}
private static CameraAspectRatio parseCameraAspectRatio(String ar) {
if ("sensor".equals(ar)) {
return CameraAspectRatio.sensorAspectRatio();
}
String[] tokens = ar.split(":");
if (tokens.length == 2) {
int w = Integer.parseInt(tokens[0]);
int h = Integer.parseInt(tokens[1]);
return CameraAspectRatio.fromFraction(w, h);
}
float floatAr = Float.parseFloat(tokens[0]);
return CameraAspectRatio.fromFloat(floatAr);
}
}

View File

@ -48,8 +48,9 @@ public class ScreenCapture extends SurfaceCapture implements Device.RotationList
}
@Override
public void setMaxSize(int size) {
public boolean setMaxSize(int size) {
device.setMaxSize(size);
return true;
}
@Override

View File

@ -88,6 +88,12 @@ public final class Server {
private static void scrcpy(Options options) throws IOException, ConfigurationException {
Ln.i("Device: [" + Build.MANUFACTURER + "] " + Build.BRAND + " " + Build.MODEL + " (Android " + Build.VERSION.RELEASE + ")");
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.S && options.getVideoSource() == VideoSource.CAMERA) {
Ln.e("Camera mirroring is not supported before Android 12");
throw new ConfigurationException("Camera mirroring is not supported");
}
final Device device = new Device(options);
Thread initThread = startInitThread(options);
@ -99,7 +105,8 @@ public final class Server {
boolean audio = options.getAudio();
boolean sendDummyByte = options.getSendDummyByte();
Workarounds.apply(audio);
boolean camera = true;
Workarounds.apply(audio, camera);
List<AsyncProcessor> asyncProcessors = new ArrayList<>();
@ -132,10 +139,16 @@ public final class Server {
if (video) {
Streamer videoStreamer = new Streamer(connection.getVideoFd(), options.getVideoCodec(), options.getSendCodecMeta(),
options.getSendFrameMeta());
ScreenCapture screenCapture = new ScreenCapture(device);
SurfaceEncoder screenEncoder = new SurfaceEncoder(screenCapture, videoStreamer, options.getVideoBitRate(), options.getMaxFps(),
SurfaceCapture surfaceCapture;
if (options.getVideoSource() == VideoSource.DISPLAY) {
surfaceCapture = new ScreenCapture(device);
} else {
surfaceCapture = new CameraCapture(options.getCameraId(), options.getCameraFacing(), options.getCameraSize(),
options.getMaxSize(), options.getCameraAspectRatio());
}
SurfaceEncoder surfaceEncoder = new SurfaceEncoder(surfaceCapture, videoStreamer, options.getVideoBitRate(), options.getMaxFps(),
options.getVideoCodecOptions(), options.getVideoEncoder(), options.getDownsizeOnError());
asyncProcessors.add(screenEncoder);
asyncProcessors.add(surfaceEncoder);
}
Completion completion = new Completion(asyncProcessors.size());
@ -192,6 +205,10 @@ public final class Server {
if (options.getListDisplays()) {
Ln.i(LogUtils.buildDisplayListMessage());
}
if (options.getListCameras() || options.getListCameraSizes()) {
Workarounds.apply(false, true);
Ln.i(LogUtils.buildCameraListMessage(options.getListCameraSizes()));
}
// Just print the requested data, do not mirror
return;
}

View File

@ -2,6 +2,7 @@ package com.genymobile.scrcpy;
import android.view.Surface;
import java.io.IOException;
import java.util.concurrent.atomic.AtomicBoolean;
/**
@ -31,7 +32,7 @@ public abstract class SurfaceCapture {
/**
* Called once before the capture starts.
*/
public abstract void init();
public abstract void init() throws IOException;
/**
* Called after the capture ends (if and only if {@link #init()} has been called).
@ -43,7 +44,7 @@ public abstract class SurfaceCapture {
*
* @param surface the surface which will be encoded
*/
public abstract void start(Surface surface);
public abstract void start(Surface surface) throws IOException;
/**
* Return the video size
@ -57,5 +58,14 @@ public abstract class SurfaceCapture {
*
* @param size Maximum size
*/
public abstract void setMaxSize(int size);
public abstract boolean setMaxSize(int size);
/**
* Indicate if the capture has been closed internally.
*
* @return {@code true} if the capture is closed, {@code false} otherwise.
*/
public boolean isClosed() {
return false;
}
}

View File

@ -122,9 +122,13 @@ public class SurfaceEncoder implements AsyncProcessor {
return false;
}
// Retry with a smaller device size
boolean accepted = capture.setMaxSize(newMaxSize);
if (!accepted) {
return false;
}
// Retry with a smaller size
Ln.i("Retrying with -m" + newMaxSize + "...");
capture.setMaxSize(newMaxSize);
return true;
}
@ -177,6 +181,11 @@ public class SurfaceEncoder implements AsyncProcessor {
}
}
if (capture.isClosed()) {
// The capture might have been closed internally (for example if the camera is disconnected)
alive = false;
}
return !eof && alive;
}

View File

@ -0,0 +1,22 @@
package com.genymobile.scrcpy;
public enum VideoSource {
DISPLAY("display"),
CAMERA("camera");
private final String name;
VideoSource(String name) {
this.name = name;
}
static VideoSource findByName(String name) {
for (VideoSource videoSource : VideoSource.values()) {
if (name.equals(videoSource.name)) {
return videoSource;
}
}
return null;
}
}

View File

@ -28,14 +28,13 @@ public final class Workarounds {
// not instantiable
}
public static void apply(boolean audio) {
public static void apply(boolean audio, boolean camera) {
Workarounds.prepareMainLooper();
boolean mustFillAppInfo = false;
boolean mustFillBaseContext = false;
boolean mustFillAppContext = false;
if (Build.BRAND.equalsIgnoreCase("meizu")) {
// Workarounds must be applied for Meizu phones:
// - <https://github.com/Genymobile/scrcpy/issues/240>
@ -65,6 +64,11 @@ public final class Workarounds {
mustFillAppContext = true;
}
if (camera) {
mustFillAppInfo = true;
mustFillBaseContext = true;
}
if (mustFillAppInfo) {
Workarounds.fillAppInfo();
}

View File

@ -1,9 +1,14 @@
package com.genymobile.scrcpy.wrappers;
import com.genymobile.scrcpy.FakeContext;
import android.annotation.SuppressLint;
import android.content.Context;
import android.hardware.camera2.CameraManager;
import android.os.IBinder;
import android.os.IInterface;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
@ -26,6 +31,7 @@ public final class ServiceManager {
private static StatusBarManager statusBarManager;
private static ClipboardManager clipboardManager;
private static ActivityManager activityManager;
private static CameraManager cameraManager;
private ServiceManager() {
/* not instantiable */
@ -129,4 +135,16 @@ public final class ServiceManager {
return activityManager;
}
public static CameraManager getCameraManager() {
if (cameraManager == null) {
try {
Constructor<CameraManager> ctor = CameraManager.class.getDeclaredConstructor(Context.class);
cameraManager = ctor.newInstance(FakeContext.get());
} catch (Exception e) {
throw new AssertionError(e);
}
}
return cameraManager;
}
}