Add camera mirroring

Add --video-source=camera, and related options:
 - --camera-id=<id>: select the camera by its id (see --list-cameras);
 - --camera-size=<width>x<height>: select the capture size.
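
A typical invocation might then look like this (the id and size below are only
illustrative; use an id reported by --list-cameras and a size supported by that
camera):

    scrcpy --video-source=camera --camera-id=0 --camera-size=1920x1080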

Fixes #241 <https://github.com/Genymobile/scrcpy/issues/241>
PR #4213 <https://github.com/Genymobile/scrcpy/pull/4213>

Co-authored-by: Romain Vimont <rom@rom1v.com>
Signed-off-by: Romain Vimont <rom@rom1v.com>
Authored by Simon Chan on 2023-07-16 17:07:19 +08:00, committed by Romain Vimont
parent f032262cd7
commit bfeecc0131
17 changed files with 426 additions and 10 deletions


@@ -10,6 +10,8 @@ _scrcpy() {
         --audio-source=
         --audio-output-buffer=
         -b --video-bit-rate=
+        --camera-id=
+        --camera-size=
         --crop=
         -d --select-usb
         --disable-screensaver
@@ -74,6 +76,7 @@ _scrcpy() {
         --video-codec=
         --video-codec-options=
         --video-encoder=
+        --video-source=
         -w --stay-awake
         --window-borderless
         --window-title=
@@ -93,6 +96,10 @@ _scrcpy() {
             COMPREPLY=($(compgen -W 'opus aac raw' -- "$cur"))
             return
             ;;
+        --video-source)
+            COMPREPLY=($(compgen -W 'display camera' -- "$cur"))
+            return
+            ;;
         --audio-source)
             COMPREPLY=($(compgen -W 'output mic' -- "$cur"))
             return
@@ -141,6 +148,8 @@ _scrcpy() {
         |--audio-codec-options \
         |--audio-encoder \
         |--audio-output-buffer \
+        |--camera-id \
+        |--camera-size \
         |--crop \
         |--display-id \
         |--display-buffer \


@@ -17,6 +17,8 @@ arguments=(
     '--audio-source=[Select the audio source]:source:(output mic)'
     '--audio-output-buffer=[Configure the size of the SDL audio output buffer (in milliseconds)]'
     {-b,--video-bit-rate=}'[Encode the video at the given bit-rate]'
+    '--camera-id=[Specify the camera id to mirror]'
+    '--camera-size=[Specify an explicit camera capture size]'
     '--crop=[\[width\:height\:x\:y\] Crop the device screen on the server]'
     {-d,--select-usb}'[Use USB device]'
     '--disable-screensaver[Disable screensaver while scrcpy is running]'
@@ -78,6 +80,7 @@ arguments=(
     '--video-codec=[Select the video codec]:codec:(h264 h265 av1)'
     '--video-codec-options=[Set a list of comma-separated key\:type=value options for the device video encoder]'
     '--video-encoder=[Use a specific MediaCodec video encoder]'
+    '--video-source=[Select the video source]:source:(display camera)'
     {-w,--stay-awake}'[Keep the device on while scrcpy is running, when the device is plugged in]'
     '--window-borderless[Disable window decorations \(display borderless window\)]'
     '--window-title=[Set a custom window title]'


@@ -75,6 +75,16 @@ Encode the video at the given bit rate, expressed in bits/s. Unit suffixes are s
 Default is 8M (8000000).

+.TP
+.BI "\-\-camera\-id " id
+Specify the device camera id to mirror.
+
+The available camera ids can be listed by \-\-list\-cameras.
+
+.TP
+.BI "\-\-camera\-size " width\fRx\fIheight
+Specify an explicit camera capture size.
+
 .TP
 .BI "\-\-crop " width\fR:\fIheight\fR:\fIx\fR:\fIy
 Crop the device screen on the server.
@@ -434,6 +444,14 @@ Use a specific MediaCodec video encoder (depending on the codec provided by \fB\
 The available encoders can be listed by \-\-list\-encoders.

+.TP
+.BI "\-\-video\-source " source
+Select the video source (display or camera).
+
+Camera mirroring requires Android 12+.
+
+Default is display.
+
 .TP
 .B \-w, \-\-stay-awake
 Keep the device on while scrcpy is running, when the device is plugged in.
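
As the new man page entries note, valid camera ids come from --list-cameras; a
typical workflow (commands shown only as an illustration) is:

    scrcpy --list-cameras
    scrcpy --video-source=camera --camera-id=<id> --camera-size=<width>x<height>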


@@ -77,12 +77,15 @@ enum {
     OPT_NO_VIDEO,
     OPT_NO_AUDIO_PLAYBACK,
     OPT_NO_VIDEO_PLAYBACK,
+    OPT_VIDEO_SOURCE,
     OPT_AUDIO_SOURCE,
     OPT_KILL_ADB_ON_CLOSE,
     OPT_TIME_LIMIT,
     OPT_PAUSE_ON_EXIT,
     OPT_LIST_CAMERAS,
     OPT_LIST_CAMERA_SIZES,
+    OPT_CAMERA_ID,
+    OPT_CAMERA_SIZE,
 };

 struct sc_option {
@@ -199,6 +202,20 @@ static const struct sc_option options[] = {
         .longopt = "bit-rate",
         .argdesc = "value",
     },
+    {
+        .longopt_id = OPT_CAMERA_ID,
+        .longopt = "camera-id",
+        .argdesc = "id",
+        .text = "Specify the device camera id to mirror.\n"
+                "The available camera ids can be listed by:\n"
+                "    scrcpy --list-cameras",
+    },
+    {
+        .longopt_id = OPT_CAMERA_SIZE,
+        .longopt = "camera-size",
+        .argdesc = "<width>x<height>",
+        .text = "Specify an explicit camera capture size.",
+    },
     {
         // Not really deprecated (--codec has never been released), but without
         // declaring an explicit --codec option, getopt_long() partial matching
@@ -703,6 +720,14 @@ static const struct sc_option options[] = {
                 "codec provided by --video-codec).\n"
                 "The available encoders can be listed by --list-encoders.",
     },
+    {
+        .longopt_id = OPT_VIDEO_SOURCE,
+        .longopt = "video-source",
+        .argdesc = "source",
+        .text = "Select the video source (display or camera).\n"
+                "Camera mirroring requires Android 12+.\n"
+                "Default is display.",
+    },
     {
         .shortopt = 'w',
         .longopt = "stay-awake",
@@ -1643,6 +1668,22 @@ parse_audio_codec(const char *optarg, enum sc_codec *codec) {
     return false;
 }

+static bool
+parse_video_source(const char *optarg, enum sc_video_source *source) {
+    if (!strcmp(optarg, "display")) {
+        *source = SC_VIDEO_SOURCE_DISPLAY;
+        return true;
+    }
+
+    if (!strcmp(optarg, "camera")) {
+        *source = SC_VIDEO_SOURCE_CAMERA;
+        return true;
+    }
+
+    LOGE("Unsupported video source: %s (expected display or camera)", optarg);
+    return false;
+}
+
 static bool
 parse_audio_source(const char *optarg, enum sc_audio_source *source) {
     if (!strcmp(optarg, "mic")) {
@@ -2030,6 +2071,11 @@ parse_args_with_getopt(struct scrcpy_cli_args *args, int argc, char *argv[],
                     return false;
                 }
                 break;
+            case OPT_VIDEO_SOURCE:
+                if (!parse_video_source(optarg, &opts->video_source)) {
+                    return false;
+                }
+                break;
             case OPT_AUDIO_SOURCE:
                 if (!parse_audio_source(optarg, &opts->audio_source)) {
                     return false;
@@ -2048,6 +2094,12 @@ parse_args_with_getopt(struct scrcpy_cli_args *args, int argc, char *argv[],
                     return false;
                 }
                 break;
+            case OPT_CAMERA_ID:
+                opts->camera_id = optarg;
+                break;
+            case OPT_CAMERA_SIZE:
+                opts->camera_size = optarg;
+                break;
             default:
                 // getopt prints the error message on stderr
                 return false;
@@ -2141,6 +2193,32 @@ parse_args_with_getopt(struct scrcpy_cli_args *args, int argc, char *argv[],
         opts->force_adb_forward = true;
     }

+    if (opts->video_source == SC_VIDEO_SOURCE_CAMERA) {
+        if (opts->display_id) {
+            LOGE("--display-id is only available with --video-source=display");
+            return false;
+        }
+
+        if (!opts->camera_id) {
+            LOGE("Camera id must be specified by --camera-id "
+                 "(list the available ids with --list-cameras)");
+            return false;
+        }
+
+        if (!opts->camera_size) {
+            LOGE("Camera size must be specified by --camera-size");
+            return false;
+        }
+
+        if (opts->control) {
+            LOGI("Camera video source: control disabled");
+            opts->control = false;
+        }
+    } else if (opts->camera_id || opts->camera_size) {
+        LOGE("Camera options are only available with --video-source=camera");
+        return false;
+    }
+
     if (opts->record_format && !opts->record_filename) {
         LOGE("Record format specified without recording");
         return false;


@@ -11,9 +11,12 @@ const struct scrcpy_options scrcpy_options_default = {
     .audio_codec_options = NULL,
     .video_encoder = NULL,
     .audio_encoder = NULL,
+    .camera_id = NULL,
+    .camera_size = NULL,
     .log_level = SC_LOG_LEVEL_INFO,
     .video_codec = SC_CODEC_H264,
     .audio_codec = SC_CODEC_OPUS,
+    .video_source = SC_VIDEO_SOURCE_DISPLAY,
     .audio_source = SC_AUDIO_SOURCE_OUTPUT,
     .record_format = SC_RECORD_FORMAT_AUTO,
     .keyboard_input_mode = SC_KEYBOARD_INPUT_MODE_INJECT,


@@ -44,6 +44,11 @@ enum sc_codec {
     SC_CODEC_RAW,
 };

+enum sc_video_source {
+    SC_VIDEO_SOURCE_DISPLAY,
+    SC_VIDEO_SOURCE_CAMERA,
+};
+
 enum sc_audio_source {
     SC_AUDIO_SOURCE_OUTPUT,
     SC_AUDIO_SOURCE_MIC,
@@ -117,9 +122,12 @@ struct scrcpy_options {
     const char *audio_codec_options;
     const char *video_encoder;
     const char *audio_encoder;
+    const char *camera_id;
+    const char *camera_size;
     enum sc_log_level log_level;
     enum sc_codec video_codec;
     enum sc_codec audio_codec;
+    enum sc_video_source video_source;
     enum sc_audio_source audio_source;
     enum sc_record_format record_format;
     enum sc_keyboard_input_mode keyboard_input_mode;


@@ -351,6 +351,7 @@ scrcpy(struct scrcpy_options *options) {
         .log_level = options->log_level,
         .video_codec = options->video_codec,
         .audio_codec = options->audio_codec,
+        .video_source = options->video_source,
         .audio_source = options->audio_source,
         .crop = options->crop,
         .port_range = options->port_range,
@@ -371,6 +372,8 @@ scrcpy(struct scrcpy_options *options) {
         .audio_codec_options = options->audio_codec_options,
         .video_encoder = options->video_encoder,
         .audio_encoder = options->audio_encoder,
+        .camera_id = options->camera_id,
+        .camera_size = options->camera_size,
         .force_adb_forward = options->force_adb_forward,
         .power_off_on_close = options->power_off_on_close,
         .clipboard_autosync = options->clipboard_autosync,


@@ -76,6 +76,7 @@ sc_server_params_destroy(struct sc_server_params *params) {
     free((char *) params->video_encoder);
     free((char *) params->audio_encoder);
     free((char *) params->tcpip_dst);
+    free((char *) params->camera_id);
 }

 static bool
@@ -103,6 +104,7 @@ sc_server_params_copy(struct sc_server_params *dst,
     COPY(video_encoder);
     COPY(audio_encoder);
     COPY(tcpip_dst);
+    COPY(camera_id);
 #undef COPY

     return true;
@@ -247,6 +249,10 @@ execute_server(struct sc_server *server,
         ADD_PARAM("audio_codec=%s",
                   sc_server_get_codec_name(params->audio_codec));
     }
+    if (params->video_source != SC_VIDEO_SOURCE_DISPLAY) {
+        assert(params->video_source == SC_VIDEO_SOURCE_CAMERA);
+        ADD_PARAM("video_source=camera");
+    }
     if (params->audio_source != SC_AUDIO_SOURCE_OUTPUT) {
         assert(params->audio_source == SC_AUDIO_SOURCE_MIC);
         ADD_PARAM("audio_source=mic");
@@ -274,6 +280,12 @@ execute_server(struct sc_server *server,
     if (params->display_id) {
         ADD_PARAM("display_id=%" PRIu32, params->display_id);
     }
+    if (params->camera_id) {
+        ADD_PARAM("camera_id=%s", params->camera_id);
+    }
+    if (params->camera_size) {
+        ADD_PARAM("camera_size=%s", params->camera_size);
+    }
     if (params->show_touches) {
         ADD_PARAM("show_touches=true");
     }
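
With the client-side changes above, a camera session adds the corresponding
key=value pairs to the server command line built by execute_server(); the
process arguments would then contain something like (values hypothetical):

    video_source=camera camera_id=0 camera_size=1920x1080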


@@ -26,12 +26,15 @@ struct sc_server_params {
     enum sc_log_level log_level;
     enum sc_codec video_codec;
     enum sc_codec audio_codec;
+    enum sc_video_source video_source;
     enum sc_audio_source audio_source;
     const char *crop;
     const char *video_codec_options;
     const char *audio_codec_options;
     const char *video_encoder;
     const char *audio_encoder;
+    const char *camera_id;
+    const char *camera_size;
     struct sc_port_range port_range;
     uint32_t tunnel_host;
     uint16_t tunnel_port;


@@ -0,0 +1,180 @@
package com.genymobile.scrcpy;

import com.genymobile.scrcpy.wrappers.ServiceManager;

import android.annotation.SuppressLint;
import android.annotation.TargetApi;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CaptureFailure;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.params.OutputConfiguration;
import android.hardware.camera2.params.SessionConfiguration;
import android.os.Build;
import android.os.Handler;
import android.os.HandlerThread;
import android.view.Surface;

import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executor;

public class CameraCapture extends SurfaceCapture {

    private final String explicitCameraId;
    private final Size explicitSize;

    private HandlerThread cameraThread;
    private Handler cameraHandler;
    private CameraDevice cameraDevice;
    private Executor cameraExecutor;

    public CameraCapture(String explicitCameraId, Size explicitSize) {
        this.explicitCameraId = explicitCameraId;
        this.explicitSize = explicitSize;
    }

    @Override
    public void init() throws IOException {
        cameraThread = new HandlerThread("camera");
        cameraThread.start();
        cameraHandler = new Handler(cameraThread.getLooper());
        cameraExecutor = new HandlerExecutor(cameraHandler);

        try {
            cameraDevice = openCamera(explicitCameraId);
        } catch (CameraAccessException | InterruptedException e) {
            throw new IOException(e);
        }
    }

    @Override
    public void start(Surface surface) throws IOException {
        try {
            CameraCaptureSession session = createCaptureSession(cameraDevice, surface);
            CaptureRequest request = createCaptureRequest(surface);
            setRepeatingRequest(session, request);
        } catch (CameraAccessException | InterruptedException e) {
            throw new IOException(e);
        }
    }

    @Override
    public void release() {
        if (cameraDevice != null) {
            cameraDevice.close();
        }
        if (cameraThread != null) {
            cameraThread.quitSafely();
        }
    }

    @Override
    public Size getSize() {
        return explicitSize;
    }

    @Override
    public boolean setMaxSize(int maxSize) {
        return false;
    }

    @SuppressLint("MissingPermission")
    @TargetApi(Build.VERSION_CODES.S)
    private CameraDevice openCamera(String id) throws CameraAccessException, InterruptedException {
        CompletableFuture<CameraDevice> future = new CompletableFuture<>();
        ServiceManager.getCameraManager().openCamera(id, new CameraDevice.StateCallback() {
            @Override
            public void onOpened(CameraDevice camera) {
                Ln.d("Camera opened successfully");
                future.complete(camera);
            }

            @Override
            public void onDisconnected(CameraDevice camera) {
                Ln.w("Camera disconnected");
                // TODO
            }

            @Override
            public void onError(CameraDevice camera, int error) {
                int cameraAccessExceptionErrorCode;
                switch (error) {
                    case CameraDevice.StateCallback.ERROR_CAMERA_IN_USE:
                        cameraAccessExceptionErrorCode = CameraAccessException.CAMERA_IN_USE;
                        break;
                    case CameraDevice.StateCallback.ERROR_MAX_CAMERAS_IN_USE:
                        cameraAccessExceptionErrorCode = CameraAccessException.MAX_CAMERAS_IN_USE;
                        break;
                    case CameraDevice.StateCallback.ERROR_CAMERA_DISABLED:
                        cameraAccessExceptionErrorCode = CameraAccessException.CAMERA_DISABLED;
                        break;
                    case CameraDevice.StateCallback.ERROR_CAMERA_DEVICE:
                    case CameraDevice.StateCallback.ERROR_CAMERA_SERVICE:
                    default:
                        cameraAccessExceptionErrorCode = CameraAccessException.CAMERA_ERROR;
                        break;
                }
                future.completeExceptionally(new CameraAccessException(cameraAccessExceptionErrorCode));
            }
        }, cameraHandler);
        try {
            return future.get();
        } catch (ExecutionException e) {
            throw (CameraAccessException) e.getCause();
        }
    }

    @TargetApi(Build.VERSION_CODES.S)
    private CameraCaptureSession createCaptureSession(CameraDevice camera, Surface surface) throws CameraAccessException, InterruptedException {
        CompletableFuture<CameraCaptureSession> future = new CompletableFuture<>();
        OutputConfiguration outputConfig = new OutputConfiguration(surface);
        List<OutputConfiguration> outputs = Arrays.asList(outputConfig);
        SessionConfiguration sessionConfig = new SessionConfiguration(SessionConfiguration.SESSION_REGULAR, outputs, cameraExecutor,
                new CameraCaptureSession.StateCallback() {
                    @Override
                    public void onConfigured(CameraCaptureSession session) {
                        future.complete(session);
                    }

                    @Override
                    public void onConfigureFailed(CameraCaptureSession session) {
                        future.completeExceptionally(new CameraAccessException(CameraAccessException.CAMERA_ERROR));
                    }
                });
        camera.createCaptureSession(sessionConfig);
        try {
            return future.get();
        } catch (ExecutionException e) {
            throw (CameraAccessException) e.getCause();
        }
    }

    private CaptureRequest createCaptureRequest(Surface surface) throws CameraAccessException {
        CaptureRequest.Builder requestBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
        requestBuilder.addTarget(surface);
        return requestBuilder.build();
    }

    @TargetApi(Build.VERSION_CODES.S)
    private void setRepeatingRequest(CameraCaptureSession session, CaptureRequest request) throws CameraAccessException, InterruptedException {
        session.setRepeatingRequest(request, new CameraCaptureSession.CaptureCallback() {
            @Override
            public void onCaptureStarted(CameraCaptureSession session, CaptureRequest request, long timestamp, long frameNumber) {
                // Called for each frame captured, do nothing
            }

            @Override
            public void onCaptureFailed(CameraCaptureSession session, CaptureRequest request, CaptureFailure failure) {
                Ln.w("Camera capture failed: frame " + failure.getFrameNumber());
            }
        }, cameraHandler);
    }
}


@@ -0,0 +1,23 @@
package com.genymobile.scrcpy;

import android.os.Handler;

import java.util.concurrent.Executor;
import java.util.concurrent.RejectedExecutionException;

// Inspired from hidden android.os.HandlerExecutor
public class HandlerExecutor implements Executor {
    private final Handler handler;

    public HandlerExecutor(Handler handler) {
        this.handler = handler;
    }

    @Override
    public void execute(Runnable command) {
        if (!handler.post(command)) {
            throw new RejectedExecutionException(handler + " is shutting down");
        }
    }
}


@@ -14,6 +14,7 @@ public class Options {
     private int maxSize;
     private VideoCodec videoCodec = VideoCodec.H264;
     private AudioCodec audioCodec = AudioCodec.OPUS;
+    private VideoSource videoSource = VideoSource.DISPLAY;
     private AudioSource audioSource = AudioSource.OUTPUT;
     private int videoBitRate = 8000000;
     private int audioBitRate = 128000;
@@ -23,6 +24,8 @@ public class Options {
     private Rect crop;
     private boolean control = true;
     private int displayId;
+    private String cameraId;
+    private Size cameraSize;
     private boolean showTouches;
     private boolean stayAwake;
     private List<CodecOption> videoCodecOptions;
@@ -75,6 +78,10 @@ public class Options {
         return audioCodec;
     }

+    public VideoSource getVideoSource() {
+        return videoSource;
+    }
+
     public AudioSource getAudioSource() {
         return audioSource;
     }
@@ -111,6 +118,14 @@ public class Options {
         return displayId;
     }

+    public String getCameraId() {
+        return cameraId;
+    }
+
+    public Size getCameraSize() {
+        return cameraSize;
+    }
+
     public boolean getShowTouches() {
         return showTouches;
     }
@@ -244,6 +259,13 @@ public class Options {
                     }
                     options.audioCodec = audioCodec;
                     break;
+                case "video_source":
+                    VideoSource videoSource = VideoSource.findByName(value);
+                    if (videoSource == null) {
+                        throw new IllegalArgumentException("Video source " + value + " not supported");
+                    }
+                    options.videoSource = videoSource;
+                    break;
                 case "audio_source":
                     AudioSource audioSource = AudioSource.findByName(value);
                     if (audioSource == null) {
@@ -328,6 +350,16 @@ public class Options {
                 case "list_camera_sizes":
                     options.listCameraSizes = Boolean.parseBoolean(value);
                     break;
+                case "camera_id":
+                    if (!value.isEmpty()) {
+                        options.cameraId = value;
+                    }
+                    break;
+                case "camera_size":
+                    if (!value.isEmpty()) {
+                        options.cameraSize = parseSize(value);
+                    }
+                    break;
                 case "send_device_meta":
                     options.sendDeviceMeta = Boolean.parseBoolean(value);
                     break;
@@ -370,4 +402,15 @@ public class Options {
         int y = Integer.parseInt(tokens[3]);
         return new Rect(x, y, x + width, y + height);
     }
+
+    private static Size parseSize(String size) {
+        // input format: "<width>x<height>"
+        String[] tokens = size.split("x");
+        if (tokens.length != 2) {
+            throw new IllegalArgumentException("Invalid size format (expected <width>x<height>): \"" + size + "\"");
+        }
+        int width = Integer.parseInt(tokens[0]);
+        int height = Integer.parseInt(tokens[1]);
+        return new Size(width, height);
+    }
 }


@@ -48,8 +48,9 @@ public class ScreenCapture extends SurfaceCapture implements Device.RotationList
     }

     @Override
-    public void setMaxSize(int maxSize) {
+    public boolean setMaxSize(int maxSize) {
         device.setMaxSize(maxSize);
+        return true;
     }

     @Override


@@ -98,7 +98,7 @@ public final class Server {
         boolean video = options.getVideo();
         boolean audio = options.getAudio();
         boolean sendDummyByte = options.getSendDummyByte();
-        boolean camera = false;
+        boolean camera = options.getVideoSource() == VideoSource.CAMERA;

         Workarounds.apply(audio, camera);

@@ -133,10 +133,15 @@
         if (video) {
             Streamer videoStreamer = new Streamer(connection.getVideoFd(), options.getVideoCodec(), options.getSendCodecMeta(),
                     options.getSendFrameMeta());
-            ScreenCapture screenCapture = new ScreenCapture(device);
-            SurfaceEncoder screenEncoder = new SurfaceEncoder(screenCapture, videoStreamer, options.getVideoBitRate(), options.getMaxFps(),
+            SurfaceCapture surfaceCapture;
+            if (options.getVideoSource() == VideoSource.DISPLAY) {
+                surfaceCapture = new ScreenCapture(device);
+            } else {
+                surfaceCapture = new CameraCapture(options.getCameraId(), options.getCameraSize());
+            }
+            SurfaceEncoder surfaceEncoder = new SurfaceEncoder(surfaceCapture, videoStreamer, options.getVideoBitRate(), options.getMaxFps(),
                     options.getVideoCodecOptions(), options.getVideoEncoder(), options.getDownsizeOnError());
-            asyncProcessors.add(screenEncoder);
+            asyncProcessors.add(surfaceEncoder);
         }

         Completion completion = new Completion(asyncProcessors.size());


@@ -2,6 +2,7 @@ package com.genymobile.scrcpy;

 import android.view.Surface;

+import java.io.IOException;
 import java.util.concurrent.atomic.AtomicBoolean;

 /**
@@ -31,7 +32,7 @@ public abstract class SurfaceCapture {
     /**
      * Called once before the capture starts.
      */
-    public abstract void init();
+    public abstract void init() throws IOException;

     /**
      * Called after the capture ends (if and only if {@link #init()} has been called).
@@ -43,7 +44,7 @@ public abstract class SurfaceCapture {
      *
      * @param surface the surface which will be encoded
      */
-    public abstract void start(Surface surface);
+    public abstract void start(Surface surface) throws IOException;

     /**
      * Return the video size
@@ -57,5 +58,5 @@ public abstract class SurfaceCapture {
      *
      * @param maxSize Maximum size
      */
-    public abstract void setMaxSize(int maxSize);
+    public abstract boolean setMaxSize(int maxSize);
 }


@@ -122,9 +122,13 @@ public class SurfaceEncoder implements AsyncProcessor {
             return false;
         }

-        // Retry with a smaller device size
+        boolean accepted = capture.setMaxSize(newMaxSize);
+        if (!accepted) {
+            return false;
+        }
+
+        // Retry with a smaller size
         Ln.i("Retrying with -m" + newMaxSize + "...");
-        capture.setMaxSize(newMaxSize);
         return true;
     }


@@ -0,0 +1,22 @@
package com.genymobile.scrcpy;

public enum VideoSource {
    DISPLAY("display"),
    CAMERA("camera");

    private final String name;

    VideoSource(String name) {
        this.name = name;
    }

    static VideoSource findByName(String name) {
        for (VideoSource videoSource : VideoSource.values()) {
            if (name.equals(videoSource.name)) {
                return videoSource;
            }
        }

        return null;
    }
}