Compare commits

...

20 Commits

Author SHA1 Message Date
1dce6abf20 AudioEncoder WIP 2023-02-07 23:23:45 +01:00
dd4c5d3820 audio demuxer 2023-02-07 23:12:54 +01:00
fa1178195e Rename demuxer to video_demuxer
There will be another demuxer instance for audio.
2023-02-07 23:12:54 +01:00
5dcce5f241 Capture device audio WIP
Co-authored-by: Romain Vimont <rom@rom1v.com>
Signed-off-by: Romain Vimont <rom@rom1v.com>
2023-02-07 23:12:54 +01:00
3f58dd22dc socketwip
Co-authored-by: Romain Vimont <rom@rom1v.com>
Signed-off-by: Romain Vimont <rom@rom1v.com>
2023-02-07 23:12:54 +01:00
cbd75ff1fb audio option
Co-authored-by: Romain Vimont <rom@rom1v.com>
Signed-off-by: Romain Vimont <rom@rom1v.com>
2023-02-07 23:12:54 +01:00
39cdf68150 Use FakeContext for Application instance
This will expose the correct package name and UID to the application
context.
2023-02-07 23:12:54 +01:00
3492c5c963 Use shell package name for workarounds
For consistency.
2023-02-07 23:12:54 +01:00
35f4040a78 Use PACKAGE_NAME from FakeContext
Remove duplicated constant.
2023-02-07 23:12:54 +01:00
fd7d6f3822 Use AttributionSource from FakeContext
FakeContext already provides an AttributionSource instance.

Co-authored-by: Simon Chan <1330321+yume-chan@users.noreply.github.com>
2023-02-07 23:12:54 +01:00
6c3e7f6b45 Add a fake Android Context
Since scrcpy-server is not an Android application (it's a Java
executable), it has no Context.

Some features will require a Context instance to get the package name
and the UID. Add a FakeContext for this purpose.

Co-authored-by: Romain Vimont <rom@rom1v.com>
Signed-off-by: Romain Vimont <rom@rom1v.com>
2023-02-07 23:12:54 +01:00
94d90927ad Use Process.ROOT_UID
Replace ServiceManager.USER_ID with the existing constant Process.ROOT_UID.
2023-02-07 23:12:54 +01:00
93504f5f34 Make streamer independent of codec type
Rename VideoStreamer to Streamer, and extract a Codec interface which
will also support audio codecs.
2023-02-07 23:12:54 +01:00
a52b1f025d Pass all args to ScreenEncoder constructor
There is no good reason to pass some of them to the constructor and others
as parameters of the streamScreen() method.
2023-02-07 23:12:54 +01:00
46759d3f63 Move screen encoder initialization
This prepares further refactors.
2023-02-07 23:12:54 +01:00
e37f9a4d7c Write streamer header from ScreenEncoder
The screen encoder is responsible for writing data to the video streamer
from its own thread (which will be added later).
2023-02-07 23:12:54 +01:00
842fdccaa9 Use VideoStreamer directly from ScreenEncoder
The Callbacks interface notifies of new packets. In addition, the screen
encoder will need to write headers on start from its own thread (which
will be added later).

We could add a function onStart() instead, but for simplicity, just
remove the interface, which brings no value, and call the streamer
directly.

Refs 87972e2022
2023-02-07 23:12:54 +01:00
8cdae30b01 Simplify error handling on socket creation
On any error, all previously opened sockets must be closed.

Handle these errors in a single catch-block. Currently, there are only 2
sockets, but this will simplify things even more as more sockets are added.

Note: this commit is better displayed with --ignore-space-change (-b).
2023-02-07 23:12:54 +01:00
30b8d140e8 Log component stopped in finally clause
The message must be logged even when no exception occurs.
2023-02-07 23:12:54 +01:00
4f986d4bbb Join all threads before end of main
Some calls running from separate threads may throw exceptions after the
main() method has returned.
2023-02-07 23:12:51 +01:00
24 changed files with 711 additions and 137 deletions

View File

@ -58,6 +58,7 @@
#define OPT_PRINT_FPS 1038
#define OPT_NO_POWER_ON 1039
#define OPT_CODEC 1040
#define OPT_NO_AUDIO 1041
struct sc_option {
char shortopt;
@ -298,6 +299,11 @@ static const struct sc_option options[] = {
.text = "Do not display device (only when screen recording or V4L2 "
"sink is enabled).",
},
{
.longopt_id = OPT_NO_AUDIO,
.longopt = "no-audio",
.text = "Disable audio forwarding.",
},
{
.longopt_id = OPT_NO_KEY_REPEAT,
.longopt = "no-key-repeat",
@ -1626,6 +1632,9 @@ parse_args_with_getopt(struct scrcpy_cli_args *args, int argc, char *argv[],
case OPT_NO_DOWNSIZE_ON_ERROR:
opts->downsize_on_error = false;
break;
case OPT_NO_AUDIO:
opts->audio = false;
break;
case OPT_NO_CLEANUP:
opts->cleanup = false;
break;

View File

@ -272,6 +272,8 @@ end:
void
sc_demuxer_init(struct sc_demuxer *demuxer, sc_socket socket,
const struct sc_demuxer_callbacks *cbs, void *cbs_userdata) {
assert(socket != SC_SOCKET_NONE);
demuxer->socket = socket;
demuxer->pending = NULL;
demuxer->sink_count = 0;

View File

@ -66,4 +66,5 @@ const struct scrcpy_options scrcpy_options_default = {
.cleanup = true,
.start_fps_counter = false,
.power_on = true,
.audio = true,
};

View File

@ -147,6 +147,7 @@ struct scrcpy_options {
bool cleanup;
bool start_fps_counter;
bool power_on;
bool audio;
};
extern const struct scrcpy_options scrcpy_options_default;

View File

@ -40,7 +40,8 @@
struct scrcpy {
struct sc_server server;
struct sc_screen screen;
struct sc_demuxer demuxer;
struct sc_demuxer video_demuxer;
struct sc_demuxer audio_demuxer;
struct sc_decoder decoder;
struct sc_recorder recorder;
#ifdef HAVE_V4L2
@ -233,13 +234,21 @@ av_log_callback(void *avcl, int level, const char *fmt, va_list vl) {
}
static void
sc_demuxer_on_eos(struct sc_demuxer *demuxer, void *userdata) {
sc_video_demuxer_on_eos(struct sc_demuxer *demuxer, void *userdata) {
(void) demuxer;
(void) userdata;
PUSH_EVENT(EVENT_STREAM_STOPPED);
}
static void
sc_audio_demuxer_on_eos(struct sc_demuxer *demuxer, void *userdata) {
(void) demuxer;
(void) userdata;
// TODO
}
static void
sc_server_on_connection_failed(struct sc_server *server, void *userdata) {
(void) server;
@ -295,7 +304,8 @@ scrcpy(struct scrcpy_options *options) {
#ifdef HAVE_V4L2
bool v4l2_sink_initialized = false;
#endif
bool demuxer_started = false;
bool video_demuxer_started = false;
bool audio_demuxer_started = false;
#ifdef HAVE_USB
bool aoa_hid_initialized = false;
bool hid_keyboard_initialized = false;
@ -326,6 +336,7 @@ scrcpy(struct scrcpy_options *options) {
.lock_video_orientation = options->lock_video_orientation,
.control = options->control,
.display_id = options->display_id,
.audio = options->audio,
.show_touches = options->show_touches,
.stay_awake = options->stay_awake,
.codec_options = options->codec_options,
@ -420,17 +431,26 @@ scrcpy(struct scrcpy_options *options) {
av_log_set_callback(av_log_callback);
static const struct sc_demuxer_callbacks demuxer_cbs = {
.on_eos = sc_demuxer_on_eos,
static const struct sc_demuxer_callbacks video_demuxer_cbs = {
.on_eos = sc_video_demuxer_on_eos,
};
sc_demuxer_init(&s->demuxer, s->server.video_socket, &demuxer_cbs, NULL);
sc_demuxer_init(&s->video_demuxer, s->server.video_socket,
&video_demuxer_cbs, NULL);
if (options->audio) {
static const struct sc_demuxer_callbacks audio_demuxer_cbs = {
.on_eos = sc_audio_demuxer_on_eos,
};
sc_demuxer_init(&s->audio_demuxer, s->server.audio_socket,
&audio_demuxer_cbs, NULL);
}
if (dec) {
sc_demuxer_add_sink(&s->demuxer, &dec->packet_sink);
sc_demuxer_add_sink(&s->video_demuxer, &dec->packet_sink);
}
if (rec) {
sc_demuxer_add_sink(&s->demuxer, &rec->packet_sink);
sc_demuxer_add_sink(&s->video_demuxer, &rec->packet_sink);
}
struct sc_controller *controller = NULL;
@ -638,17 +658,24 @@ aoa_hid_end:
#endif
// now we consumed the header values, the socket receives the video stream
// start the demuxer
if (!sc_demuxer_start(&s->demuxer)) {
// start the video demuxer
if (!sc_demuxer_start(&s->video_demuxer)) {
goto end;
}
demuxer_started = true;
video_demuxer_started = true;
if (options->audio) {
if (!sc_demuxer_start(&s->audio_demuxer)) {
goto end;
}
audio_demuxer_started = true;
}
ret = event_loop(s);
LOGD("quit...");
// Close the window immediately on closing, because screen_destroy() may
// only be called once the demuxer thread is joined (it may take time)
// only be called once the video demuxer thread is joined (it may take time)
sc_screen_hide_window(&s->screen);
end:
@ -686,8 +713,12 @@ end:
// now that the sockets are shutdown, the demuxer and controller are
// interrupted, we can join them
if (demuxer_started) {
sc_demuxer_join(&s->demuxer);
if (video_demuxer_started) {
sc_demuxer_join(&s->video_demuxer);
}
if (audio_demuxer_started) {
sc_demuxer_join(&s->audio_demuxer);
}
#ifdef HAVE_V4L2
@ -706,8 +737,9 @@ end:
}
#endif
// Destroy the screen only after the demuxer is guaranteed to be finished,
// because otherwise the screen could receive new frames after destruction
// Destroy the screen only after the video demuxer is guaranteed to be
// finished, because otherwise the screen could receive new frames after
// destruction
if (screen_initialized) {
sc_screen_join(&s->screen);
sc_screen_destroy(&s->screen);

View File

@ -217,6 +217,9 @@ execute_server(struct sc_server *server,
ADD_PARAM("log_level=%s", log_level_to_server_string(params->log_level));
ADD_PARAM("bit_rate=%" PRIu32, params->bit_rate);
if (!params->audio) {
ADD_PARAM("audio=false");
}
if (params->codec != SC_CODEC_H264) {
ADD_PARAM("codec=%s", sc_server_get_codec_name(params->codec));
}
@ -388,6 +391,7 @@ sc_server_init(struct sc_server *server, const struct sc_server_params *params,
server->stopped = false;
server->video_socket = SC_SOCKET_NONE;
server->audio_socket = SC_SOCKET_NONE;
server->control_socket = SC_SOCKET_NONE;
sc_adb_tunnel_init(&server->tunnel);
@ -431,9 +435,11 @@ sc_server_connect_to(struct sc_server *server, struct sc_server_info *info) {
const char *serial = server->serial;
assert(serial);
bool audio = server->params.audio;
bool control = server->params.control;
sc_socket video_socket = SC_SOCKET_NONE;
sc_socket audio_socket = SC_SOCKET_NONE;
sc_socket control_socket = SC_SOCKET_NONE;
if (!tunnel->forward) {
video_socket = net_accept_intr(&server->intr, tunnel->server_socket);
@ -441,6 +447,14 @@ sc_server_connect_to(struct sc_server *server, struct sc_server_info *info) {
goto fail;
}
if (audio) {
audio_socket =
net_accept_intr(&server->intr, tunnel->server_socket);
if (audio_socket == SC_SOCKET_NONE) {
goto fail;
}
}
if (control) {
control_socket =
net_accept_intr(&server->intr, tunnel->server_socket);
@ -467,6 +481,18 @@ sc_server_connect_to(struct sc_server *server, struct sc_server_info *info) {
goto fail;
}
if (audio) {
audio_socket = net_socket();
if (audio_socket == SC_SOCKET_NONE) {
goto fail;
}
bool ok = net_connect_intr(&server->intr, audio_socket, tunnel_host,
tunnel_port);
if (!ok) {
goto fail;
}
}
if (control) {
// we know that the device is listening, we don't need several
// attempts
@ -493,9 +519,11 @@ sc_server_connect_to(struct sc_server *server, struct sc_server_info *info) {
}
assert(video_socket != SC_SOCKET_NONE);
assert(!audio || audio_socket != SC_SOCKET_NONE);
assert(!control || control_socket != SC_SOCKET_NONE);
server->video_socket = video_socket;
server->audio_socket = audio_socket;
server->control_socket = control_socket;
return true;
@ -507,6 +535,12 @@ fail:
}
}
if (audio_socket != SC_SOCKET_NONE) {
if (!net_close(audio_socket)) {
LOGW("Could not close audio socket");
}
}
if (control_socket != SC_SOCKET_NONE) {
if (!net_close(control_socket)) {
LOGW("Could not close control socket");
@ -852,6 +886,11 @@ run_server(void *data) {
assert(server->video_socket != SC_SOCKET_NONE);
net_interrupt(server->video_socket);
if (server->audio_socket != SC_SOCKET_NONE) {
// There is no audio_socket if --no-audio is set
net_interrupt(server->audio_socket);
}
if (server->control_socket != SC_SOCKET_NONE) {
// There is no control_socket if --no-control is set
net_interrupt(server->control_socket);
@ -913,6 +952,9 @@ sc_server_destroy(struct sc_server *server) {
if (server->video_socket != SC_SOCKET_NONE) {
net_close(server->video_socket);
}
if (server->audio_socket != SC_SOCKET_NONE) {
net_close(server->audio_socket);
}
if (server->control_socket != SC_SOCKET_NONE) {
net_close(server->control_socket);
}

View File

@ -38,6 +38,7 @@ struct sc_server_params {
int8_t lock_video_orientation;
bool control;
uint32_t display_id;
bool audio;
bool show_touches;
bool stay_awake;
bool force_adb_forward;
@ -69,6 +70,7 @@ struct sc_server {
struct sc_adb_tunnel tunnel;
sc_socket video_socket;
sc_socket audio_socket;
sc_socket control_socket;
const struct sc_server_callbacks *cbs;

View File

@ -0,0 +1,46 @@
package com.genymobile.scrcpy;
import android.media.MediaFormat;
public enum AudioCodec implements Codec {
OPUS(0x6f_70_75_73, "opus", MediaFormat.MIMETYPE_AUDIO_OPUS);
private final int id; // 4-byte ASCII representation of the name
private final String name;
private final String mimeType;
AudioCodec(int id, String name, String mimeType) {
this.id = id;
this.name = name;
this.mimeType = mimeType;
}
@Override
public Type getType() {
return Type.AUDIO;
}
@Override
public int getId() {
return id;
}
@Override
public String getName() {
return name;
}
@Override
public String getMimeType() {
return mimeType;
}
public static AudioCodec findByName(String name) {
for (AudioCodec codec : values()) {
if (codec.name.equals(name)) {
return codec;
}
}
return null;
}
}
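The integer ID of each codec is simply its name packed as big-endian ASCII (0x6f_70_75_73 is "opus"). A hypothetical helper, not part of this changeset, illustrates the mapping:

// Illustration only: decode a 4-byte codec ID back to its ASCII name.
static String codecIdToString(int id) {
    byte[] bytes = {(byte) (id >>> 24), (byte) (id >>> 16), (byte) (id >>> 8), (byte) id};
    return new String(bytes, java.nio.charset.StandardCharsets.US_ASCII);
}

// codecIdToString(0x6f_70_75_73) returns "opus"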

View File

@ -0,0 +1,302 @@
package com.genymobile.scrcpy;
import android.annotation.SuppressLint;
import android.annotation.TargetApi;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.AudioTimestamp;
import android.media.MediaCodec;
import android.media.MediaFormat;
import android.media.MediaRecorder;
import android.os.Build;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.SystemClock;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.atomic.AtomicBoolean;
public final class AudioEncoder {
private static class InputTask {
final int index;
InputTask(int index) {
this.index = index;
}
}
private static class OutputTask {
final int index;
final MediaCodec.BufferInfo bufferInfo;
OutputTask(int index, MediaCodec.BufferInfo bufferInfo) {
this.index = index;
this.bufferInfo = bufferInfo;
}
}
private static final String MIMETYPE = MediaFormat.MIMETYPE_AUDIO_OPUS;
private static final int SAMPLE_RATE = 48000;
private static final int CHANNELS = 2;
private static final int BIT_RATE = 128000;
private static final int BUFFER_MS = 15; // milliseconds
private static final int BUFFER_SIZE = SAMPLE_RATE * CHANNELS * BUFFER_MS / 1000;
private AudioRecord recorder;
private MediaCodec mediaCodec;
private final AtomicBoolean cleanUpDone = new AtomicBoolean(false);
// A capacity of 64 is in practice "infinite" (the number of pending tasks is limited by the number of available MediaCodec buffers, typically about 4).
// That many pending tasks would lead to an unacceptable delay anyway.
private final BlockingQueue<InputTask> inputTasks = new ArrayBlockingQueue<>(64);
private final BlockingQueue<OutputTask> outputTasks = new ArrayBlockingQueue<>(64);
private Thread thread;
private HandlerThread mediaCodecThread;
private Thread inputThread;
private Thread outputThread;
private boolean ended;
private static AudioFormat createAudioFormat() {
AudioFormat.Builder builder = new AudioFormat.Builder();
builder.setEncoding(AudioFormat.ENCODING_PCM_16BIT);
builder.setSampleRate(SAMPLE_RATE);
builder.setChannelMask(CHANNELS == 2 ? AudioFormat.CHANNEL_IN_STEREO : AudioFormat.CHANNEL_IN_MONO);
return builder.build();
}
@TargetApi(Build.VERSION_CODES.M)
@SuppressLint({"WrongConstant", "MissingPermission"})
private static AudioRecord createAudioRecord() {
AudioRecord.Builder builder = new AudioRecord.Builder();
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.S) {
// On older APIs, Workarounds.fillAppInfo() must be called beforehand
builder.setContext(FakeContext.get());
}
builder.setAudioSource(MediaRecorder.AudioSource.REMOTE_SUBMIX);
builder.setAudioFormat(createAudioFormat());
builder.setBufferSizeInBytes(1024 * 1024);
return builder.build();
}
private static MediaFormat createFormat() {
MediaFormat format = new MediaFormat();
format.setString(MediaFormat.KEY_MIME, MIMETYPE);
format.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE);
format.setInteger(MediaFormat.KEY_CHANNEL_COUNT, CHANNELS);
format.setInteger(MediaFormat.KEY_SAMPLE_RATE, SAMPLE_RATE);
return format;
}
@TargetApi(Build.VERSION_CODES.N)
private void inputThread() throws IOException, InterruptedException {
final AudioTimestamp timestamp = new AudioTimestamp();
long previousPts = 0;
long nextPts = 0;
while (!Thread.currentThread().isInterrupted()) {
InputTask task = inputTasks.take();
ByteBuffer buffer = mediaCodec.getInputBuffer(task.index);
int r = recorder.read(buffer, BUFFER_SIZE);
if (r < 0) {
throw new IOException("Could not read audio: " + r);
}
long pts;
int ret = recorder.getTimestamp(timestamp, AudioTimestamp.TIMEBASE_MONOTONIC);
if (ret == AudioRecord.SUCCESS) {
pts = timestamp.nanoTime / 1000;
} else {
if (nextPts == 0) {
Ln.w("Could not get any audio timestamp");
}
// compute from previous timestamp and packet size
pts = nextPts;
}
long durationUs = r * 1_000_000L / (CHANNELS * 2 * SAMPLE_RATE); // r is in bytes (16-bit samples)
nextPts = pts + durationUs;
if (previousPts != 0 && pts < previousPts) {
// Audio PTS may come from two sources:
// - recorder.getTimestamp() if the call works;
// - an estimation from the previous PTS and the packet size as a fallback.
//
// Therefore, the property that PTS are monotonically increasing is not guaranteed in corner cases, so enforce it.
pts = previousPts + 1;
}
mediaCodec.queueInputBuffer(task.index, 0, r, pts, 0);
previousPts = pts;
}
}
private void outputThread() throws IOException, InterruptedException {
while (!Thread.currentThread().isInterrupted()) {
OutputTask task = outputTasks.take();
ByteBuffer buffer = mediaCodec.getOutputBuffer(task.index);
try {
Ln.i("Audio packet [pts=" + task.bufferInfo.presentationTimeUs + "] " + buffer.remaining() + " bytes");
} finally {
mediaCodec.releaseOutputBuffer(task.index, false);
}
}
}
public void start() {
thread = new Thread(() -> {
try {
encode();
} catch (IOException e) {
// this is expected on close
} finally {
Ln.d("Audio encoder stopped");
}
});
thread.start();
}
public void stop() {
if (thread != null) {
// Just wake up the blocking wait from the thread, so that it properly releases all its resources and terminates
end();
}
}
public void join() throws InterruptedException {
if (thread != null) {
thread.join();
}
}
private synchronized void end() {
ended = true;
notify();
}
private synchronized void waitEnded() {
try {
while (!ended) {
wait();
}
} catch (InterruptedException e) {
// ignore
}
}
@TargetApi(Build.VERSION_CODES.M)
public void encode() throws IOException {
mediaCodec = MediaCodec.createEncoderByType(MIMETYPE); // may throw IOException
try {
recorder = createAudioRecord();
mediaCodecThread = new HandlerThread("AudioEncoder");
mediaCodecThread.start();
MediaFormat format = createFormat();
mediaCodec.setCallback(new EncoderCallback(), new Handler(mediaCodecThread.getLooper()));
mediaCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
recorder.startRecording();
inputThread = new Thread(() -> {
try {
inputThread();
} catch (IOException | InterruptedException e) {
// this is expected on close
} finally {
end();
}
});
outputThread = new Thread(() -> {
try {
outputThread();
} catch (IOException | InterruptedException e) {
// this is expected on close
} finally {
end();
}
});
mediaCodec.start();
inputThread.start();
outputThread.start();
} catch (Throwable e) {
mediaCodec.release();
if (recorder != null) {
recorder.release();
}
throw e;
}
try {
waitEnded();
} finally {
cleanUp();
}
}
private void cleanUp() {
mediaCodecThread.getLooper().quit();
inputThread.interrupt();
outputThread.interrupt();
try {
mediaCodecThread.join();
inputThread.join();
outputThread.join();
} catch (InterruptedException e) {
// Should never happen
throw new AssertionError(e);
}
mediaCodec.stop();
mediaCodec.release();
recorder.stop();
recorder.release();
}
private class EncoderCallback extends MediaCodec.Callback {
@TargetApi(Build.VERSION_CODES.N)
@Override
public void onInputBufferAvailable(MediaCodec codec, int index) {
try {
inputTasks.put(new InputTask(index));
} catch (InterruptedException e) {
end();
}
}
@Override
public void onOutputBufferAvailable(MediaCodec codec, int index, MediaCodec.BufferInfo bufferInfo) {
try {
outputTasks.put(new OutputTask(index, bufferInfo));
} catch (InterruptedException e) {
end();
}
}
@Override
public void onError(MediaCodec codec, MediaCodec.CodecException e) {
Ln.e("MediaCodec error", e);
end();
}
@Override
public void onOutputFormatChanged(MediaCodec codec, MediaFormat format) {
// ignore
}
}
}
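The fallback PTS in inputThread() is derived from the number of bytes read from the AudioRecord. Assuming 16-bit PCM (2 bytes per sample) and the SAMPLE_RATE and CHANNELS constants above, the arithmetic is sketched by this hypothetical helper:

// Hypothetical helper: duration in microseconds of a PCM chunk read from the recorder.
static long pcmDurationUs(int bytesRead, int sampleRate, int channels) {
    int bytesPerFrame = channels * 2;          // 16-bit samples: 2 bytes per sample and channel
    long frames = bytesRead / bytesPerFrame;   // one frame = one sample for each channel
    return frames * 1_000_000L / sampleRate;   // frames to microseconds
}

// Example: 2880 bytes of 48 kHz stereo PCM -> 720 frames -> 15000 µs (15 ms)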

View File

@ -0,0 +1,11 @@
package com.genymobile.scrcpy;
public interface Codec {
enum Type {VIDEO, AUDIO}
Type getType();
int getId();
String getName();
String getMimeType();
}
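A hypothetical usage sketch (not part of this changeset) shows the point of the shared interface: callers can resolve either codec family to a plain Codec and query it uniformly:

// Hypothetical: pick the enum to search depending on the stream type.
static Codec findCodec(String name, boolean audio) {
    return audio ? AudioCodec.findByName(name) : VideoCodec.findByName(name);
}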

View File

@ -90,6 +90,7 @@ public class Controller {
control();
} catch (IOException e) {
// this is expected on close
} finally {
Ln.d("Controller stopped");
}
});
@ -100,11 +101,17 @@ public class Controller {
public void stop() {
if (thread != null) {
thread.interrupt();
thread = null;
}
sender.stop();
}
public void join() throws InterruptedException {
if (thread != null) {
thread.join();
}
sender.join();
}
public DeviceMessageSender getSender() {
return sender;
}

View File

@ -20,6 +20,9 @@ public final class DesktopConnection implements Closeable {
private final LocalSocket videoSocket;
private final FileDescriptor videoFd;
private final LocalSocket audioSocket;
private final FileDescriptor audioFd;
private final LocalSocket controlSocket;
private final InputStream controlInputStream;
private final OutputStream controlOutputStream;
@ -27,9 +30,10 @@ public final class DesktopConnection implements Closeable {
private final ControlMessageReader reader = new ControlMessageReader();
private final DeviceMessageWriter writer = new DeviceMessageWriter();
private DesktopConnection(LocalSocket videoSocket, LocalSocket controlSocket) throws IOException {
private DesktopConnection(LocalSocket videoSocket, LocalSocket audioSocket, LocalSocket controlSocket) throws IOException {
this.videoSocket = videoSocket;
this.controlSocket = controlSocket;
this.audioSocket = audioSocket;
if (controlSocket != null) {
controlInputStream = controlSocket.getInputStream();
controlOutputStream = controlSocket.getOutputStream();
@ -38,6 +42,7 @@ public final class DesktopConnection implements Closeable {
controlOutputStream = null;
}
videoFd = videoSocket.getFileDescriptor();
audioFd = audioSocket != null ? audioSocket.getFileDescriptor() : null;
}
private static LocalSocket connect(String abstractName) throws IOException {
@ -55,40 +60,50 @@ public final class DesktopConnection implements Closeable {
return SOCKET_NAME_PREFIX + String.format("_%08x", uid);
}
public static DesktopConnection open(int uid, boolean tunnelForward, boolean control, boolean sendDummyByte) throws IOException {
public static DesktopConnection open(int uid, boolean tunnelForward, boolean audio, boolean control, boolean sendDummyByte) throws IOException {
String socketName = getSocketName(uid);
LocalSocket videoSocket;
LocalSocket videoSocket = null;
LocalSocket audioSocket = null;
LocalSocket controlSocket = null;
if (tunnelForward) {
try (LocalServerSocket localServerSocket = new LocalServerSocket(socketName)) {
videoSocket = localServerSocket.accept();
if (sendDummyByte) {
// send one byte so the client may read() to detect a connection error
videoSocket.getOutputStream().write(0);
}
if (control) {
try {
try {
if (tunnelForward) {
try (LocalServerSocket localServerSocket = new LocalServerSocket(socketName)) {
videoSocket = localServerSocket.accept();
if (sendDummyByte) {
// send one byte so the client may read() to detect a connection error
videoSocket.getOutputStream().write(0);
}
if (audio) {
audioSocket = localServerSocket.accept();
}
if (control) {
controlSocket = localServerSocket.accept();
} catch (IOException | RuntimeException e) {
videoSocket.close();
throw e;
}
}
}
} else {
videoSocket = connect(socketName);
if (control) {
try {
} else {
videoSocket = connect(socketName);
if (audio) {
audioSocket = connect(socketName);
}
if (control) {
controlSocket = connect(socketName);
} catch (IOException | RuntimeException e) {
videoSocket.close();
throw e;
}
}
} catch (IOException | RuntimeException e) {
if (videoSocket != null) {
videoSocket.close();
}
if (audioSocket != null) {
audioSocket.close();
}
if (controlSocket != null) {
controlSocket.close();
}
throw e;
}
return new DesktopConnection(videoSocket, controlSocket);
return new DesktopConnection(videoSocket, audioSocket, controlSocket);
}
public void close() throws IOException {
@ -121,6 +136,10 @@ public final class DesktopConnection implements Closeable {
return videoFd;
}
public FileDescriptor getAudioFd() {
return audioFd;
}
public ControlMessage receiveControlMessage() throws IOException {
ControlMessage msg = reader.next();
while (msg == null) {
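The restructured open() above applies the pattern from commit 8cdae30b01, now extended with the audio socket: open the sockets one by one and, on any error, close whatever was opened so far in a single catch-block. A simplified sketch (reusing names from the surrounding code, not the exact implementation):

LocalSocket videoSocket = null;
LocalSocket audioSocket = null;
LocalSocket controlSocket = null;
try {
    videoSocket = connect(socketName);
    if (audio) {
        audioSocket = connect(socketName);
    }
    if (control) {
        controlSocket = connect(socketName);
    }
} catch (IOException | RuntimeException e) {
    // on any error, close every socket opened so far
    if (videoSocket != null) videoSocket.close();
    if (audioSocket != null) audioSocket.close();
    if (controlSocket != null) controlSocket.close();
    throw e;
}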

View File

@ -57,6 +57,7 @@ public final class DeviceMessageSender {
loop();
} catch (IOException | InterruptedException e) {
// this is expected on close
} finally {
Ln.d("Device message sender stopped");
}
});
@ -66,7 +67,12 @@ public final class DeviceMessageSender {
public void stop() {
if (thread != null) {
thread.interrupt();
thread = null;
}
}
public void join() throws InterruptedException {
if (thread != null) {
thread.join();
}
}
}

View File

@ -0,0 +1,40 @@
package com.genymobile.scrcpy;
import android.annotation.TargetApi;
import android.content.AttributionSource;
import android.content.ContextWrapper;
import android.os.Build;
import android.os.Process;
public final class FakeContext extends ContextWrapper {
public static final String PACKAGE_NAME = "com.android.shell";
private static final FakeContext INSTANCE = new FakeContext();
public static FakeContext get() {
return INSTANCE;
}
private FakeContext() {
super(null);
}
@Override
public String getPackageName() {
return PACKAGE_NAME;
}
@Override
public String getOpPackageName() {
return PACKAGE_NAME;
}
@TargetApi(Build.VERSION_CODES.S)
@Override
public AttributionSource getAttributionSource() {
AttributionSource.Builder builder = new AttributionSource.Builder(Process.SHELL_UID);
builder.setPackageName(PACKAGE_NAME);
return builder.build();
}
}

View File

@ -8,6 +8,7 @@ public class Options {
private Ln.Level logLevel = Ln.Level.DEBUG;
private int uid = -1; // 31-bit non-negative value, or -1
private boolean audio = true;
private int maxSize;
private VideoCodec codec = VideoCodec.H264;
private int bitRate = 8000000;
@ -49,6 +50,14 @@ public class Options {
this.uid = uid;
}
public boolean getAudio() {
return audio;
}
public void setAudio(boolean audio) {
this.audio = audio;
}
public int getMaxSize() {
return maxSize;
}

View File

@ -21,10 +21,6 @@ import java.util.concurrent.atomic.AtomicBoolean;
public class ScreenEncoder implements Device.RotationListener {
public interface Callbacks {
void onPacket(ByteBuffer codecBuffer, MediaCodec.BufferInfo bufferInfo) throws IOException;
}
private static final int DEFAULT_I_FRAME_INTERVAL = 10; // seconds
private static final int REPEAT_FRAME_DELAY_US = 100_000; // repeat after 100ms
private static final String KEY_MAX_FPS_TO_ENCODER = "max-fps-to-encoder";
@ -35,7 +31,8 @@ public class ScreenEncoder implements Device.RotationListener {
private final AtomicBoolean rotationChanged = new AtomicBoolean();
private final String videoMimeType;
private final Device device;
private final Streamer streamer;
private final String encoderName;
private final List<CodecOption> codecOptions;
private final int bitRate;
@ -45,8 +42,10 @@ public class ScreenEncoder implements Device.RotationListener {
private boolean firstFrameSent;
private int consecutiveErrors;
public ScreenEncoder(String videoMimeType, int bitRate, int maxFps, List<CodecOption> codecOptions, String encoderName, boolean downsizeOnError) {
this.videoMimeType = videoMimeType;
public ScreenEncoder(Device device, Streamer streamer, int bitRate, int maxFps, List<CodecOption> codecOptions,
String encoderName, boolean downsizeOnError) {
this.device = device;
this.streamer = streamer;
this.bitRate = bitRate;
this.maxFps = maxFps;
this.codecOptions = codecOptions;
@ -63,11 +62,15 @@ public class ScreenEncoder implements Device.RotationListener {
return rotationChanged.getAndSet(false);
}
public void streamScreen(Device device, Callbacks callbacks) throws IOException {
public void streamScreen() throws IOException {
String videoMimeType = streamer.getCodec().getMimeType();
MediaCodec codec = createCodec(videoMimeType, encoderName);
MediaFormat format = createFormat(videoMimeType, bitRate, maxFps, codecOptions);
IBinder display = createDisplay();
device.setRotationListener(this);
streamer.writeHeader();
boolean alive;
try {
do {
@ -92,7 +95,7 @@ public class ScreenEncoder implements Device.RotationListener {
codec.start();
alive = encode(codec, callbacks);
alive = encode(codec, streamer);
// do not call stop() on exception, it would trigger an IllegalStateException
codec.stop();
} catch (IllegalStateException | IllegalArgumentException e) {
@ -161,7 +164,7 @@ public class ScreenEncoder implements Device.RotationListener {
return 0;
}
private boolean encode(MediaCodec codec, Callbacks callbacks) throws IOException {
private boolean encode(MediaCodec codec, Streamer streamer) throws IOException {
boolean eof = false;
MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
@ -184,7 +187,7 @@ public class ScreenEncoder implements Device.RotationListener {
consecutiveErrors = 0;
}
callbacks.onPacket(codecBuffer, bufferInfo);
streamer.writePacket(codecBuffer, bufferInfo);
}
} finally {
if (outputBufferId >= 0) {

View File

@ -69,29 +69,36 @@ public final class Server {
int uid = options.getUid();
boolean tunnelForward = options.isTunnelForward();
boolean control = options.getControl();
boolean audio = options.getAudio();
boolean sendDummyByte = options.getSendDummyByte();
Workarounds.prepareMainLooper();
if (Build.BRAND.equalsIgnoreCase("meizu")) {
// Workarounds must be applied for Meizu phones:
// - <https://github.com/Genymobile/scrcpy/issues/240>
// - <https://github.com/Genymobile/scrcpy/issues/365>
// - <https://github.com/Genymobile/scrcpy/issues/2656>
//
// But only apply when strictly necessary, since workarounds can cause other issues:
// - <https://github.com/Genymobile/scrcpy/issues/940>
// - <https://github.com/Genymobile/scrcpy/issues/994>
// Workarounds must be applied for Meizu phones:
// - <https://github.com/Genymobile/scrcpy/issues/240>
// - <https://github.com/Genymobile/scrcpy/issues/365>
// - <https://github.com/Genymobile/scrcpy/issues/2656>
//
// But only apply when strictly necessary, since workarounds can cause other issues:
// - <https://github.com/Genymobile/scrcpy/issues/940>
// - <https://github.com/Genymobile/scrcpy/issues/994>
boolean mustFillAppInfo = Build.BRAND.equalsIgnoreCase("meizu");
// Before Android 11, audio is not supported.
// Since Android 12, we can properly set a context on the AudioRecord.
// Only on Android 11 do we need to fill in the app info for the AudioRecord to work.
mustFillAppInfo |= audio && Build.VERSION.SDK_INT == Build.VERSION_CODES.R;
if (mustFillAppInfo) {
Workarounds.fillAppInfo();
}
try (DesktopConnection connection = DesktopConnection.open(uid, tunnelForward, control, sendDummyByte)) {
try (DesktopConnection connection = DesktopConnection.open(uid, tunnelForward, audio, control, sendDummyByte)) {
VideoCodec codec = options.getCodec();
if (options.getSendDeviceMeta()) {
Size videoSize = device.getScreenInfo().getVideoSize();
connection.sendDeviceMeta(Device.getDeviceName(), videoSize.getWidth(), videoSize.getHeight());
}
ScreenEncoder screenEncoder = new ScreenEncoder(codec.getMimeType(), options.getBitRate(), options.getMaxFps(), codecOptions,
options.getEncoderName(), options.getDownsizeOnError());
Controller controller = null;
if (control) {
@ -102,21 +109,41 @@ public final class Server {
device.setClipboardListener(text -> controllerRef.getSender().pushClipboardText(text));
}
AudioEncoder audioEncoder = null;
if (audio) {
audioEncoder = new AudioEncoder();
audioEncoder.start();
}
Streamer videoStreamer = new Streamer(connection.getVideoFd(), codec, options.getSendCodecId(), options.getSendFrameMeta());
ScreenEncoder screenEncoder = new ScreenEncoder(device, videoStreamer, options.getBitRate(), options.getMaxFps(),
codecOptions, options.getEncoderName(), options.getDownsizeOnError());
try {
// synchronous
VideoStreamer videoStreamer = new VideoStreamer(connection.getVideoFd(), options.getSendFrameMeta());
if (options.getSendCodecId()) {
videoStreamer.writeHeader(codec.getId());
}
screenEncoder.streamScreen(device, videoStreamer);
screenEncoder.streamScreen();
} catch (IOException e) {
// this is expected on close
Ln.d("Screen streaming stopped");
} finally {
Ln.d("Screen streaming stopped");
initThread.interrupt();
if (audioEncoder != null) {
audioEncoder.stop();
}
if (controller != null) {
controller.stop();
}
try {
initThread.join();
if (audioEncoder != null) {
audioEncoder.join();
}
if (controller != null) {
controller.join();
}
} catch (InterruptedException e) {
// ignore
}
}
}
}
@ -160,6 +187,10 @@ public final class Server {
Ln.Level level = Ln.Level.valueOf(value.toUpperCase(Locale.ENGLISH));
options.setLogLevel(level);
break;
case "audio":
boolean audio = Boolean.parseBoolean(value);
options.setAudio(audio);
break;
case "codec":
VideoCodec codec = VideoCodec.findByName(value);
if (codec == null) {

View File

@ -6,30 +6,39 @@ import java.io.FileDescriptor;
import java.io.IOException;
import java.nio.ByteBuffer;
public final class VideoStreamer implements ScreenEncoder.Callbacks {
public final class Streamer {
private static final long PACKET_FLAG_CONFIG = 1L << 63;
private static final long PACKET_FLAG_KEY_FRAME = 1L << 62;
private final FileDescriptor fd;
private final Codec codec;
private final boolean sendCodecId;
private final boolean sendFrameMeta;
private final ByteBuffer headerBuffer = ByteBuffer.allocate(12);
public VideoStreamer(FileDescriptor fd, boolean sendFrameMeta) {
public Streamer(FileDescriptor fd, Codec codec, boolean sendCodecId, boolean sendFrameMeta) {
this.fd = fd;
this.codec = codec;
this.sendCodecId = sendCodecId;
this.sendFrameMeta = sendFrameMeta;
}
public void writeHeader(int codecId) throws IOException {
ByteBuffer buffer = ByteBuffer.allocate(4);
buffer.putInt(codecId);
buffer.flip();
IO.writeFully(fd, buffer);
public Codec getCodec() {
return codec;
}
@Override
public void onPacket(ByteBuffer codecBuffer, MediaCodec.BufferInfo bufferInfo) throws IOException {
public void writeHeader() throws IOException {
if (sendCodecId) {
ByteBuffer buffer = ByteBuffer.allocate(4);
buffer.putInt(codec.getId());
buffer.flip();
IO.writeFully(fd, buffer);
}
}
public void writePacket(ByteBuffer codecBuffer, MediaCodec.BufferInfo bufferInfo) throws IOException {
if (sendFrameMeta) {
writeFrameMeta(fd, bufferInfo, codecBuffer.remaining());
}
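On the wire, the streamer optionally writes a 4-byte codec ID and, when sendFrameMeta is enabled, a 12-byte frame header before each packet payload. A hypothetical client-side reader (not part of scrcpy), assuming the pre-existing writeFrameMeta() layout of an 8-byte PTS/flags field plus a 4-byte packet length:

// Hypothetical sketch: parse the codec ID header, then length-prefixed packets.
static void readStream(java.io.DataInputStream in) throws java.io.IOException {
    int codecId = in.readInt();                // 4-byte codec ID, e.g. 0x6f_70_75_73 ("opus")
    while (true) {                             // terminates with an EOFException when the socket closes
        long ptsAndFlags = in.readLong();      // bit 63: config packet, bit 62: key frame
        int length = in.readInt();             // payload size in bytes
        boolean config = (ptsAndFlags & (1L << 63)) != 0;
        boolean keyFrame = (ptsAndFlags & (1L << 62)) != 0;
        long pts = ptsAndFlags & ~(3L << 62);  // remaining bits: PTS in microseconds
        byte[] packet = new byte[length];
        in.readFully(packet);
        // ... hand the packet to a decoder or recorder
    }
}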

View File

@ -2,7 +2,7 @@ package com.genymobile.scrcpy;
import android.media.MediaFormat;
public enum VideoCodec {
public enum VideoCodec implements Codec {
H264(0x68_32_36_34, "h264", MediaFormat.MIMETYPE_VIDEO_AVC),
H265(0x68_32_36_35, "h265", MediaFormat.MIMETYPE_VIDEO_HEVC),
AV1(0x00_61_76_31, "av1", MediaFormat.MIMETYPE_VIDEO_AV1);
@ -17,10 +17,22 @@ public enum VideoCodec {
this.mimeType = mimeType;
}
@Override
public Type getType() {
return Type.VIDEO;
}
@Override
public int getId() {
return id;
}
@Override
public String getName() {
return name;
}
@Override
public String getMimeType() {
return mimeType;
}

View File

@ -3,13 +3,12 @@ package com.genymobile.scrcpy;
import android.annotation.SuppressLint;
import android.app.Application;
import android.app.Instrumentation;
import android.content.Context;
import android.content.ContextWrapper;
import android.content.pm.ApplicationInfo;
import android.os.Looper;
import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
public final class Workarounds {
private Workarounds() {
@ -50,7 +49,7 @@ public final class Workarounds {
Object appBindData = appBindDataConstructor.newInstance();
ApplicationInfo applicationInfo = new ApplicationInfo();
applicationInfo.packageName = "com.genymobile.scrcpy";
applicationInfo.packageName = FakeContext.PACKAGE_NAME;
// appBindData.appInfo = applicationInfo;
Field appInfoField = appBindDataClass.getDeclaredField("appInfo");
@ -62,11 +61,10 @@ public final class Workarounds {
mBoundApplicationField.setAccessible(true);
mBoundApplicationField.set(activityThread, appBindData);
// Context ctx = activityThread.getSystemContext();
Method getSystemContextMethod = activityThreadClass.getDeclaredMethod("getSystemContext");
Context ctx = (Context) getSystemContextMethod.invoke(activityThread);
Application app = Instrumentation.newApplication(Application.class, ctx);
Application app = Application.class.newInstance();
Field baseField = ContextWrapper.class.getDeclaredField("mBase");
baseField.setAccessible(true);
baseField.set(app, FakeContext.get());
// activityThread.mInitialApplication = app;
Field mInitialApplicationField = activityThreadClass.getDeclaredField("mInitialApplication");

View File

@ -5,6 +5,7 @@ import com.genymobile.scrcpy.Ln;
import android.os.Binder;
import android.os.IBinder;
import android.os.IInterface;
import android.os.Process;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
@ -48,10 +49,10 @@ public class ActivityManager {
Object[] args;
if (getContentProviderExternalMethodNewVersion) {
// new version
args = new Object[]{name, ServiceManager.USER_ID, token, null};
args = new Object[]{name, Process.ROOT_UID, token, null};
} else {
// old version
args = new Object[]{name, ServiceManager.USER_ID, token};
args = new Object[]{name, Process.ROOT_UID, token};
}
// ContentProviderHolder providerHolder = getContentProviderExternal(...);
Object providerHolder = method.invoke(manager, args);

View File

@ -1,11 +1,13 @@
package com.genymobile.scrcpy.wrappers;
import com.genymobile.scrcpy.FakeContext;
import com.genymobile.scrcpy.Ln;
import android.content.ClipData;
import android.content.IOnPrimaryClipChangedListener;
import android.os.Build;
import android.os.IInterface;
import android.os.Process;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
@ -58,22 +60,22 @@ public class ClipboardManager {
private static ClipData getPrimaryClip(Method method, boolean alternativeMethod, IInterface manager)
throws InvocationTargetException, IllegalAccessException {
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.Q) {
return (ClipData) method.invoke(manager, ServiceManager.PACKAGE_NAME);
return (ClipData) method.invoke(manager, FakeContext.PACKAGE_NAME);
}
if (alternativeMethod) {
return (ClipData) method.invoke(manager, ServiceManager.PACKAGE_NAME, null, ServiceManager.USER_ID);
return (ClipData) method.invoke(manager, FakeContext.PACKAGE_NAME, null, Process.ROOT_UID);
}
return (ClipData) method.invoke(manager, ServiceManager.PACKAGE_NAME, ServiceManager.USER_ID);
return (ClipData) method.invoke(manager, FakeContext.PACKAGE_NAME, Process.ROOT_UID);
}
private static void setPrimaryClip(Method method, boolean alternativeMethod, IInterface manager, ClipData clipData)
throws InvocationTargetException, IllegalAccessException {
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.Q) {
method.invoke(manager, clipData, ServiceManager.PACKAGE_NAME);
method.invoke(manager, clipData, FakeContext.PACKAGE_NAME);
} else if (alternativeMethod) {
method.invoke(manager, clipData, ServiceManager.PACKAGE_NAME, null, ServiceManager.USER_ID);
method.invoke(manager, clipData, FakeContext.PACKAGE_NAME, null, Process.ROOT_UID);
} else {
method.invoke(manager, clipData, ServiceManager.PACKAGE_NAME, ServiceManager.USER_ID);
method.invoke(manager, clipData, FakeContext.PACKAGE_NAME, Process.ROOT_UID);
}
}
@ -106,11 +108,11 @@ public class ClipboardManager {
private static void addPrimaryClipChangedListener(Method method, boolean alternativeMethod, IInterface manager,
IOnPrimaryClipChangedListener listener) throws InvocationTargetException, IllegalAccessException {
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.Q) {
method.invoke(manager, listener, ServiceManager.PACKAGE_NAME);
method.invoke(manager, listener, FakeContext.PACKAGE_NAME);
} else if (alternativeMethod) {
method.invoke(manager, listener, ServiceManager.PACKAGE_NAME, null, ServiceManager.USER_ID);
method.invoke(manager, listener, FakeContext.PACKAGE_NAME, null, Process.ROOT_UID);
} else {
method.invoke(manager, listener, ServiceManager.PACKAGE_NAME, ServiceManager.USER_ID);
method.invoke(manager, listener, FakeContext.PACKAGE_NAME, Process.ROOT_UID);
}
}

View File

@ -1,11 +1,15 @@
package com.genymobile.scrcpy.wrappers;
import com.genymobile.scrcpy.FakeContext;
import com.genymobile.scrcpy.Ln;
import com.genymobile.scrcpy.SettingsException;
import android.annotation.SuppressLint;
import android.content.AttributionSource;
import android.os.Build;
import android.os.Bundle;
import android.os.IBinder;
import android.os.Process;
import java.io.Closeable;
import java.lang.reflect.InvocationTargetException;
@ -51,11 +55,10 @@ public class ContentProvider implements Closeable {
@SuppressLint("PrivateApi")
private Method getCallMethod() throws NoSuchMethodException {
if (callMethod == null) {
try {
Class<?> attributionSourceClass = Class.forName("android.content.AttributionSource");
callMethod = provider.getClass().getMethod("call", attributionSourceClass, String.class, String.class, String.class, Bundle.class);
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.S) {
callMethod = provider.getClass().getMethod("call", AttributionSource.class, String.class, String.class, String.class, Bundle.class);
callMethodVersion = 0;
} catch (NoSuchMethodException | ClassNotFoundException e0) {
} else {
// old versions
try {
callMethod = provider.getClass()
@ -75,40 +78,29 @@ public class ContentProvider implements Closeable {
return callMethod;
}
@SuppressLint("PrivateApi")
private Object getAttributionSource()
throws ClassNotFoundException, NoSuchMethodException, IllegalAccessException, InvocationTargetException, InstantiationException {
if (attributionSource == null) {
Class<?> cl = Class.forName("android.content.AttributionSource$Builder");
Object builder = cl.getConstructor(int.class).newInstance(ServiceManager.USER_ID);
cl.getDeclaredMethod("setPackageName", String.class).invoke(builder, ServiceManager.PACKAGE_NAME);
attributionSource = cl.getDeclaredMethod("build").invoke(builder);
}
return attributionSource;
}
private Bundle call(String callMethod, String arg, Bundle extras)
throws ClassNotFoundException, NoSuchMethodException, InvocationTargetException, InstantiationException, IllegalAccessException {
throws NoSuchMethodException, InvocationTargetException, IllegalAccessException {
try {
Method method = getCallMethod();
Object[] args;
switch (callMethodVersion) {
case 0:
args = new Object[]{getAttributionSource(), "settings", callMethod, arg, extras};
break;
case 1:
args = new Object[]{ServiceManager.PACKAGE_NAME, null, "settings", callMethod, arg, extras};
break;
case 2:
args = new Object[]{ServiceManager.PACKAGE_NAME, "settings", callMethod, arg, extras};
break;
default:
args = new Object[]{ServiceManager.PACKAGE_NAME, callMethod, arg, extras};
break;
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.S && callMethodVersion == 0) {
args = new Object[]{FakeContext.get().getAttributionSource(), "settings", callMethod, arg, extras};
} else {
switch (callMethodVersion) {
case 1:
args = new Object[]{FakeContext.PACKAGE_NAME, null, "settings", callMethod, arg, extras};
break;
case 2:
args = new Object[]{FakeContext.PACKAGE_NAME, "settings", callMethod, arg, extras};
break;
default:
args = new Object[]{FakeContext.PACKAGE_NAME, callMethod, arg, extras};
break;
}
}
return (Bundle) method.invoke(provider, args);
} catch (InvocationTargetException | IllegalAccessException | NoSuchMethodException | ClassNotFoundException | InstantiationException e) {
} catch (InvocationTargetException | IllegalAccessException | NoSuchMethodException e) {
Ln.e("Could not invoke method", e);
throw e;
}
@ -147,7 +139,7 @@ public class ContentProvider implements Closeable {
public String getValue(String table, String key) throws SettingsException {
String method = getGetMethod(table);
Bundle arg = new Bundle();
arg.putInt(CALL_METHOD_USER_KEY, ServiceManager.USER_ID);
arg.putInt(CALL_METHOD_USER_KEY, Process.ROOT_UID);
try {
Bundle bundle = call(method, key, arg);
if (bundle == null) {
@ -163,7 +155,7 @@ public class ContentProvider implements Closeable {
public void putValue(String table, String key, String value) throws SettingsException {
String method = getPutMethod(table);
Bundle arg = new Bundle();
arg.putInt(CALL_METHOD_USER_KEY, ServiceManager.USER_ID);
arg.putInt(CALL_METHOD_USER_KEY, Process.ROOT_UID);
arg.putString(NAME_VALUE_TABLE_VALUE, value);
try {
call(method, key, arg);

View File

@ -10,9 +10,6 @@ import java.lang.reflect.Method;
@SuppressLint("PrivateApi,DiscouragedPrivateApi")
public final class ServiceManager {
public static final String PACKAGE_NAME = "com.android.shell";
public static final int USER_ID = 0;
private static final Method GET_SERVICE_METHOD;
static {
try {