Compare commits


8 Commits

Author SHA1 Message Date
5f2cf12acf Add intermediate frame in video buffer
There were only two frames simultaneously:
 - one used by the decoder;
 - one used by the renderer.

When the decoder finished decoding a frame, it swapped it with the
rendering frame.

Adding a third frame provides several benefits:
 - the decoder does not have to wait for the renderer to release the
   mutex;
 - it simplifies the video_buffer API;
 - it makes the rendering frame valid until the next call to
   video_buffer_take_rendering_frame(), which will be useful for
   swscaling on window resize.
2021-02-04 08:12:22 +01:00
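The three-frame rotation described in the commit message above is easier to see in code. The following is a hypothetical sketch only: the struct fields, the pending_consumed flag and the locking details are assumptions for illustration (the real code goes through scrcpy's sc_mutex wrapper), and only the two function names also appear in the diffs further down.

#include <stdbool.h>
#include <SDL2/SDL_mutex.h>
#include <libavutil/frame.h>

struct video_buffer {
    SDL_mutex *mutex;
    AVFrame *decoding_frame;   // currently written by the decoder
    AVFrame *pending_frame;    // last complete frame, waiting for the renderer
    AVFrame *rendering_frame;  // owned by the renderer until the next take
    bool pending_consumed;     // has the renderer picked up pending_frame?
};

static void
swap_frames(AVFrame **lhs, AVFrame **rhs) {
    AVFrame *tmp = *lhs;
    *lhs = *rhs;
    *rhs = tmp;
}

// Decoder thread: publish a decoded frame without waiting for the renderer.
void
video_buffer_offer_decoded_frame(struct video_buffer *vb,
                                 bool *previous_frame_skipped) {
    SDL_LockMutex(vb->mutex);
    swap_frames(&vb->decoding_frame, &vb->pending_frame);
    *previous_frame_skipped = !vb->pending_consumed;
    vb->pending_consumed = false;
    SDL_UnlockMutex(vb->mutex);
}

// Render thread: take the latest pending frame; the returned frame stays
// valid until the next call, because only that next call swaps it out again.
const AVFrame *
video_buffer_take_rendering_frame(struct video_buffer *vb) {
    SDL_LockMutex(vb->mutex);
    vb->pending_consumed = true;
    swap_frames(&vb->pending_frame, &vb->rendering_frame);
    SDL_UnlockMutex(vb->mutex);
    return vb->rendering_frame;
}

With the extra pending frame, the decoder only ever blocks for the duration of a pointer swap, which is what the first benefit in the list refers to.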
2a3c6b64dd Assert non-recursive usage of mutexes 2021-02-04 08:12:22 +01:00
a9582b1d43 Add mutex assertions 2021-02-04 08:12:22 +01:00
fa3e84b700 Expose mutex assertions
Add a function to assert that the mutex is held (or not).
2021-02-04 08:12:22 +01:00
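The three mutex commits above ("Assert non-recursive usage of mutexes", "Add mutex assertions", "Expose mutex assertions") fit together: in debug builds the wrapper can record which thread holds the mutex and assert on it. The sketch below is a hypothetical implementation on top of SDL; only sc_mutex_held(), sc_mutex_lock(), sc_mutex_unlock() and the locker field also appear in the last diff of this compare, the rest is assumed.

#include <assert.h>
#include <stdbool.h>
#include <SDL2/SDL_mutex.h>
#include <SDL2/SDL_thread.h>

typedef struct sc_mutex {
    SDL_mutex *mutex;
#ifndef NDEBUG
    SDL_threadID locker; // id of the thread holding the mutex, 0 if unlocked
#endif
} sc_mutex;

#ifndef NDEBUG
static bool
sc_mutex_held(sc_mutex *mutex) {
    // true only if the *current* thread is the one that locked it
    return mutex->locker == SDL_ThreadID();
}
#endif

static void
sc_mutex_lock(sc_mutex *mutex) {
#ifndef NDEBUG
    // non-recursive usage: the current thread must not already hold it
    assert(!sc_mutex_held(mutex));
#endif
    int r = SDL_LockMutex(mutex->mutex);
    assert(!r);
    (void) r;
#ifndef NDEBUG
    mutex->locker = SDL_ThreadID();
#endif
}

static void
sc_mutex_unlock(sc_mutex *mutex) {
#ifndef NDEBUG
    assert(sc_mutex_held(mutex)); // must be unlocked by the locking thread
    mutex->locker = 0;
#endif
    int r = SDL_UnlockMutex(mutex->mutex);
    assert(!r);
    (void) r;
}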
1521de9051 Expose thread id 2021-02-04 08:12:22 +01:00
eabaabdb78 Wrap SDL thread functions into scrcpy-specific API
The goal is to expose a consistent API for system tools, and it paves the
way to making the "core" independent of SDL in the future.
2021-02-04 08:12:22 +01:00
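A hypothetical sketch of what such a wrapper could look like is shown below; the sc_ names and signatures are assumptions modelled on the prefix used elsewhere in these diffs, and the actual API in the tree may differ. It also covers the "Expose thread id" commit above, since the mutex assertions need a way to identify the current thread.

#include <stdbool.h>
#include <SDL2/SDL_thread.h>

typedef struct sc_thread {
    SDL_Thread *thread;
} sc_thread;

typedef SDL_threadID sc_thread_id;
typedef int sc_thread_fn(void *data);

// Callers depend only on the sc_* names, not on SDL directly, so the
// backing implementation can be swapped later.
static bool
sc_thread_create(sc_thread *thread, sc_thread_fn fn, const char *name,
                 void *userdata) {
    SDL_Thread *sdl_thread = SDL_CreateThread(fn, name, userdata);
    if (!sdl_thread) {
        return false;
    }
    thread->thread = sdl_thread;
    return true;
}

static void
sc_thread_join(sc_thread *thread, int *status) {
    SDL_WaitThread(thread->thread, status);
}

static sc_thread_id
sc_thread_get_id(void) {
    return SDL_ThreadID();
}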
8b48003074 Replace SDL_strdup() by strdup()
The functions SDL_malloc(), SDL_free() and SDL_strdup() were used only
because strdup() was not available everywhere.

Now that it is available, use the native versions of these functions.
2021-02-04 08:12:22 +01:00
74bd25a0ed Provide strdup() compat
Make strdup() available on all platforms.
2021-02-04 08:12:22 +01:00
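The compat shim only needs a tiny fallback definition; a hypothetical sketch is shown below (the HAVE_STRDUP guard is an assumed name, the real build system may detect availability differently).

#include <stdlib.h>
#include <string.h>

#ifndef HAVE_STRDUP
// Minimal strdup() fallback for platforms that lack it
char *
strdup(const char *s) {
    size_t size = strlen(s) + 1;
    char *dup = malloc(size);
    if (dup) {
        memcpy(dup, s, size);
    }
    return dup;
}
#endif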
9 changed files with 32 additions and 51 deletions

View File

@@ -2,6 +2,7 @@
#include <libavformat/avformat.h>
#include <libavutil/time.h>
#include <SDL2/SDL_events.h>
#include <unistd.h>
#include "events.h"
@@ -17,22 +18,18 @@ push_frame(struct decoder *decoder) {
video_buffer_offer_decoded_frame(decoder->video_buffer,
&previous_frame_skipped);
if (previous_frame_skipped) {
// the previous callback will consume this frame
// the previous EVENT_NEW_FRAME will consume this frame
return;
}
decoder->cbs->on_new_frame(decoder, decoder->cbs_userdata);
static SDL_Event new_frame_event = {
.type = EVENT_NEW_FRAME,
};
SDL_PushEvent(&new_frame_event);
}
void
decoder_init(struct decoder *decoder, struct video_buffer *vb,
const struct decoder_callbacks *cbs, void *cbs_userdata) {
decoder_init(struct decoder *decoder, struct video_buffer *vb) {
decoder->video_buffer = vb;
assert(cbs);
assert(cbs->on_new_frame);
decoder->cbs = cbs;
decoder->cbs_userdata = cbs_userdata;
}
bool

View File

@@ -11,20 +11,10 @@ struct video_buffer;
struct decoder {
struct video_buffer *video_buffer;
AVCodecContext *codec_ctx;
const struct decoder_callbacks *cbs;
void *cbs_userdata;
};
struct decoder_callbacks {
// Called when a new frame can be consumed by
// video_buffer_take_rendering_frame(decoder->video_buffer).
void (*on_new_frame)(struct decoder *decoder, void *userdata);
};
void
decoder_init(struct decoder *decoder, struct video_buffer *vb,
const struct decoder_callbacks *cbs, void *cbs_userdata);
decoder_init(struct decoder *decoder, struct video_buffer *vb);
bool
decoder_open(struct decoder *decoder, const AVCodec *codec);

View File

@@ -1,2 +1,3 @@
#define EVENT_NEW_FRAME SDL_USEREVENT
#define EVENT_STREAM_STOPPED (SDL_USEREVENT + 1)
#define EVENT_NEW_SESSION SDL_USEREVENT
#define EVENT_NEW_FRAME (SDL_USEREVENT + 1)
#define EVENT_STREAM_STOPPED (SDL_USEREVENT + 2)

View File

@@ -184,9 +184,7 @@ handle_event(SDL_Event *event, const struct scrcpy_options *options) {
}
break;
case SDL_WINDOWEVENT:
if (screen.has_frame) {
screen_handle_window_event(&screen, &event->window);
}
screen_handle_window_event(&screen, &event->window);
break;
case SDL_TEXTINPUT:
if (!options->control) {
@@ -306,17 +304,6 @@ av_log_callback(void *avcl, int level, const char *fmt, va_list vl) {
free(local_fmt);
}
static void
decoder_on_new_frame(struct decoder *decoder, void *userdata) {
(void) decoder;
(void) userdata;
static SDL_Event new_frame_event = {
.type = EVENT_NEW_FRAME,
};
SDL_PushEvent(&new_frame_event);
}
bool
scrcpy(const struct scrcpy_options *options) {
if (!server_init(&server)) {
@@ -397,11 +384,7 @@ scrcpy(const struct scrcpy_options *options) {
file_handler_initialized = true;
}
static const struct decoder_callbacks decoder_cbs = {
.on_new_frame = decoder_on_new_frame,
};
decoder_init(&decoder, &video_buffer, &decoder_cbs, NULL);
decoder_init(&decoder, &video_buffer);
dec = &decoder;
}

View File

@@ -270,7 +270,7 @@ screen_init_rendering(struct screen *screen, const char *window_title,
SDL_RENDERER_ACCELERATED);
if (!screen->renderer) {
LOGC("Could not create renderer: %s", SDL_GetError());
SDL_DestroyWindow(screen->window);
screen_destroy(screen);
return false;
}
@@ -280,8 +280,8 @@ screen_init_rendering(struct screen *screen, const char *window_title,
LOGI("Renderer: %s", renderer_name ? renderer_name : "(unknown)");
// starts with "opengl"
bool use_opengl = renderer_name && !strncmp(renderer_name, "opengl", 6);
if (use_opengl) {
screen->use_opengl = renderer_name && !strncmp(renderer_name, "opengl", 6);
if (screen->use_opengl) {
struct sc_opengl *gl = &screen->gl;
sc_opengl_init(gl);
@@ -301,7 +301,7 @@ screen_init_rendering(struct screen *screen, const char *window_title,
} else {
LOGI("Trilinear filtering disabled");
}
} else if (mipmaps) {
} else {
LOGD("Trilinear filtering disabled (not an OpenGL renderer)");
}
@@ -318,8 +318,6 @@ screen_init_rendering(struct screen *screen, const char *window_title,
screen->texture = create_texture(screen);
if (!screen->texture) {
LOGC("Could not create texture: %s", SDL_GetError());
SDL_DestroyRenderer(screen->renderer);
SDL_DestroyWindow(screen->window);
screen_destroy(screen);
return false;
}
@@ -344,8 +342,12 @@ screen_destroy(struct screen *screen) {
if (screen->texture) {
SDL_DestroyTexture(screen->texture);
}
SDL_DestroyRenderer(screen->renderer);
SDL_DestroyWindow(screen->window);
if (screen->renderer) {
SDL_DestroyRenderer(screen->renderer);
}
if (screen->window) {
SDL_DestroyWindow(screen->window);
}
}
static void
@@ -442,6 +444,7 @@ update_texture(struct screen *screen, const AVFrame *frame) {
frame->data[2], frame->linesize[2]);
if (screen->mipmaps) {
assert(screen->use_opengl);
SDL_GL_BindTexture(screen->texture, NULL, NULL);
screen->gl.GenerateMipmap(GL_TEXTURE_2D);
SDL_GL_UnbindTexture(screen->texture);
@@ -453,6 +456,7 @@ screen_update_frame(struct screen *screen, struct video_buffer *vb) {
const AVFrame *frame = video_buffer_take_rendering_frame(vb);
struct size new_frame_size = {frame->width, frame->height};
if (!prepare_for_frame(screen, new_frame_size)) {
sc_mutex_unlock(&vb->mutex);
return false;
}
update_texture(screen, frame);

View File

@@ -16,6 +16,7 @@ struct screen {
SDL_Window *window;
SDL_Renderer *renderer;
SDL_Texture *texture;
bool use_opengl;
struct sc_opengl gl;
struct size frame_size;
struct size content_size; // rotated frame_size
@@ -40,6 +41,7 @@ struct screen {
.window = NULL, \
.renderer = NULL, \
.texture = NULL, \
.use_opengl = false, \
.gl = {0}, \
.frame_size = { \
.width = 0, \

View File

@@ -375,6 +375,8 @@ server_init(struct server *server) {
server->video_socket = INVALID_SOCKET;
server->control_socket = INVALID_SOCKET;
server->port_range.first = 0;
server->port_range.last = 0;
server->local_port = 0;
server->tunnel_enabled = false;
@@ -408,6 +410,8 @@ run_wait_server(void *data) {
bool
server_start(struct server *server, const char *serial,
const struct server_params *params) {
server->port_range = params->port_range;
if (serial) {
server->serial = strdup(serial);
if (!server->serial) {

View File

@@ -26,6 +26,7 @@ struct server {
socket_t server_socket; // only used if !tunnel_forward
socket_t video_socket;
socket_t control_socket;
struct sc_port_range port_range;
uint16_t local_port; // selected from port_range
bool tunnel_enabled;
bool tunnel_forward; // use "adb forward" instead of "adb reverse"

View File

@@ -61,7 +61,6 @@ sc_mutex_lock(sc_mutex *mutex) {
void
sc_mutex_unlock(sc_mutex *mutex) {
#ifndef NDEBUG
assert(sc_mutex_held(mutex));
mutex->locker = 0;
#endif
int r = SDL_UnlockMutex(mutex->mutex);