Compare commits: refactor-e ... hw_dec_poc

1 commit: be6c6325d9

This proof of concept decodes the video stream with an FFmpeg VAAPI hardware decoder and renders the resulting NV12 frames with SDL.
decoder.c: include <libavutil/pixdesc.h> for av_get_pix_fmt_name(), used below.

```diff
@@ -2,6 +2,7 @@
 
 #include <libavcodec/avcodec.h>
 #include <libavformat/avformat.h>
+#include <libavutil/pixdesc.h>
 
 #include "events.h"
 #include "video_buffer.h"
```
```diff
@@ -11,6 +12,39 @@
 /** Downcast packet_sink to decoder */
 #define DOWNCAST(SINK) container_of(SINK, struct sc_decoder, packet_sink)
 
+static int hw_decoder_init(struct sc_decoder *decoder, const enum AVHWDeviceType type)
+{
+    int err = 0;
+
+    AVBufferRef *hw_device_ctx;
+    if ((err = av_hwdevice_ctx_create(&hw_device_ctx, type,
+                                      NULL, NULL, 0)) < 0) {
+        LOGE("Failed to create specified HW device.");
+        return err;
+    }
+    decoder->codec_ctx->hw_device_ctx = av_buffer_ref(hw_device_ctx);
+
+    return err;
+}
+
+static enum AVPixelFormat get_hw_format(AVCodecContext *ctx,
+                                        const enum AVPixelFormat *pix_fmts)
+{
+    (void) ctx;
+    const enum AVPixelFormat *p;
+
+    LOGD("== get_hw_format ==");
+
+    for (p = pix_fmts; *p != AV_PIX_FMT_NONE; p++) {
+        LOGD("==== %s (%d)", av_get_pix_fmt_name(*p), *p);
+        if (*p == AV_PIX_FMT_VAAPI)
+            return *p;
+    }
+
+    LOGE("Failed to get HW surface format.");
+    return AV_PIX_FMT_NONE;
+}
+
 static void
 sc_decoder_close_first_sinks(struct sc_decoder *decoder, unsigned count) {
     while (count) {
```
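The two helpers above follow the pattern of FFmpeg's hardware-decode example: hw_decoder_init() attaches a hardware device context to the codec context, and get_hw_format() is the get_format callback that picks the hardware pixel format during format negotiation. The PoC hard-codes AV_PIX_FMT_VAAPI; a minimal sketch of a more generic lookup (not part of this patch, the function name is illustrative) could query the codec's hardware configurations instead:

```c
#include <libavcodec/avcodec.h>
#include <libavutil/hwcontext.h>

// Sketch only: find the hardware pixel format a codec uses with a given
// device type (e.g. AV_HWDEVICE_TYPE_VAAPI -> AV_PIX_FMT_VAAPI).
static enum AVPixelFormat
find_hw_pix_fmt(const AVCodec *codec, enum AVHWDeviceType type) {
    for (int i = 0;; i++) {
        const AVCodecHWConfig *config = avcodec_get_hw_config(codec, i);
        if (!config) {
            // the codec has no hardware config for this device type
            return AV_PIX_FMT_NONE;
        }
        if ((config->methods & AV_CODEC_HW_CONFIG_METHOD_HW_DEVICE_CTX)
                && config->device_type == type) {
            return config->pix_fmt;
        }
    }
}
```

Storing that result (for example in struct sc_decoder) and comparing against it in get_hw_format() would remove the VAAPI-specific constant and make the same code usable with other device types.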
```diff
@@ -48,6 +82,10 @@ sc_decoder_open(struct sc_decoder *decoder, const AVCodec *codec) {
 
     decoder->codec_ctx->flags |= AV_CODEC_FLAG_LOW_DELAY;
 
+    int r = hw_decoder_init(decoder, AV_HWDEVICE_TYPE_VAAPI);
+    assert(!r);
+    decoder->codec_ctx->get_format = get_hw_format;
+
     if (avcodec_open2(decoder->codec_ctx, codec, NULL) < 0) {
         LOGE("Could not open codec");
         avcodec_free_context(&decoder->codec_ctx);
```
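The callback is installed before avcodec_open2(), so it is in place for the first format negotiation; assert(!r), however, aborts scrcpy entirely when no VAAPI device is available. A sketch of a softer failure mode (not part of the patch, the use_hw flag is hypothetical):

```c
// Sketch: fall back to software decoding instead of asserting.
bool use_hw = hw_decoder_init(decoder, AV_HWDEVICE_TYPE_VAAPI) == 0;
if (use_hw) {
    decoder->codec_ctx->get_format = get_hw_format;
} else {
    LOGW("VAAPI device unavailable, falling back to software decoding");
}
```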
```diff
@@ -62,6 +100,9 @@ sc_decoder_open(struct sc_decoder *decoder, const AVCodec *codec) {
         return false;
     }
 
+    decoder->hw_frame = av_frame_alloc();
+    assert(decoder->hw_frame);
+
     if (!sc_decoder_open_sinks(decoder)) {
         LOGE("Could not open decoder sinks");
         av_frame_free(&decoder->frame);
```
```diff
@@ -76,6 +117,7 @@ sc_decoder_open(struct sc_decoder *decoder, const AVCodec *codec) {
 static void
 sc_decoder_close(struct sc_decoder *decoder) {
     sc_decoder_close_sinks(decoder);
+    av_frame_free(&decoder->hw_frame);
     av_frame_free(&decoder->frame);
     avcodec_close(decoder->codec_ctx);
     avcodec_free_context(&decoder->codec_ctx);
```
```diff
@@ -107,15 +149,26 @@ sc_decoder_push(struct sc_decoder *decoder, const AVPacket *packet) {
         LOGE("Could not send video packet: %d", ret);
         return false;
     }
-    ret = avcodec_receive_frame(decoder->codec_ctx, decoder->frame);
+    ret = avcodec_receive_frame(decoder->codec_ctx, decoder->hw_frame);
     if (!ret) {
         // a frame was received
+
+        sc_tick t = sc_tick_now();
+        ret = av_hwframe_transfer_data(decoder->frame, decoder->hw_frame, 0);
+        if (ret < 0) {
+            LOGE("HWFRAME transfer fail");
+            return false;
+        }
+
+        LOGD("av_hwframe_transfer_data: %ld", sc_tick_now() - t);
+
         bool ok = push_frame_to_sinks(decoder, decoder->frame);
         // A frame lost should not make the whole pipeline fail. The error, if
         // any, is already logged.
         (void) ok;
 
         av_frame_unref(decoder->frame);
+        av_frame_unref(decoder->hw_frame);
     } else if (ret != AVERROR(EAGAIN)) {
         LOGE("Could not receive video frame: %d", ret);
         return false;
```
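avcodec_receive_frame() now yields a frame whose data lives in GPU memory (format AV_PIX_FMT_VAAPI), so it is copied back to system memory with av_hwframe_transfer_data() before being pushed to the sinks. Because decoder->frame has no format set at that point, FFmpeg picks the first format reported for the hardware frames context, which for VAAPI is typically NV12. A sketch of making that explicit (not in the patch):

```c
// Sketch: log the software pixel format produced by the GPU-to-CPU copy
// (expected to be NV12 for VAAPI surfaces).
ret = av_hwframe_transfer_data(decoder->frame, decoder->hw_frame, 0);
if (ret < 0) {
    LOGE("Could not transfer hardware frame: %d", ret);
    return false;
}
LOGD("transferred frame format: %s",
     av_get_pix_fmt_name((enum AVPixelFormat) decoder->frame->format));
```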
decoder.h: struct sc_decoder gains a second frame to hold the decoded frame while it is still in GPU memory.

```diff
@@ -19,6 +19,7 @@ struct sc_decoder {
 
     AVCodecContext *codec_ctx;
     AVFrame *frame;
+    AVFrame *hw_frame;
 };
 
 void
```
screen.c: the streaming texture is created as NV12 instead of YV12.

```diff
@@ -244,7 +244,7 @@ static inline SDL_Texture *
 create_texture(struct sc_screen *screen) {
     SDL_Renderer *renderer = screen->renderer;
     struct sc_size size = screen->frame_size;
-    SDL_Texture *texture = SDL_CreateTexture(renderer, SDL_PIXELFORMAT_YV12,
+    SDL_Texture *texture = SDL_CreateTexture(renderer, SDL_PIXELFORMAT_NV12,
                                              SDL_TEXTUREACCESS_STREAMING,
                                              size.width, size.height);
     if (!texture) {
```
```diff
@@ -711,10 +711,11 @@ prepare_for_frame(struct sc_screen *screen, struct sc_size new_frame_size) {
 // write the frame into the texture
 static void
 update_texture(struct sc_screen *screen, const AVFrame *frame) {
-    SDL_UpdateYUVTexture(screen->texture, NULL,
-                         frame->data[0], frame->linesize[0],
-                         frame->data[1], frame->linesize[1],
-                         frame->data[2], frame->linesize[2]);
+    // SDL_UpdateYUVTexture(screen->texture, NULL,
+    //                      frame->data[0], frame->linesize[0],
+    //                      frame->data[1], frame->linesize[1],
+    //                      frame->data[2], frame->linesize[2]);
+    SDL_UpdateTexture(screen->texture, NULL, frame->data[0], frame->linesize[0]);
 
     if (screen->mipmaps) {
         SDL_GL_BindTexture(screen->texture, NULL, NULL);
```
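With NV12 frames, SDL_UpdateYUVTexture (which expects three planes) no longer applies. The PoC uploads only frame->data[0] with SDL_UpdateTexture, which implicitly assumes the interleaved UV plane sits contiguously after the Y plane with the same pitch; FFmpeg does not guarantee that layout. A sketch of an explicit two-plane upload, assuming SDL >= 2.0.16 (which provides SDL_UpdateNVTexture):

```c
// Sketch: upload an NV12 AVFrame plane by plane
// (data[0] = Y plane, data[1] = interleaved UV plane).
SDL_UpdateNVTexture(screen->texture, NULL,
                    frame->data[0], frame->linesize[0],
                    frame->data[1], frame->linesize[1]);
```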