提交 5f09ae22 编写于 作者:Zhang Rui

ijkplayer: support direct draw for YUV420P and YUVJ420P

上级 a2b52d44
......@@ -652,7 +652,6 @@ display:
}
}
is->force_refresh = 0;
#ifdef FFP_MERGE
if (ffp->show_status) {
static int64_t last_time;
int64_t cur_time;
......@@ -696,7 +695,6 @@ display:
last_time = cur_time;
}
}
#endif
}
// TODO: 9 alloc_picture in video_refresh_thread if overlay referenced by vout
......@@ -814,8 +812,7 @@ static int queue_picture(FFPlayer *ffp, AVFrame *src_frame, double pts, int64_t
/* get a pointer on the bitmap */
SDL_VoutLockYUVOverlay(vp->bmp);
if (SDL_VoutFFmpeg_ConvertPicture(vp->bmp, vp->width, vp->height,
src_frame->format, (const uint8_t**)src_frame->data, src_frame->linesize,
if (SDL_VoutFFmpeg_ConvertFrame(vp->bmp, src_frame,
&is->img_convert_ctx, ffp->sws_flags) < 0) {
av_log(NULL, AV_LOG_FATAL, "Cannot initialize the conversion context\n");
exit(1);
......@@ -1833,7 +1830,7 @@ static int read_thread(void *arg)
#endif
// TODO: 8 set options from java side
av_dict_set(&ffp->format_opts, "timeout", "10000000", 0);
// av_dict_set(&ffp->format_opts, "timeout", "10000000", 0);
ic = avformat_alloc_context();
ic->interrupt_callback.callback = decode_interrupt_cb;
......@@ -1920,9 +1917,7 @@ static int read_thread(void *arg)
st_index[AVMEDIA_TYPE_VIDEO]),
NULL, 0);
#endif
if (ffp->show_status) {
av_dump_format(ic, 0, is->filename, 0);
}
av_dump_format(ic, 0, is->filename, 0);
is->show_mode = ffp->show_mode;
......
......@@ -452,7 +452,7 @@ inline static void ffp_reset_internal(FFPlayer *ffp)
ffp->wanted_stream[AVMEDIA_TYPE_SUBTITLE] = -1;
ffp->seek_by_bytes = -1;
ffp->display_disable = 0;
ffp->show_status = 1;
ffp->show_status = 0;
ffp->av_sync_type = AV_SYNC_AUDIO_MASTER;
ffp->start_time = AV_NOPTS_VALUE;
ffp->duration = AV_NOPTS_VALUE;
......@@ -489,8 +489,8 @@ inline static void ffp_reset_internal(FFPlayer *ffp)
ffp->sar_den = 0;
// ffp->overlay_format = SDL_FCC_YV12;
// ffp->overlay_format = SDL_FCC_RV16;
ffp->overlay_format = SDL_FCC_RV32;
// ffp->overlay_format = SDL_FCC_RV32;
ffp->overlay_format = SDL_FCC_RV16;
ffp->last_error = 0;
ffp->prepared = 0;
......
......@@ -26,6 +26,8 @@
#include "libavutil/imgutils.h"
#include "libavutil/pixfmt.h"
#include "libavutil/frame.h"
#include "libavutil/common.h"
#include "libavcodec/avcodec.h"
#include "libswscale/swscale.h"
......
......@@ -34,8 +34,11 @@
typedef struct SDL_VoutOverlay_Opaque {
SDL_mutex *mutex;
AVBufferRef *frame_buffer;
AVFrame *frame;
AVBufferRef *frame_buffer;
int planes;
AVFrame *linked_frame;
Uint16 pitches[AV_NUM_DATA_POINTERS];
Uint8 *pixels[AV_NUM_DATA_POINTERS];
......@@ -58,10 +61,18 @@ static AVFrame *alloc_avframe(SDL_VoutOverlay_Opaque* opaque, enum AVPixelFormat
return NULL;
}
AVFrame *linked_frame = av_frame_alloc();
if (!frame) {
av_frame_free(&frame);
av_buffer_unref(&frame_buffer_ref);
return NULL;
}
AVPicture *pic = (AVPicture *) frame;
avcodec_get_frame_defaults(frame);
avpicture_fill(pic, frame_buffer_ref->data, format, width, height);
opaque->frame_buffer = frame_buffer_ref;
opaque->linked_frame = linked_frame;
return frame;
}
......@@ -78,6 +89,11 @@ static void overlay_free_l(SDL_VoutOverlay *overlay)
if (opaque->frame)
av_frame_free(&opaque->frame);
if (opaque->linked_frame) {
av_frame_unref(opaque->linked_frame);
av_frame_free(&opaque->linked_frame);
}
if (opaque->frame_buffer)
av_buffer_unref(&opaque->frame_buffer);
......@@ -87,7 +103,7 @@ static void overlay_free_l(SDL_VoutOverlay *overlay)
SDL_VoutOverlay_FreeInternal(overlay);
}
static void overlay_fill(SDL_VoutOverlay *overlay, AVFrame *frame, Uint32 format, int planes)
static void overlay_fill(SDL_VoutOverlay *overlay, AVFrame *frame, int planes)
{
AVPicture *pic = (AVPicture *) frame;
overlay->planes = planes;
......@@ -128,26 +144,25 @@ SDL_VoutOverlay *SDL_VoutFFmpeg_CreateOverlay(int width, int height, Uint32 form
overlay->h = height;
enum AVPixelFormat ff_format = AV_PIX_FMT_NONE;
int planes = 0;
int buf_width = width; // must be aligned to 16 bytes pitch for arm-neon image-convert
int buf_height = height;
switch (format) {
case SDL_FCC_YV12: {
ff_format = AV_PIX_FMT_YUV420P;
buf_width = IJKALIGN(width, 16); // 1 bytes per pixel for Y-plane
planes = 3;
opaque->planes = 3;
break;
}
case SDL_FCC_RV16: {
ff_format = AV_PIX_FMT_RGB565;
buf_width = IJKALIGN(width, 8); // 2 bytes per pixel
planes = 1;
opaque->planes = 1;
break;
}
case SDL_FCC_RV32: {
ff_format = AV_PIX_FMT_0BGR32;
buf_width = IJKALIGN(width, 4); // 4 bytes per pixel
planes = 1;
opaque->planes = 1;
break;
}
default:
......@@ -161,7 +176,7 @@ SDL_VoutOverlay *SDL_VoutFFmpeg_CreateOverlay(int width, int height, Uint32 form
goto fail;
}
opaque->mutex = SDL_CreateMutex();
overlay_fill(overlay, opaque->frame, format, planes);
overlay_fill(overlay, opaque->frame, opaque->planes);
overlay->free_l = overlay_free_l;
overlay->lock = overlay_lock;
......@@ -174,10 +189,8 @@ SDL_VoutOverlay *SDL_VoutFFmpeg_CreateOverlay(int width, int height, Uint32 form
return NULL;
}
int SDL_VoutFFmpeg_ConvertPicture(
const SDL_VoutOverlay *overlay,
int width, int height,
enum AVPixelFormat src_format, const uint8_t **src_data, int *src_linesize,
int SDL_VoutFFmpeg_ConvertFrame(
SDL_VoutOverlay *overlay, AVFrame *frame,
struct SwsContext **p_sws_ctx, int sws_flags)
{
assert(overlay);
......@@ -190,19 +203,33 @@ int SDL_VoutFFmpeg_ConvertPicture(
dest_pic.linesize[i] = overlay->pitches[i];
}
av_frame_unref(opaque->linked_frame);
int use_linked_frame = 0;
enum AVPixelFormat dst_format = AV_PIX_FMT_NONE;
switch (overlay->format) {
case SDL_FCC_YV12:
dst_format = AV_PIX_FMT_YUV420P;
dest_pic.data[2] = overlay->pixels[1];
dest_pic.data[1] = overlay->pixels[2];
if (frame->format == AV_PIX_FMT_YUV420P || frame->format == AV_PIX_FMT_YUVJ420P) {
// ALOGE("direct draw frame");
use_linked_frame = 1;
av_frame_ref(opaque->linked_frame, frame);
overlay_fill(overlay, opaque->linked_frame, opaque->planes);
FFSWAP(Uint8*, overlay->pixels[1], overlay->pixels[2]);
} else {
// ALOGE("copy draw frame");
overlay_fill(overlay, opaque->frame, opaque->planes);
dest_pic.data[2] = overlay->pixels[1];
dest_pic.data[1] = overlay->pixels[2];
}
break;
case SDL_FCC_RV32:
// TODO: 9 android only
overlay_fill(overlay, opaque->frame, opaque->planes);
dst_format = AV_PIX_FMT_0BGR32;
break;
case SDL_FCC_RV16:
// TODO: 9 android only
overlay_fill(overlay, opaque->frame, opaque->planes);
dst_format = AV_PIX_FMT_RGB565;
break;
default:
......@@ -211,23 +238,25 @@ int SDL_VoutFFmpeg_ConvertPicture(
return -1;
}
if (ijk_image_convert(width, height,
if (use_linked_frame) {
// do nothing
} else if (ijk_image_convert(frame->width, frame->height,
dst_format, dest_pic.data, dest_pic.linesize,
src_format, src_data, src_linesize)) {
frame->format, (const uint8_t**) frame->data, frame->linesize)) {
*p_sws_ctx = sws_getCachedContext(*p_sws_ctx,
width, height, src_format, width, height,
frame->width, frame->height, frame->format, frame->width, frame->height,
dst_format, sws_flags, NULL, NULL, NULL);
if (*p_sws_ctx == NULL) {
ALOGE("sws_getCachedContext failed");
return -1;
}
sws_scale(*p_sws_ctx, (const uint8_t **) src_data, src_linesize,
0, height, dest_pic.data, dest_pic.linesize);
sws_scale(*p_sws_ctx, (const uint8_t**) frame->data, frame->linesize,
0, frame->height, dest_pic.data, dest_pic.linesize);
if (!opaque->no_neon_warned) {
opaque->no_neon_warned = 1;
ALOGE("non-neon image convert %s -> %s", av_get_pix_fmt_name(src_format), av_get_pix_fmt_name(dst_format));
ALOGE("non-neon image convert %s -> %s", av_get_pix_fmt_name(frame->format), av_get_pix_fmt_name(dst_format));
}
}
......
......@@ -31,10 +31,8 @@
// TODO: 9 alignment to speed up memcpy when display
SDL_VoutOverlay *SDL_VoutFFmpeg_CreateOverlay(int width, int height, Uint32 format, SDL_Vout *display);
int SDL_VoutFFmpeg_ConvertPicture(
const SDL_VoutOverlay *overlay,
int width, int height,
enum AVPixelFormat src_format, const uint8_t **src_data, int *src_linesize,
int SDL_VoutFFmpeg_ConvertFrame(
SDL_VoutOverlay *overlay, AVFrame *frame,
struct SwsContext **p_sws_ctx, int sws_flags);
#endif
Markdown is supported
0%
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请注册