Commit 24b60140 authored by yuazhen, committed by Xinzheng Zhang

android: add subtitle support (#2450)

* android: add subtitle support

* supplement the subtitle stream metadata

* add post_event2 for posting an object to Java

* fix a memory leak: store obj data in a malloc'ed buffer so its life cycle is isolated from the caller's
Parent dfac8489
......@@ -67,10 +67,13 @@ public class TracksFragment extends Fragment {
int selectedVideoTrack = trackHolder.getSelectedTrack(ITrackInfo.MEDIA_TRACK_TYPE_VIDEO);
int selectedAudioTrack = trackHolder.getSelectedTrack(ITrackInfo.MEDIA_TRACK_TYPE_AUDIO);
int selectedSubtitleTrack = trackHolder.getSelectedTrack(ITrackInfo.MEDIA_TRACK_TYPE_TIMEDTEXT);
if (selectedVideoTrack >= 0)
mTrackListView.setItemChecked(selectedVideoTrack, true);
if (selectedAudioTrack >= 0)
mTrackListView.setItemChecked(selectedAudioTrack, true);
if (selectedSubtitleTrack >= 0)
mTrackListView.setItemChecked(selectedSubtitleTrack, true);
mTrackListView.setOnItemClickListener(new AdapterView.OnItemClickListener() {
@Override
......
......@@ -36,6 +36,7 @@ import android.view.View;
import android.widget.FrameLayout;
import android.widget.MediaController;
import android.widget.TableLayout;
import android.widget.TextView;
import java.io.File;
import java.io.IOException;
......@@ -43,11 +44,14 @@ import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import tv.danmaku.ijk.media.exo.IjkExoMediaPlayer;
import tv.danmaku.ijk.media.player.AndroidMediaPlayer;
import tv.danmaku.ijk.media.player.IMediaPlayer;
import tv.danmaku.ijk.media.player.IjkMediaPlayer;
import tv.danmaku.ijk.media.player.IjkTimedText;
import tv.danmaku.ijk.media.player.TextureMediaPlayer;
import tv.danmaku.ijk.media.player.misc.IMediaDataSource;
import tv.danmaku.ijk.media.player.misc.IMediaFormat;
......@@ -122,6 +126,8 @@ public class IjkVideoView extends FrameLayout implements MediaController.MediaPl
private long mSeekStartTime = 0;
private long mSeekEndTime = 0;
private TextView subtitleDisplay;
public IjkVideoView(Context context) {
super(context);
initVideoView(context);
......@@ -165,6 +171,15 @@ public class IjkVideoView extends FrameLayout implements MediaController.MediaPl
// REMOVED: mPendingSubtitleTracks = new Vector<Pair<InputStream, MediaFormat>>();
mCurrentState = STATE_IDLE;
mTargetState = STATE_IDLE;
subtitleDisplay = new TextView(context);
subtitleDisplay.setTextSize(24);
subtitleDisplay.setGravity(Gravity.CENTER);
FrameLayout.LayoutParams layoutParams_txt = new FrameLayout.LayoutParams(
FrameLayout.LayoutParams.MATCH_PARENT,
FrameLayout.LayoutParams.WRAP_CONTENT,
Gravity.BOTTOM);
addView(subtitleDisplay, layoutParams_txt);
}
public void setRenderView(IRenderView renderView) {
......@@ -314,6 +329,7 @@ public class IjkVideoView extends FrameLayout implements MediaController.MediaPl
mMediaPlayer.setOnInfoListener(mInfoListener);
mMediaPlayer.setOnBufferingUpdateListener(mBufferingUpdateListener);
mMediaPlayer.setOnSeekCompleteListener(mSeekCompleteListener);
mMediaPlayer.setOnTimedTextListener(mOnTimedTextListener);
mCurrentBufferPercentage = 0;
String scheme = mUri.getScheme();
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M &&
......@@ -580,6 +596,15 @@ public class IjkVideoView extends FrameLayout implements MediaController.MediaPl
}
};
private IMediaPlayer.OnTimedTextListener mOnTimedTextListener = new IMediaPlayer.OnTimedTextListener() {
@Override
public void onTimedText(IMediaPlayer mp, IjkTimedText text) {
if (text != null) {
subtitleDisplay.setText(text.getText());
}
}
};
/**
* Register a callback to be invoked when the media file
* is loaded and ready to go.
......@@ -1095,6 +1120,7 @@ public class IjkVideoView extends FrameLayout implements MediaController.MediaPl
int selectedVideoTrack = MediaPlayerCompat.getSelectedTrack(mMediaPlayer, ITrackInfo.MEDIA_TRACK_TYPE_VIDEO);
int selectedAudioTrack = MediaPlayerCompat.getSelectedTrack(mMediaPlayer, ITrackInfo.MEDIA_TRACK_TYPE_AUDIO);
int selectedSubtitleTrack = MediaPlayerCompat.getSelectedTrack(mMediaPlayer, ITrackInfo.MEDIA_TRACK_TYPE_TIMEDTEXT);
TableLayoutBinder builder = new TableLayoutBinder(getContext());
builder.appendSection(R.string.mi_player);
......@@ -1114,6 +1140,8 @@ public class IjkVideoView extends FrameLayout implements MediaController.MediaPl
builder.appendSection(getContext().getString(R.string.mi_stream_fmt1, index) + " " + getContext().getString(R.string.mi__selected_video_track));
} else if (index == selectedAudioTrack) {
builder.appendSection(getContext().getString(R.string.mi_stream_fmt1, index) + " " + getContext().getString(R.string.mi__selected_audio_track));
} else if (index == selectedSubtitleTrack) {
builder.appendSection(getContext().getString(R.string.mi_stream_fmt1, index) + " " + getContext().getString(R.string.mi__selected_subtitle_track));
} else {
builder.appendSection(getContext().getString(R.string.mi_stream_fmt1, index));
}
......
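For reference, a minimal usage sketch (not part of this commit) of how an application could consume the new callback directly on an IjkMediaPlayer, mirroring the IjkVideoView wiring above; the helper name, the player instance and the TextView are illustrative assumptions.

// Sketch only: assumes the usual imports (android.widget.TextView, tv.danmaku.ijk.media.player.*).
void attachSubtitleDisplay(IjkMediaPlayer player, final TextView subtitleView) {
    player.setOnTimedTextListener(new IMediaPlayer.OnTimedTextListener() {
        @Override
        public void onTimedText(IMediaPlayer mp, IjkTimedText text) {
            // a null (or empty) payload means the current subtitle should be cleared
            subtitleView.setText(text != null ? text.getText() : "");
        }
    });
}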
......@@ -41,6 +41,7 @@
<string name="mi_channels">Channels</string>
<string name="mi__selected_video_track">*</string>
<string name="mi__selected_audio_track">*</string>
<string name="mi__selected_subtitle_track">*</string>
<string name="TrackType_video">Video</string>
<string name="TrackType_audio">Audio</string>
......
......@@ -27,6 +27,7 @@ public abstract class AbstractMediaPlayer implements IMediaPlayer {
private OnVideoSizeChangedListener mOnVideoSizeChangedListener;
private OnErrorListener mOnErrorListener;
private OnInfoListener mOnInfoListener;
private OnTimedTextListener mOnTimedTextListener;
public final void setOnPreparedListener(OnPreparedListener listener) {
mOnPreparedListener = listener;
......@@ -58,6 +59,10 @@ public abstract class AbstractMediaPlayer implements IMediaPlayer {
mOnInfoListener = listener;
}
public final void setOnTimedTextListener(OnTimedTextListener listener) {
mOnTimedTextListener = listener;
}
public void resetListeners() {
mOnPreparedListener = null;
mOnBufferingUpdateListener = null;
......@@ -66,6 +71,7 @@ public abstract class AbstractMediaPlayer implements IMediaPlayer {
mOnVideoSizeChangedListener = null;
mOnErrorListener = null;
mOnInfoListener = null;
mOnTimedTextListener = null;
}
protected final void notifyOnPrepared() {
......@@ -103,6 +109,11 @@ public abstract class AbstractMediaPlayer implements IMediaPlayer {
return mOnInfoListener != null && mOnInfoListener.onInfo(this, what, extra);
}
protected final void notifyOnTimedText(IjkTimedText text) {
if (mOnTimedTextListener != null)
mOnTimedTextListener.onTimedText(this, text);
}
public void setDataSource(IMediaDataSource mediaDataSource) {
throw new UnsupportedOperationException();
}
......
......@@ -22,6 +22,7 @@ import android.content.Context;
import android.media.AudioManager;
import android.media.MediaDataSource;
import android.media.MediaPlayer;
import android.media.TimedText;
import android.net.Uri;
import android.os.Build;
import android.text.TextUtils;
......@@ -342,6 +343,7 @@ public class AndroidMediaPlayer extends AbstractMediaPlayer {
.setOnVideoSizeChangedListener(mInternalListenerAdapter);
mInternalMediaPlayer.setOnErrorListener(mInternalListenerAdapter);
mInternalMediaPlayer.setOnInfoListener(mInternalListenerAdapter);
mInternalMediaPlayer.setOnTimedTextListener(mInternalListenerAdapter);
}
private class AndroidMediaPlayerListenerHolder implements
......@@ -349,7 +351,8 @@ public class AndroidMediaPlayer extends AbstractMediaPlayer {
MediaPlayer.OnBufferingUpdateListener,
MediaPlayer.OnSeekCompleteListener,
MediaPlayer.OnVideoSizeChangedListener,
MediaPlayer.OnErrorListener, MediaPlayer.OnInfoListener {
MediaPlayer.OnErrorListener, MediaPlayer.OnInfoListener,
MediaPlayer.OnTimedTextListener {
public final WeakReference<AndroidMediaPlayer> mWeakMediaPlayer;
public AndroidMediaPlayerListenerHolder(AndroidMediaPlayer mp) {
......@@ -414,5 +417,15 @@ public class AndroidMediaPlayer extends AbstractMediaPlayer {
notifyOnPrepared();
}
@Override
public void onTimedText(MediaPlayer mp, TimedText text) {
AndroidMediaPlayer self = mWeakMediaPlayer.get();
if (self == null)
return;
IjkTimedText ijkText = new IjkTimedText(text.getBounds(), text.getText());
notifyOnTimedText(ijkText);
}
}
}
......@@ -132,6 +132,8 @@ public interface IMediaPlayer {
void setOnInfoListener(OnInfoListener listener);
void setOnTimedTextListener(OnTimedTextListener listener);
/*--------------------
* Listeners
*/
......@@ -164,6 +166,10 @@ public interface IMediaPlayer {
boolean onInfo(IMediaPlayer mp, int what, int extra);
}
interface OnTimedTextListener {
void onTimedText(IMediaPlayer mp, IjkTimedText text);
}
/*--------------------
* Optional
*/
......
......@@ -15,11 +15,13 @@ public class IjkMediaMeta {
public static final String IJKM_KEY_BITRATE = "bitrate";
public static final String IJKM_KEY_VIDEO_STREAM = "video";
public static final String IJKM_KEY_AUDIO_STREAM = "audio";
public static final String IJKM_KEY_TIMEDTEXT_STREAM = "timedtext";
// stream meta
public static final String IJKM_KEY_TYPE = "type";
public static final String IJKM_VAL_TYPE__VIDEO = "video";
public static final String IJKM_VAL_TYPE__AUDIO = "audio";
public static final String IJKM_VAL_TYPE__TIMEDTEXT = "timedtext";
public static final String IJKM_VAL_TYPE__UNKNOWN = "unknown";
public static final String IJKM_KEY_LANGUAGE = "language";
......@@ -202,6 +204,7 @@ public class IjkMediaMeta {
int videoStreamIndex = meta.getInt(IJKM_KEY_VIDEO_STREAM, -1);
int audioStreamIndex = meta.getInt(IJKM_KEY_AUDIO_STREAM, -1);
int subtitleStreamIndex = meta.getInt(IJKM_KEY_TIMEDTEXT_STREAM, -1);
ArrayList<Bundle> streams = meta
.getParcelableArrayList(IJKM_KEY_STREAMS);
......
......@@ -23,6 +23,7 @@ import android.content.ContentResolver;
import android.content.Context;
import android.content.res.AssetFileDescriptor;
import android.graphics.SurfaceTexture;
import android.graphics.Rect;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.media.RingtoneManager;
......@@ -108,6 +109,7 @@ public final class IjkMediaPlayer extends AbstractMediaPlayer {
public static final int FFP_PROP_INT64_SELECTED_VIDEO_STREAM = 20001;
public static final int FFP_PROP_INT64_SELECTED_AUDIO_STREAM = 20002;
public static final int FFP_PROP_INT64_SELECTED_TIMEDTEXT_STREAM = 20011;
public static final int FFP_PROP_INT64_VIDEO_DECODER = 20003;
public static final int FFP_PROP_INT64_AUDIO_DECODER = 20004;
......@@ -584,6 +586,8 @@ public final class IjkMediaPlayer extends AbstractMediaPlayer {
trackInfo.setTrackType(ITrackInfo.MEDIA_TRACK_TYPE_VIDEO);
} else if (streamMeta.mType.equalsIgnoreCase(IjkMediaMeta.IJKM_VAL_TYPE__AUDIO)) {
trackInfo.setTrackType(ITrackInfo.MEDIA_TRACK_TYPE_AUDIO);
} else if (streamMeta.mType.equalsIgnoreCase(IjkMediaMeta.IJKM_VAL_TYPE__TIMEDTEXT)) {
trackInfo.setTrackType(ITrackInfo.MEDIA_TRACK_TYPE_TIMEDTEXT);
}
trackInfos.add(trackInfo);
}
......@@ -598,6 +602,8 @@ public final class IjkMediaPlayer extends AbstractMediaPlayer {
return (int)_getPropertyLong(FFP_PROP_INT64_SELECTED_VIDEO_STREAM, -1);
case ITrackInfo.MEDIA_TRACK_TYPE_AUDIO:
return (int)_getPropertyLong(FFP_PROP_INT64_SELECTED_AUDIO_STREAM, -1);
case ITrackInfo.MEDIA_TRACK_TYPE_TIMEDTEXT:
return (int)_getPropertyLong(FFP_PROP_INT64_SELECTED_TIMEDTEXT_STREAM, -1);
default:
return -1;
}
......@@ -966,9 +972,13 @@ public final class IjkMediaPlayer extends AbstractMediaPlayer {
// No real default action so far.
return;
case MEDIA_TIMED_TEXT:
// do nothing
break;
if (msg.obj == null) {
player.notifyOnTimedText(null);
} else {
IjkTimedText text = new IjkTimedText(new Rect(0, 0, 1, 1), (String)msg.obj);
player.notifyOnTimedText(text);
}
return;
case MEDIA_NOP: // interface test message - ignore
break;
......
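A hedged sketch (not part of this commit) of how an application might inspect the timed-text track data exposed above; getTrackInfo(), getSelectedTrack() and MEDIA_TRACK_TYPE_TIMEDTEXT appear in this diff, while the method name and the logging are illustrative.

// Sketch only: log the player's timed-text tracks and mark the selected one.
void dumpSubtitleTracks(IjkMediaPlayer player) {
    IjkTrackInfo[] tracks = player.getTrackInfo();
    int selected = player.getSelectedTrack(ITrackInfo.MEDIA_TRACK_TYPE_TIMEDTEXT);
    for (int i = 0; i < tracks.length; i++) {
        if (tracks[i].getTrackType() == ITrackInfo.MEDIA_TRACK_TYPE_TIMEDTEXT) {
            Log.d("ijkplayer", "timedtext track " + i + (i == selected ? " (selected): " : ": ") + tracks[i]);
        }
    }
}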
/*
* Copyright (C) 2016 Zheng Yuan <zhengyuan10503@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package tv.danmaku.ijk.media.player;
import android.graphics.Rect;
import java.lang.String;
public final class IjkTimedText {
private Rect mTextBounds = null;
private String mTextChars = null;
public IjkTimedText(Rect bounds, String text) {
mTextBounds = bounds;
mTextChars = text;
}
public Rect getBounds() {
return mTextBounds;
}
public String getText() {
return mTextChars;
}
}
......@@ -281,6 +281,21 @@ public class MediaPlayerProxy implements IMediaPlayer {
}
}
@Override
public void setOnTimedTextListener(OnTimedTextListener listener) {
if (listener != null) {
final OnTimedTextListener finalListener = listener;
mBackEndMediaPlayer.setOnTimedTextListener(new OnTimedTextListener() {
@Override
public void onTimedText(IMediaPlayer mp, IjkTimedText text) {
finalListener.onTimedText(MediaPlayerProxy.this, text);
}
});
} else {
mBackEndMediaPlayer.setOnTimedTextListener(null);
}
}
@Override
public void setAudioStreamType(int streamtype) {
mBackEndMediaPlayer.setAudioStreamType(streamtype);
......
......@@ -83,6 +83,8 @@ public class IjkTrackInfo implements ITrackInfo {
break;
case MEDIA_TRACK_TYPE_TIMEDTEXT:
out.append("TIMEDTEXT");
out.append(", ");
out.append(mStreamMeta.mLanguage);
break;
case MEDIA_TRACK_TYPE_SUBTITLE:
out.append("SUBTITLE");
......
......@@ -622,6 +622,7 @@ IjkMediaPlayer_getMediaMeta(JNIEnv *env, jobject thiz)
fillMetaInternal(env, jlocal_bundle, meta, IJKM_KEY_VIDEO_STREAM, "-1");
fillMetaInternal(env, jlocal_bundle, meta, IJKM_KEY_AUDIO_STREAM, "-1");
fillMetaInternal(env, jlocal_bundle, meta, IJKM_KEY_TIMEDTEXT_STREAM, "-1");
jarray_list = J4AC_ArrayList__ArrayList(env);
if (J4A_ExceptionCheck__throwAny(env)) {
......@@ -831,6 +832,13 @@ inline static void post_event(JNIEnv *env, jobject weak_this, int what, int arg1
// MPTRACE("post_event()=void");
}
inline static void post_event2(JNIEnv *env, jobject weak_this, int what, int arg1, int arg2, jobject obj)
{
// MPTRACE("post_event2(%p, %p, %d, %d, %d, %p)", (void*)env, (void*) weak_this, what, arg1, arg2, (void*)obj);
J4AC_IjkMediaPlayer__postEventFromNative(env, weak_this, what, arg1, arg2, obj);
// MPTRACE("post_event2()=void");
}
static void message_loop_n(JNIEnv *env, IjkMediaPlayer *mp)
{
jobject weak_thiz = (jobject) ijkmp_get_weak_thiz(mp);
......@@ -905,10 +913,21 @@ static void message_loop_n(JNIEnv *env, IjkMediaPlayer *mp)
break;
case FFP_MSG_PLAYBACK_STATE_CHANGED:
break;
case FFP_MSG_TIMED_TEXT:
if (msg.obj) {
jstring text = (*env)->NewStringUTF(env, (char *)msg.obj);
post_event2(env, weak_thiz, MEDIA_TIMED_TEXT, 0, 0, text);
J4A_DeleteLocalRef__p(env, &text);
}
else {
post_event2(env, weak_thiz, MEDIA_TIMED_TEXT, 0, 0, NULL);
}
break;
default:
ALOGE("unknown FFP_MSG_xxx(%d)\n", msg.what);
break;
}
msg_free_res(&msg);
}
LABEL_RETURN:
......
......@@ -40,6 +40,7 @@
#define FFP_MSG_BUFFERING_TIME_UPDATE 504 /* arg1 = cached duration in milliseconds, arg2 = high water mark */
#define FFP_MSG_SEEK_COMPLETE 600 /* arg1 = seek position, arg2 = error */
#define FFP_MSG_PLAYBACK_STATE_CHANGED 700
#define FFP_MSG_TIMED_TEXT 800
#define FFP_MSG_VIDEO_DECODER_OPEN 10001
......@@ -56,6 +57,7 @@
#define FFP_PROP_INT64_SELECTED_VIDEO_STREAM 20001
#define FFP_PROP_INT64_SELECTED_AUDIO_STREAM 20002
#define FFP_PROP_INT64_SELECTED_TIMEDTEXT_STREAM 20011
#define FFP_PROP_INT64_VIDEO_DECODER 20003
#define FFP_PROP_INT64_AUDIO_DECODER 20004
#define FFP_PROPV_DECODER_UNKNOWN 0
......
......@@ -143,6 +143,24 @@ inline static void msg_queue_put_simple3(MessageQueue *q, int what, int arg1, in
msg_queue_put(q, &msg);
}
inline static void msg_obj_free_l(void *obj)
{
av_free(obj);
}
inline static void msg_queue_put_simple4(MessageQueue *q, int what, int arg1, int arg2, void *obj, int obj_len)
{
AVMessage msg;
msg_init_msg(&msg);
msg.what = what;
msg.arg1 = arg1;
msg.arg2 = arg2;
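/* copy the payload into a private malloc'ed buffer so its life cycle is independent of the caller's; msg_obj_free_l() releases it when the message is freed */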
msg.obj = av_malloc(obj_len);
memcpy(msg.obj, obj, obj_len);
msg.free_l = msg_obj_free_l;
msg_queue_put(q, &msg);
}
inline static void msg_queue_init(MessageQueue *q)
{
memset(q, 0, sizeof(MessageQueue));
......
......@@ -420,7 +420,9 @@ static int decoder_decode_frame(FFPlayer *ffp, Decoder *d, AVFrame *frame, AVSub
}
}
break;
// FFP_MERGE: case AVMEDIA_TYPE_SUBTITLE:
case AVMEDIA_TYPE_SUBTITLE:
ret = avcodec_decode_subtitle2(d->avctx, sub, &got_frame, &d->pkt_temp);
break;
default:
break;
}
......@@ -458,9 +460,7 @@ static void frame_queue_unref_item(Frame *vp)
{
av_frame_unref(vp->frame);
SDL_VoutUnrefYUVOverlay(vp->bmp);
#ifdef FFP_MERGE
avsubtitle_free(&vp->sub);
#endif
}
static int frame_queue_init(FrameQueue *f, PacketQueue *pktq, int max_size, int keep_last)
......@@ -625,10 +625,51 @@ static void free_picture(Frame *vp)
// FFP_MERGE: upload_texture
// FFP_MERGE: video_image_display
static int parse_ass_subtitle(const char *ass, char *output)
{
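/* The input is expected to be a rendered ASS Dialogue event, e.g.
 *   Dialogue: 0,0:00:01.00,0:00:04.00,Default,,0,0,0,,Hello\NWorld
 * The nine comma-separated fields skipped below are Layer..Effect; what remains is the
 * Text field, whose \N line breaks are rewritten as '\n' in output. */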
char *tok = NULL;
tok = strchr(ass, ':'); if (tok) tok += 1; // skip event
tok = strchr(tok, ','); if (tok) tok += 1; // skip layer
tok = strchr(tok, ','); if (tok) tok += 1; // skip start_time
tok = strchr(tok, ','); if (tok) tok += 1; // skip end_time
tok = strchr(tok, ','); if (tok) tok += 1; // skip style
tok = strchr(tok, ','); if (tok) tok += 1; // skip name
tok = strchr(tok, ','); if (tok) tok += 1; // skip margin_l
tok = strchr(tok, ','); if (tok) tok += 1; // skip margin_r
tok = strchr(tok, ','); if (tok) tok += 1; // skip margin_v
tok = strchr(tok, ','); if (tok) tok += 1; // skip effect
if (tok) {
char *text = tok;
int idx = 0;
do {
char *found = strstr(text, "\\N");
if (found) {
int n = found - text;
memcpy(output+idx, text, n);
output[idx + n] = '\n';
idx = n + 1;
text = found + 2;
}
else {
int left_text_len = strlen(text);
memcpy(output+idx, text, left_text_len);
if (output[idx + left_text_len - 1] == '\n')
output[idx + left_text_len - 1] = '\0';
else
output[idx + left_text_len] = '\0';
break;
}
} while(1);
return strlen(output) + 1;
}
return 0;
}
static void video_image_display2(FFPlayer *ffp)
{
VideoState *is = ffp->is;
Frame *vp;
Frame *sp = NULL;
vp = frame_queue_peek_last(&is->pictq);
......@@ -637,6 +678,27 @@ static void video_image_display2(FFPlayer *ffp)
ffp->stat.latest_seek_load_duration = (av_gettime() - is->latest_seek_load_start_at) / 1000;
if (vp->bmp) {
if (is->subtitle_st) {
if (frame_queue_nb_remaining(&is->subpq) > 0) {
sp = frame_queue_peek(&is->subpq);
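/* show the subtitle once the video pts reaches its start_display_time; the plain (or parsed ASS) text is forwarded to the Java layer via FFP_MSG_TIMED_TEXT below */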
if (vp->pts >= sp->pts + ((float) sp->sub.start_display_time / 1000)) {
if (!sp->uploaded) {
if (sp->sub.num_rects > 0) {
char buffered_text[4096];
if (sp->sub.rects[0]->text) {
strncpy(buffered_text, sp->sub.rects[0]->text, 4096);
}
else if (sp->sub.rects[0]->ass) {
parse_ass_subtitle(sp->sub.rects[0]->ass, buffered_text);
}
ffp_notify_msg4(ffp, FFP_MSG_TIMED_TEXT, 0, 0, buffered_text, sizeof(buffered_text));
}
sp->uploaded = 1;
}
}
}
}
SDL_VoutDisplayYUVOverlay(ffp->vout, vp->bmp);
ffp->stat.vfps = SDL_SpeedSamplerAdd(&ffp->vfps_sampler, FFP_SHOW_VFPS_FFPLAY, "vfps[ffplay]");
if (!ffp->first_video_frame_rendered) {
......@@ -683,7 +745,10 @@ static void stream_component_close(FFPlayer *ffp, int stream_index)
decoder_abort(&is->viddec, &is->pictq);
decoder_destroy(&is->viddec);
break;
// FFP_MERGE: case AVMEDIA_TYPE_SUBTITLE:
case AVMEDIA_TYPE_SUBTITLE:
decoder_abort(&is->subdec, &is->subpq);
decoder_destroy(&is->subdec);
break;
default:
break;
}
......@@ -698,7 +763,10 @@ static void stream_component_close(FFPlayer *ffp, int stream_index)
is->video_st = NULL;
is->video_stream = -1;
break;
// FFP_MERGE: case AVMEDIA_TYPE_SUBTITLE:
case AVMEDIA_TYPE_SUBTITLE:
is->subtitle_st = NULL;
is->subtitle_stream = -1;
break;
default:
break;
}
......@@ -719,10 +787,8 @@ static void stream_close(FFPlayer *ffp)
stream_component_close(ffp, is->audio_stream);
if (is->video_stream >= 0)
stream_component_close(ffp, is->video_stream);
#ifdef FFP_MERGE
if (is->subtitle_stream >= 0)
stream_component_close(ffp, is->subtitle_stream);
#endif
avformat_close_input(&is->ic);
......@@ -731,16 +797,12 @@ static void stream_close(FFPlayer *ffp)
packet_queue_destroy(&is->videoq);
packet_queue_destroy(&is->audioq);
#ifdef FFP_MERGE
packet_queue_destroy(&is->subtitleq);
#endif
/* free all pictures */
frame_queue_destory(&is->pictq);
frame_queue_destory(&is->sampq);
#ifdef FFP_MERGE
frame_queue_destory(&is->subpq);
#endif
SDL_DestroyCond(is->continue_read_thread);
SDL_DestroyMutex(is->play_mutex);
#if !CONFIG_AVFILTER
......@@ -999,9 +1061,7 @@ static void video_refresh(FFPlayer *opaque, double *remaining_time)
VideoState *is = ffp->is;
double time;
#ifdef FFP_MERGE
Frame *sp, *sp2;
#endif
if (!is->paused && get_master_sync_type(is) == AV_SYNC_EXTERNAL_CLOCK && is->realtime)
check_external_clock_speed(is);
......@@ -1068,7 +1128,28 @@ retry:
}
}
// FFP_MERGE: if (is->subtitle_st) { {...}
if (is->subtitle_st) {
while (frame_queue_nb_remaining(&is->subpq) > 0) {
sp = frame_queue_peek(&is->subpq);
if (frame_queue_nb_remaining(&is->subpq) > 1)
sp2 = frame_queue_peek_next(&is->subpq);
else
sp2 = NULL;
if (sp->serial != is->subtitleq.serial
|| (is->vidclk.pts > (sp->pts + ((float) sp->sub.end_display_time / 1000)))
|| (sp2 && is->vidclk.pts > (sp2->pts + ((float) sp2->sub.start_display_time / 1000))))
{
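/* this subtitle has expired or been superseded: if it was shown, push an empty FFP_MSG_TIMED_TEXT so the display is cleared, then drop the frame */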
if (sp->uploaded) {
ffp_notify_msg4(ffp, FFP_MSG_TIMED_TEXT, 0, 0, "", 1);
}
frame_queue_next(&is->subpq);
} else {
break;
}
}
}
frame_queue_next(&is->pictq);
is->force_refresh = 1;
......@@ -1775,7 +1856,45 @@ static int video_thread(void *arg)
return ret;
}
// FFP_MERGE: subtitle_thread
static int subtitle_thread(void *arg)
{
FFPlayer *ffp = arg;
VideoState *is = ffp->is;
Frame *sp;
int got_subtitle;
double pts;
for (;;) {
if (!(sp = frame_queue_peek_writable(&is->subpq)))
return 0;
if ((got_subtitle = decoder_decode_frame(ffp, &is->subdec, NULL, &sp->sub)) < 0)
break;
pts = 0;
#ifdef FFP_MERGE
if (got_subtitle && sp->sub.format == 0) {
#else
if (got_subtitle) {
#endif
if (sp->sub.pts != AV_NOPTS_VALUE)
pts = sp->sub.pts / (double)AV_TIME_BASE;
sp->pts = pts;
sp->serial = is->subdec.pkt_serial;
sp->width = is->subdec.avctx->width;
sp->height = is->subdec.avctx->height;
sp->uploaded = 0;
/* now we can update the picture count */
frame_queue_push(&is->subpq);
#ifdef FFP_MERGE
} else if (got_subtitle) {
avsubtitle_free(&sp->sub);
#endif
}
}
return 0;
}
/* copy samples for viewing in editor window */
static void update_sample_display(VideoState *is, short *samples, int samples_size)
......@@ -2176,7 +2295,7 @@ static int stream_component_open(FFPlayer *ffp, int stream_index)
switch (avctx->codec_type) {
case AVMEDIA_TYPE_AUDIO : is->last_audio_stream = stream_index; forced_codec_name = ffp->audio_codec_name; break;
// FFP_MERGE: case AVMEDIA_TYPE_SUBTITLE:
case AVMEDIA_TYPE_SUBTITLE: is->last_subtitle_stream = stream_index; forced_codec_name = ffp->subtitle_codec_name; break;
case AVMEDIA_TYPE_VIDEO : is->last_video_stream = stream_index; forced_codec_name = ffp->video_codec_name; break;
default: break;
}
......@@ -2326,7 +2445,16 @@ static int stream_component_open(FFPlayer *ffp, int stream_index)
}
break;
// FFP_MERGE: case AVMEDIA_TYPE_SUBTITLE:
case AVMEDIA_TYPE_SUBTITLE:
is->subtitle_stream = stream_index;
is->subtitle_st = ic->streams[stream_index];
ffp_set_subtitle_codec_info(ffp, AVCODEC_MODULE_NAME, avcodec_get_name(avctx->codec_id));
decoder_init(&is->subdec, avctx, &is->subtitleq, is->continue_read_thread);
if ((ret = decoder_start(&is->subdec, subtitle_thread, ffp, "ff_subtitle_dec")) < 0)
goto out;
break;
default:
break;
}
......@@ -2403,9 +2531,7 @@ static int read_thread(void *arg)
memset(st_index, -1, sizeof(st_index));
is->last_video_stream = is->video_stream = -1;
is->last_audio_stream = is->audio_stream = -1;
#ifdef FFP_MERGE
is->last_subtitle_stream = is->subtitle_stream = -1;
#endif
is->eof = 0;
ic = avformat_alloc_context();
......@@ -2539,7 +2665,6 @@ static int read_thread(void *arg)
st_index[AVMEDIA_TYPE_AUDIO],
st_index[AVMEDIA_TYPE_VIDEO],
NULL, 0);
#ifdef FFP_MERGE
if (!ffp->video_disable && !ffp->subtitle_disable)
st_index[AVMEDIA_TYPE_SUBTITLE] =
av_find_best_stream(ic, AVMEDIA_TYPE_SUBTITLE,
......@@ -2548,7 +2673,6 @@ static int read_thread(void *arg)
st_index[AVMEDIA_TYPE_AUDIO] :
st_index[AVMEDIA_TYPE_VIDEO]),
NULL, 0);
#endif
is->show_mode = ffp->show_mode;
#ifdef FFP_MERGE // bbc: dunno if we need this
......@@ -2573,17 +2697,18 @@ static int read_thread(void *arg)
if (is->show_mode == SHOW_MODE_NONE)
is->show_mode = ret >= 0 ? SHOW_MODE_VIDEO : SHOW_MODE_RDFT;
#ifdef FFP_MERGE
if (st_index[AVMEDIA_TYPE_SUBTITLE] >= 0) {
stream_component_open(ffp, st_index[AVMEDIA_TYPE_SUBTITLE]);
}
#endif
ijkmeta_set_avformat_context_l(ffp->meta, ic);
ffp->stat.bit_rate = ic->bit_rate;
if (st_index[AVMEDIA_TYPE_VIDEO] >= 0)
ijkmeta_set_int64_l(ffp->meta, IJKM_KEY_VIDEO_STREAM, st_index[AVMEDIA_TYPE_VIDEO]);
if (st_index[AVMEDIA_TYPE_AUDIO] >= 0)
ijkmeta_set_int64_l(ffp->meta, IJKM_KEY_AUDIO_STREAM, st_index[AVMEDIA_TYPE_AUDIO]);
if (st_index[AVMEDIA_TYPE_SUBTITLE] >= 0)
ijkmeta_set_int64_l(ffp->meta, IJKM_KEY_TIMEDTEXT_STREAM, st_index[AVMEDIA_TYPE_SUBTITLE]);
if (is->video_stream < 0 && is->audio_stream < 0) {
av_log(NULL, AV_LOG_FATAL, "Failed to open file '%s' or configure filtergraph\n",
......@@ -2667,12 +2792,10 @@ static int read_thread(void *arg)
packet_queue_flush(&is->audioq);
packet_queue_put(&is->audioq, &flush_pkt);
}
#ifdef FFP_MERGE
if (is->subtitle_stream >= 0) {
packet_queue_flush(&is->subtitleq);
packet_queue_put(&is->subtitleq, &flush_pkt);
}
#endif
if (is->video_stream >= 0) {
if (ffp->node_vdec) {
ffpipenode_flush(ffp->node_vdec);
......@@ -2728,15 +2851,11 @@ static int read_thread(void *arg)
#ifdef FFP_MERGE
(is->audioq.size + is->videoq.size + is->subtitleq.size > MAX_QUEUE_SIZE
#else
(is->audioq.size + is->videoq.size > ffp->dcc.max_buffer_size
(is->audioq.size + is->videoq.size + is->subtitleq.size > ffp->dcc.max_buffer_size
#endif
|| ( stream_has_enough_packets(is->audio_st, is->audio_stream, &is->audioq, MIN_FRAMES)
&& stream_has_enough_packets(is->video_st, is->video_stream, &is->videoq, MIN_FRAMES)
#ifdef FFP_MERGE
&& stream_has_enough_packets(is->subtitle_st, is->subtitle_stream, &is->subtitleq, MIN_FRAMES)))) {
#else
))) {
#endif
if (!is->eof) {
ffp_toggle_buffering(ffp, 0);
}
......@@ -2805,10 +2924,8 @@ static int read_thread(void *arg)
packet_queue_put_nullpacket(&is->videoq, is->video_stream);
if (is->audio_stream >= 0)
packet_queue_put_nullpacket(&is->audioq, is->audio_stream);
#ifdef FFP_MERGE
if (is->subtitle_stream >= 0)
packet_queue_put_nullpacket(&is->subtitleq, is->subtitle_stream);
#endif
is->eof = 1;
}
if (pb_error) {
......@@ -2816,10 +2933,8 @@ static int read_thread(void *arg)
packet_queue_put_nullpacket(&is->videoq, is->video_stream);
if (is->audio_stream >= 0)
packet_queue_put_nullpacket(&is->audioq, is->audio_stream);
#ifdef FFP_MERGE
if (is->subtitle_stream >= 0)
packet_queue_put_nullpacket(&is->subtitleq, is->subtitle_stream);
#endif
is->eof = 1;
ffp->error = pb_error;
av_log(ffp, AV_LOG_ERROR, "av_read_frame error: %x(%c,%c,%c,%c): %s\n", ffp->error,
......@@ -2849,11 +2964,9 @@ static int read_thread(void *arg)
if (is->audio_stream >= 0) {
packet_queue_put(&is->audioq, &flush_pkt);
}
#ifdef FFP_MERGE
if (is->subtitle_stream >= 0) {
packet_queue_put(&is->subtitleq, &flush_pkt);
}
#endif
if (is->video_stream >= 0) {
packet_queue_put(&is->videoq, &flush_pkt);
}
......@@ -2872,10 +2985,8 @@ static int read_thread(void *arg)
} else if (pkt->stream_index == is->video_stream && pkt_in_play_range
&& !(is->video_st && (is->video_st->disposition & AV_DISPOSITION_ATTACHED_PIC))) {
packet_queue_put(&is->videoq, pkt);
#ifdef FFP_MERGE
} else if (pkt->stream_index == is->subtitle_stream && pkt_in_play_range) {
packet_queue_put(&is->subtitleq, pkt);
#endif
} else {
av_packet_unref(pkt);
}
......@@ -2923,20 +3034,14 @@ static VideoState *stream_open(FFPlayer *ffp, const char *filename, AVInputForma
/* start video display */
if (frame_queue_init(&is->pictq, &is->videoq, ffp->pictq_size, 1) < 0)
goto fail;
#ifdef FFP_MERGE
if (frame_queue_init(&is->subpq, &is->subtitleq, SUBPICTURE_QUEUE_SIZE, 0) < 0)
goto fail;
#endif
if (frame_queue_init(&is->sampq, &is->audioq, SAMPLE_QUEUE_SIZE, 1) < 0)
goto fail;
if (packet_queue_init(&is->videoq) < 0 ||
packet_queue_init(&is->audioq) < 0 ||
#ifdef FFP_MERGE
packet_queue_init(&is->subtitleq) < 0)
#else
0)
#endif
goto fail;
if (!(is->continue_read_thread = SDL_CreateCond())) {
......@@ -3913,6 +4018,13 @@ void ffp_set_audio_codec_info(FFPlayer *ffp, const char *module, const char *cod
av_log(ffp, AV_LOG_INFO, "AudioCodec: %s\n", ffp->audio_codec_info);
}
void ffp_set_subtitle_codec_info(FFPlayer *ffp, const char *module, const char *codec)
{
av_freep(&ffp->subtitle_codec_info);
ffp->subtitle_codec_info = av_asprintf("%s, %s", module ? module : "", codec ? codec : "");
av_log(ffp, AV_LOG_INFO, "SubtitleCodec: %s\n", ffp->subtitle_codec_info);
}
void ffp_set_playback_rate(FFPlayer *ffp, float rate)
{
if (!ffp)
......@@ -3983,6 +4095,10 @@ int ffp_set_stream_selected(FFPlayer *ffp, int stream, int selected)
if (stream != is->audio_stream && is->audio_stream >= 0)
stream_component_close(ffp, is->audio_stream);
break;
case AVMEDIA_TYPE_SUBTITLE:
if (stream != is->subtitle_stream && is->subtitle_stream >= 0)
stream_component_close(ffp, is->subtitle_stream);
break;
default:
av_log(ffp, AV_LOG_ERROR, "select invalid stream %d of video type %d\n", stream, codecpar->codec_type);
return -1;
......@@ -3998,6 +4114,10 @@ int ffp_set_stream_selected(FFPlayer *ffp, int stream, int selected)
if (stream == is->audio_stream)
stream_component_close(ffp, is->audio_stream);
break;
case AVMEDIA_TYPE_SUBTITLE:
if (stream == is->subtitle_stream)
stream_component_close(ffp, is->subtitle_stream);
break;
default:
av_log(ffp, AV_LOG_ERROR, "select invalid stream %d of audio type %d\n", stream, codecpar->codec_type);
return -1;
......@@ -4051,6 +4171,10 @@ int64_t ffp_get_property_int64(FFPlayer *ffp, int id, int64_t default_value)
if (!ffp || !ffp->is)
return default_value;
return ffp->is->audio_stream;
case FFP_PROP_INT64_SELECTED_TIMEDTEXT_STREAM:
if (!ffp || !ffp->is)
return default_value;
return ffp->is->subtitle_stream;
case FFP_PROP_INT64_VIDEO_DECODER:
if (!ffp)
return default_value;
......
......@@ -100,6 +100,7 @@ int ffp_video_thread(FFPlayer *ffp);
void ffp_set_video_codec_info(FFPlayer *ffp, const char *module, const char *codec);
void ffp_set_audio_codec_info(FFPlayer *ffp, const char *module, const char *codec);
void ffp_set_subtitle_codec_info(FFPlayer *ffp, const char *module, const char *codec);
void ffp_set_playback_rate(FFPlayer *ffp, float rate);
void ffp_set_playback_volume(FFPlayer *ffp, float volume);
......
......@@ -185,9 +185,7 @@ typedef struct Clock {
/* Common struct for handling all types of decoded data and allocated render buffers. */
typedef struct Frame {
AVFrame *frame;
#ifdef FFP_MERGE
AVSubtitle sub;
#endif
int serial;
double pts; /* presentation timestamp for the frame */
double duration; /* estimated duration of the frame */
......@@ -202,9 +200,7 @@ typedef struct Frame {
int height;
int format;
AVRational sar;
#ifdef FFP_MERGE
int uploaded;
#endif
} Frame;
typedef struct FrameQueue {
......@@ -274,16 +270,12 @@ typedef struct VideoState {
Clock extclk;
FrameQueue pictq;
#ifdef FFP_MERGE
FrameQueue subpq;
#endif
FrameQueue sampq;
Decoder auddec;
Decoder viddec;
#ifdef FFP_MERGE
Decoder subdec;
#endif
int audio_stream;
......@@ -334,11 +326,9 @@ typedef struct VideoState {
SDL_Texture *sub_texture;
#endif
#ifdef FFP_MERGE
int subtitle_stream;
AVStream *subtitle_st;
PacketQueue subtitleq;
#endif
double frame_timer;
double frame_last_returned_time;
......@@ -540,9 +530,7 @@ typedef struct FFPlayer {
#endif
int audio_disable;
int video_disable;
#ifdef FFP_MERGE
int subtitle_disable;
#endif
const char* wanted_stream_spec[AVMEDIA_TYPE_NB];
int seek_by_bytes;
int display_disable;
......@@ -565,9 +553,7 @@ typedef struct FFPlayer {
int infinite_buffer;
enum ShowMode show_mode;
char *audio_codec_name;
#ifdef FFP_MERGE
char *subtitle_codec_name;
#endif
char *video_codec_name;
double rdftspeed;
#ifdef FFP_MERGE
......@@ -603,6 +589,7 @@ typedef struct FFPlayer {
char *video_codec_info;
char *audio_codec_info;
char *subtitle_codec_info;
Uint32 overlay_format;
int last_error;
......@@ -729,6 +716,7 @@ inline static void ffp_reset_internal(FFPlayer *ffp)
av_freep(&ffp->video_codec_info);
av_freep(&ffp->audio_codec_info);
av_freep(&ffp->subtitle_codec_info);
ffp->overlay_format = SDL_FCC_RV32;
ffp->last_error = 0;
......@@ -799,6 +787,10 @@ inline static void ffp_notify_msg3(FFPlayer *ffp, int what, int arg1, int arg2)
msg_queue_put_simple3(&ffp->msg_queue, what, arg1, arg2);
}
inline static void ffp_notify_msg4(FFPlayer *ffp, int what, int arg1, int arg2, void *obj, int obj_len) {
msg_queue_put_simple4(&ffp->msg_queue, what, arg1, arg2, obj, obj_len);
}
inline static void ffp_remove_msg(FFPlayer *ffp, int what) {
msg_queue_remove(&ffp->msg_queue, what);
}
......
......@@ -259,6 +259,10 @@ void ijkmeta_set_avformat_context_l(IjkMediaMeta *meta, AVFormatContext *ic)
ijkmeta_set_int64_l(stream_meta, IJKM_KEY_CHANNEL_LAYOUT, codecpar->channel_layout);
break;
}
case AVMEDIA_TYPE_SUBTITLE: {
ijkmeta_set_string_l(stream_meta, IJKM_KEY_TYPE, IJKM_VAL_TYPE__TIMEDTEXT);
break;
}
default: {
ijkmeta_set_string_l(stream_meta, IJKM_KEY_TYPE, IJKM_VAL_TYPE__UNKNOWN);
break;
......
......@@ -33,11 +33,13 @@
#define IJKM_KEY_BITRATE "bitrate"
#define IJKM_KEY_VIDEO_STREAM "video"
#define IJKM_KEY_AUDIO_STREAM "audio"
#define IJKM_KEY_TIMEDTEXT_STREAM "timedtext"
// stream meta
#define IJKM_KEY_TYPE "type"
#define IJKM_VAL_TYPE__VIDEO "video"
#define IJKM_VAL_TYPE__AUDIO "audio"
#define IJKM_VAL_TYPE__TIMEDTEXT "timedtext"
#define IJKM_VAL_TYPE__UNKNOWN "unknown"
#define IJKM_KEY_LANGUAGE "language"
......
......@@ -764,9 +764,10 @@ int ijkmp_get_msg(IjkMediaPlayer *mp, AVMessage *msg, int block)
pthread_mutex_unlock(&mp->mutex);
break;
}
msg_free_res(msg);
if (continue_wait_next_msg)
if (continue_wait_next_msg) {
msg_free_res(msg);
continue;
}
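/* msg_free_res() now runs only when the message is consumed here; a message returned to the caller keeps its obj payload until the caller frees it */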
return retval;
}
......