Commit 2c4b2295 authored by Zhang Rui

ios: support playbackRate change

Parent 73d76759
......@@ -1475,6 +1475,7 @@ static int configure_video_filters(FFPlayer *ffp, AVFilterGraph *graph, VideoSta
}
}
#ifdef FFP_AVFILTER_PLAYBACK_RATE
if (fabsf(ffp->pf_playback_rate) > 0.00001 &&
fabsf(ffp->pf_playback_rate - 1.0f) > 0.00001) {
char setpts_buf[256];
......@@ -1484,6 +1485,7 @@ static int configure_video_filters(FFPlayer *ffp, AVFilterGraph *graph, VideoSta
snprintf(setpts_buf, sizeof(setpts_buf), "%f*PTS", rate);
INSERT_FILT("setpts", setpts_buf);
}
#endif
if ((ret = configure_filtergraph(graph, vfilters, filt_src, last_filter)) < 0)
goto fail;
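
The video path above is only compiled when FFP_AVFILTER_PLAYBACK_RATE is defined (the define stays commented out in the config hunk further down), and it retimes frames with the setpts filter: for a playback rate r, every PTS is scaled by 1/r. A minimal sketch of how that argument string is formed, with a hypothetical helper name that is not part of the commit:

#include <stdio.h>

/* Hypothetical helper: for playback rate r, video is retimed with
 * setpts=(1/r)*PTS, e.g. r = 2.0 yields "0.500000*PTS" (twice as fast). */
static void build_setpts_arg(char *buf, size_t size, float playback_rate)
{
    double rate = 1.0 / playback_rate;
    snprintf(buf, size, "%f*PTS", rate);
}
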
......@@ -1564,6 +1566,7 @@ static int configure_audio_filters(FFPlayer *ffp, const char *afilters, int forc
if (afilters)
snprintf(afilters_args, sizeof(afilters_args), "%s", afilters);
#ifdef FFP_AVFILTER_PLAYBACK_RATE
if (fabsf(ffp->pf_playback_rate) > 0.00001 &&
fabsf(ffp->pf_playback_rate - 1.0f) > 0.00001) {
if (afilters_args[0])
......@@ -1572,6 +1575,7 @@ static int configure_audio_filters(FFPlayer *ffp, const char *afilters, int forc
av_log(ffp, AV_LOG_INFO, "af_rate=%f\n", ffp->pf_playback_rate);
av_strlcatf(afilters_args, sizeof(afilters_args), "atempo=%f", ffp->pf_playback_rate);
}
#endif
if ((ret = configure_filtergraph(is->agraph, afilters_args[0] ? afilters_args : NULL, filt_asrc, filt_asink)) < 0)
goto end;
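
For audio, the filter path appends atempo with the raw rate. ffmpeg's atempo filter has historically accepted tempo values only in [0.5, 2.0], so rates outside that range would need to be chained; a hedged sketch of that chaining, which is not something this commit does:

#include <stdio.h>
#include <string.h>

/* Hypothetical helper: split an out-of-range rate into a chain such as
 * "atempo=2.0,atempo=2.0" for 4.0x, because one atempo stage may reject it. */
static void build_atempo_chain(char *buf, size_t size, float rate)
{
    buf[0] = '\0';
    while (rate > 2.0f) {
        strncat(buf, "atempo=2.0,", size - strlen(buf) - 1);
        rate /= 2.0f;
    }
    while (rate > 0.0f && rate < 0.5f) {
        strncat(buf, "atempo=0.5,", size - strlen(buf) - 1);
        rate *= 2.0f;
    }
    snprintf(buf + strlen(buf), size - strlen(buf), "atempo=%f", rate);
}
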
......@@ -2037,6 +2041,11 @@ static void sdl_audio_callback(void *opaque, Uint8 *stream, int len)
ffp->audio_callback_time = av_gettime_relative();
if (ffp->pf_playback_rate_changed) {
ffp->pf_playback_rate_changed = 0;
SDL_AoutSetPlaybackRate(ffp->aout, ffp->pf_playback_rate);
}
while (len > 0) {
if (is->audio_buf_index >= is->audio_buf_size) {
audio_size = audio_decode_frame(ffp);
......@@ -3827,13 +3836,8 @@ void ffp_set_playback_rate(FFPlayer *ffp, float rate)
if (!ffp)
return;
SDL_LockMutex(ffp->af_mutex);
SDL_LockMutex(ffp->vf_mutex);
ffp->pf_playback_rate = rate;
ffp->vf_changed = 1;
ffp->af_changed = 1;
SDL_UnlockMutex(ffp->vf_mutex);
SDL_UnlockMutex(ffp->af_mutex);
ffp->pf_playback_rate_changed = 1;
}
int ffp_get_video_rotate_degrees(FFPlayer *ffp)
......
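
This is the core behavioral change: ffp_set_playback_rate no longer takes af_mutex/vf_mutex and marks both filter graphs dirty (which forced a graph rebuild); it now only stores the rate and raises pf_playback_rate_changed, and the SDL audio callback consumes that flag on the audio thread and forwards the rate through SDL_AoutSetPlaybackRate. A hedged caller-side sketch; the wrapper name and the clamp range are assumptions, not part of this commit:

/* Hypothetical caller: clamp the requested rate to a range the AudioQueue
 * time-pitch processor handles well before handing it to the ffplay layer. */
void example_set_playback_rate(FFPlayer *ffp, float rate)
{
    if (rate < 0.5f)
        rate = 0.5f;
    else if (rate > 2.0f)
        rate = 2.0f;
    ffp_set_playback_rate(ffp, rate);   /* sets the rate and the changed flag only */
}
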
......@@ -45,4 +45,6 @@
// #define FFP_SHOW_AMC_DROPS
// #define FFP_AMC_DISABLE_OUTPUT
// #define FFP_AVFILTER_PLAYBACK_RATE
#endif
......@@ -624,6 +624,7 @@ typedef struct FFPlayer {
int vf_changed;
int af_changed;
float pf_playback_rate;
int pf_playback_rate_changed;
FFStatistic stat;
FFDemuxCacheControl dcc;
......@@ -730,6 +731,7 @@ inline static void ffp_reset_internal(FFPlayer *ffp)
ffp->vf_changed = 0;
ffp->af_changed = 0;
ffp->pf_playback_rate = 1.0f;
ffp->pf_playback_rate_changed = 0;
msg_queue_flush(&ffp->msg_queue);
......
......@@ -96,6 +96,14 @@ void SDL_AoutSetDefaultLatencySeconds(SDL_Aout *aout, double latency)
}
}
void SDL_AoutSetPlaybackRate(SDL_Aout *aout, float playbackRate)
{
if (aout) {
if (aout->func_set_playback_rate)
aout->func_set_playback_rate(aout, playbackRate);
}
}
int SDL_AoutGetAudioSessionId(SDL_Aout *aout)
{
if (aout) {
......
......@@ -47,6 +47,9 @@ struct SDL_Aout {
void (*func_set_default_latency_seconds)(SDL_Aout *aout, double latency);
// optional
void (*func_set_playback_rate)(SDL_Aout *aout, float playbackRate);
// Android only
int (*func_get_audio_session_id)(SDL_Aout *aout);
};
......@@ -62,6 +65,8 @@ double SDL_AoutGetLatencySeconds(SDL_Aout *aout);
void SDL_AoutSetDefaultLatencySeconds(SDL_Aout *aout, double latency);
// optional
void SDL_AoutSetPlaybackRate(SDL_Aout *aout, float playbackRate);
// android only
int SDL_AoutGetAudioSessionId(SDL_Aout *aout);
......
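
func_set_playback_rate is declared as an optional hook, and SDL_AoutSetPlaybackRate null-checks it before dispatching, so an audio output that cannot change speed just leaves the pointer unset and the call degrades to a no-op. A small sketch of that pattern with made-up names, purely for illustration:

/* dummy_set_playback_rate / register_rate_hook are hypothetical names. */
static void dummy_set_playback_rate(SDL_Aout *aout, float playbackRate)
{
    (void)aout;
    (void)playbackRate;   /* a real backend would forward this to its device */
}

static void register_rate_hook(SDL_Aout *aout, int supports_rate_change)
{
    /* left NULL -> SDL_AoutSetPlaybackRate() silently does nothing */
    aout->func_set_playback_rate = supports_rate_change ? dummy_set_playback_rate : NULL;
}
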
......@@ -35,6 +35,7 @@
- (void)flush;
- (void)stop;
- (void)close;
- (void)setPlaybackRate:(float)playbackRate;
@property (nonatomic, readonly) SDL_AudioSpec spec;
......
......@@ -70,6 +70,14 @@
return nil;
}
UInt32 propValue = 1;
AudioQueueSetProperty(audioQueueRef, kAudioQueueProperty_EnableTimePitch, &propValue, sizeof(propValue));
propValue = 1;
AudioQueueSetProperty(audioQueueRef, kAudioQueueProperty_TimePitchBypass, &propValue, sizeof(propValue));
propValue = kAudioQueueTimePitchAlgorithm_Spectral;
AudioQueueSetProperty(audioQueueRef, kAudioQueueProperty_TimePitchAlgorithm, &propValue, sizeof(propValue));
status = AudioQueueStart(audioQueueRef, NULL);
if (status != noErr) {
NSLog(@"AudioQueue: AudioQueueStart failed (%d)\n", (int)status);
......@@ -179,6 +187,19 @@
_audioQueueRef = nil;
}
- (void)setPlaybackRate:(float)playbackRate
{
if (fabsf(playbackRate - 1.0f) <= 0.000001) {
UInt32 propValue = 1;
AudioQueueSetProperty(_audioQueueRef, kAudioQueueProperty_TimePitchBypass, &propValue, sizeof(propValue));
AudioQueueSetParameter(_audioQueueRef, kAudioQueueParam_PlayRate, 1.0f);
} else {
UInt32 propValue = 0;
AudioQueueSetProperty(_audioQueueRef, kAudioQueueProperty_TimePitchBypass, &propValue, sizeof(propValue));
AudioQueueSetParameter(_audioQueueRef, kAudioQueueParam_PlayRate, playbackRate);
}
}
static void IJKSDLAudioQueueOuptutCallback(void * inUserData, AudioQueueRef inAQ, AudioQueueBufferRef inBuffer) {
@autoreleasepool {
IJKSDLAudioQueueController* aqController = (__bridge IJKSDLAudioQueueController *) inUserData;
......
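
On the AudioQueue side the commit leans on the time-pitch processor: kAudioQueueProperty_EnableTimePitch is set before the queue starts, kAudioQueueProperty_TimePitchBypass is toggled whenever the requested rate moves to or away from 1.0, and kAudioQueueParam_PlayRate carries the factor itself. The spectral algorithm chosen here gives the best quality at a higher CPU cost; a hedged sketch of opting for the cheaper time-domain algorithm instead, offered as an alternative rather than what the commit does:

#include <AudioToolbox/AudioToolbox.h>

/* Hypothetical alternative: same enable step as the commit, but selecting the
 * lower-cost time-domain pitch algorithm instead of the spectral one. */
static OSStatus enable_time_pitch_time_domain(AudioQueueRef queue)
{
    UInt32 on = 1;
    OSStatus status = AudioQueueSetProperty(queue, kAudioQueueProperty_EnableTimePitch,
                                            &on, sizeof(on));
    if (status != noErr)
        return status;

    UInt32 algorithm = kAudioQueueTimePitchAlgorithm_TimeDomain;
    return AudioQueueSetProperty(queue, kAudioQueueProperty_TimePitchAlgorithm,
                                 &algorithm, sizeof(algorithm));
}
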
......@@ -80,6 +80,14 @@ static void aout_close_audio(SDL_Aout *aout)
[opaque->aoutController close];
}
static void aout_set_playback_rate(SDL_Aout *aout, float playbackRate)
{
SDLTRACE("aout_set_playback_rate()\n");
SDL_Aout_Opaque *opaque = aout->opaque;
[opaque->aoutController setPlaybackRate:playbackRate];
}
static void aout_free_l(SDL_Aout *aout)
{
if (!aout)
......@@ -110,5 +118,7 @@ SDL_Aout *SDL_AoutIos_CreateForAudioUnit()
aout->flush_audio = aout_flush_audio;
aout->close_audio = aout_close_audio;
aout->func_set_playback_rate = aout_set_playback_rate;
return aout;
}
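
With the iOS audio output registering aout_set_playback_rate, the whole chain is in place; a hedged end-to-end usage sketch using only symbols that appear in this diff, with an illustrative rate value:

/* Hypothetical usage: create the iOS audio output and request 1.5x speed;
 * the call is routed to -[IJKSDLAudioQueueController setPlaybackRate:]. */
static void example_ios_rate_change(void)
{
    SDL_Aout *aout = SDL_AoutIos_CreateForAudioUnit();
    if (!aout)
        return;
    SDL_AoutSetPlaybackRate(aout, 1.5f);
}
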