Commit baa26258 — Author: Zhang Rui

ijksdl: add android audiotrack implement

Parent: 39c62235
......@@ -25,6 +25,65 @@
#include <assert.h>
#include "ijkutil/ijkutil.h"
#include "ijksdl/ijksdl_audio.h"
/* One row of the SDL <-> Android channel translation table. */
typedef struct AudioChannelMapEntry {
    Uint8 sdl_channel;    /* SDL channel count (1 = mono, 2 = stereo) */
    int android_channel;  /* android.media.AudioFormat CHANNEL_OUT_* mask */
} AudioChannelMapEntry;

/* Lookup table used by find_sdl_channel()/find_android_channel().
 * Order matters only for readability; channel counts are unique. */
static AudioChannelMapEntry g_audio_channel_map[] = {
    { 2, CHANNEL_OUT_STEREO },
    { 1, CHANNEL_OUT_MONO },
};
/* One row of the SDL <-> Android sample-format translation table. */
typedef struct AudioFormatMapEntry {
    SDL_AudioFormat sdl_format; /* SDL AUDIO_* constant */
    int android_format;         /* android.media.AudioFormat ENCODING_* constant */
} AudioFormatMapEntry;

/* Lookup table used by find_sdl_format()/find_android_format().
 * Only the two PCM encodings AudioTrack supports are mapped. */
static AudioFormatMapEntry g_audio_format_map[] = {
    { AUDIO_S16, ENCODING_PCM_16BIT },
    { AUDIO_U8, ENCODING_PCM_8BIT },
};
/* Translate an Android CHANNEL_OUT_* mask to an SDL channel count.
 * Returns 0 when the mask is not in g_audio_channel_map. */
static Uint8 find_sdl_channel(int android_channel)
{
    for (int idx = 0; idx < NELEM(g_audio_channel_map); ++idx) {
        const AudioChannelMapEntry *row = &g_audio_channel_map[idx];
        if (android_channel == row->android_channel)
            return row->sdl_channel;
    }

    return 0;
}
/* Translate an SDL channel count to an Android CHANNEL_OUT_* mask.
 * Returns CHANNEL_OUT_INVALID when the count is not supported. */
static int find_android_channel(int sdl_channel)
{
    for (int idx = 0; idx < NELEM(g_audio_channel_map); ++idx) {
        const AudioChannelMapEntry *row = &g_audio_channel_map[idx];
        if (sdl_channel == row->sdl_channel)
            return row->android_channel;
    }

    return CHANNEL_OUT_INVALID;
}
static Uint8 find_sdl_format(int android_format)
{
for (int i = 0; i < NELEM(g_audio_format_map); ++i) {
AudioFormatMapEntry *entry = &g_audio_format_map[i];
if (entry->android_format == android_format)
return entry->sdl_format;
}
return 0;
}
/* Translate an SDL AUDIO_* format to an Android ENCODING_* constant.
 * Returns ENCODING_INVALID when the format is not supported. */
static int find_android_format(int sdl_format)
{
    for (int idx = 0; idx < NELEM(g_audio_format_map); ++idx) {
        const AudioFormatMapEntry *row = &g_audio_format_map[idx];
        if (sdl_format == row->sdl_format)
            return row->android_format;
    }

    return ENCODING_INVALID;
}
typedef struct SDL_AndroidAudioTrack {
jobject thiz;
......@@ -88,7 +147,7 @@ int sdl_audiotrack_global_init(JNIEnv *env)
return 0;
}
void sdl_audiotrack_get_default_spec(SDL_AndroidAudioTrack_Spec *spec)
static void sdl_audiotrack_get_default_spec(SDL_AndroidAudioTrack_Spec *spec)
{
assert(spec);
spec->stream_type = STREAM_MUSIC;
......@@ -99,13 +158,54 @@ void sdl_audiotrack_get_default_spec(SDL_AndroidAudioTrack_Spec *spec)
spec->mode = MODE_STREAM;
}
/* Query android.media.AudioTrack.getMinBufferSize() for the minimum
 * driver buffer size (in bytes) matching the given spec.
 * Returns -1 and clears the pending Java exception on JNI failure.
 *
 * FIX: removed a stray, fused leftover line from the old
 * "sdl_audiotrack_new" signature that preceded this definition and
 * broke compilation. */
static int audiotrack_get_min_buffer_size(JNIEnv *env, SDL_AndroidAudioTrack_Spec *spec)
{
    int retval = (*env)->CallStaticIntMethod(env, g_clazz.clazz, g_clazz.getMinBufferSize,
        (int) spec->sample_rate_in_hz,
        (int) spec->channel_config,
        (int) spec->audio_format);
    if ((*env)->ExceptionCheck(env)) {
        ALOGE("sdl_audiotrack_get_min_buffer_size: getMinBufferSize: Exception:");
        (*env)->ExceptionDescribe(env);
        (*env)->ExceptionClear(env);
        return -1;
    }

    return retval;
}
SDL_AndroidAudioTrack *sdl_audiotrack_new_from_spec(JNIEnv *env, SDL_AndroidAudioTrack_Spec *spec)
{
assert(spec);
switch (spec->channel_config) {
case CHANNEL_OUT_MONO:
break;
case CHANNEL_OUT_STEREO:
break;
default:
ALOGE("sdl_audiotrack_new_from_spec: invalid channel %d", spec->channel_config);
return NULL;
}
switch (spec->audio_format) {
case ENCODING_PCM_16BIT:
break;
case ENCODING_PCM_8BIT:
break;
default:
ALOGE("sdl_audiotrack_new_from_spec: invalid format %d", spec->audio_format);
return NULL;
}
int min_buffer_size = audiotrack_get_min_buffer_size(env, spec);
if (min_buffer_size <= 0) {
ALOGE("sdl_audiotrack_new: sdl_audiotrack_get_min_buffer_size: return %d:", min_buffer_size);
return NULL;
}
jobject thiz = (*env)->NewObject(env, g_clazz.clazz, g_clazz.constructor,
spec->stream_type, spec->sample_rate_in_hz, spec->channel_config,
spec->audio_format, spec->buffer_size_in_bytes, spec->mode);
spec->audio_format, min_buffer_size, spec->mode);
if (!thiz || (*env)->ExceptionCheck(env)) {
ALOGE("sdl_audiotrack_new: NewObject: Exception:");
if ((*env)->ExceptionCheck(env)) {
......@@ -124,7 +224,7 @@ SDL_AndroidAudioTrack *sdl_audiotrack_new(JNIEnv *env, SDL_AndroidAudioTrack_Spe
memset(atrack, 0, sizeof(SDL_AndroidAudioTrack));
atrack->spec = *spec;
atrack->min_buffer_size = sdl_audiotrack_get_min_buffer_size(env, atrack);
atrack->min_buffer_size = min_buffer_size;
atrack->thiz = (*env)->NewGlobalRef(env, thiz);
(*env)->DeleteLocalRef(env, thiz);
......@@ -132,20 +232,51 @@ SDL_AndroidAudioTrack *sdl_audiotrack_new(JNIEnv *env, SDL_AndroidAudioTrack_Spe
return atrack;
}
int sdl_audiotrack_get_min_buffer_size(JNIEnv *env, SDL_AndroidAudioTrack *atrack)
SDL_AndroidAudioTrack *sdl_audiotrack_new_from_sdl_spec(JNIEnv *env, SDL_AudioSpec *sdl_spec)
{
int retval = (*env)->CallStaticIntMethod(env, g_clazz.clazz, g_clazz.getMinBufferSize,
(int) atrack->spec.sample_rate_in_hz,
(int) atrack->spec.channel_config,
(int) atrack->spec.audio_format);
if ((*env)->ExceptionCheck(env)) {
ALOGE("sdl_audiotrack_get_min_buffer_size: getMinBufferSize: Exception:");
(*env)->ExceptionDescribe(env);
(*env)->ExceptionClear(env);
return -1;
SDL_AndroidAudioTrack_Spec atrack_spec;
sdl_audiotrack_get_default_spec(&atrack_spec);
atrack_spec.sample_rate_in_hz = sdl_spec->freq;
atrack_spec.channel_config = find_android_channel(sdl_spec->channels);
atrack_spec.audio_format = find_android_format(sdl_spec->format);
atrack_spec.buffer_size_in_bytes = sdl_spec->size;
// TODO: consider spec.sample
return sdl_audiotrack_new_from_spec(env, &atrack_spec);
}
/* Release all JNI global references held by the track, then free the
 * struct itself. Safe to call with partially-initialized tracks.
 *
 * FIX: removed a stray "return retval;" fused into the middle of the
 * body by a bad merge (this is a void function and retval does not
 * exist here); it would have skipped releasing the AudioTrack global
 * ref and leaked the struct. */
void sdl_audiotrack_free(JNIEnv *env, SDL_AndroidAudioTrack* atrack)
{
    if (atrack->buffer) {
        (*env)->DeleteGlobalRef(env, atrack->buffer);
        atrack->buffer = NULL;
    }
    atrack->buffer_capacity = 0;

    if (atrack->thiz) {
        (*env)->DeleteGlobalRef(env, atrack->thiz);
        atrack->thiz = NULL;
    }

    free(atrack);
}
/* Report the track's actual configuration back as an SDL_AudioSpec.
 * silence/padding are not meaningful for AudioTrack and are zeroed. */
void sdl_audiotrack_get_target_spec(SDL_AndroidAudioTrack *atrack, SDL_AudioSpec *sdl_spec)
{
    const SDL_AndroidAudioTrack_Spec *src = &atrack->spec;

    sdl_spec->freq     = src->sample_rate_in_hz;
    sdl_spec->channels = find_sdl_channel(src->channel_config);
    sdl_spec->format   = find_sdl_format(src->audio_format);
    sdl_spec->size     = src->buffer_size_in_bytes;
    sdl_spec->silence  = 0;
    sdl_spec->padding  = 0;
}
/* Accessor for the minimum buffer size cached at track creation
 * (result of AudioTrack.getMinBufferSize()); no JNI call needed. */
int sdl_audiotrack_get_min_buffer_size(SDL_AndroidAudioTrack* atrack)
{
    return atrack->min_buffer_size;
}
void sdl_audiotrack_play(JNIEnv *env, SDL_AndroidAudioTrack *atrack)
......
......@@ -26,6 +26,8 @@
#include <stdint.h>
#include <jni.h>
#include "ijksdl_audio.h"
#include "ijksdl_aout.h"
typedef struct SDL_AndroidAudioTrack_Spec {
enum StreamType {
......@@ -40,10 +42,11 @@ typedef struct SDL_AndroidAudioTrack_Spec {
int sample_rate_in_hz;
enum ChannelConfig {
CHANNEL_OUT_INVALID = 0x0,
CHANNEL_OUT_DEFAULT = 0x1, /* f-l */
CHANNEL_OUT_MONO = 0x4, /* f-l, f-r */
CHANNEL_OUT_STEREO = 0xc, /* f-l, f-r, b-l, b-r */
CHANNEL_OUT_QUAD = 0xcc, /* f-l, f-r, b-l, b-r, f-c, low */
CHANNEL_OUT_QUAD = 0xcc, /* f-l, f-r, b-l, b-r */
CHANNEL_OUT_SURROUND = 0x41c, /* f-l, f-r, f-c, b-c */
CHANNEL_OUT_5POINT1 = 0xfc, /* f-l, f-r, b-l, b-r, f-c, low */
CHANNEL_OUT_7POINT1 = 0x3fc, /* f-l, f-r, b-l, b-r, f-c, low, f-lc, f-rc */
......@@ -62,8 +65,8 @@ typedef struct SDL_AndroidAudioTrack_Spec {
enum AudioFormat {
ENCODING_INVALID = 0,
ENCODING_DEFAULT = 1,
ENCODING_PCM_16BIT = 2,
ENCODING_PCM_8BIT = 3,
ENCODING_PCM_16BIT = 2, // signed, guaranteed to be supported by devices.
ENCODING_PCM_8BIT = 3, // unsigned, not guaranteed to be supported by devices.
} audio_format;
int buffer_size_in_bytes;
......@@ -71,16 +74,22 @@ typedef struct SDL_AndroidAudioTrack_Spec {
MODE_STATIC = 0,
MODE_STREAM = 1,
} mode;
// extra field
int sdl_samples;
} SDL_AndroidAudioTrack_Spec;
typedef struct SDL_AndroidAudioTrack SDL_AndroidAudioTrack;
int sdl_audiotrack_global_init(JNIEnv *env);
void sdl_audiotrack_get_default_spec(SDL_AndroidAudioTrack_Spec *spec);
SDL_AndroidAudioTrack *sdl_audiotrack_new(JNIEnv *env, SDL_AndroidAudioTrack_Spec *spec);
SDL_AndroidAudioTrack *sdl_audiotrack_new_from_spec(JNIEnv *env, SDL_AndroidAudioTrack_Spec *spec);
SDL_AndroidAudioTrack *sdl_audiotrack_new_from_sdl_spec(JNIEnv *env, SDL_AudioSpec *sdl_spec);
void sdl_audiotrack_free(JNIEnv *env, SDL_AndroidAudioTrack* atrack);
void sdl_audiotrack_get_target_spec(SDL_AndroidAudioTrack* atrack, SDL_AudioSpec *spec);
int sdl_audiotrack_get_min_buffer_size(SDL_AndroidAudioTrack* atrack);
int sdl_audiotrack_get_min_buffer_size(JNIEnv *env, SDL_AndroidAudioTrack *atrack);
void sdl_audiotrack_play(JNIEnv *env, SDL_AndroidAudioTrack *atrack);
void sdl_audiotrack_pause(JNIEnv *env, SDL_AndroidAudioTrack *atrack);
void sdl_audiotrack_flush(JNIEnv *env, SDL_AndroidAudioTrack *atrack);
......
......@@ -23,57 +23,170 @@
#include "ijksdl_aout_android_audiotrack.h"
#include <stdbool.h>
#include <assert.h>
#include "ijkutil/ijkutil.h"
#include "ijksdl_thread.h"
#include "ijksdl_aout_internal.h"
#include "android/android_audiotrack.h"
/* Private state of the AudioTrack-backed SDL_Aout.
 *
 * FIX: a bad merge left the old fields ("SDL_cond *wakeup;",
 * "int abort_request;", "SDL_Thread *audio_thread;") fused alongside
 * their replacements, duplicating the member name abort_request and
 * breaking compilation. Only the current fields remain. */
typedef struct SDL_Aout_Opaque {
    JavaVM *jvm;                     /* VM handle; threads attach on demand */

    SDL_cond *wakeup_cond;           /* signalled on unpause / abort */
    SDL_mutex *wakeup_mutex;         /* guards pause_on / abort_request transitions */

    SDL_AudioSpec spec;              /* caller's desired spec (callback + userdata) */
    SDL_AndroidAudioTrack* atrack;   /* the underlying java AudioTrack wrapper */
    uint8_t *buffer;                 /* staging buffer filled by the callback */
    int buffer_size;                 /* size of buffer in bytes */

    volatile bool need_flush;        /* request the audio thread to flush the track */
    volatile bool pause_on;          /* audio thread idles while set */
    volatile bool abort_request;     /* audio thread exits when set */

    SDL_Thread *audio_tid;           /* running thread, NULL when not started */
    SDL_Thread _audio_tid;           /* storage for SDL_CreateThreadEx */
} SDL_Aout_Opaque;
/* Audio thread body, already attached to the JVM: pump the user
 * callback into the staging buffer and push it to the AudioTrack
 * until abort_request is set, then release the track.
 *
 * FIX: removed fused merge remnants — the old "int aout_thread(void *arg)"
 * signature line preceding this definition and the stale
 * "SDL_Aout *aout = arg;" (arg is not in scope here; aout is a parameter). */
int aout_thread_n(JNIEnv *env, SDL_Aout *aout)
{
    SDL_Aout_Opaque *opaque = aout->opaque;
    SDL_AndroidAudioTrack *atrack = opaque->atrack;
    SDL_AudioCallback audio_cblk = opaque->spec.callback;
    void *userdata = opaque->spec.userdata;
    uint8_t *buffer = opaque->buffer;
    int buffer_size = sdl_audiotrack_get_min_buffer_size(atrack);

    assert(atrack);
    assert(buffer);
    SDL_SetThreadPriority(SDL_THREAD_PRIORITY_HIGH);

    while (!opaque->abort_request) {
        /* Idle (with 1s timeout so abort is never missed) while paused. */
        SDL_LockMutex(opaque->wakeup_mutex);
        while (!opaque->abort_request && opaque->pause_on)
            SDL_CondWaitTimeout(opaque->wakeup_cond, opaque->wakeup_mutex, 1000);
        SDL_UnlockMutex(opaque->wakeup_mutex);

        audio_cblk(userdata, buffer, buffer_size);
        if (opaque->need_flush) {
            /* NOTE(review): need_flush is never cleared in the visible code,
             * so once set the track is flushed every iteration — confirm
             * whether the setter is expected to reset it. */
            sdl_audiotrack_flush(env, atrack);
        }

        sdl_audiotrack_write_byte(env, atrack, buffer, buffer_size);
        // FIXME: if callback return -1 or 0
    }

    sdl_audiotrack_free(env, atrack);
    return 0;
}
void aout_free_l(SDL_Aout *aout)
int aout_thread(void *arg)
{
if (!aout)
return;
SDL_Aout *aout = arg;
SDL_Aout_Opaque *opaque = aout->opaque;
JavaVM *jvm = opaque->jvm;
JNIEnv *env = NULL;
if (!(*jvm)->AttachCurrentThread(jvm, &env, NULL)) {
ALOGE("aout_thread: AttachCurrentThread: failed");
return -1;
}
int retval = aout_thread_n(env, aout);
(*jvm)->DetachCurrentThread(jvm);
return retval;
}
/* Create the AudioTrack and start the playout thread (caller supplies
 * an attached JNIEnv). Returns 0 on success, -1 on failure with no
 * resources leaked. When obtained is non-NULL it is filled with the
 * track's actual spec.
 *
 * FIXES:
 *  - fixed typo "sdl_audiotrack_get_min_buffer_sizoe" -> "..._size"
 *  - removed fused merge remnants ("if (opaque) {",
 *    "SDL_DestroyCond(opaque->wakeup);", "SDL_Aout_FreeInternal(aout);")
 *  - on SDL_CreateThreadEx failure the function used to fall through and
 *    return 0 while leaking the staging buffer; now it cleans up fully
 *    and returns -1
 *  - honor the previously-ignored "obtained" parameter via
 *    sdl_audiotrack_get_target_spec(). */
int aout_open_audio_n(JNIEnv *env, SDL_Aout *aout, SDL_AudioSpec *desired, SDL_AudioSpec *obtained)
{
    assert(desired);
    SDL_Aout_Opaque *opaque = aout->opaque;

    opaque->spec = *desired;
    opaque->atrack = sdl_audiotrack_new_from_sdl_spec(env, desired);
    if (!opaque->atrack)
        return -1;

    opaque->buffer_size = sdl_audiotrack_get_min_buffer_size(opaque->atrack);
    opaque->buffer = malloc(opaque->buffer_size);
    if (!opaque->buffer) {
        sdl_audiotrack_free(env, opaque->atrack);
        opaque->atrack = NULL;
        return -1;
    }

    if (obtained)
        sdl_audiotrack_get_target_spec(opaque->atrack, obtained);

    opaque->pause_on = 1;
    opaque->abort_request = 0;
    opaque->audio_tid = SDL_CreateThreadEx(&opaque->_audio_tid, aout_thread, aout);
    if (!opaque->audio_tid) {
        free(opaque->buffer);
        opaque->buffer = NULL;
        sdl_audiotrack_free(env, opaque->atrack);
        opaque->atrack = NULL;
        return -1;
    }

    return 0;
}
/* Public open entry: attach the calling thread to the JVM, delegate to
 * aout_open_audio_n(), detach, and return its result (-1 on attach failure).
 *
 * FIX: removed a stray "return -1;" fused as the first statement by a
 * bad merge — it made the entire body unreachable so opening always
 * failed. */
int aout_open_audio(SDL_Aout *aout, SDL_AudioSpec *desired, SDL_AudioSpec *obtained)
{
    SDL_Aout_Opaque *opaque = aout->opaque;
    JavaVM *jvm = opaque->jvm;
    JNIEnv *env = NULL;

    if ((*jvm)->AttachCurrentThread(jvm, &env, NULL) != 0) {
        ALOGE("aout_open_audio: AttachCurrentThread: failed");
        return -1;
    }

    int retval = aout_open_audio_n(env, aout, desired, obtained);
    (*jvm)->DetachCurrentThread(jvm);
    return retval;
}
/* Set or clear the pause flag under the wakeup mutex; waking the audio
 * thread immediately when playback resumes. */
void aout_pause_audio(SDL_Aout *aout, int pause_on)
{
    SDL_Aout_Opaque *opaque = aout->opaque;

    SDL_LockMutex(opaque->wakeup_mutex);
    opaque->pause_on = pause_on;
    if (pause_on == 0)
        SDL_SignalCond(opaque->wakeup_cond);
    SDL_UnlockMutex(opaque->wakeup_mutex);
}
/* Ask the audio thread to exit and join it.
 *
 * FIX: added guards for a NULL opaque and a NULL audio_tid —
 * aout_free_l() calls this unconditionally, so closing an aout that was
 * never opened (or was already closed) previously handed NULL to
 * SDL_WaitThread(), which asserts on its argument. */
void aout_close_audio(SDL_Aout *aout)
{
    SDL_Aout_Opaque *opaque = aout->opaque;
    if (!opaque || !opaque->audio_tid)
        return;  /* never opened, or already closed */

    SDL_LockMutex(opaque->wakeup_mutex);
    opaque->abort_request = true;
    SDL_SignalCond(opaque->wakeup_cond);
    SDL_UnlockMutex(opaque->wakeup_mutex);

    SDL_WaitThread(opaque->audio_tid, NULL);
    opaque->audio_tid = NULL;
}
void SDL_Init_AoutAndroid(JNIEnv *env)
void aout_free_l(SDL_Aout *aout)
{
if (!aout)
return;
aout_close_audio(aout);
SDL_Aout_Opaque *opaque = aout->opaque;
if (opaque) {
free(opaque->buffer);
opaque->buffer = NULL;
opaque->buffer_size = 0;
SDL_DestroyCond(opaque->wakeup_cond);
SDL_DestroyMutex(opaque->wakeup_mutex);
}
SDL_Aout_FreeInternal(aout);
}
SDL_Aout *SDL_AoutAndroid_CreateForAudioTrack(JavaVM* jvm)
......@@ -84,7 +197,8 @@ SDL_Aout *SDL_AoutAndroid_CreateForAudioTrack(JavaVM* jvm)
SDL_Aout_Opaque *opaque = aout->opaque;
opaque->jvm = jvm;
opaque->wakeup = SDL_CreateCond();
opaque->wakeup_cond = SDL_CreateCond();
opaque->wakeup_mutex = SDL_CreateMutex();
aout->free_l = aout_free_l;
aout->open_audio = aout_open_audio;
......@@ -93,3 +207,8 @@ SDL_Aout *SDL_AoutAndroid_CreateForAudioTrack(JavaVM* jvm)
return aout;
}
/* Global initialization hook for the Android aout backend.
 * Currently a no-op: nothing in this translation unit needs per-process
 * setup beyond what sdl_audiotrack_global_init() does. */
void SDL_Init_AoutAndroid(JNIEnv *env)
{
}
......@@ -29,6 +29,7 @@
typedef uint16_t SDL_AudioFormat;
#define AUDIO_INVALID 0x0000
#define AUDIO_U8 0x0008 /**< Unsigned 8-bit samples */
#define AUDIO_S8 0x8008 /**< Signed 8-bit samples */
#define AUDIO_U16LSB 0x0010 /**< Unsigned 16-bit samples */
......@@ -68,7 +69,7 @@ typedef struct SDL_AudioSpec
Uint8 channels; /**< Number of channels: 1 mono, 2 stereo */
Uint8 silence; /**< Audio buffer silence value (calculated) */
Uint16 samples; /**< Audio buffer size in samples (power of 2) */
Uint16 padding; /**< Necessary for some compile environments */
Uint16 padding; /**< NOT USED. Necessary for some compile environments */
Uint32 size; /**< Audio buffer size in bytes (calculated) */
SDL_AudioCallback callback;
void *userdata;
......
......@@ -23,6 +23,7 @@
#include <errno.h>
#include <assert.h>
#include "ijkutil/ijkutil.h"
#include "ijksdl_thread.h"
static void *SDL_RunThread(void *data)
......@@ -43,6 +44,32 @@ SDL_Thread *SDL_CreateThreadEx(SDL_Thread *thread, int (*fn)(void *), void *data
return thread;
}
/* Adjust the calling thread's scheduling priority within its current
 * policy: LOW -> policy minimum, HIGH -> policy maximum, otherwise the
 * midpoint. Returns 0 on success, -1 on failure.
 *
 * FIX: pthread_getschedparam()/pthread_setschedparam() return a
 * positive error number on failure (never a negative value), so the
 * original "< 0" checks could never detect an error; check "!= 0". */
int SDL_SetThreadPriority(SDL_ThreadPriority priority)
{
    struct sched_param sched;
    int policy;
    pthread_t thread = pthread_self();

    if (pthread_getschedparam(thread, &policy, &sched) != 0) {
        ALOGE("pthread_getschedparam() failed");
        return -1;
    }

    if (priority == SDL_THREAD_PRIORITY_LOW) {
        sched.sched_priority = sched_get_priority_min(policy);
    } else if (priority == SDL_THREAD_PRIORITY_HIGH) {
        sched.sched_priority = sched_get_priority_max(policy);
    } else {
        int min_priority = sched_get_priority_min(policy);
        int max_priority = sched_get_priority_max(policy);
        sched.sched_priority = (min_priority + (max_priority - min_priority) / 2);
    }

    if (pthread_setschedparam(thread, policy, &sched) != 0) {
        ALOGE("pthread_setschedparam() failed");
        return -1;
    }

    return 0;
}
void SDL_WaitThread(SDL_Thread *thread, int *status)
{
assert(thread);
......
......@@ -27,6 +27,12 @@
#include <stdint.h>
#include <pthread.h>
/* Requested scheduling priority for SDL_SetThreadPriority(). */
typedef enum {
    SDL_THREAD_PRIORITY_LOW,     /* mapped to sched_get_priority_min() of the policy */
    SDL_THREAD_PRIORITY_NORMAL,  /* mapped to the midpoint between min and max */
    SDL_THREAD_PRIORITY_HIGH     /* mapped to sched_get_priority_max() of the policy */
} SDL_ThreadPriority;
typedef struct SDL_Thread
{
pthread_t id;
......@@ -36,6 +42,7 @@ typedef struct SDL_Thread
} SDL_Thread;
SDL_Thread *SDL_CreateThreadEx(SDL_Thread *thread, int (*fn)(void *), void *data);
int SDL_SetThreadPriority(SDL_ThreadPriority priority);
void SDL_WaitThread(SDL_Thread *thread, int *status);
#endif
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册