Unverified commit 73fab5d7, authored by openharmony_ci, committed by Gitee

!7257 Update device descriptors, focus types, and sample formats; revise sample code

Merge pull request !7257 from 一杯丞丞汁儿/OpenHarmony-3.2-Beta2
```
var audioManager = audio.getAudioManager();
```
## audio.getStreamManager<sup>9+</sup>
getStreamManager(): AudioStreamManager
Obtains an AudioStreamManager instance.

**System capability:** SystemCapability.Multimedia.Audio.Core

**Return value:**

| Type                                        | Description                     |
| -------------------------------------------------| ------------------------------- |
| [AudioStreamManager](#audiostreammanager9) | AudioStreamManager instance. |

**Example:**
```
var audioStreamManager = audio.getStreamManager();
```
## audio.createAudioRenderer<sup>8+</sup>

createAudioRenderer(options: AudioRendererOptions, callback: AsyncCallback\<AudioRenderer>): void

**System capability:** SystemCapability.Multimedia.Audio.Renderer

**Parameters:**

| Name     | Type                                             | Mandatory | Description      |
| -------- | ----------------------------------------------- | ---- | ---------------- |
```
import audio from '@ohos.multimedia.audio';
var audioStreamInfo = {
  samplingRate: audio.AudioSamplingRate.SAMPLE_RATE_44100,
  channels: audio.AudioChannel.CHANNEL_1,
  sampleFormat: audio.AudioSampleFormat.SAMPLE_FORMAT_S16LE,
  encodingType: audio.AudioEncodingType.ENCODING_TYPE_RAW
}
var audioRendererInfo = {
  content: audio.ContentType.CONTENT_TYPE_SPEECH,
  usage: audio.StreamUsage.STREAM_USAGE_VOICE_COMMUNICATION,
  rendererFlags: 1
}
var audioRendererOptions = {
  streamInfo: audioStreamInfo,
  rendererInfo: audioRendererInfo
}
audio.createAudioRenderer(audioRendererOptions,(err, data) => {
  if (err) {
    console.error(`AudioRenderer Created : Error: ${err.message}`);
  }
  else {
    console.info('AudioRenderer Created : Success : SUCCESS');
    let audioRenderer = data;
  }
});
```

```
import audio from '@ohos.multimedia.audio';
var audioStreamInfo = {
  samplingRate: audio.AudioSamplingRate.SAMPLE_RATE_44100,
  channels: audio.AudioChannel.CHANNEL_1,
  sampleFormat: audio.AudioSampleFormat.SAMPLE_FORMAT_S16LE,
  encodingType: audio.AudioEncodingType.ENCODING_TYPE_RAW
}
var audioRendererInfo = {
  content: audio.ContentType.CONTENT_TYPE_SPEECH,
  usage: audio.StreamUsage.STREAM_USAGE_VOICE_COMMUNICATION,
  rendererFlags: 1
}
var audioRendererOptions = {
  streamInfo: audioStreamInfo,
  rendererInfo: audioRendererInfo
}
var audioRenderer;
audio.createAudioRenderer(audioRendererOptions).then((data) => {
  audioRenderer = data;
  console.info('AudioFrameworkRenderLog: AudioRenderer Created : Success : Stream Type: SUCCESS');
}).catch((err) => {
  console.info('AudioFrameworkRenderLog: AudioRenderer Created : ERROR : '+err.message);
});
```
```
import audio from '@ohos.multimedia.audio';
var audioStreamInfo = {
  samplingRate: audio.AudioSamplingRate.SAMPLE_RATE_44100,
  channels: audio.AudioChannel.CHANNEL_2,
  sampleFormat: audio.AudioSampleFormat.SAMPLE_FORMAT_S16LE,
  encodingType: audio.AudioEncodingType.ENCODING_TYPE_RAW
}
var audioCapturerInfo = {
  source: audio.SourceType.SOURCE_TYPE_MIC,
  capturerFlags: 1
}
var audioCapturerOptions = {
  streamInfo: audioStreamInfo,
  capturerInfo: audioCapturerInfo
}
audio.createAudioCapturer(audioCapturerOptions,(err, data) => {
  if (err) {
    console.error(`AudioCapturer Created : Error: ${err.message}`);
  }
  else {
    console.info('AudioCapturer Created : Success : SUCCESS');
    let audioCapturer = data;
  }
});
```

```
import audio from '@ohos.multimedia.audio';
var audioStreamInfo = {
  samplingRate: audio.AudioSamplingRate.SAMPLE_RATE_44100,
  channels: audio.AudioChannel.CHANNEL_2,
  sampleFormat: audio.AudioSampleFormat.SAMPLE_FORMAT_S16LE,
  encodingType: audio.AudioEncodingType.ENCODING_TYPE_RAW
}
var audioCapturerInfo = {
  source: audio.SourceType.SOURCE_TYPE_MIC,
  capturerFlags: 1
}
var audioCapturerOptions = {
  streamInfo: audioStreamInfo,
  capturerInfo: audioCapturerInfo
}
var audioCapturer;
audio.createAudioCapturer(audioCapturerOptions).then((data) => {
  audioCapturer = data;
  console.info('AudioCapturer Created : Success : Stream Type: SUCCESS');
}).catch((err) => {
  console.info('AudioCapturer Created : ERROR : '+err.message);
});
```
Enumerates the audio focus models.

**System capability:** SystemCapability.Multimedia.Audio.Core

| Name                         | Default Value | Description |
| ---------------------------- | ------ | ---------- |
**System capability:** SystemCapability.Multimedia.Audio.Core (applies to each item in the table below)

| Name                                | Default Value | Description                |
| ---------------------------------- | ------ | -------------------------- |
| SAMPLE_FORMAT_INVALID | -1 | Invalid format. |
| SAMPLE_FORMAT_U8 | 0 | Unsigned 8-bit integer. |
| SAMPLE_FORMAT_S16LE | 1 | Signed 16-bit integer, little endian. |
| SAMPLE_FORMAT_S24LE | 2 | Signed 24-bit integer, little endian. <br>Due to system restrictions, this sample format is supported only on some devices. |
| SAMPLE_FORMAT_S32LE | 3 | Signed 32-bit integer, little endian. <br>Due to system restrictions, this sample format is supported only on some devices. |
| SAMPLE_FORMAT_F32LE<sup>9+</sup> | 4 | 32-bit float, little endian. <br>Due to system restrictions, this sample format is supported only on some devices. |
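
As in the createAudioRenderer and createAudioCapturer examples above, the sample format is selected through the sampleFormat field of AudioStreamInfo. The sketch below is illustrative only: it defaults to SAMPLE_FORMAT_S16LE, which is broadly supported, and marks where a wider format such as SAMPLE_FORMAT_F32LE<sup>9+</sup> could be substituted on devices that support it.

```
import audio from '@ohos.multimedia.audio';

// Illustrative sketch: SAMPLE_FORMAT_S16LE is the broadly supported default.
// S24LE, S32LE, and F32LE are available only on some devices, so substitute
// them here only after confirming support on the target device.
var preferredFormat = audio.AudioSampleFormat.SAMPLE_FORMAT_S16LE;

var audioStreamInfo = {
  samplingRate: audio.AudioSamplingRate.SAMPLE_RATE_44100,
  channels: audio.AudioChannel.CHANNEL_2,
  sampleFormat: preferredFormat,
  encodingType: audio.AudioEncodingType.ENCODING_TYPE_RAW
}
```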
## AudioChannel<sup>8+</sup>

| STREAM_USAGE_VOICE_COMMUNICATION | 2 | Voice communication. |
| STREAM_USAGE_NOTIFICATION_RINGTONE | 6 | Notification ringtone. |
## FocusType<sup>9+</sup>
Enumerates the audio focus types.

**System capability:** SystemCapability.Multimedia.Audio.Core

| Name                                | Default Value | Description                      |
| ---------------------------------- | ------ | ------------------------------- |
| FOCUS_TYPE_RECORDING | 0 | Focus type for recording scenarios; it can interrupt other audio streams. |
## AudioState<sup>8+</sup>

Enumerates the audio states.
Describes the device connection status change and device information.

**System capability:** SystemCapability.Multimedia.Audio.Device

| Name              | Type                                               | Mandatory | Description        |
| :---------------- | :------------------------------------------------ | :--- | :----------------- |
Sets the volume for a stream. This API uses an asynchronous callback to return the result.

**Required permissions:** ohos.permission.ACCESS_NOTIFICATION_POLICY<br/>This permission is required only when the ringer (that is, volumeType set to AudioVolumeType.RINGTONE) is switched between the muted and unmuted states.

**System capability:** SystemCapability.Multimedia.Audio.Volume
```
audioManager.setVolume(audio.AudioVolumeType.MEDIA, 10, (err) => {
  if (err) {
    console.error(`Failed to set the volume. ${err.message}`);
    return;
  }
  console.log('Callback invoked to indicate a successful volume setting.');
});
```
Sets the volume for a stream. This API uses a promise to return the result.

**Required permissions:** ohos.permission.ACCESS_NOTIFICATION_POLICY<br/>This permission is required only when the ringer (that is, volumeType set to AudioVolumeType.RINGTONE) is switched between the muted and unmuted states.

**System capability:** SystemCapability.Multimedia.Audio.Volume
```
audioManager.setVolume(audio.AudioVolumeType.MEDIA, 10).then(() => {
  console.log('Promise returned to indicate a successful volume setting.');
});
```
```
audioManager.getVolume(audio.AudioVolumeType.MEDIA, (err, value) => {
  if (err) {
    console.error(`Failed to obtain the volume. ${err.message}`);
    return;
  }
  console.log('Callback invoked to indicate that the volume is obtained.');
});
```
```
audioManager.getVolume(audio.AudioVolumeType.MEDIA).then((value) => {
  console.log('Promise returned to indicate that the volume is obtained.' + value);
});
```
```
audioManager.getMinVolume(audio.AudioVolumeType.MEDIA, (err, value) => {
  if (err) {
    console.error(`Failed to obtain the minimum volume. ${err.message}`);
    return;
  }
  console.log('Callback invoked to indicate that the minimum volume is obtained.' + value);
});
```
```
audioManager.getMinVolume(audio.AudioVolumeType.MEDIA).then((value) => {
  console.log('Promise returned to indicate that the minimum volume is obtained.' + value);
});
```
```
audioManager.getMaxVolume(audio.AudioVolumeType.MEDIA, (err, value) => {
  if (err) {
    console.error(`Failed to obtain the maximum volume. ${err.message}`);
    return;
  }
  console.log('Callback invoked to indicate that the maximum volume is obtained.' + value);
});
```
```
audioManager.getMaxVolume(audio.AudioVolumeType.MEDIA).then((data) => {
  console.log('Promise returned to indicate that the maximum volume is obtained.');
});
```
Mutes or unmutes a stream. This API uses an asynchronous callback to return the result.

**Required permissions:** ohos.permission.ACCESS_NOTIFICATION_POLICY<br/>This permission is required only when the ringer (that is, volumeType set to AudioVolumeType.RINGTONE) is switched between the muted and unmuted states.

**System capability:** SystemCapability.Multimedia.Audio.Volume
```
audioManager.mute(audio.AudioVolumeType.MEDIA, true, (err) => {
  if (err) {
    console.error(`Failed to mute the stream. ${err.message}`);
    return;
  }
  console.log('Callback invoked to indicate that the stream is muted.');
});
```
Mutes or unmutes a stream. This API uses a promise to return the result.

**Required permissions:** ohos.permission.ACCESS_NOTIFICATION_POLICY<br/>This permission is required only when the ringer (that is, volumeType set to AudioVolumeType.RINGTONE) is switched between the muted and unmuted states.

**System capability:** SystemCapability.Multimedia.Audio.Volume
```
audioManager.mute(audio.AudioVolumeType.MEDIA, true).then(() => {
  console.log('Promise returned to indicate that the stream is muted.');
});
```
```
audioManager.isMute(audio.AudioVolumeType.MEDIA, (err, value) => {
  if (err) {
    console.error(`Failed to obtain the mute status. ${err.message}`);
    return;
  }
  console.log('Callback invoked to indicate that the mute status of the stream is obtained.' + value);
});
```
```
audioManager.isMute(audio.AudioVolumeType.MEDIA).then((value) => {
  console.log('Promise returned to indicate that the mute status of the stream is obtained.' + value);
});
```
```
audioManager.isActive(audio.AudioVolumeType.MEDIA, (err, value) => {
  if (err) {
    console.error(`Failed to obtain the active status of the stream. ${err.message}`);
    return;
  }
  console.log('Callback invoked to indicate that the active status of the stream is obtained.' + value);
});
```
```
audioManager.isActive(audio.AudioVolumeType.MEDIA).then((value) => {
  console.log('Promise returned to indicate that the active status of the stream is obtained.' + value);
});
```
```
audioManager.setRingerMode(audio.AudioRingMode.RINGER_MODE_NORMAL, (err) => {
  if (err) {
    console.error(`Failed to set the ringer mode. ${err.message}`);
    return;
  }
  console.log('Callback invoked to indicate a successful setting of the ringer mode.');
});
```
```
audioManager.setRingerMode(audio.AudioRingMode.RINGER_MODE_NORMAL).then(() => {
  console.log('Promise returned to indicate a successful setting of the ringer mode.');
});
```
```
audioManager.getRingerMode((err, value) => {
  if (err) {
    console.error(`Failed to obtain the ringer mode. ${err.message}`);
    return;
  }
  console.log('Callback invoked to indicate that the ringer mode is obtained.' + value);
});
```
```
audioManager.getRingerMode().then((value) => {
  console.log('Promise returned to indicate that the ringer mode is obtained.' + value);
});
```
```
audioManager.setAudioParameter('key_example', 'value_example', (err) => {
  if (err) {
    console.error(`Failed to set the audio parameter. ${err.message}`);
    return;
  }
  console.log('Callback invoked to indicate a successful setting of the audio parameter.');
});
```
```
audioManager.setAudioParameter('key_example', 'value_example').then(() => {
  console.log('Promise returned to indicate a successful setting of the audio parameter.');
});
```
```
audioManager.getAudioParameter('key_example', (err, value) => {
  if (err) {
    console.error(`Failed to obtain the value of the audio parameter. ${err.message}`);
    return;
  }
  console.log('Callback invoked to indicate that the value of the audio parameter is obtained.' + value);
});
```
```
audioManager.getAudioParameter('key_example').then((value) => {
  console.log('Promise returned to indicate that the value of the audio parameter is obtained.' + value);
});
```
**Example:**

```
audioManager.getDevices(audio.DeviceFlag.OUTPUT_DEVICES_FLAG, (err, value) => {
  if (err) {
    console.error(`Failed to obtain the device list. ${err.message}`);
    return;
  }
  console.log('Callback invoked to indicate that the device list is obtained.');
});
```
```
audioManager.getDevices(audio.DeviceFlag.OUTPUT_DEVICES_FLAG).then((data) => {
  console.log('Promise returned to indicate that the device list is obtained.');
});
```
```
audioManager.setDeviceActive(audio.ActiveDeviceType.SPEAKER, true, (err) => {
  if (err) {
    console.error(`Failed to set the active status of the device. ${err.message}`);
    return;
  }
  console.log('Callback invoked to indicate that the device is set to the active status.');
});
```
```
audioManager.setDeviceActive(audio.ActiveDeviceType.SPEAKER, true).then(() => {
  console.log('Promise returned to indicate that the device is set to the active status.');
});
```
```
audioManager.isDeviceActive(audio.ActiveDeviceType.SPEAKER, (err, value) => {
  if (err) {
    console.error(`Failed to obtain the active status of the device. ${err.message}`);
    return;
  }
  console.log('Callback invoked to indicate that the active status of the device is obtained.');
});
```
```
audioManager.isDeviceActive(audio.ActiveDeviceType.SPEAKER).then((value) => {
  console.log('Promise returned to indicate that the active status of the device is obtained.' + value);
});
```
```
audioManager.setMicrophoneMute(true, (err) => {
  if (err) {
    console.error(`Failed to mute the microphone. ${err.message}`);
    return;
  }
  console.log('Callback invoked to indicate that the microphone is muted.');
});
```
```
audioManager.setMicrophoneMute(true).then(() => {
  console.log('Promise returned to indicate that the microphone is muted.');
});
```
```
audioManager.isMicrophoneMute((err, value) => {
  if (err) {
    console.error(`Failed to obtain the mute status of the microphone. ${err.message}`);
    return;
  }
  console.log('Callback invoked to indicate that the mute status of the microphone is obtained.' + value);
});
```
```
audioManager.isMicrophoneMute().then((value) => {
  console.log('Promise returned to indicate that the mute status of the microphone is obtained.' + value);
});
```
This is a system API and is not available to third-party applications.

Currently, when multiple AudioManager instances are used in a single process, only the subscription made through the last instance takes effect, and the subscriptions of the other instances are overwritten (even if the last instance has not subscribed). Therefore, you are advised to use a single AudioManager instance.

**System capability:** SystemCapability.Multimedia.Audio.Volume

**Parameters:**
```
audioManager.on('volumeChange', (volumeEvent) => {
  console.log('VolumeType of stream: ' + volumeEvent.volumeType);
  console.log('Volume level: ' + volumeEvent.volume);
  console.log('Whether to updateUI: ' + volumeEvent.updateUi);
});
```
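
Because of the single-instance behavior noted above, one workable pattern is to create the AudioManager once for the whole process and register the subscription on that shared instance. The sketch below is illustrative only and uses the getAudioManager and on('volumeChange') APIs shown in this document.

```
import audio from '@ohos.multimedia.audio';

// Create one AudioManager for the whole process and reuse it everywhere,
// so the 'volumeChange' subscription is registered exactly once and is not
// overwritten by a subscription made through another instance.
const sharedAudioManager = audio.getAudioManager();

sharedAudioManager.on('volumeChange', (volumeEvent) => {
  console.log('VolumeType of stream: ' + volumeEvent.volumeType);
  console.log('Volume level: ' + volumeEvent.volume);
});
```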
```
audioManager.on('ringerModeChange', (ringerMode) => {
  console.log('Updated ringermode: ' + ringerMode);
});
```
```
audioManager.on('deviceChange', (deviceChanged) => {
  console.info("device change type : " + deviceChanged.type);
  console.info("device descriptor size : " + deviceChanged.deviceDescriptors.length);
  console.info("device change descriptor : " + deviceChanged.deviceDescriptors[0].deviceRole);
  console.info("device change descriptor : " + deviceChanged.deviceDescriptors[0].deviceType);
});
```
```
audioManager.off('deviceChange', (deviceChanged) => {
  console.log("Should be no callback.");
});
```
```
var interAudioInterrupt = {
  streamUsage:2,
  contentType:0,
  pauseWhenDucked:true
};
audioManager.on('interrupt', interAudioInterrupt, (InterruptAction) => {
  if (InterruptAction.actionType === 0) {
    console.log("An event to gain the audio focus starts.");
    console.log("Focus gain event:" + JSON.stringify(InterruptAction));
  }
  if (InterruptAction.actionType === 1) {
    console.log("An audio interruption event starts.");
    console.log("Audio interruption event:" + JSON.stringify(InterruptAction));
  }
});
```
```
var interAudioInterrupt = {
  streamUsage:2,
  contentType:0,
  pauseWhenDucked:true
};
audioManager.off('interrupt', interAudioInterrupt, (InterruptAction) => {
  if (InterruptAction.actionType === 0) {
    console.log("An event to release the audio focus starts.");
    console.log("Focus release event:" + JSON.stringify(InterruptAction));
  }
});
```
```
audioManager.setAudioScene(audio.AudioScene.AUDIO_SCENE_PHONE_CALL, (err) => {
  if (err) {
    console.error(`Failed to set the audio scene mode. ${err.message}`);
    return;
  }
  console.log('Callback invoked to indicate a successful setting of the audio scene mode.');
});
```
```
audioManager.setAudioScene(audio.AudioScene.AUDIO_SCENE_PHONE_CALL).then(() => {
  console.log('Promise returned to indicate a successful setting of the audio scene mode.');
}).catch((err) => {
  console.log('Failed to set the audio scene mode');
});
```
```
audioManager.getAudioScene((err, value) => {
  if (err) {
    console.error(`Failed to obtain the audio scene mode. ${err.message}`);
    return;
  }
  console.log('Callback invoked to indicate that the audio scene mode is obtained.' + value);
});
```
```
audioManager.getAudioScene().then((value) => {
  console.log('Promise returned to indicate that the audio scene mode is obtained.' + value);
}).catch((err) => {
  console.log('Failed to obtain the audio scene mode');
});
```
## AudioStreamManager<sup>9+</sup>
Implements audio stream management. Before using the AudioStreamManager APIs, you must call [getStreamManager](#audiogetstreammanager9) to obtain an AudioStreamManager instance.
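
The examples in this section assume that an audioStreamManager instance has already been obtained, for example as in the minimal setup sketch below (based on the getStreamManager API documented above).

```
import audio from '@ohos.multimedia.audio';

// Obtain the stream manager once and reuse it in the examples that follow.
var audioStreamManager = audio.getStreamManager();
```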
### getCurrentAudioRendererInfoArray<sup>9+</sup>
getCurrentAudioRendererInfoArray(callback: AsyncCallback&lt;AudioRendererChangeInfoArray&gt;): void
Obtains information about the current audio renderers. This API uses an asynchronous callback to return the result.

**System capability:** SystemCapability.Multimedia.Audio.Renderer

**Parameters:**

| Name     | Type                                | Mandatory | Description                 |
| -------- | ----------------------------------- | -------- | --------------------------- |
| callback | AsyncCallback<[AudioRendererChangeInfoArray](#audiorendererchangeinfoarray9)> | Yes | Callback used to return the information about the current audio renderers. |

**Example:**
```
audioStreamManager.getCurrentAudioRendererInfoArray(async (err, AudioRendererChangeInfoArray) => {
console.info('[GET_RENDERER_STATE_1_CALLBACK] **** Get Callback Called ****');
if (err) {
console.log('getCurrentAudioRendererInfoArray :ERROR: '+err.message);
resultFlag = false;
}
else {
if (AudioRendererChangeInfoArray !=null) {
for (let i=0;i<AudioRendererChangeInfoArray.length;i++) {
AudioRendererChangeInfo = AudioRendererChangeInfoArray[i];
console.info('StreamId for '+i+' is:'+AudioRendererChangeInfo.streamId);
console.info('ClientUid for '+i+' is:'+AudioRendererChangeInfo.clientUid);
console.info('Content '+i+' is:'+AudioRendererChangeInfo.rendererInfo.content);
console.info('Stream'+i+' is:'+AudioRendererChangeInfo.rendererInfo.usage);
console.info('Flag'+i+' is:'+AudioRendererChangeInfo.rendererInfo.rendererFlags);
console.info('State for '+i+' is:'+AudioRendererChangeInfo.rendererState);
var devDescriptor = AudioRendererChangeInfo.deviceDescriptors;
for (let j=0;j<AudioRendererChangeInfo.deviceDescriptors.length; j++) {
console.info('Id:'+i+':'+AudioRendererChangeInfo.deviceDescriptors[j].id);
console.info('Type:'+i+':'+AudioRendererChangeInfo.deviceDescriptors[j].deviceType);
console.info('Role:'+i+':'+AudioRendererChangeInfo.deviceDescriptors[j].deviceRole);
console.info('Name:'+i+':'+AudioRendererChangeInfo.deviceDescriptors[j].name);
console.info('Address:'+i+':'+AudioRendererChangeInfo.deviceDescriptors[j].address);
console.info('SampleRates:'+i+':'+AudioRendererChangeInfo.deviceDescriptors[j].sampleRates[0]);
console.info('ChannelCount'+i+':'+AudioRendererChangeInfo.deviceDescriptors[j].channelCounts[0]);
console.info('ChannelMask:'+i+':'+AudioRendererChangeInfo.deviceDescriptors[j].channelMasks);
}
}
}
}
});
```
### getCurrentAudioRendererInfoArray<sup>9+</sup>
getCurrentAudioRendererInfoArray(): Promise&lt;AudioRendererChangeInfoArray&gt;
Obtains information about the current audio renderers. This API uses a promise to return the result.

**System capability:** SystemCapability.Multimedia.Audio.Renderer

**Return value:**

| Type                                                                               | Description                             |
| ---------------------------------------------------------------------------------| --------------------------------------- |
| Promise<[AudioRendererChangeInfoArray](#audiorendererchangeinfoarray9)> | Promise used to return the information about the current audio renderers. |

**Example:**
```
audioStreamManager.getCurrentAudioRendererInfoArray().then( function (AudioRendererChangeInfoArray) {
console.info('[GET_RENDERER_STATE_3_PROMISE] ######### Get Promise is called ##########');
if (AudioRendererChangeInfoArray!=null) {
for (let i=0;i<AudioRendererChangeInfoArray.length;i++) {
AudioRendererChangeInfo = AudioRendererChangeInfoArray[i];
console.info('StreamId for '+i+' is:'+AudioRendererChangeInfo.streamId);
console.info('ClientUid for '+i+' is:'+AudioRendererChangeInfo.clientUid);
console.info('Content '+i+' is:'+AudioRendererChangeInfo.rendererInfo.content);
console.info('Stream'+i+' is:'+AudioRendererChangeInfo.rendererInfo.usage);
console.info('Flag'+i+' is:'+AudioRendererChangeInfo.rendererInfo.rendererFlags);
console.info('State for '+i+' is:'+AudioRendererChangeInfo.rendererState);
var devDescriptor = AudioRendererChangeInfo.deviceDescriptors;
for (let j=0;j<AudioRendererChangeInfo.deviceDescriptors.length; j++) {
console.info('Id:'+i+':'+AudioRendererChangeInfo.deviceDescriptors[j].id);
console.info('Type:'+i+':'+AudioRendererChangeInfo.deviceDescriptors[j].deviceType);
console.info('Role:'+i+':'+AudioRendererChangeInfo.deviceDescriptors[j].deviceRole);
console.info('Name:'+i+':'+AudioRendererChangeInfo.deviceDescriptors[j].name);
console.info('Address:'+i+':'+AudioRendererChangeInfo.deviceDescriptors[j].address);
console.info('SampleRates:'+i+':'+AudioRendererChangeInfo.deviceDescriptors[j].sampleRates[0]);
console.info('ChannelCounts'+i+':'+AudioRendererChangeInfo.deviceDescriptors[j].channelCounts[0]);
console.info('ChannnelMask:'+i+':'+AudioRendererChangeInfo.deviceDescriptors[j].channelMasks);
}
}
}
}).catch((err) => {
console.log('getCurrentAudioRendererInfoArray :ERROR: '+err.message);
resultFlag = false;
});
```
### getCurrentAudioCapturerInfoArray<sup>9+</sup>
getCurrentAudioCapturerInfoArray(callback: AsyncCallback&lt;AudioCapturerChangeInfoArray&gt;): void
Obtains information about the current audio capturers. This API uses an asynchronous callback to return the result.

**System capability:** SystemCapability.Multimedia.Audio.Renderer

**Parameters:**

| Name       | Type                                | Mandatory | Description                                                |
| ---------- | ----------------------------------- | --------- | -------------------------------------------------------- |
| callback | AsyncCallback<[AudioCapturerChangeInfoArray](#audiocapturerchangeinfoarray9)> | Yes | Callback used to return the information about the current audio capturers. |

**Example:**
```
audioStreamManager.getCurrentAudioCapturerInfoArray(async (err, AudioCapturerChangeInfoArray) => {
console.info('[GET_CAPTURER_STATE_1_CALLBACK] **** Get Callback Called ****');
if (err) {
console.log('getCurrentAudioCapturerInfoArray :ERROR: '+err.message);
resultFlag = false;
}
else {
if (AudioCapturerChangeInfoArray !=null) {
for (let i=0;i<AudioCapturerChangeInfoArray.length;i++) {
console.info('StreamId for '+i+'is:'+AudioCapturerChangeInfoArray[i].streamId);
console.info('ClientUid for '+i+'is:'+AudioCapturerChangeInfoArray[i].clientUid);
console.info('Source for '+i+'is:'+AudioCapturerChangeInfoArray[i].capturerInfo.source);
console.info('Flag '+i+'is:'+AudioCapturerChangeInfoArray[i].capturerInfo.capturerFlags);
console.info('State for '+i+'is:'+AudioCapturerChangeInfoArray[i].capturerState);
var devDescriptor = AudioCapturerChangeInfoArray[i].deviceDescriptors;
for (let j=0;j<AudioCapturerChangeInfoArray[i].deviceDescriptors.length; j++) {
console.info('Id:'+i+':'+AudioCapturerChangeInfoArray[i].deviceDescriptors[j].id);
console.info('Type:'+i+':'+AudioCapturerChangeInfoArray[i].deviceDescriptors[j].deviceType);
console.info('Role:'+i+':'+AudioCapturerChangeInfoArray[i].deviceDescriptors[j].deviceRole);
console.info('Name:'+i+':'+AudioCapturerChangeInfoArray[i].deviceDescriptors[j].name);
console.info('Address:'+i+':'+AudioCapturerChangeInfoArray[i].deviceDescriptors[j].address);
console.info('SampleRates:'+i+':'+AudioCapturerChangeInfoArray[i].deviceDescriptors[j].sampleRates[0]);
console.info('ChannelCounts'+i+':'+AudioCapturerChangeInfoArray[i].deviceDescriptors[j].channelCounts[0]);
console.info('ChannelMask:'+i+':'+AudioCapturerChangeInfoArray[i].deviceDescriptors[j].channelMasks);
}
}
}
}
});
```
### getCurrentAudioCapturerInfoArray<sup>9+</sup>
getCurrentAudioCapturerInfoArray(): Promise&lt;AudioCapturerChangeInfoArray&gt;
Obtains information about the current audio capturers. This API uses a promise to return the result.

**System capability:** SystemCapability.Multimedia.Audio.Renderer

**Return value:**

| Type                                                                           | Description                         |
| -----------------------------------------------------------------------------| ----------------------------------- |
| Promise<[AudioCapturerChangeInfoArray](#audiocapturerchangeinfoarray9)> | Promise used to return the information about the current audio capturers. |

**Example:**
```
audioStreamManagerCB.getCurrentAudioCapturerInfoArray().then( function (AudioCapturerChangeInfoArray) {
console.info('AFCapturerChangeLog: [GET_CAP_STA_1_PR] **** Get Promise Called ****');
if (AudioCapturerChangeInfoArray!=null) {
for (let i=0;i<AudioCapturerChangeInfoArray.length;i++) {
console.info('StreamId for '+i+'is:'+AudioCapturerChangeInfoArray[i].streamId);
console.info('ClientUid for '+i+'is:'+AudioCapturerChangeInfoArray[i].clientUid);
console.info('Source for '+i+'is:'+AudioCapturerChangeInfoArray[i].capturerInfo.source);
console.info('Flag '+i+'is:'+AudioCapturerChangeInfoArray[i].capturerInfo.capturerFlags);
console.info('State for '+i+'is:'+AudioCapturerChangeInfoArray[i].capturerState);
var devDescriptor = AudioCapturerChangeInfoArray[i].deviceDescriptors;
for (let j=0;j<AudioCapturerChangeInfoArray[i].deviceDescriptors.length; j++) {
console.info('Id:'+i+':'+AudioCapturerChangeInfoArray[i].deviceDescriptors[j].id);
console.info('Type:'+i+':'+AudioCapturerChangeInfoArray[i].deviceDescriptors[j].deviceType);
console.info('Role:'+i+':'+AudioCapturerChangeInfoArray[i].deviceDescriptors[j].deviceRole);
console.info('Name:'+i+':'+AudioCapturerChangeInfoArray[i].deviceDescriptors[j].name)
console.info('Address:'+i+':'+AudioCapturerChangeInfoArray[i].deviceDescriptors[j].address);
console.info('SampleRates:'+i+':'+AudioCapturerChangeInfoArray[i].deviceDescriptors[j].sampleRates[0]);
console.info('ChannelCounts'+i+':'+AudioCapturerChangeInfoArray[i].deviceDescriptors[j].channelCounts[0]);
console.info('ChannelMask:'+i+':'+AudioCapturerChangeInfoArray[i].deviceDescriptors[j].channelMasks);
}
}
}
}).catch((err) => {
console.log('getCurrentAudioCapturerInfoArray :ERROR: '+err.message);
resultFlag = false;
});
```
### on('audioRendererChange')<sup>9+</sup>
on(type: "audioRendererChange", callback: Callback&lt;AudioRendererChangeInfoArray&gt;): void
Subscribes to audio renderer change events.

**System capability:** SystemCapability.Multimedia.Audio.Renderer

**Parameters:**

| Name     | Type       | Mandatory | Description                                                                |
| -------- | ---------- | --------- | ------------------------------------------------------------------------ |
| type     | string     | Yes       | Event type. The event `'audioRendererChange'` is triggered when an audio renderer changes. |
| callback | Callback<[AudioRendererChangeInfoArray](#audiorendererchangeinfoarray9)> | Yes | Callback used to return the result. |

**Example:**
```
audioStreamManagerCB.on('audioRendererChange', (AudioRendererChangeInfoArray) => {
for (let i=0;i<AudioRendererChangeInfoArray.length;i++) {
AudioRendererChangeInfo = AudioRendererChangeInfoArray[i];
console.info('## RendererChange on is called for '+i+' ##');
console.info('StreamId for '+i+' is:'+AudioRendererChangeInfo.streamId);
console.info('ClientUid for '+i+' is:'+AudioRendererChangeInfo.clientUid);
console.info('Content for '+i+' is:'+AudioRendererChangeInfo.rendererInfo.content);
console.info('Stream for '+i+' is:'+AudioRendererChangeInfo.rendererInfo.usage);
console.info('Flag '+i+' is:'+AudioRendererChangeInfo.rendererInfo.rendererFlags);
console.info('State for '+i+' is:'+AudioRendererChangeInfo.rendererState);
var devDescriptor = AudioRendererChangeInfo.deviceDescriptors;
for (let j=0;j<AudioRendererChangeInfo.deviceDescriptors.length; j++) {
console.info('Id:'+i+':'+AudioRendererChangeInfo.deviceDescriptors[j].id);
console.info('Type:'+i+':'+AudioRendererChangeInfo.deviceDescriptors[j].deviceType);
console.info('Role:'+i+':'+AudioRendererChangeInfo.deviceDescriptors[j].deviceRole);
console.info('Name:'+i+':'+AudioRendererChangeInfo.deviceDescriptors[j].name);
console.info('Address:'+i+':'+AudioRendererChangeInfo.deviceDescriptors[j].address);
console.info('SampleRates:'+i+':'+AudioRendererChangeInfo.deviceDescriptors[j].sampleRates[0]);
console.info('ChannelCounts'+i+':'+AudioRendererChangeInfo.deviceDescriptors[j].channelCounts[0]);
console.info('ChannelMask:'+i+':'+AudioRendererChangeInfo.deviceDescriptors[j].channelMasks);
}
}
});
```
### off('audioRendererChange')<sup>9+</sup>
off(type: "audioRendererChange");
Unsubscribes from audio renderer change events.

**System capability:** SystemCapability.Multimedia.Audio.Renderer

**Parameters:**

| Name     | Type    | Mandatory | Description      |
| -------- | ------- | ---- | ---------------- |
| type     | string  | Yes  | Event type. The value `'audioRendererChange'` means the audio renderer change event. |

**Example:**
```
audioStreamManagerCB.off('audioRendererChange');
console.info('[RENDERER-CHANGE-ON-001] ######### RendererChange Off is called #########');
```
### on('audioCapturerChange')<sup>9+</sup>
on(type: "audioCapturerChange", callback: Callback&lt;AudioCapturerChangeInfoArray&gt;): void
Subscribes to audio capturer change events.

**System capability:** SystemCapability.Multimedia.Audio.Capturer

**Parameters:**

| Name     | Type    | Mandatory | Description                                                               |
| -------- | ------- | --------- | ------------------------------------------------------------------------ |
| type     | string  | Yes       | Event type. The event `'audioCapturerChange'` is triggered when an audio capturer changes. |
| callback | Callback<[AudioCapturerChangeInfoArray](#audiocapturerchangeinfoarray9)> | Yes | Callback used to return the result. |

**Example:**
```
audioStreamManager.on('audioCapturerChange', (AudioCapturerChangeInfoArray) => {
for (let i=0;i<AudioCapturerChangeInfoArray.length;i++) {
console.info(' ## CapChange on is called for element '+i+' ##');
console.info('StreamId for '+i+'is:'+AudioCapturerChangeInfoArray[i].streamId);
console.info('ClientUid for '+i+'is:'+AudioCapturerChangeInfoArray[i].clientUid);
console.info('Source for '+i+'is:'+AudioCapturerChangeInfoArray[i].capturerInfo.source);
console.info('Flag '+i+'is:'+AudioCapturerChangeInfoArray[i].capturerInfo.capturerFlags);
console.info('State for '+i+'is:'+AudioCapturerChangeInfoArray[i].capturerState);
for (let j=0;j<AudioCapturerChangeInfoArray[i].deviceDescriptors.length; j++) {
console.info('Id:'+i+':'+AudioCapturerChangeInfoArray[i].deviceDescriptors[j].id);
console.info('Type:'+i+':'+AudioCapturerChangeInfoArray[i].deviceDescriptors[j].deviceType);
console.info('Role:'+i+':'+AudioCapturerChangeInfoArray[i].deviceDescriptors[j].deviceRole);
console.info('Name:'+i+':'+AudioCapturerChangeInfoArray[i].deviceDescriptors[j].name);
console.info('Address:'+i+':'+AudioCapturerChangeInfoArray[i].deviceDescriptors[j].address);
console.info('SampleRates:'+i+':'+AudioCapturerChangeInfoArray[i].deviceDescriptors[j].sampleRates[0]);
console.info('ChannelCounts'+i+':'+AudioCapturerChangeInfoArray[i].deviceDescriptors[j].channelCounts[0]);
console.info('ChannelMask:'+i+':'+AudioCapturerChangeInfoArray[i].deviceDescriptors[j].channelMasks);
}
}
});
```
### off('audioCapturerChange')<sup>9+</sup>
off(type: "audioCapturerChange");
Unsubscribes from audio capturer change events.

**System capability:** SystemCapability.Multimedia.Audio.Capturer

**Parameters:**

| Name     | Type     | Mandatory | Description                                                     |
| -------- | -------- | --- | ------------------------------------------------------------- |
| type     | string   | Yes | Event type. The value `'audioCapturerChange'` means the audio capturer change event. |

**Example:**
```
audioStreamManager.off('audioCapturerChange');
console.info('[GET_CAPTURER_STATE_2_PROMISE] ######### CapturerChange Off is called #########');
```
## AudioRendererChangeInfo<sup>9+</sup>
Describes the audio renderer change information.

**System capability:** SystemCapability.Multimedia.Audio.Renderer (applies to each item in the table below)

| Name               | Type                                       | Readable | Writable | Description                  |
| -------------------| ----------------------------------------- | ---- | ---- | ---------------------------- |
| streamId           | number                                     | Yes  | No   | Unique ID of the audio stream. |
| clientUid          | number                                     | Yes  | No   | UID of the audio renderer client application.<br/>This is a system API and is not available to third-party applications. |
| rendererInfo       | [AudioRendererInfo](#audiorendererinfo8)   | Yes  | No   | Audio renderer information. |
| rendererState      | [AudioState](#audiostate)                  | Yes  | No   | Audio state.<br/>This is a system API and is not available to third-party applications. |
## AudioRendererChangeInfoArray<sup>9+</sup>
Defines an AudioRendererChangeInfo array, which is read-only.

**System capability:** SystemCapability.Multimedia.Audio.Renderer

**Example:**
```
import audio from '@ohos.multimedia.audio';
var audioStreamManager;
var audioStreamManagerCB;
var Tag = "AFCapLog : ";
await audioManager.getStreamManager().then(async function (data) {
audioStreamManager = data;
console.info(Tag+'Get AudioStream Manager : Success ');
}).catch((err) => {
console.info(Tag+'Get AudioStream Manager : ERROR : '+err.message);
});
audioManager.getStreamManager((err, data) => {
if (err) {
console.error(Tag+'Get AudioStream Manager : ERROR : '+err.message);
}
else {
audioStreamManagerCB = data;
console.info(Tag+'Get AudioStream Manager : Success ');
}
});
audioStreamManagerCB.on('audioRendererChange', (AudioRendererChangeInfoArray) => {
for (let i=0;i<AudioRendererChangeInfoArray.length;i++) {
console.info(Tag+'## RendererChange on is called for '+i+' ##');
console.info(Tag+'StreamId for '+i+' is:'+AudioRendererChangeInfoArray[i].streamId);
console.info(Tag+'ClientUid for '+i+' is:'+AudioRendererChangeInfoArray[i].clientUid);
console.info(Tag+'Content for '+i+' is:'+AudioRendererChangeInfoArray[i].rendererInfo.content);
console.info(Tag+'Stream for '+i+' is:'+AudioRendererChangeInfoArray[i].rendererInfo.usage);
console.info(Tag+'Flag '+i+' is:'+AudioRendererChangeInfoArray[i].rendererInfo.rendererFlags);
console.info(Tag+'State for '+i+' is:'+AudioRendererChangeInfoArray[i].rendererState);
var devDescriptor = AudioRendererChangeInfoArray[i].deviceDescriptors;
for (let j=0;j<AudioRendererChangeInfoArray[i].deviceDescriptors.length; j++) {
console.info(Tag+'Id:'+i+':'+AudioRendererChangeInfoArray[i].deviceDescriptors[j].id);
console.info(Tag+'Type:'+i+':'+AudioRendererChangeInfoArray[i].deviceDescriptors[j].deviceType);
console.info(Tag+'Role:'+i+':'+AudioRendererChangeInfoArray[i].deviceDescriptors[j].deviceRole);
console.info(Tag+'Name:'+i+':'+AudioRendererChangeInfoArray[i].deviceDescriptors[j].name);
console.info(Tag+'Addr:'+i+':'+AudioRendererChangeInfoArray[i].deviceDescriptors[j].address);
console.info(Tag+'SR:'+i+':'+AudioRendererChangeInfoArray[i].deviceDescriptors[j].sampleRates[0]);
console.info(Tag+'C'+i+':'+AudioRendererChangeInfoArray[i].deviceDescriptors[j].channelCounts[0]);
console.info(Tag+'CM:'+i+':'+AudioRendererChangeInfoArray[i].deviceDescriptors[j].channelMasks);
}
if (AudioRendererChangeInfoArray[i].rendererState == 1 && devDescriptor != null) {
resultFlag = true;
console.info(Tag+'[RENDERER-CHANGE-ON-001] ResultFlag for '+i+' is:'+resultFlag);
}
}
});
```
## AudioCapturerChangeInfo<sup>9+</sup>
描述音频捕获器更改信息。
**系统能力:** 以下各项对应的系统能力均为SystemCapability.Multimedia.Audio.Capturer
| 名称 | 类型 | 可读 | 可写 | 说明 |
| -------------------| ----------------------------------------- | ---- | ---- | ---------------------------- |
| streamId | number | 是 | 否 | 音频流唯一id。 |
| clientUid | number | 是 | 否 | 音频渲染器客户端应用程序的Uid。<br/>此接口为系统接口,三方应用不支持调用。 |
| capturerInfo | [AudioCapturerInfo](#audiocaptureinfo8) | 是 | 否 | 音频渲染器信息。 |
| capturerState | [AudioState](#audiostate) | 是 | 否 | 音频状态。<br/>此接口为系统接口,三方应用不支持调用。|
## AudioCapturerChangeInfoArray<sup>9+</sup>
AudioCapturerChangeInfo数组,只读。
**系统能力:** SystemCapability.Multimedia.Audio.Capturer
**示例:**
```
import audio from '@ohos.multimedia.audio';
var Tag = "AFCapLog : ";
const audioManager = audio.getAudioManager();
audioStreamManager.on('audioCapturerChange', (AudioCapturerChangeInfoArray) => {
for (let i=0;i<AudioCapturerChangeInfoArray.length;i++) {
console.info(Tag+' ## CapChange on is called for element '+i+' ##');
console.info(Tag+'StrId for '+i+'is:'+AudioCapturerChangeInfoArray[i].streamId);
console.info(Tag+'CUid for '+i+'is:'+AudioCapturerChangeInfoArray[i].clientUid);
console.info(Tag+'Src for '+i+'is:'+AudioCapturerChangeInfoArray[i].capturerInfo.source);
console.info(Tag+'Flag '+i+'is:'+AudioCapturerChangeInfoArray[i].capturerInfo.capturerFlags);
console.info(Tag+'State for '+i+'is:'+AudioCapturerChangeInfoArray[i].capturerState);
var devDescriptor = AudioCapturerChangeInfoArray[i].deviceDescriptors;
for (let j=0;j<AudioCapturerChangeInfoArray[i].deviceDescriptors.length; j++) {
console.info(Tag+'Id:'+i+':'+AudioCapturerChangeInfoArray[i].deviceDescriptors[j].id);
console.info(Tag+'Type:'+i+':'+AudioCapturerChangeInfoArray[i].deviceDescriptors[j].deviceType);
console.info(Tag+'Role:'+i+':'+AudioCapturerChangeInfoArray[i].deviceDescriptors[j].deviceRole);
console.info(Tag+'Name:'+i+':'+AudioCapturerChangeInfoArray[i].deviceDescriptors[j].name);
console.info(Tag+'Addr:'+i+':'+AudioCapturerChangeInfoArray[i].deviceDescriptors[j].address);
console.info(Tag+'SR:'+i+':'+AudioCapturerChangeInfoArray[i].deviceDescriptors[j].sampleRates[0]);
console.info(Tag+'C'+i+':'+AudioCapturerChangeInfoArray[i].deviceDescriptors[j].channelCounts[0]);
console.info(Tag+'CM:'+i+':'+AudioCapturerChangeInfoArray[i].deviceDescriptors[j].channelMasks);
}
if (AudioCapturerChangeInfoArray[i].capturerState == 1 && devDescriptor != null) {
resultFlag = true;
console.info(Tag+'[CAPTURER-CHANGE-ON-001] ResultFlag for element '+i +' is: '+ resultFlag);
}
}
}); });
``` ```
...@@ -1845,10 +2314,16 @@ audioManager.getAudioScene().then((value) => { ...@@ -1845,10 +2314,16 @@ audioManager.getAudioScene().then((value) => {
**系统能力:** 以下各项对应的系统能力均为SystemCapability.Multimedia.Audio.Device **系统能力:** 以下各项对应的系统能力均为SystemCapability.Multimedia.Audio.Device
| 名称 | 类型 | 可读 | 可写 | 说明 | | 名称 | 类型 | 可读 | 可写 | 说明 |
| ---------- | ------------------------- | ---- | ---- | ---------- | | -------------------------- | -------------------------- | ---- | ---- | ---------- |
| deviceRole | [DeviceRole](#devicerole) | 是 | 否 | 设备角色。 | | deviceRole | [DeviceRole](#devicerole) | 是 | 否 | 设备角色。 |
| deviceType | [DeviceType](#devicetype) | 是 | 否 | 设备类型。 | | deviceType | [DeviceType](#devicetype) | 是 | 否 | 设备类型。 |
| id<sup>9+</sup> | number | 是 | 否 | 设备id。 |
| name<sup>9+</sup> | string | 是 | 否 | 设备名称。 |
| address<sup>9+</sup> | string | 是 | 否 | 设备地址。 |
| sampleRates<sup>9+</sup> | Array&lt;number&gt; | 是 | 否 | 支持的采样率。 |
| channelCounts<sup>9+</sup> | Array&lt;number&gt; | 是 | 否 | 支持的通道数。 |
| channelMasks<sup>9+</sup> | Array&lt;number&gt; | 是 | 否 | 支持的通道掩码。 |
## AudioDeviceDescriptors ## AudioDeviceDescriptors
...@@ -1860,23 +2335,22 @@ audioManager.getAudioScene().then((value) => { ...@@ -1860,23 +2335,22 @@ audioManager.getAudioScene().then((value) => {
import audio from '@ohos.multimedia.audio'; import audio from '@ohos.multimedia.audio';
function displayDeviceProp(value) { function displayDeviceProp(value) {
deviceRoleValue = value.deviceRole; deviceRoleValue = value.deviceRole;
deviceTypeValue = value.deviceType; deviceTypeValue = value.deviceType;
} }
var deviceRoleValue = null; var deviceRoleValue = null;
var deviceTypeValue = null; var deviceTypeValue = null;
const promise = audio.getAudioManager().getDevices(1); const promise = audio.getAudioManager().getDevices(1);
promise.then(function (value) { promise.then(function (value) {
console.info('AudioFrameworkTest: Promise: getDevices OUTPUT_DEVICES_FLAG'); console.info('AudioFrameworkTest: Promise: getDevices OUTPUT_DEVICES_FLAG');
value.forEach(displayDeviceProp); value.forEach(displayDeviceProp);
if (deviceTypeValue != null && deviceRoleValue != null){ if (deviceTypeValue != null && deviceRoleValue != null){
console.info('AudioFrameworkTest: Promise: getDevices : OUTPUT_DEVICES_FLAG : PASS'); console.info('AudioFrameworkTest: Promise: getDevices : OUTPUT_DEVICES_FLAG : PASS');
} }
else{ else{
console.info('AudioFrameworkTest: Promise: getDevices : OUTPUT_DEVICES_FLAG : FAIL'); console.info('AudioFrameworkTest: Promise: getDevices : OUTPUT_DEVICES_FLAG : FAIL');
} }
}); });
``` ```
...@@ -1904,7 +2378,7 @@ getRendererInfo(callback: AsyncCallback<AudioRendererInfo\>): void ...@@ -1904,7 +2378,7 @@ getRendererInfo(callback: AsyncCallback<AudioRendererInfo\>): void
获取当前被创建的音频渲染器的信息,使用callback方式异步返回结果。 获取当前被创建的音频渲染器的信息,使用callback方式异步返回结果。
**系统能力**: SystemCapability.Multimedia.Audio.Renderer **系统能力:** SystemCapability.Multimedia.Audio.Renderer
**参数:** **参数:**
...@@ -1916,10 +2390,10 @@ getRendererInfo(callback: AsyncCallback<AudioRendererInfo\>): void ...@@ -1916,10 +2390,10 @@ getRendererInfo(callback: AsyncCallback<AudioRendererInfo\>): void
``` ```
audioRenderer.getRendererInfo((err, rendererInfo) => { audioRenderer.getRendererInfo((err, rendererInfo) => {
console.log('Renderer GetRendererInfo:'); console.log('Renderer GetRendererInfo:');
console.log('Renderer content:' + rendererInfo.content); console.log('Renderer content:' + rendererInfo.content);
console.log('Renderer usage:' + rendererInfo.usage); console.log('Renderer usage:' + rendererInfo.usage);
console.log('Renderer flags:' + rendererInfo.rendererFlags); console.log('Renderer flags:' + rendererInfo.rendererFlags);
}); });
``` ```
...@@ -1929,7 +2403,7 @@ getRendererInfo(): Promise<AudioRendererInfo\> ...@@ -1929,7 +2403,7 @@ getRendererInfo(): Promise<AudioRendererInfo\>
获取当前被创建的音频渲染器的信息,使用Promise方式异步返回结果。 获取当前被创建的音频渲染器的信息,使用Promise方式异步返回结果。
**系统能力**: SystemCapability.Multimedia.Audio.Renderer **系统能力:** SystemCapability.Multimedia.Audio.Renderer
**返回值:** **返回值:**
...@@ -1942,13 +2416,13 @@ getRendererInfo(): Promise<AudioRendererInfo\> ...@@ -1942,13 +2416,13 @@ getRendererInfo(): Promise<AudioRendererInfo\>
``` ```
var resultFlag = true; var resultFlag = true;
audioRenderer.getRendererInfo().then((rendererInfo) => { audioRenderer.getRendererInfo().then((rendererInfo) => {
console.log('Renderer GetRendererInfo:'); console.log('Renderer GetRendererInfo:');
console.log('Renderer content:' + rendererInfo.content); console.log('Renderer content:' + rendererInfo.content);
console.log('Renderer usage:' + rendererInfo.usage); console.log('Renderer usage:' + rendererInfo.usage);
console.log('Renderer flags:' + rendererInfo.rendererFlags); console.log('Renderer flags:' + rendererInfo.rendererFlags);
}).catch((err) => { }).catch((err) => {
console.log('AudioFrameworkRenderLog: RendererInfo :ERROR: '+err.message); console.log('AudioFrameworkRenderLog: RendererInfo :ERROR: '+err.message);
resultFlag = false; resultFlag = false;
}); });
``` ```
...@@ -1958,7 +2432,7 @@ getStreamInfo(callback: AsyncCallback<AudioStreamInfo\>): void ...@@ -1958,7 +2432,7 @@ getStreamInfo(callback: AsyncCallback<AudioStreamInfo\>): void
获取音频流信息,使用callback方式异步返回结果。 获取音频流信息,使用callback方式异步返回结果。
**系统能力**: SystemCapability.Multimedia.Audio.Renderer **系统能力:** SystemCapability.Multimedia.Audio.Renderer
**参数:** **参数:**
...@@ -1970,11 +2444,11 @@ getStreamInfo(callback: AsyncCallback<AudioStreamInfo\>): void ...@@ -1970,11 +2444,11 @@ getStreamInfo(callback: AsyncCallback<AudioStreamInfo\>): void
``` ```
audioRenderer.getStreamInfo((err, streamInfo) => { audioRenderer.getStreamInfo((err, streamInfo) => {
console.log('Renderer GetStreamInfo:'); console.log('Renderer GetStreamInfo:');
console.log('Renderer sampling rate:' + streamInfo.samplingRate); console.log('Renderer sampling rate:' + streamInfo.samplingRate);
console.log('Renderer channel:' + streamInfo.channels); console.log('Renderer channel:' + streamInfo.channels);
console.log('Renderer format:' + streamInfo.sampleFormat); console.log('Renderer format:' + streamInfo.sampleFormat);
console.log('Renderer encoding type:' + streamInfo.encodingType); console.log('Renderer encoding type:' + streamInfo.encodingType);
}); });
``` ```
...@@ -1984,7 +2458,7 @@ getStreamInfo(): Promise<AudioStreamInfo\> ...@@ -1984,7 +2458,7 @@ getStreamInfo(): Promise<AudioStreamInfo\>
获取音频流信息,使用Promise方式异步返回结果。 获取音频流信息,使用Promise方式异步返回结果。
**系统能力**: SystemCapability.Multimedia.Audio.Renderer **系统能力:** SystemCapability.Multimedia.Audio.Renderer
**返回值:** **返回值:**
...@@ -1996,13 +2470,13 @@ getStreamInfo(): Promise<AudioStreamInfo\> ...@@ -1996,13 +2470,13 @@ getStreamInfo(): Promise<AudioStreamInfo\>
``` ```
audioRenderer.getStreamInfo().then((streamInfo) => { audioRenderer.getStreamInfo().then((streamInfo) => {
console.log('Renderer GetStreamInfo:'); console.log('Renderer GetStreamInfo:');
console.log('Renderer sampling rate:' + streamInfo.samplingRate); console.log('Renderer sampling rate:' + streamInfo.samplingRate);
console.log('Renderer channel:' + streamInfo.channels); console.log('Renderer channel:' + streamInfo.channels);
console.log('Renderer format:' + streamInfo.sampleFormat); console.log('Renderer format:' + streamInfo.sampleFormat);
console.log('Renderer encoding type:' + streamInfo.encodingType); console.log('Renderer encoding type:' + streamInfo.encodingType);
}).catch((err) => { }).catch((err) => {
console.log('ERROR: '+err.message); console.log('ERROR: '+err.message);
}); });
``` ```
...@@ -2012,7 +2486,7 @@ start(callback: AsyncCallback<void\>): void ...@@ -2012,7 +2486,7 @@ start(callback: AsyncCallback<void\>): void
启动音频渲染器。使用callback方式异步返回结果。 启动音频渲染器。使用callback方式异步返回结果。
**系统能力**: SystemCapability.Multimedia.Audio.Renderer **系统能力:** SystemCapability.Multimedia.Audio.Renderer
**参数:** **参数:**
...@@ -2024,11 +2498,11 @@ start(callback: AsyncCallback<void\>): void ...@@ -2024,11 +2498,11 @@ start(callback: AsyncCallback<void\>): void
``` ```
audioRenderer.start((err) => { audioRenderer.start((err) => {
if (err) { if (err) {
console.error('Renderer start failed.'); console.error('Renderer start failed.');
} else { } else {
console.info('Renderer start success.'); console.info('Renderer start success.');
} }
}); });
``` ```
...@@ -2038,7 +2512,7 @@ start(): Promise<void\> ...@@ -2038,7 +2512,7 @@ start(): Promise<void\>
启动音频渲染器。使用Promise方式异步返回结果。 启动音频渲染器。使用Promise方式异步返回结果。
**系统能力**: SystemCapability.Multimedia.Audio.Renderer **系统能力:** SystemCapability.Multimedia.Audio.Renderer
**返回值:** **返回值:**
...@@ -2050,9 +2524,9 @@ start(): Promise<void\> ...@@ -2050,9 +2524,9 @@ start(): Promise<void\>
``` ```
audioRenderer.start().then(() => { audioRenderer.start().then(() => {
console.log('Renderer started'); console.log('Renderer started');
}).catch((err) => { }).catch((err) => {
console.log('ERROR: '+err.message); console.log('ERROR: '+err.message);
}); });
``` ```
...@@ -2062,7 +2536,7 @@ pause(callback: AsyncCallback\<void>): void ...@@ -2062,7 +2536,7 @@ pause(callback: AsyncCallback\<void>): void
暂停渲染。使用callback方式异步返回结果。 暂停渲染。使用callback方式异步返回结果。
**系统能力**: SystemCapability.Multimedia.Audio.Renderer **系统能力:** SystemCapability.Multimedia.Audio.Renderer
**参数:** **参数:**
...@@ -2074,11 +2548,11 @@ pause(callback: AsyncCallback\<void>): void ...@@ -2074,11 +2548,11 @@ pause(callback: AsyncCallback\<void>): void
``` ```
audioRenderer.pause((err) => { audioRenderer.pause((err) => {
if (err) { if (err) {
console.error('Renderer pause failed'); console.error('Renderer pause failed');
} else { } else {
console.log('Renderer paused.'); console.log('Renderer paused.');
} }
}); });
``` ```
...@@ -2088,7 +2562,7 @@ pause(): Promise\<void> ...@@ -2088,7 +2562,7 @@ pause(): Promise\<void>
暂停渲染。使用Promise方式异步返回结果。 暂停渲染。使用Promise方式异步返回结果。
**系统能力**: SystemCapability.Multimedia.Audio.Renderer **系统能力:** SystemCapability.Multimedia.Audio.Renderer
**返回值:** **返回值:**
...@@ -2100,9 +2574,9 @@ pause(): Promise\<void> ...@@ -2100,9 +2574,9 @@ pause(): Promise\<void>
``` ```
audioRenderer.pause().then(() => { audioRenderer.pause().then(() => {
console.log('Renderer paused'); console.log('Renderer paused');
}).catch((err) => { }).catch((err) => {
console.log('ERROR: '+err.message); console.log('ERROR: '+err.message);
}); });
``` ```
...@@ -2112,7 +2586,7 @@ drain(callback: AsyncCallback\<void>): void ...@@ -2112,7 +2586,7 @@ drain(callback: AsyncCallback\<void>): void
检查缓冲区是否已被耗尽。使用callback方式异步返回结果。 检查缓冲区是否已被耗尽。使用callback方式异步返回结果。
**系统能力**: SystemCapability.Multimedia.Audio.Renderer **系统能力:** SystemCapability.Multimedia.Audio.Renderer
**参数:** **参数:**
...@@ -2124,11 +2598,11 @@ drain(callback: AsyncCallback\<void>): void ...@@ -2124,11 +2598,11 @@ drain(callback: AsyncCallback\<void>): void
``` ```
audioRenderer.drain((err) => { audioRenderer.drain((err) => {
if (err) { if (err) {
console.error('Renderer drain failed'); console.error('Renderer drain failed');
} else { } else {
console.log('Renderer drained.'); console.log('Renderer drained.');
} }
}); });
``` ```
...@@ -2138,7 +2612,7 @@ drain(): Promise\<void> ...@@ -2138,7 +2612,7 @@ drain(): Promise\<void>
检查缓冲区是否已被耗尽。使用Promise方式异步返回结果。 检查缓冲区是否已被耗尽。使用Promise方式异步返回结果。
**系统能力**: SystemCapability.Multimedia.Audio.Renderer **系统能力:** SystemCapability.Multimedia.Audio.Renderer
**返回值:** **返回值:**
...@@ -2150,9 +2624,9 @@ drain(): Promise\<void> ...@@ -2150,9 +2624,9 @@ drain(): Promise\<void>
``` ```
audioRenderer.drain().then(() => { audioRenderer.drain().then(() => {
console.log('Renderer drained successfully'); console.log('Renderer drained successfully');
}).catch((err) => { }).catch((err) => {
console.log('ERROR: '+err.message); console.log('ERROR: '+err.message);
}); });
``` ```
...@@ -2162,7 +2636,7 @@ stop(callback: AsyncCallback\<void>): void ...@@ -2162,7 +2636,7 @@ stop(callback: AsyncCallback\<void>): void
停止渲染。使用callback方式异步返回结果。 停止渲染。使用callback方式异步返回结果。
**系统能力**: SystemCapability.Multimedia.Audio.Renderer **系统能力:** SystemCapability.Multimedia.Audio.Renderer
**参数:** **参数:**
...@@ -2174,11 +2648,11 @@ stop(callback: AsyncCallback\<void>): void ...@@ -2174,11 +2648,11 @@ stop(callback: AsyncCallback\<void>): void
``` ```
audioRenderer.stop((err) => { audioRenderer.stop((err) => {
if (err) { if (err) {
console.error('Renderer stop failed'); console.error('Renderer stop failed');
} else { } else {
console.log('Renderer stopped.'); console.log('Renderer stopped.');
} }
}); });
``` ```
...@@ -2188,7 +2662,7 @@ stop(): Promise\<void> ...@@ -2188,7 +2662,7 @@ stop(): Promise\<void>
停止渲染。使用Promise方式异步返回结果。 停止渲染。使用Promise方式异步返回结果。
**系统能力**: SystemCapability.Multimedia.Audio.Renderer **系统能力:** SystemCapability.Multimedia.Audio.Renderer
**返回值:** **返回值:**
...@@ -2200,9 +2674,9 @@ stop(): Promise\<void> ...@@ -2200,9 +2674,9 @@ stop(): Promise\<void>
``` ```
audioRenderer.stop().then(() => { audioRenderer.stop().then(() => {
console.log('Renderer stopped successfully'); console.log('Renderer stopped successfully');
}).catch((err) => { }).catch((err) => {
console.log('ERROR: '+err.message); console.log('ERROR: '+err.message);
}); });
``` ```
...@@ -2212,7 +2686,7 @@ release(callback: AsyncCallback\<void>): void ...@@ -2212,7 +2686,7 @@ release(callback: AsyncCallback\<void>): void
释放音频渲染器。使用callback方式异步返回结果。 释放音频渲染器。使用callback方式异步返回结果。
**系统能力**: SystemCapability.Multimedia.Audio.Renderer **系统能力:** SystemCapability.Multimedia.Audio.Renderer
**参数:** **参数:**
...@@ -2224,11 +2698,11 @@ release(callback: AsyncCallback\<void>): void ...@@ -2224,11 +2698,11 @@ release(callback: AsyncCallback\<void>): void
``` ```
audioRenderer.release((err) => { audioRenderer.release((err) => {
if (err) { if (err) {
console.error('Renderer release failed'); console.error('Renderer release failed');
} else { } else {
console.log('Renderer released.'); console.log('Renderer released.');
} }
}); });
``` ```
...@@ -2238,7 +2712,7 @@ release(): Promise\<void> ...@@ -2238,7 +2712,7 @@ release(): Promise\<void>
释放渲染器。使用Promise方式异步返回结果。 释放渲染器。使用Promise方式异步返回结果。
**系统能力**: SystemCapability.Multimedia.Audio.Renderer **系统能力:** SystemCapability.Multimedia.Audio.Renderer
**返回值:** **返回值:**
...@@ -2250,9 +2724,9 @@ release(): Promise\<void> ...@@ -2250,9 +2724,9 @@ release(): Promise\<void>
``` ```
audioRenderer.release().then(() => { audioRenderer.release().then(() => {
console.log('Renderer released successfully'); console.log('Renderer released successfully');
}).catch((err) => { }).catch((err) => {
console.log('ERROR: '+err.message); console.log('ERROR: '+err.message);
}); });
``` ```
...@@ -2262,7 +2736,7 @@ write(buffer: ArrayBuffer, callback: AsyncCallback\<number>): void ...@@ -2262,7 +2736,7 @@ write(buffer: ArrayBuffer, callback: AsyncCallback\<number>): void
写入缓冲区。使用callback方式异步返回结果。 写入缓冲区。使用callback方式异步返回结果。
**系统能力**: SystemCapability.Multimedia.Audio.Renderer **系统能力:** SystemCapability.Multimedia.Audio.Renderer
**参数:** **参数:**
...@@ -2279,36 +2753,35 @@ import fileio from '@ohos.fileio'; ...@@ -2279,36 +2753,35 @@ import fileio from '@ohos.fileio';
import featureAbility from '@ohos.ability.featureAbility' import featureAbility from '@ohos.ability.featureAbility'
var audioStreamInfo = { var audioStreamInfo = {
samplingRate: audio.AudioSamplingRate.SAMPLE_RATE_48000, samplingRate: audio.AudioSamplingRate.SAMPLE_RATE_48000,
channels: audio.AudioChannel.CHANNEL_2, channels: audio.AudioChannel.CHANNEL_2,
sampleFormat: audio.AudioSampleFormat.SAMPLE_FORMAT_S32LE, sampleFormat: audio.AudioSampleFormat.SAMPLE_FORMAT_S32LE,
encodingType: audio.AudioEncodingType.ENCODING_TYPE_RAW encodingType: audio.AudioEncodingType.ENCODING_TYPE_RAW
} }
var audioRendererInfo = { var audioRendererInfo = {
content: audio.ContentType.CONTENT_TYPE_SPEECH, content: audio.ContentType.CONTENT_TYPE_SPEECH,
usage: audio.streamUsage.STREAM_USAGE_VOICE_COMMUNICATION usage: audio.StreamUsage.STREAM_USAGE_VOICE_COMMUNICATION
rendererFlags: 1 rendererFlags: 1
}
var audioRendererOptions = { var audioRendererOptions = {
streamInfo: audioStreamInfo, streamInfo: audioStreamInfo,
rendererInfo: audioRendererInfo rendererInfo: audioRendererInfo
} }
var audioRenderer; var audioRenderer;
audio.createAudioRenderer(audioRendererOptions).then((data)=> { audio.createAudioRenderer(audioRendererOptions).then((data)=> {
audioRenderer = data; audioRenderer = data;
console.info('AudioFrameworkRenderLog: AudioRenderer Created: SUCCESS'); console.info('AudioFrameworkRenderLog: AudioRenderer Created: SUCCESS');
}).catch((err) => { }).catch((err) => {
console.info('AudioFrameworkRenderLog: AudioRenderer Created: ERROR: '+err.message); console.info('AudioFrameworkRenderLog: AudioRenderer Created: ERROR: '+err.message);
}); });
var bufferSize; var bufferSize;
audioRenderer.getBufferSize().then((data)=> { audioRenderer.getBufferSize().then((data)=> {
console.info('AudioFrameworkRenderLog: getBufferSize: SUCCESS '+data); console.info('AudioFrameworkRenderLog: getBufferSize: SUCCESS '+data);
bufferSize = data; bufferSize = data;
}).catch((err) => { }).catch((err) => {
console.info.('AudioFrameworkRenderLog: getBufferSize: ERROR: '+err.message); console.info.('AudioFrameworkRenderLog: getBufferSize: ERROR: '+err.message);
}); });
console.info('Buffer size:'+bufferSize); console.info('Buffer size:'+bufferSize);
var context = featureAbility.getContext(); var context = featureAbility.getContext();
var path = await context.getCacheDir(); var path = await context.getCacheDir();
...@@ -2317,11 +2790,11 @@ let ss = fileio.createStreamSync(filePath, 'r'); ...@@ -2317,11 +2790,11 @@ let ss = fileio.createStreamSync(filePath, 'r');
let buf = new ArrayBuffer(bufferSize); let buf = new ArrayBuffer(bufferSize);
ss.readSync(buf); ss.readSync(buf);
audioRenderer.write(buf, (err, writtenbytes) => { audioRenderer.write(buf, (err, writtenbytes) => {
if (writtenbytes < 0) { if (writtenbytes < 0) {
console.error('write failed.'); console.error('write failed.');
} else { } else {
console.log('Actual written bytes: ' + writtenbytes); console.log('Actual written bytes: ' + writtenbytes);
} }
}); });
``` ```
...@@ -2331,7 +2804,7 @@ write(buffer: ArrayBuffer): Promise\<number> ...@@ -2331,7 +2804,7 @@ write(buffer: ArrayBuffer): Promise\<number>
写入缓冲区。使用Promise方式异步返回结果。 写入缓冲区。使用Promise方式异步返回结果。
**系统能力**: SystemCapability.Multimedia.Audio.Renderer **系统能力:** SystemCapability.Multimedia.Audio.Renderer
**返回值:** **返回值:**
...@@ -2347,36 +2820,36 @@ import fileio from '@ohos.fileio'; ...@@ -2347,36 +2820,36 @@ import fileio from '@ohos.fileio';
import featureAbility from '@ohos.ability.featureAbility' import featureAbility from '@ohos.ability.featureAbility'
var audioStreamInfo = { var audioStreamInfo = {
samplingRate:audio.AudioSamplingRate.SAMPLE_RATE_48000, samplingRate:audio.AudioSamplingRate.SAMPLE_RATE_48000,
channels:audio.AudioChannel.CHANNEL_2, channels:audio.AudioChannel.CHANNEL_2,
sampleFormat.audio.AudioSampleFormat.SAMPLE_FORMAT_S32LE, sampleFormat:audio.AudioSampleFormat.SAMPLE_FORMAT_S32LE,
encodingType.audio.AudioEncodingType.ENCODING_TYPE_RAW encodingType:audio.AudioEncodingType.ENCODING_TYPE_RAW
} }
var audioRendererInfo = { var audioRendererInfo = {
content: audio.ContentType.CONTENT_TYPE_SPEECH, content: audio.ContentType.CONTENT_TYPE_SPEECH,
usage: audio.streamUsage.STREAM_USAGE_VOICE_COMMUNICATION, usage: audio.StreamUsage.STREAM_USAGE_VOICE_COMMUNICATION,
rendererFlags: 1 rendererFlags: 1
} }
var audioRendererOptions = { var audioRendererOptions = {
streamInfo: audioStreamInfo, streamInfo: audioStreamInfo,
rendererInfo: audioRendererInfo rendererInfo: audioRendererInfo
} }
var audioRenderer; var audioRenderer;
audio.createAudioRenderer(audioRendererOptions).then((data) => { audio.createAudioRenderer(audioRendererOptions).then((data) => {
audioRenderer = data; audioRenderer = data;
console.info('AudioFrameworkRenderLog: AudioRenderer Created: SUCCESS'); console.info('AudioFrameworkRenderLog: AudioRenderer Created: SUCCESS');
}).catch((err) => { }).catch((err) => {
console.info('AudioFrameworkRenderLog: AudioRenderer Created: ERROR: '+err.message); console.info('AudioFrameworkRenderLog: AudioRenderer Created: ERROR: '+err.message);
}); });
var bufferSize; var bufferSize;
audioRenderer.getBufferSize().then((data) => { audioRenderer.getBufferSize().then((data) => {
console.info('AudioFrameworkRenderLog: getBufferSize: SUCCESS '+data); console.info('AudioFrameworkRenderLog: getBufferSize: SUCCESS '+data);
bufferSize = data; bufferSize = data;
}).catch((err) => { }).catch((err) => {
console.info('AudioFrameworkRenderLog: getBufferSize: ERROR: '+err.message); console.info('AudioFrameworkRenderLog: getBufferSize: ERROR: '+err.message);
}); });
console.info('BufferSize: ' + bufferSize); console.info('BufferSize: ' + bufferSize);
var context = featureAbility.getContext(); var context = featureAbility.getContext();
var path = await context.getCacheDir(); var path = await context.getCacheDir();
...@@ -2385,11 +2858,11 @@ let ss = fileio.createStreamSync(filePath, 'r'); ...@@ -2385,11 +2858,11 @@ let ss = fileio.createStreamSync(filePath, 'r');
let buf = new ArrayBuffer(bufferSize); let buf = new ArrayBuffer(bufferSize);
ss.readSync(buf); ss.readSync(buf);
audioRenderer.write(buf).then((writtenbytes) => { audioRenderer.write(buf).then((writtenbytes) => {
if (writtenbytes < 0) { if (writtenbytes < 0) {
console.error('write failed.'); console.error('write failed.');
} else { } else {
console.log('Actual written bytes: ' + writtenbytes); console.log('Actual written bytes: ' + writtenbytes);
} }
}).catch((err) => { }).catch((err) => {
console.log('ERROR: '+err.message); console.log('ERROR: '+err.message);
}); });
...@@ -2401,7 +2874,7 @@ getAudioTime(callback: AsyncCallback\<number>): void ...@@ -2401,7 +2874,7 @@ getAudioTime(callback: AsyncCallback\<number>): void
获取时间戳(从 1970 年 1 月 1 日开始)。使用callback方式异步返回结果。 获取时间戳(从 1970 年 1 月 1 日开始)。使用callback方式异步返回结果。
**系统能力**: SystemCapability.Multimedia.Audio.Renderer **系统能力:** SystemCapability.Multimedia.Audio.Renderer
**参数:** **参数:**
...@@ -2413,7 +2886,7 @@ getAudioTime(callback: AsyncCallback\<number>): void ...@@ -2413,7 +2886,7 @@ getAudioTime(callback: AsyncCallback\<number>): void
``` ```
audioRenderer.getAudioTime((err, timestamp) => { audioRenderer.getAudioTime((err, timestamp) => {
console.log('Current timestamp: ' + timestamp); console.log('Current timestamp: ' + timestamp);
}); });
``` ```
...@@ -2423,7 +2896,7 @@ getAudioTime(): Promise\<number> ...@@ -2423,7 +2896,7 @@ getAudioTime(): Promise\<number>
获取时间戳(从 1970 年 1 月 1 日开始)。使用Promise方式异步返回结果。 获取时间戳(从 1970 年 1 月 1 日开始)。使用Promise方式异步返回结果。
**系统能力**: SystemCapability.Multimedia.Audio.Renderer **系统能力:** SystemCapability.Multimedia.Audio.Renderer
**返回值:** **返回值:**
...@@ -2435,9 +2908,9 @@ getAudioTime(): Promise\<number> ...@@ -2435,9 +2908,9 @@ getAudioTime(): Promise\<number>
``` ```
audioRenderer.getAudioTime().then((timestamp) => { audioRenderer.getAudioTime().then((timestamp) => {
console.log('Current timestamp: ' + timestamp); console.log('Current timestamp: ' + timestamp);
}).catch((err) => { }).catch((err) => {
console.log('ERROR: '+err.message); console.log('ERROR: '+err.message);
}); });
``` ```
...@@ -2447,7 +2920,7 @@ getBufferSize(callback: AsyncCallback\<number>): void ...@@ -2447,7 +2920,7 @@ getBufferSize(callback: AsyncCallback\<number>): void
获取音频渲染器的最小缓冲区大小。使用callback方式异步返回结果。 获取音频渲染器的最小缓冲区大小。使用callback方式异步返回结果。
**系统能力**: SystemCapability.Multimedia.Audio.Renderer **系统能力:** SystemCapability.Multimedia.Audio.Renderer
**参数:** **参数:**
...@@ -2459,9 +2932,9 @@ getBufferSize(callback: AsyncCallback\<number>): void ...@@ -2459,9 +2932,9 @@ getBufferSize(callback: AsyncCallback\<number>): void
``` ```
var bufferSize = audioRenderer.getBufferSize(async(err, bufferSize) => { var bufferSize = audioRenderer.getBufferSize(async(err, bufferSize) => {
if (err) { if (err) {
console.error('getBufferSize error'); console.error('getBufferSize error');
} }
}); });
``` ```
...@@ -2471,7 +2944,7 @@ getBufferSize(): Promise\<number> ...@@ -2471,7 +2944,7 @@ getBufferSize(): Promise\<number>
获取音频渲染器的最小缓冲区大小。使用Promise方式异步返回结果。 获取音频渲染器的最小缓冲区大小。使用Promise方式异步返回结果。
**系统能力**: SystemCapability.Multimedia.Audio.Renderer **系统能力:** SystemCapability.Multimedia.Audio.Renderer
**返回值:** **返回值:**
...@@ -2486,35 +2959,35 @@ import audio from '@ohos.multimedia.audio'; ...@@ -2486,35 +2959,35 @@ import audio from '@ohos.multimedia.audio';
import fileio from '@ohos.fileio'; import fileio from '@ohos.fileio';
var audioStreamInfo = { var audioStreamInfo = {
samplingRate:audio.AudioSamplingRate.SAMPLE_RATE_48000, samplingRate:audio.AudioSamplingRate.SAMPLE_RATE_48000,
channels:audio.AudioChannel.CHANNEL_2, channels:audio.AudioChannel.CHANNEL_2,
sampleFormat.audio.AudioSampleFormat.SAMPLE_FORMAT_S32LE, sampleFormat:audio.AudioSampleFormat.SAMPLE_FORMAT_S32LE,
encodingType.audio.AudioEncodingType.ENCODING_TYPE_RAW encodingType:audio.AudioEncodingType.ENCODING_TYPE_RAW
} }
var audioRendererInfo = { var audioRendererInfo = {
content: audio.ContentType.CONTENT_TYPE_SPEECH, content: audio.ContentType.CONTENT_TYPE_SPEECH,
usage: audio.streamUsage.STREAM_USAGE_VOICE_COMMUNICATION, usage: audio.StreamUsage.STREAM_USAGE_VOICE_COMMUNICATION,
rendererFlags: 1 rendererFlags: 1
} }
var audioRendererOptions = { var audioRendererOptions = {
streamInfo: audioStreamInfo, streamInfo: audioStreamInfo,
rendererInfo: audioRendererInfo rendererInfo: audioRendererInfo
} }
var audioRenderer; var audioRenderer;
audio.createAudioRenderer(audioRendererOptions).then((data) => { audio.createAudioRenderer(audioRendererOptions).then((data) => {
audioRenderer = data; audioRenderer = data;
console.info('AudioFrameworkRenderLog: AudioRenderer Created: SUCCESS'); console.info('AudioFrameworkRenderLog: AudioRenderer Created: SUCCESS');
}).catch((err) => { }).catch((err) => {
console.info('AudioFrameworkRenderLog: AudioRenderer Created: ERROR: '+err.message); console.info('AudioFrameworkRenderLog: AudioRenderer Created: ERROR: '+err.message);
}); });
var bufferSize; var bufferSize;
audioRenderer.getBufferSize().then((data) => { audioRenderer.getBufferSize().then((data) => {
console.info('AudioFrameworkRenderLog: getBufferSize: SUCCESS '+data); console.info('AudioFrameworkRenderLog: getBufferSize: SUCCESS '+data);
bufferSize=data; bufferSize=data;
}).catch((err) => { }).catch((err) => {
console.info('AudioFrameworkRenderLog: getBufferSize: ERROR: '+err.message); console.info('AudioFrameworkRenderLog: getBufferSize: ERROR: '+err.message);
}); });
``` ```
...@@ -2524,7 +2997,7 @@ setRenderRate(rate: AudioRendererRate, callback: AsyncCallback\<void>): void ...@@ -2524,7 +2997,7 @@ setRenderRate(rate: AudioRendererRate, callback: AsyncCallback\<void>): void
设置音频渲染速率。使用callback方式异步返回结果。 设置音频渲染速率。使用callback方式异步返回结果。
**系统能力**: SystemCapability.Multimedia.Audio.Renderer **系统能力:** SystemCapability.Multimedia.Audio.Renderer
**参数:** **参数:**
...@@ -2537,11 +3010,11 @@ setRenderRate(rate: AudioRendererRate, callback: AsyncCallback\<void>): void ...@@ -2537,11 +3010,11 @@ setRenderRate(rate: AudioRendererRate, callback: AsyncCallback\<void>): void
``` ```
audioRenderer.setRenderRate(audio.AudioRendererRate.RENDER_RATE_NORMAL, (err) => { audioRenderer.setRenderRate(audio.AudioRendererRate.RENDER_RATE_NORMAL, (err) => {
if (err) { if (err) {
console.error('Failed to set params'); console.error('Failed to set params');
} else { } else {
console.log('Callback invoked to indicate a successful render rate setting.'); console.log('Callback invoked to indicate a successful render rate setting.');
} }
}); });
``` ```
...@@ -2551,7 +3024,7 @@ setRenderRate(rate: AudioRendererRate): Promise\<void> ...@@ -2551,7 +3024,7 @@ setRenderRate(rate: AudioRendererRate): Promise\<void>
设置音频渲染速率。使用Promise方式异步返回结果。 设置音频渲染速率。使用Promise方式异步返回结果。
**系统能力**: SystemCapability.Multimedia.Audio.Renderer **系统能力:** SystemCapability.Multimedia.Audio.Renderer
**参数:** **参数:**
...@@ -2569,9 +3042,9 @@ setRenderRate(rate: AudioRendererRate): Promise\<void> ...@@ -2569,9 +3042,9 @@ setRenderRate(rate: AudioRendererRate): Promise\<void>
``` ```
audioRenderer.setRenderRate(audio.AudioRendererRate.RENDER_RATE_NORMAL).then(() => { audioRenderer.setRenderRate(audio.AudioRendererRate.RENDER_RATE_NORMAL).then(() => {
console.log('setRenderRate SUCCESS'); console.log('setRenderRate SUCCESS');
}).catch((err) => { }).catch((err) => {
console.log('ERROR: '+err.message); console.log('ERROR: '+err.message);
}); });
``` ```
...@@ -2581,7 +3054,7 @@ getRenderRate(callback: AsyncCallback\<AudioRendererRate>): void ...@@ -2581,7 +3054,7 @@ getRenderRate(callback: AsyncCallback\<AudioRendererRate>): void
获取当前渲染速率。使用callback方式异步返回结果。 获取当前渲染速率。使用callback方式异步返回结果。
**系统能力**: SystemCapability.Multimedia.Audio.Renderer **系统能力:** SystemCapability.Multimedia.Audio.Renderer
**参数:** **参数:**
...@@ -2593,7 +3066,7 @@ getRenderRate(callback: AsyncCallback\<AudioRendererRate>): void ...@@ -2593,7 +3066,7 @@ getRenderRate(callback: AsyncCallback\<AudioRendererRate>): void
``` ```
audioRenderer.getRenderRate((err, renderrate) => { audioRenderer.getRenderRate((err, renderrate) => {
console.log('getRenderRate: ' + renderrate); console.log('getRenderRate: ' + renderrate);
}); });
``` ```
...@@ -2603,7 +3076,7 @@ getRenderRate(): Promise\<AudioRendererRate> ...@@ -2603,7 +3076,7 @@ getRenderRate(): Promise\<AudioRendererRate>
获取当前渲染速率。使用Promise方式异步返回结果。 获取当前渲染速率。使用Promise方式异步返回结果。
**系统能力**: SystemCapability.Multimedia.Audio.Renderer **系统能力:** SystemCapability.Multimedia.Audio.Renderer
**返回值:** **返回值:**
...@@ -2615,14 +3088,14 @@ getRenderRate(): Promise\<AudioRendererRate> ...@@ -2615,14 +3088,14 @@ getRenderRate(): Promise\<AudioRendererRate>
``` ```
audioRenderer.getRenderRate().then((renderRate) => { audioRenderer.getRenderRate().then((renderRate) => {
console.log('getRenderRate: ' + renderRate); console.log('getRenderRate: ' + renderRate);
}).catch((err) => { }).catch((err) => {
console.log('ERROR: '+err.message); console.log('ERROR: '+err.message);
}); });
``` ```
### setInterruptMode<sup>9+</sup> ### setInterruptMode<sup>9+</sup>
setInterruptMode(interruptMode: InterruptMode): Promise&lt;void&gt; setInterruptMode(mode: InterruptMode): Promise&lt;void&gt;
设置应用的焦点模型。使用Promise异步回调。 设置应用的焦点模型。使用Promise异步回调。
...@@ -2630,9 +3103,9 @@ setInterruptMode(interruptMode: InterruptMode): Promise&lt;void&gt; ...@@ -2630,9 +3103,9 @@ setInterruptMode(interruptMode: InterruptMode): Promise&lt;void&gt;
**参数:** **参数:**
| 参数名 | 类型 | 必填 | 说明 | | 参数名 | 类型 | 必填 | 说明 |
| ---------- | ----------------------------------- | ---- | -------------------------------------------------------- | | ---------- | ---------------------------------- | ------ | ---------- |
| interruptMode | [InterruptMode](#InterruptMode) | 是 | 焦点模型。 | | mode | [InterruptMode](#InterruptMode) | 是 | 焦点模型。 |
**返回值:** **返回值:**
...@@ -2643,14 +3116,32 @@ setInterruptMode(interruptMode: InterruptMode): Promise&lt;void&gt; ...@@ -2643,14 +3116,32 @@ setInterruptMode(interruptMode: InterruptMode): Promise&lt;void&gt;
**示例:** **示例:**
``` ```
const audioManager = audio.getAudioManager(); var audioStreamInfo = {
audioManager.setInterruptMode(audio.InterruptMode.SHARE_MODE).then(() => { samplingRate: audio.AudioSamplingRate.SAMPLE_RATE_48000,
console.log('Promise returned to indicate a successful volume setting.'); channels: audio.AudioChannel.CHANNEL_1,
sampleFormat: audio.AudioSampleFormat.SAMPLE_FORMAT_S16LE,
encodingType: audio.AudioEncodingType.ENCODING_TYPE_RAW
}
var audioRendererInfo = {
content: audio.ContentType.CONTENT_TYPE_MUSIC,
usage: audio.StreamUsage.STREAM_USAGE_MEDIA,
rendererFlags: 0
}
var audioRendererOptions = {
streamInfo: audioStreamInfo,
rendererInfo: audioRendererInfo
}
let audioRenderer = await audio.createAudioRenderer(audioRendererOptions);
let mode = 0;
audioRenderer.setInterruptMode(mode).then(data=>{
console.log("setInterruptMode Success!");
}).catch(err=>{
console.log("setInterruptMode Fail:" + err.message);
}); });
``` ```
### setInterruptMode<sup>9+</sup> ### setInterruptMode<sup>9+</sup>
setInterruptMode(interruptMode: InterruptMode, callback: Callback\<void>): void setInterruptMode(mode: InterruptMode, callback: Callback\<void>): void
设置应用的焦点模型。使用Callback回调返回执行结果。 设置应用的焦点模型。使用Callback回调返回执行结果。
...@@ -2658,17 +3149,36 @@ setInterruptMode(interruptMode: InterruptMode, callback: Callback\<void>): void ...@@ -2658,17 +3149,36 @@ setInterruptMode(interruptMode: InterruptMode, callback: Callback\<void>): void
**参数:** **参数:**
| 参数名 | 类型 | 必填 | 说明 | | 参数名 | 类型 | 必填 | 说明 |
| ---------- | ----------------------------------- | ---- | -------------------------------------------------------- | | ------- | ----------------------------------- | ------ | -------------- |
|interruptMode | [InterruptMode](#InterruptMode) | 是 | 焦点模型。| |mode | [InterruptMode](#InterruptMode) | 是 | 焦点模型。|
|callback | Callback\<void> | 是 |回调返回执行结果。| |callback | Callback\<void> | 是 |回调返回执行结果。|
**示例:** **示例:**
``` ```
const audioManager = audio.getAudioManager(); var audioStreamInfo = {
audioManager.setInterruptMode(audio.InterruptMode.SHARE_MODE,()=>{ samplingRate: audio.AudioSamplingRate.SAMPLE_RATE_48000,
console.log('Callback returned to indicate a successful volume setting.'); channels: audio.AudioChannel.CHANNEL_1,
sampleFormat: audio.AudioSampleFormat.SAMPLE_FORMAT_S16LE,
encodingType: audio.AudioEncodingType.ENCODING_TYPE_RAW
}
var audioRendererInfo = {
content: audio.ContentType.CONTENT_TYPE_MUSIC,
usage: audio.StreamUsage.STREAM_USAGE_MEDIA,
rendererFlags: 0
}
var audioRendererOptions = {
streamInfo: audioStreamInfo,
rendererInfo: audioRendererInfo
}
let audioRenderer = await audio.createAudioRenderer(audioRendererOptions);
let mode = 1;
audioRenderer.setInterruptMode(mode,(err,data)=>{
if(err){
console.log("setInterruptMode Fail:" + err.message);
}
console.log("setInterruptMode Success!");
}); });
``` ```
### on('interrupt')<sup>9+</sup> ### on('interrupt')<sup>9+</sup>
...@@ -2677,7 +3187,7 @@ on(type: 'interrupt', callback: Callback\<InterruptEvent>): void ...@@ -2677,7 +3187,7 @@ on(type: 'interrupt', callback: Callback\<InterruptEvent>): void
监听音频中断事件。使用callback获取中断事件。 监听音频中断事件。使用callback获取中断事件。
**系统能力**: SystemCapability.Multimedia.Audio.Renderer **系统能力:** SystemCapability.Multimedia.Audio.Renderer
**参数:** **参数:**
...@@ -2692,49 +3202,48 @@ on(type: 'interrupt', callback: Callback\<InterruptEvent>): void ...@@ -2692,49 +3202,48 @@ on(type: 'interrupt', callback: Callback\<InterruptEvent>): void
var isPlay; var isPlay;
var started; var started;
audioRenderer.on('interrupt', async(interruptEvent) => { audioRenderer.on('interrupt', async(interruptEvent) => {
if (interruptEvent.forceType == audio.InterruptForceType.INTERRUPT_FORCE) { if (interruptEvent.forceType == audio.InterruptForceType.INTERRUPT_FORCE) {
switch (interruptEvent.hintType) { switch (interruptEvent.hintType) {
case audio.InterruptHint.INTERRUPT_HINT_PAUSE: case audio.InterruptHint.INTERRUPT_HINT_PAUSE:
console.log('Force paused. Stop writing'); console.log('Force paused. Stop writing');
isPlay = false; isPlay = false;
break; break;
case audio.InterruptHint.INTERRUPT_HINT_STOP: case audio.InterruptHint.INTERRUPT_HINT_STOP:
console.log('Force stopped. Stop writing'); console.log('Force stopped. Stop writing');
isPlay = false; isPlay = false;
break; break;
} }
} else if (interruptEvent.forceType == audio.InterruptForceType.INTERRUPT_SHARE) { } else if (interruptEvent.forceType == audio.InterruptForceType.INTERRUPT_SHARE) {
switch (interruptEvent.hintType) { switch (interruptEvent.hintType) {
case audio.InterruptHint.INTERRUPT_HINT_RESUME: case audio.InterruptHint.INTERRUPT_HINT_RESUME:
console.log('Resume force paused renderer or ignore'); console.log('Resume force paused renderer or ignore');
await audioRenderer.start().then(async function () { await audioRenderer.start().then(async function () {
console.info('AudioInterruptMusic: renderInstant started :SUCCESS '); console.info('AudioInterruptMusic: renderInstant started :SUCCESS ');
started = true; started = true;
}).catch((err) => { }).catch((err) => {
console.info('AudioInterruptMusic: renderInstant start :ERROR : '+err.message); console.info('AudioInterruptMusic: renderInstant start :ERROR : '+err.message);
started = false; started = false;
}); });
if (started) { if (started) {
isPlay = true; isPlay = true;
console.info('AudioInterruptMusic Renderer started : isPlay : '+isPlay); console.info('AudioInterruptMusic Renderer started : isPlay : '+isPlay);
} else { } else {
console.error('AudioInterruptMusic Renderer start failed'); console.error('AudioInterruptMusic Renderer start failed');
}
break;
case audio.InterruptHint.INTERRUPT_HINT_PAUSE:
console.log('Choose to pause or ignore');
if (isPlay == true) {
isPlay == false;
console.info('AudioInterruptMusic: Media PAUSE : TRUE');
}
else {
isPlay = true;
console.info('AudioInterruptMusic: Media PLAY : TRUE');
}
break;
} }
break;
case audio.InterruptHint.INTERRUPT_HINT_PAUSE:
console.log('Choose to pause or ignore');
if (isPlay == true) {
isPlay == false;
console.info('AudioInterruptMusic: Media PAUSE : TRUE');
}else {
isPlay = true;
console.info('AudioInterruptMusic: Media PLAY : TRUE');
}
break;
} }
}); }
});
``` ```
### on('markReach')<sup>8+</sup> ### on('markReach')<sup>8+</sup>
...@@ -2757,9 +3266,9 @@ on(type: 'markReach', frame: number, callback: (position: number) => {}): void ...@@ -2757,9 +3266,9 @@ on(type: 'markReach', frame: number, callback: (position: number) => {}): void
``` ```
audioRenderer.on('markReach', 1000, (position) => { audioRenderer.on('markReach', 1000, (position) => {
if (position == 1000) { if (position == 1000) {
console.log('ON Triggered successfully'); console.log('ON Triggered successfully');
} }
}); });
``` ```
...@@ -2804,9 +3313,9 @@ on(type: "periodReach", frame: number, callback: (position: number) => {}): void ...@@ -2804,9 +3313,9 @@ on(type: "periodReach", frame: number, callback: (position: number) => {}): void
``` ```
audioRenderer.on('periodReach', 1000, (position) => { audioRenderer.on('periodReach', 1000, (position) => {
if (position == 1000) { if (position == 1000) {
console.log('ON Triggered successfully'); console.log('ON Triggered successfully');
} }
}); });
``` ```
...@@ -2849,12 +3358,12 @@ on(type: 'stateChange', callback: Callback<AudioState\>): void ...@@ -2849,12 +3358,12 @@ on(type: 'stateChange', callback: Callback<AudioState\>): void
``` ```
audioRenderer.on('stateChange', (state) => { audioRenderer.on('stateChange', (state) => {
if (state == 1) { if (state == 1) {
console.log("audio renderer state is: STATE_PREPARED"); console.log("audio renderer state is: STATE_PREPARED");
} }
if (state == 2) { if (state == 2) {
console.log("audio renderer state is: STATE_RUNNING"); console.log("audio renderer state is: STATE_RUNNING");
} }
}); });
``` ```
...@@ -2894,13 +3403,13 @@ getCapturerInfo(callback: AsyncCallback<AudioCapturerInfo\>): void ...@@ -2894,13 +3403,13 @@ getCapturerInfo(callback: AsyncCallback<AudioCapturerInfo\>): void
``` ```
audioCapturer.getCapturerInfo((err, capturerInfo) => { audioCapturer.getCapturerInfo((err, capturerInfo) => {
if (err) { if (err) {
console.error('Failed to get capture info'); console.error('Failed to get capture info');
} else { } else {
console.log('Capturer getCapturerInfo:'); console.log('Capturer getCapturerInfo:');
console.log('Capturer source:' + capturerInfo.source); console.log('Capturer source:' + capturerInfo.source);
console.log('Capturer flags:' + capturerInfo.capturerFlags); console.log('Capturer flags:' + capturerInfo.capturerFlags);
} }
}); });
``` ```
...@@ -2923,16 +3432,16 @@ getCapturerInfo(): Promise<AudioCapturerInfo\> ...@@ -2923,16 +3432,16 @@ getCapturerInfo(): Promise<AudioCapturerInfo\>
``` ```
audioCapturer.getCapturerInfo().then((audioParamsGet) => { audioCapturer.getCapturerInfo().then((audioParamsGet) => {
if (audioParamsGet != undefined) { if (audioParamsGet != undefined) {
console.info('AudioFrameworkRecLog: Capturer CapturerInfo:'); console.info('AudioFrameworkRecLog: Capturer CapturerInfo:');
console.info('AudioFrameworkRecLog: Capturer SourceType:' + audioParamsGet.source); console.info('AudioFrameworkRecLog: Capturer SourceType:' + audioParamsGet.source);
console.info('AudioFrameworkRecLog: Capturer capturerFlags:' + audioParamsGet.capturerFlags); console.info('AudioFrameworkRecLog: Capturer capturerFlags:' + audioParamsGet.capturerFlags);
}else { }else {
console.info('AudioFrameworkRecLog: audioParamsGet is : '+audioParamsGet); console.info('AudioFrameworkRecLog: audioParamsGet is : '+audioParamsGet);
console.info('AudioFrameworkRecLog: audioParams getCapturerInfo are incorrect: '); console.info('AudioFrameworkRecLog: audioParams getCapturerInfo are incorrect: ');
} }
}).catch((err) => { }).catch((err) => {
console.log('AudioFrameworkRecLog: CapturerInfo :ERROR: '+err.message); console.log('AudioFrameworkRecLog: CapturerInfo :ERROR: '+err.message);
}); });
``` ```
...@@ -2954,15 +3463,15 @@ getStreamInfo(callback: AsyncCallback<AudioStreamInfo\>): void ...@@ -2954,15 +3463,15 @@ getStreamInfo(callback: AsyncCallback<AudioStreamInfo\>): void
``` ```
audioCapturer.getStreamInfo((err, streamInfo) => { audioCapturer.getStreamInfo((err, streamInfo) => {
if (err) { if (err) {
console.error('Failed to get stream info'); console.error('Failed to get stream info');
} else { } else {
console.log('Capturer GetStreamInfo:'); console.log('Capturer GetStreamInfo:');
console.log('Capturer sampling rate:' + streamInfo.samplingRate); console.log('Capturer sampling rate:' + streamInfo.samplingRate);
console.log('Capturer channel:' + streamInfo.channels); console.log('Capturer channel:' + streamInfo.channels);
console.log('Capturer format:' + streamInfo.sampleFormat); console.log('Capturer format:' + streamInfo.sampleFormat);
console.log('Capturer encoding type:' + streamInfo.encodingType); console.log('Capturer encoding type:' + streamInfo.encodingType);
} }
}); });
``` ```
...@@ -2984,13 +3493,13 @@ getStreamInfo(): Promise<AudioStreamInfo\> ...@@ -2984,13 +3493,13 @@ getStreamInfo(): Promise<AudioStreamInfo\>
``` ```
audioCapturer.getStreamInfo().then((audioParamsGet) => { audioCapturer.getStreamInfo().then((audioParamsGet) => {
console.info('getStreamInfo:'); console.info('getStreamInfo:');
console.info('sampleFormat:' + audioParamsGet.sampleFormat); console.info('sampleFormat:' + audioParamsGet.sampleFormat);
console.info('samplingRate:' + audioParamsGet.samplingRate); console.info('samplingRate:' + audioParamsGet.samplingRate);
console.info('channels:' + audioParamsGet.channels); console.info('channels:' + audioParamsGet.channels);
console.info('encodingType:' + audioParamsGet.encodingType); console.info('encodingType:' + audioParamsGet.encodingType);
}).catch((err) => { }).catch((err) => {
console.log('getStreamInfo :ERROR: ' + err.message); console.log('getStreamInfo :ERROR: ' + err.message);
}); });
``` ```
...@@ -3002,7 +3511,7 @@ start(callback: AsyncCallback<void\>): void ...@@ -3002,7 +3511,7 @@ start(callback: AsyncCallback<void\>): void
**系统能力:** SystemCapability.Multimedia.Audio.Capturer **系统能力:** SystemCapability.Multimedia.Audio.Capturer
**参数** **参数**
| 参数名 | 类型 | 必填 | 说明 | | 参数名 | 类型 | 必填 | 说明 |
| :------- | :------------------- | :--- | :----------------------------- | | :------- | :------------------- | :--- | :----------------------------- |
...@@ -3012,11 +3521,11 @@ start(callback: AsyncCallback<void\>): void ...@@ -3012,11 +3521,11 @@ start(callback: AsyncCallback<void\>): void
``` ```
audioCapturer.start((err) => { audioCapturer.start((err) => {
if (err) { if (err) {
console.error('Capturer start failed.'); console.error('Capturer start failed.');
} else { } else {
console.info('Capturer start success.'); console.info('Capturer start success.');
} }
}); });
``` ```
...@@ -3042,35 +3551,36 @@ import audio from '@ohos.multimedia.audio'; ...@@ -3042,35 +3551,36 @@ import audio from '@ohos.multimedia.audio';
import fileio from '@ohos.fileio'; import fileio from '@ohos.fileio';
var audioStreamInfo = { var audioStreamInfo = {
samplingRate: audio.AudioSamplingRate.SAMPLE_RATE_44100, samplingRate: audio.AudioSamplingRate.SAMPLE_RATE_44100,
channels: audio.AudioChannel.CHANNEL_2, channels: audio.AudioChannel.CHANNEL_2,
sampleFormat: audio.AudioSampleFormat.SAMPLE_FORMAT_S16LE, sampleFormat: audio.AudioSampleFormat.SAMPLE_FORMAT_S16LE,
encodingType: audio.AudioEncodingType.ENCODING_TYPE_RAW encodingType: audio.AudioEncodingType.ENCODING_TYPE_RAW
} }
var audioCapturerInfo = { var audioCapturerInfo = {
source: audio.SourceType.SOURCE_TYPE_MIC, source: audio.SourceType.SOURCE_TYPE_MIC,
capturerFlags = 1 capturerFlags: 1
} }
var audioCapturer; var audioCapturer;
var stateFlag;
audio.createAudioCapturer(audioCapturerOptions).then((data) => { audio.createAudioCapturer(audioCapturerOptions).then((data) => {
audioCapturer = data; audioCapturer = data;
console.info('AudioFrameworkRecLog: AudioCapturer Created: SUCCESS'); console.info('AudioFrameworkRecLog: AudioCapturer Created: SUCCESS');
}).catch((err) => { }).catch((err) => {
console.info('AudioFrameworkRecLog: AudioCapturer Created: ERROR: '+err.message); console.info('AudioFrameworkRecLog: AudioCapturer Created: ERROR: '+err.message);
}); });
audioCapturer.start().then(() => { audioCapturer.start().then(() => {
console.info('AudioFrameworkRecLog: ---------START---------'); console.info('AudioFrameworkRecLog: ---------START---------');
console.info('AudioFrameworkRecLog: Capturer started: SUCCESS'); console.info('AudioFrameworkRecLog: Capturer started: SUCCESS');
console.info('AudioFrameworkRecLog: AudioCapturer: STATE: '+audioCapturer.state); console.info('AudioFrameworkRecLog: AudioCapturer: STATE: '+audioCapturer.state);
console.info('AudioFrameworkRecLog: Capturer started: SUCCESS '); console.info('AudioFrameworkRecLog: Capturer started: SUCCESS ');
if ((audioCapturer.state == audio.AudioState.STATE_RUNNING)) { if ((audioCapturer.state == audio.AudioState.STATE_RUNNING)) {
console.info('AudioFrameworkRecLog: AudioCapturer is in Running State'); console.info('AudioFrameworkRecLog: AudioCapturer is in Running State');
} }
}).catch((err) => { }).catch((err) => {
console.info('AudioFrameworkRecLog: Capturer start :ERROR : '+err.message); console.info('AudioFrameworkRecLog: Capturer start :ERROR : '+err.message);
stateFlag=false; stateFlag=false;
}); });
``` ```
...@@ -3092,11 +3602,11 @@ stop(callback: AsyncCallback<void\>): void ...@@ -3092,11 +3602,11 @@ stop(callback: AsyncCallback<void\>): void
``` ```
audioCapturer.stop((err) => { audioCapturer.stop((err) => {
if (err) { if (err) {
console.error('Capturer stop failed'); console.error('Capturer stop failed');
} else { } else {
console.log('Capturer stopped.'); console.log('Capturer stopped.');
} }
}); });
``` ```
...@@ -3119,13 +3629,13 @@ stop(): Promise<void\> ...@@ -3119,13 +3629,13 @@ stop(): Promise<void\>
``` ```
audioCapturer.stop().then(() => { audioCapturer.stop().then(() => {
console.info('AudioFrameworkRecLog: ---------STOP RECORD---------'); console.info('AudioFrameworkRecLog: ---------STOP RECORD---------');
console.info('AudioFrameworkRecLog: Capturer stopped: SUCCESS'); console.info('AudioFrameworkRecLog: Capturer stopped: SUCCESS');
if ((audioCapturer.state == audio.AudioState.STATE_STOPPED)){ if ((audioCapturer.state == audio.AudioState.STATE_STOPPED)){
console.info('AudioFrameworkRecLog: State is Stopped': '); console.info('AudioFrameworkRecLog: State is Stopped': ');
} }
}).catch((err) => { }).catch((err) => {
console.info('AudioFrameworkRecLog: Capturer stop: ERROR: '+err.message); console.info('AudioFrameworkRecLog: Capturer stop: ERROR: '+err.message);
}); });
``` ```
...@@ -3147,11 +3657,11 @@ release(callback: AsyncCallback<void\>): void ...@@ -3147,11 +3657,11 @@ release(callback: AsyncCallback<void\>): void
``` ```
audioCapturer.release((err) => { audioCapturer.release((err) => {
if (err) { if (err) {
console.error('capturer release failed'); console.error('capturer release failed');
} else { } else {
console.log('capturer released.'); console.log('capturer released.');
} }
}); });
``` ```
...@@ -3173,13 +3683,14 @@ release(): Promise<void\> ...@@ -3173,13 +3683,14 @@ release(): Promise<void\>
**示例:** **示例:**
``` ```
var stateFlag;
audioCapturer.release().then(() => { audioCapturer.release().then(() => {
console.info('AudioFrameworkRecLog: ---------RELEASE RECORD---------'); console.info('AudioFrameworkRecLog: ---------RELEASE RECORD---------');
console.info('AudioFrameworkRecLog: Capturer release : SUCCESS'); console.info('AudioFrameworkRecLog: Capturer release : SUCCESS');
console.info('AudioFrameworkRecLog: AudioCapturer : STATE : '+audioCapturer.state); console.info('AudioFrameworkRecLog: AudioCapturer : STATE : '+audioCapturer.state);
console.info('AudioFrameworkRecLog: stateFlag : '+stateFlag); console.info('AudioFrameworkRecLog: stateFlag : '+stateFlag);
}).catch((err) => { }).catch((err) => {
console.info('AudioFrameworkRecLog: Capturer stop: ERROR: '+err.message); console.info('AudioFrameworkRecLog: Capturer stop: ERROR: '+err.message);
}); });
``` ```
...@@ -3192,7 +3703,7 @@ read(size: number, isBlockingRead: boolean, callback: AsyncCallback<ArrayBuffer\ ...@@ -3192,7 +3703,7 @@ read(size: number, isBlockingRead: boolean, callback: AsyncCallback<ArrayBuffer\
**系统能力:** SystemCapability.Multimedia.Audio.Capturer **系统能力:** SystemCapability.Multimedia.Audio.Capturer
**参数** **参数**
| 参数名 | 类型 | 必填 | 说明 | | 参数名 | 类型 | 必填 | 说明 |
| :------------- | :-------------------------- | :--- | :------------------------------- | | :------------- | :-------------------------- | :--- | :------------------------------- |
...@@ -3205,15 +3716,15 @@ read(size: number, isBlockingRead: boolean, callback: AsyncCallback<ArrayBuffer\ ...@@ -3205,15 +3716,15 @@ read(size: number, isBlockingRead: boolean, callback: AsyncCallback<ArrayBuffer\
``` ```
var bufferSize; var bufferSize;
audioCapturer.getBufferSize().then((data) => { audioCapturer.getBufferSize().then((data) => {
console.info('AudioFrameworkRecLog: getBufferSize: SUCCESS '+data); console.info('AudioFrameworkRecLog: getBufferSize: SUCCESS '+data);
bufferSize = data; bufferSize = data;
}).catch((err) => { }).catch((err) => {
console.info('AudioFrameworkRecLog: getBufferSize: EROOR: '+err.message); console.info('AudioFrameworkRecLog: getBufferSize: EROOR: '+err.message);
}); });
audioCapturer.read(bufferSize, true, async(err, buffer) => { audioCapturer.read(bufferSize, true, async(err, buffer) => {
if (!err) { if (!err) {
console.log("Success in reading the buffer data"); console.log("Success in reading the buffer data");
} }
}); });
``` ```
...@@ -3244,16 +3755,16 @@ read(size: number, isBlockingRead: boolean): Promise<ArrayBuffer\> ...@@ -3244,16 +3755,16 @@ read(size: number, isBlockingRead: boolean): Promise<ArrayBuffer\>
``` ```
var bufferSize; var bufferSize;
audioCapturer.getBufferSize().then((data) => { audioCapturer.getBufferSize().then((data) => {
console.info('AudioFrameworkRecLog: getBufferSize: SUCCESS '+data); console.info('AudioFrameworkRecLog: getBufferSize: SUCCESS '+data);
bufferSize = data; bufferSize = data;
}).catch((err) => { }).catch((err) => {
console.info('AudioFrameworkRecLog: getBufferSize: ERROR '+err.message); console.info('AudioFrameworkRecLog: getBufferSize: ERROR '+err.message);
}); });
console.info('Buffer size: ' + bufferSize); console.info('Buffer size: ' + bufferSize);
audioCapturer.read(bufferSize, true).then((buffer) => { audioCapturer.read(bufferSize, true).then((buffer) => {
console.info('buffer read successfully'); console.info('buffer read successfully');
}).catch((err) => { }).catch((err) => {
console.info('ERROR : '+err.message); console.info('ERROR : '+err.message);
}); });
``` ```
...@@ -3276,7 +3787,7 @@ getAudioTime(callback: AsyncCallback<number\>): void ...@@ -3276,7 +3787,7 @@ getAudioTime(callback: AsyncCallback<number\>): void
``` ```
audioCapturer.getAudioTime((err, timestamp) => { audioCapturer.getAudioTime((err, timestamp) => {
console.log('Current timestamp: ' + timestamp); console.log('Current timestamp: ' + timestamp);
}); });
``` ```
...@@ -3299,9 +3810,9 @@ getAudioTime(): Promise<number\> ...@@ -3299,9 +3810,9 @@ getAudioTime(): Promise<number\>
``` ```
audioCapturer.getAudioTime().then((audioTime) => { audioCapturer.getAudioTime().then((audioTime) => {
console.info('AudioFrameworkRecLog: AudioCapturer getAudioTime : Success' + audioTime ); console.info('AudioFrameworkRecLog: AudioCapturer getAudioTime : Success' + audioTime );
}).catch((err) => { }).catch((err) => {
console.info('AudioFrameworkRecLog: AudioCapturer Created : ERROR : '+err.message); console.info('AudioFrameworkRecLog: AudioCapturer Created : ERROR : '+err.message);
}); });
``` ```
...@@ -3324,14 +3835,14 @@ getBufferSize(callback: AsyncCallback<number\>): void ...@@ -3324,14 +3835,14 @@ getBufferSize(callback: AsyncCallback<number\>): void
``` ```
audioCapturer.getBufferSize((err, bufferSize) => { audioCapturer.getBufferSize((err, bufferSize) => {
if (!err) { if (!err) {
console.log('BufferSize : ' + bufferSize); console.log('BufferSize : ' + bufferSize);
audioCapturer.read(bufferSize, true).then((buffer) => { audioCapturer.read(bufferSize, true).then((buffer) => {
console.info('Buffer read is ' + buffer ); console.info('Buffer read is ' + buffer );
}).catch((err) => { }).catch((err) => {
console.info('AudioFrameworkRecLog: AudioCapturer Created : ERROR : '+err.message); console.info('AudioFrameworkRecLog: AudioCapturer Created : ERROR : '+err.message);
}); });
} }
}); });
``` ```
...@@ -3355,10 +3866,10 @@ getBufferSize(): Promise<number\> ...@@ -3355,10 +3866,10 @@ getBufferSize(): Promise<number\>
``` ```
var bufferSize; var bufferSize;
audioCapturer.getBufferSize().then((data) => { audioCapturer.getBufferSize().then((data) => {
console.info('AudioFrameworkRecLog: getBufferSize :SUCCESS '+ data); console.info('AudioFrameworkRecLog: getBufferSize :SUCCESS '+ data);
bufferSize = data; bufferSize = data;
}).catch((err) => { }).catch((err) => {
console.info('AudioFrameworkRecLog: getBufferSize :ERROR : '+ err.message); console.info('AudioFrameworkRecLog: getBufferSize :ERROR : '+ err.message);
}); });
``` ```
...@@ -3383,9 +3894,9 @@ on(type: 'markReach', frame: number, callback: (position: number) => {}): void ...@@ -3383,9 +3894,9 @@ on(type: 'markReach', frame: number, callback: (position: number) => {}): void
``` ```
audioCapturer.on('markReach', 1000, (position) => { audioCapturer.on('markReach', 1000, (position) => {
if (position == 1000) { if (position == 1000) {
console.log('ON Triggered successfully'); console.log('ON Triggered successfully');
} }
}); });
``` ```
...@@ -3429,9 +3940,9 @@ on(type: "periodReach", frame: number, callback: (position: number) => {}): void ...@@ -3429,9 +3940,9 @@ on(type: "periodReach", frame: number, callback: (position: number) => {}): void
``` ```
audioCapturer.on('periodReach', 1000, (position) => { audioCapturer.on('periodReach', 1000, (position) => {
if (position == 1000) { if (position == 1000) {
console.log('ON Triggered successfully'); console.log('ON Triggered successfully');
} }
}); });
``` ```
...@@ -3474,11 +3985,11 @@ on(type: 'stateChange', callback: Callback<AudioState\>): void ...@@ -3474,11 +3985,11 @@ on(type: 'stateChange', callback: Callback<AudioState\>): void
``` ```
audioCapturer.on('stateChange', (state) => { audioCapturer.on('stateChange', (state) => {
if (state == 1) { if (state == 1) {
console.log("audio capturer state is: STATE_PREPARED"); console.log("audio capturer state is: STATE_PREPARED");
} }
if (state == 2) { if (state == 2) {
console.log("audio capturer state is: STATE_RUNNING"); console.log("audio capturer state is: STATE_RUNNING");
} }
}); });
``` ```
\ No newline at end of file
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册