Unverified · Commit 181b2927 authored by openharmony_ci and committed by Gitee

!6157 Sample code content update

Merge pull request !6157 from 一杯丞丞汁儿/OpenHarmony-3.1-Release
@@ -1325,7 +1325,7 @@ Sets a device to the active state. This API uses an asynchronous callback to return the result.
 **Example**
 ```
-audioManager.setDeviceActive(audio.DeviceType.SPEAKER, true, (err) => {
+audioManager.setDeviceActive(audio.ActiveDeviceType.SPEAKER, true, (err) => {
     if (err) {
         console.error('Failed to set the active status of the device. ${err.message}');
         return;
@@ -1359,7 +1359,7 @@ Sets a device to the active state. This API uses a promise to return the result.
 ```
-audioManager.setDeviceActive(audio.DeviceType.SPEAKER, true).then(() => {
+audioManager.setDeviceActive(audio.ActiveDeviceType.SPEAKER, true).then(() => {
     console.log('Promise returned to indicate that the device is set to the active status.');
 });
 ```
@@ -1382,7 +1382,7 @@ Checks whether a device is active. This API uses an asynchronous callback to return the result.
 **Example**
 ```
-audioManager.isDeviceActive(audio.DeviceType.SPEAKER, (err, value) => {
+audioManager.isDeviceActive(audio.ActiveDeviceType.SPEAKER, (err, value) => {
     if (err) {
         console.error('Failed to obtain the active status of the device. ${err.message}');
         return;
@@ -1415,7 +1415,7 @@ Checks whether a device is active. This API uses a promise to return the result.
 **Example**
 ```
-audioManager.isDeviceActive(audio.DeviceType.SPEAKER).then((value) => {
+audioManager.isDeviceActive(audio.ActiveDeviceType.SPEAKER).then((value) => {
     console.log('Promise returned to indicate that the active status of the device is obtained.' + value);
 });
 ```
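For reference, a minimal self-contained sketch of the corrected call pattern in the hunks above, assuming an app that imports `@ohos.multimedia.audio` and obtains its own `audioManager`; the flow and log strings are illustrative, not taken from the commit:

```
// Illustrative sketch (not part of the commit): activate the speaker and
// confirm its state. setDeviceActive()/isDeviceActive() expect an
// ActiveDeviceType value, not DeviceType.
import audio from '@ohos.multimedia.audio';

const audioManager = audio.getAudioManager();

async function activateSpeaker() {
    await audioManager.setDeviceActive(audio.ActiveDeviceType.SPEAKER, true);
    const active = await audioManager.isDeviceActive(audio.ActiveDeviceType.SPEAKER);
    console.info('Speaker active: ' + active);
}

activateSpeaker().catch((err) => {
    console.error('Failed to toggle the speaker. ' + err.message);
});
```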
@@ -1649,7 +1649,7 @@ var interAudioInterrupt = {
     contentType:0,
     pauseWhenDucked:true
 };
-this.audioManager.on('interrupt', interAudioInterrupt, (InterruptAction) => {
+audioManager.on('interrupt', interAudioInterrupt, (InterruptAction) => {
     if (InterruptAction.actionType === 0) {
         console.log("An event to gain the audio focus starts.");
         console.log("Focus gain event:" + JSON.stringify(InterruptAction));
@@ -1685,7 +1685,7 @@ var interAudioInterrupt = {
     contentType:0,
     pauseWhenDucked:true
 };
-this.audioManager.off('interrupt', interAudioInterrupt, (InterruptAction) => {
+audioManager.off('interrupt', interAudioInterrupt, (InterruptAction) => {
     if (InterruptAction.actionType === 0) {
         console.log("An event to release the audio focus starts.");
         console.log("Focus release event:" + JSON.stringify(InterruptAction));
@@ -1747,7 +1747,7 @@ This is a system API and cannot be called by third-party applications.
 **Example**
 ```
-audioManager.setAudioScene(audio.AudioSceneMode.AUDIO_SCENE_PHONE_CALL).then(() => {
+audioManager.setAudioScene(audio.AudioScene.AUDIO_SCENE_PHONE_CALL).then(() => {
     console.log('Promise returned to indicate a successful setting of the audio scene mode.');
 }).catch ((err) => {
     console.log('Failed to set the audio scene mode');
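A short sketch of the corrected enum in context, pairing the system-API setter with the public `getAudioScene()` getter to read the scene back; this is an assumption-based illustration, not text from the commit:

```
// Illustrative sketch: switch to the phone-call scene (system API) and read
// the current scene back. AudioScene is the correct enum, not AudioSceneMode.
import audio from '@ohos.multimedia.audio';

const audioManager = audio.getAudioManager();

audioManager.setAudioScene(audio.AudioScene.AUDIO_SCENE_PHONE_CALL).then(() => {
    return audioManager.getAudioScene();
}).then((scene) => {
    console.info('Current audio scene: ' + scene);
}).catch((err) => {
    console.error('Failed to switch the audio scene. ' + err.message);
});
```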
@@ -1906,6 +1906,7 @@ Obtains the renderer information of this **AudioRenderer** instance. This API uses a promise to return the result.
 **Example**
 ```
+var resultFlag = true;
 audioRenderer.getRendererInfo().then((rendererInfo) => {
     console.log('Renderer GetRendererInfo:');
     console.log('Renderer content:' + rendererInfo.content);
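For context, a hedged sketch of where the `audioRenderer` used in these examples could come from: creating a renderer with explicit `AudioRendererInfo` fields and reading them back with `getRendererInfo()`. The option values below are illustrative assumptions, not part of the commit:

```
// Illustrative sketch: create an AudioRenderer, then query its renderer info.
import audio from '@ohos.multimedia.audio';

const audioRendererOptions = {
    streamInfo: {
        samplingRate: audio.AudioSamplingRate.SAMPLE_RATE_44100,
        channels: audio.AudioChannel.CHANNEL_2,
        sampleFormat: audio.AudioSampleFormat.SAMPLE_FORMAT_S16LE,
        encodingType: audio.AudioEncodingType.ENCODING_TYPE_RAW
    },
    rendererInfo: {
        content: audio.ContentType.CONTENT_TYPE_MUSIC,
        usage: audio.StreamUsage.STREAM_USAGE_MEDIA,
        rendererFlags: 1
    }
};

audio.createAudioRenderer(audioRendererOptions).then((audioRenderer) => {
    return audioRenderer.getRendererInfo();
}).then((rendererInfo) => {
    console.info('Renderer content: ' + rendererInfo.content);
    console.info('Renderer usage: ' + rendererInfo.usage);
    console.info('Renderer flags: ' + rendererInfo.rendererFlags);
}).catch((err) => {
    console.error('createAudioRenderer failed. ' + err.message);
});
```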
@@ -2352,13 +2353,11 @@ Obtains a reasonable minimum buffer size in bytes for rendering. This API uses an asynchronous callback to return the result.
 **Example**
 ```
-audioRenderer.getBufferSize((err, bufferSize) => {
+var bufferSize = audioRenderer.getBufferSize(async(err, bufferSize) => {
     if (err) {
         console.error('getBufferSize error');
     }
 });
-let buf = new ArrayBuffer(bufferSize);
-ss.readSync(buf);
 ```

 ### getBufferSize<sup>8+</sup>
@@ -2378,11 +2377,12 @@ Obtains a reasonable minimum buffer size in bytes for rendering. This API uses a promise to return the result.
 **Example**
 ```
-audioRenderer.getBufferSize().then((bufferSize) => {
-    let buf = new ArrayBuffer(bufferSize);
-    ss.readSync(buf);
+var bufferSize;
+await audioRenderer.getBufferSize().then(async function (data) => {
+    console.info('AudioFrameworkRenderLog: getBufferSize :SUCCESS '+data);
+    bufferSize=data;
 }).catch((err) => {
-    console.log('ERROR: '+err.message);
+    console.info('AudioFrameworkRenderLog: getBufferSize :ERROR : '+err.message);
 });
 ```
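The removed `ss.readSync(buf)` lines belonged to a file-reading flow that the hunks above drop. As a hedged illustration of how the returned buffer size is typically consumed, here is a sketch that feeds file data to the renderer in `bufferSize` chunks; the file path, the choice of `fileio`, and the loop bounds are assumptions, not content from the commit:

```
// Illustrative sketch: use the minimum buffer size to drive a write loop.
import audio from '@ohos.multimedia.audio';
import fileio from '@ohos.fileio';

async function renderFile(audioRenderer, path) {
    const bufferSize = await audioRenderer.getBufferSize();
    await audioRenderer.start();

    const fd = fileio.openSync(path, 0o0);           // read-only; path is hypothetical
    const stat = fileio.statSync(path);
    const chunks = Math.floor(stat.size / bufferSize);

    for (let i = 0; i < chunks; i++) {
        const buf = new ArrayBuffer(bufferSize);
        fileio.readSync(fd, buf, { position: i * bufferSize });
        await audioRenderer.write(buf);              // write one bufferSize-sized chunk
    }

    await audioRenderer.drain();
    await audioRenderer.stop();
}
```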
@@ -2507,7 +2507,9 @@ Subscribes to audio interruption events. This API uses a callback to get interrupt events.
 **Example**
 ```
-audioRenderer.on('interrupt', (interruptEvent) => {
+var isPlay;
+var started;
+audioRenderer.on('interrupt', async(interruptEvent) => {
     if (interruptEvent.forceType == audio.InterruptForceType.INTERRUPT_FORCE) {
         switch (interruptEvent.hintType) {
             case audio.InterruptHint.INTERRUPT_HINT_PAUSE:
@@ -2523,11 +2525,30 @@ audioRenderer.on('interrupt', (interruptEvent) => {
         switch (interruptEvent.hintType) {
             case audio.InterruptHint.INTERRUPT_HINT_RESUME:
                 console.log('Resume force paused renderer or ignore');
-                startRenderer();
+                await audioRenderer.start().then(async function () {
+                    console.info('AudioInterruptMusic: renderInstant started :SUCCESS ');
+                    started = true;
+                }).catch((err) => {
+                    console.info('AudioInterruptMusic: renderInstant start :ERROR : '+err.message);
+                    started = false;
+                });
+                if (started) {
+                    isPlay = true;
+                    console.info('AudioInterruptMusic Renderer started : isPlay : '+isPlay);
+                } else {
+                    console.error('AudioInterruptMusic Renderer start failed');
+                }
                 break;
             case audio.InterruptHint.INTERRUPT_HINT_PAUSE:
                 console.log('Choose to pause or ignore');
-                pauseRenderer();
+                if (isPlay == true) {
+                    isPlay == false;
+                    console.info('AudioInterruptMusic: Media PAUSE : TRUE');
+                }
+                else {
+                    isPlay = true;
+                    console.info('AudioInterruptMusic: Media PLAY : TRUE');
+                }
                 break;
         }
     }
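Condensed for readability, a hedged sketch of the same interrupt-handling pattern; it reuses the `audioRenderer` and `audio` import from the surrounding examples, tracks the playback flag with plain assignments, and is an illustration of the approach rather than the commit's exact example:

```
// Illustrative sketch: track playback state across renderer interrupt events.
let isPlay = false;

audioRenderer.on('interrupt', async (interruptEvent) => {
    if (interruptEvent.forceType === audio.InterruptForceType.INTERRUPT_FORCE) {
        // The framework has already paused or stopped the stream; record it.
        isPlay = false;
        console.info('Forced interrupt, hint: ' + interruptEvent.hintType);
    } else if (interruptEvent.hintType === audio.InterruptHint.INTERRUPT_HINT_RESUME) {
        // The app decides whether to resume rendering.
        await audioRenderer.start();
        isPlay = true;
        console.info('Renderer resumed, isPlay: ' + isPlay);
    } else if (interruptEvent.hintType === audio.InterruptHint.INTERRUPT_HINT_PAUSE) {
        // The app decides whether to pause; here we simply pause.
        await audioRenderer.pause();
        isPlay = false;
        console.info('Renderer paused, isPlay: ' + isPlay);
    }
});
```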
@@ -2988,7 +3009,7 @@ audioCapturer.read(bufferSize, true, async(err, buffer) => {
     if (!err) {
         console.log("Success in reading the buffer data");
     }
-};
+});
 ```
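To round out the capturer fix above, a hedged sketch of a complete read loop built on `getBufferSize()` and `read()`; the capturer options and loop count are assumptions for illustration only:

```
// Illustrative sketch: capture a few buffers of audio with an AudioCapturer.
// Requires the ohos.permission.MICROPHONE permission.
import audio from '@ohos.multimedia.audio';

const audioCapturerOptions = {
    streamInfo: {
        samplingRate: audio.AudioSamplingRate.SAMPLE_RATE_44100,
        channels: audio.AudioChannel.CHANNEL_2,
        sampleFormat: audio.AudioSampleFormat.SAMPLE_FORMAT_S16LE,
        encodingType: audio.AudioEncodingType.ENCODING_TYPE_RAW
    },
    capturerInfo: {
        source: audio.SourceType.SOURCE_TYPE_MIC,
        capturerFlags: 1
    }
};

async function captureSome() {
    const audioCapturer = await audio.createAudioCapturer(audioCapturerOptions);
    const bufferSize = await audioCapturer.getBufferSize();
    await audioCapturer.start();

    for (let i = 0; i < 10; i++) {                   // read 10 buffers as a demo
        const buffer = await audioCapturer.read(bufferSize, true);
        console.log('Read ' + buffer.byteLength + ' bytes');
    }

    await audioCapturer.stop();
    await audioCapturer.release();
}
```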
......
@@ -1338,7 +1338,7 @@ setDeviceActive(deviceType: ActiveDeviceType, active: boolean, callback: AsyncCallback&lt;void&gt;): void
 **示例:**
 ```
-audioManager.setDeviceActive(audio.DeviceType.SPEAKER, true, (err) => {
+audioManager.setDeviceActive(audio.ActiveDeviceType.SPEAKER, true, (err) => {
     if (err) {
         console.error('Failed to set the active status of the device. ${err.message}');
         return;
@@ -1372,7 +1372,7 @@ setDeviceActive(deviceType: ActiveDeviceType, active: boolean): Promise&lt;void&gt;
 ```
-audioManager.setDeviceActive(audio.DeviceType.SPEAKER, true).then(() => {
+audioManager.setDeviceActive(audio.ActiveDeviceType.SPEAKER, true).then(() => {
     console.log('Promise returned to indicate that the device is set to the active status.');
 });
 ```
@@ -1395,7 +1395,7 @@ isDeviceActive(deviceType: ActiveDeviceType, callback: AsyncCallback&lt;boolean&gt;): void
 **示例:**
 ```
-audioManager.isDeviceActive(audio.DeviceType.SPEAKER, (err, value) => {
+audioManager.isDeviceActive(audio.ActiveDeviceType.SPEAKER, (err, value) => {
     if (err) {
         console.error('Failed to obtain the active status of the device. ${err.message}');
         return;
@@ -1428,7 +1428,7 @@ isDeviceActive(deviceType: ActiveDeviceType): Promise&lt;boolean&gt;
 **示例:**
 ```
-audioManager.isDeviceActive(audio.DeviceType.SPEAKER).then((value) => {
+audioManager.isDeviceActive(audio.ActiveDeviceType.SPEAKER).then((value) => {
     console.log('Promise returned to indicate that the active status of the device is obtained.' + value);
 });
 ```
@@ -1670,7 +1670,7 @@ var interAudioInterrupt = {
     contentType:0,
     pauseWhenDucked:true
 };
-this.audioManager.on('interrupt', interAudioInterrupt, (InterruptAction) => {
+audioManager.on('interrupt', interAudioInterrupt, (InterruptAction) => {
     if (InterruptAction.actionType === 0) {
         console.log("An event to gain the audio focus starts.");
         console.log("Focus gain event:" + JSON.stringify(InterruptAction));
@@ -1706,7 +1706,7 @@ var interAudioInterrupt = {
     contentType:0,
     pauseWhenDucked:true
 };
-this.audioManager.off('interrupt', interAudioInterrupt, (InterruptAction) => {
+audioManager.off('interrupt', interAudioInterrupt, (InterruptAction) => {
     if (InterruptAction.actionType === 0) {
         console.log("An event to release the audio focus starts.");
         console.log("Focus release event:" + JSON.stringify(InterruptAction));
@@ -1768,7 +1768,7 @@ setAudioScene\(scene: AudioScene\): Promise<void\>
 **示例:**
 ```
-audioManager.setAudioScene(audio.AudioSceneMode.AUDIO_SCENE_PHONE_CALL).then(() => {
+audioManager.setAudioScene(audio.AudioScene.AUDIO_SCENE_PHONE_CALL).then(() => {
     console.log('Promise returned to indicate a successful setting of the audio scene mode.');
 }).catch ((err) => {
     console.log('Failed to set the audio scene mode');
@@ -1927,6 +1927,7 @@ getRendererInfo(): Promise<AudioRendererInfo\>
 **示例:**
 ```
+var resultFlag = true;
 audioRenderer.getRendererInfo().then((rendererInfo) => {
     console.log('Renderer GetRendererInfo:');
     console.log('Renderer content:' + rendererInfo.content);
@@ -2373,13 +2374,11 @@ getBufferSize(callback: AsyncCallback\<number>): void
 **示例:**
 ```
-audioRenderer.getBufferSize((err, bufferSize) => {
+var bufferSize = audioRenderer.getBufferSize(async(err, bufferSize) => {
     if (err) {
         console.error('getBufferSize error');
     }
 });
-let buf = new ArrayBuffer(bufferSize);
-ss.readSync(buf);
 ```

 ### getBufferSize<sup>8+</sup>
@@ -2399,11 +2398,12 @@ getBufferSize(): Promise\<number>
 **示例:**
 ```
-audioRenderer.getBufferSize().then((bufferSize) => {
-    let buf = new ArrayBuffer(bufferSize);
-    ss.readSync(buf);
+var bufferSize;
+await audioRenderer.getBufferSize().then(async function (data) => {
+    console.info('AudioFrameworkRenderLog: getBufferSize :SUCCESS '+data);
+    bufferSize=data;
 }).catch((err) => {
-    console.log('ERROR: '+err.message);
+    console.info('AudioFrameworkRenderLog: getBufferSize :ERROR : '+err.message);
 });
 ```
@@ -2528,7 +2528,9 @@ on(type: 'interrupt', callback: Callback\<InterruptEvent>): void
 **示例:**
 ```
-audioRenderer.on('interrupt', (interruptEvent) => {
+var isPlay;
+var started;
+audioRenderer.on('interrupt', async(interruptEvent) => {
     if (interruptEvent.forceType == audio.InterruptForceType.INTERRUPT_FORCE) {
         switch (interruptEvent.hintType) {
             case audio.InterruptHint.INTERRUPT_HINT_PAUSE:
@@ -2544,11 +2546,30 @@ audioRenderer.on('interrupt', (interruptEvent) => {
         switch (interruptEvent.hintType) {
             case audio.InterruptHint.INTERRUPT_HINT_RESUME:
                 console.log('Resume force paused renderer or ignore');
-                startRenderer();
+                await audioRenderer.start().then(async function () {
+                    console.info('AudioInterruptMusic: renderInstant started :SUCCESS ');
+                    started = true;
+                }).catch((err) => {
+                    console.info('AudioInterruptMusic: renderInstant start :ERROR : '+err.message);
+                    started = false;
+                });
+                if (started) {
+                    isPlay = true;
+                    console.info('AudioInterruptMusic Renderer started : isPlay : '+isPlay);
+                } else {
+                    console.error('AudioInterruptMusic Renderer start failed');
+                }
                 break;
             case audio.InterruptHint.INTERRUPT_HINT_PAUSE:
                 console.log('Choose to pause or ignore');
-                pauseRenderer();
+                if (isPlay == true) {
+                    isPlay == false;
+                    console.info('AudioInterruptMusic: Media PAUSE : TRUE');
+                }
+                else {
+                    isPlay = true;
+                    console.info('AudioInterruptMusic: Media PLAY : TRUE');
+                }
                 break;
         }
     }
@@ -3009,7 +3030,7 @@ audioCapturer.read(bufferSize, true, async(err, buffer) => {
     if (!err) {
         console.log("Success in reading the buffer data");
     }
-};
+});
 ```
......