diff --git a/en/application-dev/reference/apis/js-apis-audio.md b/en/application-dev/reference/apis/js-apis-audio.md
index 3e1071581b34df1b3896b8605bab5f00732f588d..bd47176d4680b7a72b3b834dce75d96039b4c810 100644
--- a/en/application-dev/reference/apis/js-apis-audio.md
+++ b/en/application-dev/reference/apis/js-apis-audio.md
@@ -1322,7 +1322,7 @@ Sets a device to the active state. This API uses an asynchronous callback to ret
**Example**
```
-audioManager.setDeviceActive(audio.DeviceType.SPEAKER, true, (err) => {
+audioManager.setDeviceActive(audio.ActiveDeviceType.SPEAKER, true, (err) => {
if (err) {
console.error('Failed to set the active status of the device. ${err.message}');
return;
@@ -1356,7 +1356,7 @@ Sets a device to the active state. This API uses a promise to return the result.
```
-audioManager.setDeviceActive(audio.DeviceType.SPEAKER, true).then(() => {
+audioManager.setDeviceActive(audio.ActiveDeviceType.SPEAKER, true).then(() => {
console.log('Promise returned to indicate that the device is set to the active status.');
});
```
@@ -1379,7 +1379,7 @@ Checks whether a device is active. This API uses an asynchronous callback to ret
**Example**
```
-audioManager.isDeviceActive(audio.DeviceType.SPEAKER, (err, value) => {
+audioManager.isDeviceActive(audio.ActiveDeviceType.SPEAKER, (err, value) => {
if (err) {
console.error('Failed to obtain the active status of the device. ${err.message}');
return;
@@ -1412,7 +1412,7 @@ Checks whether a device is active. This API uses a promise to return the result.
**Example**
```
-audioManager.isDeviceActive(audio.DeviceType.SPEAKER).then((value) => {
+audioManager.isDeviceActive(audio.ActiveDeviceType.SPEAKER).then((value) => {
console.log('Promise returned to indicate that the active status of the device is obtained.' + value);
});
```
@@ -1646,7 +1646,7 @@ var interAudioInterrupt = {
contentType:0,
pauseWhenDucked:true
};
-this.audioManager.on('interrupt', interAudioInterrupt, (InterruptAction) => {
+audioManager.on('interrupt', interAudioInterrupt, (InterruptAction) => {
if (InterruptAction.actionType === 0) {
console.log("An event to gain the audio focus starts.");
console.log("Focus gain event:" + JSON.stringify(InterruptAction));
@@ -1682,7 +1682,7 @@ var interAudioInterrupt = {
contentType:0,
pauseWhenDucked:true
};
-this.audioManager.off('interrupt', interAudioInterrupt, (InterruptAction) => {
+audioManager.off('interrupt', interAudioInterrupt, (InterruptAction) => {
if (InterruptAction.actionType === 0) {
console.log("An event to release the audio focus starts.");
console.log("Focus release event:" + JSON.stringify(InterruptAction));
@@ -1744,7 +1744,7 @@ This is a system API and cannot be called by third-party applications.
**Example**
```
-audioManager.setAudioScene(audio.AudioSceneMode.AUDIO_SCENE_PHONE_CALL).then(() => {
+audioManager.setAudioScene(audio.AudioScene.AUDIO_SCENE_PHONE_CALL).then(() => {
console.log('Promise returned to indicate a successful setting of the audio scene mode.');
}).catch ((err) => {
console.log('Failed to set the audio scene mode');
@@ -1903,6 +1903,7 @@ Obtains the renderer information of this **AudioRenderer** instance. This API us
**Example**
```
+var resultFlag = true;
audioRenderer.getRendererInfo().then((rendererInfo) => {
console.log('Renderer GetRendererInfo:');
console.log('Renderer content:' + rendererInfo.content);
@@ -2349,13 +2350,11 @@ Obtains a reasonable minimum buffer size in bytes for rendering. This API uses a
**Example**
```
-audioRenderer.getBufferSize((err, bufferSize) => {
+var bufferSize = audioRenderer.getBufferSize(async(err, bufferSize) => {
if (err) {
console.error('getBufferSize error');
}
});
-let buf = new ArrayBuffer(bufferSize);
-ss.readSync(buf);
```
### getBufferSize8+
@@ -2375,11 +2374,12 @@ Obtains a reasonable minimum buffer size in bytes for rendering. This API uses a
**Example**
```
-audioRenderer.getBufferSize().then((bufferSize) => {
- let buf = new ArrayBuffer(bufferSize);
- ss.readSync(buf);
+var bufferSize;
+await audioRenderer.getBufferSize().then(async function (data) {
+ console.info('AudioFrameworkRenderLog: getBufferSize :SUCCESS '+data);
+ bufferSize=data;
}).catch((err) => {
- console.log('ERROR: '+err.message);
+ console.info('AudioFrameworkRenderLog: getBufferSize :ERROR : '+err.message);
});
```
@@ -2504,7 +2504,9 @@ Subscribes to audio interruption events. This API uses a callback to get interru
**Example**
```
-audioRenderer.on('interrupt', (interruptEvent) => {
+var isPlay;
+var started;
+audioRenderer.on('interrupt', async(interruptEvent) => {
if (interruptEvent.forceType == audio.InterruptForceType.INTERRUPT_FORCE) {
switch (interruptEvent.hintType) {
case audio.InterruptHint.INTERRUPT_HINT_PAUSE:
@@ -2517,14 +2519,33 @@ audioRenderer.on('interrupt', (interruptEvent) => {
break;
}
} else if (interruptEvent.forceType == audio.InterruptForceType.INTERRUPT_SHARE) {
- switch (interruptEvent.hintType) {
+ switch (interruptEvent.hintType) {
case audio.InterruptHint.INTERRUPT_HINT_RESUME:
console.log('Resume force paused renderer or ignore');
- startRenderer();
+ await audioRenderer.start().then(async function () {
+ console.info('AudioInterruptMusic: renderInstant started :SUCCESS ');
+ started = true;
+ }).catch((err) => {
+ console.info('AudioInterruptMusic: renderInstant start :ERROR : '+err.message);
+ started = false;
+ });
+ if (started) {
+ isPlay = true;
+ console.info('AudioInterruptMusic Renderer started : isPlay : '+isPlay);
+ } else {
+ console.error('AudioInterruptMusic Renderer start failed');
+ }
break;
case audio.InterruptHint.INTERRUPT_HINT_PAUSE:
console.log('Choose to pause or ignore');
- pauseRenderer();
+ if (isPlay == true) {
+      isPlay = false;
+ console.info('AudioInterruptMusic: Media PAUSE : TRUE');
+ }
+ else {
+ isPlay = true;
+ console.info('AudioInterruptMusic: Media PLAY : TRUE');
+ }
break;
}
}
@@ -2985,7 +3006,7 @@ audioCapturer.read(bufferSize, true, async(err, buffer) => {
if (!err) {
console.log("Success in reading the buffer data");
}
-};
+});
```
diff --git a/zh-cn/application-dev/reference/apis/js-apis-audio.md b/zh-cn/application-dev/reference/apis/js-apis-audio.md
index aaa2a62260f36307bf97ff1c1e2b021dcc50a1d1..220de9f46c8285353f5c93279a89458a7a1f87ef 100644
--- a/zh-cn/application-dev/reference/apis/js-apis-audio.md
+++ b/zh-cn/application-dev/reference/apis/js-apis-audio.md
@@ -1336,7 +1336,7 @@ setDeviceActive(deviceType: ActiveDeviceType, active: boolean, callback: AsyncCa
**示例:**
```
-audioManager.setDeviceActive(audio.DeviceType.SPEAKER, true, (err) => {
+audioManager.setDeviceActive(audio.ActiveDeviceType.SPEAKER, true, (err) => {
if (err) {
console.error('Failed to set the active status of the device. ${err.message}');
return;
@@ -1370,7 +1370,7 @@ setDeviceActive(deviceType: ActiveDeviceType, active: boolean): Promise<void&
```
-audioManager.setDeviceActive(audio.DeviceType.SPEAKER, true).then(() => {
+audioManager.setDeviceActive(audio.ActiveDeviceType.SPEAKER, true).then(() => {
console.log('Promise returned to indicate that the device is set to the active status.');
});
```
@@ -1393,7 +1393,7 @@ isDeviceActive(deviceType: ActiveDeviceType, callback: AsyncCallback<boolean&
**示例:**
```
-audioManager.isDeviceActive(audio.DeviceType.SPEAKER, (err, value) => {
+audioManager.isDeviceActive(audio.ActiveDeviceType.SPEAKER, (err, value) => {
if (err) {
console.error('Failed to obtain the active status of the device. ${err.message}');
return;
@@ -1426,7 +1426,7 @@ isDeviceActive(deviceType: ActiveDeviceType): Promise<boolean>
**示例:**
```
-audioManager.isDeviceActive(audio.DeviceType.SPEAKER).then((value) => {
+audioManager.isDeviceActive(audio.ActiveDeviceType.SPEAKER).then((value) => {
console.log('Promise returned to indicate that the active status of the device is obtained.' + value);
});
```
@@ -1668,7 +1668,7 @@ var interAudioInterrupt = {
contentType:0,
pauseWhenDucked:true
};
-this.audioManager.on('interrupt', interAudioInterrupt, (InterruptAction) => {
+audioManager.on('interrupt', interAudioInterrupt, (InterruptAction) => {
if (InterruptAction.actionType === 0) {
console.log("An event to gain the audio focus starts.");
console.log("Focus gain event:" + JSON.stringify(InterruptAction));
@@ -1704,7 +1704,7 @@ var interAudioInterrupt = {
contentType:0,
pauseWhenDucked:true
};
-this.audioManager.off('interrupt', interAudioInterrupt, (InterruptAction) => {
+audioManager.off('interrupt', interAudioInterrupt, (InterruptAction) => {
if (InterruptAction.actionType === 0) {
console.log("An event to release the audio focus starts.");
console.log("Focus release event:" + JSON.stringify(InterruptAction));
@@ -1766,7 +1766,7 @@ setAudioScene\(scene: AudioScene\): Promise
**示例:**
```
-audioManager.setAudioScene(audio.AudioSceneMode.AUDIO_SCENE_PHONE_CALL).then(() => {
+audioManager.setAudioScene(audio.AudioScene.AUDIO_SCENE_PHONE_CALL).then(() => {
console.log('Promise returned to indicate a successful setting of the audio scene mode.');
}).catch ((err) => {
console.log('Failed to set the audio scene mode');
@@ -1926,6 +1926,7 @@ getRendererInfo(): Promise
**示例:**
```
+var resultFlag = true;
audioRenderer.getRendererInfo().then((rendererInfo) => {
console.log('Renderer GetRendererInfo:');
console.log('Renderer content:' + rendererInfo.content);
@@ -2372,13 +2373,11 @@ getBufferSize(callback: AsyncCallback\): void
**示例:**
```
-audioRenderer.getBufferSize((err, bufferSize) => {
+var bufferSize = audioRenderer.getBufferSize(async(err, bufferSize) => {
if (err) {
console.error('getBufferSize error');
}
});
-let buf = new ArrayBuffer(bufferSize);
-ss.readSync(buf);
```
### getBufferSize8+
@@ -2398,11 +2397,12 @@ getBufferSize(): Promise\
**示例:**
```
-audioRenderer.getBufferSize().then((bufferSize) => {
- let buf = new ArrayBuffer(bufferSize);
- ss.readSync(buf);
+var bufferSize;
+await audioRenderer.getBufferSize().then(async function (data) {
+ console.info('AudioFrameworkRenderLog: getBufferSize :SUCCESS '+data);
+ bufferSize=data;
}).catch((err) => {
- console.log('ERROR: '+err.message);
+ console.info('AudioFrameworkRenderLog: getBufferSize :ERROR : '+err.message);
});
```
@@ -2527,7 +2527,9 @@ on(type: 'interrupt', callback: Callback\): void
**示例:**
```
-audioRenderer.on('interrupt', (interruptEvent) => {
+var isPlay;
+var started;
+audioRenderer.on('interrupt', async(interruptEvent) => {
if (interruptEvent.forceType == audio.InterruptForceType.INTERRUPT_FORCE) {
switch (interruptEvent.hintType) {
case audio.InterruptHint.INTERRUPT_HINT_PAUSE:
@@ -2540,14 +2542,33 @@ audioRenderer.on('interrupt', (interruptEvent) => {
break;
}
} else if (interruptEvent.forceType == audio.InterruptForceType.INTERRUPT_SHARE) {
- switch (interruptEvent.hintType) {
+ switch (interruptEvent.hintType) {
case audio.InterruptHint.INTERRUPT_HINT_RESUME:
console.log('Resume force paused renderer or ignore');
- startRenderer();
+ await audioRenderer.start().then(async function () {
+ console.info('AudioInterruptMusic: renderInstant started :SUCCESS ');
+ started = true;
+ }).catch((err) => {
+ console.info('AudioInterruptMusic: renderInstant start :ERROR : '+err.message);
+ started = false;
+ });
+ if (started) {
+ isPlay = true;
+ console.info('AudioInterruptMusic Renderer started : isPlay : '+isPlay);
+ } else {
+ console.error('AudioInterruptMusic Renderer start failed');
+ }
break;
case audio.InterruptHint.INTERRUPT_HINT_PAUSE:
console.log('Choose to pause or ignore');
- pauseRenderer();
+ if (isPlay == true) {
+      isPlay = false;
+ console.info('AudioInterruptMusic: Media PAUSE : TRUE');
+ }
+ else {
+ isPlay = true;
+ console.info('AudioInterruptMusic: Media PLAY : TRUE');
+ }
break;
}
}
@@ -3008,7 +3029,7 @@ audioCapturer.read(bufferSize, true, async(err, buffer) => {
if (!err) {
console.log("Success in reading the buffer data");
}
-};
+});
```