diff --git a/en/application-dev/media/audio-capturer.md b/en/application-dev/media/audio-capturer.md index 8371b6248d71f48e9088da849dc36c3edb2be3cf..f7b01ce2a387af3471b297de329fe3267b9e9785 100644 --- a/en/application-dev/media/audio-capturer.md +++ b/en/application-dev/media/audio-capturer.md @@ -27,32 +27,43 @@ Before developing the audio data collection feature, configure the **ohos.permis For details about the APIs, see [AudioCapturer in Audio Management](../reference/apis/js-apis-audio.md#audiocapturer8). -1. Use **createAudioCapturer()** to create an **AudioCapturer** instance. +1. Use **createAudioCapturer()** to create a global **AudioCapturer** instance. Set parameters of the **AudioCapturer** instance in **audioCapturerOptions**. This instance is used to capture audio, control and obtain the recording state, and register a callback for notification. ```js - import audio from '@ohos.multimedia.audio'; + import audio from '@ohos.multimedia.audio'; + import fs from '@ohos.file.fs'; // fs is used in step 3 to write the captured data to a file. + + // Perform a self-test on APIs related to audio capturing. + @Entry + @Component + struct AudioCapturerDemo { + @State message: string = 'Hello World' + private audioCapturer: audio.AudioCapturer; // The capturer instance is used globally within the component. + + async initAudioCapturer(){ + let audioStreamInfo = { + samplingRate: audio.AudioSamplingRate.SAMPLE_RATE_44100, + channels: audio.AudioChannel.CHANNEL_1, + sampleFormat: audio.AudioSampleFormat.SAMPLE_FORMAT_S16LE, + encodingType: audio.AudioEncodingType.ENCODING_TYPE_RAW + } + + let audioCapturerInfo = { + source: audio.SourceType.SOURCE_TYPE_MIC, + capturerFlags: 0 // 0 is the extended flag bit of the audio capturer. The default value is 0. + } + + let audioCapturerOptions = { + streamInfo: audioStreamInfo, + capturerInfo: audioCapturerInfo + } + + this.audioCapturer = await audio.createAudioCapturer(audioCapturerOptions); + console.log('AudioRecLog: Create audio capturer success.'); + } - let audioStreamInfo = { - samplingRate: audio.AudioSamplingRate.SAMPLE_RATE_44100, - channels: audio.AudioChannel.CHANNEL_1, - sampleFormat: audio.AudioSampleFormat.SAMPLE_FORMAT_S16LE, - encodingType: audio.AudioEncodingType.ENCODING_TYPE_RAW - } - - let audioCapturerInfo = { - source: audio.SourceType.SOURCE_TYPE_MIC, - capturerFlags: 0 // 0 is the extended flag bit of the audio capturer. The default value is 0. - } - - let audioCapturerOptions = { - streamInfo: audioStreamInfo, - capturerInfo: audioCapturerInfo - } - - let audioCapturer = await audio.createAudioCapturer(audioCapturerOptions); - console.log('AudioRecLog: Create audio capturer success.'); ``` 2. Use **start()** to start audio recording. @@ -60,23 +71,18 @@ For details about the APIs, see [AudioCapturer in Audio Management](../reference The capturer state will be **STATE_RUNNING** once the audio capturer is started. The application can then begin reading buffers. ```js - import audio from '@ohos.multimedia.audio'; - - async function startCapturer() { - let state = audioCapturer.state; + async startCapturer() { + let state = this.audioCapturer.state; // The audio capturer should be in the STATE_PREPARED, STATE_PAUSED, or STATE_STOPPED state when start() is called. 
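+ // In any other state (for example STATE_RUNNING or STATE_RELEASED), this example simply skips the start() call.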
- if (state != audio.AudioState.STATE_PREPARED || state != audio.AudioState.STATE_PAUSED || - state != audio.AudioState.STATE_STOPPED) { - console.info('Capturer is not in a correct state to start'); - return; - } - await audioCapturer.start(); - - state = audioCapturer.state; - if (state == audio.AudioState.STATE_RUNNING) { - console.info('AudioRecLog: Capturer started'); - } else { - console.error('AudioRecLog: Capturer start failed'); + if (state == audio.AudioState.STATE_PREPARED || state == audio.AudioState.STATE_PAUSED || + state == audio.AudioState.STATE_STOPPED) { + await this.audioCapturer.start(); + state = this.audioCapturer.state; + if (state == audio.AudioState.STATE_RUNNING) { + console.info('AudioRecLog: Capturer started'); + } else { + console.error('AudioRecLog: Capturer start failed'); + } } } ``` @@ -86,91 +92,88 @@ For details about the APIs, see [AudioCapturer in Audio Management](../reference The following example shows how to write recorded data into a file. ```js - import fs from '@ohos.file.fs'; - - let state = audioCapturer.state; - // The read operation can be performed only when the state is STATE_RUNNING. - if (state != audio.AudioState.STATE_RUNNING) { - console.info('Capturer is not in a correct state to read'); - return; - } - - const path = '/data/data/.pulse_dir/capture_js.wav'; // Path for storing the collected audio file. - let file = fs.openSync(filePath, 0o2); - let fd = file.fd; - if (file !== null) { - console.info('AudioRecLog: file created'); - } else { - console.info('AudioRecLog: file create : FAILED'); - return; - } - - if (fd !== null) { - console.info('AudioRecLog: file fd opened in append mode'); - } - - let numBuffersToCapture = 150; // Write data for 150 times. - let count = 0; - while (numBuffersToCapture) { - let bufferSize = await audioCapturer.getBufferSize(); - let buffer = await audioCapturer.read(bufferSize, true); - let options = { - offset: count * this.bufferSize, - length: this.bufferSize + async readData(){ + let state = this.audioCapturer.state; + // The read operation can be performed only when the state is STATE_RUNNING. + if (state != audio.AudioState.STATE_RUNNING) { + console.info('Capturer is not in a correct state to read'); + return; } - if (typeof(buffer) == undefined) { - console.info('AudioRecLog: read buffer failed'); + const path = '/data/data/.pulse_dir/capture_js.wav'; // Path for storing the collected audio file. + let file = fs.openSync(path, 0o2); + let fd = file.fd; + if (file !== null) { + console.info('AudioRecLog: file created'); } else { - let number = fs.writeSync(fd, buffer, options); - console.info(`AudioRecLog: data written: ${number}`); - } - numBuffersToCapture--; - count++; + console.info('AudioRecLog: file create : FAILED'); + return; + } + if (fd !== null) { + console.info('AudioRecLog: file fd opened in append mode'); + } + let numBuffersToCapture = 150; // Write data for 150 times. + let count = 0; + while (numBuffersToCapture) { + this.bufferSize = await this.audioCapturer.getBufferSize(); + let buffer = await this.audioCapturer.read(this.bufferSize, true); + let options = { + offset: count * this.bufferSize, + length: this.bufferSize + } + if (typeof(buffer) == undefined) { + console.info('AudioRecLog: read buffer failed'); + } else { + let number = fs.writeSync(fd, buffer, options); + console.info(`AudioRecLog: data written: ${number}`); + } + numBuffersToCapture--; + count++; + } } ``` 4. Once the recording is complete, call **stop()** to stop the recording. 
```js - async function StopCapturer() { - let state = audioCapturer.state; - // The audio capturer can be stopped only when it is in STATE_RUNNING or STATE_PAUSED state. - if (state != audio.AudioState.STATE_RUNNING && state != audio.AudioState.STATE_PAUSED) { - console.info('AudioRecLog: Capturer is not running or paused'); - return; - } - - await audioCapturer.stop(); - - state = audioCapturer.state; - if (state == audio.AudioState.STATE_STOPPED) { - console.info('AudioRecLog: Capturer stopped'); - } else { - console.error('AudioRecLog: Capturer stop failed'); - } - } + async StopCapturer() { + let state = this.audioCapturer.state; + // The audio capturer can be stopped only when it is in STATE_RUNNING or STATE_PAUSED state. + if (state != audio.AudioState.STATE_RUNNING && state != audio.AudioState.STATE_PAUSED) { + console.info('AudioRecLog: Capturer is not running or paused'); + return; + } + + await this.audioCapturer.stop(); + + state = this.audioCapturer.state; + if (state == audio.AudioState.STATE_STOPPED) { + console.info('AudioRecLog: Capturer stopped'); + } else { + console.error('AudioRecLog: Capturer stop failed'); + } + } ``` 5. After the task is complete, call **release()** to release related resources. ```js - async function releaseCapturer() { - let state = audioCapturer.state; - // The audio capturer can be released only when it is not in the STATE_RELEASED or STATE_NEW state. - if (state == audio.AudioState.STATE_RELEASED || state == audio.AudioState.STATE_NEW) { - console.info('AudioRecLog: Capturer already released'); - return; - } - - await audioCapturer.release(); - - state = audioCapturer.state; - if (state == audio.AudioState.STATE_RELEASED) { - console.info('AudioRecLog: Capturer released'); - } else { - console.info('AudioRecLog: Capturer release failed'); - } - } + async releaseCapturer() { + let state = this.audioCapturer.state; + // The audio capturer can be released only when it is not in the STATE_RELEASED or STATE_NEW state. + if (state == audio.AudioState.STATE_RELEASED || state == audio.AudioState.STATE_NEW) { + console.info('AudioRecLog: Capturer already released'); + return; + } + + await this.audioCapturer.release(); + + state = this.audioCapturer.state; + if (state == audio.AudioState.STATE_RELEASED) { + console.info('AudioRecLog: Capturer released'); + } else { + console.info('AudioRecLog: Capturer release failed'); + } + } ``` 6. (Optional) Obtain the audio capturer information. @@ -178,23 +181,20 @@ For details about the APIs, see [AudioCapturer in Audio Management](../reference You can use the following code to obtain the audio capturer information: ```js - // Obtain the audio capturer state. - let state = audioCapturer.state; - - // Obtain the audio capturer information. - let audioCapturerInfo : audio.AuduioCapturerInfo = await audioCapturer.getCapturerInfo(); - - // Obtain the audio stream information. - let audioStreamInfo : audio.AudioStreamInfo = await audioCapturer.getStreamInfo(); - - // Obtain the audio stream ID. - let audioStreamId : number = await audioCapturer.getAudioStreamId(); - - // Obtain the Unix timestamp, in nanoseconds. - let audioTime : number = await audioCapturer.getAudioTime(); - - // Obtain a proper minimum buffer size. - let bufferSize : number = await audioCapturer.getBufferSize(); + async getAudioCapturerInfo(){ + // Obtain the audio capturer state. + let state = this.audioCapturer.state; + // Obtain the audio capturer information. 
+ let audioCapturerInfo : audio.AudioCapturerInfo = await this.audioCapturer.getCapturerInfo(); + // Obtain the audio stream information. + let audioStreamInfo : audio.AudioStreamInfo = await this.audioCapturer.getStreamInfo(); + // Obtain the audio stream ID. + let audioStreamId : number = await this.audioCapturer.getAudioStreamId(); + // Obtain the Unix timestamp, in nanoseconds. + let audioTime : number = await this.audioCapturer.getAudioTime(); + // Obtain a proper minimum buffer size. + let bufferSize : number = await this.audioCapturer.getBufferSize(); + } ``` 7. (Optional) Use **on('markReach')** to subscribe to the mark reached event, and use **off('markReach')** to unsubscribe from the event. @@ -202,12 +202,13 @@ For details about the APIs, see [AudioCapturer in Audio Management](../reference After the mark reached event is subscribed to, when the number of frames collected by the audio capturer reaches the specified value, a callback is triggered and the specified value is returned. ```js - audioCapturer.on('markReach', (reachNumber) => { - console.info('Mark reach event Received'); - console.info(`The Capturer reached frame: ${reachNumber}`); - }); - - audioCapturer.off('markReach'); // Unsubscribe from the mark reached event. This event will no longer be listened for. + async markReach(){ + this.audioCapturer.on('markReach', 10, (reachNumber) => { + console.info('Mark reach event Received'); + console.info(`The Capturer reached frame: ${reachNumber}`); + }); + this.audioCapturer.off('markReach'); // Unsubscribe from the mark reached event. This event will no longer be listened for. + } ``` 8. (Optional) Use **on('periodReach')** to subscribe to the period reached event, and use **off('periodReach')** to unsubscribe from the event. @@ -215,40 +216,43 @@ For details about the APIs, see [AudioCapturer in Audio Management](../reference After the period reached event is subscribed to, each time the number of frames collected by the audio capturer reaches the specified value, a callback is triggered and the specified value is returned. ```js - audioCapturer.on('periodReach', (reachNumber) => { - console.info('Period reach event Received'); - console.info(`In this period, the Capturer reached frame: ${reachNumber}`); - }); - - audioCapturer.off('periodReach'); // Unsubscribe from the period reached event. This event will no longer be listened for. + async periodReach(){ + this.audioCapturer.on('periodReach', 10, (reachNumber) => { + console.info('Period reach event Received'); + console.info(`In this period, the Capturer reached frame: ${reachNumber}`); + }); + this.audioCapturer.off('periodReach'); // Unsubscribe from the period reached event. This event will no longer be listened for. + } ``` 9. If your application needs to perform some operations when the audio capturer state is updated, it can subscribe to the state change event. When the audio capturer state is updated, the application receives a callback containing the event type. 
```js - audioCapturer.on('stateChange', (state) => { - console.info(`AudioCapturerLog: Changed State to : ${state}`) - switch (state) { - case audio.AudioState.STATE_PREPARED: - console.info('--------CHANGE IN AUDIO STATE----------PREPARED--------------'); - console.info('Audio State is : Prepared'); - break; - case audio.AudioState.STATE_RUNNING: - console.info('--------CHANGE IN AUDIO STATE----------RUNNING--------------'); - console.info('Audio State is : Running'); - break; - case audio.AudioState.STATE_STOPPED: - console.info('--------CHANGE IN AUDIO STATE----------STOPPED--------------'); - console.info('Audio State is : stopped'); - break; - case audio.AudioState.STATE_RELEASED: - console.info('--------CHANGE IN AUDIO STATE----------RELEASED--------------'); - console.info('Audio State is : released'); - break; - default: - console.info('--------CHANGE IN AUDIO STATE----------INVALID--------------'); - console.info('Audio State is : invalid'); - break; - } - }); + async stateChange(){ + this.audioCapturer.on('stateChange', (state) => { + console.info(`AudioCapturerLog: Changed State to : ${state}`) + switch (state) { + case audio.AudioState.STATE_PREPARED: + console.info('--------CHANGE IN AUDIO STATE----------PREPARED--------------'); + console.info('Audio State is : Prepared'); + break; + case audio.AudioState.STATE_RUNNING: + console.info('--------CHANGE IN AUDIO STATE----------RUNNING--------------'); + console.info('Audio State is : Running'); + break; + case audio.AudioState.STATE_STOPPED: + console.info('--------CHANGE IN AUDIO STATE----------STOPPED--------------'); + console.info('Audio State is : stopped'); + break; + case audio.AudioState.STATE_RELEASED: + console.info('--------CHANGE IN AUDIO STATE----------RELEASED--------------'); + console.info('Audio State is : released'); + break; + default: + console.info('--------CHANGE IN AUDIO STATE----------INVALID--------------'); + console.info('Audio State is : invalid'); + break; + } + }); + } ``` diff --git a/en/application-dev/media/audio-renderer.md b/en/application-dev/media/audio-renderer.md index 4a39544e7483b68d0bc15b00d643c8403dbded46..0a58ea5251744162d9948c23e75351b298a95bb8 100644 --- a/en/application-dev/media/audio-renderer.md +++ b/en/application-dev/media/audio-renderer.md @@ -19,61 +19,68 @@ The following figure shows the audio renderer state transitions. ![audio-renderer-state](figures/audio-renderer-state.png) - **PREPARED**: The audio renderer enters this state by calling **create()**. - - **RUNNING**: The audio renderer enters this state by calling **start()** when it is in the **PREPARED** state or by calling **start()** when it is in the **STOPPED** state. - - **PAUSED**: The audio renderer enters this state by calling **pause()** when it is in the **RUNNING** state. When the audio playback is paused, it can call **start()** to resume the playback. - - **STOPPED**: The audio renderer enters this state by calling **stop()** when it is in the **PAUSED** or **RUNNING** state. - - **RELEASED**: The audio renderer enters this state by calling **release()** when it is in the **PREPARED**, **PAUSED**, or **STOPPED** state. In this state, the audio renderer releases all occupied hardware and software resources and will not transit to any other state. ## How to Develop For details about the APIs, see [AudioRenderer in Audio Management](../reference/apis/js-apis-audio.md#audiorenderer8). -1. Use **createAudioRenderer()** to create an **AudioRenderer** instance. - +1. 
Use **createAudioRenderer()** to create a global **AudioRenderer** instance. Set parameters of the **AudioRenderer** instance in **audioRendererOptions**. This instance is used to render audio, control and obtain the rendering status, and register a callback for notification. ```js - import audio from '@ohos.multimedia.audio'; - + import audio from '@ohos.multimedia.audio'; + import fs from '@ohos.file.fs'; + + // Perform a self-test on APIs related to audio rendering. + @Entry + @Component + struct AudioRenderer1129 { + private audioRenderer: audio.AudioRenderer; + private bufferSize; // Used by the write operation in step 3. + private audioRenderer1: audio.AudioRenderer; // Used in the complete example in step 14. + private audioRenderer2: audio.AudioRenderer; // Used in the complete example in step 14. + + async initAudioRender(){ let audioStreamInfo = { - samplingRate: audio.AudioSamplingRate.SAMPLE_RATE_44100, - channels: audio.AudioChannel.CHANNEL_1, - sampleFormat: audio.AudioSampleFormat.SAMPLE_FORMAT_S16LE, - encodingType: audio.AudioEncodingType.ENCODING_TYPE_RAW + samplingRate: audio.AudioSamplingRate.SAMPLE_RATE_44100, + channels: audio.AudioChannel.CHANNEL_1, + sampleFormat: audio.AudioSampleFormat.SAMPLE_FORMAT_S16LE, + encodingType: audio.AudioEncodingType.ENCODING_TYPE_RAW } let audioRendererInfo = { - content: audio.ContentType.CONTENT_TYPE_SPEECH, - usage: audio.StreamUsage.STREAM_USAGE_VOICE_COMMUNICATION, - rendererFlags: 0 // 0 is the extended flag bit of the audio renderer. The default value is 0. + content: audio.ContentType.CONTENT_TYPE_SPEECH, + usage: audio.StreamUsage.STREAM_USAGE_VOICE_COMMUNICATION, + rendererFlags: 0 // 0 is the extended flag bit of the audio renderer. The default value is 0. } let audioRendererOptions = { - streamInfo: audioStreamInfo, - rendererInfo: audioRendererInfo - } - - let audioRenderer = await audio.createAudioRenderer(audioRendererOptions); + streamInfo: audioStreamInfo, + rendererInfo: audioRendererInfo + } + this.audioRenderer = await audio.createAudioRenderer(audioRendererOptions); console.log("Create audio renderer success."); + } + } ``` 2. Use **start()** to start audio rendering. - + ```js - async function startRenderer() { - let state = audioRenderer.state; + async startRenderer() { + let state = this.audioRenderer.state; // The audio renderer should be in the STATE_PREPARED, STATE_PAUSED, or STATE_STOPPED state when start() is called. if (state != audio.AudioState.STATE_PREPARED && state != audio.AudioState.STATE_PAUSED && - state != audio.AudioState.STATE_STOPPED) { + state != audio.AudioState.STATE_STOPPED) { console.info('Renderer is not in a correct state to start'); return; } - await audioRenderer.start(); + await this.audioRenderer.start(); - state = audioRenderer.state; + state = this.audioRenderer.state; if (state == audio.AudioState.STATE_RUNNING) { console.info('Renderer started'); } else { @@ -81,116 +88,102 @@ For details about the APIs, see [AudioRenderer in Audio Management](../reference } } ``` + The renderer state will be **STATE_RUNNING** once the audio renderer is started. The application can then begin writing buffers. - - + 3. Call **write()** to write data to the buffer. - Read the audio data to be played to the buffer. Call **write()** repeatedly to write the data to the buffer. + Read the audio data to be played into the buffer, and call **write()** repeatedly to write the data to the buffer. The **fs** module used here is imported in step 1 ('@ohos.file.fs'). 
```js - import fs from '@ohos.file.fs'; - import audio from '@ohos.multimedia.audio'; - - async function writeBuffer(buf) { - // The write operation can be performed only when the state is STATE_RUNNING. - if (audioRenderer.state != audio.AudioState.STATE_RUNNING) { - console.error('Renderer is not running, do not write'); - return; - } - let writtenbytes = await audioRenderer.write(buf); - console.info(`Actual written bytes: ${writtenbytes} `); - if (writtenbytes < 0) { - console.error('Write buffer failed. check the state of renderer'); - } - } + async writeData(){ + // Set a proper buffer size for the audio renderer. You can also select a buffer of another size. + this.bufferSize = await this.audioRenderer.getBufferSize(); + let dir = globalThis.fileDir; // You must use the sandbox path. + const filePath = dir + '/file_example_WAV_2MG.wav'; // The file to render is in the following path: /data/storage/el2/base/haps/entry/files/file_example_WAV_2MG.wav + console.info(`file filePath: ${ filePath}`); - // Set a proper buffer size for the audio renderer. You can also select a buffer of another size. - const bufferSize = await audioRenderer.getBufferSize(); - let dir = globalThis.fileDir; // You must use the sandbox path. - const filePath = dir + '/file_example_WAV_2MG.wav'; // The file to render is in the following path: /data/storage/el2/base/haps/entry/files/file_example_WAV_2MG.wav - console.info(`file filePath: ${ filePath}`); - - let file = fs.openSync(filePath, fs.OpenMode.READ_ONLY); - let stat = await fs.stat(filePath); // Music file information. - let buf = new ArrayBuffer(bufferSize); - let len = stat.size % this.bufferSize == 0 ? Math.floor(stat.size / this.bufferSize) : Math.floor(stat.size / this.bufferSize + 1); - for (let i = 0;i < len; i++) { - let options = { - offset: i * this.bufferSize, - length: this.bufferSize - } - let readsize = await fs.read(file.fd, buf, options) - let writeSize = await new Promise((resolve,reject)=>{ - this.audioRenderer.write(buf,(err,writeSize)=>{ - if(err){ - reject(err) - }else{ - resolve(writeSize) - } + let file = fs.openSync(filePath, fs.OpenMode.READ_ONLY); + let stat = await fs.stat(filePath); // Music file information. + let buf = new ArrayBuffer(this.bufferSize); + let len = stat.size % this.bufferSize == 0 ? Math.floor(stat.size / this.bufferSize) : Math.floor(stat.size / this.bufferSize + 1); + for (let i = 0;i < len; i++) { + let options = { + offset: i * this.bufferSize, + length: this.bufferSize + } + let readsize = await fs.read(file.fd, buf, options) + let writeSize = await new Promise((resolve,reject)=>{ + this.audioRenderer.write(buf,(err,writeSize)=>{ + if(err){ + reject(err) + }else{ + resolve(writeSize) + } + }) }) - }) + } + + fs.close(file) + await this.audioRenderer.stop(); // Stop rendering. + await this.audioRenderer.release(); // Release the resources. } - - fs.close(file) - await audioRenderer.stop(); // Stop rendering. - await audioRenderer.release(); // Releases the resources. ``` 4. (Optional) Call **pause()** or **stop()** to pause or stop rendering. ```js - async function pauseRenderer() { - let state = audioRenderer.state; - // The audio renderer can be paused only when it is in the STATE_RUNNING state. 
- if (state != audio.AudioState.STATE_RUNNING) { - console.info('Renderer is not running'); - return; - } - - await audioRenderer.pause(); - - state = audioRenderer.state; - if (state == audio.AudioState.STATE_PAUSED) { - console.info('Renderer paused'); - } else { - console.error('Renderer pause failed'); - } - } - - async function stopRenderer() { - let state = audioRenderer.state; - // The audio renderer can be stopped only when it is in STATE_RUNNING or STATE_PAUSED state. - if (state != audio.AudioState.STATE_RUNNING && state != audio.AudioState.STATE_PAUSED) { - console.info('Renderer is not running or paused'); - return; - } - - await audioRenderer.stop(); - - state = audioRenderer.state; - if (state == audio.AudioState.STATE_STOPPED) { - console.info('Renderer stopped'); - } else { - console.error('Renderer stop failed'); - } - } + async pauseRenderer() { + let state = this.audioRenderer.state; + // The audio renderer can be paused only when it is in the STATE_RUNNING state. + if (state != audio.AudioState.STATE_RUNNING) { + console.info('Renderer is not running'); + return; + } + + await this.audioRenderer.pause(); + + state = this.audioRenderer.state; + if (state == audio.AudioState.STATE_PAUSED) { + console.info('Renderer paused'); + } else { + console.error('Renderer pause failed'); + } + } + + async stopRenderer() { + let state = this.audioRenderer.state; + // The audio renderer can be stopped only when it is in STATE_RUNNING or STATE_PAUSED state. + if (state != audio.AudioState.STATE_RUNNING && state != audio.AudioState.STATE_PAUSED) { + console.info('Renderer is not running or paused'); + return; + } + + await this.audioRenderer.stop(); + + state = this.audioRenderer.state; + if (state == audio.AudioState.STATE_STOPPED) { + console.info('Renderer stopped'); + } else { + console.error('Renderer stop failed'); + } + } ``` 5. (Optional) Call **drain()** to clear the buffer. ```js - async function drainRenderer() { - let state = audioRenderer.state; - // drain() can be used only when the audio renderer is in the STATE_RUNNING state. - if (state != audio.AudioState.STATE_RUNNING) { - console.info('Renderer is not running'); - return; - } - - await audioRenderer.drain(); - state = audioRenderer.state; + async drainRenderer() { + let state = this.audioRenderer.state; + // drain() can be used only when the audio renderer is in the STATE_RUNNING state. + if (state != audio.AudioState.STATE_RUNNING) { + console.info('Renderer is not running'); + return; } + + await this.audioRenderer.drain(); + state = this.audioRenderer.state; + } ``` 6. After the task is complete, call **release()** to release related resources. @@ -198,67 +191,63 @@ For details about the APIs, see [AudioRenderer in Audio Management](../reference **AudioRenderer** uses a large number of system resources. Therefore, ensure that the resources are released after the task is complete. ```js - async function releaseRenderer() { - let state = audioRenderer.state; - // The audio renderer can be released only when it is not in the STATE_RELEASED or STATE_NEW state. 
- if (state == audio.AudioState.STATE_RELEASED || state == audio.AudioState.STATE_NEW) { - console.info('Renderer already released'); - return; - } - await audioRenderer.release(); - - state = audioRenderer.state; - if (state == audio.AudioState.STATE_RELEASED) { - console.info('Renderer released'); - } else { - console.info('Renderer release failed'); - } + async releaseRenderer() { + let state = this.audioRenderer.state; + // The audio renderer can be released only when it is not in the STATE_RELEASED or STATE_NEW state. + if (state == audio.AudioState.STATE_RELEASED || state == audio.AudioState.STATE_NEW) { + console.info('Renderer already released'); + return; + } + await this.audioRenderer.release(); + + state = this.audioRenderer.state; + if (state == audio.AudioState.STATE_RELEASED) { + console.info('Renderer released'); + } else { + console.info('Renderer release failed'); + } } ``` 7. (Optional) Obtain the audio renderer information. - + You can use the following code to obtain the audio renderer information: ```js - // Obtain the audio renderer state. - let state = audioRenderer.state; - - // Obtain the audio renderer information. - let audioRendererInfo : audio.AudioRendererInfo = await audioRenderer.getRendererInfo(); - - // Obtain the audio stream information. - let audioStreamInfo : audio.AudioStreamInfo = await audioRenderer.getStreamInfo(); - - // Obtain the audio stream ID. - let audioStreamId : number = await audioRenderer.getAudioStreamId(); - - // Obtain the Unix timestamp, in nanoseconds. - let audioTime : number = await audioRenderer.getAudioTime(); - - // Obtain a proper minimum buffer size. - let bufferSize : number = await audioRenderer.getBufferSize(); - - // Obtain the audio renderer rate. - let renderRate : audio.AudioRendererRate = await audioRenderer.getRenderRate(); + async getRenderInfo(){ + // Obtain the audio renderer state. + let state = this.audioRenderer.state; + // Obtain the audio renderer information. + let audioRendererInfo : audio.AudioRendererInfo = await this.audioRenderer.getRendererInfo(); + // Obtain the audio stream information. + let audioStreamInfo : audio.AudioStreamInfo = await this.audioRenderer.getStreamInfo(); + // Obtain the audio stream ID. + let audioStreamId : number = await this.audioRenderer.getAudioStreamId(); + // Obtain the Unix timestamp, in nanoseconds. + let audioTime : number = await this.audioRenderer.getAudioTime(); + // Obtain a proper minimum buffer size. + let bufferSize : number = await this.audioRenderer.getBufferSize(); + // Obtain the audio renderer rate. + let renderRate : audio.AudioRendererRate = await this.audioRenderer.getRenderRate(); + } ``` 8. (Optional) Set the audio renderer information. - + You can use the following code to set the audio renderer information: ```js - // Set the audio renderer rate to RENDER_RATE_NORMAL. - let renderRate : audio.AudioRendererRate = audio.AudioRendererRate.RENDER_RATE_NORMAL; - await audioRenderer.setRenderRate(renderRate); - - // Set the interruption mode of the audio renderer to SHARE_MODE. - let interruptMode : audio.InterruptMode = audio.InterruptMode.SHARE_MODE; - await audioRenderer.setInterruptMode(interruptMode); - - // Set the volume of the stream to 0.5. - let volume : number = 0.5; - await audioRenderer.setVolume(volume); + async setAudioRenderInfo(){ + // Set the audio renderer rate to RENDER_RATE_NORMAL. 
+ let renderRate : audio.AudioRendererRate = audio.AudioRendererRate.RENDER_RATE_NORMAL; + await this.audioRenderer.setRenderRate(renderRate); + // Set the interruption mode of the audio renderer to SHARE_MODE. + let interruptMode : audio.InterruptMode = audio.InterruptMode.SHARE_MODE; + await this.audioRenderer.setInterruptMode(interruptMode); + // Set the volume of the stream to 0.5. + let volume : number = 0.5; + await this.audioRenderer.setVolume(volume); + } ``` 9. (Optional) Use **on('audioInterrupt')** to subscribe to the audio interruption event, and use **off('audioInterrupt')** to unsubscribe from the event. @@ -270,110 +259,116 @@ For details about the APIs, see [AudioRenderer in Audio Management](../reference In the case of audio interruption, the application may encounter write failures. To avoid such failures, interruption-unaware applications can use **audioRenderer.state** to check the audio renderer state before writing audio data. The applications can obtain more details by subscribing to the audio interruption events. For details, see [InterruptEvent](../reference/apis/js-apis-audio.md#interruptevent9). It should be noted that the audio interruption event subscription of the **AudioRenderer** module is slightly different from **on('interrupt')** in [AudioManager](../reference/apis/js-apis-audio.md#audiomanager). The **on('interrupt')** and **off('interrupt')** APIs are deprecated since API version 9. In the **AudioRenderer** module, you only need to call **on('audioInterrupt')** to listen for focus change events. When the **AudioRenderer** instance created by the application performs actions such as start, stop, and pause, it requests the focus, which triggers focus transfer and in return enables the related **AudioRenderer** instance to receive a notification through the callback. For instances other than **AudioRenderer**, such as frequency modulation (FM) and voice wakeup, the application does not create an instance. In this case, the application can call **on('interrupt')** in **AudioManager** to receive a focus change notification. - + ```js - audioRenderer.on('audioInterrupt', (interruptEvent) => { - console.info('InterruptEvent Received'); - console.info(`InterruptType: ${interruptEvent.eventType}`); - console.info(`InterruptForceType: ${interruptEvent.forceType}`); - console.info(`AInterruptHint: ${interruptEvent.hintType}`); - - if (interruptEvent.forceType == audio.InterruptForceType.INTERRUPT_FORCE) { - switch (interruptEvent.hintType) { + async subscribeAudioRender(){ + this.audioRenderer.on('audioInterrupt', (interruptEvent) => { + console.info('InterruptEvent Received'); + console.info(`InterruptType: ${interruptEvent.eventType}`); + console.info(`InterruptForceType: ${interruptEvent.forceType}`); + console.info(`AInterruptHint: ${interruptEvent.hintType}`); + + if (interruptEvent.forceType == audio.InterruptForceType.INTERRUPT_FORCE) { + switch (interruptEvent.hintType) { // Forcible pausing initiated by the audio framework. To prevent data loss, stop the write operation. - case audio.InterruptHint.INTERRUPT_HINT_PAUSE: - isPlay = false; - break; + case audio.InterruptHint.INTERRUPT_HINT_PAUSE: + console.info('isPlay is false'); + break; // Forcible stopping initiated by the audio framework. To prevent data loss, stop the write operation. - case audio.InterruptHint.INTERRUPT_HINT_STOP: - isPlay = false; - break; + case audio.InterruptHint.INTERRUPT_HINT_STOP: + console.info('isPlay is false'); + break; // Forcible ducking initiated by the audio framework. 
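+ // Ducking lowers the stream volume while playback continues, so this example takes no extra action here.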
- case audio.InterruptHint.INTERRUPT_HINT_DUCK: - break; + case audio.InterruptHint.INTERRUPT_HINT_DUCK: + break; // Undocking initiated by the audio framework. - case audio.InterruptHint.INTERRUPT_HINT_UNDUCK: - break; - } - } else if (interruptEvent.forceType == audio.InterruptForceType.INTERRUPT_SHARE) { - switch (interruptEvent.hintType) { + case audio.InterruptHint.INTERRUPT_HINT_UNDUCK: + break; + } + } else if (interruptEvent.forceType == audio.InterruptForceType.INTERRUPT_SHARE) { + switch (interruptEvent.hintType) { // Notify the application that the rendering starts. - case audio.InterruptHint.INTERRUPT_HINT_RESUME: - startRenderer(); - break; + case audio.InterruptHint.INTERRUPT_HINT_RESUME: + this.startRenderer(); + break; // Notify the application that the audio stream is interrupted. The application then determines whether to continue. (In this example, the application pauses the rendering.) - case audio.InterruptHint.INTERRUPT_HINT_PAUSE: - isPlay = false; - pauseRenderer(); - break; + case audio.InterruptHint.INTERRUPT_HINT_PAUSE: + console.info('isPlay is false'); + this.pauseRenderer(); + break; + } } - } - }); - - audioRenderer.off('audioInterrupt'); // Unsubscribe from the audio interruption event. This event will no longer be listened for. + }); + } ``` 10. (Optional) Use **on('markReach')** to subscribe to the mark reached event, and use **off('markReach')** to unsubscribe from the event. After the mark reached event is subscribed to, when the number of frames rendered by the audio renderer reaches the specified value, a callback is triggered and the specified value is returned. - - ```js - audioRenderer.on('markReach', (reachNumber) => { - console.info('Mark reach event Received'); - console.info(`The renderer reached frame: ${reachNumber}`); - }); - audioRenderer.off('markReach'); // Unsubscribe from the mark reached event. This event will no longer be listened for. + ```js + async markReach(){ + this.audioRenderer.on('markReach', 50, (position) => { + if (position == 50) { + console.info('ON Triggered successfully'); + } + }); + this.audioRenderer.off('markReach'); // Unsubscribe from the mark reached event. This event will no longer be listened for. + } ``` 11. (Optional) Use **on('periodReach')** to subscribe to the period reached event, and use **off('periodReach')** to unsubscribe from the event. After the period reached event is subscribed to, each time the number of frames rendered by the audio renderer reaches the specified value, a callback is triggered and the specified value is returned. - - ```js - audioRenderer.on('periodReach', (reachNumber) => { - console.info('Period reach event Received'); - console.info(`In this period, the renderer reached frame: ${reachNumber} `); - }); - audioRenderer.off('periodReach'); // Unsubscribe from the period reached event. This event will no longer be listened for. + ```js + async periodReach(){ + this.audioRenderer.on('periodReach',10, (reachNumber) => { + console.info(`In this period, the renderer reached frame: ${reachNumber} `); + }); + + this.audioRenderer.off('periodReach'); // Unsubscribe from the period reached event. This event will no longer be listened for. + } ``` 12. (Optional) Use **on('stateChange')** to subscribe to audio renderer state changes. After the **stateChange** event is subscribed to, when the audio renderer state changes, a callback is triggered and the audio renderer state is returned. 
- + ```js - audioRenderer.on('stateChange', (audioState) => { - console.info('State change event Received'); - console.info(`Current renderer state is: ${audioState}`); - }); + async stateChange(){ + this.audioRenderer.on('stateChange', (audioState) => { + console.info('State change event Received'); + console.info(`Current renderer state is: ${audioState}`); + }); + } ``` 13. (Optional) Handle exceptions of **on()**. If the string or the parameter type passed in **on()** is incorrect , the application throws an exception. In this case, you can use **try catch** to capture the exception. - + ```js - try { - audioRenderer.on('invalidInput', () => { // The string is invalid. - }) - } catch (err) { - console.info(`Call on function error, ${err}`); // The application throws exception 401. - } - try { - audioRenderer.on(1, () => { // The type of the input parameter is incorrect. - }) - } catch (err) { - console.info(`Call on function error, ${err}`); // The application throws exception 6800101. + async errorCall(){ + try { + this.audioRenderer.on('invalidInput', () => { // The string is invalid. + }) + } catch (err) { + console.info(`Call on function error, ${err}`); // The application throws exception 401. + } + try { + this.audioRenderer.on(1, () => { // The type of the input parameter is incorrect. + }) + } catch (err) { + console.info(`Call on function error, ${err}`); // The application throws exception 6800101. + } } ``` 14. (Optional) Refer to the complete example of **on('audioInterrupt')**. - - Create **AudioRender1** and **AudioRender2** in an application, configure the independent interruption mode, and call **on('audioInterrupt')** to subscribe to audio interruption events. At the beginning, **AudioRender1** has the focus. When **AudioRender2** attempts to obtain the focus, **AudioRender1** receives a focus transfer notification and the related log information is printed. If the shared mode is used, the log information will not be printed during application running. - - ```js + Declare audioRenderer1 and audioRenderer2 first. For details, see step 1. + Create **AudioRender1** and **AudioRender2** in an application, configure the independent interruption mode, and call **on('audioInterrupt')** to subscribe to audio interruption events. At the beginning, **AudioRender1** has the focus. When **AudioRender2** attempts to obtain the focus, **AudioRender1** receives a focus transfer notification and the related log information is printed. If the shared mode is used, the log information will not be printed during application running. + ```js async runningAudioRender1(){ let audioStreamInfo = { samplingRate: audio.AudioSamplingRate.SAMPLE_RATE_48000, @@ -388,33 +383,33 @@ For details about the APIs, see [AudioRenderer in Audio Management](../reference } let audioRendererOptions = { streamInfo: audioStreamInfo, - rendererInfo: audioRendererInfo + rendererInfo: audioRendererInfo } - + // 1.1 Create an instance. - audioRenderer1 = await audio.createAudioRenderer(audioRendererOptions); + this.audioRenderer1 = await audio.createAudioRenderer(audioRendererOptions); console.info("Create audio renderer 1 success."); - + // 1.2 Set the independent mode. - audioRenderer1.setInterruptMode(1).then( data => { + this.audioRenderer1.setInterruptMode(1).then( data => { console.info('audioRenderer1 setInterruptMode Success!'); }).catch((err) => { - console.error(`audioRenderer1 setInterruptMode Fail: ${err}`); + console.error(`audioRenderer1 setInterruptMode Fail: ${err}`); }); - + // 1.3 Set the listener. 
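+ // audioRenderer1 prints the interrupt event it receives when audioRenderer2 requests the focus later in runningAudioRender2().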
- audioRenderer1.on('audioInterrupt', async(interruptEvent) => { - console.info(`audioRenderer1 on audioInterrupt : ${JSON.stringify(interruptEvent)}`) + this.audioRenderer1.on('audioInterrupt', async(interruptEvent) => { + console.info(`audioRenderer1 on audioInterrupt : ${JSON.stringify(interruptEvent)}`) }); - + // 1.4 Start rendering. - await audioRenderer1.start(); + await this.audioRenderer1.start(); console.info('startAudioRender1 success'); - + // 1.5 Obtain the buffer size, which is the proper minimum buffer size of the audio renderer. You can also select a buffer of another size. - const bufferSize = await audioRenderer1.getBufferSize(); + const bufferSize = await this.audioRenderer1.getBufferSize(); console.info(`audio bufferSize: ${bufferSize}`); - + // 1.6 Obtain the original audio data file. let dir = globalThis.fileDir; // You must use the sandbox path. const path1 = dir + '/music001_48000_32_1.wav'; // The file to render is in the following path: /data/storage/el2/base/haps/entry/files/music001_48000_32_1.wav @@ -423,14 +418,14 @@ For details about the APIs, see [AudioRenderer in Audio Management](../reference let stat = await fs.stat(path1); // Music file information. let buf = new ArrayBuffer(bufferSize); let len = stat.size % this.bufferSize == 0 ? Math.floor(stat.size / this.bufferSize) : Math.floor(stat.size / this.bufferSize + 1); - + // 1.7 Render the original audio data in the buffer by using audioRender. for (let i = 0;i < len; i++) { let options = { offset: i * this.bufferSize, length: this.bufferSize } - let readsize = await fs.read(file.fd, buf, options) + let readsize = await fs.read(file1.fd, buf, options) let writeSize = await new Promise((resolve,reject)=>{ this.audioRenderer1.write(buf,(err,writeSize)=>{ if(err){ @@ -439,13 +434,13 @@ For details about the APIs, see [AudioRenderer in Audio Management](../reference resolve(writeSize) } }) - }) + }) } fs.close(file1) - await audioRenderer1.stop(); // Stop rendering. - await audioRenderer1.release(); Releases the resources. + await this.audioRenderer1.stop(); // Stop rendering. + await this.audioRenderer1.release(); // Release the resources. } - + async runningAudioRender2(){ let audioStreamInfo = { samplingRate: audio.AudioSamplingRate.SAMPLE_RATE_48000, @@ -460,33 +455,33 @@ For details about the APIs, see [AudioRenderer in Audio Management](../reference } let audioRendererOptions = { streamInfo: audioStreamInfo, - rendererInfo: audioRendererInfo + rendererInfo: audioRendererInfo } - + // 2.1 Create another instance. - audioRenderer2 = await audio.createAudioRenderer(audioRendererOptions); + this.audioRenderer2 = await audio.createAudioRenderer(audioRendererOptions); console.info("Create audio renderer 2 success."); - + // 2.2 Set the independent mode. - audioRenderer2.setInterruptMode(1).then( data => { + this.audioRenderer2.setInterruptMode(1).then( data => { console.info('audioRenderer2 setInterruptMode Success!'); }).catch((err) => { - console.error(`audioRenderer2 setInterruptMode Fail: ${err}`); + console.error(`audioRenderer2 setInterruptMode Fail: ${err}`); }); - + // 2.3 Set the listener. - audioRenderer2.on('audioInterrupt', async(interruptEvent) => { - console.info(`audioRenderer2 on audioInterrupt : ${JSON.stringify(interruptEvent)}`) + this.audioRenderer2.on('audioInterrupt', async(interruptEvent) => { + console.info(`audioRenderer2 on audioInterrupt : ${JSON.stringify(interruptEvent)}`) }); - + // 2.4 Start rendering. 
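+ // Starting audioRenderer2 requests the audio focus; in independent mode this triggers the audioInterrupt callback registered on audioRenderer1 in step 1.3.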
- await audioRenderer2.start(); + await this.audioRenderer2.start(); console.info('startAudioRender2 success'); - + // 2.5 Obtain the buffer size. - const bufferSize = await audioRenderer2.getBufferSize(); + const bufferSize = await this.audioRenderer2.getBufferSize(); console.info(`audio bufferSize: ${bufferSize}`); - + // 2.6 Read the original audio data file. let dir = globalThis.fileDir; // You must use the sandbox path. const path2 = dir + '/music002_48000_32_1.wav'; // The file to render is in the following path: /data/storage/el2/base/haps/entry/files/music002_48000_32_1.wav @@ -495,14 +490,14 @@ For details about the APIs, see [AudioRenderer in Audio Management](../reference let stat = await fs.stat(path2); // Music file information. let buf = new ArrayBuffer(bufferSize); let len = stat.size % this.bufferSize == 0 ? Math.floor(stat.size / this.bufferSize) : Math.floor(stat.size / this.bufferSize + 1); - + // 2.7 Render the original audio data in the buffer by using audioRender. for (let i = 0;i < len; i++) { let options = { offset: i * this.bufferSize, length: this.bufferSize } - let readsize = await fs.read(file.fd, buf, options) + let readsize = await fs.read(file2.fd, buf, options) let writeSize = await new Promise((resolve,reject)=>{ this.audioRenderer2.write(buf,(err,writeSize)=>{ if(err){ @@ -511,28 +506,17 @@ For details about the APIs, see [AudioRenderer in Audio Management](../reference resolve(writeSize) } }) - }) + }) } fs.close(file2) - await audioRenderer2.stop(); // Stop rendering. - await audioRenderer2.release(); // Releases the resources. + await this.audioRenderer2.stop(); // Stop rendering. + await this.audioRenderer2.release(); // Release the resources. } - - async writeBuffer(buf, audioRender) { - let writtenbytes; - await audioRender.write(buf).then((value) => { - writtenbytes = value; - console.info(`Actual written bytes: ${writtenbytes} `); - }); - if (typeof(writtenbytes) != 'number' || writtenbytes < 0) { - console.error('get Write buffer failed. check the state of renderer'); - } - } - + // Integrated invoking entry. async test(){ - await runningAudioRender1(); - await runningAudioRender2(); + await this.runningAudioRender1(); + await this.runningAudioRender2(); } - - ``` + + ``` \ No newline at end of file diff --git a/en/application-dev/reference/apis/js-apis-audio.md b/en/application-dev/reference/apis/js-apis-audio.md index 4abed77e24d5c6cf16d7fd84ce36cf806d06ce4e..b3f5def410251e954ee3f7b00c211e2f7f5116a0 100644 --- a/en/application-dev/reference/apis/js-apis-audio.md +++ b/en/application-dev/reference/apis/js-apis-audio.md @@ -23,9 +23,9 @@ import audio from '@ohos.multimedia.audio'; | Name | Type | Readable | Writable| Description | | --------------------------------------- | ----------| ---- | ---- | ------------------ | -| LOCAL_NETWORK_ID9+ | string | Yes | No | Network ID of the local device.
This is a system API.
**System capability**: SystemCapability.Multimedia.Audio.Device | -| DEFAULT_VOLUME_GROUP_ID9+ | number | Yes | No | Default volume group ID.
**System capability**: SystemCapability.Multimedia.Audio.Volume | -| DEFAULT_INTERRUPT_GROUP_ID9+ | number | Yes | No | Default audio interruption group ID.
**System capability**: SystemCapability.Multimedia.Audio.Interrupt | +| LOCAL_NETWORK_ID9+ | string | Yes | No | Network ID of the local device.
This is a system API.
**System capability**: SystemCapability.Multimedia.Audio.Device | +| DEFAULT_VOLUME_GROUP_ID9+ | number | Yes | No | Default volume group ID.
**System capability**: SystemCapability.Multimedia.Audio.Volume | +| DEFAULT_INTERRUPT_GROUP_ID9+ | number | Yes | No | Default audio interruption group ID.
**System capability**: SystemCapability.Multimedia.Audio.Interrupt | **Example** @@ -1763,7 +1763,7 @@ Sets a device to the active state. This API uses an asynchronous callback to ret | Name | Type | Mandatory| Description | | ---------- | ------------------------------------- | ---- | ------------------------ | -| deviceType | [ActiveDeviceType](#activedevicetypedeprecated) | Yes | Active audio device type. | +| deviceType | [ActiveDeviceType](#activedevicetypedeprecated) | Yes | Active audio device type. | | active | boolean | Yes | Active state to set. The value **true** means to set the device to the active state, and **false** means the opposite. | | callback | AsyncCallback<void> | Yes | Callback used to return the result.| @@ -1795,7 +1795,7 @@ Sets a device to the active state. This API uses a promise to return the result. | Name | Type | Mandatory| Description | | ---------- | ------------------------------------- | ---- | ------------------ | -| deviceType | [ActiveDeviceType](#activedevicetypedeprecated) | Yes | Active audio device type. | +| deviceType | [ActiveDeviceType](#activedevicetypedeprecated) | Yes | Active audio device type.| | active | boolean | Yes | Active state to set. The value **true** means to set the device to the active state, and **false** means the opposite. | **Return value** @@ -1829,7 +1829,7 @@ Checks whether a device is active. This API uses an asynchronous callback to ret | Name | Type | Mandatory| Description | | ---------- | ------------------------------------- | ---- | ------------------------ | -| deviceType | [ActiveDeviceType](#activedevicetypedeprecated) | Yes | Active audio device type. | +| deviceType | [ActiveDeviceType](#activedevicetypedeprecated) | Yes | Active audio device type. | | callback | AsyncCallback<boolean> | Yes | Callback used to return the active state of the device.| **Example** @@ -1860,7 +1860,7 @@ Checks whether a device is active. This API uses a promise to return the result. | Name | Type | Mandatory| Description | | ---------- | ------------------------------------- | ---- | ------------------ | -| deviceType | [ActiveDeviceType](#activedevicetypedeprecated) | Yes | Active audio device type. | +| deviceType | [ActiveDeviceType](#activedevicetypedeprecated) | Yes | Active audio device type.| **Return value** @@ -3956,6 +3956,7 @@ Describes the audio renderer change event. **Example** ```js + import audio from '@ohos.multimedia.audio'; const audioManager = audio.getAudioManager(); @@ -4240,7 +4241,6 @@ audioRenderer.getStreamInfo().then((streamInfo) => { }).catch((err) => { console.error(`ERROR: ${err}`); }); - ``` ### getAudioStreamId9+ @@ -4263,7 +4263,6 @@ Obtains the stream ID of this **AudioRenderer** instance. 
This API uses an async audioRenderer.getAudioStreamId((err, streamid) => { console.info(`Renderer GetStreamId: ${streamid}`); }); - ``` ### getAudioStreamId9+ @@ -4288,7 +4287,6 @@ audioRenderer.getAudioStreamId().then((streamid) => { }).catch((err) => { console.error(`ERROR: ${err}`); }); - ``` ### start8+ @@ -4315,7 +4313,6 @@ audioRenderer.start((err) => { console.info('Renderer start success.'); } }); - ``` ### start8+ @@ -4340,7 +4337,6 @@ audioRenderer.start().then(() => { }).catch((err) => { console.error(`ERROR: ${err}`); }); - ``` ### pause8+ @@ -4367,7 +4363,6 @@ audioRenderer.pause((err) => { console.info('Renderer paused.'); } }); - ``` ### pause8+ @@ -4392,7 +4387,6 @@ audioRenderer.pause().then(() => { }).catch((err) => { console.error(`ERROR: ${err}`); }); - ``` ### drain8+ @@ -4419,7 +4413,6 @@ audioRenderer.drain((err) => { console.info('Renderer drained.'); } }); - ``` ### drain8+ @@ -4444,7 +4437,6 @@ audioRenderer.drain().then(() => { }).catch((err) => { console.error(`ERROR: ${err}`); }); - ``` ### stop8+ @@ -4471,7 +4463,6 @@ audioRenderer.stop((err) => { console.info('Renderer stopped.'); } }); - ``` ### stop8+ @@ -4496,7 +4487,6 @@ audioRenderer.stop().then(() => { }).catch((err) => { console.error(`ERROR: ${err}`); }); - ``` ### release8+ @@ -4523,7 +4513,6 @@ audioRenderer.release((err) => { console.info('Renderer released.'); } }); - ``` ### release8+ @@ -4548,7 +4537,6 @@ audioRenderer.release().then(() => { }).catch((err) => { console.error(`ERROR: ${err}`); }); - ``` ### write8+ @@ -4586,15 +4574,15 @@ let filePath = path + '/StarWars10s-2C-48000-4SW.wav'; let file = fs.openSync(filePath, fs.OpenMode.READ_ONLY); let stat = await fs.stat(path); let buf = new ArrayBuffer(bufferSize); -let len = stat.size % this.bufferSize == 0 ? Math.floor(stat.size / this.bufferSize) : Math.floor(stat.size / this.bufferSize + 1); +let len = stat.size % bufferSize == 0 ? Math.floor(stat.size / bufferSize) : Math.floor(stat.size / bufferSize + 1); for (let i = 0;i < len; i++) { let options = { - offset: i * this.bufferSize, - length: this.bufferSize + offset: i * bufferSize, + length: bufferSize } let readsize = await fs.read(file.fd, buf, options) let writeSize = await new Promise((resolve,reject)=>{ - this.audioRenderer.write(buf,(err,writeSize)=>{ + audioRenderer.write(buf,(err,writeSize)=>{ if(err){ reject(err) }else{ @@ -4604,7 +4592,6 @@ for (let i = 0;i < len; i++) { }) } - ``` ### write8+ @@ -4641,20 +4628,19 @@ let filePath = path + '/StarWars10s-2C-48000-4SW.wav'; let file = fs.openSync(filePath, fs.OpenMode.READ_ONLY); let stat = await fs.stat(path); let buf = new ArrayBuffer(bufferSize); -let len = stat.size % this.bufferSize == 0 ? Math.floor(stat.size / this.bufferSize) : Math.floor(stat.size / this.bufferSize + 1); +let len = stat.size % bufferSize == 0 ? Math.floor(stat.size / bufferSize) : Math.floor(stat.size / bufferSize + 1); for (let i = 0;i < len; i++) { let options = { - offset: i * this.bufferSize, - length: this.bufferSize + offset: i * bufferSize, + length: bufferSize } let readsize = await fs.read(file.fd, buf, options) try{ - let writeSize = await this.audioRenderer.write(buf); + let writeSize = await audioRenderer.write(buf); } catch(err) { console.error(`audioRenderer.write err: ${err}`); } } - ``` ### getAudioTime8+ @@ -4677,7 +4663,6 @@ Obtains the number of nanoseconds elapsed from the Unix epoch (January 1, 1970). 
audioRenderer.getAudioTime((err, timestamp) => { console.info(`Current timestamp: ${timestamp}`); }); - ``` ### getAudioTime8+ @@ -4702,7 +4687,6 @@ audioRenderer.getAudioTime().then((timestamp) => { }).catch((err) => { console.error(`ERROR: ${err}`); }); - ``` ### getBufferSize8+ @@ -4727,7 +4711,6 @@ let bufferSize = audioRenderer.getBufferSize(async(err, bufferSize) => { console.error('getBufferSize error'); } }); - ``` ### getBufferSize8+ @@ -4754,7 +4737,6 @@ audioRenderer.getBufferSize().then((data) => { }).catch((err) => { console.error(`AudioFrameworkRenderLog: getBufferSize: ERROR: ${err}`); }); - ``` ### setRenderRate8+ @@ -4782,7 +4764,6 @@ audioRenderer.setRenderRate(audio.AudioRendererRate.RENDER_RATE_NORMAL, (err) => console.info('Callback invoked to indicate a successful render rate setting.'); } }); - ``` ### setRenderRate8+ @@ -4813,7 +4794,6 @@ audioRenderer.setRenderRate(audio.AudioRendererRate.RENDER_RATE_NORMAL).then(() }).catch((err) => { console.error(`ERROR: ${err}`); }); - ``` ### getRenderRate8+ @@ -4836,7 +4816,6 @@ Obtains the current render rate. This API uses an asynchronous callback to retur audioRenderer.getRenderRate((err, renderrate) => { console.info(`getRenderRate: ${renderrate}`); }); - ``` ### getRenderRate8+ @@ -4861,9 +4840,7 @@ audioRenderer.getRenderRate().then((renderRate) => { }).catch((err) => { console.error(`ERROR: ${err}`); }); - ``` - ### setInterruptMode9+ setInterruptMode(mode: InterruptMode): Promise<void> @@ -4893,9 +4870,7 @@ audioRenderer.setInterruptMode(mode).then(data=>{ }).catch((err) => { console.error(`setInterruptMode Fail: ${err}`); }); - ``` - ### setInterruptMode9+ setInterruptMode(mode: InterruptMode, callback: AsyncCallback\): void @@ -4921,7 +4896,6 @@ audioRenderer.setInterruptMode(mode, (err, data)=>{ } console.info('setInterruptMode Success!'); }); - ``` ### setVolume9+ @@ -4952,9 +4926,7 @@ audioRenderer.setVolume(0.5).then(data=>{ }).catch((err) => { console.error(`setVolume Fail: ${err}`); }); - ``` - ### setVolume9+ setVolume(volume: number, callback: AsyncCallback\): void @@ -4979,7 +4951,6 @@ audioRenderer.setVolume(0.5, (err, data)=>{ } console.info('setVolume Success!'); }); - ``` ### on('audioInterrupt')9+ @@ -5005,7 +4976,7 @@ For details about the error codes, see [Audio Error Codes](../errorcodes/errorco | ID | Error Message | | ------- | ------------------------------ | -| 6800101 | if input parameter value error | +| 6800101 | if input parameter value error | **Example** @@ -5059,7 +5030,6 @@ async function onAudioInterrupt(){ } }); } - ``` ### on('markReach')8+ @@ -5086,7 +5056,6 @@ audioRenderer.on('markReach', 1000, (position) => { console.info('ON Triggered successfully'); } }); - ``` @@ -5108,7 +5077,6 @@ Unsubscribes from mark reached events. ```js audioRenderer.off('markReach'); - ``` ### on('periodReach') 8+ @@ -5135,7 +5103,6 @@ audioRenderer.on('periodReach', 1000, (position) => { console.info('ON Triggered successfully'); } }); - ``` ### off('periodReach') 8+ @@ -5156,10 +5123,9 @@ Unsubscribes from period reached events. ```js audioRenderer.off('periodReach') - ``` -### on('stateChange')8+ +### on('stateChange') 8+ on(type: 'stateChange', callback: Callback): void @@ -5185,7 +5151,6 @@ audioRenderer.on('stateChange', (state) => { console.info('audio renderer state is: STATE_RUNNING'); } }); - ``` ## AudioCapturer8+ @@ -5204,7 +5169,6 @@ Provides APIs for audio capture. 
Before calling any API in **AudioCapturer**, yo ```js let state = audioCapturer.state; - ``` ### getCapturerInfo8+ @@ -5233,7 +5197,6 @@ audioCapturer.getCapturerInfo((err, capturerInfo) => { console.info(`Capturer flags: ${capturerInfo.capturerFlags}`); } }); - ``` @@ -5266,7 +5229,6 @@ audioCapturer.getCapturerInfo().then((audioParamsGet) => { }).catch((err) => { console.error(`AudioFrameworkRecLog: CapturerInfo :ERROR: ${err}`); }); - ``` ### getStreamInfo8+ @@ -5297,7 +5259,6 @@ audioCapturer.getStreamInfo((err, streamInfo) => { console.info(`Capturer encoding type: ${streamInfo.encodingType}`); } }); - ``` ### getStreamInfo8+ @@ -5326,7 +5287,6 @@ audioCapturer.getStreamInfo().then((audioParamsGet) => { }).catch((err) => { console.error(`getStreamInfo :ERROR: ${err}`); }); - ``` ### getAudioStreamId9+ @@ -5349,7 +5309,6 @@ Obtains the stream ID of this **AudioCapturer** instance. This API uses an async audioCapturer.getAudioStreamId((err, streamid) => { console.info(`audioCapturer GetStreamId: ${streamid}`); }); - ``` ### getAudioStreamId9+ @@ -5374,7 +5333,6 @@ audioCapturer.getAudioStreamId().then((streamid) => { }).catch((err) => { console.error(`ERROR: ${err}`); }); - ``` ### start8+ @@ -5401,7 +5359,6 @@ audioCapturer.start((err) => { console.info('Capturer start success.'); } }); - ``` @@ -5433,7 +5390,6 @@ audioCapturer.start().then(() => { }).catch((err) => { console.info(`AudioFrameworkRecLog: Capturer start :ERROR : ${err}`); }); - ``` ### stop8+ @@ -5460,7 +5416,6 @@ audioCapturer.stop((err) => { console.info('Capturer stopped.'); } }); - ``` @@ -5490,7 +5445,6 @@ audioCapturer.stop().then(() => { }).catch((err) => { console.info(`AudioFrameworkRecLog: Capturer stop: ERROR: ${err}`); }); - ``` ### release8+ @@ -5517,7 +5471,6 @@ audioCapturer.release((err) => { console.info('capturer released.'); } }); - ``` @@ -5547,7 +5500,6 @@ audioCapturer.release().then(() => { }).catch((err) => { console.info(`AudioFrameworkRecLog: Capturer stop: ERROR: ${err}`); }); - ``` ### read8+ @@ -5581,7 +5533,6 @@ audioCapturer.read(bufferSize, true, async(err, buffer) => { console.info('Success in reading the buffer data'); } }); - ``` ### read8+ @@ -5621,7 +5572,6 @@ audioCapturer.read(bufferSize, true).then((buffer) => { }).catch((err) => { console.info(`ERROR : ${err}`); }); - ``` ### getAudioTime8+ @@ -5644,7 +5594,6 @@ Obtains the number of nanoseconds elapsed from the Unix epoch (January 1, 1970). audioCapturer.getAudioTime((err, timestamp) => { console.info(`Current timestamp: ${timestamp}`); }); - ``` ### getAudioTime8+ @@ -5669,7 +5618,6 @@ audioCapturer.getAudioTime().then((audioTime) => { }).catch((err) => { console.info(`AudioFrameworkRecLog: AudioCapturer Created : ERROR : ${err}`); }); - ``` ### getBufferSize8+ @@ -5699,7 +5647,6 @@ audioCapturer.getBufferSize((err, bufferSize) => { }); } }); - ``` ### getBufferSize8+ @@ -5726,7 +5673,6 @@ audioCapturer.getBufferSize().then((data) => { }).catch((err) => { console.info(`AudioFrameworkRecLog: getBufferSize :ERROR : ${err}`); }); - ``` ### on('markReach')8+ @@ -5753,7 +5699,6 @@ audioCapturer.on('markReach', 1000, (position) => { console.info('ON Triggered successfully'); } }); - ``` ### off('markReach')8+ @@ -5774,7 +5719,6 @@ Unsubscribes from mark reached events. 
```js audioCapturer.off('markReach'); - ``` ### on('periodReach')8+ @@ -5801,7 +5745,6 @@ audioCapturer.on('periodReach', 1000, (position) => { console.info('ON Triggered successfully'); } }); - ``` ### off('periodReach')8+ @@ -5822,10 +5765,9 @@ Unsubscribes from period reached events. ```js audioCapturer.off('periodReach') - ``` -### on('stateChange')8+ +### on('stateChange') 8+ on(type: 'stateChange', callback: Callback): void @@ -5851,7 +5793,6 @@ audioCapturer.on('stateChange', (state) => { console.info('audio capturer state is: STATE_RUNNING'); } }); - ``` ## ToneType9+ @@ -5926,7 +5867,6 @@ tonePlayer.load(audio.ToneType.TONE_TYPE_DIAL_5, (err) => { console.info('callback call load success'); } }); - ``` ### load9+ @@ -5959,7 +5899,6 @@ tonePlayer.load(audio.ToneType.TONE_TYPE_DIAL_1).then(() => { }).catch(() => { console.error('promise call load fail'); }); - ``` ### start9+ @@ -5989,7 +5928,6 @@ tonePlayer.start((err) => { console.info('callback call start success'); } }); - ``` ### start9+ @@ -6016,7 +5954,6 @@ tonePlayer.start().then(() => { }).catch(() => { console.error('promise call start fail'); }); - ``` ### stop9+ @@ -6046,7 +5983,6 @@ tonePlayer.stop((err) => { console.error('callback call stop success '); } }); - ``` ### stop9+ @@ -6073,7 +6009,6 @@ tonePlayer.stop().then(() => { }).catch(() => { console.error('promise call stop fail'); }); - ``` ### release9+ @@ -6103,7 +6038,6 @@ tonePlayer.release((err) => { console.info('callback call release success '); } }); - ``` ### release9+ @@ -6130,7 +6064,6 @@ tonePlayer.release().then(() => { }).catch(() => { console.error('promise call release fail'); }); - ``` ## ActiveDeviceType(deprecated) diff --git a/en/application-dev/reference/apis/js-apis-medialibrary.md b/en/application-dev/reference/apis/js-apis-medialibrary.md index cd52a40c909762b31e033274b2b5193ecf34e6f6..240108ae1849dfb2fd04d927728c653cdb4627f7 100644 --- a/en/application-dev/reference/apis/js-apis-medialibrary.md +++ b/en/application-dev/reference/apis/js-apis-medialibrary.md @@ -2,7 +2,10 @@ > **NOTE** > -> The APIs of this module are supported since API version 6. Updates will be marked with a superscript to indicate their earliest API version. +> - The APIs of this module are supported since API version 6. Updates will be marked with a superscript to indicate their earliest API version. +> - This API is deprecated since API version 9 and will be retained until API version 13. +> - Certain functionalities are changed as system APIs and can be used only by system applications. To use these functionalities, call [@ohos.filemanagement.userFileManager](js-apis-userFileManager.md). +> - The functionalities for selecting and storing media assets are still open to common applications. To use these functionalities, call [@ohos.file.picker](js-apis-file-picker.md). ## Modules to Import ```js @@ -131,17 +134,12 @@ async function example() { console.info('fileAsset.displayName ' + '0 : ' + fileAsset.displayName); // Call getNextObject to obtain the next file until the last one. for (let i = 1; i < count; i++) { - fetchFileResult.getNextObject((error, fileAsset) => { - if (fileAsset == undefined) { - console.error('get next object failed with error: ' + error); - return; - } - console.info('fileAsset.displayName ' + i + ': ' + fileAsset.displayName); - }) + let fileAsset = await fetchFileResult.getNextObject(); + console.info('fileAsset.displayName ' + i + ': ' + fileAsset.displayName); } + // Release the FetchFileResult instance and invalidate it. 
Other APIs can no longer be called. + fetchFileResult.close(); }); - // Release the FetchFileResult instance and invalidate it. Other APIs can no longer be called. - fetchFileResult.close(); }); } ``` @@ -199,18 +197,15 @@ async function example() { console.info('fileAsset.displayName ' + '0 : ' + fileAsset.displayName); // Call getNextObject to obtain the next file until the last one. for (let i = 1; i < count; i++) { - fetchFileResult.getNextObject().then((fileAsset) => { - console.info('fileAsset.displayName ' + i + ': ' + fileAsset.displayName); - }).catch((error) => { - console.error('get next object failed with error: ' + error); - }) + let fileAsset = await fetchFileResult.getNextObject(); + console.info('fileAsset.displayName ' + i + ': ' + fileAsset.displayName); } + // Release the FetchFileResult instance and invalidate it. Other APIs can no longer be called. + fetchFileResult.close(); }).catch((error) => { // Calling getFirstObject fails. console.error('get first object failed with error: ' + error); }); - // Release the FetchFileResult instance and invalidate it. Other APIs can no longer be called. - fetchFileResult.close(); }).catch((error) => { // Calling getFileAssets fails. console.error('get file assets failed with error: ' + error); @@ -500,7 +495,7 @@ async function example() { ### getAlbums7+ -getAlbums(options: MediaFetchOptions, callback: AsyncCallback): void +getAlbums(options: MediaFetchOptions, callback: AsyncCallback<Array<Album>>): void Obtains the albums. This API uses an asynchronous callback to return the result. @@ -535,7 +530,7 @@ async function example() { ### getAlbums7+ -getAlbums(options: MediaFetchOptions): Promise +getAlbums(options: MediaFetchOptions): Promise<Array<Album>> Obtains the albums. This API uses a promise to return the result. @@ -615,7 +610,7 @@ Call this API when you no longer need to use the APIs in the **MediaLibrary** in media.release() ``` -### storeMediaAsset(deprecated) +### storeMediaAsset storeMediaAsset(option: MediaAssetOption, callback: AsyncCallback<string>): void @@ -623,7 +618,7 @@ Stores a media asset. This API uses an asynchronous callback to return the URI t > **NOTE** > -> This API is deprecated since API version 9. +> This API is supported since API version 6 and can be used only by the FA model. **System capability**: SystemCapability.Multimedia.MediaLibrary.Core @@ -653,7 +648,7 @@ mediaLibrary.getMediaLibrary().storeMediaAsset(option, (error, value) => { ``` -### storeMediaAsset(deprecated) +### storeMediaAsset storeMediaAsset(option: MediaAssetOption): Promise<string> @@ -661,7 +656,7 @@ Stores a media asset. This API uses a promise to return the URI that stores the > **NOTE** > -> This API is deprecated since API version 9. +> This API is supported since API version 6 and can be used only by the FA model. **System capability**: SystemCapability.Multimedia.MediaLibrary.Core @@ -694,15 +689,15 @@ mediaLibrary.getMediaLibrary().storeMediaAsset(option).then((value) => { ``` -### startImagePreview(deprecated) +### startImagePreview startImagePreview(images: Array<string>, index: number, callback: AsyncCallback<void>): void Starts image preview, with the first image to preview specified. This API can be used to preview local images whose URIs start with **datashare://** or online images whose URIs start with **https://**. It uses an asynchronous callback to return the execution result. > **NOTE** -> -> This API is deprecated since API version 9. 
You are advised to use the **\<[Image](../arkui-ts/ts-basic-components-image.md)>** component instead. The **\** component can be used to render and display local and online images. +> This API is supported since API version 6 and can be used only by the FA model. +> You are advised to use the **\<[Image](../arkui-ts/ts-basic-components-image.md)>** component instead. The **\** component can be used to render and display local and online images. **System capability**: SystemCapability.Multimedia.MediaLibrary.Core @@ -738,15 +733,15 @@ mediaLibrary.getMediaLibrary().startImagePreview(images, index, (error) => { ``` -### startImagePreview(deprecated) +### startImagePreview startImagePreview(images: Array<string>, callback: AsyncCallback<void>): void Starts image preview. This API can be used to preview local images whose URIs start with **datashare://** or online images whose URIs start with **https://**. It uses an asynchronous callback to return the execution result. > **NOTE** -> -> This API is deprecated since API version 9. You are advised to use the **\<[Image](../arkui-ts/ts-basic-components-image.md)>** component instead. The **\** component can be used to render and display local and online images. +> This API is supported since API version 6 and can be used only by the FA model. +> You are advised to use the **\<[Image](../arkui-ts/ts-basic-components-image.md)>** component instead. The **\** component can be used to render and display local and online images. **System capability**: SystemCapability.Multimedia.MediaLibrary.Core @@ -780,15 +775,15 @@ mediaLibrary.getMediaLibrary().startImagePreview(images, (error) => { ``` -### startImagePreview(deprecated) +### startImagePreview startImagePreview(images: Array<string>, index?: number): Promise<void> Starts image preview, with the first image to preview specified. This API can be used to preview local images whose URIs start with **datashare://** or online images whose URIs start with **https://**. It uses a promise to return the execution result. > **NOTE** -> -> This API is deprecated since API version 9. You are advised to use the **\<[Image](../arkui-ts/ts-basic-components-image.md)>** component instead. The **\** component can be used to render and display local and online images. +> This API is supported since API version 6 and can be used only by the FA model. +> You are advised to use the **\<[Image](../arkui-ts/ts-basic-components-image.md)>** component instead. The **\** component can be used to render and display local and online images. **System capability**: SystemCapability.Multimedia.MediaLibrary.Core @@ -827,15 +822,15 @@ mediaLibrary.getMediaLibrary().startImagePreview(images, index).then(() => { ``` -### startMediaSelect(deprecated) +### startMediaSelect startMediaSelect(option: MediaSelectOption, callback: AsyncCallback<Array<string>>): void Starts media selection. This API uses an asynchronous callback to return the list of URIs that store the selected media assets. > **NOTE** -> -> This API is deprecated since API version 9. You are advised to use the system app Gallery instead. Gallery is a built-in visual resource access application that provides features such as image and video management and browsing. For details about how to use Gallery, visit [OpenHarmony/applications_photos](https://gitee.com/openharmony/applications_photos). +> This API is supported since API version 6 and can be used only by the FA model. +> You are advised to use the system app Gallery instead. 
Gallery is a built-in visual resource access application that provides features such as image and video management and browsing. For details about how to use Gallery, visit [OpenHarmony/applications_photos](https://gitee.com/openharmony/applications_photos). **System capability**: SystemCapability.Multimedia.MediaLibrary.Core @@ -843,7 +838,7 @@ Starts media selection. This API uses an asynchronous callback to return the lis | Name | Type | Mandatory | Description | | -------- | ---------------------------------------- | ---- | ------------------------------------ | -| option | [MediaSelectOption](#mediaselectoptiondeprecated) | Yes | Media selection option. | +| option | [MediaSelectOption](#mediaselectoption) | Yes | Media selection option. | | callback | AsyncCallback<Array<string>> | Yes | Callback used to return the list of URIs (starting with **datashare://**) that store the selected media assets.| **Example** @@ -864,15 +859,15 @@ mediaLibrary.getMediaLibrary().startMediaSelect(option, (error, value) => { ``` -### startMediaSelect(deprecated) +### startMediaSelect startMediaSelect(option: MediaSelectOption): Promise<Array<string>> Starts media selection. This API uses a promise to return the list of URIs that store the selected media assets. > **NOTE** -> -> This API is deprecated since API version 9. You are advised to use the system app Gallery instead. Gallery is a built-in visual resource access application that provides features such as image and video management and browsing. For details about how to use Gallery, visit [OpenHarmony/applications_photos](https://gitee.com/openharmony/applications_photos). +> This API is supported since API version 6 and can be used only by the FA model. +> You are advised to use the system app Gallery instead. Gallery is a built-in visual resource access application that provides features such as image and video management and browsing. For details about how to use Gallery, visit [OpenHarmony/applications_photos](https://gitee.com/openharmony/applications_photos). **System capability**: SystemCapability.Multimedia.MediaLibrary.Core @@ -880,7 +875,7 @@ Starts media selection. This API uses a promise to return the list of URIs that | Name | Type | Mandatory | Description | | ------ | --------------------------------------- | ---- | ------- | -| option | [MediaSelectOption](#mediaselectoptiondeprecated) | Yes | Media selection option.| +| option | [MediaSelectOption](#mediaselectoption) | Yes | Media selection option.| **Return value** @@ -1041,7 +1036,6 @@ async function example() { Provides APIs for encapsulating file asset attributes. > **NOTE** -> > 1. The system attempts to parse the file content if the file is an audio or video file. The actual field values will be restored from the passed values during scanning on some devices. > 2. Some devices may not support the modification of **orientation**. You are advised to use [ModifyImageProperty](js-apis-image.md#modifyimageproperty9) of the **image** module. 
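The note above points to **ModifyImageProperty** in the **image** module for devices that do not allow **orientation** to be edited on the file asset itself. Below is a minimal sketch of that flow, assuming a **FileAsset** obtained as in the surrounding examples and using an illustrative 'Orientation' key and 'Top-left' value; see [js-apis-image.md](js-apis-image.md#modifyimageproperty9) for the exact keys and values supported by your target version.

```js
import image from '@ohos.multimedia.image';

// Sketch: rewrite the EXIF orientation of a media asset obtained from mediaLibrary.
async function modifyOrientation(fileAsset) {
  // Open the asset in read-write mode to obtain a file descriptor.
  let fd = await fileAsset.open('rw');
  // Create an image source from the fd and modify the property through the image module.
  let imageSource = image.createImageSource(fd);
  await imageSource.modifyImageProperty('Orientation', 'Top-left'); // Key and value are illustrative.
  await imageSource.release();
  await fileAsset.close(fd);
}
```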
@@ -1923,9 +1917,9 @@ async function example() { if(i == fetchCount - 1) { var result = fetchFileResult.isAfterLast(); console.info('mediaLibrary fileAsset isAfterLast result: ' + result); + fetchFileResult.close(); } } - fetchFileResult.close(); } ``` @@ -1985,8 +1979,8 @@ async function example() { return; } console.info('getFirstObject successfully, displayName : ' + fileAsset.displayName); + fetchFileResult.close(); }) - fetchFileResult.close(); } ``` @@ -2018,10 +2012,10 @@ async function example() { let fetchFileResult = await media.getFileAssets(getImageOp); fetchFileResult.getFirstObject().then((fileAsset) => { console.info('getFirstObject successfully, displayName: ' + fileAsset.displayName); + fetchFileResult.close(); }).catch((error) => { console.error('getFirstObject failed with error: ' + error); }); - fetchFileResult.close(); } ``` @@ -2055,16 +2049,16 @@ async function example() { }; let fetchFileResult = await media.getFileAssets(getImageOp); let fileAsset = await fetchFileResult.getFirstObject(); - if (! fetchFileResult.isAfterLast) { + if (!fileAsset.isAfterLast) { fetchFileResult.getNextObject((error, fileAsset) => { if (error) { console.error('fetchFileResult getNextObject failed with error: ' + error); return; } console.log('fetchFileResult getNextObject successfully, displayName: ' + fileAsset.displayName); + fetchFileResult.close(); }) } - fetchFileResult.close(); } ``` @@ -2099,14 +2093,14 @@ async function example() { }; let fetchFileResult = await media.getFileAssets(getImageOp); let fileAsset = await fetchFileResult.getFirstObject(); - if (! fetchFileResult.isAfterLast) { + if (!fileAsset.isAfterLast) { fetchFileResult.getNextObject().then((fileAsset) => { console.info('fetchFileResult getNextObject successfully, displayName: ' + fileAsset.displayName); + fetchFileResult.close(); }).catch((error) => { console.error('fetchFileResult getNextObject failed with error: ' + error); }) } - fetchFileResult.close(); } ``` @@ -2142,8 +2136,8 @@ async function example() { return; } console.info('getLastObject successfully, displayName: ' + fileAsset.displayName); + fetchFileResult.close(); }) - fetchFileResult.close(); } ``` @@ -2175,10 +2169,10 @@ async function example() { let fetchFileResult = await media.getFileAssets(getImageOp); fetchFileResult.getLastObject().then((fileAsset) => { console.info('getLastObject successfully, displayName: ' + fileAsset.displayName); + fetchFileResult.close(); }).catch((error) => { console.error('getLastObject failed with error: ' + error); }); - fetchFileResult.close(); } ``` @@ -2215,8 +2209,8 @@ async function example() { return; } console.info('getPositionObject successfully, displayName: ' + fileAsset.displayName); + fetchFileResult.close(); }) - fetchFileResult.close(); } ``` @@ -2254,10 +2248,10 @@ async function example() { let fetchFileResult = await media.getFileAssets(getImageOp); fetchFileResult.getPositionObject(0).then((fileAsset) => { console.info('getPositionObject successfully, displayName: ' + fileAsset.displayName); + fetchFileResult.close(); }).catch((error) => { console.error('getPositionObject failed with error: ' + error); }); - fetchFileResult.close(); } ``` @@ -2294,9 +2288,9 @@ async function example() { } for (let i = 0; i < fetchFileResult.getCount(); i++) { console.info('getAllObject fileAssetList ' + i + ' displayName: ' + fileAssetList[i].displayName); - } + } + fetchFileResult.close(); }) - fetchFileResult.close(); } ``` @@ -2330,10 +2324,10 @@ async function example() { for (let i = 0; i < 
fetchFileResult.getCount(); i++) { console.info('getAllObject fileAssetList ' + i + ' displayName: ' + fileAssetList[i].displayName); } + fetchFileResult.close(); }).catch((error) => { console.error('getAllObject failed with error: ' + error); }); - fetchFileResult.close(); } ``` @@ -2465,10 +2459,10 @@ async function example() { console.error('album getFileAssets failed with error: ' + error); return; } - let count = fetchFileResult.getcount(); + let count = fetchFileResult.getCount(); console.info('album getFileAssets successfully, count: ' + count); + fetchFileResult.close(); }); - fetchFileResult.close(); } ``` @@ -2502,7 +2496,7 @@ async function example() { selections: '', selectionArgs: [], }; - let fileNoArgsfetchOp = { + let fileNoArgsfetchOp = { selections: '', selectionArgs: [], }; @@ -2510,13 +2504,13 @@ async function example() { const albumList = await media.getAlbums(AlbumNoArgsfetchOp); const album = albumList[0]; // Obtain an album from the album list and obtain all media assets that meet the retrieval options in the album. - album.getFileAssets(fileNoArgsfetchOp).then((albumFetchFileResult) => { - let count = fetchFileResult.getcount(); + album.getFileAssets(fileNoArgsfetchOp).then((fetchFileResult) => { + let count = fetchFileResult.getCount(); console.info('album getFileAssets successfully, count: ' + count); + fetchFileResult.close(); }).catch((error) => { console.error('album getFileAssets failed with error: ' + error); }); - fetchFileResult.close(); } ``` @@ -2555,7 +2549,6 @@ Enumerates media types. Enumerates key file information. > **NOTE** -> > The **bucket_id** field may change after file rename or movement. Therefore, you must obtain the field again before using it. **System capability**: SystemCapability.Multimedia.MediaLibrary.Core @@ -2641,14 +2634,10 @@ Describes the image size. | width | number | Yes | Yes | Image width, in pixels.| | height | number | Yes | Yes | Image height, in pixels.| -## MediaAssetOption(deprecated) +## MediaAssetOption Implements the media asset option. -> **NOTE** -> -> This API is deprecated since API version 9. - **System capability**: SystemCapability.Multimedia.MediaLibrary.Core @@ -2658,17 +2647,13 @@ Implements the media asset option. | mimeType | string | Yes | Yes | Multipurpose Internet Mail Extensions (MIME) type of the media.
The value can be 'image/\*', 'video/\*', 'audio/\*' or 'file\*'.|
 | relativePath | string | Yes  | Yes  | Custom path for storing media assets, for example, 'Pictures/'. If this parameter is unspecified, media assets are stored in the default path.<br>Default path of images: 'Pictures/'<br>Default path of videos: 'Videos/'<br>Default path of audios: 'Audios/'<br>Default path of files: 'Documents/'|
 
-## MediaSelectOption(deprecated)
+## MediaSelectOption
 
 Describes media selection option.
 
-> **NOTE**
->
-> This API is deprecated since API version 9.
-
 **System capability**: SystemCapability.Multimedia.MediaLibrary.Core
 
 | Name  | Type   | Readable| Writable| Description                  |
 | ----- | ------ | ---- | ---- | -------------------- |
 | type  | 'image' \| 'video' \| 'media' | Yes  | Yes  | Media type, which can be **image**, **media**, or **video**. Currently, only **media** is supported.|
-| count | number | Yes  | Yes  | Number of media assets selected. The value starts from 1, which indicates that one media asset can be selected.     |
+| count | number | Yes  | Yes  | Maximum number of media assets that can be selected. The value starts from 1, which indicates that one media asset can be selected.     |
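As the note at the top of this document states, media selection remains open to common applications through [@ohos.file.picker](js-apis-file-picker.md). The sketch below shows a picker-based selection that plays the role of **startMediaSelect** with a **MediaSelectOption**-style count limit; the **PhotoViewPicker**, **PhotoSelectOptions**, **maxSelectNumber**, and **photoUris** names are taken from the separate picker reference and should be verified against js-apis-file-picker.md for your API version.

```js
import picker from '@ohos.file.picker';

// Sketch: select up to three images and print their URIs, analogous to
// startMediaSelect with MediaSelectOption { type: 'media', count: 3 }.
async function selectMediaWithPicker() {
  try {
    let photoSelectOptions = new picker.PhotoSelectOptions();
    photoSelectOptions.MIMEType = picker.PhotoViewMIMETypes.IMAGE_TYPE; // Restrict the picker to images.
    photoSelectOptions.maxSelectNumber = 3;                             // Upper limit, like MediaSelectOption.count.
    let photoPicker = new picker.PhotoViewPicker();
    let result = await photoPicker.select(photoSelectOptions);
    console.info('Picker select successfully, uris: ' + JSON.stringify(result.photoUris));
  } catch (error) {
    console.error('Picker select failed with error: ' + error);
  }
}
```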