Commit eb9d2336 authored by Gloria

Update docs against 15272+14407+15018+14814+14539+14658+14627+14642+15283

Signed-off-by: wusongqing <wusongqing@huawei.com>
Parent: 0c2853e8
# Media

- Audio and Video
  - [Audio Overview](audio-overview.md)
  - [Audio Rendering Development](audio-renderer.md)
  - [Audio Stream Management Development](audio-stream-manager.md)
  - [Audio Capture Development](audio-capturer.md)
@@ -12,8 +10,10 @@
  - [Audio Interruption Mode Development](audio-interruptmode.md)
  - [Volume Management Development](audio-volume-manager.md)
  - [Audio Routing and Device Management Development](audio-routing-manager.md)
  - [AVPlayer Development (Recommended)](avplayer-playback.md)
  - [AVRecorder Development (Recommended)](avrecorder.md)
  - [Audio Playback Development](audio-playback.md)
  - [Audio Recording Development](audio-recorder.md)
  - [Video Playback Development](video-playback.md)
  - [Video Recording Development](video-recorder.md)
...
@@ -21,7 +21,7 @@ The following figure shows the audio capturer state transitions.
## Constraints

Before developing the audio data collection feature, configure the **ohos.permission.MICROPHONE** permission for your application. For details, see [Permission Application Guide](../security/accesstoken-guidelines.md).
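Because **ohos.permission.MICROPHONE** is a user_grant permission, the application also needs to request authorization from the user at runtime. The snippet below is a minimal sketch of that request based on **@ohos.abilityAccessCtrl**; the `context` parameter (a UIAbility context) and the log output are illustrative assumptions, and the Permission Application Guide remains the authoritative reference.

```js
import abilityAccessCtrl from '@ohos.abilityAccessCtrl';

async function requestMicrophonePermission(context) { // context: UIAbility context passed in by the caller (assumed).
  let atManager = abilityAccessCtrl.createAtManager();
  try {
    let result = await atManager.requestPermissionsFromUser(context, ['ohos.permission.MICROPHONE']);
    // authResults[0] is 0 when the microphone permission is granted.
    console.info('MICROPHONE granted: ' + (result.authResults[0] === 0));
  } catch (err) {
    console.error('requestPermissionsFromUser failed: ' + JSON.stringify(err));
  }
}
```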
## How to Develop

@@ -72,7 +72,7 @@ For details about the APIs, see [AudioCapturer in Audio Management](../reference
  }
  await audioCapturer.start();
  state = audioCapturer.state;
  if (state == audio.AudioState.STATE_RUNNING) {
    console.info('AudioRecLog: Capturer started');
  } else {
@@ -86,7 +86,7 @@ For details about the APIs, see [AudioCapturer in Audio Management](../reference
The following example shows how to write recorded data into a file.

```js
import fs from '@ohos.file.fs';

let state = audioCapturer.state;
// The read operation can be performed only when the state is STATE_RUNNING.
@@ -96,31 +96,36 @@ For details about the APIs, see [AudioCapturer in Audio Management](../reference
}

const path = '/data/data/.pulse_dir/capture_js.wav'; // Path for storing the collected audio file.
let file = fs.openSync(path, 0o102); // 0o102: create the file if it does not exist and open it in read/write mode.
let fd = file.fd;
if (file !== null) {
  console.info('AudioRecLog: file created');
} else {
  console.info('AudioRecLog: file create : FAILED');
  return;
}

if (fd !== null) {
  console.info('AudioRecLog: file fd opened in append mode');
}

let numBuffersToCapture = 150; // Write data for 150 times.
let count = 0;
while (numBuffersToCapture) {
  let bufferSize = await audioCapturer.getBufferSize();
  let buffer = await audioCapturer.read(bufferSize, true);
  let options = {
    offset: count * bufferSize,
    length: bufferSize
  };
  if (typeof buffer == 'undefined') {
    console.info('AudioRecLog: read buffer failed');
  } else {
    let number = fs.writeSync(fd, buffer, options);
    console.info(`AudioRecLog: data written: ${number}`);
  }
  numBuffersToCapture--;
  count++;
}
```
@@ -189,7 +194,7 @@ For details about the APIs, see [AudioCapturer in Audio Management](../reference
let audioTime : number = await audioCapturer.getAudioTime();
// Obtain a proper minimum buffer size.
let bufferSize : number = await audioCapturer.getBufferSize();
```

7. (Optional) Use **on('markReach')** to subscribe to the mark reached event, and use **off('markReach')** to unsubscribe from the event.
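A minimal sketch of this subscription is shown below; the frame count **1000** is an arbitrary example value, and **audioCapturer** is the instance created in the preceding steps.

```js
// Subscribe: the callback fires when the number of captured frames reaches 1000.
audioCapturer.on('markReach', 1000, (position) => {
  if (position == 1000) {
    console.info('ON Triggered successfully');
  }
});

// Unsubscribe from the mark reached event when it is no longer needed.
audioCapturer.off('markReach');
```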
...
@@ -38,7 +38,7 @@ For details about the **src** types supported by **AudioPlayer**, see the [src a
```js
import media from '@ohos.multimedia.media'
import fs from '@ohos.file.fs'
// Print the stream track information.
function printfDescription(obj) {
@@ -112,14 +112,8 @@ async function audioPlayerDemo() {
  let pathDir = "/data/storage/el2/base/haps/entry/files" // The path used here is an example. Obtain the path based on project requirements.
  // The stream in the path can be pushed to the device by running the "hdc file send D:\xxx\01.mp3 /data/app/el2/100/base/ohos.acts.multimedia.audio.audioplayer/haps/entry/files" command.
  let path = pathDir + '/01.mp3'
  let file = await fs.open(path);
  fdPath = fdPath + '' + file.fd;
  audioPlayer.src = fdPath; // Set the src attribute and trigger the 'dataLoad' event callback.
}
```
@@ -128,7 +122,7 @@ async function audioPlayerDemo() {
```js
import media from '@ohos.multimedia.media'
import fs from '@ohos.file.fs'
export class AudioDemo {
  // Set the player callbacks.
@@ -154,14 +148,8 @@ export class AudioDemo {
    let pathDir = "/data/storage/el2/base/haps/entry/files" // The path used here is an example. Obtain the path based on project requirements.
    // The stream in the path can be pushed to the device by running the "hdc file send D:\xxx\01.mp3 /data/app/el2/100/base/ohos.acts.multimedia.audio.audioplayer/haps/entry/files" command.
    let path = pathDir + '/01.mp3'
    let file = await fs.open(path);
    fdPath = fdPath + '' + file.fd;
    audioPlayer.src = fdPath; // Set the src attribute and trigger the 'dataLoad' event callback.
  }
}
@@ -171,7 +159,7 @@ export class AudioDemo {
```js
import media from '@ohos.multimedia.media'
import fs from '@ohos.file.fs'
export class AudioDemo {
  // Set the player callbacks.
@@ -202,14 +190,8 @@ export class AudioDemo {
    let pathDir = "/data/storage/el2/base/haps/entry/files" // The path used here is an example. Obtain the path based on project requirements.
    // The stream in the path can be pushed to the device by running the "hdc file send D:\xxx\02.mp3 /data/app/el2/100/base/ohos.acts.multimedia.audio.audioplayer/haps/entry/files" command.
    let nextpath = pathDir + '/02.mp3'
    let nextFile = await fs.open(nextpath);
    nextFdPath = nextFdPath + '' + nextFile.fd;
    audioPlayer.src = nextFdPath; // Set the src attribute and trigger the 'dataLoad' event callback.
  }
@@ -220,14 +202,8 @@ export class AudioDemo {
    let pathDir = "/data/storage/el2/base/haps/entry/files" // The path used here is an example. Obtain the path based on project requirements.
    // The stream in the path can be pushed to the device by running the "hdc file send D:\xxx\01.mp3 /data/app/el2/100/base/ohos.acts.multimedia.audio.audioplayer/haps/entry/files" command.
    let path = pathDir + '/01.mp3'
    let file = await fs.open(path);
    fdPath = fdPath + '' + file.fd;
    audioPlayer.src = fdPath; // Set the src attribute and trigger the 'dataLoad' event callback.
  }
}
@@ -237,7 +213,7 @@ export class AudioDemo {
```js
import media from '@ohos.multimedia.media'
import fs from '@ohos.file.fs'
export class AudioDemo {
  // Set the player callbacks.
@@ -259,14 +235,8 @@ export class AudioDemo {
    let pathDir = "/data/storage/el2/base/haps/entry/files" // The path used here is an example. Obtain the path based on project requirements.
    // The stream in the path can be pushed to the device by running the "hdc file send D:\xxx\01.mp3 /data/app/el2/100/base/ohos.acts.multimedia.audio.audioplayer/haps/entry/files" command.
    let path = pathDir + '/01.mp3'
    let file = await fs.open(path);
    fdPath = fdPath + '' + file.fd;
    audioPlayer.src = fdPath; // Set the src attribute and trigger the 'dataLoad' event callback.
  }
}
...
@@ -34,8 +34,7 @@ For details about the APIs, see [AudioRenderer in Audio Management](../reference
1. Use **createAudioRenderer()** to create an **AudioRenderer** instance.

Set parameters of the **AudioRenderer** instance in **audioRendererOptions**. This instance is used to render audio, control and obtain the rendering status, and register a callback for notification.

```js
import audio from '@ohos.multimedia.audio';
@@ -82,15 +81,15 @@ Set parameters of the **AudioRenderer** instance in **audioRendererOptions**. Th
  }
}
```

The renderer state will be **STATE_RUNNING** once the audio renderer is started. The application can then begin reading buffers.
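A minimal sketch of this step, assuming the **audioRenderer** instance created in step 1:

```js
await audioRenderer.start();
if (audioRenderer.state == audio.AudioState.STATE_RUNNING) {
  console.info('Renderer started'); // Buffers can now be written in step 3.
} else {
  console.error('Renderer start failed');
}
```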
3. Call **write()** to write data to the buffer.

Read the audio data to be played to the buffer. Call **write()** repeatedly to write the data to the buffer.
```js
import fs from '@ohos.file.fs';
import audio from '@ohos.multimedia.audio';

async function writeBuffer(buf) {
@@ -109,35 +108,33 @@ Set parameters of the **AudioRenderer** instance in **audioRendererOptions**. Th
// Set a proper buffer size for the audio renderer. You can also select a buffer of another size.
const bufferSize = await audioRenderer.getBufferSize();
let dir = globalThis.fileDir; // You must use the sandbox path.
const filePath = dir + '/file_example_WAV_2MG.wav'; // The file to render is in the following path: /data/storage/el2/base/haps/entry/files/file_example_WAV_2MG.wav
console.info(`file path: ${filePath}`);

let file = fs.openSync(filePath, fs.OpenMode.READ_ONLY);
let stat = await fs.stat(filePath); // Music file information.
let buf = new ArrayBuffer(bufferSize);
let len = stat.size % bufferSize == 0 ? Math.floor(stat.size / bufferSize) : Math.floor(stat.size / bufferSize + 1); // Number of buffers needed to cover the whole file.
for (let i = 0; i < len; i++) {
  let options = {
    offset: i * bufferSize,
    length: bufferSize
  };
  let readsize = await fs.read(file.fd, buf, options);
  let writeSize = await new Promise((resolve, reject) => {
    audioRenderer.write(buf, (err, writeSize) => {
      if (err) {
        reject(err);
      } else {
        resolve(writeSize);
      }
    });
  });
}
fs.close(file);
await audioRenderer.stop(); // Stop rendering.
await audioRenderer.release(); // Release the resources.
```
4. (Optional) Call **pause()** or **stop()** to pause or stop rendering.
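A minimal sketch of this optional step, assuming the **audioRenderer** instance created earlier; the state checks mirror those used elsewhere in this guide.

```js
// Pause rendering only when the renderer is running.
if (audioRenderer.state == audio.AudioState.STATE_RUNNING) {
  await audioRenderer.pause();
  console.info('Renderer paused');
}

// Stop rendering when the renderer is running or paused.
if (audioRenderer.state == audio.AudioState.STATE_RUNNING || audioRenderer.state == audio.AudioState.STATE_PAUSED) {
  await audioRenderer.stop();
  console.info('Renderer stopped');
}
```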
@@ -242,7 +239,7 @@ Set parameters of the **AudioRenderer** instance in **audioRendererOptions**. Th
let audioTime : number = await audioRenderer.getAudioTime();
// Obtain a proper minimum buffer size.
let bufferSize : number = await audioRenderer.getBufferSize();
// Obtain the audio renderer rate.
let renderRate : audio.AudioRendererRate = await audioRenderer.getRenderRate();
@@ -424,35 +421,31 @@ Set parameters of the **AudioRenderer** instance in **audioRendererOptions**. Th
    let dir = globalThis.fileDir; // You must use the sandbox path.
    const path1 = dir + '/music001_48000_32_1.wav'; // The file to render is in the following path: /data/storage/el2/base/haps/entry/files/music001_48000_32_1.wav
    console.info(`audioRender1 file path: ${path1}`);
    let file1 = fs.openSync(path1, fs.OpenMode.READ_ONLY);
    let stat = await fs.stat(path1); // Music file information.
    let buf = new ArrayBuffer(bufferSize);
    let len = stat.size % bufferSize == 0 ? Math.floor(stat.size / bufferSize) : Math.floor(stat.size / bufferSize + 1);
    // 1.7 Render the original audio data in the buffer by using audioRender.
    for (let i = 0; i < len; i++) {
      let options = {
        offset: i * bufferSize,
        length: bufferSize
      };
      let readsize = await fs.read(file1.fd, buf, options);
      let writeSize = await new Promise((resolve, reject) => {
        audioRenderer1.write(buf, (err, writeSize) => {
          if (err) {
            reject(err);
          } else {
            resolve(writeSize);
          }
        });
      });
    }
    fs.close(file1);
    await audioRenderer1.stop(); // Stop rendering.
    await audioRenderer1.release(); // Release the resources.
  }

  async runningAudioRender2(){
@@ -499,36 +492,32 @@ Set parameters of the **AudioRenderer** instance in **audioRendererOptions**. Th
    // 2.6 Read the original audio data file.
    let dir = globalThis.fileDir; // You must use the sandbox path.
    const path2 = dir + '/music002_48000_32_1.wav'; // The file to render is in the following path: /data/storage/el2/base/haps/entry/files/music002_48000_32_1.wav
    console.info(`audioRender2 file path: ${path2}`);
    let file2 = fs.openSync(path2, fs.OpenMode.READ_ONLY);
    let stat = await fs.stat(path2); // Music file information.
    let buf = new ArrayBuffer(bufferSize);
    let len = stat.size % bufferSize == 0 ? Math.floor(stat.size / bufferSize) : Math.floor(stat.size / bufferSize + 1);
    // 2.7 Render the original audio data in the buffer by using audioRender.
    for (let i = 0; i < len; i++) {
      let options = {
        offset: i * bufferSize,
        length: bufferSize
      };
      let readsize = await fs.read(file2.fd, buf, options);
      let writeSize = await new Promise((resolve, reject) => {
        audioRenderer2.write(buf, (err, writeSize) => {
          if (err) {
            reject(err);
          } else {
            resolve(writeSize);
          }
        });
      });
    }
    fs.close(file2);
    await audioRenderer2.stop(); // Stop rendering.
    await audioRenderer2.release(); // Release the resources.
  }

  async writeBuffer(buf, audioRender) {
...
@@ -104,7 +104,7 @@ The full playback process includes creating an instance, setting resources, sett
```js
import media from '@ohos.multimedia.media'
import audio from '@ohos.multimedia.audio';
import fs from '@ohos.file.fs'
const TAG = 'AVPlayerDemo:'
export class AVPlayerDemo {
@@ -223,14 +223,8 @@ export class AVPlayerDemo {
    let pathDir = "/data/storage/el2/base/haps/entry/files" // The path used here is an example. Obtain the path based on project requirements.
    // The stream in the path can be pushed to the device by running the "hdc file send D:\xxx\H264_AAC.mp4 /data/app/el2/100/base/ohos.acts.multimedia.media.avplayer/haps/entry/files" command.
    let path = pathDir + '/H264_AAC.mp4'
    let file = await fs.open(path)
    fdPath = fdPath + '' + file.fd
    this.avPlayer.url = fdPath
  }
}
@@ -240,7 +234,7 @@ export class AVPlayerDemo {
```js
import media from '@ohos.multimedia.media'
import fs from '@ohos.file.fs'
const TAG = 'AVPlayerDemo:'
export class AVPlayerDemo {
@@ -280,7 +274,7 @@ export class AVPlayerDemo {
        break;
      case 'stopped': // This state is reported upon a successful callback of stop().
        console.info(TAG + 'state stopped called')
        this.avPlayer.release() // Call release() to release the AVPlayer resources.
        break;
      case 'released':
        console.info(TAG + 'state released called')
@@ -302,24 +296,18 @@ export class AVPlayerDemo {
    let pathDir = "/data/storage/el2/base/haps/entry/files" // The path used here is an example. Obtain the path based on project requirements.
    // The stream in the path can be pushed to the device by running the "hdc file send D:\xxx\H264_AAC.mp4 /data/app/el2/100/base/ohos.acts.multimedia.media.avplayer/haps/entry/files" command.
    let path = pathDir + '/H264_AAC.mp4'
    let file = await fs.open(path)
    fdPath = fdPath + '' + file.fd
    this.avPlayer.url = fdPath
  }
}
```
### Looping a Song
```js
import media from '@ohos.multimedia.media'
import fs from '@ohos.file.fs'
const TAG = 'AVPlayerDemo:'
export class AVPlayerDemo {
@@ -362,7 +350,7 @@ export class AVPlayerDemo {
        break;
      case 'stopped': // This state is reported upon a successful callback of stop().
        console.info(TAG + 'state stopped called')
        this.avPlayer.release() // Call release() to release the AVPlayer resources.
        break;
      case 'released':
        console.info(TAG + 'state released called')
@@ -393,23 +381,17 @@ export class AVPlayerDemo {
    let pathDir = "/data/storage/el2/base/haps/entry/files" // The path used here is an example. Obtain the path based on project requirements.
    // The stream in the path can be pushed to the device by running the "hdc file send D:\xxx\H264_AAC.mp4 /data/app/el2/100/base/ohos.acts.multimedia.media.avplayer/haps/entry/files" command.
    let path = pathDir + '/H264_AAC.mp4'
    let file = await fs.open(path)
    fdPath = fdPath + '' + file.fd
    this.avPlayer.url = fdPath
  }
}
```
### Switching to the Next Video Clip
```js
import media from '@ohos.multimedia.media'
import fs from '@ohos.file.fs'
const TAG = 'AVPlayerDemo:'
export class AVPlayerDemo {
@@ -422,14 +404,8 @@ export class AVPlayerDemo {
    let pathDir = "/data/storage/el2/base/haps/entry/files" // The path used here is an example. Obtain the path based on project requirements.
    // The stream in the path can be pushed to the device by running the "hdc file send D:\xxx\H264_MP3.mp4 /data/app/el2/100/base/ohos.acts.multimedia.media.avplayer/haps/entry/files" command.
    let path = pathDir + '/H264_MP3.mp4'
    let file = await fs.open(path)
    fdPath = fdPath + '' + file.fd
    this.avPlayer.url = fdPath // The initialized state is reported again.
  }
@@ -493,14 +469,8 @@ export class AVPlayerDemo {
    let pathDir = "/data/storage/el2/base/haps/entry/files" // The path used here is an example. Obtain the path based on project requirements.
    // The stream in the path can be pushed to the device by running the "hdc file send D:\xxx\H264_AAC.mp4 /data/app/el2/100/base/ohos.acts.multimedia.media.avplayer/haps/entry/files" command.
    let path = pathDir + '/H264_AAC.mp4'
    let file = await fs.open(path)
    fdPath = fdPath + '' + file.fd
    this.avPlayer.url = fdPath
  }
}
...
@@ -67,14 +67,14 @@ export class AVRecorderDemo {
    let surfaceID; // The surface ID is obtained by calling getInputSurface and transferred to the videoOutput object of the camera.
    await this.getFd('01.mp4');
    // Configure the parameters related to audio and video recording based on those supported by the hardware device.
    let avProfile = {
      audioBitrate : 48000,
      audioChannels : 2,
      audioCodec : media.CodecMimeType.AUDIO_AAC,
      audioSampleRate : 48000,
      fileFormat : media.ContainerFormatType.CFT_MPEG_4,
      videoBitrate : 2000000,
      videoCodec : media.CodecMimeType.VIDEO_MPEG4,
      videoFrameWidth : 640,
      videoFrameHeight : 480,
@@ -363,10 +363,10 @@ export class VideoRecorderDemo {
    let surfaceID; // The surface ID is obtained by calling getInputSurface and transferred to the videoOutput object of the camera.
    await this.getFd('01.mp4');
    // Configure the parameters related to pure video recording based on those supported by the hardware device.
    let videoProfile = {
      fileFormat : media.ContainerFormatType.CFT_MPEG_4,
      videoBitrate : 2000000,
      videoCodec : media.CodecMimeType.VIDEO_MPEG4,
      videoFrameWidth : 640,
      videoFrameHeight : 480,
...
@@ -51,7 +51,7 @@ For details about how to create an XComponent, see [XComponent](../reference/ark
```js
import media from '@ohos.multimedia.media'
import fs from '@ohos.file.fs'
export class VideoPlayerDemo {
  // Report an error in the case of a function invocation failure.
  failureCallback(error) {
@@ -82,14 +82,8 @@ export class VideoPlayerDemo {
    let fdPath = 'fd://'
    // The stream in the path can be pushed to the device by running the "hdc file send D:\xxx\H264_AAC.mp4 /data/app/el1/bundle/public/ohos.acts.multimedia.video.videoplayer/ohos.acts.multimedia.video.videoplayer/assets/entry/resources/rawfile" command.
    let path = '/data/app/el1/bundle/public/ohos.acts.multimedia.video.videoplayer/ohos.acts.multimedia.video.videoplayer/assets/entry/resources/rawfile/H264_AAC.mp4';
    let file = await fs.open(path);
    fdPath = fdPath + '' + file.fd;
    // Call createVideoPlayer to create a VideoPlayer instance.
    await media.createVideoPlayer().then((video) => {
      if (typeof (video) != 'undefined') {
@@ -180,7 +174,7 @@ export class VideoPlayerDemo {
```js
import media from '@ohos.multimedia.media'
import fs from '@ohos.file.fs'
export class VideoPlayerDemo {
  // Report an error in the case of a function invocation failure.
  failureCallback(error) {
@@ -211,14 +205,8 @@ export class VideoPlayerDemo {
    let fdPath = 'fd://'
    // The stream in the path can be pushed to the device by running the "hdc file send D:\xxx\H264_AAC.mp4 /data/app/el1/bundle/public/ohos.acts.multimedia.video.videoplayer/ohos.acts.multimedia.video.videoplayer/assets/entry/resources/rawfile" command.
    let path = '/data/app/el1/bundle/public/ohos.acts.multimedia.video.videoplayer/ohos.acts.multimedia.video.videoplayer/assets/entry/resources/rawfile/H264_AAC.mp4';
    let file = await fs.open(path);
    fdPath = fdPath + '' + file.fd;
    // Call createVideoPlayer to create a VideoPlayer instance.
    await media.createVideoPlayer().then((video) => {
      if (typeof (video) != 'undefined') {
@@ -267,7 +255,7 @@ export class VideoPlayerDemo {
```js
import media from '@ohos.multimedia.media'
import fs from '@ohos.file.fs'
export class VideoPlayerDemo {
  // Report an error in the case of a function invocation failure.
  failureCallback(error) {
@@ -299,14 +287,8 @@ export class VideoPlayerDemo {
    // The stream in the path can be pushed to the device by running the "hdc file send D:\xxx\H264_AAC.mp4 /data/app/el1/bundle/public/ohos.acts.multimedia.video.videoplayer/ohos.acts.multimedia.video.videoplayer/assets/entry/resources/rawfile" command.
    let path = '/data/app/el1/bundle/public/ohos.acts.multimedia.video.videoplayer/ohos.acts.multimedia.video.videoplayer/assets/entry/resources/rawfile/H264_AAC.mp4';
    let nextPath = '/data/app/el1/bundle/public/ohos.acts.multimedia.video.videoplayer/ohos.acts.multimedia.video.videoplayer/assets/entry/resources/rawfile/MP4_AAC.mp4';
    let file = await fs.open(path);
    fdPath = fdPath + '' + file.fd;
    // Call createVideoPlayer to create a VideoPlayer instance.
    await media.createVideoPlayer().then((video) => {
      if (typeof (video) != 'undefined') {
@@ -341,14 +323,8 @@ export class VideoPlayerDemo {
    // Obtain the next video FD address.
    fdPath = 'fd://'
    let nextFile = await fs.open(nextPath);
    fdPath = fdPath + '' + nextFile.fd;
    // Set the second video playback source.
    videoPlayer.url = fdPath;
@@ -378,7 +354,7 @@ export class VideoPlayerDemo {
```js
import media from '@ohos.multimedia.media'
import fs from '@ohos.file.fs'
export class VideoPlayerDemo {
  // Report an error in the case of a function invocation failure.
  failureCallback(error) {
@@ -409,14 +385,8 @@ export class VideoPlayerDemo {
    let fdPath = 'fd://'
    // The stream in the path can be pushed to the device by running the "hdc file send D:\xxx\H264_AAC.mp4 /data/app/el1/bundle/public/ohos.acts.multimedia.video.videoplayer/ohos.acts.multimedia.video.videoplayer/assets/entry/resources/rawfile" command.
    let path = '/data/app/el1/bundle/public/ohos.acts.multimedia.video.videoplayer/ohos.acts.multimedia.video.videoplayer/assets/entry/resources/rawfile/H264_AAC.mp4';
    let file = await fs.open(path);
    fdPath = fdPath + '' + file.fd;
    // Call createVideoPlayer to create a VideoPlayer instance.
    await media.createVideoPlayer().then((video) => {
      if (typeof (video) != 'undefined') {
...
@@ -76,14 +76,14 @@ export class VideoRecorderDemo {
    let surfaceID = null; // Used to save the surface ID returned by getInputSurface.
    // Obtain the FD address of the video to be recorded.
    await this.getFd('01.mp4');
    // Configure the parameters related to video recording based on those supported by the hardware device.
    let videoProfile = {
      audioBitrate : 48000,
      audioChannels : 2,
      audioCodec : 'audio/mp4a-latm',
      audioSampleRate : 48000,
      fileFormat : 'mp4',
      videoBitrate : 2000000,
      videoCodec : 'video/mp4v-es',
      videoFrameWidth : 640,
      videoFrameHeight : 480,
...
@@ -1161,7 +1161,7 @@ Unsubscribes from the event that checks whether the bit rate is successfully set
| Name| Type  | Mandatory| Description                                   |
| ------ | ------ | ---- | ------------------------------------------------------------ |
| type   | string | Yes  | Event type, which is **'bitrateDone'** in this case.|

**Example**
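A minimal usage sketch, assuming an **avPlayer** instance that previously subscribed to the **'bitrateDone'** event:

```js
avPlayer.off('bitrateDone')
```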
@@ -1694,6 +1694,8 @@ Sets audio and video recording parameters. This API uses an asynchronous callbac
This permission is required only if audio recording is involved.

To use the camera to record videos, the camera module is required. For details about how to obtain the permissions and use the APIs, see [Camera Management](js-apis-camera.md).

**System capability**: SystemCapability.Multimedia.Media.AVRecorder

**Parameters**
@@ -1717,13 +1719,14 @@ For details about the error codes, see [Media Error Codes](../errorcodes/errorco
**Example**

```js
// Configure the parameters based on those supported by the hardware device.
let AVRecorderProfile = {
  audioBitrate : 48000,
  audioChannels : 2,
  audioCodec : media.CodecMimeType.AUDIO_AAC,
  audioSampleRate : 48000,
  fileFormat : media.ContainerFormatType.CFT_MPEG_4,
  videoBitrate : 2000000,
  videoCodec : media.CodecMimeType.VIDEO_MPEG4,
  videoFrameWidth : 640,
  videoFrameHeight : 480,
@@ -1757,6 +1760,8 @@ Sets audio and video recording parameters. This API uses a promise to return the
This permission is required only if audio recording is involved.

To use the camera to record videos, the camera module is required. For details about how to obtain the permissions and use the APIs, see [Camera Management](js-apis-camera.md).

**System capability**: SystemCapability.Multimedia.Media.AVRecorder

**Parameters**
@@ -1785,13 +1790,14 @@ For details about the error codes, see [Media Error Codes](../errorcodes/errorco
**Example**

```js
// Configure the parameters based on those supported by the hardware device.
let AVRecorderProfile = {
  audioBitrate : 48000,
  audioChannels : 2,
  audioCodec : media.CodecMimeType.AUDIO_AAC,
  audioSampleRate : 48000,
  fileFormat : media.ContainerFormatType.CFT_MPEG_4,
  videoBitrate : 2000000,
  videoCodec : media.CodecMimeType.VIDEO_MPEG4,
  videoFrameWidth : 640,
  videoFrameHeight : 480,
@@ -2479,7 +2485,7 @@ Describes the audio and video recording parameters.
| audioSourceType | [AudioSourceType](#audiosourcetype9)     | No  | Type of the audio source to record. This parameter is mandatory for audio recording. |
| videoSourceType | [VideoSourceType](#videosourcetype9)     | No  | Type of the video source to record. This parameter is mandatory for video recording. |
| profile         | [AVRecorderProfile](#avrecorderprofile9) | Yes | Recording profile. This parameter is mandatory.                                       |
| url             | string                                   | Yes | Recording output URL: fd://xx (fd number).<br>![img](figures/en-us_image_url.png)<br>This parameter is mandatory. |
| rotation        | number                                   | No  | Rotation angle of the recorded video. The value can only be 0, 90, 180, or 270.       |
| location        | [Location](#location)                    | No  | Geographical location of the recorded video.                                          |
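For reference, the sketch below assembles an **AVRecorderConfig** object from these fields; the enumeration values, the **AVRecorderProfile** variable, and the fd number are illustrative assumptions drawn from the surrounding examples.

```js
let avConfig = {
  audioSourceType : media.AudioSourceType.AUDIO_SOURCE_TYPE_MIC,       // Record audio from the microphone.
  videoSourceType : media.VideoSourceType.VIDEO_SOURCE_TYPE_SURFACE_YUV, // Record video frames delivered through a surface.
  profile : AVRecorderProfile,                                          // The AVRecorderProfile object defined above.
  url : 'fd://35',                                                      // fd obtained by opening the output file.
  rotation : 0,
  location : { latitude : 30, longitude : 130 }
}
```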
@@ -2601,13 +2607,14 @@ For details about the error codes, see [Media Error Codes](../errorcodes/errorco
**Example**

```js
// Configure the parameters based on those supported by the hardware device.
let videoProfile = {
  audioBitrate : 48000,
  audioChannels : 2,
  audioCodec : 'audio/mp4a-latm',
  audioSampleRate : 48000,
  fileFormat : 'mp4',
  videoBitrate : 2000000,
  videoCodec : 'video/mp4v-es',
  videoFrameWidth : 640,
  videoFrameHeight : 480,
@@ -2671,13 +2678,14 @@ For details about the error codes, see [Media Error Codes](../errorcodes/errorco
**Example**

```js
// Configure the parameters based on those supported by the hardware device.
let videoProfile = {
  audioBitrate : 48000,
  audioChannels : 2,
  audioCodec : 'audio/mp4a-latm',
  audioSampleRate : 48000,
  fileFormat : 'mp4',
  videoBitrate : 2000000,
  videoCodec : 'video/mp4v-es',
  videoFrameWidth : 640,
  videoFrameHeight : 480,
@@ -3796,7 +3804,7 @@ audioPlayer.on('error', (error) => { // Set the 'error' event callback
  console.info(`audio error called, error: ${error}`);
});

// Set the FD (local playback) of the audio file selected by the user.
let fdPath = 'fd://';
// The stream in the path can be pushed to the device by running the "hdc file send D:\xxx\01.mp3 /data/accounts/account_0/appdata" command.
let path = '/data/accounts/account_0/appdata/ohos.xxx.xxx.xxx/01.mp3';
...