diff --git a/en/application-dev/media/Readme-EN.md b/en/application-dev/media/Readme-EN.md
index d65c0d9dbe51f963385afaac0b75deccc6b21d2b..926a2718a48dcefd217e503932f9d9f997d1275e 100755
--- a/en/application-dev/media/Readme-EN.md
+++ b/en/application-dev/media/Readme-EN.md
@@ -1,9 +1,7 @@
# Media
-- Audio
+- Audio and Video
- [Audio Overview](audio-overview.md)
- - [Audio Playback Development](audio-playback.md)
- - [Audio Recording Development](audio-recorder.md)
- [Audio Rendering Development](audio-renderer.md)
- [Audio Stream Management Development](audio-stream-manager.md)
- [Audio Capture Development](audio-capturer.md)
@@ -12,8 +10,10 @@
- [Audio Interruption Mode Development](audio-interruptmode.md)
- [Volume Management Development](audio-volume-manager.md)
- [Audio Routing and Device Management Development](audio-routing-manager.md)
-
-- Video
+ - [AVPlayer Development (Recommended)](avplayer-playback.md)
+ - [AVRecorder Development (Recommended)](avrecorder.md)
+ - [Audio Playback Development](audio-playback.md)
+ - [Audio Recording Development](audio-recorder.md)
- [Video Playback Development](video-playback.md)
- [Video Recording Development](video-recorder.md)
diff --git a/en/application-dev/media/audio-capturer.md b/en/application-dev/media/audio-capturer.md
index 4202b8ea4d78e9c38f43fc77bf7ea503712340d8..8371b6248d71f48e9088da849dc36c3edb2be3cf 100644
--- a/en/application-dev/media/audio-capturer.md
+++ b/en/application-dev/media/audio-capturer.md
@@ -72,7 +72,7 @@ For details about the APIs, see [AudioCapturer in Audio Management](../reference
}
await audioCapturer.start();
- let state = audioCapturer.state;
+ state = audioCapturer.state;
if (state == audio.AudioState.STATE_RUNNING) {
console.info('AudioRecLog: Capturer started');
} else {
@@ -86,7 +86,7 @@ For details about the APIs, see [AudioCapturer in Audio Management](../reference
The following example shows how to write recorded data into a file.
```js
- import fileio from '@ohos.fileio';
+ import fs from '@ohos.file.fs';
let state = audioCapturer.state;
// The read operation can be performed only when the state is STATE_RUNNING.
@@ -96,31 +96,36 @@ For details about the APIs, see [AudioCapturer in Audio Management](../reference
}
const path = '/data/data/.pulse_dir/capture_js.wav'; // Path for storing the collected audio file.
- let fd = fileio.openSync(path, 0o102, 0o777);
- if (fd !== null) {
- console.info('AudioRecLog: file fd created');
- }
- else{
- console.info('AudioRecLog: file fd create : FAILED');
+  let file = fs.openSync(path, fs.OpenMode.READ_WRITE | fs.OpenMode.CREATE); // Create the file at the capture path if it does not exist.
+ let fd = file.fd;
+ if (file !== null) {
+ console.info('AudioRecLog: file created');
+ } else {
+ console.info('AudioRecLog: file create : FAILED');
return;
}
-
- fd = fileio.openSync(path, 0o2002, 0o666);
+
if (fd !== null) {
console.info('AudioRecLog: file fd opened in append mode');
}
let numBuffersToCapture = 150; // Write data for 150 times.
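+    // count tracks how many buffers have been written; it determines the offset of each write into the file.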
+ let count = 0;
while (numBuffersToCapture) {
+ let bufferSize = await audioCapturer.getBufferSize();
let buffer = await audioCapturer.read(bufferSize, true);
+ let options = {
+      offset: count * bufferSize,
+      length: bufferSize
+ }
if (typeof(buffer) == undefined) {
console.info('AudioRecLog: read buffer failed');
} else {
- let number = fileio.writeSync(fd, buffer);
+ let number = fs.writeSync(fd, buffer, options);
console.info(`AudioRecLog: data written: ${number}`);
- }
-
+ }
numBuffersToCapture--;
+ count++;
}
```
@@ -189,7 +194,7 @@ For details about the APIs, see [AudioCapturer in Audio Management](../reference
let audioTime : number = await audioCapturer.getAudioTime();
// Obtain a proper minimum buffer size.
- let bufferSize : number = await audioCapturer.getBuffersize();
+ let bufferSize : number = await audioCapturer.getBufferSize();
```
7. (Optional) Use **on('markReach')** to subscribe to the mark reached event, and use **off('markReach')** to unsubscribe from the event.
diff --git a/en/application-dev/media/audio-playback.md b/en/application-dev/media/audio-playback.md
index bbdb993ecdb9a1289a939af43db0e670ec10f98f..1c7953d32b8ecee4c0ff34e82ab8d13947ac9271 100644
--- a/en/application-dev/media/audio-playback.md
+++ b/en/application-dev/media/audio-playback.md
@@ -38,7 +38,7 @@ For details about the **src** types supported by **AudioPlayer**, see the [src a
```js
import media from '@ohos.multimedia.media'
-import fileIO from '@ohos.fileio'
+import fs from '@ohos.file.fs'
// Print the stream track information.
function printfDescription(obj) {
@@ -112,14 +112,8 @@ async function audioPlayerDemo() {
let pathDir = "/data/storage/el2/base/haps/entry/files" // The path used here is an example. Obtain the path based on project requirements.
// The stream in the path can be pushed to the device by running the "hdc file send D:\xxx\01.mp3 /data/app/el2/100/base/ohos.acts.multimedia.audio.audioplayer/haps/entry/files" command.
let path = pathDir + '/01.mp3'
- await fileIO.open(path).then((fdNumber) => {
- fdPath = fdPath + '' + fdNumber;
- console.info('open fd success fd is' + fdPath);
- }, (err) => {
- console.info('open fd failed err is' + err);
- }).catch((err) => {
- console.info('open fd failed err is' + err);
- });
+ let file = await fs.open(path);
+ fdPath = fdPath + '' + file.fd;
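+    // fdPath now has the form 'fd://<number>'; AudioPlayer accepts this fd-based URL for local files.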
audioPlayer.src = fdPath; // Set the src attribute and trigger the 'dataLoad' event callback.
}
```
@@ -128,7 +122,7 @@ async function audioPlayerDemo() {
```js
import media from '@ohos.multimedia.media'
-import fileIO from '@ohos.fileio'
+import fs from '@ohos.file.fs'
export class AudioDemo {
// Set the player callbacks.
@@ -154,14 +148,8 @@ export class AudioDemo {
let pathDir = "/data/storage/el2/base/haps/entry/files" // The path used here is an example. Obtain the path based on project requirements.
// The stream in the path can be pushed to the device by running the "hdc file send D:\xxx\01.mp3 /data/app/el2/100/base/ohos.acts.multimedia.audio.audioplayer/haps/entry/files" command.
let path = pathDir + '/01.mp3'
- await fileIO.open(path).then((fdNumber) => {
- fdPath = fdPath + '' + fdNumber;
- console.info('open fd success fd is' + fdPath);
- }, (err) => {
- console.info('open fd failed err is' + err);
- }).catch((err) => {
- console.info('open fd failed err is' + err);
- });
+ let file = await fs.open(path);
+ fdPath = fdPath + '' + file.fd;
audioPlayer.src = fdPath; // Set the src attribute and trigger the 'dataLoad' event callback.
}
}
@@ -171,7 +159,7 @@ export class AudioDemo {
```js
import media from '@ohos.multimedia.media'
-import fileIO from '@ohos.fileio'
+import fs from '@ohos.file.fs'
export class AudioDemo {
// Set the player callbacks.
@@ -202,14 +190,8 @@ export class AudioDemo {
let pathDir = "/data/storage/el2/base/haps/entry/files" // The path used here is an example. Obtain the path based on project requirements.
// The stream in the path can be pushed to the device by running the "hdc file send D:\xxx\02.mp3 /data/app/el2/100/base/ohos.acts.multimedia.audio.audioplayer/haps/entry/files" command.
let nextpath = pathDir + '/02.mp3'
- await fileIO.open(nextpath).then((fdNumber) => {
- nextFdPath = nextFdPath + '' + fdNumber;
- console.info('open fd success fd is' + nextFdPath);
- }, (err) => {
- console.info('open fd failed err is' + err);
- }).catch((err) => {
- console.info('open fd failed err is' + err);
- });
+ let nextFile = await fs.open(nextpath);
+ nextFdPath = nextFdPath + '' + nextFile.fd;
audioPlayer.src = nextFdPath; // Set the src attribute and trigger the 'dataLoad' event callback.
}
@@ -220,14 +202,8 @@ export class AudioDemo {
let pathDir = "/data/storage/el2/base/haps/entry/files" // The path used here is an example. Obtain the path based on project requirements.
// The stream in the path can be pushed to the device by running the "hdc file send D:\xxx\01.mp3 /data/app/el2/100/base/ohos.acts.multimedia.audio.audioplayer/haps/entry/files" command.
let path = pathDir + '/01.mp3'
- await fileIO.open(path).then((fdNumber) => {
- fdPath = fdPath + '' + fdNumber;
- console.info('open fd success fd is' + fdPath);
- }, (err) => {
- console.info('open fd failed err is' + err);
- }).catch((err) => {
- console.info('open fd failed err is' + err);
- });
+ let file = await fs.open(path);
+ fdPath = fdPath + '' + file.fd;
audioPlayer.src = fdPath; // Set the src attribute and trigger the 'dataLoad' event callback.
}
}
@@ -237,7 +213,7 @@ export class AudioDemo {
```js
import media from '@ohos.multimedia.media'
-import fileIO from '@ohos.fileio'
+import fs from '@ohos.file.fs'
export class AudioDemo {
// Set the player callbacks.
@@ -259,14 +235,8 @@ export class AudioDemo {
let pathDir = "/data/storage/el2/base/haps/entry/files" // The path used here is an example. Obtain the path based on project requirements.
// The stream in the path can be pushed to the device by running the "hdc file send D:\xxx\01.mp3 /data/app/el2/100/base/ohos.acts.multimedia.audio.audioplayer/haps/entry/files" command.
let path = pathDir + '/01.mp3'
- await fileIO.open(path).then((fdNumber) => {
- fdPath = fdPath + '' + fdNumber;
- console.info('open fd success fd is' + fdPath);
- }, (err) => {
- console.info('open fd failed err is' + err);
- }).catch((err) => {
- console.info('open fd failed err is' + err);
- });
+ let file = await fs.open(path);
+ fdPath = fdPath + '' + file.fd;
audioPlayer.src = fdPath; // Set the src attribute and trigger the 'dataLoad' event callback.
}
}
diff --git a/en/application-dev/media/audio-renderer.md b/en/application-dev/media/audio-renderer.md
index 0b5b382d72fec98bfa18c3cfdee7bd61ef713da1..4a39544e7483b68d0bc15b00d643c8403dbded46 100644
--- a/en/application-dev/media/audio-renderer.md
+++ b/en/application-dev/media/audio-renderer.md
@@ -33,31 +33,30 @@ The following figure shows the audio renderer state transitions.
For details about the APIs, see [AudioRenderer in Audio Management](../reference/apis/js-apis-audio.md#audiorenderer8).
1. Use **createAudioRenderer()** to create an **AudioRenderer** instance.
-
-Set parameters of the **AudioRenderer** instance in **audioRendererOptions**. This instance is used to render audio, control and obtain the rendering status, and register a callback for notification.
+ Set parameters of the **AudioRenderer** instance in **audioRendererOptions**. This instance is used to render audio, control and obtain the rendering status, and register a callback for notification.
```js
- import audio from '@ohos.multimedia.audio';
-
- let audioStreamInfo = {
- samplingRate: audio.AudioSamplingRate.SAMPLE_RATE_44100,
- channels: audio.AudioChannel.CHANNEL_1,
- sampleFormat: audio.AudioSampleFormat.SAMPLE_FORMAT_S16LE,
- encodingType: audio.AudioEncodingType.ENCODING_TYPE_RAW
- }
- let audioRendererInfo = {
- content: audio.ContentType.CONTENT_TYPE_SPEECH,
- usage: audio.StreamUsage.STREAM_USAGE_VOICE_COMMUNICATION,
- rendererFlags: 0 // 0 is the extended flag bit of the audio renderer. The default value is 0.
+ import audio from '@ohos.multimedia.audio';
+
+ let audioStreamInfo = {
+ samplingRate: audio.AudioSamplingRate.SAMPLE_RATE_44100,
+ channels: audio.AudioChannel.CHANNEL_1,
+ sampleFormat: audio.AudioSampleFormat.SAMPLE_FORMAT_S16LE,
+ encodingType: audio.AudioEncodingType.ENCODING_TYPE_RAW
+ }
+ let audioRendererInfo = {
+ content: audio.ContentType.CONTENT_TYPE_SPEECH,
+ usage: audio.StreamUsage.STREAM_USAGE_VOICE_COMMUNICATION,
+ rendererFlags: 0 // 0 is the extended flag bit of the audio renderer. The default value is 0.
+ }
+ let audioRendererOptions = {
+ streamInfo: audioStreamInfo,
+ rendererInfo: audioRendererInfo
}
- let audioRendererOptions = {
- streamInfo: audioStreamInfo,
- rendererInfo: audioRendererInfo
- }
- let audioRenderer = await audio.createAudioRenderer(audioRendererOptions);
- console.log("Create audio renderer success.");
+ let audioRenderer = await audio.createAudioRenderer(audioRendererOptions);
+ console.log("Create audio renderer success.");
```
2. Use **start()** to start audio rendering.
@@ -90,7 +89,7 @@ Set parameters of the **AudioRenderer** instance in **audioRendererOptions**. Th
Read the audio data to be played to the buffer. Call **write()** repeatedly to write the data to the buffer.
```js
- import fileio from '@ohos.fileio';
+ import fs from '@ohos.file.fs';
import audio from '@ohos.multimedia.audio';
async function writeBuffer(buf) {
@@ -109,35 +108,33 @@ Set parameters of the **AudioRenderer** instance in **audioRendererOptions**. Th
// Set a proper buffer size for the audio renderer. You can also select a buffer of another size.
const bufferSize = await audioRenderer.getBufferSize();
let dir = globalThis.fileDir; // You must use the sandbox path.
- const path = dir + '/file_example_WAV_2MG.wav'; // The file to render is in the following path: /data/storage/el2/base/haps/entry/files/file_example_WAV_2MG.wav
- console.info(`file path: ${ path}`);
- let ss = fileio.createStreamSync(path, 'r');
- const totalSize = fileio.statSync(path).size; // Size of the file to render.
- let discardHeader = new ArrayBuffer(bufferSize);
- ss.readSync(discardHeader);
- let rlen = 0;
- rlen += bufferSize;
-
- let id = setInterval(() => {
- if (audioRenderer.state == audio.AudioState.STATE_RELEASED) { // The rendering stops if the audio renderer is in the STATE_RELEASED state.
- ss.closeSync();
- await audioRenderer.stop();
- clearInterval(id);
- }
- if (audioRenderer.state == audio.AudioState.STATE_RUNNING) {
- if (rlen >= totalSize) { // The rendering stops if the file finishes reading.
- ss.closeSync();
- await audioRenderer.stop();
- clearInterval(id);
- }
- let buf = new ArrayBuffer(bufferSize);
- rlen += ss.readSync(buf);
- console.info(`Total bytes read from file: ${rlen}`);
- writeBuffer(buf);
- } else {
- console.info('check after next interval');
+ const filePath = dir + '/file_example_WAV_2MG.wav'; // The file to render is in the following path: /data/storage/el2/base/haps/entry/files/file_example_WAV_2MG.wav
+  console.info(`file path: ${filePath}`);
+
+ let file = fs.openSync(filePath, fs.OpenMode.READ_ONLY);
+ let stat = await fs.stat(filePath); // Music file information.
+ let buf = new ArrayBuffer(bufferSize);
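+  // Number of bufferSize chunks needed to cover the whole file; round up when the size is not an exact multiple.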
+  let len = stat.size % bufferSize == 0 ? Math.floor(stat.size / bufferSize) : Math.floor(stat.size / bufferSize + 1);
+  for (let i = 0; i < len; i++) {
+ let options = {
+      offset: i * bufferSize,
+      length: bufferSize
}
- }, 30); // The timer interval is set based on the audio format. The unit is millisecond.
+    let readSize = await fs.read(file.fd, buf, options);
+    let writeSize = await new Promise((resolve, reject) => {
+      audioRenderer.write(buf, (err, writtenBytes) => {
+        if (err) {
+          reject(err);
+        } else {
+          resolve(writtenBytes);
+        }
+      });
+    });
+ }
+
+  fs.close(file);
+  await audioRenderer.stop(); // Stop rendering.
+  await audioRenderer.release(); // Release the resources.
```
4. (Optional) Call **pause()** or **stop()** to pause or stop rendering.
@@ -192,7 +189,6 @@ Set parameters of the **AudioRenderer** instance in **audioRendererOptions**. Th
}
await audioRenderer.drain();
-
state = audioRenderer.state;
}
```
@@ -209,7 +205,6 @@ Set parameters of the **AudioRenderer** instance in **audioRendererOptions**. Th
console.info('Renderer already released');
return;
}
-
await audioRenderer.release();
state = audioRenderer.state;
@@ -242,7 +237,7 @@ Set parameters of the **AudioRenderer** instance in **audioRendererOptions**. Th
let audioTime : number = await audioRenderer.getAudioTime();
// Obtain a proper minimum buffer size.
- let bufferSize : number = await audioRenderer.getBuffersize();
+ let bufferSize : number = await audioRenderer.getBufferSize();
// Obtain the audio renderer rate.
let renderRate : audio.AudioRendererRate = await audioRenderer.getRenderRate();
@@ -424,35 +419,31 @@ Set parameters of the **AudioRenderer** instance in **audioRendererOptions**. Th
let dir = globalThis.fileDir; // You must use the sandbox path.
const path1 = dir + '/music001_48000_32_1.wav'; // The file to render is in the following path: /data/storage/el2/base/haps/entry/files/music001_48000_32_1.wav
console.info(`audioRender1 file path: ${ path1}`);
- let ss1 = await fileio.createStream(path1,'r');
- const totalSize1 = fileio.statSync(path1).size; // Size of the file to render.
- console.info(`totalSize1 -------: ${totalSize1}`);
- let discardHeader = new ArrayBuffer(bufferSize);
- ss1.readSync(discardHeader);
- let rlen = 0;
- rlen += bufferSize;
-
+ let file1 = fs.openSync(path1, fs.OpenMode.READ_ONLY);
+ let stat = await fs.stat(path1); // Music file information.
+ let buf = new ArrayBuffer(bufferSize);
+    let len = stat.size % bufferSize == 0 ? Math.floor(stat.size / bufferSize) : Math.floor(stat.size / bufferSize + 1);
+
// 1.7 Render the original audio data in the buffer by using audioRender.
- let id = setInterval(async () => {
- if (audioRenderer1.state == audio.AudioState.STATE_RELEASED) { // The rendering stops if the audio renderer is in the STATE_RELEASED state.
- ss1.closeSync();
- audioRenderer1.stop();
- clearInterval(id);
+    for (let i = 0; i < len; i++) {
+      let options = {
+        offset: i * bufferSize,
+        length: bufferSize
}
- if (audioRenderer1.state == audio.AudioState.STATE_RUNNING) {
- if (rlen >= totalSize1) { // The rendering stops if the file finishes reading.
- ss1.closeSync();
- await audioRenderer1.stop();
- clearInterval(id);
- }
- let buf = new ArrayBuffer(bufferSize);
- rlen += ss1.readSync(buf);
- console.info(`Total bytes read from file: ${rlen}`);
- await writeBuffer(buf, that.audioRenderer1);
- } else {
- console.info('check after next interval');
- }
- }, 30); // The timer interval is set based on the audio format. The unit is millisecond.
+      let readSize = await fs.read(file1.fd, buf, options);
+      let writeSize = await new Promise((resolve, reject) => {
+        audioRenderer1.write(buf, (err, writtenBytes) => {
+          if (err) {
+            reject(err);
+          } else {
+            resolve(writtenBytes);
+          }
+        });
+      });
+ }
+    fs.close(file1);
+    await audioRenderer1.stop(); // Stop rendering.
+    await audioRenderer1.release(); // Release the resources.
}
async runningAudioRender2(){
@@ -499,36 +490,32 @@ Set parameters of the **AudioRenderer** instance in **audioRendererOptions**. Th
// 2.6 Read the original audio data file.
let dir = globalThis.fileDir; // You must use the sandbox path.
const path2 = dir + '/music002_48000_32_1.wav'; // The file to render is in the following path: /data/storage/el2/base/haps/entry/files/music002_48000_32_1.wav
- console.error(`audioRender1 file path: ${ path2}`);
- let ss2 = await fileio.createStream(path2,'r');
- const totalSize2 = fileio.statSync(path2).size; // Size of the file to render.
- console.error(`totalSize2 -------: ${totalSize2}`);
- let discardHeader2 = new ArrayBuffer(bufferSize);
- ss2.readSync(discardHeader2);
- let rlen = 0;
- rlen += bufferSize;
-
+ console.info(`audioRender2 file path: ${ path2}`);
+ let file2 = fs.openSync(path2, fs.OpenMode.READ_ONLY);
+ let stat = await fs.stat(path2); // Music file information.
+ let buf = new ArrayBuffer(bufferSize);
+    let len = stat.size % bufferSize == 0 ? Math.floor(stat.size / bufferSize) : Math.floor(stat.size / bufferSize + 1);
+
// 2.7 Render the original audio data in the buffer by using audioRender.
- let id = setInterval(async () => {
- if (audioRenderer2.state == audio.AudioState.STATE_RELEASED) { // The rendering stops if the audio renderer is in the STATE_RELEASED state.
- ss2.closeSync();
- that.audioRenderer2.stop();
- clearInterval(id);
- }
- if (audioRenderer1.state == audio.AudioState.STATE_RUNNING) {
- if (rlen >= totalSize2) { // The rendering stops if the file finishes reading.
- ss2.closeSync();
- await audioRenderer2.stop();
- clearInterval(id);
- }
- let buf = new ArrayBuffer(bufferSize);
- rlen += ss2.readSync(buf);
- console.info(`Total bytes read from file: ${rlen}`);
- await writeBuffer(buf, that.audioRenderer2);
- } else {
- console.info('check after next interval');
+    for (let i = 0; i < len; i++) {
+      let options = {
+        offset: i * bufferSize,
+        length: bufferSize
}
- }, 30); // The timer interval is set based on the audio format. The unit is millisecond.
+      let readSize = await fs.read(file2.fd, buf, options);
+      let writeSize = await new Promise((resolve, reject) => {
+        audioRenderer2.write(buf, (err, writtenBytes) => {
+          if (err) {
+            reject(err);
+          } else {
+            resolve(writtenBytes);
+          }
+        });
+      });
+ }
+    fs.close(file2);
+    await audioRenderer2.stop(); // Stop rendering.
+    await audioRenderer2.release(); // Release the resources.
}
async writeBuffer(buf, audioRender) {
diff --git a/en/application-dev/media/avplayer-playback.md b/en/application-dev/media/avplayer-playback.md
index 270081373fb500877ca4352366982b66f72bc09a..324dd43e6f73d46e5f0d264ae81ba36802ee6021 100644
--- a/en/application-dev/media/avplayer-playback.md
+++ b/en/application-dev/media/avplayer-playback.md
@@ -104,7 +104,7 @@ The full playback process includes creating an instance, setting resources, sett
```js
import media from '@ohos.multimedia.media'
import audio from '@ohos.multimedia.audio';
-import fileIO from '@ohos.fileio'
+import fs from '@ohos.file.fs'
const TAG = 'AVPlayerDemo:'
export class AVPlayerDemo {
@@ -223,14 +223,8 @@ export class AVPlayerDemo {
let pathDir = "/data/storage/el2/base/haps/entry/files" // The path used here is an example. Obtain the path based on project requirements.
// The stream in the path can be pushed to the device by running the "hdc file send D:\xxx\H264_AAC.mp4 /data/app/el2/100/base/ohos.acts.multimedia.media.avplayer/haps/entry/files" command.
let path = pathDir + '/H264_AAC.mp4'
- await fileIO.open(path).then((fdNumber) => {
- fdPath = fdPath + '' + fdNumber
- console.info('open fd success fd is' + fdPath)
- }, (err) => {
- console.info('open fd failed err is' + err)
- }).catch((err) => {
- console.info('open fd failed err is' + err)
- });
+ let file = await fs.open(path)
+ fdPath = fdPath + '' + file.fd
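+      // The resulting 'fd://<number>' URL is assigned to avPlayer.url below to play the local file.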
this.avPlayer.url = fdPath
}
}
@@ -240,7 +234,7 @@ export class AVPlayerDemo {
```js
import media from '@ohos.multimedia.media'
-import fileIO from '@ohos.fileio'
+import fs from '@ohos.file.fs'
const TAG = 'AVPlayerDemo:'
export class AVPlayerDemo {
@@ -280,7 +274,7 @@ export class AVPlayerDemo {
break;
case 'stopped': // This state is reported upon a successful callback of stop().
console.info(TAG + 'state stopped called')
- this.avPlayer.reset() // Call reset() to initialize the AVPlayer state.
+          this.avPlayer.release() // Call release() to release the AVPlayer instance.
break;
case 'released':
console.info(TAG + 'state released called')
@@ -302,24 +296,18 @@ export class AVPlayerDemo {
let pathDir = "/data/storage/el2/base/haps/entry/files" // The path used here is an example. Obtain the path based on project requirements.
// The stream in the path can be pushed to the device by running the "hdc file send D:\xxx\H264_AAC.mp4 /data/app/el2/100/base/ohos.acts.multimedia.media.avplayer/haps/entry/files" command.
let path = pathDir + '/H264_AAC.mp4'
- await fileIO.open(path).then((fdNumber) => {
- fdPath = fdPath + '' + fdNumber
- console.info('open fd success fd is' + fdPath)
- }, (err) => {
- console.info('open fd failed err is' + err)
- }).catch((err) => {
- console.info('open fd failed err is' + err)
- });
+ let file = await fs.open(path)
+ fdPath = fdPath + '' + file.fd
this.avPlayer.url = fdPath
}
}
```
-### Switching to the Next Video Clip
+### Looping a Song
```js
import media from '@ohos.multimedia.media'
-import fileIO from '@ohos.fileio'
+import fs from '@ohos.file.fs'
const TAG = 'AVPlayerDemo:'
export class AVPlayerDemo {
@@ -362,7 +350,7 @@ export class AVPlayerDemo {
break;
case 'stopped': // This state is reported upon a successful callback of stop().
console.info(TAG + 'state stopped called')
- this.avPlayer.reset() // Call reset() to initialize the AVPlayer state.
+          this.avPlayer.release() // Call release() to release the AVPlayer instance.
break;
case 'released':
console.info(TAG + 'state released called')
@@ -393,23 +381,17 @@ export class AVPlayerDemo {
let pathDir = "/data/storage/el2/base/haps/entry/files" // The path used here is an example. Obtain the path based on project requirements.
// The stream in the path can be pushed to the device by running the "hdc file send D:\xxx\H264_AAC.mp4 /data/app/el2/100/base/ohos.acts.multimedia.media.avplayer/haps/entry/files" command.
let path = pathDir + '/H264_AAC.mp4'
- await fileIO.open(path).then((fdNumber) => {
- fdPath = fdPath + '' + fdNumber
- console.info('open fd success fd is' + fdPath)
- }, (err) => {
- console.info('open fd failed err is' + err)
- }).catch((err) => {
- console.info('open fd failed err is' + err)
- });
+ let file = await fs.open(path)
+ fdPath = fdPath + '' + file.fd
this.avPlayer.url = fdPath
}
}
```
-### Looping a Song
+### Switching to the Next Video Clip
```js
import media from '@ohos.multimedia.media'
-import fileIO from '@ohos.fileio'
+import fs from '@ohos.file.fs'
const TAG = 'AVPlayerDemo:'
export class AVPlayerDemo {
@@ -422,14 +404,8 @@ export class AVPlayerDemo {
let pathDir = "/data/storage/el2/base/haps/entry/files" // The path used here is an example. Obtain the path based on project requirements.
// The stream in the path can be pushed to the device by running the "hdc file send D:\xxx\H264_MP3.mp4 /data/app/el2/100/base/ohos.acts.multimedia.media.avplayer/haps/entry/files" command.
let path = pathDir + '/H264_MP3.mp4'
- await fileIO.open(path).then((fdNumber) => {
- fdPath = fdPath + '' + fdNumber
- console.info('open fd success fd is' + fdPath)
- }, (err) => {
- console.info('open fd failed err is' + err)
- }).catch((err) => {
- console.info('open fd failed err is' + err)
- });
+ let file = await fs.open(path)
+ fdPath = fdPath + '' + file.fd
this.avPlayer.url = fdPath // The initialized state is reported again.
}
@@ -493,14 +469,8 @@ export class AVPlayerDemo {
let pathDir = "/data/storage/el2/base/haps/entry/files" // The path used here is an example. Obtain the path based on project requirements.
// The stream in the path can be pushed to the device by running the "hdc file send D:\xxx\H264_AAC.mp4 /data/app/el2/100/base/ohos.acts.multimedia.media.avplayer/haps/entry/files" command.
let path = pathDir + '/H264_AAC.mp4'
- await fileIO.open(path).then((fdNumber) => {
- fdPath = fdPath + '' + fdNumber
- console.info('open fd success fd is' + fdPath)
- }, (err) => {
- console.info('open fd failed err is' + err)
- }).catch((err) => {
- console.info('open fd failed err is' + err)
- });
+ let file = await fs.open(path)
+ fdPath = fdPath + '' + file.fd
this.avPlayer.url = fdPath
}
}
diff --git a/en/application-dev/media/avrecorder.md b/en/application-dev/media/avrecorder.md
index b897c68a657f2891800e2f4d67fc60a1aec8eacf..9214df032d7d060cabe9900e8a0d5ab6e7aa12f9 100644
--- a/en/application-dev/media/avrecorder.md
+++ b/en/application-dev/media/avrecorder.md
@@ -69,14 +69,14 @@ export class AVRecorderDemo {
let surfaceID; // The surface ID is obtained by calling getInputSurface and transferred to the videoOutput object of the camera.
await this.getFd('01.mp4');
- // Configure the parameters related to audio and video recording.
+ // Configure the parameters related to audio and video recording based on those supported by the hardware device.
let avProfile = {
audioBitrate : 48000,
audioChannels : 2,
audioCodec : media.CodecMimeType.AUDIO_AAC,
audioSampleRate : 48000,
fileFormat : media.ContainerFormatType.CFT_MPEG_4,
- videoBitrate : 48000,
+ videoBitrate : 2000000,
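+      // 2 Mbit/s is a typical bit rate for 640 x 480 video; choose a value supported by the device encoder.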
videoCodec : media.CodecMimeType.VIDEO_MPEG4,
videoFrameWidth : 640,
videoFrameHeight : 480,
@@ -365,10 +365,10 @@ export class VideoRecorderDemo {
let surfaceID; // The surface ID is obtained by calling getInputSurface and transferred to the videoOutput object of the camera.
await this.getFd('01.mp4');
- // Configure the parameters related to video recording.
+    // Configure the parameters for video-only recording based on those supported by the hardware device.
let videoProfile = {
fileFormat : media.ContainerFormatType.CFT_MPEG_4,
- videoBitrate : 48000,
+ videoBitrate : 2000000,
videoCodec : media.CodecMimeType.VIDEO_MPEG4,
videoFrameWidth : 640,
videoFrameHeight : 480,
diff --git a/en/application-dev/media/video-playback.md b/en/application-dev/media/video-playback.md
index b324f19b3cf0f3621bd74809c4f1a2d0b57d0abd..d4c895b452aa31b28690bd96bd9ef0fac64c4eb4 100644
--- a/en/application-dev/media/video-playback.md
+++ b/en/application-dev/media/video-playback.md
@@ -51,7 +51,7 @@ For details about how to create an XComponent, see [XComponent](../reference/ark
```js
import media from '@ohos.multimedia.media'
-import fileIO from '@ohos.fileio'
+import fs from '@ohos.file.fs'
export class VideoPlayerDemo {
// Report an error in the case of a function invocation failure.
failureCallback(error) {
@@ -82,14 +82,8 @@ export class VideoPlayerDemo {
let fdPath = 'fd://'
// The stream in the path can be pushed to the device by running the "hdc file send D:\xxx\H264_AAC.mp4 /data/app/el1/bundle/public/ohos.acts.multimedia.video.videoplayer/ohos.acts.multimedia.video.videoplayer/assets/entry/resources/rawfile" command.
let path = '/data/app/el1/bundle/public/ohos.acts.multimedia.video.videoplayer/ohos.acts.multimedia.video.videoplayer/assets/entry/resources/rawfile/H264_AAC.mp4';
- await fileIO.open(path).then((fdNumber) => {
- fdPath = fdPath + '' + fdNumber;
- console.info('open fd success fd is' + fdPath);
- }, (err) => {
- console.info('open fd failed err is' + err);
- }).catch((err) => {
- console.info('open fd failed err is' + err);
- });
+ let file = await fs.open(path);
+ fdPath = fdPath + '' + file.fd;
// Call createVideoPlayer to create a VideoPlayer instance.
await media.createVideoPlayer().then((video) => {
if (typeof (video) != 'undefined') {
@@ -180,7 +174,7 @@ export class VideoPlayerDemo {
```js
import media from '@ohos.multimedia.media'
-import fileIO from '@ohos.fileio'
+import fs from '@ohos.file.fs'
export class VideoPlayerDemo {
// Report an error in the case of a function invocation failure.
failureCallback(error) {
@@ -211,14 +205,8 @@ export class VideoPlayerDemo {
let fdPath = 'fd://'
// The stream in the path can be pushed to the device by running the "hdc file send D:\xxx\H264_AAC.mp4 /data/app/el1/bundle/public/ohos.acts.multimedia.video.videoplayer/ohos.acts.multimedia.video.videoplayer/assets/entry/resources/rawfile" command.
let path = '/data/app/el1/bundle/public/ohos.acts.multimedia.video.videoplayer/ohos.acts.multimedia.video.videoplayer/assets/entry/resources/rawfile/H264_AAC.mp4';
- await fileIO.open(path).then((fdNumber) => {
- fdPath = fdPath + '' + fdNumber;
- console.info('open fd success fd is' + fdPath);
- }, (err) => {
- console.info('open fd failed err is' + err);
- }).catch((err) => {
- console.info('open fd failed err is' + err);
- });
+ let file = await fs.open(path);
+ fdPath = fdPath + '' + file.fd;
// Call createVideoPlayer to create a VideoPlayer instance.
await media.createVideoPlayer().then((video) => {
if (typeof (video) != 'undefined') {
@@ -267,7 +255,7 @@ export class VideoPlayerDemo {
```js
import media from '@ohos.multimedia.media'
-import fileIO from '@ohos.fileio'
+import fs from '@ohos.file.fs'
export class VideoPlayerDemo {
// Report an error in the case of a function invocation failure.
failureCallback(error) {
@@ -299,14 +287,8 @@ export class VideoPlayerDemo {
// The stream in the path can be pushed to the device by running the "hdc file send D:\xxx\H264_AAC.mp4 /data/app/el1/bundle/public/ohos.acts.multimedia.video.videoplayer/ohos.acts.multimedia.video.videoplayer/assets/entry/resources/rawfile" command.
let path = '/data/app/el1/bundle/public/ohos.acts.multimedia.video.videoplayer/ohos.acts.multimedia.video.videoplayer/assets/entry/resources/rawfile/H264_AAC.mp4';
let nextPath = '/data/app/el1/bundle/public/ohos.acts.multimedia.video.videoplayer/ohos.acts.multimedia.video.videoplayer/assets/entry/resources/rawfile/MP4_AAC.mp4';
- await fileIO.open(path).then((fdNumber) => {
- fdPath = fdPath + '' + fdNumber;
- console.info('open fd success fd is' + fdPath);
- }, (err) => {
- console.info('open fd failed err is' + err);
- }).catch((err) => {
- console.info('open fd failed err is' + err);
- });
+ let file = await fs.open(path);
+ fdPath = fdPath + '' + file.fd;
// Call createVideoPlayer to create a VideoPlayer instance.
await media.createVideoPlayer().then((video) => {
if (typeof (video) != 'undefined') {
@@ -341,14 +323,8 @@ export class VideoPlayerDemo {
// Obtain the next video FD address.
fdPath = 'fd://'
- await fileIO.open(nextPath).then((fdNumber) => {
- fdPath = fdPath + '' + fdNumber;
- console.info('open fd success fd is' + fdPath);
- }, (err) => {
- console.info('open fd failed err is' + err);
- }).catch((err) => {
- console.info('open fd failed err is' + err);
- });
+ let nextFile = await fs.open(nextPath);
+ fdPath = fdPath + '' + nextFile.fd;
// Set the second video playback source.
videoPlayer.url = fdPath;
@@ -378,7 +354,7 @@ export class VideoPlayerDemo {
```js
import media from '@ohos.multimedia.media'
-import fileIO from '@ohos.fileio'
+import fs from '@ohos.file.fs'
export class VideoPlayerDemo {
// Report an error in the case of a function invocation failure.
failureCallback(error) {
@@ -409,14 +385,8 @@ export class VideoPlayerDemo {
let fdPath = 'fd://'
// The stream in the path can be pushed to the device by running the "hdc file send D:\xxx\H264_AAC.mp4 /data/app/el1/bundle/public/ohos.acts.multimedia.video.videoplayer/ohos.acts.multimedia.video.videoplayer/assets/entry/resources/rawfile" command.
let path = '/data/app/el1/bundle/public/ohos.acts.multimedia.video.videoplayer/ohos.acts.multimedia.video.videoplayer/assets/entry/resources/rawfile/H264_AAC.mp4';
- await fileIO.open(path).then((fdNumber) => {
- fdPath = fdPath + '' + fdNumber;
- console.info('open fd success fd is' + fdPath);
- }, (err) => {
- console.info('open fd failed err is' + err);
- }).catch((err) => {
- console.info('open fd failed err is' + err);
- });
+ let file = await fs.open(path);
+ fdPath = fdPath + '' + file.fd;
// Call createVideoPlayer to create a VideoPlayer instance.
await media.createVideoPlayer().then((video) => {
if (typeof (video) != 'undefined') {
diff --git a/en/application-dev/media/video-recorder.md b/en/application-dev/media/video-recorder.md
index bef55899bcb51359a6b6d68ef6d7894d70e435ae..fd9de91b4bae0591e2a5dc4869455bdd4055943e 100644
--- a/en/application-dev/media/video-recorder.md
+++ b/en/application-dev/media/video-recorder.md
@@ -76,14 +76,14 @@ export class VideoRecorderDemo {
let surfaceID = null; // Used to save the surface ID returned by getInputSurface.
// Obtain the FD address of the video to be recorded.
await this.getFd('01.mp4');
- // Recording-related parameter settings
+ // Configure the parameters related to video recording based on those supported by the hardware device.
let videoProfile = {
audioBitrate : 48000,
audioChannels : 2,
audioCodec : 'audio/mp4a-latm',
audioSampleRate : 48000,
fileFormat : 'mp4',
- videoBitrate : 48000,
+ videoBitrate : 2000000,
videoCodec : 'video/mp4v-es',
videoFrameWidth : 640,
videoFrameHeight : 480,
diff --git a/en/application-dev/reference/apis/js-apis-media.md b/en/application-dev/reference/apis/js-apis-media.md
index 5a1dcff96d3f5d3c7e2dc1beeb8038e3b68bb46c..3d29422be8cb7f4c363d3272fa1d00dae4019bc9 100644
--- a/en/application-dev/reference/apis/js-apis-media.md
+++ b/en/application-dev/reference/apis/js-apis-media.md
@@ -1162,7 +1162,7 @@ Unsubscribes from the event that checks whether the bit rate is successfully set
| Name| Type | Mandatory| Description |
| ------ | ------ | ---- | ------------------------------------------------------------ |
-| type | string | Yes | Event type, which is **'bitrateDone'** in this case|
+| type | string | Yes | Event type, which is **'bitrateDone'** in this case.|
**Example**
@@ -1720,13 +1720,14 @@ For details about the error codes, see [Media Error Codes](../errorcodes/errorco
**Example**
```js
+// Configure the parameters based on those supported by the hardware device.
let AVRecorderProfile = {
audioBitrate : 48000,
audioChannels : 2,
audioCodec : media.CodecMimeType.AUDIO_AAC,
audioSampleRate : 48000,
fileFormat : media.ContainerFormatType.CFT_MPEG_4,
- videoBitrate : 48000,
+ videoBitrate : 2000000,
videoCodec : media.CodecMimeType.VIDEO_MPEG4,
videoFrameWidth : 640,
videoFrameHeight : 480,
@@ -1790,13 +1791,14 @@ For details about the error codes, see [Media Error Codes](../errorcodes/errorco
**Example**
```js
+// Configure the parameters based on those supported by the hardware device.
let AVRecorderProfile = {
audioBitrate : 48000,
audioChannels : 2,
audioCodec : media.CodecMimeType.AUDIO_AAC,
audioSampleRate : 48000,
fileFormat : media.ContainerFormatType.CFT_MPEG_4,
- videoBitrate : 48000,
+ videoBitrate : 2000000,
videoCodec : media.CodecMimeType.VIDEO_MPEG4,
videoFrameWidth : 640,
videoFrameHeight : 480,
@@ -2484,7 +2486,7 @@ Describes the audio and video recording parameters.
| audioSourceType | [AudioSourceType](#audiosourcetype9) | No | Type of the audio source to record. This parameter is mandatory for audio recording. |
| videoSourceType | [VideoSourceType](#videosourcetype9) | No | Type of the video source to record. This parameter is mandatory for video recording. |
| profile | [AVRecorderProfile](#avrecorderprofile9) | Yes | Recording profile. This parameter is mandatory. |
-| url | string | Yes | Recording output URL: fd://xx (fd number).<br>![img](figures/en-us_image_url.png)<br>This parameter is mandatory. |
+| url | string | Yes | Recording output URL: fd://xx (fd number).<br>![img](figures/en-us_image_url.png)<br>This parameter is mandatory. |
| rotation | number | No | Rotation angle of the recorded video. The value can only be 0, 90, 180, or 270. |
| location | [Location](#location) | No | Geographical location of the recorded video. |
@@ -2606,13 +2608,14 @@ For details about the error codes, see [Media Error Codes](../errorcodes/errorco
**Example**
```js
+// Configure the parameters based on those supported by the hardware device.
let videoProfile = {
audioBitrate : 48000,
audioChannels : 2,
audioCodec : 'audio/mp4a-latm',
audioSampleRate : 48000,
fileFormat : 'mp4',
- videoBitrate : 48000,
+ videoBitrate : 2000000,
videoCodec : 'video/mp4v-es',
videoFrameWidth : 640,
videoFrameHeight : 480,
@@ -2676,13 +2679,14 @@ For details about the error codes, see [Media Error Codes](../errorcodes/errorco
**Example**
```js
+// Configure the parameters based on those supported by the hardware device.
let videoProfile = {
audioBitrate : 48000,
audioChannels : 2,
audioCodec : 'audio/mp4a-latm',
audioSampleRate : 48000,
fileFormat : 'mp4',
- videoBitrate : 48000,
+ videoBitrate : 2000000,
videoCodec : 'video/mp4v-es',
videoFrameWidth : 640,
videoFrameHeight : 480,
@@ -3801,7 +3805,7 @@ audioPlayer.on('error', (error) => { // Set the 'error' event callback
console.info(`audio error called, error: ${error}`);
});
-// Set the FD (local playback) of the video file selected by the user.
+// Set the FD (local playback) of the audio file selected by the user.
let fdPath = 'fd://';
// The stream in the path can be pushed to the device by running the "hdc file send D:\xxx\01.mp3 /data/accounts/account_0/appdata" command.
let path = '/data/accounts/account_0/appdata/ohos.xxx.xxx.xxx/01.mp3';