Unverified commit 027fd986, authored by Mr-YX, committed by Gitee

Signed-off-by: mr-yx <496043997@qq.com>

Parent: afdc4a0f
@@ -12,7 +12,7 @@
### Full-Process Scenario
This scenario covers the full process: creating an instance, reading image information, reading and writing the pixelmap, updating data, packing pixels, and releasing resources.
```js
const Color = new ArrayBuffer(96); // Buffer that holds the image pixel data
@@ -27,7 +27,7 @@ let opts = { alphaType: 0, editable: true, pixelFormat: 4, scaleMode: 1, size: {
    done();
})
// Read pixels from a region
pixelmap.readPixels(area, (data) => {
    if (data !== null) {
        var bufferArr = new Uint8Array(area.pixels);
        var res = true;
@@ -50,39 +50,39 @@ pixelmap.readPixelsToBuffer(readBuffer,() => {
    var bufferArr = new Uint8Array(readBuffer);
    var res = true;
    for (var i = 0; i < bufferArr.length; i++) {
        if (res) {
            if (bufferArr[i] !== 0) {
                res = false;
                console.info('TC_020-1 Success');
                expect(true).assertTrue();
                done();
                break;
            }
        }
    }
}
// Write pixels to a region
pixelmap.writePixels(area, () => {
    const readArea = { pixels: new ArrayBuffer(20), offset: 0, stride: 8, region: { size: { height: 1, width: 2 }, x: 0, y: 0 }}
    pixelmap.readPixels(readArea, () => {
        var readArr = new Uint8Array(readArea.pixels);
        var res = true;
        for (var i = 0; i < readArr.length; i++) {
            if (res) {
                if (readArr[i] !== 0) {
                    res = false;
                    console.info('TC_022-1 Success');
                    expect(true).assertTrue();
                    done();
                    break;
                }
            }
        }
    }
// Write the buffer data into the pixelmap
pixelmap.writeBufferToPixels(writeColor).then(() => {
    const readBuffer = new ArrayBuffer(96);
    pixelmap.readPixelsToBuffer(readBuffer).then(() => {
        var bufferArr = new Uint8Array(readBuffer);
        var res = true;
        for (var i = 0; i < bufferArr.length; i++) {
@@ -95,18 +95,18 @@ pixelmap.writePixels(area,() => {
            break;
        }
    }
}
// Get the image information
pixelmap.getImageInfo(imageInfo => {
    if (imageInfo !== null) {
        console.info('TC_024-1 imageInfo is ready');
        expect(imageInfo.size.height == 4).assertTrue();
        expect(imageInfo.size.width == 6).assertTrue();
        expect(imageInfo.pixelFormat == 4).assertTrue();
        done();
    }
})
// Release the pixelmap
pixelmap.release(() => {
@@ -136,7 +136,7 @@ imagePackerApi.packing(imageSourceApi, packOpts, data => {
    console.info('TC_062-1 finished');
    expect(data !== null).assertTrue();
    done();
})
// Release the imagepacker
imagePackerApi.release();
@@ -171,9 +171,9 @@ imageSourceApi.createPixelMap(decodingOptions, pixelmap => {
// Create a pixelmap with the promise style
imageSourceApi.createPixelMap().then(pixelmap => {
    console.info('TC_050-11 createPixelMap ');
    expect(pixelmap !== null).assertTrue();
    done();
})
// Catch the error message when the function call throws an exception
@@ -181,7 +181,7 @@ catch(error => {
    console.log('TC_050-11 error: ' + error);
    expect().assertFail();
    done();
})
// Get the number of bytes per row of pixels
pixelmap.getBytesNumberPerRow(num => {
@@ -192,13 +192,13 @@ pixelmap.getBytesNumberPerRow( num => {
// Get the total number of bytes of the pixels
pixelmap.getPixelBytesNumber(num => {
    console.info('TC_026-1 num is ' + num);
    expect(num == expectNum).assertTrue();
    done();
})
// Get the pixelmap information
pixelmap.getImageInfo(imageInfo => {})
// Log the failure message if the information cannot be obtained
console.info('TC_024-1 imageInfo is empty');
@@ -206,17 +206,17 @@ expect(false).assertTrue()
// Release the pixelmap
pixelmap.release(() => {
    expect(true).assertTrue();
    console.log('TC_027-1 suc');
    done();
})
// Catch the error message if the release fails
catch(error => {
    console.log('TC_027-1 error: ' + error);
    expect().assertFail();
    done();
})
```
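The excerpt above is taken from test code and the diff elides its setup, so the following minimal sketch pulls the same calls together end to end. It assumes the `@ohos.multimedia.image` module and the option values used above; the function name `pixelMapDemo` is illustrative only.

```js
// Minimal sketch, not the original sample: create a pixelmap, inspect it,
// write and read back a small region, then release it.
import image from '@ohos.multimedia.image';

async function pixelMapDemo() {
  const color = new ArrayBuffer(96); // 4 bytes per pixel for a 6 x 4 image
  let opts = { alphaType: 0, editable: true, pixelFormat: 4, scaleMode: 1, size: { height: 4, width: 6 } };
  const pixelmap = await image.createPixelMap(color, opts);

  const info = await pixelmap.getImageInfo(); // read the image information
  console.info('size: ' + info.size.width + ' x ' + info.size.height);

  // Describe a 2 x 1 region at the top-left corner, write it, then read it back
  const area = {
    pixels: new ArrayBuffer(8),
    offset: 0,
    stride: 8,
    region: { size: { height: 1, width: 2 }, x: 0, y: 0 }
  };
  await pixelmap.writePixels(area);
  await pixelmap.readPixels(area);

  await pixelmap.release(); // free the native pixel memory
}
```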
### Encoding Scenario
@@ -225,14 +225,14 @@ catch(error => {
/data/local/tmp/test.png // Path of the image used to create the imagesource
// Create the imagesource
const imageSourceApi = image.createImageSource(path); // '/data/local/tmp/test.png'
// If the imagesource fails to be created, log the error
if (imageSourceApi == null) {
    console.info('TC_062 create image source failed');
    expect(false).assertTrue();
    done();
}
// If the imagesource is created successfully, create the imagepacker
const imagePackerApi = image.createImagePacker();
@@ -242,7 +242,7 @@ if (imagePackerApi == null) {
    console.info('TC_062 create image packer failed');
    expect(false).assertTrue();
    done();
}
// If the imagepacker is created successfully, set the encoding options
let packOpts = { format:["image/jpeg"], // The supported encoding format is JPEG
@@ -251,20 +251,20 @@ let packOpts = { format:["image/jpeg"], // The supported encoding format is JPEG
// Pack (encode) the image
imagePackerApi.packing(imageSourceApi, packOpts)
    .then(data => {
        console.info('TC_062 finished');
        expect(data !== null).assertTrue();
        done();
    })
// Encoding is complete; release the imagepacker
imagePackerApi.release();
// Get the imagesource information
imageSourceApi.getImageInfo(imageInfo => {
    console.info('TC_045 imageInfo');
    expect(imageInfo !== null).assertTrue();
    done();
})
// Update incremental data
imageSourceIncrementalSApi.updateData(array, false, 0, 10, (error, data) => {})
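Because the code fences and several setup lines of this encoding sample are elided by the diff, here is a minimal sketch of the same flow in promise style. The `quality` value and the helper name are illustrative assumptions rather than part of the original sample.

```js
// Minimal sketch, not the original sample: create an imagesource from a file,
// pack it to JPEG, then release the packer.
import image from '@ohos.multimedia.image';

async function encodeToJpeg() {
  const path = '/data/local/tmp/test.png';                // test image path used above
  const imageSourceApi = image.createImageSource(path);
  const imagePackerApi = image.createImagePacker();
  let packOpts = { format: ['image/jpeg'], quality: 98 }; // quality value is illustrative
  const data = await imagePackerApi.packing(imageSourceApi, packOpts);
  console.info('packed data size: ' + data.byteLength);
  await imagePackerApi.release();
}
```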
@@ -278,22 +278,22 @@ imageSourceIncrementalSApi.updateData(array, false, 0, 10,(error,data )=> {})
```js
public async init(surfaceId: any) {
    // Server-side code: create an ImageReceiver
    var receiver = image.createImageReceiver(8 * 1024, 8, image.ImageFormat.JPEG, 1);
    // Get the surface ID
    var surfaceId = await receiver.getReceivingSurfaceId();
    // Register a listener on the surface; it is triggered when the surface buffer is ready
    receiver.on('imageArrival', () => {
        // Fetch the latest buffer from the surface
        receiver.readNextImage((err, img) => {
            img.getComponent(4, (err, component) => {
                // Consume component.byteBuffer, for example, save the buffer content as an image
            })
        })
    })
    // Call the camera API to pass surfaceId to the camera. The camera obtains the surface through surfaceId and produces surface buffers into it.
}
```
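The comment above leaves the consumption of `component.byteBuffer` to the reader. A minimal sketch that writes the buffer to a file might look as follows; the `@ohos.fileio` module and the target path are assumptions, not part of the original sample.

```js
// Minimal sketch: persist a received component buffer to a file.
import fileio from '@ohos.fileio';

function saveComponentBuffer(component) {
  const path = '/data/local/tmp/received.jpg';     // illustrative target path
  const fd = fileio.openSync(path, 0o102, 0o666);  // O_CREAT | O_RDWR
  fileio.writeSync(fd, component.byteBuffer);      // dump the received bytes
  fileio.closeSync(fd);
}
```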
\ No newline at end of file
@@ -2,10 +2,8 @@
## Scenario Introduction
This document describes how to use **OpenSL ES** for audio recording on **OpenHarmony**. Currently only part of the [**OpenSL ES** interfaces](https://gitee.com/openharmony/third_party_opensles/blob/master/api/1.0.1/OpenSLES.h) is implemented; calling an unimplemented interface returns **SL_RESULT_FEATURE_UNSUPPORTED**.
## How to Develop
@@ -19,7 +17,7 @@
#include <OpenSLES_Platform.h>
```
2. Use the **slCreateEngine** interface to create the engine object and instantiate it as **engine**.
```c++
SLObjectItf engineObject = nullptr;
@@ -27,8 +25,6 @@
(*engineObject)->Realize(engineObject, SL_BOOLEAN_FALSE);
```
3. Obtain the engine interface instance **engineItf** of the **SL_IID_ENGINE** interface.
```c++
@@ -36,9 +32,7 @@
result = (*engineObject)->GetInterface(engineObject, SL_IID_ENGINE, &engineItf);
```
4. Configure the recorder information (the input source **audiosource** and the output sink **audiosink**) and create the recording object **pcmCapturerObject**; a hedged configuration sketch follows the snippet below.
```c++
SLDataLocator_IODevice io_device = {
@@ -82,20 +76,18 @@
```
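The diff elides most of this configuration block. For orientation only, a generic OpenSL ES setup for step 4 is sketched below; the PCM parameters and the buffer count are illustrative, and the elided OpenHarmony sample may differ in detail.

```c++
// Generic OpenSL ES sketch (illustrative values; not the original sample).
SLDataLocator_IODevice io_device = {
    SL_DATALOCATOR_IODEVICE,         // locator type
    SL_IODEVICE_AUDIOINPUT,          // audio input device
    SL_DEFAULTDEVICEID_AUDIOINPUT,   // default recording device
    NULL
};
SLDataSource audioSource = { &io_device, NULL };

SLDataLocator_BufferQueue bufferQueue = { SL_DATALOCATOR_BUFFERQUEUE, 3 };
SLDataFormat_PCM pcmFormat = {
    SL_DATAFORMAT_PCM,               // format type
    1,                               // mono
    SL_SAMPLINGRATE_44_1,            // 44.1 kHz (expressed in milliHz)
    SL_PCMSAMPLEFORMAT_FIXED_16,     // 16-bit samples
    SL_PCMSAMPLEFORMAT_FIXED_16,     // container size
    SL_SPEAKER_FRONT_LEFT,           // channel mask
    SL_BYTEORDER_LITTLEENDIAN        // byte order
};
SLDataSink audioSink = { &bufferQueue, &pcmFormat };

SLObjectItf pcmCapturerObject = nullptr;
(*engineItf)->CreateAudioRecorder(engineItf, &pcmCapturerObject,
                                  &audioSource, &audioSink, 0, nullptr, nullptr);
(*pcmCapturerObject)->Realize(pcmCapturerObject, SL_BOOLEAN_FALSE);
```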
5. Obtain the **recordItf** instance of the recording interface **SL_IID_RECORD**.
```c++
SLRecordItf recordItf;
(*pcmCapturerObject)->GetInterface(pcmCapturerObject, SL_IID_RECORD, &recordItf);
```
6. Obtain the **bufferQueueItf** instance of the **SL_IID_OH_BUFFERQUEUE** interface.
```c++
SLOHBufferQueueItf bufferQueueItf;
(*pcmCapturerObject)->GetInterface(pcmCapturerObject, SL_IID_OH_BUFFERQUEUE, &bufferQueueItf);
```
7. Register the **BufferQueueCallback** callback.
```c++
@@ -120,7 +112,6 @@
(*bufferQueueItf)->RegisterCallback(bufferQueueItf, BufferQueueCallback, wavFile_);
```
8. Start recording (a minimal sketch of the state change follows the snippet below).
```c++
@@ -145,7 +136,6 @@
}
```
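The body of this snippet is largely elided by the diff. In standard OpenSL ES terms, switching the recorder into the recording state is a single call on the `recordItf` obtained in step 5; the captured PCM data then arrives through the callback registered in step 7.

```c++
// Minimal sketch: put the recorder into the recording state.
(*recordItf)->SetRecordState(recordItf, SL_RECORDSTATE_RECORDING);
```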
9. Stop recording.
```c++
@@ -159,4 +149,4 @@
wavFile_ = nullptr;
return;
}
```
\ No newline at end of file
# OpenSL ES Audio Playback Development Guide
## Scenario Introduction
This document describes how to use **OpenSL ES** for audio playback on **OpenHarmony**. Currently only part of the [**OpenSL ES** interfaces](https://gitee.com/openharmony/third_party_opensles/blob/master/api/1.0.1/OpenSLES.h) is implemented; calling an unimplemented interface returns **SL_RESULT_FEATURE_UNSUPPORTED**.
## How to Develop
The following steps describe how to use **OpenSL ES** on **OpenHarmony** to develop the audio playback feature:
@@ -20,9 +16,7 @@
#include <OpenSLES_Platform.h>
```
2. Use the **slCreateEngine** interface to create and obtain the **engine** instance.
```c++
SLObjectItf engineObject = nullptr;
@@ -30,8 +24,6 @@
(*engineObject)->Realize(engineObject, SL_BOOLEAN_FALSE);
```
3. Obtain the **engineEngine** instance of the **SL_IID_ENGINE** interface.
```c++
@@ -39,9 +31,7 @@
(*engineObject)->GetInterface(engineObject, SL_IID_ENGINE, &engineEngine);
```
4. Configure the player information and create the **AudioPlayer** (a hedged configuration sketch follows the snippet below).
```c++
SLDataLocator_BufferQueue slBufferQueue = {
@@ -66,8 +56,6 @@
(*pcmPlayerObject)->Realize(pcmPlayerObject, SL_BOOLEAN_FALSE);
```
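Most of this configuration is elided by the diff. For orientation only, a generic OpenSL ES player setup is sketched below; the PCM format, the output-mix sink, and the empty interface list are illustrative assumptions and the elided OpenHarmony sample may differ.

```c++
// Generic OpenSL ES sketch (illustrative values; not the original sample).
SLDataLocator_BufferQueue slBufferQueue = { SL_DATALOCATOR_BUFFERQUEUE, 3 };
SLDataFormat_PCM pcmFormat = {
    SL_DATAFORMAT_PCM,
    2,                                              // stereo
    SL_SAMPLINGRATE_48,                             // 48 kHz (expressed in milliHz)
    SL_PCMSAMPLEFORMAT_FIXED_16,                    // 16-bit samples
    SL_PCMSAMPLEFORMAT_FIXED_16,                    // container size
    SL_SPEAKER_FRONT_LEFT | SL_SPEAKER_FRONT_RIGHT, // channel mask
    SL_BYTEORDER_LITTLEENDIAN
};
SLDataSource slSource = { &slBufferQueue, &pcmFormat };

// Route the player to an output mix.
SLObjectItf outputMixObject = nullptr;
(*engineEngine)->CreateOutputMix(engineEngine, &outputMixObject, 0, nullptr, nullptr);
(*outputMixObject)->Realize(outputMixObject, SL_BOOLEAN_FALSE);
SLDataLocator_OutputMix outputMix = { SL_DATALOCATOR_OUTPUTMIX, outputMixObject };
SLDataSink slSink = { &outputMix, nullptr };

SLObjectItf pcmPlayerObject = nullptr;
(*engineEngine)->CreateAudioPlayer(engineEngine, &pcmPlayerObject,
                                   &slSource, &slSink, 0, nullptr, nullptr);
(*pcmPlayerObject)->Realize(pcmPlayerObject, SL_BOOLEAN_FALSE);
```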
5. Obtain the **bufferQueueItf** instance of the **SL_IID_OH_BUFFERQUEUE** interface.
```c++
@@ -75,8 +63,6 @@
(*pcmPlayerObject)->GetInterface(pcmPlayerObject, SL_IID_OH_BUFFERQUEUE, &bufferQueueItf);
```
6. Open the audio file and register the **BufferQueueCallback** callback.
```c++
@@ -101,8 +87,6 @@
(*bufferQueueItf)->RegisterCallback(bufferQueueItf, BufferQueueCallback, wavFile_);
```
7. Obtain the **playItf** instance of the playback interface, set the state to **SL_PLAYSTATE_PLAYING**, and start playback (see the sketch after this snippet).
```c++
@@ -111,13 +95,10 @@
(*playItf)->SetPlayState(playItf, SL_PLAYSTATE_PLAYING);
```
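The call that obtains `playItf` is elided by the diff. In standard OpenSL ES it is retrieved from the player object via **SL_IID_PLAY** before the state is changed, roughly as in this sketch.

```c++
// Minimal sketch: obtain the play interface, then start playback.
SLPlayItf playItf = nullptr;
(*pcmPlayerObject)->GetInterface(pcmPlayerObject, SL_IID_PLAY, &playItf);
(*playItf)->SetPlayState(playItf, SL_PLAYSTATE_PLAYING);
```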
8. Stop audio playback.
```c++
(*playItf)->SetPlayState(playItf, SL_PLAYSTATE_STOPPED);
(*pcmPlayerObject)->Destroy(pcmPlayerObject);
(*engineObject)->Destroy(engineObject);
```
\ No newline at end of file
@@ -8,13 +8,11 @@
![zh-ch_image_video_state_machine](figures/zh-ch_image_video_state_machine.png)
**Figure 2** Layer 0 diagram of video playback
![zh-ch_image_video_player](figures/zh-ch_image_video_player.png)
*Note: Video playback requires hardware capabilities such as display, audio, and codec.*
1. The third-party application obtains a surfaceID from the XComponent.
2. The third-party application passes the surfaceID to the VideoPlayer JS (a hedged playback sketch follows the code fragment below).
@@ -118,7 +116,7 @@ export class VideoPlayerDemo {
    console.info('pause success');
}, this.failureCallback).catch(this.catchCallback);
// Obtain the video track information with the promise style
let arrayDescription;
await videoPlayer.getTrackDescription().then((arrlist) => {
    if (typeof (arrlist) != 'undefined') {
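Because the diff shows only a fragment of `VideoPlayerDemo`, the following sketch outlines the surface-based playback flow described in steps 1 and 2 above. The URL and the exact `VideoPlayer` method set are assumptions based on the `@ohos.multimedia.media` module and may differ from the original sample.

```js
// Minimal sketch, not the original sample: bind a surface to a VideoPlayer and start playback.
import media from '@ohos.multimedia.media'

async function playOnSurface(surfaceID) {
  let videoPlayer = await media.createVideoPlayer();
  videoPlayer.url = 'file:///data/media/test.mp4';   // illustrative local media path
  await videoPlayer.setDisplaySurface(surfaceID);    // surfaceID obtained from the XComponent
  await videoPlayer.prepare();
  await videoPlayer.play();
}
```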
@@ -8,8 +8,6 @@
![zh-ch_image_video_recorder_state_machine](figures/zh-ch_image_video_recorder_state_machine.png)
**Figure 2** Layer 0 diagram of video recording
![zh-ch_image_video_recorder_zero](figures/zh-ch_image_video_recorder_zero.png)
@@ -20,7 +18,7 @@
### Full-Process Scenario
This scenario covers the full process: creating an instance, setting the recording parameters, recording video, pausing the recording, resuming the recording, stopping the recording, and releasing resources; a hedged lifecycle sketch follows the code fragment below.
```js
import media from '@ohos.multimedia.media'
@@ -148,5 +146,4 @@ export class VideoRecorderDemo {
    surfaceID = undefined;
}
}
```
\ No newline at end of file
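The body of `VideoRecorderDemo` is largely elided above. As an outline of the full process listed at the start of this section, the lifecycle calls look roughly like this; the recorder configuration object is omitted and the method set is assumed from the `@ohos.multimedia.media` module, so it may differ from the original sample.

```js
// Minimal sketch, not the original sample: video recorder lifecycle.
import media from '@ohos.multimedia.media'

async function videoRecorderDemo(videoConfig) {
  let videoRecorder = await media.createVideoRecorder(); // create the instance
  await videoRecorder.prepare(videoConfig);              // set the recording parameters
  let surfaceID = await videoRecorder.getInputSurface(); // surface handed to the producer (camera)
  await videoRecorder.start();                           // start recording
  await videoRecorder.pause();                           // pause recording
  await videoRecorder.resume();                          // resume recording
  await videoRecorder.stop();                            // stop recording
  await videoRecorder.release();                         // release resources
  surfaceID = undefined;
}
```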