From 027fd986256c7bb6c44e1d87b9e335f1563b1121 Mon Sep 17 00:00:00 2001 From: Mr_YX <496043997@qq.com> Date: Fri, 17 Jun 2022 19:39:48 +0000 Subject: [PATCH] Signed-off-by: mr-yx <496043997@qq.com> --- zh-cn/application-dev/media/image.md | 166 +++++++++--------- .../application-dev/media/opensles-capture.md | 24 +-- .../media/opensles-playback.md | 27 +-- zh-cn/application-dev/media/video-playback.md | 6 +- zh-cn/application-dev/media/video-recorder.md | 7 +- 5 files changed, 98 insertions(+), 132 deletions(-) diff --git a/zh-cn/application-dev/media/image.md b/zh-cn/application-dev/media/image.md index 0c744f21af..81246819fb 100644 --- a/zh-cn/application-dev/media/image.md +++ b/zh-cn/application-dev/media/image.md @@ -12,7 +12,7 @@ ### 全流程场景 -包含流程:创建实例,读取图片信息,读写pixelmap,更新数据,打包像素,释放资源等流程。 +包含流程:创建实例、读取图片信息、读写pixelmap、更新数据、打包像素、释放资源等流程。 ```js const Color = new ArrayBuffer(96);//用于存放图像像素数据 @@ -27,7 +27,7 @@ let opts = { alphaType: 0, editable: true, pixelFormat: 4, scaleMode: 1, size: { done(); }) //用于读像素 - pixelmap.readPixels(area,(data) => { +pixelmap.readPixels(area,(data) => { if(data !== null) { var bufferArr = new Uint8Array(area.pixels); var res = true; @@ -50,39 +50,39 @@ pixelmap.readPixelsToBuffer(readBuffer,() => { var bufferArr = new Uint8Array(readBuffer); var res = true; for (var i = 0; i < bufferArr.length; i++) { - if(res) { - if (bufferArr[i] !== 0) { - res = false; - console.info('TC_020-1 Success'); - expect(true).assertTrue(); - done(); - break; - } - } + if(res) { + if (bufferArr[i] !== 0) { + res = false; + console.info('TC_020-1 Success'); + expect(true).assertTrue(); + done(); + break; + } } +} //用于写像素 pixelmap.writePixels(area,() => { const readArea = { pixels: new ArrayBuffer(20), offset: 0, stride: 8, region: { size: { height: 1, width: 2 }, x: 0, y: 0 }} - pixelmap.readPixels(readArea,() => { - var readArr = new Uint8Array(readArea.pixels); - var res = true; - for (var i = 0; i < readArr.length; i++) { - if(res) { - if (readArr[i] !== 0) { - res = false; - console.info('TC_022-1 Success'); - expect(true).assertTrue(); - done(); - break; - } - } + pixelmap.readPixels(readArea,() => { + var readArr = new Uint8Array(readArea.pixels); + var res = true; + for (var i = 0; i < readArr.length; i++) { + if(res) { + if (readArr[i] !== 0) { + res = false; + console.info('TC_022-1 Success'); + expect(true).assertTrue(); + done(); + break; + } } + } //用于写像素到缓冲区 - pixelmap.writeBufferToPixels(writeColor).then(() => { - const readBuffer = new ArrayBuffer(96); - pixelmap.readPixelsToBuffer(readBuffer).then (() => { +pixelmap.writeBufferToPixels(writeColor).then(() => { + const readBuffer = new ArrayBuffer(96); + pixelmap.readPixelsToBuffer(readBuffer).then (() => { var bufferArr = new Uint8Array(readBuffer); var res = true; for (var i = 0; i < bufferArr.length; i++) { @@ -95,18 +95,18 @@ pixelmap.writePixels(area,() => { break; } } - } + } //用于获取图片信息 pixelmap.getImageInfo( imageInfo => { - if (imageInfo !== null) { - console.info('TC_024-1 imageInfo is ready'); - expect(imageInfo.size.height == 4).assertTrue(); - expect(imageInfo.size.width == 6).assertTrue(); - expect(imageInfo.pixelFormat == 4).assertTrue(); - done(); - } - }) + if (imageInfo !== null) { + console.info('TC_024-1 imageInfo is ready'); + expect(imageInfo.size.height == 4).assertTrue(); + expect(imageInfo.size.width == 6).assertTrue(); + expect(imageInfo.pixelFormat == 4).assertTrue(); + done(); + } +}) //用于释放pixelmap pixelmap.release(()=>{ @@ -136,7 +136,7 @@ imagePackerApi.packing(imageSourceApi, packOpts, 
data => { console.info('TC_062-1 finished'); expect(data !== null).assertTrue(); done(); - }) +}) //用于释放imagepacker imagePackerApi.release(); @@ -171,9 +171,9 @@ imageSourceApi.createPixelMap(decodingOptions, pixelmap => { //用于promise创建pixelmap imageSourceApi.createPixelMap().then(pixelmap => { - console.info('TC_050-11 createPixelMap '); - expect(pixelmap !== null ).assertTrue(); - done(); + console.info('TC_050-11 createPixelMap '); + expect(pixelmap !== null ).assertTrue(); + done(); }) //函数调用发生异常时,捕捉错误信息 @@ -181,7 +181,7 @@ catch(error => { console.log('TC_050-11 error: ' + error); expect().assertFail(); done(); - }) +}) //用于获取像素每行字节数 pixelmap.getBytesNumberPerRow( num => { @@ -192,13 +192,13 @@ pixelmap.getBytesNumberPerRow( num => { //用于获取像素总字节数 pixelmap.getPixelBytesNumber(num => { - console.info('TC_026-1 num is ' + num); - expect(num == expectNum).assertTrue(); - done(); - }) + console.info('TC_026-1 num is ' + num); + expect(num == expectNum).assertTrue(); + done(); +}) //用于获取pixelmap信息 - pixelmap.getImageInfo( imageInfo => {}) +pixelmap.getImageInfo( imageInfo => {}) //用于打印获取失败信息 console.info('TC_024-1 imageInfo is empty'); @@ -206,17 +206,17 @@ expect(false).assertTrue() //用于释放pixelmap pixelmap.release(()=>{ - expect(true).assertTrue(); - console.log('TC_027-1 suc'); - done(); - }) + expect(true).assertTrue(); + console.log('TC_027-1 suc'); + done(); +}) //用于捕捉释放失败信息 catch(error => { - console.log('TC_027-1 error: ' + error); - expect().assertFail(); - done(); - }) + console.log('TC_027-1 error: ' + error); + expect().assertFail(); + done(); +}) ``` ### 编码场景 @@ -225,14 +225,14 @@ catch(error => { /data/local/tmp/test.png //设置创建imagesource的路径 //用于设置imagesource - const imageSourceApi = image.createImageSource(path);//'/data/local/tmp/test.png' +const imageSourceApi = image.createImageSource(path);//'/data/local/tmp/test.png' //如果创建imagesource失败,打印错误信息 if (imageSourceApi == null) { - console.info('TC_062 create image source failed'); - expect(false).assertTrue(); - done(); - } + console.info('TC_062 create image source failed'); + expect(false).assertTrue(); + done(); +} //如果创建imagesource成功,则创建imagepacker const imagePackerApi = image.createImagePacker(); @@ -242,7 +242,7 @@ if (imagePackerApi == null) { console.info('TC_062 create image packer failed'); expect(false).assertTrue(); done(); - } +} //如果创建imagepacker成功,则设置编码参数 let packOpts = { format:["image/jpeg"], //支持编码的格式为jpg @@ -251,20 +251,20 @@ let packOpts = { format:["image/jpeg"], //支持编码的格式为jpg //用于编码 imagePackerApi.packing(imageSourceApi, packOpts) .then( data => { - console.info('TC_062 finished'); - expect(data !== null).assertTrue(); - done(); - }) + console.info('TC_062 finished'); + expect(data !== null).assertTrue(); + done(); +}) //编码完成,释放imagepacker - imagePackerApi.release(); +imagePackerApi.release(); //用于获取imagesource信息 imageSourceApi.getImageInfo(imageInfo => { - console.info('TC_045 imageInfo'); - expect(imageInfo !== null).assertTrue(); - done(); - }) + console.info('TC_045 imageInfo'); + expect(imageInfo !== null).assertTrue(); + done(); +}) //用于更新增量数据 imageSourceIncrementalSApi.updateData(array, false, 0, 10,(error,data )=> {}) @@ -278,22 +278,22 @@ imageSourceIncrementalSApi.updateData(array, false, 0, 10,(error,data )=> {}) ```js public async init(surfaceId: any) { - //服务端代码,创建ImageReceiver - var receiver = image.createImageReceiver(8 * 1024, 8, image.ImageFormat.JPEG, 1); + //服务端代码,创建ImageReceiver + var receiver = image.createImageReceiver(8 * 1024, 8, image.ImageFormat.JPEG, 1); - //获取Surface ID - var 
surfaceId = await receiver.getReceivingSurfaceId(); + //获取Surface ID + var surfaceId = await receiver.getReceivingSurfaceId(); - //注册Surface的监听,在suface的buffer准备好后触发 - receiver.on('imageArrival', () => { - //去获取Surface中最新的buffer - receiver.readNextImage((err, img) => { - img.getComponent(4, (err, componet) => { - //消费componet.byteBuffer,例如:将buffer内容保存成图片。 - }) - }) - }) + //注册Surface的监听,在suface的buffer准备好后触发 + receiver.on('imageArrival', () => { + //去获取Surface中最新的buffer + receiver.readNextImage((err, img) => { + img.getComponent(4, (err, componet) => { + //消费componet.byteBuffer,例如:将buffer内容保存成图片。 + }) + }) + }) - //调用Camera方法将surfaceId传递给Camera。camera会通过surfaceId获取surface,并生产出surface buffer。 + //调用Camera方法将surfaceId传递给Camera。camera会通过surfaceId获取surface,并生产出surface buffer。 } ``` \ No newline at end of file diff --git a/zh-cn/application-dev/media/opensles-capture.md b/zh-cn/application-dev/media/opensles-capture.md index f650319925..fb17c62285 100644 --- a/zh-cn/application-dev/media/opensles-capture.md +++ b/zh-cn/application-dev/media/opensles-capture.md @@ -2,10 +2,8 @@ ## 场景介绍 -开发者可以通过本文了解到在**OpenHarmony**如何使用**OpenSL ES**进行录音相关操作;当前仅实现了部分[**OpenSL ES**接口] -(https://gitee.com/openharmony/third_party_opensles/blob/master/api/1.0.1/OpenSLES.h),未实现接口调用后会返回**SL_RESULT_FEATURE_UNSUPPORTED** - - +开发者可以通过本文了解到在**OpenHarmony**如何使用 **OpenSL ES** 进行录音相关操作;当前仅实现了部分[**OpenSL ES**接口] +(https://gitee.com/openharmony/third_party_opensles/blob/master/api/1.0.1/OpenSLES.h),未实现接口调用后会返回**SL_RESULT_FEATURE_UNSUPPORTED**。 ## 开发步骤 @@ -19,7 +17,7 @@ #include ``` -2. 使用 **slCreateEngine** 接口创建引擎对象和实例化引擎对象 **engine**。 +2. 使用 **slCreateEngine** 接口创建引擎对象和实例化引擎对象 **engine** ```c++ SLObjectItf engineObject = nullptr; @@ -27,8 +25,6 @@ (*engineObject)->Realize(engineObject, SL_BOOLEAN_FALSE); ``` - - 3. 获取接口 **SL_IID_ENGINE** 的引擎接口 **engineEngine** 实例 ```c++ @@ -36,9 +32,7 @@ result = (*engineObject)->GetInterface(engineObject, SL_IID_ENGINE, &engineItf); ``` - - -4. 配置录音器信息(配置输入源audiosource、输出源audiosink),创建录音对象**pcmCapturerObject** 。 +4. 配置录音器信息(配置输入源audiosource、输出源audiosink),创建录音对象**pcmCapturerObject** ```c++ SLDataLocator_IODevice io_device = { @@ -82,20 +76,18 @@ 5. 获取录音接口**SL_IID_RECORD** 的 **recordItf** 接口实例 - ``` + ```c++ SLRecordItf recordItf; (*pcmCapturerObject)->GetInterface(pcmCapturerObject, SL_IID_RECORD, &recordItf); ``` 6. 获取接口 **SL_IID_OH_BUFFERQUEUE** 的 **bufferQueueItf** 实例 - ``` + ```c++ SLOHBufferQueueItf bufferQueueItf; (*pcmCapturerObject)->GetInterface(pcmCapturerObject, SL_IID_OH_BUFFERQUEUE, &bufferQueueItf); ``` - - 7. 注册 **BufferQueueCallback** 回调 ```c++ @@ -120,7 +112,6 @@ (*bufferQueueItf)->RegisterCallback(bufferQueueItf, BufferQueueCallback, wavFile_); ``` - 8. 开始录音 ```c++ @@ -145,7 +136,6 @@ } ``` - 9. 结束录音 ```c++ @@ -159,4 +149,4 @@ wavFile_ = nullptr; return; } - ``` + ``` \ No newline at end of file diff --git a/zh-cn/application-dev/media/opensles-playback.md b/zh-cn/application-dev/media/opensles-playback.md index 432003db81..3f3b891f82 100644 --- a/zh-cn/application-dev/media/opensles-playback.md +++ b/zh-cn/application-dev/media/opensles-playback.md @@ -1,13 +1,9 @@ -# OpenSL ES音频播放开发指导 - - +# OpenSL ES音频播放开发指导 ## 场景介绍 开发者可以通过本文了解到在**OpenHarmony**如何使用**OpenSL ES**进行音频播放相关操作;当前仅实现了部分[**OpenSL ES**接口](https://gitee.com/openharmony/third_party_opensles/blob/master/api/1.0.1/OpenSLES.h),未实现接口调用后会返回**SL_RESULT_FEATURE_UNSUPPORTED** - - ## 开发步骤 以下步骤描述了在**OpenHarmony**如何使用**OpenSL ES**开发音频播放功能: @@ -20,9 +16,7 @@ #include ``` - - -2. 使用 **slCreateEngine** 接口和获取 **engine** 实例。 +2. 
使用 **slCreateEngine** 接口和获取 **engine** 实例 ```c++ SLObjectItf engineObject = nullptr; @@ -30,8 +24,6 @@ (*engineObject)->Realize(engineObject, SL_BOOLEAN_FALSE); ``` - - 3. 获取接口 **SL_IID_ENGINE** 的 **engineEngine** 实例 ```c++ @@ -39,9 +31,7 @@ (*engineObject)->GetInterface(engineObject, SL_IID_ENGINE, &engineEngine); ``` - - -4. 配置播放器信息,创建 **AudioPlayer** 。 +4. 配置播放器信息,创建 **AudioPlayer** ```c++ SLDataLocator_BufferQueue slBufferQueue = { @@ -66,8 +56,6 @@ (*pcmPlayerObject)->Realize(pcmPlayerObject, SL_BOOLEAN_FALSE); ``` - - 5. 获取接口 **SL_IID_OH_BUFFERQUEUE** 的 **bufferQueueItf** 实例 ``` @@ -75,8 +63,6 @@ (*pcmPlayerObject)->GetInterface(pcmPlayerObject, SL_IID_OH_BUFFERQUEUE, &bufferQueueItf); ``` - - 6. 打开音频文件,注册 **BufferQueueCallback** 回调 ```c++ @@ -101,8 +87,6 @@ (*bufferQueueItf)->RegisterCallback(bufferQueueItf, BufferQueueCallback, wavFile_); ``` - - 7. 获取接口 **SL_PLAYSTATE_PLAYING** 的 **playItf** 实例,开始播放 ```c++ @@ -111,13 +95,10 @@ (*playItf)->SetPlayState(playItf, SL_PLAYSTATE_PLAYING); ``` - - 8. 结束音频播放 ```c++ (*playItf)->SetPlayState(playItf, SL_PLAYSTATE_STOPPED); (*pcmPlayerObject)->Destroy(pcmPlayerObject); (*engineObject)->Destroy(engineObject); - ``` - + ``` \ No newline at end of file diff --git a/zh-cn/application-dev/media/video-playback.md b/zh-cn/application-dev/media/video-playback.md index 5e92492a41..4e94b7364a 100644 --- a/zh-cn/application-dev/media/video-playback.md +++ b/zh-cn/application-dev/media/video-playback.md @@ -8,13 +8,11 @@ ![zh-ch_image_video_state_machine](figures/zh-ch_image_video_state_machine.png) - - **图2** 视频播放零层图 ![zh-ch_image_video_player](figures/zh-ch_image_video_player.png) -*注意:视频播放需要显示、音频、编解码等硬件能力。 +*注意:视频播放需要显示、音频、编解码等硬件能力。* 1. 三方应用从Xcomponent组件获取surfaceID。 2. 三方应用把surfaceID传递给VideoPlayer JS。 @@ -118,7 +116,7 @@ export class VideoPlayerDemo { console.info('pause success'); }, this.failureCallback).catch(this.catchCallback); - // 通过promise回调方式获取视频轨道信息 + // 通过promise回调方式获取视频轨道信息ommunication_dsoftbus let arrayDescription; await videoPlayer.getTrackDescription().then((arrlist) => { if (typeof (arrlist) != 'undefined') { diff --git a/zh-cn/application-dev/media/video-recorder.md b/zh-cn/application-dev/media/video-recorder.md index efe3d29d97..2e722caf3a 100644 --- a/zh-cn/application-dev/media/video-recorder.md +++ b/zh-cn/application-dev/media/video-recorder.md @@ -8,8 +8,6 @@ ![zh-ch_image_video_recorder_state_machine](figures/zh-ch_image_video_recorder_state_machine.png) - - **图2** 视频录制零层图 ![zh-ch_image_video_recorder_zero](figures/zh-ch_image_video_recorder_zero.png) @@ -20,7 +18,7 @@ ### 全流程场景 -包含流程:创建实例,设置录制参数,录制视频,暂停录制,恢复录制,停止录制,释放资源等流程。 +包含流程:创建实例、设置录制参数、录制视频、暂停录制、恢复录制、停止录制、释放资源等流程。 ```js import media from '@ohos.multimedia.media' @@ -148,5 +146,4 @@ export class VideoRecorderDemo { surfaceID = undefined; } } -``` - +``` \ No newline at end of file -- GitLab
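
The image.md hunks above only reformat fragments of a test suite, so the decode-and-repack flow they touch is easy to lose track of. The sketch below pulls the same calls into one self-contained routine; it is illustrative only, reusing the sample path `/data/local/tmp/test.png` and the `{ format: ["image/jpeg"], quality: 98 }` packing options shown in the hunks, and assuming the `@ohos.multimedia.image` module used by those documents. The `packTestImage` wrapper name is hypothetical and not part of the patched files.

```js
import image from '@ohos.multimedia.image'

async function packTestImage() {
  // Create an ImageSource from the sample path used in the hunks above.
  const imageSourceApi = image.createImageSource('/data/local/tmp/test.png');
  if (imageSourceApi == null) {
    console.info('create image source failed');
    return;
  }

  // Decode the source into a PixelMap (promise form, as in the hunks).
  const pixelmap = await imageSourceApi.createPixelMap();

  // Re-encode through an ImagePacker with the packing options from the hunks.
  const imagePackerApi = image.createImagePacker();
  const packOpts = { format: ["image/jpeg"], quality: 98 };
  const data = await imagePackerApi.packing(imageSourceApi, packOpts);
  console.info('packing finished, byteLength = ' + data.byteLength);

  // Release native resources in reverse order of creation, mirroring the
  // resource-release steps of the full-flow scenario in image.md.
  await imagePackerApi.release();
  await pixelmap.release();
  await imageSourceApi.release();
}
```

In practice the packed `ArrayBuffer` would be written to a file or handed to another consumer before the sources are released; that step is omitted here to keep the sketch within the scope of the APIs shown in the patch.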