Commit 26f6d137 authored by qinliwen

<Add camera & audio & player Validator test cases>

Signed-off-by: qinliwen <qinliwen3@huawei.com>
Parent c22cb058
import audio from '@ohos.multimedia.audio'
import Logger from '../model/Logger'
// @ts-ignore
import fs from '@ohos.file.fs'
import mediaPlay from '../model/mediaPlay'
class AudioCapturer {
private tag: string = 'qlw AudioCapture'
private static instance: AudioCapturer = new AudioCapturer()
private audioCapturer: audio.AudioCapturer = undefined
private fd: number = undefined
private isRecorder: boolean = false
private file: fs.File = undefined
async createAudioCapturer() {
let AudioStreamInfo = {
samplingRate: audio.AudioSamplingRate.SAMPLE_RATE_44100,
channels: audio.AudioChannel.CHANNEL_2,
sampleFormat: audio.AudioSampleFormat.SAMPLE_FORMAT_S16LE,
encodingType: audio.AudioEncodingType.ENCODING_TYPE_RAW
}
let AudioCapturerInfo = {
source: audio.SourceType.SOURCE_TYPE_MIC,
capturerFlags: 0
}
let AudioCapturerOptions = {
streamInfo: AudioStreamInfo,
capturerInfo: AudioCapturerInfo
// @ts-ignore
}
this.audioCapturer = await audio.createAudioCapturer(AudioCapturerOptions)
}
async getFileFd() {
let filesDir = globalThis.abilityContext.filesDir
let path = filesDir + '/test.wav'
Logger.info(this.tag, `getFileFd path : ${path}`)
this.file = fs.openSync(path, fs.OpenMode.READ_WRITE | fs.OpenMode.CREATE | fs.OpenMode.TRUNC)
return this.file.fd
}
async startCapturer() {
try {
this.fd = await this.getFileFd()
Logger.info(this.tag, `fd : ${this.fd}`)
if (this.fd !== null) {
Logger.info(this.tag, `create audio fileAssets success fd : ${this.fd}`)
}
else {
Logger.info(this.tag, `create audio fileAssets error`)
}
let header = mediaPlay.encodeWAV(audio.AudioSamplingRate.SAMPLE_RATE_44100, audio.AudioSampleFormat.SAMPLE_FORMAT_S16LE, audio.AudioChannel.CHANNEL_2)
Logger.info(this.tag, `wav header length: ${header.buffer.byteLength}`)
fs.writeSync(this.fd, header.buffer)
this.audioCapturer.start(async (err) => {
if (err) {
Logger.info(this.tag, `Capture start failed`)
} else {
Logger.info(this.tag, `Capture start success`)
let bufferSize = await this.audioCapturer.getBufferSize()
Logger.info(this.tag, `audioCapture bufferSize: ${bufferSize}`)
this.isRecorder = true
while (this.isRecorder) {
Logger.info(this.tag, 'audioCapture: ---------READ BUFFER---------')
let buffer = await this.audioCapturer.read(bufferSize, true)
Logger.info(this.tag, 'audioCapture: ---------WRITE BUFFER---------')
fs.writeSync(this.fd, buffer)
}
}
})
} catch (err) {
Logger.info(this.tag, `startCapturer fail err: ${err}, message: ${err.message}, code: ${err.code}`)
}
}
async stopCapturer() {
try {
await this.audioCapturer.stop()
this.isRecorder = false
Logger.info(this.tag, `stopCapturer success`)
} catch (err) {
Logger.info(this.tag, `stopCapturer fail err: ${err}, message: ${err.message}, code: ${err.code}`)
}
}
async releaseCapturer() {
try {
if (this.audioCapturer) {
await this.audioCapturer.release()
Logger.info(this.tag, `releaseCapturer success`)
}
if (this.file) {
fs.closeSync(this.file);
Logger.info(this.tag, `release file success`)
}
} catch (err) {
Logger.info(this.tag, `releaseCapturer fail err: ${err}, message: ${err.message}, code: ${err.code}`)
}
}
}
export default new AudioCapturer()
\ No newline at end of file
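A minimal usage sketch for the AudioCapturer service above; the import path, the helper name, and the 3-second duration are illustrative assumptions, and the caller is assumed to hold the microphone permission:

import AudioCapturer from '../model/AudioCapturer'

async function recordThreeSeconds() {
  // createAudioCapturer() builds the 44.1 kHz / S16LE / stereo capturer defined above
  await AudioCapturer.createAudioCapturer()
  // startCapturer() writes the WAV header and then loops reading PCM frames into test.wav
  await AudioCapturer.startCapturer()
  // stopCapturer() ends the read loop; releaseCapturer() frees the capturer and closes the file
  setTimeout(async () => {
    await AudioCapturer.stopCapturer()
    await AudioCapturer.releaseCapturer()
  }, 3000)
}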
import audio from '@ohos.multimedia.audio'
import Logger from '../model/Logger'
// @ts-ignore
import fs from '@ohos.file.fs';
class AudioRenderer {
private tag: string = 'qlw AudioRenderer'
private static instance: AudioRenderer = new AudioRenderer()
private audioRenderer: audio.AudioRenderer = undefined
private fd: number = undefined
private offset: number = undefined
async createAudioRenderer(){
let audioStreamInfo = {
samplingRate: audio.AudioSamplingRate.SAMPLE_RATE_44100,
channels: audio.AudioChannel.CHANNEL_2,
// channels: audio.AudioChannel.CHANNEL_1,
sampleFormat: audio.AudioSampleFormat.SAMPLE_FORMAT_S16LE,
encodingType: audio.AudioEncodingType.ENCODING_TYPE_RAW
}
let audioRendererInfo = {
content: audio.ContentType.CONTENT_TYPE_MUSIC,
usage: audio.StreamUsage.STREAM_USAGE_MEDIA,
rendererFlags: 0
}
let audioRendererOptions = {
streamInfo: audioStreamInfo,
rendererInfo: audioRendererInfo
}
this.audioRenderer = await audio.createAudioRenderer(audioRendererOptions);
}
async startRenderer(){
try{
await this.audioRenderer.start()
// await globalThis.abilityContext.resourceManager.getRawFileDescriptor('test_44100_2.wav').then(value => {
// this.fd = value.fd
// Logger.info(this.tag, `fd : ${this.fd}`)
// let fileDescriptor = {fd: value.fd, offset: value.offset, length: value.length};
// Logger.info(this.tag, `getRawFileDescriptor success fileDescriptor:` + JSON.stringify(fileDescriptor) )
// }).catch(error => {
// console.log('case getRawFileDescriptor err: ' + error);
// });
let value = await globalThis.abilityContext.resourceManager.getRawFd('test_44100_2.wav')
this.fd = value.fd
this.offset = value.offset
Logger.info(this.tag, `getRawFd fd : ${this.fd}, offset: ${value.offset}, length: ${value.length}`)
let bufferSize = await this.audioRenderer.getBufferSize()
Logger.info(this.tag, `audioRenderer bufferSize: ${bufferSize}`)
// value.length is the rawfile's size inside the resource package; stat() on the shared fd would return the whole package size
let len = value.length % bufferSize === 0 ? Math.floor(value.length / bufferSize) : Math.floor(value.length / bufferSize) + 1
let buf = new ArrayBuffer(bufferSize);
while (true) {
for (let i = 0;i < len; i++) {
let options = {
offset: i * bufferSize + this.offset,
length: bufferSize
}
await fs.read(this.fd, buf, options)
try {
await this.audioRenderer.write(buf)
} catch (err) {
console.error(`audioRenderer.write err: ${err}`)
}
}
}
}catch(err){
Logger.info(this.tag, `startRenderer fail err: ${err}, message: ${err.message}, code: ${err.code}`)
}
}
async stopRenderer(){
try{
await this.audioRenderer.stop()
}catch(err){
Logger.info(this.tag, `stopRenderer fail err: ${err}, message: ${err.message}, code: ${err.code}`)
}
}
async releaseRenderer(){
try{
if (this.audioRenderer){
await this.audioRenderer.release()
// await globalThis.abilityContext.resourceManager.closeRawFileDescriptor('test_44100_2.wav').then(()=> {
// Logger.info(this.tag, `closeRawFileDescriptor success`)
// }).catch(err => {
// Logger.info(this.tag, `closeRawFileDescriptor fail err: ${err}, message: ${err.message}, code: ${err.code}`)
// });
await globalThis.abilityContext.resourceManager.closeRawFd('test_44100_2.wav').then(()=> {
Logger.info(this.tag, `closeRawFd success`)
}).catch(err => {
Logger.info(this.tag, `closeRawFd fail err: ${err}, message: ${err.message}, code: ${err.code}`)
});
}
}catch(err){
Logger.info(this.tag, `releaseRenderer fail err: ${err}, message: ${err.message}, code: ${err.code}`)
}
}
}
export default new AudioRenderer()
\ No newline at end of file
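A matching sketch for the AudioRenderer service; the import path and function names are assumptions. Because startRenderer() keeps looping over the rawfile, it is deliberately not awaited:

import AudioRenderer from '../model/AudioRenderer'

async function startTestWav() {
  await AudioRenderer.createAudioRenderer()
  // startRenderer() opens rawfile test_44100_2.wav and keeps writing it to the renderer
  AudioRenderer.startRenderer()
}

async function stopTestWav() {
  await AudioRenderer.stopRenderer()
  await AudioRenderer.releaseRenderer()
}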
// @ts-nocheck
import camera from '@ohos.multimedia.camera'
import deviceInfo from '@ohos.deviceInfo'
import fileio from '@ohos.fileio'
import image from '@ohos.multimedia.image'
import media from '@ohos.multimedia.media'
import mediaLibrary from '@ohos.multimedia.mediaLibrary'
import Logger from '../model/Logger'
import MediaUtils from '../model/MediaUtils'
import prompt from '@ohos.prompt';
import fs from '@ohos.file.fs';
const CameraSize = {
WIDTH: 1280,
HEIGHT: 720
}
class CameraService {
private tag: string = 'qlw CameraService'
private static instance: CameraService = new CameraService()
private mediaUtil = MediaUtils.getInstance()
private cameraManager: camera.CameraManager = undefined
cameras: Array<camera.CameraDevice> = undefined
private cameraInput: camera.CameraInput = undefined
private previewOutput: camera.PreviewOutput = undefined
private photoOutput: camera.PhotoOutput = undefined
private cameraOutputCapability: camera.CameraOutputCapability = undefined
private captureSession: camera.CaptureSession = undefined
private mReceiver: image.ImageReceiver = undefined
private fileAsset: mediaLibrary.FileAsset = undefined
private fd: number = -1
private videoRecorder: media.VideoRecorder = undefined
private videoOutput: camera.VideoOutput = undefined
private handleTakePicture: (photoUri: string) => void = undefined
private videoConfig: any = {
audioSourceType: 1,
videoSourceType: 0,
profile: {
audioBitrate: 48000,
audioChannels: 2,
audioCodec: 'audio/mp4a-latm',
audioSampleRate: 48000,
durationTime: 1000,
fileFormat: 'mp4',
videoBitrate: 280000,
videoCodec: 'video/mp4v-es',
videoFrameWidth: 640,
videoFrameHeight: 480,
videoFrameRate: 15,
},
rotation: 270,
url: '',
orientationHint: 0,
location: { latitude: 30, longitude: 130 },
maxSize: 10000,
maxDuration: 10000
}
private videoProfileObj: camera.VideoProfile = {
format: 1,
size: {
"width": 640,
"height": 480
},
frameRateRange: {
"min": 5,
"max": 5
}
}
private photoProfileObj: camera.Profile = {
format: 1,
size: {
"width": 640,
"height": 480
}
}
private videoOutputStopBol: boolean = true
resolution: any = null
previewSizeResolution: any = null
constructor() {
try {
this.mReceiver = image.createImageReceiver(CameraSize.WIDTH, CameraSize.HEIGHT, image.ImageFormat.JPEG, 8)
Logger.info(this.tag, 'createImageReceiver')
this.mReceiver.on('imageArrival', () => {
Logger.info(this.tag, 'imageArrival')
this.mReceiver.readNextImage((err, image) => {
Logger.info(this.tag, 'readNextImage')
if (err || image === undefined) {
Logger.error(this.tag, 'failed to get valid image')
return
}
image.getComponent(4, (errMsg, img) => {
Logger.info(this.tag, 'getComponent')
if (errMsg || img === undefined) {
Logger.info(this.tag, 'failed to get valid buffer')
return
}
let buffer
if (img.byteBuffer) {
buffer = img.byteBuffer
} else {
Logger.error(this.tag, 'img.byteBuffer is undefined')
}
this.savePicture(buffer, image)
})
})
})
} catch (err) {
Logger.info(this.tag, `image Receiver err ${err.message}`)
}
}
async savePicture(buffer: ArrayBuffer, img: image.Image) {
try {
Logger.info(this.tag, 'savePicture')
let imgFileAsset = await this.mediaUtil.createAndGetUri(mediaLibrary.MediaType.IMAGE)
let imgPhotoUri = imgFileAsset.uri
Logger.info(this.tag, `photoUri = ${imgPhotoUri}`)
let imgFd = await this.mediaUtil.getFdPath(imgFileAsset)
Logger.info(this.tag, `fd = ${imgFd}`)
await fileio.write(imgFd, buffer)
await imgFileAsset.close(imgFd)
await img.release()
Logger.info(this.tag, 'save image done')
if (this.handleTakePicture) {
this.handleTakePicture(imgPhotoUri)
}
} catch (err) {
Logger.info(this.tag, `save picture err ${err.message}`)
}
}
async initCamera(surfaceId: number, cameraDeviceIndex: number, obj?, photoIndex?) {
try {
if (deviceInfo.deviceType === 'default') {
this.videoConfig.videoSourceType = 1
} else {
this.videoConfig.videoSourceType = 0
}
Logger.info(this.tag, `cameraDeviceIndex success: ${cameraDeviceIndex}`)
await this.releaseCamera()
await this.getCameraManagerFn()
await this.getSupportedCamerasFn()
await this.getSupportedOutputCapabilityFn(cameraDeviceIndex)
// await this.createPreviewOutputFn(obj ? obj : this.photoProfileObj, surfaceId)
await this.createPreviewOutputFn(this.cameraOutputCapability.previewProfiles[0], surfaceId)
// await this.createPhotoOutputFn(this.photoProfileObj)
await this.createPhotoOutputFn(obj ? obj : this.cameraOutputCapability.photoProfiles[photoIndex?photoIndex:0])
await this.createCameraInputFn(this.cameras[cameraDeviceIndex])
await this.cameraInputOpenFn()
await this.sessionFlowFn()
} catch (err) {
Logger.info(this.tag, 'initCamera err: ' + JSON.stringify(err.message))
}
}
// Exposure mode
isExposureModeSupportedFn(ind) {
try {
let status = this.captureSession.isExposureModeSupported(ind)
Logger.info(this.tag, `isExposureModeSupported success: ${status}`)
prompt.showToast({
message: status ? 'This mode is supported' : 'This mode is not supported',
duration: 2000,
bottom: '60%'
})
// Set the exposure mode
this.captureSession.setExposureMode(ind)
Logger.info(this.tag, `setExposureMode success`)
// Get the current exposure mode
let exposureMode = this.captureSession.getExposureMode()
Logger.info(this.tag, `getExposureMode success: ${exposureMode}`)
} catch (err) {
Logger.info(this.tag, `isExposureModeSupportedFn fail: ${err} , message: ${err.message}, code: ${err.code}`)
}
}
// Metering area (exposure area)
isMeteringPoint(Point1) {
try {
// Get the current exposure mode
let exposureMode = this.captureSession.getExposureMode()
Logger.info(this.tag, `getExposureMode success: ${exposureMode}`)
// Set the metering point (center of the exposure area)
this.captureSession.setMeteringPoint(Point1)
Logger.info(this.tag, `setMeteringPoint success`)
// Query the metering point
let exposurePoint = this.captureSession.getMeteringPoint()
Logger.info(this.tag, `getMeteringPoint success: ${exposurePoint}`)
} catch (err) {
Logger.info(this.tag, `isMeteringPoint fail err: ${err}, message: ${err.message}, code: ${err.code}`)
}
}
// Exposure compensation
isExposureBiasRange(ind) {
try {
// Query the exposure bias range
let biasRangeArray = this.captureSession.getExposureBiasRange()
Logger.info(this.tag, `getExposureBiasRange success: ${biasRangeArray}`)
// Set the exposure bias
this.captureSession.setExposureBias(ind)
Logger.info(this.tag, `setExposureBias success: ${ind}`)
// Query the current exposure value
let exposureValue = this.captureSession.getExposureValue()
Logger.info(this.tag, `getExposureValue success: ${exposureValue}`)
} catch (err) {
Logger.info(this.tag, `isExposureBiasRange fail err: ${err}, message: ${err.message}, code: ${err.code}`)
}
}
// Focus mode
isFocusMode(ind) {
try {
// Check whether the focus mode is supported
let status = this.captureSession.isFocusModeSupported(ind)
Logger.info(this.tag, `isFocusModeSupported success: ${status}`)
prompt.showToast({
message: status ? 'This mode is supported' : 'This mode is not supported',
duration: 2000,
bottom: '60%'
})
// Set the focus mode
this.captureSession.setFocusMode(ind)
Logger.info(this.tag, `setFocusMode success`)
// Get the current focus mode
let afMode = this.captureSession.getFocusMode()
Logger.info(this.tag, `getFocusMode success: ${afMode}`)
} catch (err) {
Logger.info(this.tag, `isFocusMode fail err: ${err}, message: ${err.message}, code: ${err.code}`)
}
}
// Focus point
isFocusPoint(Point) {
try {
// Set the focus point
this.captureSession.setFocusPoint(Point)
Logger.info(this.tag, `setFocusPoint success`)
// Get the current focus point
let point = this.captureSession.getFocusPoint()
Logger.info(this.tag, `getFocusPoint success: ${point}`)
} catch (err) {
Logger.info(this.tag, `isFocusPoint fail err: ${err}, message: ${err.message}, code: ${err.code}`)
}
}
// Flash
hasFlashFn(ind) {
try {
// Check whether the device has a flash
let status = this.captureSession.hasFlash()
Logger.info(this.tag, `hasFlash success: ${status}`)
// Check whether the flash mode is supported
let status1 = this.captureSession.isFlashModeSupported(ind)
Logger.info(this.tag, `isFlashModeSupported success: ${status1}`)
prompt.showToast({
message: status1 ? 'This mode is supported' : 'This mode is not supported',
duration: 2000,
bottom: '60%'
})
// Set the flash mode
this.captureSession.setFlashMode(ind)
Logger.info(this.tag, `setFlashMode success`)
// Get the current flash mode of the device
let flashMode = this.captureSession.getFlashMode()
Logger.info(this.tag, `getFlashMode success: ${flashMode}`)
} catch (err) {
Logger.info(this.tag, `hasFlashFn fail err: ${err}, message: ${err.message}, code: ${err.code}`)
}
}
// Zoom
setZoomRatioFn(num) {
try {
// Get the supported zoom ratio range
let zoomRatioRange = this.captureSession.getZoomRatioRange()
Logger.info(this.tag, `getZoomRatioRange success: ${zoomRatioRange}`)
// Set the zoom ratio
Logger.info(this.tag, `setZoomRatioFn num: ${num}`)
this.captureSession.setZoomRatio(num)
Logger.info(this.tag, `setZoomRatio success`)
// Get the current zoom ratio
let zoomRatio = this.captureSession.getZoomRatio()
Logger.info(this.tag, `getZoomRatio success: ${zoomRatio}`)
} catch (err) {
Logger.info(this.tag, `setZoomRatioFn fail err: ${err}, message: ${err.message}, code: ${err.code}`)
}
}
// Video stabilization
isVideoStabilizationModeSupportedFn(ind) {
try {
// Check whether the specified video stabilization mode is supported
let isSupported = this.captureSession.isVideoStabilizationModeSupported(ind)
Logger.info(this.tag, `isVideoStabilizationModeSupported success: ${isSupported}`)
prompt.showToast({
message: isSupported ? 'This mode is supported' : 'This mode is not supported',
duration: 2000,
bottom: '60%'
})
// Set the video stabilization mode
this.captureSession.setVideoStabilizationMode(ind)
Logger.info(this.tag, `setVideoStabilizationMode success`)
// Query the video stabilization mode currently in use
let vsMode = this.captureSession.getActiveVideoStabilizationMode()
Logger.info(this.tag, `getActiveVideoStabilizationMode success: ${vsMode}`)
} catch (err) {
Logger.info(this.tag, `isVideoStabilizationModeSupportedFn fail err: ${err}, message: ${err.message}, code: ${err.code}`)
}
}
setTakePictureCallback(callback) {
this.handleTakePicture = callback
}
// Map the photo orientation setting to a rotation angle
onChangeRotation() {
if (globalThis.photoOrientation == 0) {
return 0
}
if (globalThis.photoOrientation == 1) {
return 90
}
if (globalThis.photoOrientation == 2) {
return 180
}
if (globalThis.photoOrientation == 3) {
return 270
}
}
// Photo geolocation: to be provided by the location service later; fixed values are passed for now
onChangeLocation() {
if (globalThis.settingDataObj.locationBol) {
return {
latitude: 12,
longitude: 77,
altitude: 1000
}
}
return {
latitude: 0,
longitude: 0,
altitude: 0
}
}
// Take a photo
async takePicture(imageRotation?) {
try {
Logger.info(this.tag, 'takePicture start')
let photoSettings = {
rotation: imageRotation ? Number(imageRotation) : 0,
quality: 1,
location: {
latitude: 0,
longitude: 0,
altitude: 0
},
mirror: false
}
Logger.info(this.tag, `photoOutput capture photoSettings: ` + JSON.stringify(photoSettings))
await this.photoOutput.capture(photoSettings)
Logger.info(this.tag, 'takePicture end')
} catch (err) {
Logger.info(this.tag, `takePicture fail err: ${err}, message: ${err.message}, code: ${err.code}`)
}
}
// Get the file fd for the recording
async getFileFd() {
let filesDir = globalThis.abilityContext.filesDir
Logger.info(this.tag, `getFileFd filesDir : ${filesDir}`)
let path = filesDir + '/' + 'test.mp4'
Logger.info(this.tag, `getFileFd path : ${path}`)
let file = fs.openSync(path, fs.OpenMode.READ_WRITE | fs.OpenMode.CREATE | fs.OpenMode.TRUNC)
Logger.info(this.tag, `getFileFd : ${file.fd}`)
return file.fd
}
// Start recording
async startVideo() {
try {
Logger.info(this.tag, `startVideo begin`)
await this.captureSession.stop()
await this.captureSession.beginConfig()
this.fd = await this.getFileFd()
Logger.info(this.tag, `videoConfig.profile: ` + JSON.stringify(this.videoConfig.profile))
this.videoRecorder = await media.createVideoRecorder()
this.videoConfig.url = `fd://${this.fd}`
this.videoConfig.profile.videoFrameWidth = this.cameraOutputCapability.videoProfiles[0].size.width
this.videoConfig.profile.videoFrameHeight = this.cameraOutputCapability.videoProfiles[0].size.height
await this.videoRecorder.prepare(this.videoConfig)
let videoId = await this.videoRecorder.getInputSurface()
Logger.info(this.tag, `videoProfileObj: ` + JSON.stringify(this.videoProfileObj))
Logger.info(this.tag, `videoProfiles[0]: ` + JSON.stringify(this.cameraOutputCapability.videoProfiles[0]))
// this.videoOutput = await this.cameraManager.createVideoOutput(this.videoProfileObj, videoId)
this.videoOutput = await this.cameraManager.createVideoOutput(this.cameraOutputCapability.videoProfiles[0], videoId)
Logger.info(this.tag, `createVideoOutput success: ${this.videoOutput}`)
await this.captureSession.addOutput(this.videoOutput)
await this.captureSession.commitConfig()
await this.captureSession.start()
// await this.videoOutput.on('frameStart', async () => {
// Logger.info(this.tag, `frameStart start`)
// try {
// await this.videoRecorder.start()
// Logger.info(this.tag, `frameStart end`)
// } catch (err) {
// Logger.info(this.tag, `videoRecorder start fail err: ${err}`)
// }
// })
await this.videoOutput.start()
await this.videoRecorder.start().then(() => {
setTimeout(async () => {
await this.stopVideo()
Logger.info(this.tag, `setTimeout stopVideo end`)
}, 3000)
})
Logger.info(this.tag, `videoOutput end`)
} catch (err) {
Logger.info(this.tag, `startVideo fail err: ${err}, message: ${err.message}, code: ${err.code}`)
}
}
// Stop recording
async stopVideo() {
try {
if (this.videoRecorder) {
await this.videoRecorder.stop()
await this.videoRecorder.release()
}
if (this.videoOutput) {
if (this.videoOutputStopBol) {
await this.videoOutput.stop()
}
await this.videoOutput.release()
}
if (this.fileAsset) {
await this.fileAsset.close(this.fd)
return this.fileAsset
}
Logger.info(this.tag, `stopVideo success`)
} catch (err) {
Logger.info(this.tag, `stopVideo fail err: ${err}, message: ${err.message}, code: ${err.code}`)
}
}
// Query the output capability supported by the camera device in the current mode
async getSupportedOutputCapabilityFn(cameraDeviceIndex) {
Logger.info(this.tag, `cameraOutputCapability cameraId: ${this.cameras[cameraDeviceIndex].cameraId}`)
// @ts-ignore
this.cameraOutputCapability = this.cameraManager.getSupportedOutputCapability(this.cameras[cameraDeviceIndex])
let previewSize = []
let photoSize = []
this.cameraOutputCapability.previewProfiles.forEach((item, index) => {
// Logger.info(this.tag, `cameraOutputCapability previewProfiles index: ${index}, item:` + JSON.stringify(item))
previewSize.push({
value: `${item.size.width}x${item.size.height}`
})
})
this.cameraOutputCapability.photoProfiles.forEach((item, index) => {
Logger.info(this.tag, `cameraOutputCapability photoProfiles index: ${index}, item:` + JSON.stringify(item))
photoSize.push({
value: `${item.size.width}x${item.size.height}`
})
})
Logger.info(this.tag, `cameraOutputCapability previewProfiles:` + JSON.stringify(this.cameraOutputCapability.previewProfiles))
Logger.info(this.tag, `cameraOutputCapability photoProfiles:` + JSON.stringify(this.cameraOutputCapability.photoProfiles))
Logger.info(this.tag, `cameraOutputCapability videoProfiles:` + JSON.stringify(this.cameraOutputCapability.videoProfiles))
Logger.info(this.tag, `cameraOutputCapability previewProfiles previewSize:` + JSON.stringify(previewSize))
this.resolution = previewSize
this.previewSizeResolution = photoSize
return previewSize
}
// Release the session and its related resources
async releaseCamera() {
try {
if (this.cameraInput) {
await this.cameraInput.release()
}
if (this.previewOutput) {
await this.previewOutput.release()
}
if (this.photoOutput) {
await this.photoOutput.release()
}
if (this.videoOutput) {
await this.videoOutput.release()
}
if (this.captureSession) {
await this.captureSession.release()
}
Logger.info(this.tag, `releaseCamera success`)
} catch (err) {
Logger.info(this.tag, `releaseCamera fail err: ${err}, message: ${err.message}, code: ${err.code}`)
}
}
// Release the session
async releaseSession() {
await this.previewOutput.stop()
await this.photoOutput.release()
await this.captureSession.release()
Logger.info(this.tag, `releaseSession success`)
}
// Get the camera manager instance
async getCameraManagerFn() {
try {
this.cameraManager = await camera.getCameraManager(globalThis.abilityContext)
Logger.info(this.tag, `getCameraManager success: ` + JSON.stringify(this.cameraManager))
} catch (err) {
Logger.info(this.tag, `getCameraManagerFn fail err: ${err}, message: ${err.message}, code: ${err.code}`)
}
}
// Get the supported camera device objects
async getSupportedCamerasFn() {
try {
this.cameras = await this.cameraManager.getSupportedCameras()
Logger.info(this.tag, `getSupportedCameras success: ` + JSON.stringify(this.cameras))
Logger.info(this.tag, `getSupportedCameras length success: ${this.cameras.length}`)
} catch (err) {
Logger.info(this.tag, `getSupportedCamerasFn fail err: ${err}, message: ${err.message}, code: ${err.code}`)
}
}
// Create the previewOutput object
async createPreviewOutputFn(previewProfilesObj, surfaceId) {
try {
Logger.info(this.tag, `createPreviewOutputFn previewProfilesObj success: ` + JSON.stringify(previewProfilesObj))
this.previewOutput = await this.cameraManager.createPreviewOutput(previewProfilesObj, surfaceId.toString())
Logger.info(this.tag, `createPreviewOutputFn success: ` + JSON.stringify(this.previewOutput))
} catch (err) {
Logger.info(this.tag, `createPreviewOutputFn fail err: ${err}, message: ${err.message}, code: ${err.code}`)
}
}
// Create the photoOutput object
async createPhotoOutputFn(photoProfileObj) {
try {
Logger.info(this.tag, `createPhotoOutputFn photoProfileObj success: ` + JSON.stringify(photoProfileObj))
let mSurfaceId = await this.mReceiver.getReceivingSurfaceId()
this.photoOutput = await this.cameraManager.createPhotoOutput(photoProfileObj, mSurfaceId)
Logger.info(this.tag, `createPhotoOutputFn success: ` + JSON.stringify(this.photoOutput))
} catch (err) {
Logger.info(this.tag, `createPhotoOutputFn fail err: ${err}, message: ${err.message}, code: ${err.code}`)
}
}
// Create the cameraInput object
async createCameraInputFn(cameraDeviceIndex) {
try {
this.cameraInput = await this.cameraManager.createCameraInput(cameraDeviceIndex)
Logger.info(this.tag, `createCameraInputFn success: ${this.cameraInput}`)
} catch (err) {
Logger.info(this.tag, `createCameraInputFn fail err: ${err}, message: ${err.message}, code: ${err.code}`)
}
}
// Open the camera
async cameraInputOpenFn() {
await this.cameraInput.open()
.then((data) => {
Logger.info(this.tag, `cameraInputOpenFn open success: ${data}`)
})
.catch((err) => {
Logger.info(this.tag, `cameraInputOpenFn fail err: ${err}, message: ${err.message}, code: ${err.code}`)
})
}
// Session flow
async sessionFlowFn() {
try {
// Create the captureSession instance
this.captureSession = await this.cameraManager.createCaptureSession()
// Begin session configuration
await this.captureSession.beginConfig()
// Add the cameraInput to the session
await this.captureSession.addInput(this.cameraInput)
// Add the previewOutput to the session
await this.captureSession.addOutput(this.previewOutput)
// Add the photoOutput to the session
await this.captureSession.addOutput(this.photoOutput)
// Commit the session configuration
await this.captureSession.commitConfig()
// Start the session
await this.captureSession.start()
Logger.info(this.tag, `sessionFlowFn success`)
} catch (err) {
Logger.info(this.tag, `sessionFlowFn fail err: ${err}, message: ${err.message}, code: ${err.code}`)
}
}
}
export default new CameraService()
\ No newline at end of file
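A sketch of how a test page might drive CameraService; the import path, function names, and camera index 0 are assumptions, and the surface id is expected to come from an XComponent controller:

import cameraService from '../model/CameraService'

async function previewAndTakePhoto(surfaceId: number) {
  // Builds the manager, input, preview/photo outputs and starts the capture session
  await cameraService.initCamera(surfaceId, 0)
  cameraService.setTakePictureCallback((photoUri: string) => {
    console.info(`photo saved, uri = ${photoUri}`)
  })
  await cameraService.takePicture()
}

async function closeCamera() {
  await cameraService.releaseCamera()
}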
/*
* Copyright (c) 2022 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* @file Date and time utilities
*/
export default class DateTimeUtil {
/**
* Hours, minutes and seconds (HHmmss)
*/
getTime() {
const DATETIME = new Date()
return this.concatTime(DATETIME.getHours(), DATETIME.getMinutes(), DATETIME.getSeconds())
}
getHour() {
const DATETIME = new Date()
return DATETIME.getHours()
}
getMinute() {
const DATETIME = new Date()
return DATETIME.getMinutes()
}
getSecond() {
const DATETIME = new Date()
return DATETIME.getSeconds()
}
/**
* Year, month and date (yyyyMMdd)
*/
getDate() {
const DATETIME = new Date()
return this.concatDate(DATETIME.getFullYear(), DATETIME.getMonth() + 1, DATETIME.getDate())
}
getFullYear() {
const DATETIME = new Date()
return DATETIME.getFullYear()
}
getMonth() {
const DATETIME = new Date()
return DATETIME.getMonth() + 1
}
getDay() {
const DATETIME = new Date()
return DATETIME.getDate()
}
/**
* Pad a single-digit value with a leading zero
* @param value numeric value
*/
fill(value: number) {
return (value > 9 ? '' : '0') + value
}
/**
* Format year, month and date
* @param year
* @param month
* @param date
*/
concatDate(year: number, month: number, date: number) {
return `${year}${this.fill(month)}${this.fill(date)}`
}
/**
* Format hours, minutes and seconds
* @param hours
* @param minutes
* @param seconds
*/
concatTime(hours: number, minutes: number, seconds: number) {
return `${this.fill(hours)}${this.fill(minutes)}${this.fill(seconds)}`
}
}
\ No newline at end of file
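The formatters above return zero-padded, separator-free strings; a small illustrative sketch (the import path is assumed):

import DateTimeUtil from '../model/DateTimeUtil'

const dateTimeUtil = new DateTimeUtil()
// On 2022-09-05 at 08:07:09 these return:
dateTimeUtil.getDate()  // '20220905'
dateTimeUtil.getTime()  // '080709'
// MediaUtils combines them into display names such as 'IMG_20220905_080709.jpg'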
/*
* Copyright (c) 2022 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import hilog from '@ohos.hilog';
class Logger {
private domain: number;
private prefix: string;
private format: string = "%{public}s, %{public}s";
constructor(prefix: string) {
this.prefix = prefix;
this.domain = 0xFF00;
}
debug(...args: any[]) {
hilog.debug(this.domain, this.prefix, this.format, args);
}
info(...args: any[]) {
hilog.info(this.domain, this.prefix, this.format, args);
}
warn(...args: any[]) {
hilog.warn(this.domain, this.prefix, this.format, args);
}
error(...args: any[]) {
hilog.error(this.domain, this.prefix, this.format, args);
}
}
export default new Logger('[Screenshot]');
\ No newline at end of file
/*
* Copyright (c) 2022 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import mediaLibrary from '@ohos.multimedia.mediaLibrary'
import DateTimeUtil from '../model/DateTimeUtil'
import Logger from './Logger'
export default class MediaUtils {
private tag: string = 'qlw MediaUtils'
private mediaTest: mediaLibrary.MediaLibrary = mediaLibrary.getMediaLibrary(globalThis.abilityContext)
private static instance: MediaUtils = new MediaUtils()
public static getInstance() {
if (this.instance === undefined) {
this.instance = new MediaUtils()
}
return this.instance
}
async createAndGetUri(mediaType: number) {
let info = this.getInfoFromType(mediaType)
let dateTimeUtil = new DateTimeUtil()
let name = `${dateTimeUtil.getDate()}_${dateTimeUtil.getTime()}`
let displayName = `${info.prefix}${name}${info.suffix}`
Logger.info(this.tag, `displayName = ${displayName},mediaType = ${mediaType}`)
let publicPath = await this.mediaTest.getPublicDirectory(info.directory)
Logger.info(this.tag, `publicPath = ${publicPath}`)
try{
return await this.mediaTest.createAsset(mediaType, displayName, publicPath)
}catch(err){
Logger.info(this.tag, `createAsset err ` + JSON.stringify(err))
}
}
async queryFile(dataUri: any) {
let fileKeyObj = mediaLibrary.FileKey
if (dataUri !== undefined) {
let args = dataUri.id.toString()
let fetchOp = {
selections: `${fileKeyObj.ID}=?`,
selectionArgs: [args],
}
const fetchFileResult = await this.mediaTest.getFileAssets(fetchOp)
Logger.info(this.tag, `fetchFileResult.getCount() = ${fetchFileResult.getCount()}`)
const fileAsset = await fetchFileResult.getAllObject()
return fileAsset[0]
}
}
async getFdPath(fileAsset: any) {
let fd = await fileAsset.open('Rw')
Logger.info(this.tag, `fd = ${fd}`)
return fd
}
async createFile(mediaType: number) {
let dataUri = await this.createAndGetUri(mediaType)
if (dataUri) {
let fileAsset = await this.queryFile(dataUri)
if (fileAsset) {
let fd = await this.getFdPath(fileAsset)
return fd
}
}
}
async getFileAssetsFromType(mediaType: number) {
Logger.info(this.tag, `getFileAssetsFromType,mediaType = ${mediaType}`)
let fileKeyObj = mediaLibrary.FileKey
let fetchOp = {
selections: `${fileKeyObj.MEDIA_TYPE}=?`,
selectionArgs: [`${mediaType}`],
}
const fetchFileResult = await this.mediaTest.getFileAssets(fetchOp)
Logger.info(this.tag, `getFileAssetsFromType,fetchFileResult.count = ${fetchFileResult.getCount()}`)
let fileAssets = []
if (fetchFileResult.getCount() > 0) {
fileAssets = await fetchFileResult.getAllObject()
}
return fileAssets
}
async getAlbums() {
Logger.info(this.tag, 'getAlbums begin')
let albums = []
const [ files, images, videos, audios ] = await Promise.all([
this.getFileAssetsFromType(mediaLibrary.MediaType.FILE),
this.getFileAssetsFromType(mediaLibrary.MediaType.IMAGE),
this.getFileAssetsFromType(mediaLibrary.MediaType.VIDEO),
this.getFileAssetsFromType(mediaLibrary.MediaType.AUDIO)
])
albums.push({
albumName: 'Documents', count: files.length, mediaType: mediaLibrary.MediaType.FILE
})
albums.push({
albumName: 'Pictures', count: images.length, mediaType: mediaLibrary.MediaType.IMAGE
})
albums.push({
albumName: 'Videos', count: videos.length, mediaType: mediaLibrary.MediaType.VIDEO
})
albums.push({
albumName: 'Audios', count: audios.length, mediaType: mediaLibrary.MediaType.AUDIO
})
return albums
}
deleteFile(media: any) {
let uri = media.uri
Logger.info(this.tag, `deleteFile,uri = ${uri}`)
return this.mediaTest.deleteAsset(uri)
}
onDateChange(callback: () => void) {
this.mediaTest.on('albumChange', () => {
Logger.info(this.tag, 'albumChange called')
callback()
})
this.mediaTest.on('imageChange', () => {
Logger.info(this.tag, 'imageChange called')
callback()
})
this.mediaTest.on('audioChange', () => {
Logger.info(this.tag, 'audioChange called')
callback()
})
this.mediaTest.on('videoChange', () => {
Logger.info(this.tag, 'videoChange called')
callback()
})
this.mediaTest.on('fileChange', () => {
Logger.info(this.tag, 'fileChange called')
callback()
})
}
offDateChange() {
this.mediaTest.off('albumChange')
this.mediaTest.off('imageChange')
this.mediaTest.off('audioChange')
this.mediaTest.off('videoChange')
this.mediaTest.off('fileChange')
}
getInfoFromType(mediaType: number) {
let result = {
prefix: '', suffix: '', directory: 0
}
switch (mediaType) {
case mediaLibrary.MediaType.FILE:
result.prefix = 'FILE_'
result.suffix = '.txt'
result.directory = mediaLibrary.DirectoryType.DIR_DOCUMENTS
break
case mediaLibrary.MediaType.IMAGE:
result.prefix = 'IMG_'
result.suffix = '.jpg'
result.directory = mediaLibrary.DirectoryType.DIR_IMAGE
break
case mediaLibrary.MediaType.VIDEO:
result.prefix = 'VID_'
result.suffix = '.mp4'
result.directory = mediaLibrary.DirectoryType.DIR_VIDEO
break
case mediaLibrary.MediaType.AUDIO:
result.prefix = 'AUD_'
result.suffix = '.wav'
result.directory = mediaLibrary.DirectoryType.DIR_AUDIO
break
}
return result
}
}
\ No newline at end of file
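A small sketch of the typical MediaUtils call chain, mirroring the savePicture() flow in CameraService above (create an asset, write through its fd, then close it); the function name and buffer argument are placeholders:

import mediaLibrary from '@ohos.multimedia.mediaLibrary'
import fileio from '@ohos.fileio'
import MediaUtils from '../model/MediaUtils'

async function saveJpeg(buffer: ArrayBuffer) {
  const mediaUtil = MediaUtils.getInstance()
  // Creates e.g. IMG_<date>_<time>.jpg under the public Pictures directory
  const fileAsset = await mediaUtil.createAndGetUri(mediaLibrary.MediaType.IMAGE)
  const fd = await mediaUtil.getFdPath(fileAsset)
  await fileio.write(fd, buffer)
  await fileAsset.close(fd)
}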
/*
* Copyright (c) 2022 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import media from '@ohos.multimedia.media'
import Logger from '../model/Logger'
let audioConfig = {
audioSourceType: 1,
audioEncoder: 3,
audioEncodeBitRate: 22050,
audioSampleRate: 22050,
numberOfChannels: 2,
format: 6,
uri: ''
}
export default class RecordModel {
private tag: string = 'qlw RecordModel'
private audioRecorder: media.AudioRecorder = undefined
initAudioRecorder(handleStateChange: () => void) {
this.release();
this.audioRecorder = media.createAudioRecorder()
Logger.info(this.tag, 'create audioRecorder success')
this.audioRecorder.on('prepare', () => {
Logger.info(this.tag, 'setCallback prepare case callback is called')
this.audioRecorder.start()
})
this.audioRecorder.on('start', () => {
Logger.info(this.tag, 'setCallback start case callback is called')
handleStateChange()
})
this.audioRecorder.on('stop', () => {
Logger.info(this.tag, 'audioRecorder stop called')
this.audioRecorder.release()
})
this.audioRecorder.on('pause', () => {
Logger.info(this.tag, 'audioRecorder pause finish')
handleStateChange()
})
this.audioRecorder.on('resume', () => {
Logger.info(this.tag, 'audioRecorder resume finish')
handleStateChange()
})
}
release() {
if (typeof (this.audioRecorder) !== `undefined`) {
Logger.info(this.tag, 'audioRecorder release')
this.audioRecorder.release()
this.audioRecorder = undefined
}
}
startRecorder(pathName: string) {
Logger.info(this.tag, `startRecorder, pathName = ${pathName}`)
if (typeof (this.audioRecorder) !== 'undefined') {
Logger.info(this.tag, 'start prepare')
audioConfig.uri = pathName
this.audioRecorder.prepare(audioConfig)
} else {
Logger.error(this.tag, 'case failed, audioRecorder is null')
}
}
pause() {
Logger.info(this.tag, 'audioRecorder pause called')
if (typeof (this.audioRecorder) !== `undefined`) {
this.audioRecorder.pause()
}
}
resume() {
Logger.info(this.tag, 'audioRecorder resume called')
if (typeof (this.audioRecorder) !== `undefined`) {
this.audioRecorder.resume()
}
}
finish() {
if (typeof (this.audioRecorder) !== `undefined`) {
this.audioRecorder.stop()
}
}
}
\ No newline at end of file
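A usage sketch for RecordModel, assuming the recording target is passed as an fd:// uri and the import path mirrors the other model imports; prepare() is issued by startRecorder(), and start() fires from the 'prepare' callback registered in initAudioRecorder():

import RecordModel from '../model/RecordModel'

function startAudioRecord(fd: number) {
  const recordModel = new RecordModel()
  recordModel.initAudioRecorder(() => {
    console.info('recorder state changed')
  })
  recordModel.startRecorder(`fd://${fd}`)
  // later: recordModel.pause(), recordModel.resume(), recordModel.finish()
}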
// @ts-nocheck
import media from '@ohos.multimedia.media'
import fs from '@ohos.file.fs'
import Logger from './Logger'
const TAG = 'qlw play'
export class mediaPlay {
private avPlay: media.AVPlayer = undefined
private surfaceId: number = -1
public totalDuration: number
async getFileFd(name) {
let filesDir = globalThis.abilityContext.filesDir
let path = filesDir + '/' + name
let file = fs.openSync(path)
return file.fd
}
async getRawfileFd(name) {
// Return the whole RawFileDescriptor; its fd refers to the resource package, with offset/length locating the rawfile
let file = await globalThis.abilityContext.resourceManager.getRawFd(name)
return file
}
getCurrentTime() {
return 0
}
seek() {
}
async init(surfaceId?) {
let fd
if (surfaceId) {
this.surfaceId = surfaceId
fd = await this.getFileFd('test.mp4')
} else {
fd = await this.getFileFd('test.wav')
}
Logger.info(TAG, ` fd success : ${fd}`)
this.avPlay = await media.createAVPlayer()
this.setCallBack(this.avPlay)
this.avPlay.url = 'fd://' + fd
}
async initVideo(surfaceId) {
this.surfaceId = surfaceId
let fileDescriptor = await this.getRawfileFd('video.mp4')
Logger.info(TAG, `getRawfileFd success : ${JSON.stringify(fileDescriptor)}`)
this.avPlay = await media.createAVPlayer()
this.setCallBack(this.avPlay)
// A rawfile shares the package fd, so pass fd/offset/length via fdSrc instead of a plain fd:// url
this.avPlay.fdSrc = fileDescriptor
}
async Play() {
await this.avPlay.play()
}
setCallBack(AVPlayer) {
AVPlayer.on('stateChange', async (state, reason) => {
switch (state) {
case 'idle':
Logger.info(TAG, 'state idle start')
break;
case 'initialized':
Logger.info(TAG, 'state initialized start')
if (this.surfaceId !== -1) {
AVPlayer.surfaceId = this.surfaceId
}
await AVPlayer.prepare()
Logger.info(TAG, 'state initialized end')
break;
case 'prepared':
Logger.info(TAG, 'state prepared start')
await AVPlayer.play()
Logger.info(TAG, 'state prepared end')
break;
case 'playing':
Logger.info(TAG, 'state playing callback')
break;
case 'paused':
Logger.info(TAG, 'state paused callback')
break;
case 'completed':
await AVPlayer.stop()
await AVPlayer.release()
break;
case 'error':
Logger.info(TAG, 'state error callback')
break;
}
})
AVPlayer.on('error', (err) => {
Logger.info(TAG, `error callback err: ${err}, code: ${err.code}, message: ${err.message}`)
})
}
async release(){
if (this.avPlay){
await this.avPlay.release()
Logger.info(TAG, 'avplay release success')
}
}
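// encodeWAV() below builds the 44-byte canonical PCM WAV header in little-endian order.
// For the values used by AudioCapturer (44100 Hz, SAMPLE_FORMAT_S16LE -> 16 bits, 2 channels),
// the derived fields are: byte rate = 2 * 44100 * 16 / 8 = 176400 and block align = 2 * 16 / 8 = 4.
// The data length written here is a fixed estimate (sampleRate * 1000), not the actual recorded size.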
encodeWAV(sampleRateValue, sampleBitsValue, channelCountValue) {
let sampleRate = sampleRateValue;
let dataLen = sampleRate * 1000;
let sampleBits = sampleBitsValue * 8 + 8 // bits per sample, derived from the AudioSampleFormat enum value (SAMPLE_FORMAT_S16LE = 1 -> 16)
let channelCount = channelCountValue; // number of channels
let offset = 0;
let buffer = new ArrayBuffer(44);
let data = new DataView(buffer);
// RIFF chunk identifier
this.writeString(data, offset, 'RIFF');
offset += 4;
// Total bytes from the next address to the end of the file, i.e. file size - 8
data.setUint32(offset, 36 + dataLen, true);
offset += 4;
// WAVE format identifier
this.writeString(data, offset, 'WAVE');
offset += 4;
// "fmt " sub-chunk identifier
this.writeString(data, offset, 'fmt ');
offset += 4;
// fmt sub-chunk size, normally 0x10 = 16
data.setUint32(offset, 16, true);
offset += 4;
// Audio format (1 = PCM)
data.setUint16(offset, 1, true);
offset += 2;
// Number of channels
data.setUint16(offset, channelCount, true);
offset += 2;
// Sample rate (samples per second for each channel)
data.setUint32(offset, sampleRate, true);
offset += 4;
// Byte rate (average bytes per second) = channels x sample rate x bits per sample / 8
data.setUint32(offset, channelCount * sampleRate * (sampleBits / 8), true);
offset += 4;
// Block align (bytes per sample frame) = channels x bits per sample / 8
data.setUint16(offset, channelCount * (sampleBits / 8), true);
offset += 2;
// Bits per sample
data.setUint16(offset, sampleBits, true);
offset += 2;
// "data" sub-chunk identifier
this.writeString(data, offset, 'data');
offset += 4;
// Size of the sampled data, i.e. total size - 44 (a fixed estimate here, not the actual recorded length)
data.setUint32(offset, dataLen, true);
offset += 4;
return data;
}
writeString(data, offset, str) {
for (let i = 0; i < str.length; i++) {
data.setUint8(offset + i, str.charCodeAt(i));
}
}
}
export default new mediaPlay()
export function fillNum(num) {
if (num < 10) {
return '0' + num
}
return num.toString()
}
export function getTimeString(time) {
if (time == -1 || time == undefined) {
time = 0
}
let hour = Math.floor(time % (1000 * 60 * 60 * 24) / (1000 * 60 * 60))
let minute = Math.floor(time % (1000 * 60 * 60) / (1000 * 60))
let second = Math.floor(time % (1000 * 60) / 1000)
if (hour > 0) {
return `${fillNum(hour)}:${fillNum(minute)}:${fillNum(second)}`
}
return `${fillNum(minute)}:${fillNum(second)}`
}
\ No newline at end of file
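A short sketch tying the player wrapper to the time formatter above; the surface id is assumed to come from an XComponent and the import path mirrors the other model imports:

import mediaPlay, { getTimeString } from '../model/mediaPlay'

async function playBundledVideo(surfaceId: number) {
  // initVideo() sets the source; the stateChange handler registered in setCallBack()
  // then prepares the AVPlayer and starts playback automatically
  await mediaPlay.initVideo(surfaceId)
  console.info(`formatted position: ${getTimeString(65000)}`) // '01:05'
}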