提交 77cd4679 编写于 作者: L lwx1121892

new

Signed-off-by: Nlwx1121892 <liuxueqi3@huawei.com>
上级 e39cf93c
......@@ -22,6 +22,8 @@ group("multimedia") {
"audio/audio_js_standard/AudioEventManagement:audio_eventmanagement_js_hap",
"audio/audio_js_standard/AudioRendererChangeInfo:audio_rendererchangeInfo_js_hap",
"audio/audio_js_standard/AudioTonePlayer:audio_tone_player_js_hap",
"audio/audio_js_standard/audioInterrupt:audio_interrupt_js_hap",
"audio/audio_js_standard/audioInterruptRender:audioRenderInterrupt",
"audio/audio_js_standard/audioManager:audio_manager_js_hap",
"audio/audio_js_standard/audioRenderer:audio_renderer_js_hap",
"audio/audio_js_standard/audioVoip:audio_voip_js_hap",
......
......@@ -357,15 +357,26 @@ describe('audioCapturer', function () {
steps.shift();
let markReachParam = steps[0];
steps.shift();
audioCap.on('markReach', markReachParam, (position) => {
console.log(`${Tag} position: ${JSON.stringify(position)}`);
console.log(`${Tag} markReachParam: ${JSON.stringify(markReachParam)}`);
if (position == markReachParam) {
markReachState = 'success'
console.info(`${Tag} AudioRenderLog: mark reached: ${JSON.stringify(position)}`);
try {
audioCap.on('markReach', markReachParam, (position) => {
console.log(`${Tag} position: ${JSON.stringify(position)}`);
console.log(`${Tag} markReachParam: ${JSON.stringify(markReachParam)}`);
if (position == markReachParam) {
markReachState = 'success'
console.info(`${Tag} AudioRenderLog: mark reached: ${JSON.stringify(position)}`);
}
});
toNextStep(audioCap, steps, done);
}
catch (error) {
if (error.code == 401){
markReachState = 'invalid_failure';
toNextStep(audioCap, steps, done);
}else{
console.info("err" + error.code);
toNextStep(audioCap, steps, done);
}
});
toNextStep(audioCap, steps, done);
}
});
eventEmitter.on(OFF_MARK_REACH_EVENT, (audioCap, steps, done) => {
......@@ -380,15 +391,27 @@ describe('audioCapturer', function () {
steps.shift();
let periodReachParam = steps[0];
steps.shift();
audioCap.on('periodReach', periodReachParam, (position) => {
console.log(`${Tag} position: ${JSON.stringify(position)}`);
console.log(`${Tag} periodReachParam: ${JSON.stringify(periodReachParam)}`);
if (position == periodReachParam) {
periodReachState = 'success'
console.info(`${Tag} AudioRenderLog: mark reached: ${JSON.stringify(position)}`);
try {
audioCap.on('periodReach', periodReachParam, (position) => {
console.log(`${Tag} position: ${JSON.stringify(position)}`);
console.log(`${Tag} periodReachParam: ${JSON.stringify(periodReachParam)}`);
if (position == periodReachParam) {
periodReachState = 'success'
console.info(`${Tag} AudioRenderLog: mark reached: ${JSON.stringify(position)}`);
}
});
toNextStep(audioCap, steps, done);
}
catch (error) {
if (error.code == 401){
markReachState = 'invalid_failure';
toNextStep(audioCap, steps, done);
}else{
console.info("err" + error.code);
toNextStep(audioCap, steps, done);
}
});
toNextStep(audioCap, steps, done);
}
});
eventEmitter.on(OFF_PERIODR_REACH_EVENT, (audioCap, steps, done) => {
console.log(`${Tag} emit: ${JSON.stringify(OFF_PERIODR_REACH_EVENT)}`);
......@@ -420,8 +443,7 @@ describe('audioCapturer', function () {
beforeAll(async function () {
console.info(`${Tag} AudioFrameworkTest: beforeAll: Prerequisites at the test suite level`);
let permissionName1 = 'ohos.permission.MICROPHONE';
let permissionName2 = 'ohos.permission.MANAGE_AUDIO_CONFIG';
let permissionNameList = [permissionName1,permissionName2];
let permissionNameList = [permissionName1];
let appName = 'ohos.acts.multimedia.audio.audiocapturer';
await audioTestBase.applyPermission(appName, permissionNameList);
await sleep(100);
......@@ -441,9 +463,6 @@ describe('audioCapturer', function () {
console.info(`${Tag} AudioFrameworkTest: afterAll: Test suite-level cleanup condition`);
})
async function recPromise(AudioCapturerOptions, done) {
let audioCap;
try {
......@@ -2101,12 +2120,20 @@ describe('audioCapturer', function () {
return done();
}
try {
await audioCapPromise.start();
console.log(`${Tag} start ok`);
} catch (err) {
console.log(`${Tag} start err: ${JSON.stringify(err)}`);
expect(false).assertTrue();
}
try {
await audioCapPromise.stop();
console.log(`${Tag} stop ok`);
expect(true).assertTrue();
} catch (err) {
console.log(`${Tag} stop err: ${JSON.stringify(err)}`);
expect(false).assertTrue();
expect(true).assertTrue();
}
done();
})
......@@ -2440,13 +2467,8 @@ describe('audioCapturer', function () {
capturerInfo: audioCapturerInfo44100,
}
await audio.createAudioCapturer(AudioCapturerOptionsInvalid).then(function (data) {
if (data == undefined) {
console.info(`${Tag} AudioFrameworkRecLog: AudioCapturer Created : Unsuccess : ${JSON.stringify(data)}`);
expect(true).assertTrue();
} else {
console.info(`${Tag} AudioFrameworkRecLog: AudioCapturer Created : Success: ${JSON.stringify(data)}`);
expect(false).assertTrue();
}
console.info(`${Tag} AudioFrameworkRecLog: AudioCapturer Created : Success: ${JSON.stringify(data)}`);
expect(false).assertTrue();
}).catch((err) => {
console.info(`${Tag} AudioFrameworkRecLog: AudioCapturer Created : ERROR : ${JSON.stringify(err.message)}`);
expect(true).assertTrue();
......@@ -2797,7 +2819,7 @@ describe('audioCapturer', function () {
let audioCap = null;
let markReachParam = 0;
markReachState = 'invalid_failure';
markReachState = 'fail';
let mySteps = [CREATE_EVENT, AudioCapturerOptions, MARK_REACH_EVENT, markReachParam, START_EVENT, GET_BUFFERSIZE_EVENT, READ_EVENT, OFF_MARK_REACH_EVENT, RELEASE_EVENT, END_EVENT];
eventEmitter.emit(mySteps[0], audioCap, mySteps, done);
})
......@@ -2861,7 +2883,7 @@ describe('audioCapturer', function () {
let audioCap = null;
let markReachParam = -2;
markReachState = 'invalid_failure';
markReachState = 'fail';
let mySteps = [CREATE_EVENT, AudioCapturerOptions, MARK_REACH_EVENT, markReachParam, START_EVENT, GET_BUFFERSIZE_EVENT, READ_EVENT, OFF_MARK_REACH_EVENT, RELEASE_EVENT, END_EVENT];
eventEmitter.emit(mySteps[0], audioCap, mySteps, done);
})
......@@ -2957,7 +2979,7 @@ describe('audioCapturer', function () {
let audioCap = null;
let periodReachParam = -2;
periodReachState = 'invalid_failure';
periodReachState = 'fail';
let mySteps = [CREATE_EVENT, AudioCapturerOptions, PERIODR_REACH_EVENT, periodReachParam, START_EVENT, GET_BUFFERSIZE_EVENT, READ_EVENT, OFF_PERIODR_REACH_EVENT, RELEASE_EVENT, END_EVENT];
eventEmitter.emit(mySteps[0], audioCap, mySteps, done);
})
......@@ -2993,4 +3015,89 @@ describe('audioCapturer', function () {
let mySteps = [CREATE_EVENT, AudioCapturerOptions, PERIODR_REACH_EVENT, periodReachParam, START_EVENT, GET_BUFFERSIZE_EVENT, READ_EVENT, OFF_PERIODR_REACH_EVENT, RELEASE_EVENT, END_EVENT];
eventEmitter.emit(mySteps[0], audioCap, mySteps, done);
})
/**
*@tc.number : SUB_MULTIMEDIA_AUDIO_CAPTURER_GET_AUDIO_STREAM_ID_0100
*@tc.name : AudioCapturer - getAudioStreamId
*@tc.desc : AudioCapturer - getAudioStreamId
*@tc.size : MEDIUM
*@tc.type : Function
*@tc.level : Level 0
*/
it('SUB_MULTIMEDIA_AUDIO_CAPTURER_GET_AUDIO_STREAM_ID_0100', 0, async function (done) {
let AudioStreamInfo = {
samplingRate: audio.AudioSamplingRate.SAMPLE_RATE_44100,
channels: audio.AudioChannel.CHANNEL_2,
sampleFormat: audio.AudioSampleFormat.SAMPLE_FORMAT_S16LE,
encodingType: audio.AudioEncodingType.ENCODING_TYPE_RAW
}
let AudioCapturerInfo = {
source: audio.SourceType.SOURCE_TYPE_MIC,
capturerFlags: 0
}
let AudioCapturerOptions = {
streamInfo: AudioStreamInfo,
capturerInfo: AudioCapturerInfo
}
let audioCapPromise;
try {
audioCapPromise = await audio.createAudioCapturer(AudioCapturerOptions);
await audioCapPromise.getAudioStreamId((err, data) => {
if (err) {
console.info(`${Tag}: getAudioStreamId : ERROR : code: ${err.code}, mesage: ${err.message}`);
expect(false).assertTrue();
} else {
expect(true).assertTrue();
console.info(`${Tag}: getAudioStreamId : Converted: ${data}`);
}
});
await audioCapPromise.release();
} catch (err) {
console.log(`${Tag} err: ${JSON.stringify(err)}`);
expect(false).assertTrue();
}
done();
})
/**
*@tc.number : SUB_MULTIMEDIA_AUDIO_CAPTURER_GET_AUDIO_STREAM_ID_0200
*@tc.name : AudioCapturer - getAudioStreamId
*@tc.desc : AudioCapturer - getAudioStreamId
*@tc.size : MEDIUM
*@tc.type : Function
*@tc.level : Level 0
*/
it('SUB_MULTIMEDIA_AUDIO_CAPTURER_GET_AUDIO_STREAM_ID_0200', 0, async function (done) {
let AudioStreamInfo = {
samplingRate: audio.AudioSamplingRate.SAMPLE_RATE_44100,
channels: audio.AudioChannel.CHANNEL_1,
sampleFormat: audio.AudioSampleFormat.SAMPLE_FORMAT_S16LE,
encodingType: audio.AudioEncodingType.ENCODING_TYPE_RAW
}
let AudioCapturerInfo = {
source: audio.SourceType.SOURCE_TYPE_MIC,
capturerFlags: 0
}
let AudioCapturerOptions = {
streamInfo: AudioStreamInfo,
capturerInfo: AudioCapturerInfo
}
try {
let audioCapPromise = await audio.createAudioCapturer(AudioCapturerOptions);
await audioCapPromise.getAudioStreamId().then((data)=>{
expect(true).assertTrue();
console.info(`${Tag}: getAudioStreamId : Converted: ${data}`);
}).catch((err) => {
expect(true).assertTrue();
console.error(`${Tag}: getAudioStreamId : ERROR : ${err}`);
});
await audioCapPromise.release();
} catch (err) {
console.log(`${Tag} error code: ${err.code} ,message:${err.message}`);
expect(false).assertTrue();
}
done();
})
})
......@@ -28,29 +28,14 @@ describe('audioCapturerChange', function () {
beforeAll(async function () {
console.info(`AudioFrameworkTest: beforeAll: Prerequisites at the test suite level`);
let permissionName1 = 'ohos.permission.MICROPHONE';
let permissionName2 = 'ohos.permission.MANAGE_AUDIO_CONFIG';
let permissionNameList = [permissionName1,permissionName2];
let permissionNameList = [permissionName1];
let appName = 'ohos.acts.multimedia.audio.audiocapturerchangeInfo';
await audioTestBase.applyPermission(appName, permissionNameList);
await sleep(100);
console.info(`AudioFrameworkTest: beforeAll: END`);
await sleep(100);
await audioManager.getStreamManager().then((data) => {
audioStreamManager = data;
console.info(`${Tag}: Get AudioStream Manager : Success `);
}).catch((err) => {
console.info(`${Tag}: Get AudioStream Manager : ERROR :${err.message}`);
});
audioManager.getStreamManager((err, data) => {
if (err) {
console.error(`${Tag}: Get AudioStream Manager : ERROR :${err.message}`);
}
else {
audioStreamManagerCB = data;
console.info(`${Tag}: Get AudioStream Manager : Success `);
}
});
audioStreamManager = audioManager.getStreamManager();
audioStreamManagerCB = audioManager.getStreamManager();
await sleep(1000);
console.info(`${Tag}: beforeAll: END`);
})
......
......@@ -25,24 +25,9 @@ describe('audioRendererChange', function () {
console.info(`${Tag} : Create AudioManger Object JS Framework`);
beforeAll(async function () {
await AUDIOMANAGER.getStreamManager().then(function (data) {
audioStreamManager = data;
console.info(`${Tag} : Get AudioStream Manager : Success `);
}).catch((err) => {
console.info(`${Tag} : Get AudioStream Manager : ERROR : ${err.message}`);
});
AUDIOMANAGER.getStreamManager((err, data) => {
if (err) {
console.error(`${Tag} : Get AudioStream Manager : ERROR : ${err.message}`);
}
else {
audioStreamManagerCB = data;
console.info(`${Tag} : Get AudioStream Manager : Success `);
}
});
audioStreamManager = AUDIOMANAGER.getStreamManager();
audioStreamManagerCB = AUDIOMANAGER.getStreamManager();
await sleep(1000);
console.info(`${Tag} : beforeAll: Prerequisites at the test suite level`);
})
......
# Copyright (C) 2021 Huawei Device Co., Ltd.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import("//test/xts/tools/build/suite.gni")
# XTS suite HAP for the ACTS audio-interrupt JS tests.
# Produces ActsAudioInterruptJsTest.hap (installed by Test.json together with
# the assist HAP audioRenderInterrupt.hap).
ohos_js_hap_suite("audio_interrupt_js_hap") {
hap_profile = "./src/main/config.json"
deps = [
":audio_interrupt_js_assets",
":audio_interrupt_resources",
]
certificate_profile = "./signature/openharmony_sx.p7b"
hap_name = "ActsAudioInterruptJsTest"
subsystem_name = "multimedia"
part_name = "multimedia_audio_framework"
}
# JS sources packaged into the HAP.
ohos_js_assets("audio_interrupt_js_assets") {
source_dir = "./src/main/js/default"
}
# Resource files (strings, media) packaged into the HAP.
ohos_resources("audio_interrupt_resources") {
sources = [ "./src/main/resources" ]
hap_profile = "./src/main/config.json"
}
{
"description": "Configuration for audio manager Tests",
"driver": {
"type": "JSUnitTest",
"test-timeout": "5500000",
"package": "ohos.acts.multimedia.audio.audiointerrupt",
"shell-timeout": "60000"
},
"kits": [
{
"test-file-name": [
"ActsAudioInterruptJsTest.hap",
"audioRenderInterrupt.hap"
],
"type": "AppInstallKit",
"cleanup-apps": true
},
{
"type": "ShellKit",
"run-command": [
"mkdir -p /data/app/el2/100/base/ohos.acts.multimedia.audio.audiointerrupt/haps/entry/files/",
"mkdir -p /data/app/el2/100/base/com.example.audiorenderinterrupt/haps/entry/files/",
"chmod 777 -R /data/app/el2/100/base/ohos.acts.multimedia.audio.audiointerrupt/haps/entry",
"chmod 777 -R /data/app/el2/100/base/com.example.audiorenderinterrupt/haps/entry"
],
"cleanup-apps": true
},
{
"type": "PushKit",
"pre-push": [],
"push": [
"./resource/audio/audioManager/StarWars10s-2C-48000-4SW.wav ->/data/app/el2/100/base/ohos.acts.multimedia.audio.audiointerrupt/haps/entry/files/",
"./resource/audio/audioManager/StarWars10s-2C-48000-4SW.wav ->/data/app/el2/100/base/com.example.audiorenderinterrupt/haps/entry/files/"
]
}
]
}
\ No newline at end of file
{
"app": {
"apiVersion": {
"compatible": 6,
"releaseType": "Beta1",
"target": 7
},
"vendor": "acts",
"bundleName": "ohos.acts.multimedia.audio.audiointerrupt",
"version": {
"code": 1000000,
"name": "1.0.0"
}
},
"deviceConfig": {
"default": {
"debug": true
}
},
"module": {
"abilities": [
{
"iconId": 16777218,
"skills": [
{
"entities": [
"entity.system.home"
],
"actions": [
"action.system.home"
]
}
],
"descriptionId": 16777217,
"visible": true,
"labelId": 16777216,
"icon": "$media:icon",
"name": "ohos.acts.multimedia.audio.audiointerrupt.MainAbility",
"description": "$string:mainability_description",
"label": "$string:entry_MainAbility",
"type": "page",
"homeAbility": true,
"launchType": "standard"
}
],
"deviceType": [
"phone",
"tablet",
"tv",
"wearable"
],
"mainAbility": "ohos.acts.multimedia.audio.audiointerrupt.MainAbility",
"distro": {
"moduleType": "entry",
"installationFree": false,
"deliveryWithInstall": true,
"moduleName": "entry"
},
"package": "ohos.acts.multimedia.audio.audiointerrupt",
"name": ".MyApplication",
"js": [
{
"pages": [
"pages/index/index"
],
"name": "default",
"window": {
"designWidth": 720,
"autoDesignWidth": true
}
}
],
"reqPermissions": [
{
"name": "ohos.permission.GET_BUNDLE_INFO",
"reason": "use ohos.permission.GET_BUNDLE_INFO"
},
{
"name": "ohos.permission.GET_BUNDLE_INFO_PRIVILEGED",
"reason": "use ohos.permission.GET_BUNDLE_INFO_PRIVILEGED"
},
{
"name": "ohos.permission.GRANT_SENSITIVE_PERMISSIONS",
"reason": "use ohos.permission.GRANT_SENSITIVE_PERMISSIONS"
},
{
"name": "ohos.permission.REVOKE_SENSITIVE_PERMISSIONS",
"reason": "use ohos.permission.REVOKE_SENSITIVE_PERMISSIONS"
},
{
"name": "ohos.permission.MEDIA_LOCATION",
"reason": "use ohos.permission.MEDIA_LOCATION"
},
{
"name": "ohos.permission.READ_MEDIA",
"reason": "use ohos.permission.READ_MEDIA"
},
{
"name": "ohos.permission.WRITE_MEDIA",
"reason": "use ohos.permission.WRITE_MEDIA"
},
{
"name": "ohos.permission.MICROPHONE",
"reason": "use ohos.permission.MICROPHONE"
},
{
"name": "ohos.permission.ACCESS_NOTIFICATION_POLICY",
"reason": "use ohos.permission.ACCESS_NOTIFICATION_POLICY"
},
{
"name": "ohos.permission.MODIFY_AUDIO_SETTINGS",
"reason": "use ohos.permission.MODIFY_AUDIO_SETTINGS"
},
{
"name": "ohos.permission.MANAGE_AUDIO_CONFIG",
"reason": "use ohos.permission.MANAGE_AUDIO_CONFIG"
}
]
}
}
\ No newline at end of file
/*
* Copyright (C) 2021 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import {Core, ExpectExtend} from 'deccjsunit/index'
export default {
    // Application lifecycle entry: boots the deccjsunit harness and runs the
    // suite list as soon as the ability is created.
    onCreate() {
        console.info('AceApplication onCreate');
        const testCore = Core.getInstance();
        const expectService = new ExpectExtend({ 'id': 'extend' });
        testCore.addService('expect', expectService);
        testCore.init();
        const configService = testCore.getDefaultService('config');
        // Per-case timeout (ms) picked up by the config service below.
        this.timeout = 60000;
        configService.setConfig(this);
        require('../test/List.test');
        testCore.execute();
    },
    onDestroy() {
        console.info('AceApplication onDestroy');
    },
};
{
"strings": {
"hello": "Hello",
"world": "World"
}
}
\ No newline at end of file
{
"strings": {
"hello": "您好",
"world": "世界"
}
}
\ No newline at end of file
/* Full-screen column layout centering the page content. */
.container {
flex-direction: column;
justify-content: center;
align-items: center;
width: 100%;
height: 100%;
}
/* Default (phone portrait) title style. */
.title {
font-size: 40px;
color: #000000;
opacity: 0.9;
}
/* Larger title on landscape tablets. */
@media screen and (device-type: tablet) and (orientation: landscape) {
.title {
font-size: 100px;
}
}
/* Wearable: small white title for dark backgrounds. */
@media screen and (device-type: wearable) {
.title {
font-size: 28px;
color: #FFFFFF;
}
}
/* TV: wallpaper background and large white title. */
@media screen and (device-type: tv) {
.container {
background-image: url("/common/images/Wallpaper.png");
background-size: cover;
background-repeat: no-repeat;
background-position: center;
}
.title {
font-size: 100px;
color: #FFFFFF;
}
}
/* Phone landscape: intermediate title size. */
@media screen and (device-type: phone) and (orientation: landscape) {
.title {
font-size: 60px;
}
}
<div class="container">
<text class="title">
{{ $t('strings.hello') }} {{ title }}
</text>
</div>
/*
* Copyright (C) 2021 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import {Core, ExpectExtend} from 'deccjsunit/index'
export default {
    // View model for the index page displayed while the interrupt tests run.
    data: {
        // Localized page title, resolved in onInit.
        title: ""
    },
    onInit() {
        this.title = this.$t('strings.world');
    },
    onShow() {
        // The deccjsunit harness is bootstrapped from app.js onCreate; a
        // duplicate, fully commented-out bootstrap that used to live here was
        // dead code and has been removed.
        console.info('onShow finish')
    },
    onReady() {
    },
}
\ No newline at end of file
/*
* Copyright (C) 2021 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http:// www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import audio from '@ohos.multimedia.audio';
import ability from '@ohos.ability.featureAbility'
import fileio from '@ohos.fileio';
import featureAbility from '@ohos.ability.featureAbility'
import { describe, beforeAll, beforeEach, afterEach, afterAll, it, expect } from 'deccjsunit/index';
import avSession from '@ohos.multimedia.avsession';
describe('audioInterrupt', function () {
console.info('audioRenderInterrupt: Create AudioManger Object JS Framework');
let fdRead;
let readPath;
console.info('AudioFrameworkRenderLog: Create AudioManger Object JS Framework');
let fdPath;
let filePath;
// Resolve after `ms` milliseconds; awaited between test steps to pace playback.
function sleep(ms) {
    return new Promise((resolve) => {
        setTimeout(resolve, ms);
    });
}
// Create an AVSession for this test app so it participates in audio focus.
// The created session handle is only logged; nothing else reads it here.
async function getcreateAVSession() {
    let tag = "createNewSession";
    let type = "audio";
    try {
        let context1 = featureAbility.getContext();
        let av = await avSession.createAVSession(context1, tag, type);
        console.info('av is ' + JSON.stringify(av));
    } catch (err) {
        // Guard: not every call site awaits this function, so a rejected
        // createAVSession must not escape as an unhandled promise rejection.
        console.info('createAVSession failed: ' + JSON.stringify(err));
    }
}
// Resolve `pathName` inside the app files directory and open it, publishing
// the results through the module-level fdPath/fdRead/filePath variables.
// NOTE(review): `done` is kept for call-site compatibility but is never used.
async function getFdRead(pathName, done) {
    let context = await featureAbility.getContext();
    console.info("case0 context is " + context);
    // Original mixed `await` with `.then()`; use plain await for clarity.
    const dir = await context.getFilesDir();
    console.info("case1 getFilesDir is path " + dir);
    filePath = dir + '/' + pathName;
    console.info('case4 filePath is ' + filePath);
    fdPath = 'fd://';
    try {
        const fdNumber = await fileio.open(filePath);
        fdPath = fdPath + '' + fdNumber;
        fdRead = fdNumber;
        console.info('[fileIO]case open fd success,fdPath is ' + fdPath);
        console.info('[fileIO]case open fd success,fdRead is ' + fdRead);
    } catch (err) {
        // Matches the original best-effort behavior: log and continue with
        // fdRead left undefined (the original had both a rejection callback
        // and a redundant .catch doing the same thing).
        console.info('[fileIO]case open fd failed');
    }
}
// Suite-level setup: create the AVSession before any case runs.
beforeAll(async function () {
    console.info('audioRenderInterrupt: beforeAll: Prerequisites at the test suite level');
    // Await the session creation so a failure surfaces during setup instead of
    // becoming an unhandled promise rejection (the original fired and forgot).
    await getcreateAVSession();
})
// Short settle delay before each case.
beforeEach(async function () {
    console.info('audioRenderInterrupt: beforeEach: Prerequisites at the test case level');
    await sleep(100);
})
// Short settle delay after each case.
afterEach(async function () {
    console.info('audioRenderInterrupt: afterEach: Test case-level clearance conditions');
    await sleep(100);
})
// Suite-level teardown (logging only; no resources to release here).
afterAll(function () {
    console.info('audioRenderInterrupt: afterAll: Test suite-level cleanup condition');
})
/**
 * SUB_AUDIO_INTERRUPT_001: render a MUSIC-usage PCM stream, launch the assist
 * HAP (com.example.audiorenderinterrupt, which plays a ringtone stream) one
 * third of the way through playback, and verify this renderer receives an
 * interrupt event that deactivates it with hint 3 (presumably
 * INTERRUPT_HINT_PAUSE -- confirm against the audio API enum).
 */
it('SUB_AUDIO_INTERRUPT_001', 0, async function (done) {
// 48 kHz / stereo / S32LE raw PCM -- matches the pushed StarWars10s-2C-48000-4SW.wav payload.
var AudioStreamInfo = {
samplingRate: audio.AudioSamplingRate.SAMPLE_RATE_48000,
channels: audio.AudioChannel.CHANNEL_2,
sampleFormat: audio.AudioSampleFormat.SAMPLE_FORMAT_S32LE,
encodingType: audio.AudioEncodingType.ENCODING_TYPE_RAW
}
// MUSIC/MEDIA usage: the interruptible side of the focus contest.
var AudioRendererInfo = {
content: audio.ContentType.CONTENT_TYPE_MUSIC,
usage: audio.StreamUsage.STREAM_USAGE_MEDIA,
rendererFlags: 0
}
var AudioRendererOptions = {
streamInfo: AudioStreamInfo,
rendererInfo: AudioRendererInfo
}
readPath = 'StarWars10s-2C-48000-4SW.wav';
// Populates module-level fdRead/filePath used below.
await getFdRead(readPath, done);
var audioRen;
await audio.createAudioRenderer(AudioRendererOptions).then(async function (data) {
audioRen = data;
console.info('AudioFrameworkRenderLog: AudioRender Created : Success : Stream Type: SUCCESS');
}).catch((err) => {
console.info('AudioFrameworkRenderLog: AudioRender Created : ERROR : ' + err.message);
});
// NOTE(review): if creation failed above, audioRen is undefined and this throws.
console.info('AudioFrameworkRenderLog: AudioRenderer : STATE : ' + audioRen.state);
// Interrupt outcome captured by the 'interrupt' callback; checked in the
// write loop and asserted at the end.
let activated = false;
let InterruptHint = 0;
await sleep(7000);
let audioManager = audio.getAudioManager();
// Subscription filter: streamUsage 1 / contentType 0 -- presumably
// STREAM_USAGE_MEDIA / CONTENT_TYPE_UNKNOWN; verify against the audio enums.
let interAudioInterrupt = {
streamUsage: 1,
contentType: 0,
pauseWhenDucked: true
};
audioManager.on('interrupt', interAudioInterrupt, (InterruptAction) => {
console.info('come in MasterHap interrupt');
if (InterruptAction.actionType != undefined && InterruptAction.actionType != null) {
console.info('An event to gain the audio focus starts.');
console.info(`Focus gain event: ${InterruptAction} `);
// Record the latest interrupt state; the write loop polls these.
activated = InterruptAction.activated;
InterruptHint = InterruptAction.hint
console.info('activated is :' + activated);
console.info('InterruptHint is :' + InterruptHint);
console.info('InterruptAction.actionType is :' + InterruptAction.actionType);
}
});
await audioRen.start().then(() => {
console.info('AudioFrameworkRenderLog: renderInstant started :SUCCESS ');
}).catch((err) => {
console.info('AudioFrameworkRenderLog: renderInstant start :ERROR : ' + err.message);
});
console.info('AudioFrameworkRenderLog: AudioRenderer : STATE : ' + audioRen.state);
var bufferSize;
await audioRen.getBufferSize().then(async function (data) {
console.info('AudioFrameworkRenderLog: getBufferSize :SUCCESS ' + data);
bufferSize = data;
}).catch((err) => {
console.info('AudioFrameworkRenderLog: getBufferSize :ERROR : ' + err.message);
});
// Stream the WAV payload: skip the 44-byte RIFF header, then feed
// bufferSize chunks to the renderer.
let ss = fileio.fdopenStreamSync(fdRead, 'r');
console.info('AudioFrameworkRenderLog:case 2:AudioFrameworkRenderLog: File Path: ' + ss);
let discardHeader = new ArrayBuffer(44);
console.info('AudioFrameworkRenderLog:case 2-1:AudioFrameworkRenderLog: File Path: ');
ss.readSync(discardHeader);
console.info('AudioFrameworkRenderLog:case 2-2:AudioFrameworkRenderLog: File Path: ');
let totalSize = fileio.fstatSync(fdRead).size;
console.info('AudioFrameworkRenderLog:case 3 : AudioFrameworkRenderLog: File totalSize size: ' + totalSize);
totalSize = totalSize - 44;
console.info('AudioFrameworkRenderLog: File size : Removing header: ' + totalSize);
let rlen = 0;
// count ensures the assist HAP is launched only once.
let count = 0;
while (rlen < totalSize) {
// Stop writing as soon as the interrupt deactivated us with hint 3.
if (activated == false && InterruptHint == 3) {
console.info('audio was interrupt')
break;
}
let buf = new ArrayBuffer(bufferSize);
rlen += ss.readSync(buf);
console.info('MasterHap:BufferAudioFramework: bytes read from =================== file: ' + rlen);
// One third into the file, launch the competing ringtone HAP.
if (rlen >= totalSize / 3 && count == 0) {
count++;
await ability.startAbilityForResult(
{
want:
{
bundleName: "com.example.audiorenderinterrupt",
abilityName: "com.example.entry.MainAbility",
},
}
).then((data) => {
console.info("==========================>startAbility Success=======================>" + JSON.stringify(data));
})
.catch((err) => {
console.info("==========================>startAbility Fail=======================>" + JSON.stringify(err));
});
}
await audioRen.write(buf);
}
console.info('AudioFrameworkRenderLog: Renderer after read');
await audioRen.drain().then(async function () {
console.info('AudioFrameworkRenderLog: Renderer drained : SUCCESS');
}).catch((err) => {
console.error('AudioFrameworkRenderLog: Renderer drain: ERROR : ' + err.message);
});
await audioRen.release().then(async function () {
console.info('AudioFrameworkRenderLog: Renderer release : SUCCESS');
}).catch((err) => {
console.info('AudioFrameworkRenderLog: Renderer release :ERROR : ' + err.message);
});
// Pass only if an interrupt deactivated this stream with hint 3.
expect(activated == false && InterruptHint == 3).assertTrue();
done();
})
})
\ No newline at end of file
/**
* Copyright (C) 2021 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
require('./AudioInterrupt.test.js')
{
"string": [
{
"name": "entry_MainAbility",
"value": "entry_MainAbility"
},
{
"name": "mainability_description",
"value": "JS_Empty Ability"
}
]
}
\ No newline at end of file
# Copyright (c) 2021 Huawei Device Co., Ltd.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import("//test/xts/tools/build/suite.gni")
# Aggregation group for the assist HAP that provokes render interrupts in the
# audio-interrupt test suite.
group("audioRenderInterrupt") {
testonly = true
# The assist HAP is only built for standard-system products.
if (is_standard_system) {
deps = [ "entry:audioRenderInterrupt" ]
}
}
# Copyright (c) 2022 Huawei Device Co., Ltd.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import("//test/xts/tools/build/suite.gni")
# Assist HAP that plays a competing ringtone stream to interrupt the main
# test's renderer (installed alongside ActsAudioInterruptJsTest.hap).
ohos_hap_assist_suite("audioRenderInterrupt") {
  hap_profile = "./src/main/config.json"
  testonly = true
  deps = [
    ":audioRenderInterrupt_resources",
    ":audioRenderInterrupt_ts_assets",
  ]
  ets2abc = true
  certificate_profile = "../signature/openharmony_sx.p7b"
  # hap_name was previously assigned twice with the same value; GN rejects
  # no-effect reassignments, so keep a single assignment.
  hap_name = "audioRenderInterrupt"
  subsystem_name = "multimedia"
  part_name = "multimedia_audio_framework"
}
# ETS sources for the assist ability.
ohos_js_assets("audioRenderInterrupt_ts_assets") {
  source_dir = "./src/main/ets/MainAbility"
}
# Resources packaged with the assist HAP.
ohos_resources("audioRenderInterrupt_resources") {
  sources = [ "./src/main/resources" ]
  hap_profile = "./src/main/config.json"
}
{
"app": {
"bundleName": "com.example.audiorenderinterrupt",
"vendor": "huawei",
"version": {
"code": 1000000,
"name": "1.0.0"
},
"apiVersion": {
"compatible": 9,
"target": 9
}
},
"deviceConfig": {},
"module": {
"package": "com.example.entry",
"name": ".entry",
"mainAbility": ".MainAbility",
"deviceType": [
"phone"
],
"distro": {
"deliveryWithInstall": true,
"moduleName": "entry",
"moduleType": "entry",
"installationFree": false
},
"abilities": [
{
"skills": [
{
"entities": [
"entity.system.home"
],
"actions": [
"action.system.home"
]
}
],
"orientation": "unspecified",
"formsEnabled": false,
"name": ".MainAbility",
"srcLanguage": "ets",
"srcPath": "MainAbility",
"icon": "$media:icon",
"description": "$string:MainAbility_desc",
"label": "$string:MainAbility_label",
"type": "page",
"visible": true,
"launchType": "standard"
}
],
"js": [
{
"mode": {
"syntax": "ets",
"type": "pageAbility"
},
"pages": [
"pages/index"
],
"name": ".MainAbility",
"window": {
"designWidth": 720,
"autoDesignWidth": false
}
}
]
}
}
\ No newline at end of file
/*
* Copyright (c) 2022 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import audio from '@ohos.multimedia.audio';
import fileio from '@ohos.fileio';
import featureAbility from '@ohos.ability.featureAbility'
let fdRead;
let readPath;
let fdPath;
let filePath;
let TAG = 'InterruptHap:';
var TAG1 = "Fa:SupportFunctionThree:MainAbility:";
var listPush1 = "Fa_SupportFunctionThree_MainAbility_";
var lifeList = [];
export default {
    /**
     * FA lifecycle entry point.
     * Opens a bundled wav file, plays it through an AudioRenderer configured with
     * ringtone usage (so it contends for audio focus), registers an 'interrupt'
     * listener, then drains and terminates itself with resultCode 1111 so the
     * launching test ability can observe the interrupt flow.
     */
    async onCreate() {
        console.log(TAG1 + '=============================================================================> onCreate');
        // Resolve after `ms` milliseconds — used to keep the renderer alive briefly.
        function sleep(ms) {
            return new Promise(resolve => setTimeout(resolve, ms));
        }
        // Opens <filesDir>/<pathName> and stores the raw fd in the module-level
        // `fdRead` (and the fd:// URI in `fdPath`) for the read loop below.
        async function getFdRead(pathName) {
            let context = await featureAbility.getContext();
            console.info(TAG + "case0 context is " + context);
            await context.getFilesDir().then((data) => {
                console.info(TAG + "case1 getFilesDir is path " + data);
                filePath = data + '/' + pathName;
                console.info(TAG + 'case4 filePath is ' + filePath);
            })
            fdPath = 'fd://';
            await fileio.open(filePath).then((fdNumber) => {
                fdPath = fdPath + '' + fdNumber;
                fdRead = fdNumber;
                console.info(TAG + '[fileIO]case open fd success,fdPath is ' + fdPath);
                console.info(TAG + '[fileIO]case open fd success,fdRead is ' + fdRead);
            }, (err) => {
                console.info(TAG + '[fileIO]case open fd failed');
            }).catch((err) => {
                console.info(TAG + '[fileIO]case catch open fd failed');
            });
        }
        // 48 kHz / stereo / S32LE raw PCM matches the wav payload read below.
        var AudioStreamInfo = {
            samplingRate: audio.AudioSamplingRate.SAMPLE_RATE_48000,
            channels: audio.AudioChannel.CHANNEL_2,
            sampleFormat: audio.AudioSampleFormat.SAMPLE_FORMAT_S32LE,
            encodingType: audio.AudioEncodingType.ENCODING_TYPE_RAW
        }
        // Ringtone content/usage so this stream participates in focus interruption.
        var AudioRendererInfo = {
            content: audio.ContentType.CONTENT_TYPE_RINGTONE,
            usage: audio.StreamUsage.STREAM_USAGE_NOTIFICATION_RINGTONE,
            rendererFlags: 0
        }
        var AudioRendererOptions = {
            streamInfo: AudioStreamInfo,
            rendererInfo: AudioRendererInfo
        }
        readPath = 'StarWars10s-2C-48000-4SW.wav';
        // BUG FIX: getFdRead is async; without awaiting it, fdRead may still be
        // undefined when fdopenStreamSync()/fstatSync() run further down.
        await getFdRead(readPath);
        var audioRen;
        await audio.createAudioRenderer(AudioRendererOptions).then(async function (data) {
            audioRen = data;
            console.info(TAG + 'AudioFrameworkRenderLog: AudioRender Created : Success : Stream Type: SUCCESS' + audioRen);
        }).catch((err) => {
            console.info(TAG + 'AudioFrameworkRenderLog: AudioRender Created : ERROR : ' + err.message);
        });
        console.info(TAG + 'AudioFrameworkRenderLog: AudioRenderer : STATE : ' + audioRen.state);
        await audioRen.start().then(async function () {
            console.info(TAG + 'AudioFrameworkRenderLog: renderInstant started :SUCCESS ');
        }).catch((err) => {
            console.info(TAG + 'AudioFrameworkRenderLog: renderInstant start :ERROR : ' + err.message);
        });
        console.info(TAG + 'AudioFrameworkRenderLog: AudioRenderer : STATE : ' + audioRen.state);
        var bufferSize;
        await audioRen.getBufferSize().then(async function (data) {
            console.info(TAG + 'AudioFrameworkRenderLog: getBufferSize :SUCCESS ' + data);
            bufferSize = data;
        }).catch((err) => {
            console.info(TAG + 'AudioFrameworkRenderLog: getBufferSize :ERROR : ' + err.message);
        });
        let ss = fileio.fdopenStreamSync(fdRead, 'r');
        console.info(TAG + 'AudioFrameworkRenderLog:case 2:AudioFrameworkRenderLog: File Path: ' + ss);
        // Skip the 44-byte canonical wav header; only raw PCM goes to the renderer.
        let discardHeader = new ArrayBuffer(44);
        console.info(TAG + 'AudioFrameworkRenderLog:case 2-1:AudioFrameworkRenderLog: File Path: ');
        ss.readSync(discardHeader);
        console.info(TAG + 'AudioFrameworkRenderLog:case 2-2:AudioFrameworkRenderLog: File Path: ');
        let totalSize = fileio.fstatSync(fdRead).size;
        console.info(TAG + 'AudioFrameworkRenderLog:case 3 : AudioFrameworkRenderLog: File totalSize size: ' + totalSize);
        totalSize = totalSize - 44;
        console.info(TAG + 'AudioFrameworkRenderLog: File size : Removing header: ' + totalSize);
        let rlen = 0;
        // Only a quarter of the payload is streamed — presumably to shorten the
        // playback window for the interrupt scenario; confirm against the test plan.
        while (rlen < totalSize / 4) {
            let buf = new ArrayBuffer(bufferSize);
            rlen += ss.readSync(buf);
            console.info(TAG + 'InterruptHap:BufferAudioFramework: bytes read from file: ' + rlen);
            await audioRen.write(buf);
        }
        let activated = false;
        let InterruptHint = 0;
        let audioManager = audio.getAudioManager();
        let interAudioInterrupt = {
            streamUsage: 1,
            contentType: 0,
            pauseWhenDucked: true
        };
        // Log any focus-change events delivered while this stream is active.
        audioManager.on('interrupt', interAudioInterrupt, (InterruptAction) => {
            console.info(TAG + 'come in FuZhuHap interrupt');
            if (InterruptAction.actionType != undefined && InterruptAction.actionType != null) {
                console.info(TAG + 'InterruptHap An event to gain the audio focus ========================= starts.');
                console.info(TAG + `Focus gain event: ${InterruptAction} `);
                activated = InterruptAction.activated;
                InterruptHint = InterruptAction.hint
                console.info(TAG + 'activated ============ is :' + activated);
                console.info(TAG + 'InterruptHint ================ is :' + InterruptHint);
                console.info(TAG + 'InterruptAction.actionType ============ is :' + InterruptAction.actionType);
            }
        });
        console.info(TAG + 'AudioFrameworkRenderLog: Renderer after read');
        await sleep(3000);
        // Drain, then hand control back to the caller with resultCode 1111.
        await audioRen.drain().then(async function () {
            console.info(TAG + 'AudioFrameworkRenderLog: Renderer drained : SUCCESS');
            let wantInfo = {
                want:
                {
                    bundleName: "com.example.audiorenderinterrupt",
                    abilityName: "com.example.entry.MainAbility"
                },
                resultCode: 1111
            }
            featureAbility.terminateSelfWithResult(wantInfo).then(() => {
                console.info(TAG + 'terminateSelf ================================== success')
            })
                .catch(() => {
                    console.info(TAG + 'terminateSelf ==================================== fail')
                })
        }).catch((err) => {
            console.error(TAG + 'AudioFrameworkRenderLog: Renderer drain: ERROR : ' + err.message);
        });
    },
    onDestroy() {
        console.log(TAG1 + 'onDestroy');
    },
    async onActive() {
        console.log(TAG1 + 'onActive');
    },
    onInactive() {
        console.log(TAG1 + 'onInactive');
    },
    onShow() {
        console.log(TAG1 + 'onShow');
    },
    onHide() {
        console.log(TAG1 + 'onHide');
    }
}
\ No newline at end of file
/*
* Copyright (c) 2022 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
@Entry
@Component
struct Index {
  // Static label shown while the ability plays audio; text is user-facing.
  @State message: string = '播放音乐'

  build() {
    Row() {
      Column() {
        Text(this.message)
          .fontSize(50)
          .fontWeight(FontWeight.Bold)
        // Dead commented-out play-button markup removed; empty Row kept so the
        // layout tree is unchanged.
        Row() {
        }
      }
      .width('100%')
    }
    .height('100%')
  }
}
\ No newline at end of file
{
"color": [
{
"name": "white",
"value": "#FFFFFF"
}
]
}
\ No newline at end of file
{
"string": [
{
"name": "entry_desc",
"value": "description"
},
{
"name": "MainAbility_desc",
"value": "description"
},
{
"name": "MainAbility_label",
"value": "audioRender"
}
]
}
\ No newline at end of file
......@@ -46,6 +46,7 @@ describe('audioFramework', function () {
console.info(`${TagFrmwk}: getAudioManger : FAIL`);
}
}
// Asynchronous pause: returns a promise that settles after `ms` milliseconds.
function sleep(ms) {
    const schedule = (resolve) => {
        setTimeout(resolve, ms);
    };
    return new Promise(schedule);
}
......@@ -4903,7 +4904,7 @@ describe('audioFramework', function () {
*@tc.level : Level 2
*/
it('SUB_MULTIMEDIA_AUDIO_ROUTING_MANAGER_GETDEVICES_0100', 2,async function (done) {
let AudioRoutingManager = await audioManager.getRoutingManager();
let AudioRoutingManager = audioManager.getRoutingManager();
AudioRoutingManager.getDevices(1, (err, value) => {
// Getting all Output devices Enumb 1 = OUTPUT_DEVICES_FLAG
console.info(`${TagFrmwk}: Callback: getDevices OUTPUT_DEVICES_FLAG`);
......@@ -4937,7 +4938,7 @@ describe('audioFramework', function () {
*@tc.level : Level 2
*/
it('SUB_MULTIMEDIA_AUDIO_ROUTING_MANAGER_GETDEVICES_0200', 2,async function (done) {
let AudioRoutingManager = await audioManager.getRoutingManager();
let AudioRoutingManager = audioManager.getRoutingManager();
AudioRoutingManager.getDevices(2, (err, value) => {
// Getting all Input Devices ENUM 2 = INPUT_DEVICES_FLAG
console.info(`${TagFrmwk}: Callback: getDevices INPUT_DEVICES_FLAG`);
......@@ -4970,7 +4971,7 @@ describe('audioFramework', function () {
*@tc.level : Level 2
*/
it('SUB_MULTIMEDIA_AUDIO_ROUTING_MANAGER_GETDEVICES_0300', 2, async function (done) {
let AudioRoutingManager = await audioManager.getRoutingManager();
let AudioRoutingManager = audioManager.getRoutingManager();
AudioRoutingManager.getDevices(3, (err, value) => {
// Getting all devies connected 3 = ALL_DEVICES_FLAG
console.info(`${TagFrmwk}: Callback: getDevices ALL_DEVICES_FLAG`);
......@@ -5003,7 +5004,7 @@ describe('audioFramework', function () {
*@tc.level : Level 2
*/
it('SUB_MULTIMEDIA_AUDIO_ROUTING_MANAGER_GETDEVICES_0400', 2, async function (done) {
let AudioRoutingManager = await audioManager.getRoutingManager();
let AudioRoutingManager = audioManager.getRoutingManager();
AudioRoutingManager.getDevices(audio.DeviceFlag.OUTPUT_DEVICES_FLAG, (err, value) => {
console.info(`${TagFrmwk}: Callback: getDevices OUTPUT_DEVICES_FLAG`);
if (err) {
......@@ -5035,7 +5036,7 @@ describe('audioFramework', function () {
*@tc.level : Level 2
*/
it('SUB_MULTIMEDIA_AUDIO_ROUTING_MANAGER_GETDEVICES_0500', 2, async function (done) {
let AudioRoutingManager = await audioManager.getRoutingManager();
let AudioRoutingManager = audioManager.getRoutingManager();
AudioRoutingManager.getDevices(audio.DeviceFlag.INPUT_DEVICES_FLAG, (err, value) => {
console.info(`${TagFrmwk}: Callback: getDevices INPUT_DEVICES_FLAG`);
if (err) {
......@@ -5066,7 +5067,7 @@ describe('audioFramework', function () {
*@tc.level : Level 2
*/
it('SUB_MULTIMEDIA_AUDIO_ROUTING_MANAGER_GETDEVICES_0600', 2, async function (done) {
let AudioRoutingManager = await audioManager.getRoutingManager();
let AudioRoutingManager = audioManager.getRoutingManager();
AudioRoutingManager.getDevices(audio.DeviceFlag.ALL_DEVICES_FLAG, (err, value) => {
console.info(`${TagFrmwk}: Callback: getDevices ALL_DEVICES_FLAG`);
if (err) {
......@@ -5097,7 +5098,7 @@ describe('audioFramework', function () {
*@tc.level : Level 2
*/
it('SUB_MULTIMEDIA_AUDIO_ROUTING_MANAGER_GETDEVICES_0700', 2, async function (done) {
let AudioRoutingManager = await audioManager.getRoutingManager();
let AudioRoutingManager = audioManager.getRoutingManager();
let value = await AudioRoutingManager.getDevices(audio.DeviceFlag.OUTPUT_DEVICES_FLAG);
console.info(`${TagFrmwk}: Promise: getDevices OUTPUT_DEVICES_FLAG`);
value.forEach(displayDeviceProp);
......@@ -5112,229 +5113,6 @@ describe('audioFramework', function () {
done();
})
/**
*@tc.number : SUB_MULTIMEDIA_AUDIO_ROUTING_MANAGER_GETDEVICES_0800
*@tc.name : getDevices - INPUT device - Promise - ENAME
*@tc.desc : getDevices - INPUT device
*@tc.size : MEDIUM
*@tc.type : Function
*@tc.level : Level 2
*/
it('SUB_MULTIMEDIA_AUDIO_ROUTING_MANAGER_GETDEVICES_0800', 2, function (done) {
    // Obtain the routing manager via the callback overload, then query input
    // devices with the promise overload of getDevices.
    audioManager.getRoutingManager(async (err, AudioRoutingManager) => {
        if (err) {
            console.error(`${TagFrmwk}: Callback: failed to get RoutingManager ${err.message}`);
            expect().assertFail();
        } else {
            let value = await AudioRoutingManager.getDevices(audio.DeviceFlag.INPUT_DEVICES_FLAG)
            console.info(`${TagFrmwk}: Promise: getDevices INPUT_DEVICES_FLAG`);
            // displayDeviceProp apparently fills the shared dTValue/dRValue/devId/
            // sRate/cCount/cMask globals checked below — defined earlier in this
            // suite, outside this view; verify against the full file.
            value.forEach(displayDeviceProp);
            if (dTValue != null && dRValue != null && devId > 0 && sRate != null && cCount != null &&
                cMask != null) {
                console.info(`${TagFrmwk}: Promise: getDevices : INPUT_DEVICES_FLAG : PASS`);
                expect(true).assertTrue();
            } else {
                console.info(`${TagFrmwk}: Promise: getDevices : INPUT_DEVICES_FLAG : FAIL`);
                expect(false).assertTrue();
            }
        }
        done();
    });
})
/**
*@tc.number : SUB_MULTIMEDIA_AUDIO_ROUTING_MANAGER_GETDEVICES_0900
*@tc.name : getDevices - ALL device - Promise - ENAME
*@tc.desc : getDevices - ALL device
*@tc.size : MEDIUM
*@tc.type : Function
*@tc.level : Level 2
*/
it('SUB_MULTIMEDIA_AUDIO_ROUTING_MANAGER_GETDEVICES_0900', 2, function (done) {
    // Same pattern as GETDEVICES_0800, but queries every connected device
    // (ALL_DEVICES_FLAG) instead of input devices only.
    audioManager.getRoutingManager(async (err, AudioRoutingManager) => {
        if (err) {
            console.error(`${TagFrmwk}:Callback:failed to get RoutingManager ${err.message}`);
            expect().assertFail();
        } else {
            let value = await AudioRoutingManager.getDevices(audio.DeviceFlag.ALL_DEVICES_FLAG)
            console.info(`${TagFrmwk}: Promise: getDevices ALL_DEVICES_FLAG`);
            // Shared globals populated by displayDeviceProp (declared elsewhere
            // in the suite) drive the pass/fail decision below.
            value.forEach(displayDeviceProp);
            if (dTValue != null && dRValue != null && devId > 0 && sRate != null && cCount != null &&
                cMask != null) {
                console.info(`${TagFrmwk}: Promise: getDevices : ALL_DEVICES_FLAG : PASS`);
                expect(true).assertTrue();
            }
            else {
                console.info(`${TagFrmwk}: Promise: getDevices : ALL_DEVICES_FLAG : FAIL`);
                expect(false).assertTrue();
            }
        }
        done();
    });
})
/**
*@tc.number : SUB_MULTIMEDIA_AUDIO_ROUTING_MANAGER_MICSTATECHANGE_0100
*@tc.name : SUB_MULTIMEDIA_AUDIO_ROUTING_MANAGER_MICSTATECHANGE_0100
*@tc.desc : micStateChange
*@tc.size : MEDIUM
*@tc.type : Function
*@tc.level : Level 2
*/
it('SUB_MULTIMEDIA_AUDIO_ROUTING_MANAGER_MICSTATECHANGE_0100', 2, async function (done) {
    try{
        var routingManager = await audioManager.getRoutingManager();
    }catch (err) {
        console.error(`${TagFrmwk}:Callback:failed to get RoutingManager ${err.message}`);
        expect(false).assertTrue();
    }
    // Count micStateChange notifications. Re-applying the microphone's current
    // mute state below is a no-op transition, so no event should ever fire and
    // count must remain 0.
    let count = 0;
    console.info('getRoutingManager Callback START.');
    routingManager.on('micStateChange',async (micStateChange)=>{
        count++;
    })
    try{
        let data = await audioManager.isMicrophoneMute();
        console.info('Promise isMicrophoneMute PASS:' + data);
        // Set mute to the value it already has — deliberately not a change.
        await audioManager.setMicrophoneMute(data);
        console.info('Promise setMicrophoneMute PASS.');
        let data1 = await audioManager.isMicrophoneMute();
        console.info('Promise isMicrophoneMute PASS.' + data1);
    }catch (err) {
        console.log('ERROR:'+JSON.stringify(err))
        expect(false).assertTrue();
        done();
    }
    // Grace period for any (unexpected) asynchronous event delivery.
    await sleep(2000);
    expect(count).assertEqual(0);
    done();
})
/**
*@tc.number : SUB_MULTIMEDIA_AUDIO_ROUTING_MANAGER_MICSTATECHANGE_0200
*@tc.name : SUB_MULTIMEDIA_AUDIO_ROUTING_MANAGER_MICSTATECHANGE_0200
*@tc.desc : micStateChange
*@tc.size : MEDIUM
*@tc.type : Function
*@tc.level : Level 2
*/
it('SUB_MULTIMEDIA_AUDIO_ROUTING_MANAGER_MICSTATECHANGE_0200', 2, async function (done) {
    try{
        var routingManager = await audioManager.getRoutingManager();
    }catch (err) {
        console.error(`${TagFrmwk}:Callback:failed to get RoutingManager ${err.message}`);
        expect(false).assertTrue();
    }
    console.info('getRoutingManager Callback START.');
    // One real mute-state toggle below should produce exactly one
    // micStateChange notification.
    let count = 0;
    routingManager.on('micStateChange',async (micStateChange)=>{
        console.info("Updated micState:" + JSON.stringify(micStateChange));
        count++;
    })
    try{
        let data = await audioManager.isMicrophoneMute();
        console.info('Promise isMicrophoneMute PASS:' + data);
        // Invert the current state — a genuine transition this time.
        let micStatus = !data;
        await audioManager.setMicrophoneMute(micStatus);
        console.info('Promise setMicrophoneMute PASS:' + micStatus);
    }catch (err) {
        console.log('ERROR:'+JSON.stringify(err))
        expect(false).assertTrue();
        done();
    }
    // Wait for asynchronous event delivery before asserting the count.
    await sleep(2000);
    expect(count).assertEqual(1);
    done();
})
/**
*@tc.number : SUB_MULTIMEDIA_AUDIO_ROUTING_MANAGER_MICSTATECHANGE_0300
*@tc.name : SUB_MULTIMEDIA_AUDIO_ROUTING_MANAGER_MICSTATECHANGE_0300
*@tc.desc : micStateChange
*@tc.size : MEDIUM
*@tc.type : Function
*@tc.level : Level 2
*/
it('SUB_MULTIMEDIA_AUDIO_ROUTING_MANAGER_MICSTATECHANGE_0300', 2, async function (done) {
    try{
        var routingManager = await audioManager.getRoutingManager();
    }catch (err) {
        console.error(`${TagFrmwk}:Callback:failed to get RoutingManager ${err.message}`);
        expect(false).assertTrue();
    }
    console.info('getRoutingManager Callback START.');
    // Two real mute-state toggles (invert, then restore) should produce
    // exactly two micStateChange notifications.
    let count = 0;
    routingManager.on('micStateChange',async (micStateChange)=>{
        console.info("Updated micState:" + JSON.stringify(micStateChange));
        count++;
    })
    try{
        let data = await audioManager.isMicrophoneMute();
        console.info('Promise isMicrophoneMute PASS:' + data);
        let micStatus = !data;
        // First transition: invert the current state.
        await audioManager.setMicrophoneMute(micStatus);
        console.info('Promise setMicrophoneMute PASS:' + micStatus);
        // Second transition: restore the original state.
        await audioManager.setMicrophoneMute(!micStatus);
        console.info('Promise setMicrophoneMute PASS:' + (!micStatus));
    }catch (err) {
        console.log('ERROR:'+JSON.stringify(err))
        expect(false).assertTrue();
        done();
    }
    // Wait for asynchronous event delivery before asserting the count.
    await sleep(2000);
    expect(count).assertEqual(2);
    done();
})
/**
*@tc.number : SUB_MULTIMEDIA_AUDIO_ROUTING_MANAGER_MICSTATECHANGE_0400
*@tc.name : SUB_MULTIMEDIA_AUDIO_ROUTING_MANAGER_MICSTATECHANGE_0400
*@tc.desc : micStateChange
*@tc.size : MEDIUM
*@tc.type : Function
*@tc.level : Level 2
*/
it('SUB_MULTIMEDIA_AUDIO_ROUTING_MANAGER_MICSTATECHANGE_0400', 2, async function (done) {
    try{
        var routingManager = await audioManager.getRoutingManager();
    }catch (err) {
        console.error(`${TagFrmwk}:Callback:failed to get RoutingManager ${err.message}`);
        expect(false).assertTrue();
    }
    let count = 0;
    try {
        console.info("enter SUB_AUDIO_MANAGER_micStateChange_004");
        // Outer listener registers a SECOND listener only after the first
        // toggle fires; only the inner listener increments `count`. The inner
        // listener then sees only the second toggle, so count ends at 1 —
        // presumably the intended check that late registration misses earlier
        // events; verify against the test plan.
        routingManager.on('micStateChange',async (micStateChange1)=>{
            console.info("Updated micState--001:" + JSON.stringify(micStateChange1));
            routingManager.on('micStateChange',async (micStateChange)=>{
                console.info("Updated micState--002:" + JSON.stringify(micStateChange));
                count++
            })
            // Trigger a second transition from inside the first notification.
            let data = await audioManager.isMicrophoneMute();
            console.info('Second Promise isMicrophoneMute PASS:' + data);
            await audioManager.setMicrophoneMute(!data);
            console.info('Second:Promise setMicrophoneMute PASS:' + (!data));
        })
        // First transition: kicks off the chain above.
        let data = await audioManager.isMicrophoneMute();
        console.info('First Promise isMicrophoneMute PASS:' + data);
        await audioManager.setMicrophoneMute(!data);
        console.info('First:Promise setMicrophoneMute PASS:' + (!data));
    } catch (err) {
        console.log('ERROR:'+JSON.stringify(err))
        expect(false).assertTrue();
        done();
    }
    // Allow both notifications to be delivered before asserting.
    await sleep(2000);
    expect(count).assertEqual(1);
    done();
})
/**
*@tc.number : SUB_MULTIMEDIA_AUDIO_DEVICEFALG_0100
*@tc.name : NONE_DEVICES_FLAG
......
......@@ -14,4 +14,6 @@
*/
require('./AudioFramework.test.js')
require('./AudioManagerApi9.test.js')
......@@ -35,8 +35,7 @@ describe('audioVoip', function () {
beforeAll(async function () {
console.info(`AudioFrameworkTest: beforeAll: Prerequisites at the test suite level`);
let permissionName1 = 'ohos.permission.MICROPHONE';
let permissionName2 = 'ohos.permission.MANAGE_AUDIO_CONFIG';
let permissionNameList = [permissionName1,permissionName2];
let permissionNameList = [permissionName1];
let appName = 'ohos.acts.multimedia.audio.audiovoip';
await audioTestBase.applyPermission(appName, permissionNameList);
await sleep(100);
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册