Commit 30b34c04 authored by wangtao

xts for audio

Signed-off-by: wangtao <wangtao468@huawei.com>
Change-Id: I7f7c9c36a5b86b5f5aae713cd81bfa5990042e76
Parent 49623241
@@ -357,14 +357,19 @@ describe('audioCapturer', function () {
        steps.shift();
        let markReachParam = steps[0];
        steps.shift();
-       audioCap.on('markReach', markReachParam, (position) => {
-           console.log(`${Tag} position: ${JSON.stringify(position)}`);
-           console.log(`${Tag} markReachParam: ${JSON.stringify(markReachParam)}`);
-           if (position == markReachParam) {
-               markReachState = 'success'
-               console.info(`${Tag} AudioRenderLog: mark reached: ${JSON.stringify(position)}`);
-           }
-       });
+       try{
+           audioCap.on('markReach', markReachParam, (position) => {
+               console.log(`${Tag} position: ${JSON.stringify(position)}`);
+               console.log(`${Tag} markReachParam: ${JSON.stringify(markReachParam)}`);
+               if (position == markReachParam) {
+                   markReachState = 'success'
+                   console.info(`${Tag} AudioRenderLog: mark reached: ${JSON.stringify(position)}`);
+               }
+           });
+       }catch(err){
+           markReachState = 'error'
+           console.error(`${Tag} AudioRenderLog: mark reached: error: code:${err.code},message:${err.message}`);
+       }
        toNextStep(audioCap, steps, done);
    });
@@ -380,14 +385,19 @@ describe('audioCapturer', function () {
        steps.shift();
        let periodReachParam = steps[0];
        steps.shift();
-       audioCap.on('periodReach', periodReachParam, (position) => {
-           console.log(`${Tag} position: ${JSON.stringify(position)}`);
-           console.log(`${Tag} periodReachParam: ${JSON.stringify(periodReachParam)}`);
-           if (position == periodReachParam) {
-               periodReachState = 'success'
-               console.info(`${Tag} AudioRenderLog: mark reached: ${JSON.stringify(position)}`);
-           }
-       });
+       try{
+           audioCap.on('periodReach', periodReachParam, (position) => {
+               console.log(`${Tag} position: ${JSON.stringify(position)}`);
+               console.log(`${Tag} periodReachParam: ${JSON.stringify(periodReachParam)}`);
+               if (position == periodReachParam) {
+                   periodReachState = 'success'
+                   console.info(`${Tag} AudioRenderLog: mark reached: ${JSON.stringify(position)}`);
+               }
+           });
+       }catch(err){
+           periodReachState = 'error'
+           console.error(`${Tag} AudioRenderLog: mark reached: error: code:${err.code},message:${err.message}`);
+       }
        toNextStep(audioCap, steps, done);
    });
    eventEmitter.on(OFF_PERIODR_REACH_EVENT, (audioCap, steps, done) => {
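Both hunks above apply the same guard: the 'markReach' / 'periodReach' subscription is wrapped in try/catch so an invalid parameter fails the current step instead of throwing out of the event-driven step runner. A minimal standalone sketch of that pattern (illustrative only; the helper name, the initial state value, and the assumption that an invalid frame count makes on() throw are not part of this commit):

    // Sketch: register a position observer defensively and record the outcome.
    function subscribeMarkReach(audioCap, markReachParam) {
        let markReachState = 'pending';   // assumed initial value, for illustration
        try {
            audioCap.on('markReach', markReachParam, (position) => {
                if (position == markReachParam) {
                    markReachState = 'success';
                }
            });
        } catch (err) {
            // assumption: the framework rejects an invalid frame count by throwing here
            markReachState = 'error';
            console.error(`markReach register failed: code:${err.code},message:${err.message}`);
        }
        return markReachState;
    }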
@@ -440,9 +450,6 @@ describe('audioCapturer', function () {
        console.info(`${Tag} AudioFrameworkTest: afterAll: Test suite-level cleanup condition`);
    })
    async function recPromise(AudioCapturerOptions, done) {
        let audioCap;
        try {
@@ -2100,9 +2107,17 @@ describe('audioCapturer', function () {
            return done();
        }
+       try {
+           await audioCapPromise.start();
+           console.log(`${Tag} start ok`);
+       } catch (err) {
+           console.log(`${Tag} start err: ${JSON.stringify(err)}`);
+           expect(false).assertTrue();
+       }
        try {
            await audioCapPromise.stop();
            console.log(`${Tag} stop ok`);
+           expect(true).assertTrue();
        } catch (err) {
            console.log(`${Tag} stop err: ${JSON.stringify(err)}`);
            expect(false).assertTrue();
@@ -2439,13 +2454,8 @@ describe('audioCapturer', function () {
            capturerInfo: audioCapturerInfo44100,
        }
        await audio.createAudioCapturer(AudioCapturerOptionsInvalid).then(function (data) {
-           if (data == undefined) {
-               console.info(`${Tag} AudioFrameworkRecLog: AudioCapturer Created : Unsuccess : ${JSON.stringify(data)}`);
-               expect(true).assertTrue();
-           } else {
-               console.info(`${Tag} AudioFrameworkRecLog: AudioCapturer Created : Success: ${JSON.stringify(data)}`);
-               expect(false).assertTrue();
-           }
+           console.info(`${Tag} AudioFrameworkRecLog: AudioCapturer Created : Success: ${JSON.stringify(data)}`);
+           expect(false).assertTrue();
        }).catch((err) => {
            console.info(`${Tag} AudioFrameworkRecLog: AudioCapturer Created : ERROR : ${JSON.stringify(err.message)}`);
            expect(true).assertTrue();
@@ -2992,4 +3002,89 @@ describe('audioCapturer', function () {
        let mySteps = [CREATE_EVENT, AudioCapturerOptions, PERIODR_REACH_EVENT, periodReachParam, START_EVENT, GET_BUFFERSIZE_EVENT, READ_EVENT, OFF_PERIODR_REACH_EVENT, RELEASE_EVENT, END_EVENT];
        eventEmitter.emit(mySteps[0], audioCap, mySteps, done);
    })
/**
*@tc.number : SUB_MULTIMEDIA_AUDIO_CAPTURER_GET_AUDIO_STREAM_ID_0100
*@tc.name : AudioCapturer - getAudioStreamId
*@tc.desc : AudioCapturer - getAudioStreamId
*@tc.size : MEDIUM
*@tc.type : Function
*@tc.level : Level 0
*/
it('SUB_MULTIMEDIA_AUDIO_CAPTURER_GET_AUDIO_STREAM_ID_0100', 0, async function (done) {
let AudioStreamInfo = {
samplingRate: audio.AudioSamplingRate.SAMPLE_RATE_44100,
channels: audio.AudioChannel.CHANNEL_2,
sampleFormat: audio.AudioSampleFormat.SAMPLE_FORMAT_S16LE,
encodingType: audio.AudioEncodingType.ENCODING_TYPE_RAW
}
let AudioCapturerInfo = {
source: audio.SourceType.SOURCE_TYPE_MIC,
capturerFlags: 0
}
let AudioCapturerOptions = {
streamInfo: AudioStreamInfo,
capturerInfo: AudioCapturerInfo
}
let audioCapPromise;
try {
audioCapPromise = await audio.createAudioCapturer(AudioCapturerOptions);
await audioCapPromise.getAudioStreamId((err, data) => {
if (err) {
console.info(`${Tag}: getAudioStreamId : ERROR : code: ${err.code}, mesage: ${err.message}`);
expect(false).assertTrue();
} else {
expect(true).assertTrue();
console.info(`${Tag}: getAudioStreamId : Converted: ${data}`);
}
});
await audioCapPromise.release();
} catch (err) {
console.log(`${Tag} err: ${JSON.stringify(err)}`);
expect(false).assertTrue();
}
done();
})
/**
*@tc.number : SUB_MULTIMEDIA_AUDIO_CAPTURER_GET_AUDIO_STREAM_ID_0200
*@tc.name : AudioCapturer - getAudioStreamId
*@tc.desc : AudioCapturer - getAudioStreamId
*@tc.size : MEDIUM
*@tc.type : Function
*@tc.level : Level 0
*/
it('SUB_MULTIMEDIA_AUDIO_CAPTURER_GET_AUDIO_STREAM_ID_0200', 0, async function (done) {
let AudioStreamInfo = {
samplingRate: audio.AudioSamplingRate.SAMPLE_RATE_44100,
channels: audio.AudioChannel.CHANNEL_1,
sampleFormat: audio.AudioSampleFormat.SAMPLE_FORMAT_S16LE,
encodingType: audio.AudioEncodingType.ENCODING_TYPE_RAW
}
let AudioCapturerInfo = {
source: audio.SourceType.SOURCE_TYPE_MIC,
capturerFlags: 0
}
let AudioCapturerOptions = {
streamInfo: AudioStreamInfo,
capturerInfo: AudioCapturerInfo
}
try {
let audioCapPromise = await audio.createAudioCapturer(AudioCapturerOptions);
await audioCapPromise.getAudioStreamId().then((data)=>{
expect(true).assertTrue();
console.info(`${Tag}: getAudioStreamId : Converted: ${data}`);
}).catch((err) => {
expect(true).assertTrue();
console.error(`${Tag}: getAudioStreamId : ERROR : ${err}`);
});
await audioCapPromise.release();
} catch (err) {
console.log(`${Tag} error code: ${err.code} ,message:${err.message}`);
expect(false).assertTrue();
}
done();
})
})
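The two new cases added above exercise both invocation styles of getAudioStreamId on an AudioCapturer. A condensed sketch of the same calls outside the test harness (illustrative only; it assumes the suite's existing `audio` import of '@ohos.multimedia.audio', a valid `AudioCapturerOptions`, and a helper name that does not appear in the commit):

    // Sketch: query the stream id via callback and via promise, then release the capturer.
    async function logCapturerStreamId(AudioCapturerOptions) {
        let capturer = await audio.createAudioCapturer(AudioCapturerOptions);
        capturer.getAudioStreamId((err, id) => {          // callback form, as in test 0100
            if (err) {
                console.error(`getAudioStreamId failed: code:${err.code}, message:${err.message}`);
            } else {
                console.info(`stream id (callback): ${id}`);
            }
        });
        let id = await capturer.getAudioStreamId();       // promise form, as in test 0200
        console.info(`stream id (promise): ${id}`);
        await capturer.release();
    }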
@@ -34,22 +34,9 @@ describe('audioCapturerChange', function () {
        await sleep(100);
        console.info(`AudioFrameworkTest: beforeAll: END`);
        await sleep(100);
-       await audioManager.getStreamManager().then((data) => {
-           audioStreamManager = data;
-           console.info(`${Tag}: Get AudioStream Manager : Success `);
-       }).catch((err) => {
-           console.info(`${Tag}: Get AudioStream Manager : ERROR :${err.message}`);
-       });
-       audioManager.getStreamManager((err, data) => {
-           if (err) {
-               console.error(`${Tag}: Get AudioStream Manager : ERROR :${err.message}`);
-           }
-           else {
-               audioStreamManagerCB = data;
-               console.info(`${Tag}: Get AudioStream Manager : Success `);
-           }
-       });
+       audioStreamManager = audioManager.getStreamManager();
+       audioStreamManagerCB = audioManager.getStreamManager();
        await sleep(1000);
        console.info(`${Tag}: beforeAll: END`);
    })
......
@@ -25,24 +25,10 @@ describe('audioRendererChange', function () {
    console.info(`${Tag} : Create AudioManger Object JS Framework`);
    beforeAll(async function () {
-       await AUDIOMANAGER.getStreamManager().then(function (data) {
-           audioStreamManager = data;
-           console.info(`${Tag} : Get AudioStream Manager : Success `);
-       }).catch((err) => {
-           console.info(`${Tag} : Get AudioStream Manager : ERROR : ${err.message}`);
-       });
-       AUDIOMANAGER.getStreamManager((err, data) => {
-           if (err) {
-               console.error(`${Tag} : Get AudioStream Manager : ERROR : ${err.message}`);
-           }
-           else {
-               audioStreamManagerCB = data;
-               console.info(`${Tag} : Get AudioStream Manager : Success `);
-           }
-       });
+       audioStreamManager = AUDIOMANAGER.getStreamManager();
+       audioStreamManagerCB = AUDIOMANAGER.getStreamManager();
        await sleep(1000);
        console.info(`${Tag} : beforeAll: Prerequisites at the test suite level`);
    })
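Both beforeAll hunks above make the same substitution: the promise- and callback-based calls are dropped in favour of the synchronous getStreamManager(), which hands back the stream manager directly. A minimal sketch of the new shape (illustrative only; the import line is assumed to match the suites' existing '@ohos.multimedia.audio' import):

    import audio from '@ohos.multimedia.audio';   // assumed to match the suites' existing import

    // Sketch: getStreamManager() now returns the manager directly, with no await and no callback.
    let audioManager = audio.getAudioManager();
    let audioStreamManager = audioManager.getStreamManager();    // instance used by promise-style cases
    let audioStreamManagerCB = audioManager.getStreamManager();  // instance used by callback-style cases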
......
@@ -89,6 +89,10 @@
        {
            "name": "ohos.permission.MODIFY_AUDIO_SETTINGS",
            "reason": "use ohos.permission.MODIFY_AUDIO_SETTINGS"
+       },
+       {
+           "name": "ohos.permission.MANAGE_AUDIO_CONFIG",
+           "reason": "use ohos.permission.MANAGE_AUDIO_CONFIG"
        }
    ]
}
......
@@ -46,6 +46,7 @@ describe('audioFramework', function () {
            console.info(`${TagFrmwk}: getAudioManger : FAIL`);
        }
    }
    function sleep(ms) {
        return new Promise(resolve => setTimeout(resolve, ms));
    }
@@ -4902,7 +4903,7 @@ describe('audioFramework', function () {
    *@tc.level     : Level 2
    */
    it('SUB_MULTIMEDIA_AUDIO_ROUTING_MANAGER_GETDEVICES_0100', 2,async function (done) {
-       let AudioRoutingManager = await audioManager.getRoutingManager();
+       let AudioRoutingManager = audioManager.getRoutingManager();
        AudioRoutingManager.getDevices(1, (err, value) => {
            // Getting all Output devices Enumb 1 = OUTPUT_DEVICES_FLAG
            console.info(`${TagFrmwk}: Callback: getDevices OUTPUT_DEVICES_FLAG`);
@@ -4936,7 +4937,7 @@ describe('audioFramework', function () {
    *@tc.level     : Level 2
    */
    it('SUB_MULTIMEDIA_AUDIO_ROUTING_MANAGER_GETDEVICES_0200', 2,async function (done) {
-       let AudioRoutingManager = await audioManager.getRoutingManager();
+       let AudioRoutingManager = audioManager.getRoutingManager();
        AudioRoutingManager.getDevices(2, (err, value) => {
            // Getting all Input Devices ENUM 2 = INPUT_DEVICES_FLAG
            console.info(`${TagFrmwk}: Callback: getDevices INPUT_DEVICES_FLAG`);
@@ -4969,7 +4970,7 @@ describe('audioFramework', function () {
    *@tc.level     : Level 2
    */
    it('SUB_MULTIMEDIA_AUDIO_ROUTING_MANAGER_GETDEVICES_0300', 2, async function (done) {
-       let AudioRoutingManager = await audioManager.getRoutingManager();
+       let AudioRoutingManager = audioManager.getRoutingManager();
        AudioRoutingManager.getDevices(3, (err, value) => {
            // Getting all devies connected 3 = ALL_DEVICES_FLAG
            console.info(`${TagFrmwk}: Callback: getDevices ALL_DEVICES_FLAG`);
@@ -5002,7 +5003,7 @@ describe('audioFramework', function () {
    *@tc.level     : Level 2
    */
    it('SUB_MULTIMEDIA_AUDIO_ROUTING_MANAGER_GETDEVICES_0400', 2, async function (done) {
-       let AudioRoutingManager = await audioManager.getRoutingManager();
+       let AudioRoutingManager = audioManager.getRoutingManager();
        AudioRoutingManager.getDevices(audio.DeviceFlag.OUTPUT_DEVICES_FLAG, (err, value) => {
            console.info(`${TagFrmwk}: Callback: getDevices OUTPUT_DEVICES_FLAG`);
            if (err) {
@@ -5034,7 +5035,7 @@ describe('audioFramework', function () {
    *@tc.level     : Level 2
    */
    it('SUB_MULTIMEDIA_AUDIO_ROUTING_MANAGER_GETDEVICES_0500', 2, async function (done) {
-       let AudioRoutingManager = await audioManager.getRoutingManager();
+       let AudioRoutingManager = audioManager.getRoutingManager();
        AudioRoutingManager.getDevices(audio.DeviceFlag.INPUT_DEVICES_FLAG, (err, value) => {
            console.info(`${TagFrmwk}: Callback: getDevices INPUT_DEVICES_FLAG`);
            if (err) {
@@ -5065,7 +5066,7 @@ describe('audioFramework', function () {
    *@tc.level     : Level 2
    */
    it('SUB_MULTIMEDIA_AUDIO_ROUTING_MANAGER_GETDEVICES_0600', 2, async function (done) {
-       let AudioRoutingManager = await audioManager.getRoutingManager();
+       let AudioRoutingManager = audioManager.getRoutingManager();
        AudioRoutingManager.getDevices(audio.DeviceFlag.ALL_DEVICES_FLAG, (err, value) => {
            console.info(`${TagFrmwk}: Callback: getDevices ALL_DEVICES_FLAG`);
            if (err) {
@@ -5096,7 +5097,7 @@ describe('audioFramework', function () {
    *@tc.level     : Level 2
    */
    it('SUB_MULTIMEDIA_AUDIO_ROUTING_MANAGER_GETDEVICES_0700', 2, async function (done) {
-       let AudioRoutingManager = await audioManager.getRoutingManager();
+       let AudioRoutingManager = audioManager.getRoutingManager();
        let value = await AudioRoutingManager.getDevices(audio.DeviceFlag.OUTPUT_DEVICES_FLAG);
        console.info(`${TagFrmwk}: Promise: getDevices OUTPUT_DEVICES_FLAG`);
        value.forEach(displayDeviceProp);
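The seven hunks above all drop the `await` in front of getRoutingManager(), treating it as a synchronous getter before calling getDevices. A short sketch of the resulting usage (illustrative only; `audioManager`, `TagFrmwk`, and `displayDeviceProp` are names taken from this test file, and the error-handling shape is an assumption):

    // Sketch: obtain the routing manager synchronously, then list the output devices.
    let AudioRoutingManager = audioManager.getRoutingManager();
    AudioRoutingManager.getDevices(audio.DeviceFlag.OUTPUT_DEVICES_FLAG, (err, value) => {
        if (err) {
            console.error(`${TagFrmwk}: getDevices failed: ${err.message}`);
            return;
        }
        value.forEach(displayDeviceProp);
    });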
@@ -5111,229 +5112,6 @@ describe('audioFramework', function () {
        done();
    })
/**
*@tc.number : SUB_MULTIMEDIA_AUDIO_ROUTING_MANAGER_GETDEVICES_0800
*@tc.name : getDevices - INPUT device - Promise - ENAME
*@tc.desc : getDevices - INPUT device
*@tc.size : MEDIUM
*@tc.type : Function
*@tc.level : Level 2
*/
it('SUB_MULTIMEDIA_AUDIO_ROUTING_MANAGER_GETDEVICES_0800', 2, function (done) {
audioManager.getRoutingManager(async (err, AudioRoutingManager) => {
if (err) {
console.error(`${TagFrmwk}: Callback: failed to get RoutingManager ${err.message}`);
expect().assertFail();
} else {
let value = await AudioRoutingManager.getDevices(audio.DeviceFlag.INPUT_DEVICES_FLAG)
console.info(`${TagFrmwk}: Promise: getDevices INPUT_DEVICES_FLAG`);
value.forEach(displayDeviceProp);
if (dTValue != null && dRValue != null && devId > 0 && sRate != null && cCount != null &&
cMask != null) {
console.info(`${TagFrmwk}: Promise: getDevices : INPUT_DEVICES_FLAG : PASS`);
expect(true).assertTrue();
} else {
console.info(`${TagFrmwk}: Promise: getDevices : INPUT_DEVICES_FLAG : FAIL`);
expect(false).assertTrue();
}
}
done();
});
})
/**
*@tc.number : SUB_MULTIMEDIA_AUDIO_ROUTING_MANAGER_GETDEVICES_0900
*@tc.name : getDevices - ALL device - Promise - ENAME
*@tc.desc : getDevices - ALL device
*@tc.size : MEDIUM
*@tc.type : Function
*@tc.level : Level 2
*/
it('SUB_MULTIMEDIA_AUDIO_ROUTING_MANAGER_GETDEVICES_0900', 2, function (done) {
audioManager.getRoutingManager(async (err, AudioRoutingManager) => {
if (err) {
console.error(`${TagFrmwk}:Callback:failed to get RoutingManager ${err.message}`);
expect().assertFail();
} else {
let value = await AudioRoutingManager.getDevices(audio.DeviceFlag.ALL_DEVICES_FLAG)
console.info(`${TagFrmwk}: Promise: getDevices ALL_DEVICES_FLAG`);
value.forEach(displayDeviceProp);
if (dTValue != null && dRValue != null && devId > 0 && sRate != null && cCount != null &&
cMask != null) {
console.info(`${TagFrmwk}: Promise: getDevices : ALL_DEVICES_FLAG : PASS`);
expect(true).assertTrue();
}
else {
console.info(`${TagFrmwk}: Promise: getDevices : ALL_DEVICES_FLAG : FAIL`);
expect(false).assertTrue();
}
}
done();
});
})
/**
*@tc.number : SUB_MULTIMEDIA_AUDIO_ROUTING_MANAGER_MICSTATECHANGE_0100
*@tc.name : SUB_MULTIMEDIA_AUDIO_ROUTING_MANAGER_MICSTATECHANGE_0100
*@tc.desc : micStateChange
*@tc.size : MEDIUM
*@tc.type : Function
*@tc.level : Level 2
*/
it('SUB_MULTIMEDIA_AUDIO_ROUTING_MANAGER_MICSTATECHANGE_0100', 2, async function (done) {
try{
var routingManager = await audioManager.getRoutingManager();
}catch (err) {
console.error(`${TagFrmwk}:Callback:failed to get RoutingManager ${err.message}`);
expect(false).assertTrue();
}
let count = 0;
console.info('getRoutingManager Callback START.');
routingManager.on('micStateChange',async (micStateChange)=>{
count++;
})
try{
let data = await audioManager.isMicrophoneMute();
console.info('Promise isMicrophoneMute PASS:' + data);
await audioManager.setMicrophoneMute(data);
console.info('Promise setMicrophoneMute PASS.');
let data1 = await audioManager.isMicrophoneMute();
console.info('Promise isMicrophoneMute PASS.' + data1);
}catch (err) {
console.log('ERROR:'+JSON.stringify(err))
expect(false).assertTrue();
done();
}
await sleep(2000);
expect(count).assertEqual(0);
done();
})
/**
*@tc.number : SUB_MULTIMEDIA_AUDIO_ROUTING_MANAGER_MICSTATECHANGE_0200
*@tc.name : SUB_MULTIMEDIA_AUDIO_ROUTING_MANAGER_MICSTATECHANGE_0200
*@tc.desc : micStateChange
*@tc.size : MEDIUM
*@tc.type : Function
*@tc.level : Level 2
*/
it('SUB_MULTIMEDIA_AUDIO_ROUTING_MANAGER_MICSTATECHANGE_0200', 2, async function (done) {
try{
var routingManager = await audioManager.getRoutingManager();
}catch (err) {
console.error(`${TagFrmwk}:Callback:failed to get RoutingManager ${err.message}`);
expect(false).assertTrue();
}
console.info('getRoutingManager Callback START.');
let count = 0;
routingManager.on('micStateChange',async (micStateChange)=>{
console.info("Updated micState:" + JSON.stringify(micStateChange));
count++;
})
try{
let data = await audioManager.isMicrophoneMute();
console.info('Promise isMicrophoneMute PASS:' + data);
let micStatus = !data;
await audioManager.setMicrophoneMute(micStatus);
console.info('Promise setMicrophoneMute PASS:' + micStatus);
}catch (err) {
console.log('ERROR:'+JSON.stringify(err))
expect(false).assertTrue();
done();
}
await sleep(2000);
expect(count).assertEqual(1);
done();
})
/**
*@tc.number : SUB_MULTIMEDIA_AUDIO_ROUTING_MANAGER_MICSTATECHANGE_0300
*@tc.name : SUB_MULTIMEDIA_AUDIO_ROUTING_MANAGER_MICSTATECHANGE_0300
*@tc.desc : micStateChange
*@tc.size : MEDIUM
*@tc.type : Function
*@tc.level : Level 2
*/
it('SUB_MULTIMEDIA_AUDIO_ROUTING_MANAGER_MICSTATECHANGE_0300', 2, async function (done) {
try{
var routingManager = await audioManager.getRoutingManager();
}catch (err) {
console.error(`${TagFrmwk}:Callback:failed to get RoutingManager ${err.message}`);
expect(false).assertTrue();
}
console.info('getRoutingManager Callback START.');
let count = 0;
routingManager.on('micStateChange',async (micStateChange)=>{
console.info("Updated micState:" + JSON.stringify(micStateChange));
count++;
})
try{
let data = await audioManager.isMicrophoneMute();
console.info('Promise isMicrophoneMute PASS:' + data);
let micStatus = !data;
await audioManager.setMicrophoneMute(micStatus);
console.info('Promise setMicrophoneMute PASS:' + micStatus);
await audioManager.setMicrophoneMute(!micStatus);
console.info('Promise setMicrophoneMute PASS:' + (!micStatus));
}catch (err) {
console.log('ERROR:'+JSON.stringify(err))
expect(false).assertTrue();
done();
}
await sleep(2000);
expect(count).assertEqual(2);
done();
})
/**
*@tc.number : SUB_MULTIMEDIA_AUDIO_ROUTING_MANAGER_MICSTATECHANGE_0400
*@tc.name : SUB_MULTIMEDIA_AUDIO_ROUTING_MANAGER_MICSTATECHANGE_0400
*@tc.desc : micStateChange
*@tc.size : MEDIUM
*@tc.type : Function
*@tc.level : Level 2
*/
it('SUB_MULTIMEDIA_AUDIO_ROUTING_MANAGER_MICSTATECHANGE_0400', 2, async function (done) {
try{
var routingManager = await audioManager.getRoutingManager();
}catch (err) {
console.error(`${TagFrmwk}:Callback:failed to get RoutingManager ${err.message}`);
expect(false).assertTrue();
}
let count = 0;
try {
console.info("enter SUB_AUDIO_MANAGER_micStateChange_004");
routingManager.on('micStateChange',async (micStateChange1)=>{
console.info("Updated micState--001:" + JSON.stringify(micStateChange1));
routingManager.on('micStateChange',async (micStateChange)=>{
console.info("Updated micState--002:" + JSON.stringify(micStateChange));
count++
})
let data = await audioManager.isMicrophoneMute();
console.info('Second Promise isMicrophoneMute PASS:' + data);
await audioManager.setMicrophoneMute(!data);
console.info('Second:Promise setMicrophoneMute PASS:' + (!data));
})
let data = await audioManager.isMicrophoneMute();
console.info('First Promise isMicrophoneMute PASS:' + data);
await audioManager.setMicrophoneMute(!data);
console.info('First:Promise setMicrophoneMute PASS:' + (!data));
} catch (err) {
console.log('ERROR:'+JSON.stringify(err))
expect(false).assertTrue();
done();
}
await sleep(2000);
expect(count).assertEqual(1);
done();
})
    /**
    *@tc.number    : SUB_MULTIMEDIA_AUDIO_DEVICEFALG_0100
    *@tc.name      : NONE_DEVICES_FLAG
......
@@ -14,4 +14,5 @@
 */
require('./AudioFramework.test.js')
+require('./AudioManagerApi9.test.js')