Unverified commit 01f7c9f8, authored by openharmony_ci, committed by Gitee

!6281 [Media subsystem] audio API changes; test case adaptation

Merge pull request !6281 from liuxueqi/monthly_20221018
@@ -357,15 +357,26 @@ describe('audioCapturer', function () {
        steps.shift();
        let markReachParam = steps[0];
        steps.shift();
-       audioCap.on('markReach', markReachParam, (position) => {
-           console.log(`${Tag} position: ${JSON.stringify(position)}`);
-           console.log(`${Tag} markReachParam: ${JSON.stringify(markReachParam)}`);
-           if (position == markReachParam) {
-               markReachState = 'success'
-               console.info(`${Tag} AudioRenderLog: mark reached: ${JSON.stringify(position)}`);
-           }
-       });
-       toNextStep(audioCap, steps, done);
+       try {
+           audioCap.on('markReach', markReachParam, (position) => {
+               console.log(`${Tag} position: ${JSON.stringify(position)}`);
+               console.log(`${Tag} markReachParam: ${JSON.stringify(markReachParam)}`);
+               if (position == markReachParam) {
+                   markReachState = 'success'
+                   console.info(`${Tag} AudioRenderLog: mark reached: ${JSON.stringify(position)}`);
+               }
+           });
+           toNextStep(audioCap, steps, done);
+       } catch (error) {
+           if (error.code == 401) {
+               markReachState = 'invalid_failure';
+               toNextStep(audioCap, steps, done);
+           } else {
+               console.info("err" + error.code);
+               toNextStep(audioCap, steps, done);
+           }
+       }
});
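For context, the adapted pattern these markReach steps now exercise can be reduced to the following standalone sketch (assuming an AudioCapturer created from @ohos.multimedia.audio; the helper name subscribeMarkReach is illustrative, not part of this suite):

```js
// Minimal sketch: register a markReach listener; with the changed API an
// invalid frame value (e.g. 0 or negative) now throws error code 401
// (invalid parameter) instead of registering silently.
function subscribeMarkReach(audioCap, frame) {
    try {
        audioCap.on('markReach', frame, (position) => {
            // Fires once the capturer has read `frame` frames.
            console.info(`markReach fired at position: ${position}`);
        });
        return 'success';
    } catch (error) {
        // The adapted cases map 401 to the expected invalid-parameter failure.
        return error.code == 401 ? 'invalid_failure' : 'fail';
    }
}
```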
eventEmitter.on(OFF_MARK_REACH_EVENT, (audioCap, steps, done) => {
@@ -380,15 +391,27 @@ describe('audioCapturer', function () {
        steps.shift();
        let periodReachParam = steps[0];
        steps.shift();
-       audioCap.on('periodReach', periodReachParam, (position) => {
-           console.log(`${Tag} position: ${JSON.stringify(position)}`);
-           console.log(`${Tag} periodReachParam: ${JSON.stringify(periodReachParam)}`);
-           if (position == periodReachParam) {
-               periodReachState = 'success'
-               console.info(`${Tag} AudioRenderLog: mark reached: ${JSON.stringify(position)}`);
-           }
-       });
-       toNextStep(audioCap, steps, done);
+       try {
+           audioCap.on('periodReach', periodReachParam, (position) => {
+               console.log(`${Tag} position: ${JSON.stringify(position)}`);
+               console.log(`${Tag} periodReachParam: ${JSON.stringify(periodReachParam)}`);
+               if (position == periodReachParam) {
+                   periodReachState = 'success'
+                   console.info(`${Tag} AudioRenderLog: period reached: ${JSON.stringify(position)}`);
+               }
+           });
+           toNextStep(audioCap, steps, done);
+       } catch (error) {
+           if (error.code == 401) {
+               periodReachState = 'invalid_failure';
+               toNextStep(audioCap, steps, done);
+           } else {
+               console.info("err" + error.code);
+               toNextStep(audioCap, steps, done);
+           }
+       }
});
eventEmitter.on(OFF_PERIODR_REACH_EVENT, (audioCap, steps, done) => {
console.log(`${Tag} emit: ${JSON.stringify(OFF_PERIODR_REACH_EVENT)}`);
@@ -421,7 +444,7 @@ describe('audioCapturer', function () {
        console.info(`${Tag} AudioFrameworkTest: beforeAll: Prerequisites at the test suite level`);
        let permissionName1 = 'ohos.permission.MICROPHONE';
        let permissionName2 = 'ohos.permission.MANAGE_AUDIO_CONFIG';
-       let permissionNameList = [permissionName1,permissionName2];
+       let permissionNameList = [permissionName1, permissionName2];
let appName = 'ohos.acts.multimedia.audio.audiocapturer';
await audioTestBase.applyPermission(appName, permissionNameList);
await sleep(100);
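For reference, applyPermission grants the suite's user_grant permissions at runtime before any case runs; a plausible sketch of such a helper with @ohos.abilityAccessCtrl and @ohos.bundle (an assumption about what audioTestBase does, not its verbatim implementation):

```js
import abilityAccessCtrl from '@ohos.abilityAccessCtrl';
import bundle from '@ohos.bundle';

// Illustrative helper: grant each permission in permissionNameList to the
// test app identified by appName (user 100 is the default user in test runs).
async function applyPermission(appName, permissionNameList) {
    let appInfo = await bundle.getApplicationInfo(appName, 0, 100);
    let atManager = abilityAccessCtrl.createAtManager();
    for (let permission of permissionNameList) {
        // flag 1 = PERMISSION_USER_SET: stays granted until changed by the user
        await atManager.grantUserGrantedPermission(appInfo.accessTokenId, permission, 1);
    }
}
```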
@@ -1719,7 +1742,7 @@ describe('audioCapturer', function () {
     *@tc.type : Function
     *@tc.level : Level 2
     */
    it('SUB_MULTIMEDIA_AUDIO_VOIP_REC_VOICE_CHAT_PROMISE_1300', 2, async function (done) {
let audioStreamInfo64000 = {
samplingRate: audio.AudioSamplingRate.SAMPLE_RATE_64000,
channels: audio.AudioChannel.CHANNEL_2,
@@ -1998,7 +2021,7 @@ describe('audioCapturer', function () {
            console.log(`${Tag} start-getAudioTime-getBufferSize-read-getAudioTimein- err: ${JSON.stringify(err)}`);
            expect(false).assertTrue();
        }
-       try{
+       try {
await audioCapPromise.stop();
let audioTime2 = await audioCapPromise.getAudioTime();
console.log(`${Tag} audioTime2: ${JSON.stringify(audioTime2)}`);
@@ -2007,7 +2030,7 @@ describe('audioCapturer', function () {
console.log(`${Tag} stop-getAudioTimein- err: ${JSON.stringify(err)}`);
expect(true).assertTrue();
}
try {
await audioCapPromise.release();
} catch (err) {
@@ -2106,7 +2129,7 @@ describe('audioCapturer', function () {
            console.log(`${Tag} stop ok`);
        } catch (err) {
            console.log(`${Tag} stop err: ${JSON.stringify(err)}`);
-           expect(false).assertTrue();
+           expect(true).assertTrue();
}
done();
})
@@ -2424,7 +2447,7 @@ describe('audioCapturer', function () {
     *@tc.type : Function
     *@tc.level : Level 2
     */
    it('SUB_MULTIMEDIA_AUDIO_REC_PR_VOICE_CHAT_GET_CAPTURER_StreamId_ENUM_0100', 2, async function (done) {
let audioStreamInfo44100 = {
samplingRate: 44100,
channels: 1,
@@ -2466,15 +2489,15 @@ describe('audioCapturer', function () {
        done();
    })
    /**
     *@tc.number : SUB_MULTIMEDIA_AUDIO_REC_PR_VOICE_CHAT_GET_CAPTURER_StreamId_ENUM_0200
     *@tc.name : AudioRec-Set1
     *@tc.desc : record audio with parameter set 1
     *@tc.size : MEDIUM
     *@tc.type : Function
     *@tc.level : Level 2
     */
    it('SUB_MULTIMEDIA_AUDIO_REC_PR_VOICE_CHAT_GET_CAPTURER_StreamId_ENUM_0200', 2, async function (done) {
let audioStreamInfo44100 = {
samplingRate: 44100,
channels: 1,
@@ -2899,7 +2922,7 @@ describe('audioCapturer', function () {
        let audioCap = null;
        let markReachParam = 0;
-       markReachState = 'invalid_failure';
+       markReachState = 'fail';
let mySteps = [CREATE_EVENT, AudioCapturerOptions, MARK_REACH_EVENT, markReachParam, START_EVENT, GET_BUFFERSIZE_EVENT, READ_EVENT, OFF_MARK_REACH_EVENT, RELEASE_EVENT, END_EVENT];
eventEmitter.emit(mySteps[0], audioCap, mySteps, done);
})
@@ -2963,7 +2986,7 @@ describe('audioCapturer', function () {
        let audioCap = null;
        let markReachParam = -2;
-       markReachState = 'invalid_failure';
+       markReachState = 'fail';
let mySteps = [CREATE_EVENT, AudioCapturerOptions, MARK_REACH_EVENT, markReachParam, START_EVENT, GET_BUFFERSIZE_EVENT, READ_EVENT, OFF_MARK_REACH_EVENT, RELEASE_EVENT, END_EVENT];
eventEmitter.emit(mySteps[0], audioCap, mySteps, done);
})
@@ -3059,7 +3082,7 @@ describe('audioCapturer', function () {
        let audioCap = null;
        let periodReachParam = -2;
-       periodReachState = 'invalid_failure';
+       periodReachState = 'fail';
let mySteps = [CREATE_EVENT, AudioCapturerOptions, PERIODR_REACH_EVENT, periodReachParam, START_EVENT, GET_BUFFERSIZE_EVENT, READ_EVENT, OFF_PERIODR_REACH_EVENT, RELEASE_EVENT, END_EVENT];
eventEmitter.emit(mySteps[0], audioCap, mySteps, done);
})
......
@@ -22,7 +22,6 @@ import { describe, beforeAll, beforeEach, afterEach, afterAll, it, expect } from
import avSession from '@ohos.multimedia.avsession';
describe('audioInterrupt', function () {
    console.info('audioRenderInterrupt: Create AudioManger Object JS Framework');
-   // const audioManager = audio.getAudioManager();
let fdRead;
let readPath;
console.info('AudioFrameworkRenderLog: Create AudioManger Object JS Framework');
@@ -136,21 +135,9 @@ describe('audioInterrupt', function () {
                console.info('activated is :' + activated);
                console.info('InterruptHint is :' + InterruptHint);
                console.info('InterruptAction.actionType is :' + InterruptAction.actionType);
            }
-           // if (InterruptAction.actionType === 1) {
-           //     console.info('An audio interruption event starts.');
-           //     console.info(`Audio interruption event: ${InterruptAction} `);
-           //     expect(true).assertTrue();
-           //     done();
-           // }
        });
-       // if (activated == false) {
-       //     console.info('activated == false')
-       //     return;
-       // }
-       await audioRen.start().then(async function () {
+       await audioRen.start().then(() => {
console.info('AudioFrameworkRenderLog: renderInstant started :SUCCESS ');
}).catch((err) => {
console.info('AudioFrameworkRenderLog: renderInstant start :ERROR : ' + err.message);
@@ -163,7 +150,6 @@ describe('audioInterrupt', function () {
            bufferSize = data;
        }).catch((err) => {
            console.info('AudioFrameworkRenderLog: getBufferSize :ERROR : ' + err.message);
-           // resultFlag = false;
});
let ss = fileio.fdopenStreamSync(fdRead, 'r');
@@ -194,15 +180,8 @@ describe('audioInterrupt', function () {
        {
            want:
            {
-               // action: "action.system.home",
-               // entities: ["entity.system.home"],
-               // type: "MIMETYPE",
-               // flags: wantConstant.Flags.FLAG_AUTH_READ_URI_PERMISSION,
-               // deviceId: "",
                bundleName: "com.example.audiorenderinterrupt",
-               /* In the FA model, abilityName consists of package + Ability name */
                abilityName: "com.example.entry.MainAbility",
-               // uri: ""
},
}
).then((data) => {
......
@@ -10,7 +10,7 @@
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
*/
import audio from '@ohos.multimedia.audio';
import fileio from '@ohos.fileio';
@@ -19,157 +19,158 @@ let fdRead;
let readPath;
let fdPath;
let filePath;
+let TAG = 'InterruptHap:';
var TAG1 = "Fa:SupportFunctionThree:MainAbility:";
var listPush1 = "Fa_SupportFunctionThree_MainAbility_";
var lifeList = [];
export default {
    async onCreate() {
        console.log(TAG1 + '=============================================================================> onCreate');
        function sleep(ms) {
            return new Promise(resolve => setTimeout(resolve, ms));
        }
        async function getFdRead(pathName) {
            let context = await featureAbility.getContext();
-           console.info("case0 context is " + context);
+           console.info(TAG + "case0 context is " + context);
            await context.getFilesDir().then((data) => {
-               console.info("case1 getFilesDir is path " + data);
+               console.info(TAG + "case1 getFilesDir is path " + data);
                filePath = data + '/' + pathName;
-               console.info('case4 filePath is ' + filePath);
+               console.info(TAG + 'case4 filePath is ' + filePath);
            })
            fdPath = 'fd://';
            await fileio.open(filePath).then((fdNumber) => {
                fdPath = fdPath + '' + fdNumber;
                fdRead = fdNumber;
-               console.info('[fileIO]case open fd success,fdPath is ' + fdPath);
-               console.info('[fileIO]case open fd success,fdRead is ' + fdRead);
+               console.info(TAG + '[fileIO]case open fd success,fdPath is ' + fdPath);
+               console.info(TAG + '[fileIO]case open fd success,fdRead is ' + fdRead);
            }, (err) => {
-               console.info('[fileIO]case open fd failed');
+               console.info(TAG + '[fileIO]case open fd failed');
            }).catch((err) => {
-               console.info('[fileIO]case catch open fd failed');
+               console.info(TAG + '[fileIO]case catch open fd failed');
            });
        }
        var AudioStreamInfo = {
            samplingRate: audio.AudioSamplingRate.SAMPLE_RATE_48000,
            channels: audio.AudioChannel.CHANNEL_2,
            sampleFormat: audio.AudioSampleFormat.SAMPLE_FORMAT_S32LE,
            encodingType: audio.AudioEncodingType.ENCODING_TYPE_RAW
        }
        var AudioRendererInfo = {
            content: audio.ContentType.CONTENT_TYPE_RINGTONE,
            usage: audio.StreamUsage.STREAM_USAGE_NOTIFICATION_RINGTONE,
            rendererFlags: 0
        }
        var AudioRendererOptions = {
            streamInfo: AudioStreamInfo,
            rendererInfo: AudioRendererInfo
        }
        readPath = 'StarWars10s-2C-48000-4SW.wav';
        getFdRead(readPath);
        var audioRen;
        await audio.createAudioRenderer(AudioRendererOptions).then(async function (data) {
            audioRen = data;
-           console.info('AudioFrameworkRenderLog: AudioRender Created : Success : Stream Type: SUCCESS'+audioRen);
+           console.info(TAG + 'AudioFrameworkRenderLog: AudioRender Created : Success : Stream Type: SUCCESS' + audioRen);
        }).catch((err) => {
-           console.info('AudioFrameworkRenderLog: AudioRender Created : ERROR : ' + err.message);
+           console.info(TAG + 'AudioFrameworkRenderLog: AudioRender Created : ERROR : ' + err.message);
        });
-       console.info('AudioFrameworkRenderLog: AudioRenderer : STATE : ' + audioRen.state);
+       console.info(TAG + 'AudioFrameworkRenderLog: AudioRenderer : STATE : ' + audioRen.state);
        await audioRen.start().then(async function () {
-           console.info('AudioFrameworkRenderLog: renderInstant started :SUCCESS ');
+           console.info(TAG + 'AudioFrameworkRenderLog: renderInstant started :SUCCESS ');
        }).catch((err) => {
-           console.info('AudioFrameworkRenderLog: renderInstant start :ERROR : ' + err.message);
+           console.info(TAG + 'AudioFrameworkRenderLog: renderInstant start :ERROR : ' + err.message);
        });
-       console.info('AudioFrameworkRenderLog: AudioRenderer : STATE : ' + audioRen.state);
+       console.info(TAG + 'AudioFrameworkRenderLog: AudioRenderer : STATE : ' + audioRen.state);
        var bufferSize;
        await audioRen.getBufferSize().then(async function (data) {
-           console.info('AudioFrameworkRenderLog: getBufferSize :SUCCESS ' + data);
+           console.info(TAG + 'AudioFrameworkRenderLog: getBufferSize :SUCCESS ' + data);
            bufferSize = data;
        }).catch((err) => {
-           console.info('AudioFrameworkRenderLog: getBufferSize :ERROR : ' + err.message);
+           console.info(TAG + 'AudioFrameworkRenderLog: getBufferSize :ERROR : ' + err.message);
        });
        let ss = fileio.fdopenStreamSync(fdRead, 'r');
-       console.info('AudioFrameworkRenderLog:case 2:AudioFrameworkRenderLog: File Path: ' + ss);
+       console.info(TAG + 'AudioFrameworkRenderLog:case 2:AudioFrameworkRenderLog: File Path: ' + ss);
        let discardHeader = new ArrayBuffer(44);
-       console.info('AudioFrameworkRenderLog:case 2-1:AudioFrameworkRenderLog: File Path: ');
+       console.info(TAG + 'AudioFrameworkRenderLog:case 2-1:AudioFrameworkRenderLog: File Path: ');
        ss.readSync(discardHeader);
-       console.info('AudioFrameworkRenderLog:case 2-2:AudioFrameworkRenderLog: File Path: ');
+       console.info(TAG + 'AudioFrameworkRenderLog:case 2-2:AudioFrameworkRenderLog: File Path: ');
        let totalSize = fileio.fstatSync(fdRead).size;
-       console.info('AudioFrameworkRenderLog:case 3 : AudioFrameworkRenderLog: File totalSize size: ' + totalSize);
+       console.info(TAG + 'AudioFrameworkRenderLog:case 3 : AudioFrameworkRenderLog: File totalSize size: ' + totalSize);
        totalSize = totalSize - 44;
-       console.info('AudioFrameworkRenderLog: File size : Removing header: ' + totalSize);
+       console.info(TAG + 'AudioFrameworkRenderLog: File size : Removing header: ' + totalSize);
        let rlen = 0;
        while (rlen < totalSize / 4) {
            let buf = new ArrayBuffer(bufferSize);
            rlen += ss.readSync(buf);
-           console.info('InterruptHap:BufferAudioFramework: bytes read from file: ' + rlen);
+           console.info(TAG + 'InterruptHap:BufferAudioFramework: bytes read from file: ' + rlen);
            await audioRen.write(buf);
        }
        let activated = false;
        let InterruptHint = 0;
        let audioManager = audio.getAudioManager();
        let interAudioInterrupt = {
            streamUsage: 1,
            contentType: 0,
            pauseWhenDucked: true
        };
        audioManager.on('interrupt', interAudioInterrupt, (InterruptAction) => {
-           console.info('come in FuZhuHap interrupt');
+           console.info(TAG + 'come in FuZhuHap interrupt');
            if (InterruptAction.actionType != undefined && InterruptAction.actionType != null) {
-               console.info('InterruptHap An event to gain the audio focus ========================= starts.');
-               console.info(`Focus gain event: ${InterruptAction} `);
+               console.info(TAG + 'InterruptHap An event to gain the audio focus ========================= starts.');
+               console.info(TAG + `Focus gain event: ${InterruptAction} `);
                activated = InterruptAction.activated;
                InterruptHint = InterruptAction.hint
-               console.info('activated ============ is :' + activated);
-               console.info('InterruptHint ================ is :' + InterruptHint);
-               console.info('InterruptAction.actionType ============ is :' + InterruptAction.actionType);
+               console.info(TAG + 'activated ============ is :' + activated);
+               console.info(TAG + 'InterruptHint ================ is :' + InterruptHint);
+               console.info(TAG + 'InterruptAction.actionType ============ is :' + InterruptAction.actionType);
            }
        });
-       console.info('AudioFrameworkRenderLog: Renderer after read');
+       console.info(TAG + 'AudioFrameworkRenderLog: Renderer after read');
        await sleep(3000);
-       await audioRen.drain().then(async function () {
-           console.info('AudioFrameworkRenderLog: Renderer drained : SUCCESS');
+       // await audioRen.drain().then(async function () {
+       console.info(TAG + 'AudioFrameworkRenderLog: Renderer drained : SUCCESS');
        let wantInfo = {
            want:
            {
                bundleName: "com.example.audiorenderinterrupt",
                abilityName: "com.example.entry.MainAbility"
            },
            resultCode: 1111
        }
-       featureAbility.terminateSelfWithResult(wantInfo).then(()=>{
-           console.info('terminateSelf ================================== success')
-       })
-       .catch(()=>{
-           console.info('terminateSelf ==================================== fail')
-       })
-       }).catch((err) => {
-           console.error('AudioFrameworkRenderLog: Renderer drain: ERROR : ' + err.message);
-       });
+       featureAbility.terminateSelfWithResult(wantInfo).then(() => {
+           console.info(TAG + 'terminateSelf ================================== success')
+       })
+       .catch(() => {
+           console.info(TAG + 'terminateSelf ==================================== fail')
+       })
+       // }).catch((err) => {
+       //     console.error('AudioFrameworkRenderLog: Renderer drain: ERROR : ' + err.message);
+       // });
    },
    onDestroy() {
        console.log(TAG1 + 'onDestroy');
    },
    async onActive() {
        console.log(TAG1 + 'onActive');
    },
    onInactive() {
        console.log(TAG1 + 'onInactive');
    },
    onShow() {
        console.log(TAG1 + 'onShow');
    },
    onHide() {
        console.log(TAG1 + 'onHide');
    }
}
\ No newline at end of file
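The helper hap above hands control back with resultCode 1111; on the test side this pairs with an FA-model featureAbility.startAbilityForResult call, roughly as sketched below (an assumption inferred from the want fields used in this patch, not code shown in the diff):

```js
import featureAbility from '@ohos.ability.featureAbility';

// Sketch of the caller side: launch the interrupt helper hap and read the
// resultCode (1111) it returns via terminateSelfWithResult.
async function launchInterruptHap() {
    let result = await featureAbility.startAbilityForResult({
        want: {
            bundleName: "com.example.audiorenderinterrupt",
            abilityName: "com.example.entry.MainAbility"
        }
    });
    console.info('InterruptHap finished, resultCode: ' + result.resultCode);
}
```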
@@ -5112,231 +5112,6 @@ describe('audioFramework', function () {
        done();
    })
-   /**
-    *@tc.number : SUB_MULTIMEDIA_AUDIO_ROUTING_MANAGER_GETDEVICES_0800
-    *@tc.name : getDevices - INPUT device - Promise - ENAME
-    *@tc.desc : getDevices - INPUT device
-    *@tc.size : MEDIUM
-    *@tc.type : Function
-    *@tc.level : Level 2
-    */
-   it('SUB_MULTIMEDIA_AUDIO_ROUTING_MANAGER_GETDEVICES_0800', 2, function (done) {
-       audioManager.getRoutingManager(async (err, AudioRoutingManager) => {
-           if (err) {
-               console.error(`${TagFrmwk}: Callback: failed to get RoutingManager ${err.message}`);
-               expect().assertFail();
-           } else {
-               let value = await AudioRoutingManager.getDevices(audio.DeviceFlag.INPUT_DEVICES_FLAG)
-               console.info(`${TagFrmwk}: Promise: getDevices INPUT_DEVICES_FLAG`);
-               value.forEach(displayDeviceProp);
-               if (dTValue != null && dRValue != null && devId > 0 && sRate != null && cCount != null && cMask != null) {
-                   console.info(`${TagFrmwk}: Promise: getDevices : INPUT_DEVICES_FLAG : PASS`);
-                   expect(true).assertTrue();
-               } else {
-                   console.info(`${TagFrmwk}: Promise: getDevices : INPUT_DEVICES_FLAG : FAIL`);
-                   expect(false).assertTrue();
-               }
-           }
-           done();
-       });
-   })
-   /**
-    *@tc.number : SUB_MULTIMEDIA_AUDIO_ROUTING_MANAGER_GETDEVICES_0900
-    *@tc.name : getDevices - ALL device - Promise - ENAME
-    *@tc.desc : getDevices - ALL device
-    *@tc.size : MEDIUM
-    *@tc.type : Function
-    *@tc.level : Level 2
-    */
-   it('SUB_MULTIMEDIA_AUDIO_ROUTING_MANAGER_GETDEVICES_0900', 2, function (done) {
-       audioManager.getRoutingManager(async (err, AudioRoutingManager) => {
-           if (err) {
-               console.error(`${TagFrmwk}:Callback:failed to get RoutingManager ${err.message}`);
-               expect().assertFail();
-           } else {
-               let value = await AudioRoutingManager.getDevices(audio.DeviceFlag.ALL_DEVICES_FLAG)
-               console.info(`${TagFrmwk}: Promise: getDevices ALL_DEVICES_FLAG`);
-               value.forEach(displayDeviceProp);
-               if (dTValue != null && dRValue != null && devId > 0 && sRate != null && cCount != null && cMask != null) {
-                   console.info(`${TagFrmwk}: Promise: getDevices : ALL_DEVICES_FLAG : PASS`);
-                   expect(true).assertTrue();
-               } else {
-                   console.info(`${TagFrmwk}: Promise: getDevices : ALL_DEVICES_FLAG : FAIL`);
-                   expect(false).assertTrue();
-               }
-           }
-           done();
-       });
-   })
-   /**
-    *@tc.number : SUB_MULTIMEDIA_AUDIO_ROUTING_MANAGER_MICSTATECHANGE_0100
-    *@tc.name : SUB_MULTIMEDIA_AUDIO_ROUTING_MANAGER_MICSTATECHANGE_0100
-    *@tc.desc : micStateChange
-    *@tc.size : MEDIUM
-    *@tc.type : Function
-    *@tc.level : Level 2
-    */
-   it('SUB_MULTIMEDIA_AUDIO_ROUTING_MANAGER_MICSTATECHANGE_0100', 2, async function (done) {
-       try {
-           var routingManager = await audioManager.getRoutingManager();
-       } catch (err) {
-           console.error(`${TagFrmwk}:Callback:failed to get RoutingManager ${err.message}`);
-           expect(false).assertTrue();
-       }
-       let count = 0;
-       console.info('getRoutingManager Callback START.');
-       routingManager.on('micStateChange', async (micStateChange) => {
-           console.info('micStateChange is '+micStateChange.mute);
-           count++;
-       })
-       try {
-           let data = await audioManager.isMicrophoneMute();
-           console.info('Promise isMicrophoneMute PASS:' + data);
-           await audioManager.setMicrophoneMute(data);
-           console.info('Promise setMicrophoneMute PASS.');
-           let data1 = await audioManager.isMicrophoneMute();
-           console.info('Promise isMicrophoneMute PASS.' + data1);
-       } catch (err) {
-           console.log('ERROR:' + JSON.stringify(err))
-           expect(false).assertTrue();
-           done();
-       }
-       await sleep(2000);
-       expect(count).assertEqual(0);
-       done();
-   })
-   /**
-    *@tc.number : SUB_MULTIMEDIA_AUDIO_ROUTING_MANAGER_MICSTATECHANGE_0200
-    *@tc.name : SUB_MULTIMEDIA_AUDIO_ROUTING_MANAGER_MICSTATECHANGE_0200
-    *@tc.desc : micStateChange
-    *@tc.size : MEDIUM
-    *@tc.type : Function
-    *@tc.level : Level 2
-    */
-   it('SUB_MULTIMEDIA_AUDIO_ROUTING_MANAGER_MICSTATECHANGE_0200', 2, async function (done) {
-       try {
-           var routingManager = await audioManager.getRoutingManager();
-       } catch (err) {
-           console.error(`${TagFrmwk}:Callback:failed to get RoutingManager ${err.message}`);
-           expect(false).assertTrue();
-       }
-       console.info('getRoutingManager Callback START.');
-       let count = 0;
-       routingManager.on('micStateChange', async (micStateChange) => {
-           console.info("Updated micState:" + JSON.stringify(micStateChange));
-           count++;
-       })
-       try {
-           let data = await audioManager.isMicrophoneMute();
-           console.info('Promise isMicrophoneMute PASS:' + data);
-           let micStatus = !data;
-           await audioManager.setMicrophoneMute(micStatus);
-           console.info('Promise setMicrophoneMute PASS:' + micStatus);
-       } catch (err) {
-           console.log('ERROR:' + JSON.stringify(err))
-           expect(false).assertTrue();
-           done();
-       }
-       await sleep(2000);
-       expect(count).assertEqual(1);
-       done();
-   })
-   /**
-    *@tc.number : SUB_MULTIMEDIA_AUDIO_ROUTING_MANAGER_MICSTATECHANGE_0300
-    *@tc.name : SUB_MULTIMEDIA_AUDIO_ROUTING_MANAGER_MICSTATECHANGE_0300
-    *@tc.desc : micStateChange
-    *@tc.size : MEDIUM
-    *@tc.type : Function
-    *@tc.level : Level 2
-    */
-   it('SUB_MULTIMEDIA_AUDIO_ROUTING_MANAGER_MICSTATECHANGE_0300', 2, async function (done) {
-       try {
-           var routingManager = await audioManager.getRoutingManager();
-       } catch (err) {
-           console.error(`${TagFrmwk}:Callback:failed to get RoutingManager ${err.message}`);
-           expect(false).assertTrue();
-       }
-       console.info('getRoutingManager Callback START.');
-       let count = 0;
-       routingManager.on('micStateChange', async (micStateChange) => {
-           console.info("Updated micState:" + JSON.stringify(micStateChange));
-           count++;
-       })
-       try {
-           let data = await audioManager.isMicrophoneMute();
-           console.info('Promise isMicrophoneMute PASS:' + data);
-           let micStatus = !data;
-           await audioManager.setMicrophoneMute(micStatus);
-           console.info('Promise setMicrophoneMute PASS:' + micStatus);
-           await audioManager.setMicrophoneMute(!micStatus);
-           console.info('Promise setMicrophoneMute PASS:' + (!micStatus));
-       } catch (err) {
-           console.log('ERROR:' + JSON.stringify(err))
-           expect(false).assertTrue();
-           done();
-       }
-       await sleep(2000);
-       expect(count).assertEqual(2);
-       done();
-   })
-   /**
-    *@tc.number : SUB_MULTIMEDIA_AUDIO_ROUTING_MANAGER_MICSTATECHANGE_0400
-    *@tc.name : SUB_MULTIMEDIA_AUDIO_ROUTING_MANAGER_MICSTATECHANGE_0400
-    *@tc.desc : micStateChange
-    *@tc.size : MEDIUM
-    *@tc.type : Function
-    *@tc.level : Level 2
-    */
-   it('SUB_MULTIMEDIA_AUDIO_ROUTING_MANAGER_MICSTATECHANGE_0400', 2, async function (done) {
-       try {
-           var routingManager = await audioManager.getRoutingManager();
-       } catch (err) {
-           console.error(`${TagFrmwk}:Callback:failed to get RoutingManager ${err.message}`);
-           expect(false).assertTrue();
-       }
-       let count = 0;
-       try {
-           console.info("enter SUB_AUDIO_MANAGER_micStateChange_004");
-           routingManager.on('micStateChange', async (micStateChange1) => {
-               console.info("Updated micState--001:" + JSON.stringify(micStateChange1));
-               routingManager.on('micStateChange', async (micStateChange) => {
-                   console.info("Updated micState--002:" + JSON.stringify(micStateChange));
-                   count++
-               })
-               let data = await audioManager.isMicrophoneMute();
-               console.info('Second Promise isMicrophoneMute PASS:' + data);
-               await audioManager.setMicrophoneMute(!data);
-               console.info('Second:Promise setMicrophoneMute PASS:' + (!data));
-           })
-           let data = await audioManager.isMicrophoneMute();
-           console.info('First Promise isMicrophoneMute PASS:' + data);
-           await audioManager.setMicrophoneMute(!data);
-           console.info('First:Promise setMicrophoneMute PASS:' + (!data));
-       } catch (err) {
-           console.log('ERROR:' + JSON.stringify(err))
-           expect(false).assertTrue();
-           done();
-       }
-       await sleep(2000);
-       expect(count).assertEqual(1);
-       done();
-   })
/**
*@tc.number : SUB_MULTIMEDIA_AUDIO_DEVICEFALG_0100
*@tc.name : NONE_DEVICES_FLAG
......