OpenHarmony / Xts Acts
Commit c60a31aa
Authored Nov 02, 2022 by lwx1121892
modify
Signed-off-by: lwx1121892 <liuxueqi3@huawei.com>
Parent: ae5fad0b
2 changed files with 155 additions and 146 deletions (+155 -146)

multimedia/audio/audio_js_standard/audioInterruptRender/entry/src/main/ets/MainAbility/app.ets  +139 -138
multimedia/audio/audio_js_standard/audioManager/src/main/js/test/AudioManagerApi9.test.js  +16 -8

multimedia/audio/audio_js_standard/audioInterruptRender/entry/src/main/ets/MainAbility/app.ets
@@ -10,7 +10,7 @@
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  * See the License for the specific language governing permissions and
- * limitations under the License.
+ * limitations under the License.
  */
 import audio from '@ohos.multimedia.audio';
 import fileio from '@ohos.fileio';
@@ -19,157 +19,158 @@ let fdRead;
 let readPath;
 let fdPath;
 let filePath;
 let TAG = 'InterruptHap:';
 var TAG1 = "Fa:SupportFunctionThree:MainAbility:";
 var listPush1 = "Fa_SupportFunctionThree_MainAbility_";
 var lifeList = [];
 export default {
     async onCreate() {
         console.log(TAG1 + '=============================================================================> onCreate');
         function sleep(ms) {
             return new Promise(resolve => setTimeout(resolve, ms));
         }
         async function getFdRead(pathName) {
             let context = await featureAbility.getContext();
-            console.info("case0 context is " + context);
+            console.info(TAG + "case0 context is " + context);
             await context.getFilesDir().then((data) => {
-                console.info("case1 getFilesDir is path " + data);
+                console.info(TAG + "case1 getFilesDir is path " + data);
                 filePath = data + '/' + pathName;
-                console.info('case4 filePath is ' + filePath);
+                console.info(TAG + 'case4 filePath is ' + filePath);
             })
             fdPath = 'fd://';
             await fileio.open(filePath).then((fdNumber) => {
                 fdPath = fdPath + '' + fdNumber;
                 fdRead = fdNumber;
-                console.info('[fileIO]case open fd success,fdPath is ' + fdPath);
-                console.info('[fileIO]case open fd success,fdRead is ' + fdRead);
+                console.info(TAG + '[fileIO]case open fd success,fdPath is ' + fdPath);
+                console.info(TAG + '[fileIO]case open fd success,fdRead is ' + fdRead);
             }, (err) => {
-                console.info('[fileIO]case open fd failed');
+                console.info(TAG + '[fileIO]case open fd failed');
             }).catch((err) => {
-                console.info('[fileIO]case catch open fd failed');
+                console.info(TAG + '[fileIO]case catch open fd failed');
             });
         }
         var AudioStreamInfo = {
             samplingRate: audio.AudioSamplingRate.SAMPLE_RATE_48000,
             channels: audio.AudioChannel.CHANNEL_2,
             sampleFormat: audio.AudioSampleFormat.SAMPLE_FORMAT_S32LE,
             encodingType: audio.AudioEncodingType.ENCODING_TYPE_RAW
         }
         var AudioRendererInfo = {
             content: audio.ContentType.CONTENT_TYPE_RINGTONE,
             usage: audio.StreamUsage.STREAM_USAGE_NOTIFICATION_RINGTONE,
             rendererFlags: 0
         }
         var AudioRendererOptions = {
             streamInfo: AudioStreamInfo,
             rendererInfo: AudioRendererInfo
         }
         readPath = 'StarWars10s-2C-48000-4SW.wav';
         getFdRead(readPath);
         var audioRen;
         await audio.createAudioRenderer(AudioRendererOptions).then(async function (data) {
             audioRen = data;
-            console.info('AudioFrameworkRenderLog: AudioRender Created : Success : Stream Type: SUCCESS' + audioRen);
+            console.info(TAG + 'AudioFrameworkRenderLog: AudioRender Created : Success : Stream Type: SUCCESS' + audioRen);
         }).catch((err) => {
-            console.info('AudioFrameworkRenderLog: AudioRender Created : ERROR : ' + err.message);
+            console.info(TAG + 'AudioFrameworkRenderLog: AudioRender Created : ERROR : ' + err.message);
         });
-        console.info('AudioFrameworkRenderLog: AudioRenderer : STATE : ' + audioRen.state);
+        console.info(TAG + 'AudioFrameworkRenderLog: AudioRenderer : STATE : ' + audioRen.state);
         await audioRen.start().then(async function () {
-            console.info('AudioFrameworkRenderLog: renderInstant started :SUCCESS ');
+            console.info(TAG + 'AudioFrameworkRenderLog: renderInstant started :SUCCESS ');
         }).catch((err) => {
-            console.info('AudioFrameworkRenderLog: renderInstant start :ERROR : ' + err.message);
+            console.info(TAG + 'AudioFrameworkRenderLog: renderInstant start :ERROR : ' + err.message);
         });
-        console.info('AudioFrameworkRenderLog: AudioRenderer : STATE : ' + audioRen.state);
+        console.info(TAG + 'AudioFrameworkRenderLog: AudioRenderer : STATE : ' + audioRen.state);
         var bufferSize;
         await audioRen.getBufferSize().then(async function (data) {
-            console.info('AudioFrameworkRenderLog: getBufferSize :SUCCESS ' + data);
+            console.info(TAG + 'AudioFrameworkRenderLog: getBufferSize :SUCCESS ' + data);
             bufferSize = data;
         }).catch((err) => {
-            console.info('AudioFrameworkRenderLog: getBufferSize :ERROR : ' + err.message);
+            console.info(TAG + 'AudioFrameworkRenderLog: getBufferSize :ERROR : ' + err.message);
         });
         let ss = fileio.fdopenStreamSync(fdRead, 'r');
-        console.info('AudioFrameworkRenderLog:case 2:AudioFrameworkRenderLog: File Path: ' + ss);
+        console.info(TAG + 'AudioFrameworkRenderLog:case 2:AudioFrameworkRenderLog: File Path: ' + ss);
         let discardHeader = new ArrayBuffer(44);
-        console.info('AudioFrameworkRenderLog:case 2-1:AudioFrameworkRenderLog: File Path: ');
+        console.info(TAG + 'AudioFrameworkRenderLog:case 2-1:AudioFrameworkRenderLog: File Path: ');
         ss.readSync(discardHeader);
-        console.info('AudioFrameworkRenderLog:case 2-2:AudioFrameworkRenderLog: File Path: ');
+        console.info(TAG + 'AudioFrameworkRenderLog:case 2-2:AudioFrameworkRenderLog: File Path: ');
         let totalSize = fileio.fstatSync(fdRead).size;
-        console.info('AudioFrameworkRenderLog:case 3 : AudioFrameworkRenderLog: File totalSize size: ' + totalSize);
+        console.info(TAG + 'AudioFrameworkRenderLog:case 3 : AudioFrameworkRenderLog: File totalSize size: ' + totalSize);
         totalSize = totalSize - 44;
-        console.info('AudioFrameworkRenderLog: File size : Removing header: ' + totalSize);
+        console.info(TAG + 'AudioFrameworkRenderLog: File size : Removing header: ' + totalSize);
         let rlen = 0;
         while (rlen < totalSize / 4) {
             let buf = new ArrayBuffer(bufferSize);
             rlen += ss.readSync(buf);
-            console.info('InterruptHap:BufferAudioFramework: bytes read from file: ' + rlen);
+            console.info(TAG + 'InterruptHap:BufferAudioFramework: bytes read from file: ' + rlen);
             await audioRen.write(buf);
         }
         let activated = false;
         let InterruptHint = 0;
         let audioManager = audio.getAudioManager();
         let interAudioInterrupt = {
             streamUsage: 1,
             contentType: 0,
             pauseWhenDucked: true
         };
         audioManager.on('interrupt', interAudioInterrupt, (InterruptAction) => {
-            console.info('come in FuZhuHap interrupt');
+            console.info(TAG + 'come in FuZhuHap interrupt');
             if (InterruptAction.actionType != undefined && InterruptAction.actionType != null) {
-                console.info('InterruptHap An event to gain the audio focus ========================= starts.');
-                console.info(`Focus gain event: ${InterruptAction} `);
+                console.info(TAG + 'InterruptHap An event to gain the audio focus ========================= starts.');
+                console.info(TAG + `Focus gain event: ${InterruptAction} `);
                 activated = InterruptAction.activated;
                 InterruptHint = InterruptAction.hint
-                console.info('activated ============ is :' + activated);
-                console.info('InterruptHint ================ is :' + InterruptHint);
-                console.info('InterruptAction.actionType ============ is :' + InterruptAction.actionType);
+                console.info(TAG + 'activated ============ is :' + activated);
+                console.info(TAG + 'InterruptHint ================ is :' + InterruptHint);
+                console.info(TAG + 'InterruptAction.actionType ============ is :' + InterruptAction.actionType);
             }
         });
-        console.info('AudioFrameworkRenderLog: Renderer after read');
+        console.info(TAG + 'AudioFrameworkRenderLog: Renderer after read');
         await sleep(3000);
-        await audioRen.drain().then(async function () {
-            console.info('AudioFrameworkRenderLog: Renderer drained : SUCCESS');
+        // await audioRen.drain().then(async function () {
+        console.info(TAG + 'AudioFrameworkRenderLog: Renderer drained : SUCCESS');
         let wantInfo = {
             want: {
                 bundleName: "com.example.audiorenderinterrupt",
                 abilityName: "com.example.entry.MainAbility"
             },
             resultCode: 1111
         }
         featureAbility.terminateSelfWithResult(wantInfo).then(() => {
-            console.info('terminateSelf ================================== success')
+            console.info(TAG + 'terminateSelf ================================== success')
         }).catch(() => {
-            console.info('terminateSelf ==================================== fail')
+            console.info(TAG + 'terminateSelf ==================================== fail')
         })
-        }).catch((err) => {
-            console.error('AudioFrameworkRenderLog: Renderer drain: ERROR : ' + err.message);
-        });
+        // }).catch((err) => {
+        //     console.error('AudioFrameworkRenderLog: Renderer drain: ERROR : ' + err.message);
+        // });
     },
     onDestroy() {
         console.log(TAG1 + 'onDestroy');
     },
     async onActive() {
         console.log(TAG1 + 'onActive');
     },
     onInactive() {
         console.log(TAG1 + 'onInactive');
     },
     onShow() {
         console.log(TAG1 + 'onShow');
     },
     onHide() {
         console.log(TAG1 + 'onHide');
     }
 }
\ No newline at end of file
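
Net effect of the app.ets change: every console.info inside onCreate now carries the existing TAG constant ('InterruptHap:'), and the audioRen.drain() wrapper is commented out so the terminateSelfWithResult path runs unconditionally after the 3 s sleep. As a side note, the same tag-prefix pattern can be expressed as a tiny helper; the sketch below is illustrative only, the logInfo helper and the sample message are hypothetical and not part of this commit:

// Illustrative sketch only (not part of this commit): a tag-prefixed logger
// equivalent to writing console.info(TAG + '...') by hand at every call site.
const TAG = 'InterruptHap:';          // same tag constant app.ets already defines

function logInfo(msg) {
    console.info(TAG + msg);          // prefix the tag once, then forward to console.info
}

// hypothetical usage:
logInfo('[fileIO]case open fd success,fdPath is fd://43');

Whether to keep explicit TAG + concatenation or a helper is a style choice; the commit keeps the explicit concatenation used elsewhere in the suite.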
multimedia/audio/audio_js_standard/audioManager/src/main/js/test/AudioManagerApi9.test.js
@@ -1056,8 +1056,10 @@ describe('audioManagerApi9', function () {
      *@tc.level : Level 2
      */
     it('SUB_MULTIMEDIA_AUDIO_ROUTING_MANAGER_MICSTATECHANGE_0100', 2, async function (done) {
-        let volumManager = audioManager.getVolumeManager();
-        let VolumeGroupManager = await volumManager.getVolumeGroupManager();
+        let audioVolumeManager = audioManager.getVolumeManager();
+        let volumeGroupInfos = await audioVolumeManager.getVolumeGroupInfos(audio.LOCAL_NETWORK_ID);
+        let groupId_ = volumeGroupInfos[0].groupId
+        let VolumeGroupManager = await audioVolumeManager.getVolumeGroupManager(groupId_);
         let count = 0;
         console.info('getVolumeGroupManager Callback START.');
         VolumeGroupManager.on('micStateChange', async (micStateChange) => {
@@ -1091,8 +1093,10 @@ describe('audioManagerApi9', function () {
      *@tc.level : Level 2
      */
     it('SUB_MULTIMEDIA_AUDIO_ROUTING_MANAGER_MICSTATECHANGE_0200', 2, async function (done) {
-        let volumManager = audioManager.getVolumeManager();
-        let VolumeGroupManager = await volumManager.getVolumeGroupManager();
+        let audioVolumeManager = audioManager.getVolumeManager();
+        let volumeGroupInfos = await audioVolumeManager.getVolumeGroupInfos(audio.LOCAL_NETWORK_ID);
+        let groupId_ = volumeGroupInfos[0].groupId
+        let VolumeGroupManager = await audioVolumeManager.getVolumeGroupManager(groupId_);
         console.info('getVolumeGroupManager Callback START.');
         let count = 0;
         VolumeGroupManager.on('micStateChange', async (micStateChange) => {
@@ -1124,8 +1128,10 @@ describe('audioManagerApi9', function () {
      *@tc.level : Level 2
      */
     it('SUB_MULTIMEDIA_AUDIO_ROUTING_MANAGER_MICSTATECHANGE_0300', 2, async function (done) {
-        let volumManager = audioManager.getVolumeManager();
-        let VolumeGroupManager = await volumManager.getVolumeGroupManager();
+        let audioVolumeManager = audioManager.getVolumeManager();
+        let volumeGroupInfos = await audioVolumeManager.getVolumeGroupInfos(audio.LOCAL_NETWORK_ID);
+        let groupId_ = volumeGroupInfos[0].groupId
+        let VolumeGroupManager = await audioVolumeManager.getVolumeGroupManager(groupId_);
         console.info('getVolumeGroupManager Callback START.');
         let count = 0;
         VolumeGroupManager.on('micStateChange', async (micStateChange) => {
@@ -1159,8 +1165,10 @@ describe('audioManagerApi9', function () {
      *@tc.level : Level 2
      */
     it('SUB_MULTIMEDIA_AUDIO_ROUTING_MANAGER_MICSTATECHANGE_0400', 2, async function (done) {
-        let volumManager = audioManager.getVolumeManager();
-        let VolumeGroupManager = await volumManager.getVolumeGroupManager();
+        let audioVolumeManager = audioManager.getVolumeManager();
+        let volumeGroupInfos = await audioVolumeManager.getVolumeGroupInfos(audio.LOCAL_NETWORK_ID);
+        let groupId_ = volumeGroupInfos[0].groupId
+        let VolumeGroupManager = await audioVolumeManager.getVolumeGroupManager(groupId_);
         let count = 0;
         try {
             console.info("enter SUB_MULTIMEDIA_AUDIO_ROUTING_MANAGER_MICSTATECHANGE_0400");
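
All four micStateChange cases in AudioManagerApi9.test.js stop calling volumManager.getVolumeGroupManager() with no arguments and instead resolve a concrete group id first via getVolumeGroupInfos(audio.LOCAL_NETWORK_ID). A hedged sketch of that lookup factored into one helper, assuming the same '@ohos.multimedia.audio' APIs the tests already use; the helper name getLocalVolumeGroupManager is an assumption, not part of the test file:

// Sketch only: resolve the local volume group manager the way the updated tests do.
import audio from '@ohos.multimedia.audio';

async function getLocalVolumeGroupManager() {
    let audioManager = audio.getAudioManager();
    let audioVolumeManager = audioManager.getVolumeManager();
    // Enumerate volume groups on the local device, then open a manager for the first group's id,
    // mirroring the four updated micStateChange cases.
    let volumeGroupInfos = await audioVolumeManager.getVolumeGroupInfos(audio.LOCAL_NETWORK_ID);
    let groupId_ = volumeGroupInfos[0].groupId;
    return await audioVolumeManager.getVolumeGroupManager(groupId_);
}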