Skip to content
体验新版
项目
组织
正在加载...
登录
切换导航
打开侧边栏
doujutun3207
flink
提交
9e3bac39
F
flink
项目概览
doujutun3207
/
flink
与 Fork 源项目一致
从无法访问的项目Fork
通知
24
Star
0
Fork
0
代码
文件
提交
分支
Tags
贡献者
分支图
Diff
Issue
0
列表
看板
标记
里程碑
合并请求
0
Wiki
0
Wiki
分析
仓库
DevOps
项目成员
Pages
F
flink
项目概览
项目概览
详情
发布
仓库
仓库
文件
提交
分支
标签
贡献者
分支图
比较
Issue
0
Issue
0
列表
看板
标记
里程碑
合并请求
0
合并请求
0
Pages
分析
分析
仓库分析
DevOps
Wiki
0
Wiki
成员
成员
收起侧边栏
关闭侧边栏
动态
分支图
创建新Issue
提交
Issue看板
体验新版 GitCode,发现更多精彩内容 >>
未验证
提交
9e3bac39
编写于
12月 09, 2017
作者:
D
Dian Fu
提交者:
Dawid Wysakowicz
1月 02, 2018
浏览文件
操作
浏览文件
下载
电子邮件补丁
差异文件
[FLINK-8226] [cep] Dangling reference generated after NFA clean up timed out SharedBufferEntry
This closes #5141
上级
3fdee00e
变更
2
隐藏空白更改
内联
并排
Showing
2 changed files
with
71 additions
and
11 deletions
+71
-11
flink-libraries/flink-cep/src/main/java/org/apache/flink/cep/nfa/SharedBuffer.java
.../src/main/java/org/apache/flink/cep/nfa/SharedBuffer.java
+37
-11
flink-libraries/flink-cep/src/test/java/org/apache/flink/cep/nfa/NFATest.java
...k-cep/src/test/java/org/apache/flink/cep/nfa/NFATest.java
+34
-0
未找到文件。
flink-libraries/flink-cep/src/main/java/org/apache/flink/cep/nfa/SharedBuffer.java
浏览文件 @
9e3bac39
...
@@ -191,14 +191,12 @@ public class SharedBuffer<K extends Serializable, V> implements Serializable {
...
@@ -191,14 +191,12 @@ public class SharedBuffer<K extends Serializable, V> implements Serializable {
*/
*/
public
boolean
prune
(
long
pruningTimestamp
)
{
public
boolean
prune
(
long
pruningTimestamp
)
{
Iterator
<
Map
.
Entry
<
K
,
SharedBufferPage
<
K
,
V
>>>
iter
=
pages
.
entrySet
().
iterator
();
Iterator
<
Map
.
Entry
<
K
,
SharedBufferPage
<
K
,
V
>>>
iter
=
pages
.
entrySet
().
iterator
();
boolean
pruned
=
false
;
List
<
SharedBufferEntry
<
K
,
V
>>
prunedEntries
=
new
ArrayList
<>()
;
while
(
iter
.
hasNext
())
{
while
(
iter
.
hasNext
())
{
SharedBufferPage
<
K
,
V
>
page
=
iter
.
next
().
getValue
();
SharedBufferPage
<
K
,
V
>
page
=
iter
.
next
().
getValue
();
if
(
page
.
prune
(
pruningTimestamp
))
{
page
.
prune
(
pruningTimestamp
,
prunedEntries
);
pruned
=
true
;
}
if
(
page
.
isEmpty
())
{
if
(
page
.
isEmpty
())
{
// delete page if it is empty
// delete page if it is empty
...
@@ -206,7 +204,14 @@ public class SharedBuffer<K extends Serializable, V> implements Serializable {
...
@@ -206,7 +204,14 @@ public class SharedBuffer<K extends Serializable, V> implements Serializable {
}
}
}
}
return
pruned
;
if
(!
prunedEntries
.
isEmpty
())
{
for
(
Map
.
Entry
<
K
,
SharedBufferPage
<
K
,
V
>>
entry
:
pages
.
entrySet
())
{
entry
.
getValue
().
removeEdges
(
prunedEntries
);
}
return
true
;
}
else
{
return
false
;
}
}
}
/**
/**
...
@@ -451,25 +456,21 @@ public class SharedBuffer<K extends Serializable, V> implements Serializable {
...
@@ -451,25 +456,21 @@ public class SharedBuffer<K extends Serializable, V> implements Serializable {
* Removes all entries from the map whose timestamp is smaller than the pruning timestamp.
* Removes all entries from the map whose timestamp is smaller than the pruning timestamp.
*
*
* @param pruningTimestamp Timestamp for the pruning
* @param pruningTimestamp Timestamp for the pruning
* @return {@code true} if pruning happened
*/
*/
public
boolean
prune
(
long
pruningTimestamp
)
{
public
void
prune
(
long
pruningTimestamp
,
List
<
SharedBufferEntry
<
K
,
V
>>
prunedEntries
)
{
Iterator
<
Map
.
Entry
<
ValueTimeWrapper
<
V
>,
SharedBufferEntry
<
K
,
V
>>>
iterator
=
entries
.
entrySet
().
iterator
();
Iterator
<
Map
.
Entry
<
ValueTimeWrapper
<
V
>,
SharedBufferEntry
<
K
,
V
>>>
iterator
=
entries
.
entrySet
().
iterator
();
boolean
continuePruning
=
true
;
boolean
continuePruning
=
true
;
boolean
pruned
=
false
;
while
(
iterator
.
hasNext
()
&&
continuePruning
)
{
while
(
iterator
.
hasNext
()
&&
continuePruning
)
{
SharedBufferEntry
<
K
,
V
>
entry
=
iterator
.
next
().
getValue
();
SharedBufferEntry
<
K
,
V
>
entry
=
iterator
.
next
().
getValue
();
if
(
entry
.
getValueTime
().
getTimestamp
()
<=
pruningTimestamp
)
{
if
(
entry
.
getValueTime
().
getTimestamp
()
<=
pruningTimestamp
)
{
prunedEntries
.
add
(
entry
);
iterator
.
remove
();
iterator
.
remove
();
pruned
=
true
;
}
else
{
}
else
{
continuePruning
=
false
;
continuePruning
=
false
;
}
}
}
}
return
pruned
;
}
}
public
boolean
isEmpty
()
{
public
boolean
isEmpty
()
{
...
@@ -480,6 +481,15 @@ public class SharedBuffer<K extends Serializable, V> implements Serializable {
...
@@ -480,6 +481,15 @@ public class SharedBuffer<K extends Serializable, V> implements Serializable {
return
entries
.
remove
(
valueTime
);
return
entries
.
remove
(
valueTime
);
}
}
/**
 * Remove edges with the specified targets for the entries.
 *
 * <p>Delegates to every {@link SharedBufferEntry} in this page so each entry can
 * drop its own edges that point at a pruned entry.
 *
 * @param prunedEntries entries that were removed during pruning
 */
private void removeEdges(final List<SharedBufferEntry<K, V>> prunedEntries) {
	for (Map.Entry<ValueTimeWrapper<V>, SharedBufferEntry<K, V>> entry : entries.entrySet()) {
		entry.getValue().removeEdges(prunedEntries);
	}
}
@Override
@Override
public
String
toString
()
{
public
String
toString
()
{
StringBuilder
builder
=
new
StringBuilder
();
StringBuilder
builder
=
new
StringBuilder
();
...
@@ -569,6 +579,22 @@ public class SharedBuffer<K extends Serializable, V> implements Serializable {
...
@@ -569,6 +579,22 @@ public class SharedBuffer<K extends Serializable, V> implements Serializable {
edges
.
add
(
edge
);
edges
.
add
(
edge
);
}
}
/**
 * Remove edges with the specified targets.
 *
 * <p>An edge is removed when its target is the very same object (reference
 * equality) as one of the pruned entries — the pruned objects themselves were
 * taken out of the buffer, so any edge to them is dangling.
 *
 * @param prunedEntries entries that were removed during pruning
 */
private void removeEdges(final List<SharedBufferEntry<K, V>> prunedEntries) {
	Iterator<SharedBufferEdge<K, V>> iterator = edges.iterator();
	while (iterator.hasNext()) {
		SharedBufferEdge<K, V> edge = iterator.next();
		for (SharedBufferEntry<K, V> prunedEntry : prunedEntries) {
			// Reference equality is intentional: identity, not value, marks
			// the exact pruned objects.
			if (prunedEntry == edge.getTarget()) {
				// Iterator.remove() is the only safe removal during iteration.
				iterator.remove();
				break;
			}
		}
	}
}
public
boolean
remove
()
{
public
boolean
remove
()
{
if
(
page
!=
null
)
{
if
(
page
!=
null
)
{
page
.
remove
(
valueTime
);
page
.
remove
(
valueTime
);
...
...
flink-libraries/flink-cep/src/test/java/org/apache/flink/cep/nfa/NFATest.java
浏览文件 @
9e3bac39
...
@@ -26,6 +26,7 @@ import org.apache.flink.cep.pattern.conditions.IterativeCondition;
...
@@ -26,6 +26,7 @@ import org.apache.flink.cep.pattern.conditions.IterativeCondition;
import
org.apache.flink.cep.pattern.conditions.SimpleCondition
;
import
org.apache.flink.cep.pattern.conditions.SimpleCondition
;
import
org.apache.flink.core.memory.DataInputViewStreamWrapper
;
import
org.apache.flink.core.memory.DataInputViewStreamWrapper
;
import
org.apache.flink.core.memory.DataOutputViewStreamWrapper
;
import
org.apache.flink.core.memory.DataOutputViewStreamWrapper
;
import
org.apache.flink.streaming.api.windowing.time.Time
;
import
org.apache.flink.streaming.runtime.streamrecord.StreamRecord
;
import
org.apache.flink.streaming.runtime.streamrecord.StreamRecord
;
import
org.apache.flink.util.TestLogger
;
import
org.apache.flink.util.TestLogger
;
...
@@ -176,6 +177,26 @@ public class NFATest extends TestLogger {
...
@@ -176,6 +177,26 @@ public class NFATest extends TestLogger {
assertEquals
(
expectedPatterns
,
actualPatterns
);
assertEquals
(
expectedPatterns
,
actualPatterns
);
}
}
/**
 * Regression test for FLINK-8226: after window pruning times out entries,
 * serializing the NFA must not fail on dangling references to pruned
 * SharedBufferEntry objects.
 */
@Test
public void testTimeoutWindowPruning2() throws IOException {
	// Window of 2 ms with events 1 ms apart forces pruning of early entries.
	NFA<Event> nfa = createLoopingNFA(2);
	List<StreamRecord<Event>> streamEvents = new ArrayList<>();
	streamEvents.add(new StreamRecord<>(new Event(1, "loop", 1.0), 101L));
	streamEvents.add(new StreamRecord<>(new Event(2, "loop", 2.0), 102L));
	streamEvents.add(new StreamRecord<>(new Event(3, "loop", 3.0), 103L));
	streamEvents.add(new StreamRecord<>(new Event(4, "loop", 4.0), 104L));
	streamEvents.add(new StreamRecord<>(new Event(5, "loop", 5.0), 105L));
	runNFA(nfa, streamEvents);

	NFA.NFASerializer<Event> serializer = new NFA.NFASerializer<>(Event.createTypeSerializer());

	// serialize — must succeed; before the fix this hit dangling references.
	// try-with-resources replaces the manual baos.close().
	try (ByteArrayOutputStream baos = new ByteArrayOutputStream()) {
		serializer.serialize(nfa, new DataOutputViewStreamWrapper(baos));
	}
}
public
<
T
>
Collection
<
Map
<
String
,
List
<
T
>>>
runNFA
(
NFA
<
T
>
nfa
,
List
<
StreamRecord
<
T
>>
inputs
)
{
public
<
T
>
Collection
<
Map
<
String
,
List
<
T
>>>
runNFA
(
NFA
<
T
>
nfa
,
List
<
StreamRecord
<
T
>>
inputs
)
{
Set
<
Map
<
String
,
List
<
T
>>>
actualPatterns
=
new
HashSet
<>();
Set
<
Map
<
String
,
List
<
T
>>>
actualPatterns
=
new
HashSet
<>();
...
@@ -358,4 +379,17 @@ public class NFATest extends TestLogger {
...
@@ -358,4 +379,17 @@ public class NFATest extends TestLogger {
return
nfa
;
return
nfa
;
}
}
/**
 * Builds an NFA for a looping pattern ("loop" occurring three or more times)
 * constrained to the given time window.
 *
 * @param windowLength length of the pattern window in milliseconds
 * @return compiled NFA (without timeout handling)
 */
private NFA<Event> createLoopingNFA(long windowLength) {
	Pattern<Event, ?> pattern = Pattern.<Event>begin("loop").where(new SimpleCondition<Event>() {
		private static final long serialVersionUID = 5726188262756267490L;

		@Override
		public boolean filter(Event value) throws Exception {
			return value.getName().equals("loop");
		}
	}).timesOrMore(3).within(Time.milliseconds(windowLength));

	// false: no timeout handling needed for these tests.
	return NFACompiler.compile(pattern, Event.createTypeSerializer(), false);
}
}
}
编辑
预览
Markdown
is supported
0%
请重试
或
添加新附件
.
添加附件
取消
You are about to add
0
people
to the discussion. Proceed with caution.
先完成此消息的编辑!
取消
想要评论请
注册
或
登录