Skip to content
体验新版
项目
组织
正在加载...
登录
切换导航
打开侧边栏
2dot5
ClickHouse
提交
15bad674
C
ClickHouse
项目概览
2dot5
/
ClickHouse
通知
3
Star
0
Fork
0
代码
文件
提交
分支
Tags
贡献者
分支图
Diff
Issue
0
列表
看板
标记
里程碑
合并请求
0
DevOps
流水线
流水线任务
计划
Wiki
0
Wiki
分析
仓库
DevOps
项目成员
Pages
C
ClickHouse
项目概览
项目概览
详情
发布
仓库
仓库
文件
提交
分支
标签
贡献者
分支图
比较
Issue
0
Issue
0
列表
看板
标记
里程碑
合并请求
0
合并请求
0
Pages
DevOps
DevOps
流水线
流水线任务
计划
分析
分析
仓库分析
DevOps
Wiki
0
Wiki
成员
成员
收起侧边栏
关闭侧边栏
动态
分支图
创建新Issue
流水线任务
提交
Issue看板
体验新版 GitCode,发现更多精彩内容 >>
提交
15bad674
编写于
2月 02, 2020
作者:
A
Andrew Onyshchuk
浏览文件
操作
浏览文件
下载
电子邮件补丁
差异文件
Add AvroConfluent integration test
上级
b236f7a3
变更
5
隐藏空白更改
内联
并排
Showing
5 changed files
with
120 additions
and
1 deletion
+120
-1
dbms/tests/integration/helpers/cluster.py
dbms/tests/integration/helpers/cluster.py
+21
-0
dbms/tests/integration/helpers/docker_compose_kafka.yml
dbms/tests/integration/helpers/docker_compose_kafka.yml
+15
-0
dbms/tests/integration/image/Dockerfile
dbms/tests/integration/image/Dockerfile
+1
-1
dbms/tests/integration/test_format_avro_confluent/__init__.py
.../tests/integration/test_format_avro_confluent/__init__.py
+0
-0
dbms/tests/integration/test_format_avro_confluent/test.py
dbms/tests/integration/test_format_avro_confluent/test.py
+83
-0
未找到文件。
dbms/tests/integration/helpers/cluster.py
浏览文件 @
15bad674
...
...
@@ -20,6 +20,7 @@ from dicttoxml import dicttoxml
from
kazoo.client
import
KazooClient
from
kazoo.exceptions
import
KazooException
from
minio
import
Minio
from
confluent.schemaregistry.client
import
CachedSchemaRegistryClient
from
.client
import
Client
from
.hdfs_api
import
HDFSApi
...
...
@@ -122,6 +123,11 @@ class ClickHouseCluster:
self
.
minio_redirect_host
=
"redirect"
self
.
minio_redirect_port
=
80
# available when with_kafka == True
self
.
schema_registry_client
=
None
self
.
schema_registry_host
=
"schema-registry"
self
.
schema_registry_port
=
8081
self
.
docker_client
=
None
self
.
is_up
=
False
...
...
@@ -372,6 +378,19 @@ class ClickHouseCluster:
logging
.
warning
(
"Can't connect to Minio: %s"
,
str
(
ex
))
time
.
sleep
(
1
)
def wait_schema_registry_to_start(self, timeout=10):
    """Block until the Confluent Schema Registry container answers HTTP.

    Polls the registry (exposed on the host at ``localhost:8081`` by
    docker_compose_kafka.yml) once per second until it responds or
    ``timeout`` seconds elapse.

    On success the connected client is stored in
    ``self.schema_registry_client``.  On timeout an exception is raised so
    callers fail fast instead of proceeding with
    ``schema_registry_client`` still ``None``.

    :param timeout: maximum number of seconds to keep polling.
    :raises Exception: if the registry did not respond within ``timeout``.
    """
    sr_client = CachedSchemaRegistryClient('http://localhost:8081')
    start = time.time()
    while time.time() - start < timeout:
        try:
            # A request against the base URL is a cheap liveness probe.
            sr_client._send_request(sr_client.url)
            self.schema_registry_client = sr_client
            logging.info("Connected to SchemaRegistry")
            return
        except Exception as ex:
            logging.warning("Can't connect to SchemaRegistry: %s", str(ex))
            time.sleep(1)
    # Bug fix: previously this fell through silently on timeout, leaving
    # schema_registry_client unset and deferring the failure to a much
    # more confusing place in the test run.
    raise Exception("Can't wait SchemaRegistry to start")
def
start
(
self
,
destroy_dirs
=
True
):
if
self
.
is_up
:
return
...
...
@@ -415,6 +434,7 @@ class ClickHouseCluster:
if
self
.
with_kafka
and
self
.
base_kafka_cmd
:
subprocess_check_call
(
self
.
base_kafka_cmd
+
common_opts
+
[
'--renew-anon-volumes'
])
self
.
kafka_docker_id
=
self
.
get_instance_docker_id
(
'kafka1'
)
self
.
wait_schema_registry_to_start
(
120
)
if
self
.
with_hdfs
and
self
.
base_hdfs_cmd
:
subprocess_check_call
(
self
.
base_hdfs_cmd
+
common_opts
)
...
...
@@ -880,6 +900,7 @@ class ClickHouseInstance:
if
self
.
with_kafka
:
depends_on
.
append
(
"kafka1"
)
depends_on
.
append
(
"schema-registry"
)
if
self
.
with_zookeeper
:
depends_on
.
append
(
"zoo1"
)
...
...
dbms/tests/integration/helpers/docker_compose_kafka.yml
浏览文件 @
15bad674
...
...
@@ -29,3 +29,18 @@ services:
-
kafka_zookeeper
security_opt
:
-
label:disable
# Confluent Schema Registry, required by the AvroConfluent format tests.
schema-registry:
  image: confluentinc/cp-schema-registry:5.2.0
  hostname: schema-registry
  ports:
    # Published on the host so tests can reach it at http://localhost:8081.
    - "8081:8081"
  environment:
    SCHEMA_REGISTRY_HOST_NAME: schema-registry
    SCHEMA_REGISTRY_KAFKASTORE_SECURITY_PROTOCOL: PLAINTEXT
    # Schemas are stored in the kafka1 broker reachable inside the compose
    # network at kafka1:19092.
    SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: PLAINTEXT://kafka1:19092
  depends_on:
    - kafka_zookeeper
    - kafka1
  security_opt:
    - label:disable
dbms/tests/integration/image/Dockerfile
浏览文件 @
15bad674
...
...
@@ -35,7 +35,7 @@ RUN apt-get update \
ENV
TZ=Europe/Moscow
RUN
ln
-snf
/usr/share/zoneinfo/
$TZ
/etc/localtime
&&
echo
$TZ
>
/etc/timezone
RUN
pip
install
urllib3
==
1.23 pytest docker-compose
==
1.22.0 docker dicttoxml kazoo PyMySQL
psycopg2
==
2.7.5 pymongo tzlocal kafka-python protobuf redis aerospike pytest-timeout minio
RUN
pip
install
urllib3
==
1.23 pytest docker-compose
==
1.22.0 docker dicttoxml kazoo PyMySQL
psycopg2
==
2.7.5 pymongo tzlocal kafka-python protobuf redis aerospike pytest-timeout minio
rpm-confluent-schemaregistry
ENV
DOCKER_CHANNEL stable
ENV
DOCKER_VERSION 17.09.1-ce
...
...
dbms/tests/integration/test_format_avro_confluent/__init__.py
0 → 100644
浏览文件 @
15bad674
dbms/tests/integration/test_format_avro_confluent/test.py
0 → 100644
浏览文件 @
15bad674
import
json
import
logging
import
io
import
pytest
from
helpers.cluster
import
ClickHouseCluster
,
ClickHouseInstance
import
helpers.client
import
avro.schema
from
confluent.schemaregistry.client
import
CachedSchemaRegistryClient
from
confluent.schemaregistry.serializers
import
MessageSerializer
logging
.
getLogger
().
setLevel
(
logging
.
INFO
)
logging
.
getLogger
().
addHandler
(
logging
.
StreamHandler
())
@pytest.fixture(scope="module")
def cluster():
    """Module-scoped fixture: a one-node ClickHouse cluster with Kafka.

    Starts a ``ClickHouseCluster`` containing a single instance named
    ``dummy`` with ``with_kafka=True`` (which also brings up the schema
    registry), yields it to the tests, and always shuts it down afterwards.
    """
    # Bug fix: construct the cluster BEFORE entering try/finally.  In the
    # original code, if ClickHouseCluster(__file__) raised, the finally
    # clause referenced the unbound name `cluster` and masked the real
    # error with a NameError.
    cluster = ClickHouseCluster(__file__)
    try:
        cluster.add_instance("dummy", with_kafka=True)
        logging.info("Starting cluster...")
        cluster.start()
        logging.info("Cluster started")
        yield cluster
    finally:
        cluster.shutdown()
def run_query(instance, query, stdin=None, settings=None):
    # type: (ClickHouseInstance, str, object, dict) -> str
    """Execute *query* on *instance* over HTTP and return the raw response.

    :param instance: ClickHouse instance to query.
    :param query: SQL text to execute.
    :param stdin: optional request body (e.g. serialized rows to insert).
    :param settings: optional dict of query settings, passed as URL params.
    """
    logging.info("Running query '{}'...".format(query))
    # use http to force parsing on server
    response = instance.http_query(query, data=stdin, params=settings)
    logging.info("Query finished")
    return response
def test_select(cluster):
    # type: (ClickHouseCluster) -> None
    """Round-trip Confluent-framed Avro records through ClickHouse.

    Encodes three records against the cluster's schema registry, inserts
    them via the AvroConfluent input format, and checks they come back
    from a plain SELECT.
    """
    registry_client = cluster.schema_registry_client
    serializer = MessageSerializer(registry_client)

    schema = avro.schema.make_avsc_object({
        'name': 'test_record',
        'type': 'record',
        'fields': [
            {'name': 'value', 'type': 'long'}
        ]
    })

    # Concatenate three Confluent-framed Avro messages into one payload.
    buf = io.BytesIO()
    for value in range(0, 3):
        buf.write(serializer.encode_record_with_schema(
            'test_subject', schema, {'value': value}))
    payload = buf.getvalue()

    instance = cluster.instances["dummy"]  # type: ClickHouseInstance

    # The server resolves schema ids against the registry inside the
    # compose network, hence host/port from the cluster object.
    schema_registry_url = "http://{}:{}".format(
        cluster.schema_registry_host, cluster.schema_registry_port)

    run_query(instance, "create table avro_data(value Int64) engine = Memory()")
    settings = {'format_avro_schema_registry_url': schema_registry_url}
    run_query(instance, "insert into avro_data format AvroConfluent", payload, settings)
    stdout = run_query(instance, "select * from avro_data")
    assert list(map(str.split, stdout.splitlines())) == [
        ["0"],
        ["1"],
        ["2"],
    ]
编辑
预览
Markdown
is supported
0%
请重试
或
添加新附件
.
添加附件
取消
You are about to add
0
people
to the discussion. Proceed with caution.
先完成此消息的编辑!
取消
想要评论请
注册
或
登录