<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN" "http://www.w3.org/TR/html4/strict.dtd">
<html xmlns:og="http://ogp.me/ns#">

<head>
	<title>Kafka 中文文档 - ApacheCN</title>
	<link rel='stylesheet' href='./css/styles.css' type='text/css'>
	<link rel='stylesheet' href='./css/syntax-highlighting.css' type='text/css'>
	<link rel="icon" type="image/gif" href="./images/apache_feather.gif">
	<meta name="robots" content="index,follow" />
	<meta name="language" content="en" />
	<meta name="keywords" content="apache kafka messaging queuing distributed stream processing">
	<meta name="description" content="Apache Kafka: A Distributed Streaming Platform.">
	<meta http-equiv='Content-Type' content='text/html;charset=utf-8' />
	<meta name="viewport" content="initial-scale = 1.0,maximum-scale = 1.0" />
	<meta property="og:title" content="Apache Kafka" />
	<meta property="og:image" content="http://apache-kafka.org/images/apache-kafka.png" />
	<meta property="og:description" content="Apache Kafka: A Distributed Streaming Platform." />
	<meta property="og:site_name" content="Apache Kafka" />
	<meta property="og:type" content="website" />
	<link href="https://fonts.googleapis.com/css?family=Cutive+Mono|Roboto:400,700,900" rel="stylesheet">
	<script src="./js/jquery.min.js"></script>
	<script async src="./js/apachecn/googletagmanager.js"></script>
	<script>
		window.dataLayer = window.dataLayer || [];
		function gtag() { dataLayer.push(arguments); }
		gtag('js', new Date());

		gtag('config', 'UA-102475051-9');
	</script>

	<script>
		var _hmt = _hmt || [];
		(function () {
			var hm = document.createElement("script");
			hm.src = "https://hm.baidu.com/hm.js?9f2b74b80ab8aafb5970835acf96a0ea";
			var s = document.getElementsByTagName("script")[0];
			s.parentNode.insertBefore(hm, s);
		})();
	</script>

	<script>
		(function () {
			var bp = document.createElement('script');
			var curProtocol = window.location.protocol.split(':')[0];
			if (curProtocol === 'https') {
				bp.src = 'https://zz.bdstatic.com/linksubmit/push.js';
			}
			else {
				bp.src = 'http://push.zhanzhang.baidu.com/push.js';
			}
			var s = document.getElementsByTagName("script")[0];
			s.parentNode.insertBefore(bp, s);
		})();
	</script>
	<script>
		// DO NOT NEED TO UPDATE
		// Legacy versions of the documentation that we do not apply the frontend redirect to.
		// These docs are written as a single, very long file, so there is no need to re-route.
		var legacyDocPaths = [
			'./07/documentation',
			'./07/documentation/',
			'./08/documentation',
			'./08/documentation/',
			'./081/documentation',
			'./081/documentation/',
			'./082/documentation',
			'./082/documentation/',
			'./090/documentation',
			'./090/documentation/',
			'./0100/documentation',
			'./0100/documentation/'
		];

		// Any direct request for Streams documentation in docs versions prior to 0101
		// Redirect these requests to the standalone Streams doc page
		var currentPath = window.location.pathname;
		var shouldRedirect = !legacyDocPaths.includes(currentPath);
		var isDocumenationPage = currentPath.includes('/documentation');

		var hasNotSpecifiedFullPath = !currentPath.includes('/documentation/streams') && !currentPath.includes('/documentation/streams/');

		// Look for legacy anchors to clue us in on what full path the user needs
		// Add more as needed
		var specifiedStreamsAnchor = window.location.hash.includes('#streams_');

		if (shouldRedirect && isDocumenationPage && hasNotSpecifiedFullPath) {
			if (specifiedStreamsAnchor) {
				window.location.pathname = currentPath + 'streams';
			}
		}
	</script>
</head>
<!--#include virtual="includes/_header.htm" -->

<script>
    // powered by items
    var poweredByItems = [
    {
        "link": "https://www.nytimes.com",
        "logo": "NYT.jpg",
        "logoBgColor": "#FFFFFF",
        "description": "<a href='https://open.nytimes.com/publishing-with-apache-kafka-at-the-new-york-times-7f0e3b7d2077'>The New York Times uses Apache Kafka </a>and the Kafka Streams API to store and distribute, in real-time, published content to the various applications and systems that make it available to the readers."
    }, {
        "link": "http://pinterest.com",
        "logo": "pinterest.png",
        "logoBgColor": "#ffffff",
        "description": "<a href='https://medium.com/@Pinterest_Engineering/using-kafka-streams-api-for-predictive-budgeting-9f58d206c996'>Pinterest uses Apache Kafka and the Kafka Streams API</a> at large scale to power the real-time, predictive budgeting system of their advertising infrastructure. With Kafka Streams, spend predictions are more accurate than ever."
    }, {
        "link": "http://www.zalando.com",
        "logo": "zalando.jpg",
        "logoBgColor": "#ffffff",
        "description": "As the leading online fashion retailer in Europe, Zalando uses Kafka as an ESB (Enterprise Service Bus), which helps us in transitioning from a monolithic to a micro services architecture. Using Kafka for processing <a href 'https://kafka-summit.org/sessions/using-kstreams-ktables-calculate-real-time-domain-rankings/' target=blank'> event streams</a> enables our technical team to do near-real time business intelligence."
    }, {
        "link": "https://linecorp.com/",
        "logo": "line.png",
        "logoBgColor": "#00b900",
        "description": "LINE uses Apache Kafka as a central datahub for our services to communicate to one another. Hundreds of billions of messages are produced daily and are used to execute various business logic, threat detection, search indexing and data analysis. LINE leverages Kafka Streams to reliably transform and filter topics enabling sub topics consumers can efficiently consume, meanwhile retaining easy maintainability thanks to its sophisticated yet minimal code base."
    }, {
        "link": "https://www.rabobank.com",
        "logo": "rabobank.jpg",
        "logoBgColor": "#ffffff",
        "description": "Rabobank is one of the 3 largest banks in the Netherlands. Its digital nervous system, the Business Event Bus, is powered by Apache Kafka. It is used by an increasing amount of financial processes and services, one which is Rabo Alerts. This service alerts customers in real-time upon financial events and is built using Kafka Streams."
    }, {
        "link": "http://addthis.com/",
        "logo": "addthis.png",
        "logoBgColor": "#ffffff",
        "description": "Apache Kafka is used at AddThis to collect events generated by our data network and broker that data to our analytics clusters and real-time web analytics platform."
    }, {
        "link": "http://www.airbnb.com/",
        "logo": "airbnb.png",
        "logoBgColor": "#ffffff",
        "description": "Used in our event pipeline, exception tracking & more to come."
    }, {
        "link": "http://www.ancestry.com/",
        "logo": "ancestry.svg",
        "logoBgColor": "#ffffff",
        "description": "Kafka is used as the <a href='http://blogs.ancestry.com/techroots/on-track-to-data-driven' target='_blank'event log processing pipeline </a>for delivering better personalized product and service to our customers."
    }, {
        "link": "http://www.ants.vn/",
        "logo": "ants.png",
        "logoBgColor": "#ffffff",
        "description": "Ants.vn use Kafka in production for stream processing and log transfer (over 5B messages/month and growing)"
    }, {
        "link": "https://boundary.com/",
        "logo": "boundary.gif",
        "logoBgColor": "#ffffff",
        "description": "Apache Kafka aggregates high-flow message streams into a unified distributed pubsub service, brokering the data for other internal systems as part of Boundary's real-time network analytics infrastructure."
    }, {
        "link": "https://www.box.com/",
        "logo": "box.png",
        "logoBgColor": "#ffffff",
        "description": "At Box, Kafka is used for the production analytics pipeline & real time monitoring infrastructure. We are planning to use Kafka for some of the new products & features"
    }, {
        "link": "http://www.cerner.com/",
        "logo": "cerner.png",
        "logoBgColor": "#ffffff",
        "description": "Kafka is used with HBase and Storm as described <a href='http://blog.cloudera.com/blog/2014/11/how-cerner-uses-cdh-with-apache-kafka/' target='_blank'here.</a>"
    }, {
        "link": "https://www.coursera.org/",
        "logo": "coursera.png",
        "logoBgColor": "#ffffff",
        "description": "At Coursera, Kafka powers education at scale, serving as the data pipeline for realtime learning analytics/dashboards."
    }, {
        "link": "https://www.cloudflare.com/",
        "logo": "cloudfare.png",
        "logoBgColor": "#ffffff",
        "description": "CloudFlare uses Kafka for our log processing and analytics pipeline, collecting hundreds of billions of events/day data from a thousands of servers."
    }, {
        "link": "http://www.cloudphysics.com/",
        "logo": "cloudphysics.png",
        "logoBgColor": "#ffffff",
        "description": "Kafka is powering our high-flow event pipeline that aggregates over 1.2 billion metric series from 1000+ data centers for near-to-real time data center operational analytics and modeling"
    }, {
        "link": "http://www.cisco.com/",
        "logo": "cisco.png",
        "logoBgColor": "#ffffff",
        "description": "Cisco is using Kafka as part of their OpenSOC (Security Operations Center). More details <a href='http://opensoc.github.io/' target='_blank'here.</a>"
    }, {
        "link": "http://www.cityzendata.com/",
        "logo": "cityzen.png",
        "logoBgColor": "#ffffff",
        "description": "Cityzen Data uses Kafka as well, we provide a platform for collecting, storing and analyzing machine data."
    }, {
        "link": "http://www.criteo.com/",
        "logo": "criteo.jpeg",
        "logoBgColor": "#ffffff",
        "description": "Criteo uses Kafka as well, we provide a platform for collecting, storing and analyzing machine data."
    }, {
        "link": "https://www.cj.com/",
        "logo": "CJ_Affiliate.png",
        "logoBgColor": "#ffffff",
        "description": "Apache Kafka is used at CJ Affiliate to process many of the key events driving our core product. Nearly every aspect of CJ's products and services currently benefit from the speed and stability this provides; additionally, Apache Kafka is one of the key technologies enabling CJ's upcoming real-time Insights & Analytics platform."
    }, {
        "link": "http://datasift.com/",
        "logo": "datasift.png",
        "logoBgColor": "#ffffff",
        "description": "Apache Kafka is used at DataSift as a collector of monitoring events and to track user's consumption of data streams in real time. <a href='http://highscalability.com/blog/2011/11/29/datasift-architecture-realtime-datamining-at-120000-tweets-p.html' target='_blank'>DataSift architecture</a>"
    }, {
        "link": "http://datadog.com/",
        "logo": "datadog.png",
        "logoBgColor": "#ffffff",
        "description": "Kafka brokers data to most systems in our metrics and events ingestion pipeline. Different modules contribute and consume data from it, for streaming CEP (homegrown), persistence (at different &quot;atemperatures&quot;a in Redis, ElasticSearch, Cassandra, S3), or batch analysis (Hadoop)."
    }, {
        "link": "https://developer.ibm.com/messaging/message-hub/",
        "logo": "ibmmessagehub.png",
        "logoBgColor": "#1e3648",
        "description": "The Message Hub service in our Bluemix PaaS offers Kafka-based messaging in a multi-tenant, pay-as-you-go public cloud. It's intended to provide messaging services for microservices, event-driven processing and streaming data in to analytics systems."
    }, {
        "link": "https://empathy.micronauticsresearch.com/",
        "logo": "robotCircle.png",
        "logoBgColor": "#ffffff",
        "description": "<a href 'https://empathy.micronauticsresearch.com/' target=blank'> EmpathyWorks</a> is a framework for simulating and analyzing networks of artificial personalities."
    }, {
        "link": "https://www.etsy.com/",
        "logo": "etsy.png",
        "logoBgColor": "#ffffff",
        "description": "See <a href='http://siliconangle.com/blog/2015/08/11/etsy-going-all-in-with-kafka-as-dataflow-pipeline-hpbigdata15/' target='_blank'>this article</a>."
    }, {
        "link": "http://www.exponential.com/",
        "logo": "exponential.png",
        "logoBgColor": "#ffffff",
        "description": "Exponential is using Kafka in production to power the events ingestion pipeline for real time analytics and log feed consumption."
    }, {
        "link": "https://www.exoscale.ch/",
        "logo": "exoscale.png",
        "logoBgColor": "#ffffff",
        "description": "Exoscale uses Kafka in production."
    }, {
        "link": "https://eng.uber.com/",
        "logo": "uber.png",
        "logoBgColor": "#ffffff",
        "description": "Apache Kafka is a core part of Uber’s overall infrastructure stack and powers various online & near realtime use-cases."
    }, {
        "link": "http://emergingthreats.net/",
        "logo": "emergingthreats.png",
        "logoBgColor": "#ffffff",
        "description": "Emerging threats uses Kafka in our event pipeline to process billions of malware events for search indices, alerting systems, etc."
    }, {
        "link": "http://foursquare.com/",
        "logo": "foursquare.png",
        "logoBgColor": "#ffffff",
        "description": "Kafka powers online to online messaging, and online to offline messaging at Foursquare. We integrate with monitoring, production systems, and our offline infrastructure, including hadoop."
    }, {
        "link": "http://www.flyhajj.com/",
        "logo": "flyhajj.png",
        "logoBgColor": "#ffffff",
        "description": "We use Kafka to collect all metrics and events generated by the users of the website."
    }, {
        "link": "http://www.goldmansachs.com/",
        "logo": "goldmansachs.jpg",
        "logoBgColor": "#64a8f1",
        "description": "<a href='http://www.goldmansachs.com/' target='_blank'>www.goldmansachs.com</a>"
    }, {
        "link": "http://gnip.com/",
        "logo": "gnip.png",
        "logoBgColor": "#ffffff",
        "description": "Kafka is used in their twitter ingestion and processing pipeline."
    }, {
        "link": "http://graylog2.org/",
        "logo": "graylog2.jpg",
        "logoBgColor": "#ffffff",
        "description": "Graylog2 is a free and open source log management and data analysis system. It's using Kafka as default transport for Graylog2 Radio. The use case is described <a href='http://support.torch.sh/help/kb/graylog2-server/using-graylog2-radio-v020x' target='_blank'here</a>."
    }, {
        "link": "http://www.hotels.com/",
        "logo": "hotels.jpg",
        "logoBgColor": "#ffffff",
        "description": "Hotels.com uses Kafka as pipeline to collect real time events from multiple sources and for sending data to HDFS."
    }, {
        "link": "http://helprace.com/help-desk",
        "logo": "helprace.png",
        "logoBgColor": "#ffffff",
        "description": "Kafka is used as a distributed high speed message queue in our help desk software as well as our real-time event data aggregation and analytics."
    }, {
        "link": "http://helpshift.com/",
        "logo": "helpshift.png",
        "logoBgColor": "#ffffff",
        "description": "Produces billions of events with Kafka through an erlang based producer ekaf that supports 8.0, and consumes topics primarily with storm and clojure."
    }, {
        "link": "http://homeadvisor.com/",
        "logo": "homeadvisor.jpg",
        "logoBgColor": "#ffffff",
        "description": "We use Kafka for logging and async event processing, among other uses."
    }, {
        "link": "http://www.ifttt.com/",
        "logo": "ifttt.png",
        "logoBgColor": "#ffffff",
        "description": "We use Kafka to ingest real-time log and tracking data for analytics, dashboards, and machine learning."
    }, {
        "link": "http://www.infochimps.com/",
        "logo": "infochimps.png",
        "logoBgColor": "#ffffff",
        "description": "Kafka is part of the <a href='http://blog.infochimps.com/2012/10/30/next-gen-real-time-streaming-storm-kafka-integration' target='_blank'>InfoChimps real-time data platform</a>."
    }, {
        "link": "http://www.ipinyou.com.cn/?defaultLocale=en",
        "logo": "ipinyou.png",
        "logoBgColor": "#ffffff",
        "description": "The largest DSP in China which has its HQ in Beijing and offices in Shanghai, Guangzhou, Silicon Valley and Seattle. Kafka clusters are the central data hub in iPinYou. All kinds of Internet display advertising data, such as bid/no-bid, impression, click, advertiser, conversion and etc., are collected as primary data streams into Kafka brokers in real time, by LogAggregator (a substitute for Apache Flume, which is implemented in C/C++ by iPinYou, has customized functionality, better performance, lower resource-consuming)."
    }, {
        "link": "http://linkedin.com",
        "logo": "linkedin.jpg",
        "logoBgColor": "#007bb6",
        "description": "Apache Kafka is used at LinkedIn for activity stream data and operational metrics. This powers various products like LinkedIn Newsfeed, LinkedIn Today in addition to our offline analytics systems like Hadoop."
    }, {
        "link": "http://www.liveperson.com/",
        "logo": "liveperson.png",
        "logoBgColor": "#ffffff",
        "description": "Using Kafka as the main data bus for all real time events."
    }, {
        "link": "http://www.linksmart.com/",
        "logo": "linksmart.png",
        "logoBgColor": "#ffffff",
        "description": "Kafka is used at LinkSmart as an event stream feeding Hadoop and custom real time systems."
    }, {
        "link": "http://www.lucidworks.com/products/lucidworks-big-data",
        "logo": "lucidworks.png",
        "logoBgColor": "#ffffff",
        "description": "We use Kafka for syncing LucidWorks Search (Solr) with incoming data from Hadoop and also for sending LucidWorks Search logs back to Hadoop for analysis."
    }, {
        "link": "http://loggly.com/",
        "logo": "loggly.png",
        "logoBgColor": "#ffffff",
        "description": "Loggly is the world's most popular cloud-based log management. Our cloud-based log management service helps DevOps and technical teams make sense of the the massive quantity of logs. Kafka is used as part of our <a href='http://www.loggly.com/behind-the-screens' target='_blank'log collection and processing infrastructure.</a>"
    }, {
        "link": "http://web.livefyre.com/",
        "logo": "livefyre.png",
        "logoBgColor": "#ffffff",
        "description": "Livefyre uses Kafka for the real time notifications, analytics pipeline and as the primary mechanism for general pub/sub."
    }, {
        "link": "https://mailchimp.com/",
        "logo": "mailchimp.png",
        "logoBgColor": "#ffffff",
        "description": "Kafka powers MailChimp’s data pipeline that in turn powers <a href='https://mailchimp.com/pro/' target=blank'>MailChimp Pro</a>, as well as an increasing number of other product features. You can read some of the details <a href='https://devs.mailchimp.com/blog/powering-mailchimp-pro-reporting/' target=blank'>here</a>."
    }, {
        "link": "http://www.mate1.com/about",
        "logo": "mate1.png",
        "logoBgColor": "#000000",
        "description": "Apache kafka is used at Mate1 as our main event bus that powers our news and activity feeds, automated review systems, and will soon power real time notifications and log distribution."
    }, {
        "link": "http://metamarkets.com/",
        "logo": "metamarkets.png",
        "logoBgColor": "#ffffff",
        "description": "We use Kafka to ingest real-time event data, stream it to Storm and Hadoop, and then serve it from our Druid cluster to feed our interactive analytics dashboards. We've also built  connectors for directly ingesting events from Kafka into Druid."
    }, {
        "link": "http://mozilla.org/",
        "logo": "mozilla.png",
        "logoBgColor": "#ffffff",
        "description": "Kafka will soon be replacing part of our current production system to collect performance and usage data from the end-users browser for projects like Telemetry, Test Pilot, etc. Downstream consumers usually persist to either HDFS or HBase."
    }, {
        "link": "http://netflix.com",
        "logo": "netflix.png",
        "logoBgColor": "#FFFFFF",
        "description": "Real-time monitoring and event-processing <a href='http://techblog.netflix.com/2016/04/kafka-inside-keystone-pipeline.html' target='_blank'>pipeline</a>."
    }, {
        "link": "http://www.oracle.com/",
        "logo": "oracle.png",
        "logoBgColor": "#ffffff",
        "description": "Oracle provides native connectivity to Kafka from its Enterprise Service Bus product called OSB (Oracle Service Bus) which allows developers to leverage OSB built-in mediation capabilities to implement staged data pipelines."
    }, {
        "link": "http://www.outbrain.com/",
        "logo": "outbrain.png",
        "logoBgColor": "#ffffff",
        "description": "We use Kafka in production for real time log collection and processing, and for cross-DC cache propagation."
    }, {
        "link": "http://www.oracle.com/technetwork/middleware/goldengate/overview/index.html",
        "logo": "oraclegoldengate.png",
        "logoBgColor": "#ffffff",
        "description": "GoldenGate offers a comprehensive solution that streams transactional data from various sources into various big data targets including Kafka in real-time, enabling organizations to build fault -tolerant, highly reliable, and extensible analytical applications."
    }, {
        "link": "http://www.ooyala.com/",
        "logo": "ooyala.png",
        "logoBgColor": "#ffffff",
        "description": "Kafka is used as the primary high speed message queue to power Storm and our real-time analytics/event ingestion pipelines."
    }, {
        "link": "http://www.ovh.com/us/index.xml",
        "logo": "ovh.png",
        "logoBgColor": "#ffffff",
        "description": "OVH uses Kafka in production for over a year now using it for event bus, data pipeline for antiddos and more to come."
    }, {
        "link": "http://www.parsely.com/",
        "logo": "parsely.png",
        "logoBgColor": "#ffffff",
        "description": "Kafka is used for all <a href='http://www.parsely.com/misc/slides/logs/#1' target=_blank'>data integration </a> of analytics event data."
    }, {
        "link": "http://www.paypal.com/",
        "logo": "paypal.png",
        "logoBgColor": "#ffffff",
        "description": "See <a href='https://github.com/paypal/couchbasekafka' target='_blank'>this</a>."
    }, {
        "link": "http://www.portoseguro.com.br/",
        "logo": "porto-seguro.png",
        "logoBgColor": "#ffffff",
        "description": "We use Kafka in production for online and near real-time solutions. Kafka is a core part for many products, such as our Credit Card System."
    }, {
        "link": "http://quixey.com/",
        "logo": "quixey.png",
        "logoBgColor": "#ffffff",
        "description": "At Quixey, The Search Engine for Apps, Kafka is an integral part of our eventing, logging and messaging infrastructure."
    }, {
        "link": "http://www.retentionscience.com/",
        "logo": "retentionscience.jpg",
        "logoBgColor": "#ffffff",
        "description": "Click stream ingestion and processing."
    }, {
        "link": "http://www.richrelevance.com/",
        "logo": "richrelevance.png",
        "logoBgColor": "#ffffff",
        "description": "Real-time tracking event pipeline."
    }, {
        "link": "http://sematext.com/",
        "logo": "sematext.png",
        "logoBgColor": "#ffffff",
        "description": "In <a href='http://sematext.com/spm' target='_blank'SPM</a> (performance monitoring + alerting), Kafka is used for metrics collection and feeds SPM's in-memory data aggregation (OLAP cube creation) as well as our CEP/Alerts servers (see also: <a href='http://blog.sematext.com/2013/10/16/announcement-spm-performance-monitoring-for-kafka/' target=_blank'>SPM for Kafka performance monitoring</a>). In <a href='http://sematext.com/search-analytics' target='_blank'>SA (search analytics)</a> Kafka is used in search and click stream collection before being aggregated and persisted. In <a href='http://sematext.com/logsene' target='_blank'Logsene (log analytics)</a> Kafka is used to pass logs and other events from front-end receivers to the persistent backend."
    }, {
        "link": "http://www.skyscanner.net/",
        "logo": "skyscanner.png",
        "logoBgColor": "#ffffff",
        "description": "The world's travel search engine, uses Kafka for real-time log and event ingestion. It is the integration point for of all stream-processing and data transportation services."
    }, {
        "link": "http://www.strava.com/",
        "logo": "strava.jpg",
        "logoBgColor": "#ffffff",
        "description": "Powers our analytics pipeline, activity feeds denorm and several other production services."
    }, {
        "link": "http://www.swiftkey.net/",
        "logo": "swiftkey.png",
        "logoBgColor": "#ffffff",
        "description": "We use Apache Kafka for analytics event processing."
    }, {
        "link": "http://square.com",
        "logo": "square.png",
        "logoBgColor": "#FFFFFF",
        "description": "We use Kafka as a bus to move all systems events through our various datacenters. This includes metrics, logs, custom events etc. On the consumer side, we output into Splunk, Graphite, Esper-like real-time alerting."
    }, {
        "link": "http://spotify.com",
        "logo": "spotify.png",
        "logoBgColor": "#1ed760",
        "description": "Kafka is used at Spotify as part of their log <a href='http://www.meetup.com/stockholm-hug/events/121628932' target='_blank'>delivery system</a>."
    }, {
        "link": "http://www.stumbleupon.com/",
        "logo": "stumbleupon.png",
        "logoBgColor": "#eb4924",
        "description": "Data collection platform for analytics."
    }, {
        "link": "http://www.shopify.com/",
        "logo": "shopify.png",
        "logoBgColor": "#ffffff",
        "description": "Access logs, A/B testing events, domain events (&quot;a checkout happened&quot;, etc.), metrics, delivery to HDFS, and customer reporting. We are now focusing on consumers: analytics, support tools, and fraud analysis."
    }, {
        "link": "http://www.socialtwist.com/",
        "logo": "socialtwist.jpg",
        "logoBgColor": "#ffffff",
        "description": "We use Kafka internally as part of our reliable email queueing system."
    }, {
        "link": "http://www.spongecell.com/",
        "logo": "spongecell.png",
        "logoBgColor": "#ffffff",
        "description": "We use Kafka to run our entire analytics and monitoring pipeline driving both real-time and ETL applications for our customers."
    }, {
        "link": "https://www.simple.com/",
        "logo": "simple.gif",
        "logoBgColor": "#ffffff",
        "description": "We use Kafka at Simple for log aggregation and to power our analytics infrastructure."
    }, {
        "link": "http://www.tagged.com/",
        "logo": "tagged.png",
        "logoBgColor": "#ffffff",
        "description": "Apache Kafka drives our new pub sub system which delivers real-time events for users in our latest game - Deckadence. It will soon be used in a host of new use cases including group chat and back end stats and log collection."
    }, {
        "link": "https://www.tumblr.com/",
        "logo": "tumblr.png",
        "logoBgColor": "#5eba8c",
        "description": "See <a href='http://highscalability.com/blog/2012/2/13/tumblr-architecture-15-billion-page-views-a-month-and-harder.html' target='_blank'>this</a>."
    }, {
        "link": "http://twitter.com",
        "logo": "twitter.jpg",
        "logoBgColor": "#28a9e2",
        "description": "As part of their Storm stream processing infrastructure, e.g. <a href='http://engineering.twitter.com/2013/01/improving-twitter-search-with-real-time.html' target='_blank'>this</a> and <a href='https://blog.twitter.com/2015/handling-five-billion-sessions-a-day-in-real-time' target='_blank'>this</a>."
    }, {
        "link": "http://www.trivago.com/",
        "logo": "trivago.png",
        "logoBgColor": "#ffffff",
        "description": "Trivago uses Kafka for stream processing in Storm as well as processing of application logs."
    }, {
        "link": "http://www.urbanairship.com/",
        "logo": "urbanairship.png",
        "logoBgColor": "#ffffff",
        "description": "At Urban Airship we use Kafka to buffer incoming data points from mobile devices for processing by our analytics infrastructure."
    }, {
        "link": "http://www.uswitch.com/",
        "logo": "uswitch.png",
        "logoBgColor": "#ffffff",
        "description": "See <a href='http://oobaloo.co.uk/kafka-for-uswitchs-event-pipeline' target='_blank'>this blog</a>."
    }, {
        "link": "http://www.visualrevenue.com/",
        "logo": "visualrevenue.jpg",
        "logoBgColor": "#1c1a88",
        "description": "We use Kafka as a distributed queue in front of our web traffic stream processing infrastructure (Storm)."
    }, {
        "link": "http://www.visualdna.com/",
        "logo": "visualdna.jpg",
        "logoBgColor": "#ffffff",
        "description": "We use Kafka 1. as an infrastructure that helps us bring continuously the tracking events from various datacenters into our central hadoop cluster for offline processing, 2. as a propagation path for data integration, 3. as a real-time platform for future inference and recommendation engines"
    }, {
        "link": "http://wooga.com/",
        "logo": "wooga.png",
        "logoBgColor": "#ffffff",
        "description": "We use Kafka to aggregate and process tracking data from all our facebook games (which are hosted at various providers) in a central location."
    }, {
        "link": "http://www.wizecommerce.com/",
        "logo": "wizecommerce.gif",
        "logoBgColor": "#ffffff",
        "description": "At Wize Commerce (previously, NexTag), Kafka is used as a distributed queue in front of Storm based processing for search index generation. We plan to also use it for collecting user generated data on our web tier, landing the data into various data sinks like Hadoop, HBase, etc."
    }, {
        "link": "http://wikimediafoundation.org/wiki/Our_projects",
        "logo": "wikimedia.png",
        "logoBgColor": "#ffffff",
        "description": "Wikimedia Foundation uses Kafka as a log transport for analytics data from production webservers and applications.  This data is consumed into Hadoop using Camus and to other processors of analytics data."
    }, {
        "link": "https://www.vividcortex.com/",
        "logo": "vividcortex.png",
        "logoBgColor": "#ffffff",
        "description": "VividCortex uses Kafka in our SaaS MySQL performance management platform to reliably ingest high-volume 1-second timeseries data."
    }, {
        "link": "http://xitenetworks.com/",
        "logo": "xite.png",
        "logoBgColor": "#ffffff",
        "description": "Kafka is at the heart of our Data Infrastructure - Business Intelligence, Recommender Systems and Machine Learning solutions are build as reactive and streaming architecture. Also we use Kafka as a great alternative to REST APIs for micro-services integration. This allows us to scale and reliably upgrade micro-services without integration and consistency issues."
    }, {
        "link": "http://yahoo.com",
        "logo": "yahoo.png",
        "logoBgColor": "#3d018b",
        "description": "See <a href='http://yahooeng.tumblr.com/post/109994930921/kafka-yahoo' target='_blank'>this</a>."
    }, {
        "link": "http://www.yieldbot.com/",
        "logo": "yieldbot.png",
        "logoBgColor": "#ffffff",
        "description": "Yieldbot uses kafka for real-time events, camus for batch loading, and mirrormakers for x-region replication."
    }, {
        "link": "http://yellerapp.com/",
        "logo": "yeller.png",
        "logoBgColor": "#ffffff",
        "description": "Yeller uses Kafka to process large streams of incoming exception data for it's customers. Rate limiting, throttling and batching are all built on top of Kafka."
    }];
</script>

<body>
	<div class="main">
		<div class="header">
			<a href=""><img width="325" height="97" class="logo" src="images/logo.png"></a>
		</div>

<!--#include virtual="includes/_top.htm" -->
<div class="content">
        <nav class="b-sticky-nav">
  <div class="nav-scroller">
    <div class="nav__inner">
      <a class="b-nav__home nav__item" href="">主页</a>
      <a class="b-nav__intro nav__item" href="intro.html">介绍</a>
      <a class="b-nav__quickstart nav__item" href="quickstart.html">快速开始</a>
      <a class="b-nav__uses nav__item" href="uses.html">使用案例</a>

      <div class="nav__item nav__item__with__subs">
        <a class="b-nav__docs nav__item nav__sub__anchor" href="documentation.html">文档</a>
        <a class="nav__item nav__sub__item" href="documentation.html#gettingStarted">入门</a>
        <a class="nav__item nav__sub__item" href="documentation.html#api">APIs</a>
        <a class="b-nav__streams nav__item nav__sub__item" href="documentation.html#streams">kafka streams</a>
        <a class="nav__item nav__sub__item" href="documentation.html#connect">kafka connect</a>
        <a class="nav__item nav__sub__item" href="documentation.html#configuration">配置</a>
        <a class="nav__item nav__sub__item" href="documentation.html#design">设计</a>
        <a class="nav__item nav__sub__item" href="documentation.html#implementation">实现</a>
        <a class="nav__item nav__sub__item" href="documentation.html#operations">操作</a>
        <a class="nav__item nav__sub__item" href="documentation.html#security">安全</a>
      </div>

      <a class="b-nav__performance nav__item" href="performance.html">性能</a>
      <a class="b-nav__poweredby nav__item" href="powered-by.html">powered by</a>
      <a class="b-nav__project nav__item" href="project.html">项目信息</a>
      <a class="b-nav__ecosystem nav__item" href="https://cwiki.apache.org/confluence/display/KAFKA/Ecosystem" target="_blank">生态圈</a>
      <a class="b-nav__clients nav__item" href="https://cwiki.apache.org/confluence/display/KAFKA/Clients" target="_blank">客户端</a>
      <a class="b-nav__events nav__item" href="events.html">事件</a>
      <a class="b-nav__contact nav__item" href="contact.html">联系我们</a>

      <div class="nav__item nav__item__with__subs">
        <a class="b-nav__apache nav__item nav__sub__anchor b-nav__sub__anchor" href="#">apache</a>
        <a class="b-nav__apache nav__item nav__sub__item" href="http://www.apache.org/" target="_blank">贡献</a>
        <a class="b-nav__apache nav__item nav__sub__item" href="http://www.apache.org/licenses/" target="_blank">license</a>
        <a class="b-nav__apache nav__item nav__sub__item" href="http://www.apache.org/foundation/sponsorship.html" target="_blank">赞助</a>
        <a class="b-nav__apache nav__item nav__sub__item" href="http://www.apache.org/foundation/thanks.html" target="_blank">感谢</a>
        <a class="b-nav__apache nav__item nav__sub__item" href="http://www.apache.org/security/" target="_blank">安全</a>
      </div>

      <a class="btn" href="downloads.html">下载</a>
      <div class="social-links">
        <a class="twitter" href="https://twitter.com/apachekafka" target="_blank">@apachekafka</a>
      </div>
    </div>
  </div>
  <div class="navindicator">
    <div class="b-nav__home navindicator__item"></div>
    <div class="b-nav__intro navindicator__item"></div>
    <div class="b-nav__quickstart navindicator__item"></div>
    <div class="b-nav__uses navindicator__item"></div>
    <div class="b-nav__docs navindicator__item"></div>
    <div class="b-nav__performance navindicator__item"></div>
    <div class="b-nav__poweredby navindicator__item"></div>
    <div class="b-nav__project navindicator__item"></div>
    <div class="b-nav__ecosystem navindicator__item"></div>
    <div class="b-nav__clients navindicator__item"></div>
    <div class="b-nav__events navindicator__item"></div>
    <div class="b-nav__contact navindicator__item"></div>
  </div>
</nav>

		<!--#include virtual="includes/_nav.htm" -->
	<div class="right">
		<h1>Powered By</h1>

		<div class="grid" data-masonry='{ "itemSelector": ".grid__item"}'></div>

        <div class="callout callout--basic" style="margin-top:6rem">
            <h3>Want to appear on this page?</h3> 
            <p>
                Send a quick description of your organization and usage to the <a href="contact">mailing list</a> or to <a href="https://twitter.com/apachekafka" target="_blank">@apachekafka</a> or <a href="https://twitter.com/jaykreps" target="_blank">@jaykreps</a> on Twitter and we'll add you.
            </p>
        </div>

<script src="https://unpkg.com/masonry-layout@4.1/dist/masonry.pkgd.min.js"></script>
<script>
// Show selected style on nav item
$(function() { $('.b-nav__poweredby').addClass('selected'); });
</script>

				</div>
			</div>
		</div>
		<div class="footer">
			<div class="footer__inner">
				<div class="footer__legal">
					<span class="footer__legal__one">The contents of this website are &copy; 2016 <a href="https://www.apache.org/" target="_blank">Apache Software Foundation</a> under the terms of the <a href="https://www.apache.org/licenses/LICENSE-2.0.html" target="_blank">Apache License v2</a>.</span>
					<span class="footer__legal__two">Apache Kafka, Kafka, and the Kafka logo are either registered trademarks or trademarks of The Apache Software Foundation</span>
					<span class="footer__legal__three">in the United States and other countries.</span>
				</div>
				<a class="apache-feather" target="_blank" href="http://www.apache.org">
					<img width="40" src="images/feather-small.png" alt="Apache Feather">
				</a>
			</div>
		</div>
	</body>

    <script type="text/javascript" src="js/syntaxhighlighter.js"></script>
	<script src="https://cdnjs.cloudflare.com/ajax/libs/handlebars.js/2.0.0/handlebars.js"></script>
	<script>
		$(function () {
			// list of pages that are rendered with Handlebars
			var templates = [
				'introduction',
				'implementation',
				'design',
				'api',
				'configuration',
				'ops',
				'security',
				'connect',
				'streams',
				'quickstart',
				'toc',
				'upgrade',
				'content'
			];

			// loop through all Handlebar templates on the page and render them
			for(var i = 0; i < templates.length; i++) {
				var templateScript = $("#" + templates[i] + "-template").html();
				if(templateScript) {
					var template = Handlebars.compile(templateScript);
					var html = template(context);
					$(".p-" + templates[i]).html(html);
				}
			}
		});
	</script>

	<script src="js/jquery.sticky-kit.min.js"></script>
	<script>
		$(function() {
			// Set mobile scroll position on nav
			function setNavScroll(offsetLeft) {
				$('.nav-scroller').animate({
					scrollLeft: $('.nav-scroller').scrollLeft() + $('nav .selected').offset().left - offsetLeft
				}, 50);
			}

			// Helper classes for nav
			$('nav').mouseenter(function(){
				$(this).addClass('hovering');
			});
			$('nav').mouseleave(function(){
				$(this).removeClass('hovering');
			});

			// Handle expanding sections of nav (async)
			$('.b-nav__sub__anchor').click(function(){
				$('nav .selected').removeClass('selected');
				$('.nav__item__with__subs--expanded').removeClass('nav__item__with__subs--expanded');

				$(this).addClass('selected');
				$(this).parent().toggleClass('nav__item__with__subs--expanded');

				if($(window).width() <= 650) {
					window.setTimeout(function(){
						setNavScroll(30);
					}, 300);
				}
			});

			// Initialize sticky elements on the page
			if($(window).width() > 650) {
				// Nav for desktop
				$('.b-sticky-nav').stick_in_parent({offset_top: 40});
				// Documentation banner for desktop
				$('.b-sticky-doc-banner').stick_in_parent({offset_top: 0});
			}	else {
				// Scroll nav for mobile so current nav item is in view
				window.setTimeout(function(){
					setNavScroll(80);
				}, 300);
			}

			// On window resize check to see if stuff should be unstuck
			window.onresize = function(event) {
			  if($(window).width() <= 650) {
			    $('.b-sticky-nav').trigger("sticky_kit:detach");
			  } else {
			    $('.b-sticky-nav').stick_in_parent({offset_top: 40});
					$('.b-sticky-doc-banner').stick_in_parent({offset_top: 0});
			  }
			};
		});
	</script>


</html>

		<!--#include virtual="includes/_footer.htm" -->

<script id="grid__item-template" type="text/x-handlebars-template">
    <div class="grid__item">
        <a href="{{link}}" target="_blank" class="grid__item__link" style="background-color:{{logoBgColor}};">
            <span class="grid__item__logo" style="background-image: url('/images/powered-by/{{logo}}');"></span>
        </a>
        <p class="grid__item__description">{{{description}}}</p>
    </div>
</script>

<script>
    $(function () {
        // render each powered-by item with the grid__item Handlebars template and append it to the grid
        for(var i = 0; i < poweredByItems.length; i++) {
            var context = poweredByItems[i];
            var templateScript = $("#grid__item-template").html();
            var template = Handlebars.compile(templateScript);
            var html = template(context);
            $(".grid").append(html);
        }
    });
</script>