forked from apache/kafka-site
-
Notifications
You must be signed in to change notification settings - Fork 0
/
powered-by.html
885 lines (876 loc) · 62.3 KB
/
powered-by.html
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
<!--#include virtual="includes/_header.htm" -->
<script>
// powered by items
var poweredByItems = [
{
"link": "https://www.dash0.com/",
"logo": "dash0.svg",
"logoBgColor": "#333333",
"description": "Dash0 leverages Apache Kafka as its core event streaming platform to ingest, process, and distribute customer OpenTelemetry data at scale. Kafka's distributed architecture allows Dash0 to handle high volumes of real-time telemetry data with low latency."
},
{
"link": "https://netstratum.com/",
"logo": "netstratum-logo.png",
"logoBgColor": "#FFFFFF",
"description": "Netstratum is a dynamic software development company dedicated to crafting scalable, robust, and tailor-made solutions. With our relentless pursuit of excellence across diverse domains and extensive expertise, we have significantly enriched businesses worldwide. At the core of our data management, we harness the power of Kafka, enabling seamless streaming of massive data events from our various products, including Hoolva, Voifinity, HRMNest, and Finstratum. Through Kafka's advanced capabilities, we efficiently queue and log this data for analysis and visualization in data analytics tools. This real-time, transparent monitoring empowers us to make swift support and scaling decisions, ensuring optimal performance for our clients."
},
{
"link": "http://howly.com/",
"logo": "howly.png",
"logoBgColor": "#FFFFFF",
"description": "At Howly, Kafka is a key component of our event-driven architecture. We use Kafka to facilitate communication between microservices, which enables us to build a scalable and fault-tolerant system."
},
{
"link": "https://orexes.de/",
"logo": "orexes.png",
"logoBgColor": "#FFFFFF",
"description": "OREXES uses Kafka as the central hub of communication in an innovative identity management solution. They leverage Kafka functionalities to achieve real-time analytics, fault-tolerance and a high degree of reliability."
},
{
"link": "http://cloudscaleinc.com/",
"logo": "cloud-scale.png",
"logoBgColor": "#FFFFFF",
"description": "Cloud Scale® Inc uses Kafka for building a real-time analytics system for our eventing, logging, and messaging services."
},
{
"link": "https://tech.dream11.in/blog/2020-01-07_Data-Highway---Dream11-s-Inhouse-Analytics-Platform---The-Burden-and-Benefits-90b8777d282",
"logo": "dream11.jpg",
"logoBgColor": "#e10000",
"description": "We use Apache Kafka heavily for data ingestion to the Data platform, streaming as well as batch analytics, and for our microservices to communicate with one another. Kafka is a core component of the overall Dream11 Tech stack."
},
{
"link": "https://brainstation-23.com",
"logo": "bs-23.png",
"logoBgColor": "#FFFFFF",
"description": "Kafka is used as a bulk transaction queue and batch data processing in banking solutions where customer satisfaction relies on most. It is also used in batch data validation and filtering process."
},
{
"link": "https://www.globo.com",
"logo": "globo.png",
"logoBgColor": "#FFFFFF",
"description": "Kafka is used at Globo to queue and stream real-time and batch data to our Big Data event ingestion. It is also used for our logging systems."
},
{
"link": "https://www.axios.com",
"logo": "axios.png",
"logoBgColor": "#FFFFFF",
"description": "The Data Platform team at Axios uses kafka to stream real-time data from our products into our data lake where we are able to run exploratory queries to understand our data and build machine learning models. Kafka enables us to automate the collection of data from various parts of the organization for ingestion into our data platform."
},
{
"link": "https://www.holidaycheck.de/",
"logo": "holidaycheck.png",
"logoBgColor": "#FFFFFF",
"description": "Kafka at HolidayCheck is driving mission-critical features ahead like near-real-time analytics, fraud detection, pricing anomalies, recommenders. These help holidaymakers discover the best and most trusted offers for them. Kafka has also had a significant positive impact on the engineering culture - boosting transparency and autonomy for each team to deliver data-driven products at scale."
},
{
"link": "https://www.grab.com",
"logo": "grab.png",
"logoBgColor": "#FFFFFF",
"description": "The Data Technology team at Grab deploys, maintains, operates, and expands on Kafka's capabilities to support TB/hour scale, mission critical event logs, event sourcing and stream processing architectures. Both stateful and stateless applications built over these logs by backend engineers, machine learning engineers, data scientists, and analysts support realtime, near realtime, and analytical use cases across our business lines, from ride hailing and food delivery to fintech."
}, {
"link": "https://www.nytimes.com",
"logo": "NYT.jpg",
"logoBgColor": "#FFFFFF",
"description": "<a href='https://open.nytimes.com/publishing-with-apache-kafka-at-the-new-york-times-7f0e3b7d2077'>The New York Times uses Apache Kafka </a>and the Kafka Streams API to store and distribute, in real-time, published content to the various applications and systems that make it available to the readers."
}, {
"link": "http://pinterest.com",
"logo": "pinterest.png",
"logoBgColor": "#ffffff",
"description": "<a href='https://medium.com/@Pinterest_Engineering/using-kafka-streams-api-for-predictive-budgeting-9f58d206c996'>Pinterest uses Apache Kafka and the Kafka Streams API</a> at large scale to power the real-time, predictive budgeting system of their advertising infrastructure. With Kafka Streams, spend predictions are more accurate than ever."
}, {
"link": "http://www.zalando.com",
"logo": "zalando.jpg",
"logoBgColor": "#ffffff",
"description": "As the leading online fashion retailer in Europe, Zalando uses Kafka as an ESB (Enterprise Service Bus), which helps us in transitioning from a monolithic to a micro services architecture. Using Kafka for processing <a href='https://kafka-summit.org/sessions/using-kstreams-ktables-calculate-real-time-domain-rankings/' target='_blank'>event streams</a> enables our technical team to do near-real time business intelligence."
}, {
"link": "https://linecorp.com/",
"logo": "line.png",
"logoBgColor": "#00b900",
"description": "LINE uses Apache Kafka as a central datahub for our services to communicate to one another. Hundreds of billions of messages are produced daily and are used to execute various business logic, threat detection, search indexing and data analysis. LINE leverages Kafka Streams to reliably transform and filter topics enabling sub topics consumers can efficiently consume, meanwhile retaining easy maintainability thanks to its sophisticated yet minimal code base."
}, {
"link": "https://www.rabobank.com",
"logo": "rabobank.jpg",
"logoBgColor": "#ffffff",
"description": "Rabobank is one of the 3 largest banks in the Netherlands. Its digital nervous system, the Business Event Bus, is powered by Apache Kafka. It is used by an increasing amount of financial processes and services, one which is Rabo Alerts. This service alerts customers in real-time upon financial events and is built using Kafka Streams."
}, {
"link": "http://addthis.com/",
"logo": "addthis.png",
"logoBgColor": "#ffffff",
"description": "Apache Kafka is used at AddThis to collect events generated by our data network and broker that data to our analytics clusters and real-time web analytics platform."
}, {
"link": "https://www.adidas-group.com/",
"logo": "adidas.png",
"logoBgColor": "#ffffff",
"description": "adidas uses Kafka as the core of Fast Data Streaming Platform, integrating source systems and enabling teams to implement real-time event processing for monitoring, analytics and reporting solutions."
}, {
"link": "https://www.agoda.com/",
"logo": "agoda.png",
"logoBgColor": "#ffffff",
"description": "Apache Kafka powers the backbone of Agoda's data pipeline with trillions of events streaming through daily across multiple data centers. The majority of the events are destined for analytical systems and directly influence business decisions at one of the world’s fastest growing online travel booking platforms."
}, {
"link": "https://agoora.com/",
"logo": "spoud-logo.svg",
"logoBgColor": "#ffffff",
"description": "AGOORA is a data-platform for Apache Kafka that encourages collaboration around data transparency and ownership."
}, {
"link": "http://www.airbnb.com/",
"logo": "airbnb.png",
"logoBgColor": "#ffffff",
"description": "Used in our event pipeline, exception tracking & more to come."
}, {
"link": "https://aiven.io/",
"logo": "aiven.svg",
"logoBgColor": "#ffffff",
"description": "Aiven is a cloud platform for open source technologies. We provide Apache Kafka as a managed service on public clouds, and use it internally to run and monitor our platform of tens of thousands of clusters."
}, {
"link": "https://www.altair.com",
"logo": "altair.svg",
"logoBgColor": "#ffffff",
"description": "Altair Panopticon(TM) incorporates Kafka into its stream processing engine. The software allows business users to build stream processing applications that subscribe to streaming data inputs, including Kafka topics and other real-time message queues, retrieve from SQL, NoSQL, and time series databases, join data streams and tables, aggregate streams within defined time windows, and conflate, filter, and merge data streams. The system can output processed data flows to Kafka or email, or write to databases like Kx kdb+, InfluxDb, or any SQL database."
}, {
"link": "http://www.amadeus.com",
"logo": "amadeus.jpg",
"logoBgColor": "#ffffff",
"description": "Apache Kafka is used for both real-time and batch data processing, and is the chosen event log technology for Amadeus microservice-based streaming applications. Kafka is also used for operational use cases such as application logs collection."
}, {
"link": "http://www.ancestry.com/",
"logo": "ancestry.svg",
"logoBgColor": "#ffffff",
"description": "Kafka is used as the <a href='http://blogs.ancestry.com/techroots/on-track-to-data-driven' target='_blank'>event log processing pipeline </a>for delivering better personalized product and service to our customers."
}, {
"link": "http://www.ants.vn/",
"logo": "ants.png",
"logoBgColor": "#ffffff",
"description": "Ants.vn uses Kafka in production for stream processing and log transfer (over 5B messages/month and growing)"
}, {
"link": "http://appsflyer.com/",
"logo": "appsflyer.png",
"logoBgColor": "#ffffff",
"description": "Apache Kafka is a core piece of AppsFlyer large-scale event driven architecture.<br /><br />It enables AppsFlyer to run a data pipeline that streams tens of billions of events on a daily basis"
}, {
"link": "http://axon.com/",
"logo": "axon.png",
"logoBgColor": "#000000",
"description": "Axon uses Apache Kafka to power mission critical infrastructure that enables first responders do their jobs effectively."
}, {
"link": "https://www.bandwidth.com/",
"logo": "bandwidth.png",
"logoBgColor": "#ffffff",
"description": "<a href='https://www.bandwidth.com/' target='_blank'>Bandwidth</a> uses Kafka in their Communications Platform to process call and message data records for billing and data analytics. Kafka also powers Bandwidth application metrics, system metrics and log aggregation pipelines."
}, {
"link": "https://www.barclays.com/",
"logo": "barclays.png",
"logoBgColor": "#ffffff",
"description": "Barclays utilizes Kafka for streaming and analytical information."
}, {
"link": "https://www.bookingsync.com/",
"logo": "bookingsync.svg",
"logoBgColor": "#ffffff",
"description": "Apache Kafka is used as a backbone of data synchronization and propagating changes of entitites/aggregates between multiple applications in BookingSync ecosystem."
}, {
"link": "https://boundary.com/",
"logo": "boundary.gif",
"logoBgColor": "#ffffff",
"description": "Apache Kafka aggregates high-flow message streams into a unified distributed pubsub service, brokering the data for other internal systems as part of Boundary's real-time network analytics infrastructure."
}, {
"link": "https://www.box.com/",
"logo": "box.png",
"logoBgColor": "#ffffff",
"description": "At Box, Kafka is used for the production analytics pipeline & real time monitoring infrastructure. We are planning to use Kafka for some of the new products & features"
}, {
"link": "https://www.bytedance.com/en/",
"logo": "ByteDance.svg",
"logoBgColor": "#ffffff",
"description": "ByteDance uses Kafka as a data hub to collect events and logs for a variety of services such as online model training, stream data processing and real-time data analytics."
}, {
"link": "http://www.cerner.com/",
"logo": "cerner.png",
"logoBgColor": "#ffffff",
"description": "Kafka is used with HBase and Storm as described <a href='http://blog.cloudera.com/blog/2014/11/how-cerner-uses-cdh-with-apache-kafka/' target='_blank'>here.</a>"
}, {
"link": "https://www.coursera.org/",
"logo": "coursera.png",
"logoBgColor": "#ffffff",
"description": "At Coursera, Kafka powers education at scale, serving as the data pipeline for realtime learning analytics/dashboards."
}, {
"link": "https://www.cloudflare.com/",
"logo": "cloudfare.png",
"logoBgColor": "#ffffff",
"description": "Cloudflare uses Kafka for our log processing and analytics pipeline, collecting hundreds of billions of events/day and data from thousands of servers."
}, {
"link": "https://www.cloudio.io/",
"logo": "cloudio.png",
"logoBgColor": "#ffffff",
"description": "Kafka powers CloudIO Flow which is an enterprise grade data ingestion platform that streams data from multiple sources in real time or periodic intervals. Data is streamed through various stages such as Data Masking of some sensitive parts of your data, validations, transformation, actions and mapping to the output schema and then automatically loaded into the output (data lake such as Redshift, Azure SQL, Snowflake, S3 or any Database) for analysis and reporting."
}, {
"link": "http://www.cloudphysics.com/",
"logo": "cloudphysics.png",
"logoBgColor": "#ffffff",
"description": "Kafka is powering our high-flow event pipeline that aggregates over 1.2 billion metric series from 1000+ data centers for near-to-real time data center operational analytics and modeling"
}, {
"link": "http://www.cisco.com/",
"logo": "cisco.png",
"logoBgColor": "#ffffff",
"description": "Cisco is using Kafka as part of their OpenSOC (Security Operations Center). More details <a href='http://opensoc.github.io/' target='_blank'>here.</a>"
}, {
"link": "http://www.cityzendata.com/",
"logo": "cityzen.png",
"logoBgColor": "#ffffff",
"description": "Cityzen Data uses Kafka as well, we provide a platform for collecting, storing and analyzing machine data."
}, {
"link": "http://www.criteo.com/",
"logo": "criteo.png",
"logoBgColor": "#ffffff",
"description": "Criteo takes advantage of Apache Kafka at the heart of the core business. Kafka powers our business log collection pipeline and streaming infrastructure. We have tens of Kafka clusters deployed over multiple data centres across three continents processing up to 30 million messages/sec."
}, {
"link": "https://www.cj.com/",
"logo": "CJ_Affiliate.png",
"logoBgColor": "#ffffff",
"description": "Apache Kafka is used at CJ Affiliate to process many of the key events driving our core product. Nearly every aspect of CJ's products and services currently benefit from the speed and stability this provides; additionally, Apache Kafka is one of the key technologies enabling CJ's upcoming real-time Insights & Analytics platform."
}, {
"link": "http://datasift.com/",
"logo": "datasift.png",
"logoBgColor": "#ffffff",
"description": "Apache Kafka is used at DataSift as a collector of monitoring events and to track user's consumption of data streams in real time. <a href='http://highscalability.com/blog/2011/11/29/datasift-architecture-realtime-datamining-at-120000-tweets-p.html' target='_blank'>DataSift architecture</a>"
}, {
"link": "http://datadog.com/",
"logo": "datadog.png",
"logoBgColor": "#ffffff",
"description": "Kafka brokers data to most systems in our metrics and events ingestion pipeline. Different modules contribute and consume data from it, for streaming CEP (homegrown), persistence (at different \"temperatures\" in Redis, ElasticSearch, Cassandra, S3), or batch analysis (Hadoop)."
}, {
"link": "http://www.datavisor.com/",
"logo": "datavisor.png",
"logoBgColor": "#ffffff",
"description": "DataVisor uses Apache Kafka as a critical real-time data pipeline that connects various micro-services in our fraud detection systems. Kafka is at the core of enabling our product to horizontally scale for large, enterprise data flows."
},{
"link": "https://deephaven.io/",
"logo": "deephaven.svg",
"logoBgColor": "#040427",
"description": "Deephaven is a query engine for streaming workloads. Deephaven enables you to ingest and <a href='https://deephaven.io/core/docs/how-to-guides/data-import-export/kafka-stream/' target='_blank'>transform Kafka feeds as live dataframes</a>."
}, {
"link": "http://www.deep.bi/",
"logo": "https://images.squarespace-cdn.com/content/v1/5c335a239f87708868173efd/1547466246389-VD6SYBGIGV89AW7NMBIX/Logo.png",
"logoBgColor": "#ffffff",
"description": "Deep.BI helps enterprises leverage next-gen data & AI pipelines, powered by Apache Druid & Apache Flink. We use Kafka to process hundreds of thousands of real-time events per second."
}, {
"link": "https://developer.ibm.com/messaging/message-hub/",
"logo": "ibmmessagehub.png",
"logoBgColor": "#1e3648",
"description": "The Message Hub service in our Bluemix PaaS offers Kafka-based messaging in a multi-tenant, pay-as-you-go public cloud. It's intended to provide messaging services for microservices, event-driven processing and streaming data in to analytics systems."
}, {
"link": "https://www.devsisters.com/",
"logo": "devsisters.png",
"logoBgColor": "#FFFFFF",
"description": "We use Apache Kafka (and Kafka Streams) to collect and ingest all of our game service logs (including analytics, server, or access logs). Apache Kafka has been one of the core components of our data pipeline from early 2015."
}, {
"link": "http://www.dexcom.com",
"logo": "dexcom-logo.jpg",
"logoBgColor": "#FFFFFF",
"description": "Customer relationship management, order management system, incident management system, and tech support system."
}, {
"link": "https://empathy.micronauticsresearch.com/",
"logo": "robotCircle.png",
"logoBgColor": "#ffffff",
"description": "<a href='https://empathy.micronauticsresearch.com/' target='_blank'>EmpathyWorks</a> is a framework for simulating and analyzing networks of artificial personalities."
}, {
"link": "https://www.etsy.com/",
"logo": "etsy.png",
"logoBgColor": "#ffffff",
"description": "See <a href='http://siliconangle.com/blog/2015/08/11/etsy-going-all-in-with-kafka-as-dataflow-pipeline-hpbigdata15/' target='_blank'>this article</a>."
}, {
"link": "https://www.evidentsystems.com/",
"logo": "evident-systems.png",
"logoBgColor": "#ffffff",
"description": "Evident Systems helps companies build world-class applications and services using Event Sourcing and CQRS on Apache Kafka.<br /><br />oNote relies on Kafka for its own implementation, and also empowers its users to store Events defined in their Event Models into Kafka."
}, {
"link": "http://www.exponential.com/",
"logo": "exponential.png",
"logoBgColor": "#ffffff",
"description": "Exponential is using Kafka in production to power the events ingestion pipeline for real time analytics and log feed consumption."
}, {
"link": "https://www.exoscale.ch/",
"logo": "exoscale.png",
"logoBgColor": "#ffffff",
"description": "Exoscale uses Kafka in production."
}, {
"link": "https://eng.uber.com/",
"logo": "uber.png",
"logoBgColor": "#ffffff",
"description": "Apache Kafka is a core part of Uber’s overall infrastructure stack and powers various online & near realtime use-cases."
}, {
"link": "http://emergingthreats.net/",
"logo": "emergingthreats.png",
"logoBgColor": "#ffffff",
"description": "Emerging threats uses Kafka in our event pipeline to process billions of malware events for search indices, alerting systems, etc."
}, {
"link": "http://foursquare.com/",
"logo": "foursquare.png",
"logoBgColor": "#ffffff",
"description": "Kafka powers online to online messaging, and online to offline messaging at Foursquare. We integrate with monitoring, production systems, and our offline infrastructure, including hadoop."
}, {
"link": "http://meetflo.com/",
"logo": "flow-by-moen.png",
"logoBgColor": "#ffffff",
"description": "As an IOT company, we use Kafka as the backbone of our event streaming/telemetry collection pipeline and as an event bus for microservices."
}, {
"link": "http://www.flyhajj.com/",
"logo": "flyhajj.png",
"logoBgColor": "#ffffff",
"description": "We use Kafka to collect all metrics and events generated by the users of the website."
}, {
"link": "http://www.goldmansachs.com/",
"logo": "goldmansachs.jpg",
"logoBgColor": "#64a8f1",
"description": "<a href='http://www.goldmansachs.com/' target='_blank'>www.goldmansachs.com</a>"
}, {
"link": "http://gnip.com/",
"logo": "gnip.png",
"logoBgColor": "#ffffff",
"description": "Kafka is used in their twitter ingestion and processing pipeline."
}, {
"link": "http://graylog2.org/",
"logo": "graylog2.jpg",
"logoBgColor": "#ffffff",
"description": "Graylog2 is a free and open source log management and data analysis system. It's using Kafka as default transport for Graylog2 Radio. The use case is described <a href='http://support.torch.sh/help/kb/graylog2-server/using-graylog2-radio-v020x' target='_blank'>here</a>."
}, {
"link": "https://www.hackerrank.com/",
"logo": "hacker.png",
"logoBgColor": "#ffffff",
"description": "HackerRank uses Kafka as events as a service platform. We publish all the internal activity on our infrastructure into Kafka, and a wide range of internal services subscribe to it."
}, {
"link": "http://www.hotels.com/",
"logo": "hotels.jpg",
"logoBgColor": "#ffffff",
"description": "Hotels.com uses Kafka as pipeline to collect real time events from multiple sources and for sending data to HDFS."
}, {
"link": "http://helprace.com/help-desk",
"logo": "helprace.png",
"logoBgColor": "#ffffff",
"description": "Kafka is used as a distributed high speed message queue in our help desk software as well as our real-time event data aggregation and analytics."
}, {
"link": "http://helpshift.com/",
"logo": "helpshift.png",
"logoBgColor": "#ffffff",
"description": "Produces billions of events with Kafka through an erlang based producer ekaf that supports 8.0, and consumes topics primarily with storm and clojure."
}, {
"link": "http://homeadvisor.com/",
"logo": "homeadvisor.jpg",
"logoBgColor": "#ffffff",
"description": "We use Kafka for logging and async event processing, among other uses."
}, {
"link": "http://www.ifttt.com/",
"logo": "ifttt.png",
"logoBgColor": "#ffffff",
"description": "We use Kafka to ingest real-time log and tracking data for analytics, dashboards, and machine learning."
}, {
"link": "http://www.infobip.com/",
"logo": "infobip.png",
"logoBgColor": "#ffffff",
"description": "Infobip, as a global CPaaS provider, uses Apache Kafka as a central data pipeline, processing over 70B messages per month in order to achieve real-time analytics and reporting."
}, {
"link": "http://www.infochimps.com/",
"logo": "infochimps.png",
"logoBgColor": "#ffffff",
"description": "Kafka is part of the <a href='http://blog.infochimps.com/2012/10/30/next-gen-real-time-streaming-storm-kafka-integration' target='_blank'>InfoChimps real-time data platform</a>."
}, {
"link": "https://www.instaclustr.com/",
"logo": "instaclustr.png",
"logoBgColor": "#ffffff",
"description": "Instaclustr provides a fully <a href='https://www.instaclustr.com/products/managed-apache-kafka/' target='_blank'>managed and hosted Apache Kafka service</a>. We also use Apache Kafka internally for our monitoring, metrics and alerting capabilities."
},{
"link": "http://www.ipinyou.com.cn/?defaultLocale=en",
"logo": "ipinyou.png",
"logoBgColor": "#ffffff",
"description": "The largest DSP in China which has its HQ in Beijing and offices in Shanghai, Guangzhou, Silicon Valley and Seattle. Kafka clusters are the central data hub in iPinYou. All kinds of Internet display advertising data, such as bid/no-bid, impression, click, advertiser, conversion and etc., are collected as primary data streams into Kafka brokers in real time, by LogAggregator (a substitute for Apache Flume, which is implemented in C/C++ by iPinYou, has customized functionality, better performance, lower resource-consuming)."
}, {
"link": "https://www.ironsrc.com/",
"logo": "ironsource.png",
"logoBgColor": "#ffffff",
"description": "ironSource powers the growth of the world's top games, using Apache Kafka as the backbone infrastructure for the async messaging of millions of events per second that run through their industry-leading game growth platform. In addition ironSource uses the Kafka Streams API to handle multiple real-time use cases, such as budget management, monitoring and alerting."
}, {
"link": "https://www.jitbit.com/",
"logo": "jitbit.png",
"logoBgColor": "#168af0",
"description": "Kafka powers Jitbit's logging pipeline, analytics events, but most importantly, queues full-text search indexing for hundreds of millions of help desk tickets in our cloud system."
}, {
"link": "http://banno.com",
"logo": "JHD_Logo.jpg",
"logoBgColor": "#ffffff",
"description": "The Banno Digital Platform from Jack Henry enables community financial institutions to provide world-class service in today’s digital landscape. The Banno team integrates various streams of data through Apache Kafka, reacting to events as they occur, to provide innovative banking solutions."
}, {
"link": "https://www.knoldus.com/",
"logo": "knoldus-logo.png",
"logoBgColor": "#ffffff",
"description": "Knoldus uses Kafka in most of the projects for building a real-time Analytics System as well as has been using Kafka Stream for Async Communication between Microservices."
}, {
"link": "https://www.kpow.io/",
"logo": "kpow-logo.png",
"logoBgColor": "#ffffff",
"description": "kPow is an all-in-one engineering toolkit for monitoring and managing Apache Kafka that is powered by Kafka and Kafka Streams."
}, {
"link": "https://www.kuaishou.com/en",
"logo": "KuaishouLogo.png",
"logoBgColor": "#FFFFFF",
"description": "At kuaishou, Kafka is used as the backbone of realtime data streams, including online training, data integration, realtime data processing, service asynchronous interaction processing and cache data synchronization."
}, {
"link": "https://www.laredoute-corporate.com",
"logo": "LaRedoute.svg",
"logoBgColor": "#FFFFFF",
"description": "La Redoute, the digital platform for families, uses Kafka as a central nervous system to decouple its application through business events. It enables a decentralized, event-driven architecture bringing near-real-time data reporting, analytics and emerging AI-pipelines combining Kafka Connect, Kafka Streams and KSQL."
}, {
"link": "http://linkedin.com",
"logo": "linkedin.jpg",
"logoBgColor": "#007bb6",
"description": "Apache Kafka is used at LinkedIn for activity stream data and operational metrics. This powers various products like LinkedIn Newsfeed, LinkedIn Today in addition to our offline analytics systems like Hadoop."
}, {
"link": "https://www.itau.com.br",
"logo": "itau.png",
"logoBgColor": "#ffffff",
"description": "Itaú Unibanco uses Apache Kafka for integrations, decoupling and application modernization. This kind of technology help us on digital strategies and enable us to deliver new solutions applied to the business, through application streaming and data pipelines, accelerating our digital transformation and evolving our technology architecture."
}, {
"link": "http://www.liveperson.com/",
"logo": "liveperson.png",
"logoBgColor": "#ffffff",
"description": "Using Kafka as the main data bus for all real time events."
}, {
"link": "http://www.linksmart.com/",
"logo": "linksmart.png",
"logoBgColor": "#ffffff",
"description": "Kafka is used at LinkSmart as an event stream feeding Hadoop and custom real time systems."
}, {
"link": "http://www.lucidworks.com/products/lucidworks-big-data",
"logo": "lucidworks.png",
"logoBgColor": "#ffffff",
"description": "We use Kafka for syncing LucidWorks Search (Solr) with incoming data from Hadoop and also for sending LucidWorks Search logs back to Hadoop for analysis."
}, {
"link": "http://loggly.com/",
"logo": "loggly.png",
"logoBgColor": "#ffffff",
"description": "Loggly is the world's most popular cloud-based log management. Our cloud-based log management service helps DevOps and technical teams make sense of the massive quantity of logs. Kafka is used as part of our <a href='http://www.loggly.com/behind-the-screens' target='_blank'>log collection and processing infrastructure.</a>"
}, {
"link": "http://web.livefyre.com/",
"logo": "livefyre.png",
"logoBgColor": "#ffffff",
"description": "Livefyre uses Kafka for the real time notifications, analytics pipeline and as the primary mechanism for general pub/sub."
}, {
"link": "https://mailchimp.com/",
"logo": "mailchimp.png",
"logoBgColor": "#ffffff",
"description": "Kafka powers MailChimp’s data pipeline that in turn powers <a href='https://mailchimp.com/pro/' target='_blank'>MailChimp Pro</a>, as well as an increasing number of other product features. You can read some of the details <a href='https://devs.mailchimp.com/blog/powering-mailchimp-pro-reporting/' target='_blank'>here</a>."
}, {
"link": "http://www.mate1.com/about",
"logo": "mate1.png",
"logoBgColor": "#000000",
"description": "Apache Kafka is used at Mate1 as our main event bus that powers our news and activity feeds, automated review systems, and will soon power real time notifications and log distribution."
}, {
"link": "http://metamarkets.com/",
"logo": "metamarkets.png",
"logoBgColor": "#ffffff",
"description": "We use Kafka to ingest real-time event data, stream it to Storm and Hadoop, and then serve it from our Druid cluster to feed our interactive analytics dashboards. We've also built connectors for directly ingesting events from Kafka into Druid."
}, {
"link": "https://www.moesif.com",
"logo": "moesif.png",
"logoBgColor": "#ffffff",
"description": "Moesif (an API analytics service) uses Kafka to power our pipeline to enrich large amounts of API event data. It's used as our backbone to ship that data to multiple data centers and services. It’s one of the most rock stable open source software out there for anyone in data processing."
}, {
"link": "http://mozilla.org/",
"logo": "mozilla.png",
"logoBgColor": "#ffffff",
"description": "Kafka will soon be replacing part of our current production system to collect performance and usage data from the end-users browser for projects like Telemetry, Test Pilot, etc. Downstream consumers usually persist to either HDFS or HBase."
}, {
"link": "http://netflix.com",
"logo": "netflix.png",
"logoBgColor": "#FFFFFF",
"description": "Real-time monitoring and event-processing <a href='http://techblog.netflix.com/2016/04/kafka-inside-keystone-pipeline.html' target='_blank'>pipeline</a>."
}, {
"link": "https://newrelic.com",
"logo": "NewRelic.png",
"logoBgColor": "#FFFFFF",
"description": "At New Relic, Apache Kafka supports the real-time monitoring and event processing of our customers' mission critical data. Deployed at scale, Kafka supports New Relic's data ingestion pipeline on the order of hundreds of gigabytes per second."
}, {
"link": "http://www.nuuly.com",
"logo": "nuuly.jpeg",
"logoBgColor": "#FFFFFF",
"description": "Nuuly, a clothing rental subscription from the Urban Outfitters family of brands, uses Kafka as a central nervous system to integrate our front-end customer experience with real-time inventory management and operations at our distribution center. Nuuly relies on Kafka Streams and Kafka Connect, coupled with data science and machine learning to provide in-the-moment business intelligence and to tailor a personalized rental experience to our customers."
}, {
"link": "http://www.oracle.com/",
"logo": "oracle.png",
"logoBgColor": "#ffffff",
"description": "Oracle provides native connectivity to Kafka from its Enterprise Service Bus product called OSB (Oracle Service Bus) which allows developers to leverage OSB built-in mediation capabilities to implement staged data pipelines."
}, {
"link": "http://www.outbrain.com/",
"logo": "outbrain.png",
"logoBgColor": "#ffffff",
"description": "We use Kafka in production for real time log collection and processing, and for cross-DC cache propagation."
}, {
"link": "http://www.oracle.com/technetwork/middleware/goldengate/overview/index.html",
"logo": "oraclegoldengate.png",
"logoBgColor": "#ffffff",
"description": "GoldenGate offers a comprehensive solution that streams transactional data from various sources into various big data targets including Kafka in real-time, enabling organizations to build fault-tolerant, highly reliable, and extensible analytical applications."
}, {
"link": "http://www.ooyala.com/",
"logo": "ooyala.png",
"logoBgColor": "#ffffff",
"description": "Kafka is used as the primary high speed message queue to power Storm and our real-time analytics/event ingestion pipelines."
}, {
"link": "https://oribi.io/oribi-vs-google-analytics",
"logo": "oribi.png",
"logoBgColor": "#ffffff",
"description": "Apache Kafka is the core enabler of Oribi's big-data real-time analytics solution. As Oribi is built for businesses of all sizes to make smarter, data-driven decisions, Apache Kafka enables us to ingest and process billions of events on a daily basis."
}, {
"link": "http://www.optimove.com/",
"logo": "optimove.svg",
"logoBgColor": "#ffffff",
"description": "Optimove powers its real-time event streaming with Confluent Cloud and Apache Kafka to maintain always-up-to-date views of customer data and support machine learning algorithms that maximize campaign effectiveness."
}, {
"link": "http://www.otics.ca/",
"logo": "otics.png",
"logoBgColor": "#ffffff",
"description": "We use Apache Kafka with our MAADS-VIPER product to manage the distribution of insights from thousands of machine learning algorithms that allow users or machines to publish and consume these insights for decision-making. We also use Kafka for Real-Time Machine Learning to create micro machine learning models that provide clients with transactional learnings very fast."
}, {
"link": "http://www.ovh.com/us/index.xml",
"logo": "ovh.png",
"logoBgColor": "#ffffff",
"description": "OVH has been using Kafka in production for over a year now, as an event bus and a data pipeline for anti-DDoS, with more to come."
}, {
"link": "http://www.parsely.com/",
"logo": "parsely.png",
"logoBgColor": "#ffffff",
"description": "Kafka is used for all <a href='http://www.parsely.com/misc/slides/logs/#1' target='_blank'>data integration </a> of analytics event data."
}, {
"link": "http://www.paypal.com/",
"logo": "paypal.png",
"logoBgColor": "#ffffff",
"description": "At PayPal, Kafka is used for first-party tracking, application health metrics streaming and aggregation, database synchronization, application log aggregation, batch processing, risk detection and management, and analytics and compliance, with each of these use-cases processing over 100 billion messages per day. See <a href='https://developer.paypal.com/community/blog/scaling-kafka-to-support-paypals-data-growth/' target='_blank'>this</a>."
}, {
"link": "http://www.portoseguro.com.br/",
"logo": "porto-seguro.png",
"logoBgColor": "#ffffff",
"description": "We use Kafka in production for online and near real-time solutions. Kafka is a core part for many products, such as our Credit Card System."
}, {
"link": "http://quixey.com/",
"logo": "quixey.png",
"logoBgColor": "#ffffff",
"description": "At Quixey, The Search Engine for Apps, Kafka is an integral part of our eventing, logging and messaging infrastructure."
}, {
"link": "https://recursionpharma.com",
"logo": "recursion.png",
"logoBgColor": "#ffffff",
"description": "Recursion uses Kafka Streams to power its data pipeline for its drug discovery efforts. Kafka is used to coordinate various services across the company. For more information about the use case see <a href='https://www.confluent.io/kafka-summit-san-francisco-2019/discovering-drugs-with-kafka-streams' target='_blank'>this Kafka Summit talk</a>."
}, {
"link": "http://www.retentionscience.com/",
"logo": "retentionscience.jpg",
"logoBgColor": "#ffffff",
"description": "Click stream ingestion and processing."
}, {
"link": "http://www.richrelevance.com/",
"logo": "richrelevance.png",
"logoBgColor": "#ffffff",
"description": "Real-time tracking event pipeline."
}, {
"link": "https://rollbar.com/blog/apache-kafka-example-how-rollbar-removed-technical-debt-part-1/",
"logo": "rollbar.png",
"logoBgColor": "#ffffff",
"description": "Rollbar uses Kafka at large scale to store all incoming raw blobs. Kafka allowed us to have multiple workers and also allowed us to migrate to Kubernetes."
}, {
"link": "https://www.salesforce.com/",
"logo": "salesforce.jpg",
"logoBgColor": "#ffffff",
"description" : "Salesforce adopted Apache Kafka to implement a <a href='https://engineering.salesforce.com/expanding-visibility-with-apache-kafka-e305b12c4aba' target='_blank'>pub/sub architecture system</a> and to securely add an enterprise-ready, <a href='https://engineering.salesforce.com/how-apache-kafka-inspired-our-platform-events-architecture-2f351fe4cf63' target='_blank'>event-driven layer</a> to our multi-tenant system. With Kafka as the central nervous system of our microservices architecture, <a href='https://engineering.salesforce.com/real-time-einstein-insights-using-kafka-streams-ca94008c2c6f' target='_blank'>Kafka Streams applications</a> perform a variety of operations to generate useful real-time insights for our customers."
}, {
"link": "https://www.schrodinger.com/platform",
"logo": "schrodinger.png",
"logoBgColor": "#ffffff",
"description": "At Schrödinger, Kafka powers our physics-based computational platform by feeding data into our predictive modeling, data analytics, and collaboration services thus enabling rapid exploration of chemical space.<br /><br />More specifically, Kafka is used as a distributed high speed event bus while Kafka Connect and Kafka Streams are the basic components of our streaming Change Data Capture framework used by LiveDesign, our enterprise informatics solution.<br /><br />Currently, Schrödinger processes billions of molecules per week and our Kafka-powered data pipeline enables us to scale our architecture easily and push this even further."
}, {
"link": "http://sematext.com/",
"logo": "sematext.png",
"logoBgColor": "#ffffff",
"description": "In <a href='http://sematext.com/spm' target='_blank'>SPM (performance monitoring + alerting)</a>, Kafka is used for metrics collection and feeds SPM's in-memory data aggregation (OLAP cube creation) as well as our CEP/Alerts servers (see also: <a href='http://blog.sematext.com/2013/10/16/announcement-spm-performance-monitoring-for-kafka/' target='_blank'>SPM for Kafka performance monitoring</a>). In <a href='http://sematext.com/search-analytics' target='_blank'>SA (search analytics)</a> Kafka is used in search and click stream collection before being aggregated and persisted. In <a href='http://sematext.com/logsene' target='_blank'>Logsene (log analytics)</a> Kafka is used to pass logs and other events from front-end receivers to the persistent backend."
}, {
"link": "https://sentiance.com/",
"logo": "sentiance.png",
"logoBgColor": "#ffffff",
"description": "Apache Kafka is the very backbone of Sentiance's data processing platform, from the ingestion of raw sensor from smartphones into the data pipeline, to the rich insights generated by it and used in Mobility Intelligence, Crash Detection, Driver Coaching, Healthier Lifestyles and Consumer Profiling."
}, {
"link": "http://sentry.io/",
"logo": "sentry.png",
"logoBgColor": "#ffffff",
"description": "Sentry uses Apache Kafka® as our main platform for streaming data throughout the product, acting as our persistent and highly-available transport between services dedicated to event ingestion, product search, business intelligence, and machine learning."
}, {
"link": "https://www.skillsoft.com/",
"logo": "skillsoft.png",
"logoBgColor": "#ffffff",
"description": "At Skillsoft, Apache Kafka is a vital component of our online learning platform, <a href='https://www.skillsoft.com/meet-skillsoft-percipio' target='_blank'>Percipio</a>, empowering features such as activity tracking and content recommendation for learners."
}, {
"link": "http://www.skyscanner.net/",
"logo": "skyscanner.png",
"logoBgColor": "#ffffff",
"description": "The world's travel search engine, uses Kafka for real-time log and event ingestion. It is the integration point for all stream-processing and data transportation services."
}, {
"link": "http://www.strava.com/",
"logo": "strava.jpg",
"logoBgColor": "#ffffff",
"description": "Powers our analytics pipeline, activity feeds denorm and several other production services."
}, {
"link": "http://www.swiftkey.net/",
"logo": "swiftkey.png",
"logoBgColor": "#ffffff",
"description": "We use Apache Kafka for analytics event processing."
}, {
"link": "http://square.com",
"logo": "square.png",
"logoBgColor": "#FFFFFF",
"description": "We use Kafka as a bus to move all systems events through our various datacenters. This includes metrics, logs, custom events etc. On the consumer side, we output into Splunk, Graphite, Esper-like real-time alerting."
}, {
"link": "http://spotify.com",
"logo": "spotify.png",
"logoBgColor": "#1ed760",
"description": "Kafka is used at Spotify as part of their log <a href='http://www.meetup.com/stockholm-hug/events/121628932' target='_blank'>delivery system</a>."
}, {
"link": "http://www.stumbleupon.com/",
"logo": "stumbleupon.png",
"logoBgColor": "#eb4924",
"description": "Data collection platform for analytics."
}, {
"link": "http://www.shopify.com/",
"logo": "shopify.png",
"logoBgColor": "#ffffff",
"description": "Access logs, A/B testing events, domain events (\"a checkout happened\", etc.), metrics, delivery to HDFS, and customer reporting. We are now focusing on consumers: analytics, support tools, and fraud analysis."
}, {
"link": "https://www.smily.com/",
"logo": "smily.svg",
"logoBgColor": "#ffffff",
"description": "Apache Kafka is used as a backbone of data synchronization and propagating changes to entities/aggregates between multiple applications in the Smily ecosystem."
}, {
"link": "http://www.socialtwist.com/",
"logo": "socialtwist.jpg",
"logoBgColor": "#ffffff",
"description": "We use Kafka internally as part of our reliable email queueing system."
}, {
"link": "https://www.softnetx.com",
"logo": "softnetx.png",
"logoBgColor": "#ffffff",
"description": "Multi-Cloud Backup and Migration."
}, {
"link": "http://www.spongecell.com/",
"logo": "spongecell.png",
"logoBgColor": "#ffffff",
"description": "We use Kafka to run our entire analytics and monitoring pipeline driving both real-time and ETL applications for our customers."
}, {
"link": "https://www.simple.com/",
"logo": "simple.gif",
"logoBgColor": "#ffffff",
"description": "We use Kafka at Simple for log aggregation and to power our analytics infrastructure."
}, {
"link": "http://www.tagged.com/",
"logo": "tagged.png",
"logoBgColor": "#ffffff",
"description": "Apache Kafka drives our new pub sub system which delivers real-time events for users in our latest game - Deckadence. It will soon be used in a host of new use cases including group chat and back end stats and log collection."
}, {
"link": "http://www.tecton.ai/",
"logo": "tecton.png",
"logoBgColor": "#ffffff",
"description": "Tecton is a data platform for machine learning. It transforms raw data into production-ready ML data. Tecton empowers data scientists to build a library of great features, serve them in production instantly, and do it at scale.<br /><br />We use Apache Kafka in multiple ways. First - we consume data from Kafka and process it in real time to build features with fresh data or to combine data from multiple sources. Next - we can serve data to Kafka for consumption by multiple downstream models. "
}, {
"link": "http://www.tencent.com/",
"logo": "Tencent.svg",
"logoBgColor": "#ffffff",
"description": "The Platform and Content Group (PCG) at Tencent built essential cross-region asynchronous data pipelines around Apache Kafka to support the operation and growth of business that integrates Tencent's internet, social, and content platforms as microservices."
}, {
"link": "https://www.tokenanalyst.io/",
"logo": "tokenanalyst.png",
"logoBgColor": "#ffffff",
"description": "At TokenAnalyst, we’re using Kafka for ingestion of blockchain data—which is directly pushed from our cluster of Bitcoin and Ethereum nodes—to different streams of transformation and loading processes."
}, {
"link": "https://www.tumblr.com/",
"logo": "tumblr.png",
"logoBgColor": "#5eba8c",
"description": "See <a href='http://highscalability.com/blog/2012/2/13/tumblr-architecture-15-billion-page-views-a-month-and-harder.html' target='_blank'>this</a>."
}, {
"link": "http://twitter.com",
"logo": "twitter.jpg",
"logoBgColor": "#28a9e2",
"description": "As part of their Storm stream processing infrastructure, e.g. <a href='http://engineering.twitter.com/2013/01/improving-twitter-search-with-real-time.html' target='_blank'>this</a> and <a href='https://blog.twitter.com/2015/handling-five-billion-sessions-a-day-in-real-time' target='_blank'>this</a>."
}, {
"link": "http://www.trivago.com/",
"logo": "trivago.png",
"logoBgColor": "#ffffff",
"description": "Trivago uses Kafka for stream processing in Storm as well as processing of application logs."
}, {
"link": "http://www.urbanairship.com/",
"logo": "urbanairship.png",
"logoBgColor": "#ffffff",
"description": "At Urban Airship we use Kafka to buffer incoming data points from mobile devices for processing by our analytics infrastructure."
}, {
"link": "http://www.uswitch.com/",
"logo": "uswitch.png",
"logoBgColor": "#ffffff",
"description": "See <a href='http://oobaloo.co.uk/kafka-for-uswitchs-event-pipeline' target='_blank'>this blog</a>."
}, {
"link": "http://www.visualrevenue.com/",
"logo": "visualrevenue.jpg",
"logoBgColor": "#1c1a88",
"description": "We use Kafka as a distributed queue in front of our web traffic stream processing infrastructure (Storm)."
}, {
"link": "http://www.visualdna.com/",
"logo": "visualdna.jpg",
"logoBgColor": "#ffffff",
"description": "We use Kafka 1. as an infrastructure that helps us bring continuously the tracking events from various datacenters into our central hadoop cluster for offline processing, 2. as a propagation path for data integration, 3. as a real-time platform for future inference and recommendation engines"
}, {
"link": "http://wooga.com/",
"logo": "wooga.png",
"logoBgColor": "#ffffff",
"description": "We use Kafka to aggregate and process tracking data from all our facebook games (which are hosted at various providers) in a central location."
}, {
"link": "http://www.wizecommerce.com/",
"logo": "wizecommerce.gif",
"logoBgColor": "#ffffff",
"description": "At Wize Commerce (previously, NexTag), Kafka is used as a distributed queue in front of Storm based processing for search index generation. We plan to also use it for collecting user generated data on our web tier, landing the data into various data sinks like Hadoop, HBase, etc."
}, {
"link": "http://wikimediafoundation.org/wiki/Our_projects",
"logo": "wikimedia.png",
"logoBgColor": "#ffffff",
"description": "Wikimedia Foundation uses Kafka as the base of our <a href='https://wikitech.wikimedia.org/wiki/Event_Platform' target='_blank'>event data platform</a> for both production and analytics, including reactive Wikipedia cache invalidation and reliable ingestion of large data streams into Hadoop."
}, {
"link": "https://www.vividcortex.com/",
"logo": "vividcortex.png",
"logoBgColor": "#ffffff",
"description": "VividCortex uses Kafka in our SaaS MySQL performance management platform to reliably ingest high-volume 1-second timeseries data."
}, {
"link": "http://xitenetworks.com/",
"logo": "xite.png",
"logoBgColor": "#ffffff",
"description": "Kafka is at the heart of our Data Infrastructure - Business Intelligence, Recommender Systems and Machine Learning solutions are built as reactive and streaming architecture. Also we use Kafka as a great alternative to REST APIs for micro-services integration. This allows us to scale and reliably upgrade micro-services without integration and consistency issues."
}, {
"link": "http://yahoo.com",
"logo": "yahoo.png",
"logoBgColor": "#3d018b",
"description": "See <a href='http://yahooeng.tumblr.com/post/109994930921/kafka-yahoo' target='_blank'>this</a>."
}, {
"link": "http://www.yieldbot.com/",
"logo": "yieldbot.png",
"logoBgColor": "#ffffff",
"description": "Yieldbot uses kafka for real-time events, camus for batch loading, and mirrormakers for x-region replication."
}, {
"link": "http://yellerapp.com/",
"logo": "yeller.png",
"logoBgColor": "#ffffff",
"description": "Yeller uses Kafka to process large streams of incoming exception data for its customers. Rate limiting, throttling and batching are all built on top of Kafka."
}, {
"link": "https://www.opt.nc/",
"logo": "OPT.jpg",
"logoBgColor": "#ffffff",
"description": "OPT.nc uses Kafka to process large amounts of logs, to queue SMS message sending, and to spread and share internal data with Streams across various information systems such as Telecommunication, Financial services, Geographical Information System (GIS) and Post Office delivery process."
}, {
"link": "https://atruvia.de/",
"logo": "atruvia_logo_online_rgb.png",
"logoBgColor": "#d4f2f5",
"description": "At Atruvia we use Apache Kafka to share events within the modern banking platform."
}, {
"link": "https://allegrograph.com/",
"logo": "allegrograph-franz-logo.png",
"logoBgColor": "#ffffff",
"description": "AllegroGraph and Kafka are used together as an Entity Event Knowledge Graph platform in diverse settings such as call centers, hospitals, insurance companies, aviation organizations and financial firms. By coupling AllegroGraph with Kafka, users can create a real-time decision engine that produces real-time event streams based on computations that trigger specific actions. AllegroGraph accepts incoming events, executes instant queries and analytics on the new data and then stores events and results."
}, {
"link": "http://www.atguigu.com/",
"logo": "atguigu.png",
"logoBgColor": "#ffffff",
"description": "In our real-time data warehouse, Apache Kafka is used as a reliable distributed message queue, which allows us to build a highly available analysis system."
}, {
"link": "https://www.covage.com/",
"logo": "covage.png",
"logoBgColor": "#ffffff",
"description": "Covage is an infrastructure operator designing, deploying and operating high speed open access networks. At the very heart of our IT platform, Kafka is ensuring propagating our business workflows' events among all applications."
}, {
"link": "https://www.qudosoft.de/",
"logo": "qudosoft_wortbildmarke.png",
"logoBgColor": "#ffffff",
"description": "At Qudosoft, as part of the bigger tech network organization behind Germany based KLiNGEL group, we build a big scale e-commerce plattform called Next Level Commerce (NLC). NLC is based on the principle of customer-oriented verticalization which allows us maximum autonomy for our teams. With our microservices architecture we strive for high flexibility and scalability. Using Kafka for processing event streams supports inter-connecting these services in exactly that manner."
}, {
"link": "https://www.moengage.com/",
"logo": "moengage.png",
"logoBgColor": "#ffffff",
"description": "At MoEngage, Apache Kafka is one of the core components of our infrastructure and the backbone of all the events streaming services. We run over 25 Kafka clusters, processing over 1 million messages per second across all these clusters. Find more about our journey with Kafka so far at <a href='https://www.moengage.com/blog/kafka-at-moengage/'>Kafka Redesign and Lessons Learned</a>."
}, {
"link": "https://hellosafe.ca/en",
"logo": "hellosafe.svg",
"logoBgColor": "#ffffff",
"description": "HelloSafe is an international online insurance policy comparison platform. It is a completely free site, made available to consumers with the aim of helping them to facilitate their choice of insurance, and to bring more transparency to a market that is often lacking."
}, {
"link": "https://nussknacker.io/",
"logo": "nussknacker.svg",
"logoBgColor": "#ffffff",
"description": "Nussknacker is a low-code tool that allows IT teams to hand over decision algorithms to non-technical users. Apache Kafka is Nussknacker's primary input and output interface in streaming use cases - Nussknacker reads events from Kafka, applies decision algorithms and outputs actions to Kafka."
}, {
"link": "https://edenlab.io",
"logo": "edenlab.svg",
"logoBgColor": "#ffffff",
"description": "Edenlab FHIR engineers use Kafka to manage real-time data flow between dozens of microservices in our product - <a href=https://kodjin.com>Kodjin Interoperability Suite</a> - a low-code FHIR-based infrastructure for healthcare data management (contains FHIR Server, Terminology Service, and Mapper). Edenlab is a custom software and product development company focused primarily on healthcare data interoperability. We are also working with some major customers from the Fintech space (like Mastercard ® and Raiffeisen Bank International)."
}, {
"link": "https://spitha.io/",
"logo": "spitha.png",
"logoBgColor": "#ffffff",
"description": "Based in South Korea, SPITHA is a team of experts specializing in Apache Kafka. We are driven by the question of how to make Kafka easier for users, and with that in mind, we are developing 'Felice,' a robust tool designed to streamline the operation and management of Kafka. Alongside this, we offer in-depth technical consulting and support services to provide comprehensive solutions to our clients."
}];
</script>
<body class="page-powered-by ">
<!--#include virtual="includes/_top.htm" -->
<div class="content">
<!--#include virtual="includes/_nav.htm" -->
<div class="right">
<h1 class="content-title">Powered By</h1>
<p>
Apache Kafka is the most popular open-source stream-processing software for collecting, processing, storing, and analyzing data at scale. Most known for its excellent performance, low latency, fault tolerance, and high throughput, it's capable of handling thousands of messages per second. With over 1,000 Kafka use cases and counting, some common benefits are building data pipelines, leveraging real-time data streams, enabling operational metrics, and data integration across countless sources.
</p>
<p>
Today, Kafka is used by thousands of companies including over 80% of the Fortune 100. Among these are Box, Goldman Sachs, Target, Cisco, Intuit, and more. As the trusted tool for empowering and innovating companies, Kafka allows organizations to modernize their data strategies with event streaming architecture. Learn how Kafka is used by organizations in every industry - from computer software, financial services, and health care, to government and transportation.
</p>
<div class="grid" data-masonry='{ "itemSelector": ".grid__item"}'></div>
<div class="callout callout--basic">
<h3>Want to appear on this page?</h3>
<p>
Submit a <a href="https://github.com/apache/kafka-site/edit/asf-site/powered-by.html">pull request</a> or send a quick description of your organization and usage to the <a href="/contact">mailing list</a> and we'll add you.
</p>
</div>
<script src="https://unpkg.com/[email protected]/dist/masonry.pkgd.min.js"></script>
<script>
// On DOM ready, mark the "Powered By" link in the site nav (pulled in via the
// _nav.htm SSI include above) as the currently selected page.
$(function() { $('.b-nav__poweredby').addClass('selected'); });
</script>
<!--#include virtual="includes/_footer.htm" -->
<script id="grid__item-template" type="text/x-handlebars-template">
<div class="grid__item">
<a href="{{link}}" target="_blank" class="grid__item__link" style="background-color:{{logoBgColor}};">
<span class="grid__item__logo" style="background-image: url('/images/powered-by/{{logo}}');"></span>
</a>
<p class="grid__item__description">{{{description}}}</p>
</div>
</script>
<script>
$(function () {
// enforce alphabetical sort by root domain for fairness
poweredByItems.forEach(function getRootDomain(item) {
/**
* 1. remove the protocol
* ex. https://subdomain.example.com/kafka-article/whatever.html => subdomain.example.com/kafka-article/whatever.html
* 2. split the URL at the first occurrence of /, ?, or # and takes the first part
* ex. subdomain.example.com/kafka-article/whatever.html => subdomain.example.com
* 3. split the domain on . and take the last two parts
* ex. subdomain.example.com => example.com
*/
item.domain = item.link.replace(/^https?:\/\//, '').split(/[/?#]/)[0].toLowerCase().split('.').slice(-2).join('.');
});
poweredByItems.sort(function(a, b) {
return a.domain.localeCompare(b.domain);
});
// loop through all Handlebar templates on the page and render them
for(var i = 0; i < poweredByItems.length; i++) {
var context = poweredByItems[i];
var templateScript = $("#grid__item-template").html();
var template = Handlebars.compile(templateScript);
var html = template(context);
$(".grid").append(html);
}
});
</script>