Question
Hi, I'm currently setting up Kafka with Docker. I've managed to set up ZooKeeper and Kafka with the published Confluent images; see the following docker-compose file:
version: '2'
services:
  zookeeper:
    image: confluentinc/cp-zookeeper:3.2.0
    container_name: zookeeper
    hostname: zookeeper
    ports:
      - "2181:2181"
    environment:
      ZOOKEEPER_CLIENT_PORT: 2181
      ZOOKEEPER_TICK_TIME: 2000
    restart: always
  kafka:
    image: confluentinc/cp-kafka:3.2.0
    hostname: kafka
    container_name: kafka
    depends_on:
      - zookeeper
    ports:
      - '9092:9092'
    environment:
      KAFKA_BROKER_ID: 1
      KAFKA_ZOOKEEPER_CONNECT: 'zookeeper:2181'
      KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://192.168.99.100:9092
      KAFKA_LISTENERS: PLAINTEXT://0.0.0.0:9092
    restart: always
  kafka-rest:
    image: confluentinc/cp-kafka-rest:3.2.0
    container_name: kafka-rest
    depends_on:
      - kafka
    ports:
      - '8082:8082'
    environment:
      KAFKA_REST_ZOOKEEPER_CONNECT: 'zookeeper:2181'
      KAFKA_REST_LISTENERS: http://kafka-rest:8082
      KAFKA_REST_SCHEMA_REGISTRY_URL: http://schema-registry:8081
      KAFKA_REST_HOST_NAME: kafka-rest
    restart: always
  schema-registry:
    image: confluentinc/cp-schema-registry:3.2.0
    container_name: schema-registry
    depends_on:
      - kafka
    ports:
      - '8081'
    environment:
      SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181'
      SCHEMA_REGISTRY_HOST_NAME: schema-registry
      SCHEMA_REGISTRY_LISTENERS: http://schema-registry:8081
    restart: always
  connect:
    image: confluentinc/cp-kafka-connect:3.2.0
    container_name: kafka-connect
    depends_on:
      - zookeeper
      - kafka
      - schema-registry
    ports:
      - "8083:8083"
    restart: always
    environment:
      CONNECT_BOOTSTRAP_SERVERS: 'kafka:9092'
      CONNECT_REST_ADVERTISED_HOST_NAME: connect
      CONNECT_REST_PORT: 8083
      CONNECT_GROUP_ID: compose-connect-group
      CONNECT_CONFIG_STORAGE_TOPIC: docker-connect-configs
      CONNECT_OFFSET_STORAGE_TOPIC: docker-connect-offsets
      CONNECT_STATUS_STORAGE_TOPIC: docker-connect-status
      CONNECT_KEY_CONVERTER: io.confluent.connect.avro.AvroConverter
      CONNECT_KEY_CONVERTER_SCHEMA_REGISTRY_URL: http://schema-registry:8081
      CONNECT_VALUE_CONVERTER: io.confluent.connect.avro.AvroConverter
      CONNECT_VALUE_CONVERTER_SCHEMA_REGISTRY_URL: http://schema-registry:8081
      CONNECT_INTERNAL_KEY_CONVERTER: org.apache.kafka.connect.json.JsonConverter
      CONNECT_INTERNAL_VALUE_CONVERTER: org.apache.kafka.connect.json.JsonConverter
      CONNECT_ZOOKEEPER_CONNECT: "zookeeper:2181"
Now I've managed to correctly expose the Kafka container to my non-dockerized applications by setting the advertised.listeners property to PLAINTEXT://{DOCKER_MACHINE_IP}:9092. But as you can see, I've also added other Confluent applications (Kafka REST, Schema Registry) to extend my Kafka setup, and these can no longer connect to my Kafka instance because of that advertised.listeners value.
I could change it to the container hostname instead (PLAINTEXT://kafka:9092), but then my non-dockerized applications would once again be unable to reach the Kafka instance. Is there an easy way to solve this?
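For context, this is roughly how one of my non-dockerized applications connects today (a minimal sketch using the kafka-python client; the topic name is just a placeholder, and 192.168.99.100 is the Docker machine IP from the compose file above):
# Minimal host-side producer sketch (assumes `pip install kafka-python`).
# The client bootstraps against the published port and then uses whatever
# address the broker advertises (advertised.listeners) for all further
# connections - which is why that value has to be resolvable from here.
from kafka import KafkaProducer

producer = KafkaProducer(bootstrap_servers="192.168.99.100:9092")
producer.send("example-topic", b"hello from outside Docker")
producer.flush()
producer.close()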
Answer 1:
Omar, maybe you've already resolved your problem, but for future reference, Hans Jespersen's comment did the trick for me, even on Windows.
As admin, open C:\Windows\System32\drivers\etc\hosts and add the following line to map the Kafka broker's hostname to localhost:
127.0.0.1 broker
My docker-compose.yml file looks as follows:
---
version: '2'
services:
  zookeeper:
    image: confluentinc/cp-zookeeper
    hostname: zookeeper
    extra_hosts:
      - "moby:127.0.0.1"
    ports:
      - "2181:2181"
    environment:
      ZOOKEEPER_CLIENT_PORT: 2181
      ZOOKEEPER_TICK_TIME: 2000
  broker:
    image: confluentinc/cp-kafka
    hostname: broker
    extra_hosts:
      - "moby:127.0.0.1"
    depends_on:
      - zookeeper
    ports:
      - '9092:9092'
    environment:
      KAFKA_BROKER_ID: 1
      KAFKA_ZOOKEEPER_CONNECT: 'zookeeper:2181'
      KAFKA_ADVERTISED_LISTENERS: 'PLAINTEXT://broker:9092'
      KAFKA_DEFAULT_REPLICATION_FACTOR: 1
  schema_registry:
    image: confluentinc/cp-schema-registry
    hostname: schema_registry
    # extra_hosts:
    #   - "moby:127.0.0.1"
    depends_on:
      - zookeeper
      - broker
    ports:
      - '8081:8081'
    environment:
      SCHEMA_REGISTRY_HOST_NAME: schema_registry
      SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181'
  kafka-rest:
    image: confluentinc/cp-kafka-rest
    container_name: kafka-rest
    extra_hosts:
      - "moby:127.0.0.1"
    depends_on:
      - zookeeper
      - broker
    ports:
      - '8082:8082'
    environment:
      KAFKA_REST_ZOOKEEPER_CONNECT: 'zookeeper:2181'
      KAFKA_REST_LISTENERS: http://kafka-rest:8082
      KAFKA_REST_SCHEMA_REGISTRY_URL: http://schema_registry:8081
      KAFKA_REST_HOST_NAME: kafka-rest
Alternatively, using my laptop's current IP address (found via ipconfig /all) as the advertised listener works too, but it has the disadvantage that whenever my network changes I have to update the docker-compose.yml file as well.
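To verify the hosts entry, a small sketch like the following (kafka-python, run directly on the host rather than in a container; the topic name is just an example) should be able to produce and consume through broker:9092:
# Quick host-side smoke test (assumes `pip install kafka-python` and the
# `127.0.0.1 broker` hosts entry from above). Both the bootstrap address
# and the advertised listener are broker:9092, so the metadata returned
# by the broker resolves correctly on the host as well as in containers.
from kafka import KafkaConsumer, KafkaProducer

producer = KafkaProducer(bootstrap_servers="broker:9092")
producer.send("smoke-test", b"ping")
producer.flush()

consumer = KafkaConsumer(
    "smoke-test",
    bootstrap_servers="broker:9092",
    auto_offset_reset="earliest",
    consumer_timeout_ms=5000,  # stop polling after 5 s so the script exits
)
for message in consumer:
    print(message.value)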
Answer 2:
Assuming that this setup is intended for your local development environment, here is a solution that works both inside and outside the Docker network. The broker is configured with two listeners: PLAINTEXT, advertised as kafka:29092 for clients inside the Docker network, and PLAINTEXT_HOST, advertised as localhost:9092 for clients running on the host.
version: '2'
services:
  zookeeper:
    image: confluentinc/cp-zookeeper:3.2.0
    environment:
      ZOOKEEPER_CLIENT_PORT: 2181
      ZOOKEEPER_TICK_TIME: 2000
    restart: always
    ports: ['2181:2181']
  kafka:
    image: confluentinc/cp-kafka:3.2.0
    depends_on:
      - zookeeper
    ports: ['9092:9092']
    environment:
      KAFKA_BROKER_ID: 1
      KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
      KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:29092,PLAINTEXT_HOST://localhost:9092
      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT
      KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT
      KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
    restart: always
  kafka-rest:
    image: confluentinc/cp-kafka-rest:3.2.0
    depends_on:
      - kafka
    ports: ['8082:8082']
    environment:
      KAFKA_REST_ZOOKEEPER_CONNECT: 'zookeeper:2181'
      KAFKA_REST_LISTENERS: http://0.0.0.0:8082
      KAFKA_REST_SCHEMA_REGISTRY_URL: http://schema-registry:8081
      KAFKA_REST_HOST_NAME: localhost
    restart: always
  schema-registry:
    image: confluentinc/cp-schema-registry:3.2.0
    depends_on:
      - kafka
    ports: ['8081:8081']
    environment:
      SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181'
      SCHEMA_REGISTRY_HOST_NAME: schema-registry
      SCHEMA_REGISTRY_LISTENERS: http://0.0.0.0:8081
    restart: always
  connect:
    image: confluentinc/cp-kafka-connect:3.2.0
    depends_on:
      - zookeeper
      - kafka
      - schema-registry
    ports: ['8083:8083']
    restart: always
    environment:
      CONNECT_BOOTSTRAP_SERVERS: 'kafka:29092'
      CONNECT_REST_ADVERTISED_HOST_NAME: connect
      CONNECT_REST_PORT: 8083
      CONNECT_GROUP_ID: compose-connect-group
      CONNECT_CONFIG_STORAGE_TOPIC: docker-connect-configs
      CONNECT_OFFSET_STORAGE_TOPIC: docker-connect-offsets
      CONNECT_STATUS_STORAGE_TOPIC: docker-connect-status
      CONNECT_KEY_CONVERTER: io.confluent.connect.avro.AvroConverter
      CONNECT_KEY_CONVERTER_SCHEMA_REGISTRY_URL: http://schema-registry:8081
      CONNECT_VALUE_CONVERTER: io.confluent.connect.avro.AvroConverter
      CONNECT_VALUE_CONVERTER_SCHEMA_REGISTRY_URL: http://schema-registry:8081
      CONNECT_INTERNAL_KEY_CONVERTER: org.apache.kafka.connect.json.JsonConverter
      CONNECT_INTERNAL_VALUE_CONVERTER: org.apache.kafka.connect.json.JsonConverter
      CONNECT_ZOOKEEPER_CONNECT: "zookeeper:2181"
An updated version of this configuration is maintained here: https://github.com/confluentinc/examples/blob/5.3.1-post/cp-all-in-one/docker-compose.yml
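With this layout, each client picks its bootstrap address based on where it runs: containers on the compose network use kafka:29092, while applications on the host use localhost:9092. Here is a rough host-side sanity check (kafka-python and requests are assumed to be installed; it is not part of the compose file itself):
# Host-side checks for the two-listener setup above (assumes
# `pip install kafka-python requests` and that ports 9092, 8082 and 8083
# are published as in the compose file). Containers on the compose network
# would bootstrap against kafka:29092, the internally advertised listener.
import requests
from kafka import KafkaConsumer

# Talk to the broker through the PLAINTEXT_HOST listener.
consumer = KafkaConsumer(bootstrap_servers="localhost:9092")
print(consumer.topics())  # set of topic names visible from the host
consumer.close()

# The REST proxy and Kafka Connect are plain HTTP; they reach the broker
# over the Docker network and only need their own published ports here.
print(requests.get("http://localhost:8082/topics").json())
print(requests.get("http://localhost:8083/connectors").json())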
Source: https://stackoverflow.com/questions/43045590/kafka-setup-with-docker-compose