First install a JDK, which both ZooKeeper and Kafka require:

sudo apt-get update
sudo apt-get install default-jdk
Download and extract ZooKeeper (the release tarball already contains the compiled jars, so no separate build step is needed):

wget https://archive.apache.org/dist/zookeeper/zookeeper-3.4.14/zookeeper-3.4.14.tar.gz
tar -xvf zookeeper-3.4.14.tar.gz
cd zookeeper-3.4.14
Create the data directory and give it to the user that will run ZooKeeper (the zookeeper user below is only an example; use your own account if no such user exists):

sudo mkdir /var/lib/zookeeper
sudo chown zookeeper:zookeeper /var/lib/zookeeper
Create the configuration file conf/zoo.cfg in the extracted directory (you can start from the bundled conf/zoo_sample.cfg; package-based installs keep this file at /etc/zookeeper/conf/zoo.cfg instead) and make sure it contains:
dataDir=/var/lib/zookeeper
clientPort=2181
Start ZooKeeper from the extracted directory:

./bin/zkServer.sh start

(If ZooKeeper was installed as a system service rather than from the tarball, sudo service zookeeper start does the same job.)
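To confirm the server is actually accepting connections on port 2181, you can connect with the ZooKeeper Java client and list the root znodes. This is only an illustrative sketch: the class name ZkCheck is made up, and it assumes the zookeeper-3.4.14.jar and its slf4j dependencies from the extracted release are on the classpath.

import org.apache.zookeeper.ZooKeeper;

import java.util.List;
import java.util.concurrent.CountDownLatch;

public class ZkCheck {
    public static void main(String[] args) throws Exception {
        CountDownLatch connected = new CountDownLatch(1);
        // Connect to the standalone server configured above (clientPort=2181);
        // the watcher just releases the latch on the first connection event.
        ZooKeeper zk = new ZooKeeper("localhost:2181", 3000, event -> connected.countDown());
        connected.await();

        // A working install always has at least the "zookeeper" system znode.
        List<String> children = zk.getChildren("/", false);
        System.out.println("znodes under /: " + children);
        zk.close();
    }
}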
Download and extract Kafka:

wget https://archive.apache.org/dist/kafka/2.8.0/kafka_2.13-2.8.0.tgz
tar -xvf kafka_2.13-2.8.0.tgz
cd kafka_2.13-2.8.0
Edit the broker configuration file config/server.properties (package-based installs keep it at /etc/kafka/server.properties) and make sure it contains:

broker.id=0
listeners=PLAINTEXT://:9092
zookeeper.connect=localhost:2181

Start the Kafka broker:
./bin/kafka-server-start.sh config/server.properties
The broker runs in the foreground, so switch to another terminal and create a test topic:

./bin/kafka-topics.sh --create --topic test --partitions 1 --replication-factor 1 --bootstrap-server localhost:9092
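The same topic can also be created from Java with the AdminClient bundled in kafka-clients. This is a sketch rather than part of the original guide: the class name TopicSetup is made up, and it assumes the kafka-clients 2.8.0 jar is on the classpath.

import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.AdminClientConfig;
import org.apache.kafka.clients.admin.NewTopic;

import java.util.Collections;
import java.util.Properties;

public class TopicSetup {
    public static void main(String[] args) throws Exception {
        Properties props = new Properties();
        props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");

        try (AdminClient admin = AdminClient.create(props)) {
            // Same settings as the CLI command: topic "test", 1 partition, replication factor 1.
            // If the topic already exists, this fails with a TopicExistsException
            // wrapped in an ExecutionException.
            NewTopic topic = new NewTopic("test", 1, (short) 1);
            admin.createTopics(Collections.singleton(topic)).all().get();

            // List topic names to confirm it is there.
            System.out.println(admin.listTopics().names().get());
        }
    }
}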
Start a console producer and type a few messages (each line you enter becomes one message):

./bin/kafka-console-producer.sh --topic test --bootstrap-server localhost:9092

In another terminal, start a console consumer to read everything back from the beginning:

./bin/kafka-console-consumer.sh --topic test --from-beginning --bootstrap-server localhost:9092
Producing from Java works the same way. A minimal producer that sends one message to the test topic:

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;

import java.util.Properties;

public class SimpleProducer {
    public static void main(String[] args) {
        Properties properties = new Properties();
        properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG,
                "org.apache.kafka.common.serialization.StringSerializer");
        properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,
                "org.apache.kafka.common.serialization.StringSerializer");

        KafkaProducer<String, String> producer = new KafkaProducer<>(properties);
        ProducerRecord<String, String> record = new ProducerRecord<>("test", "Hello, Kafka!");
        producer.send(record);
        producer.close(); // flushes any buffered records before shutting down
    }
}
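If you want confirmation that the message really reached the broker, send() also accepts a callback. A small sketch, not part of the original article, that would replace the bare producer.send(record) call above:

// Inside SimpleProducer.main(), instead of producer.send(record):
producer.send(record, (metadata, exception) -> {
    if (exception != null) {
        // The record could not be written; the exception explains why.
        exception.printStackTrace();
    } else {
        // On success the broker reports where the record landed.
        System.out.printf("wrote to %s-%d at offset %d%n",
                metadata.topic(), metadata.partition(), metadata.offset());
    }
});
producer.flush(); // make sure the callback has a chance to run before close()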
And a matching Java consumer that prints everything written to the topic:

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;

import java.time.Duration;
import java.util.Arrays;
import java.util.Properties;

public class SimpleConsumer {
    public static void main(String[] args) {
        Properties properties = new Properties();
        properties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        properties.put(ConsumerConfig.GROUP_ID_CONFIG, "test-group");
        properties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG,
                "org.apache.kafka.common.serialization.StringDeserializer");
        properties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,
                "org.apache.kafka.common.serialization.StringDeserializer");

        KafkaConsumer<String, String> consumer = new KafkaConsumer<>(properties);
        consumer.subscribe(Arrays.asList("test"));

        try {
            while (true) {
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(100));
                for (ConsumerRecord<String, String> record : records) {
                    System.out.println(record.key() + ": " + record.value());
                }
            }
        } finally {
            // Reached when the loop exits through an exception or interrupt.
            consumer.close();
        }
    }
}
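The poll loop above never ends on its own. A common shutdown pattern, shown here only as a sketch on top of SimpleConsumer, is to call consumer.wakeup() from a JVM shutdown hook; the blocked poll() then throws a WakeupException that the loop treats as the signal to stop:

import org.apache.kafka.common.errors.WakeupException;

// In SimpleConsumer.main(), before the poll loop:
final Thread mainThread = Thread.currentThread();
Runtime.getRuntime().addShutdownHook(new Thread(() -> {
    consumer.wakeup();          // makes the blocked poll() throw WakeupException
    try {
        mainThread.join();      // wait until the consumer has been closed cleanly
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
    }
}));

// ...and catch the exception around the loop:
try {
    while (true) { /* poll and print as above */ }
} catch (WakeupException e) {
    // Expected during shutdown; fall through to close().
} finally {
    consumer.close();
}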
That covers a quick installation of Kafka on Linux and the basic steps to get started with it.