2021.12.24 Kafka MyProducer、MyConsumer 示例代码
 MyProducer
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.StringSerializer;

import java.util.Properties;
import java.util.Scanner;

/**
 * Multi-threaded Kafka producer load test.
 *
 * <p>Starts 1000 threads; each thread builds its own {@link KafkaProducer}
 * and sends 100000 messages to topic {@code mydemo2} with {@code acks=-1}
 * (leader waits for the full ISR to acknowledge each record).
 */
public class MyProducer {
    public static void main(String[] args) {
        long start = System.currentTimeMillis();
        for (int i = 0; i < 1000; i++) {
            new Thread(new Runnable() {
                @Override
                public void run() {
                    Properties properties = new Properties();
                    properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "192.168.111.131:9092");
                    properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
                    properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
                    // acks=-1 ("all"): strongest durability setting for this demo.
                    properties.put(ProducerConfig.ACKS_CONFIG, "-1");
                    // try-with-resources guarantees the producer is flushed and closed,
                    // so records still buffered in memory are not lost when the thread ends.
                    try (KafkaProducer<String, String> producer = new KafkaProducer<>(properties)) {
                        for (int j = 0; j < 100000; j++) {
                            String name = Thread.currentThread().getName();
                            String sendMsg = "currentThread name is" + name + "send msg count" + j;
                            producer.send(new ProducerRecord<>("mydemo2", sendMsg));
                            System.out.println(sendMsg);
                            // Brief pause every 1000 records to avoid flooding the broker.
                            if (j % 1000 == 0) {
                                try {
                                    Thread.sleep(100);
                                } catch (InterruptedException e) {
                                    // Restore the interrupt flag and stop sending;
                                    // the producer is still closed by try-with-resources.
                                    Thread.currentThread().interrupt();
                                    return;
                                }
                            }
                        }
                    }
                }
            }).start();
        }

        // NOTE: the worker threads are not joined, so this measures only the
        // time spent *starting* them, not the time to send all messages.
        System.out.println("time: " + (System.currentTimeMillis() - start));
        System.out.println("game over");
    }
}
 MyConsumer
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;

import java.util.Collections;
import java.util.Properties;

/**
 * Multi-threaded Kafka consumer demo.
 *
 * <p>Starts 12 threads in the same consumer group {@code group_1}; each thread
 * owns its own {@link KafkaConsumer} (the client is NOT thread-safe) subscribed
 * to topic {@code mydemo}. The topic's partitions are balanced across the
 * threads; threads beyond the partition count stay idle.
 */
public class MyConsumer {
    public static void main(String[] args) {
        // Shared configuration; each thread constructs its own consumer from it.
        Properties prop = new Properties();
        prop.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "192.168.111.131:9092");
        prop.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        prop.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        // Auto-commit is disabled, so offsets MUST be committed manually in the
        // poll loop below; the interval setting is therefore inert but harmless.
        prop.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false");
        prop.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, "10000");
        // earliest: resume from the committed offset; if none exists, start from
        //           the first message in the partition.
        // latest:   resume from the committed offset; if none exists, consume
        //           only messages produced after the consumer starts.
        // none:     resume from the committed offset; if none exists, throw.
        prop.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
        // Consumers sharing this group id split the topic's partitions among them.
        prop.put(ConsumerConfig.GROUP_ID_CONFIG, "group_1");

        for (int i = 0; i < 12; i++) {
            new Thread(new Runnable() {
                @Override
                public void run() {
                    KafkaConsumer<String, String> kafkaConsumer = new KafkaConsumer<>(prop);
                    kafkaConsumer.subscribe(Collections.singleton("mydemo"));
                    while (true) {
                        ConsumerRecords<String, String> records = kafkaConsumer.poll(100);
                        for (ConsumerRecord<String, String> record : records) {
                            System.out.println(Thread.currentThread().getName() + " " + record.offset() + " "
                                    + record.key() + " " + record.value());
                        }
                        if (records.isEmpty()) {
                            System.out.println(Thread.currentThread().getName() + "没有数据");
                        } else {
                            // Manual commit is required because auto-commit is off;
                            // without it the group re-reads everything on restart.
                            kafkaConsumer.commitAsync();
                        }
                    }
                }
            }).start();
        }
    }
}

 

 

欢迎分享,转载请注明来源:内存溢出

原文地址:https://54852.com/zaji/5684788.html

(0)
打赏 微信扫一扫微信扫一扫 支付宝扫一扫支付宝扫一扫
上一篇 2022-12-17
下一篇 2022-12-17

发表评论

登录后才能评论

评论列表(0条)

    保存