A Java utility wrapper for Kafka


Consumer wrapper class

public class KafkaConsumerEntity {

   /**
    * The consumer instance
    */
   private KafkaConsumer<String,String> consumer;
   
   /**
    * Whether this instance is currently checked out (in use)
    */
   private boolean isLock;
   
   /**
    * Last time this instance was used
    */
   private String lastTime;

   
   

   public KafkaConsumer<String, String> getConsumer() {
      return consumer;
   }


   public void setConsumer(KafkaConsumer<String, String> consumer) {
      this.consumer = consumer;
   }


   public boolean isLock() {
      return isLock;
   }


   public void setLock(boolean isLock) {
      this.isLock = isLock;
   }


   public String getLastTime() {
      return lastTime;
   }


   /**
    * Stamp this instance with the current time.
    */
   public void setLastTime() {
      this.lastTime = DateUtil.getDateTime();
   }

   /**
    * Done using the instance: release it back to the pool.
    * @return true once the instance has been released
    */
   public boolean release()
   {
      // refresh the last-used time and unlock the instance
      this.setLastTime();
      this.setLock(false);
      return true;
   }


   public KafkaConsumerEntity(KafkaConsumer<String, String> consumer) {
      super();
      this.consumer = consumer;
      this.isLock = false;
      this.lastTime = DateUtil.getDateTime();
   }
   
   
}
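The entity itself is pure bookkeeping: an instance is checked out of the pool, used, and released. A minimal lifecycle sketch (rawConsumer stands in for an already-configured KafkaConsumer and is an assumption, not something defined above):

// rawConsumer: an assumed, already-configured KafkaConsumer<String, String>
KafkaConsumerEntity entity = new KafkaConsumerEntity(rawConsumer);

// check out: stamp the access time and mark the instance busy
entity.setLastTime();
entity.setLock(true);
try {
   // ... poll and process records via entity.getConsumer() ...
} finally {
   entity.release(); // unlock and refresh the timestamp
}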

Producer wrapper class

public class KafkaProducerEntity {
   
   /**
    * The producer instance
    */
   private KafkaProducer<String,String> producer;
   
   /**
    * Whether this instance is currently checked out (in use)
    */
   private boolean isLock;
   
   /**
    * Last time this instance was used
    */
   private String lastTime;

   
   
   public KafkaProducer<String, String> getProducer() {
      return producer;
   }

   public void setProducer(KafkaProducer<String, String> producer) {
      this.producer = producer;
   }

   public boolean isLock() {
      return isLock;
   }

   public void setLock(boolean isLock) {
      this.isLock = isLock;
   }

   public String getLastTime() {
      return lastTime;
   }

   /**
    * Stamp this instance with the current time.
    */
   public void setLastTime() {
      this.lastTime = DateUtil.getDateTime();
   }

   public KafkaProducerEntity(KafkaProducer<String, String> producer) {
      super();
      this.producer = producer;
      this.isLock = false;
      this.lastTime = DateUtil.getDateTime();
   }
   
   /**
    * Done using the instance: release it back to the pool.
    * @return true once the instance has been released
    */
   public boolean release()
   {
      // refresh the last-used time and unlock the instance
      this.setLastTime();
      this.setLock(false);
      return true;
   }

   
}
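Both entity classes mark ownership with a plain boolean, which is only safe as long as every check-and-set happens under a lock (the pool getters below are synchronized for exactly this reason). For reference, a minimal sketch of a lock-free alternative using AtomicBoolean; this is a hypothetical variant, not part of the code above:

import java.util.concurrent.atomic.AtomicBoolean;

// Hypothetical pooled wrapper: compare-and-set lets callers claim an
// instance atomically without synchronizing the whole pool scan.
public class PooledEntity<T> {

   private final T client;
   private final AtomicBoolean inUse = new AtomicBoolean(false);

   public PooledEntity(T client) {
      this.client = client;
   }

   /** Atomically claim this entity; returns false if it is already in use. */
   public boolean tryAcquire() {
      return inUse.compareAndSet(false, true);
   }

   /** Return the entity to the pool. */
   public void release() {
      inUse.set(false);
   }

   public T getClient() {
      return client;
   }
}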

Utility class

public class KafkaUtil {


   private static Logger logger = LoggerFactory.getLogger(KafkaUtil.class);

   /**
    * The producer pool
    */
   public static List<KafkaProducerEntity> producerList =new ArrayList<KafkaProducerEntity>();

   /**
    * The consumer pool
    */
   public static List<KafkaConsumerEntity> consumerList =new ArrayList<KafkaConsumerEntity>();

   /**
    * Initialize both pools.
    * @param producerCount number of producers to pre-create
    * @param consumerCount number of consumers to pre-create
    * @return true once both pools are populated
    */
   public static Boolean initData(int producerCount, int consumerCount)
   {
      expansionProducer(producerCount);
      expansionConsumer(consumerCount);
      return true;
   }


   /**
    * Get an idle producer from the pool, growing the pool if none is free.
    * @return a locked producer entity, or null if the pool could not grow
    */
   public static synchronized KafkaProducerEntity getProducer()
   {
      for (KafkaProducerEntity kafkaProducerEntity : producerList) {

         if(!kafkaProducerEntity.isLock())
         {
            // stamp the access time and lock the instance
            kafkaProducerEntity.setLastTime();
            kafkaProducerEntity.setLock(true);
            return kafkaProducerEntity;
         }

      }

      // no idle instance available: grow the pool
      KafkaProducerEntity kafkaProducerEntity = expansionProducer(100);

      if(kafkaProducerEntity == null)
      {
         logger.error("Failed to obtain a Kafka producer instance");
         return null;
      }

      // stamp the access time and lock the instance
      kafkaProducerEntity.setLastTime();
      kafkaProducerEntity.setLock(true);
      return kafkaProducerEntity;
   }


   /**
    * Grow the producer pool by the given count.
    * @param count number of producers to create
    * @return the last producer created, or null if Kafka is not configured
    */
   public static KafkaProducerEntity expansionProducer(int count)
   {
      // 1. Configuration
        Properties props = new Properties();

        try {
            // Kafka broker address list; it does not need to name every broker
            props.put("bootstrap.servers", CacheUtil.get("KAFKA_BOOTSTRAP_SERVERS").toString());
            // acknowledgements the producer requires the leader to receive before considering a request complete
            props.put("acks", CacheUtil.get("KAFKA_ACKS").toString());
            // client-side retry count on failure
            props.put("retries", CacheUtil.get("KAFKA_RETRIES").toString());
            // producer batch size in bytes (e.g. 16 KB)
            props.put("batch.size", CacheUtil.get("KAFKA_BATCH_SIZE").toString());
            // how long the producer waits before sending a batch (e.g. 1 ms)
            props.put("linger.ms", CacheUtil.get("KAFKA_LINGER_MS").toString());
            // producer buffer memory in bytes (e.g. 32 MB)
            props.put("buffer.memory", CacheUtil.get("KAFKA_BUFFER_MEMORY").toString());
            // key serializer class
            props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
            // value serializer class
            props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
      } catch (Exception e) {
         logger.error("Kafka parameters are not configured: " + e.getMessage());
         return null;
      }

        // 2. Create the producers
        KafkaProducer<String,String> producer;
        KafkaProducerEntity kafkaProducerEntity = null;
        for(int i = 0; i < count; i++)
        {
           producer = new KafkaProducer<String, String>(props);
           kafkaProducerEntity = new KafkaProducerEntity(producer);
           producerList.add(kafkaProducerEntity);
        }

        logger.info("------------- Added " + count + " Kafka producer instances. -------------");
        return kafkaProducerEntity;
   }



   /**
    * Get an idle consumer from the pool, growing the pool if none is free.
    * @return a locked consumer entity, or null if the pool could not grow
    */
   public static synchronized KafkaConsumerEntity getConsumer()
   {
      for (KafkaConsumerEntity kafkaConsumerEntity : consumerList) {

         if(!kafkaConsumerEntity.isLock())
         {
            // stamp the access time and lock the instance
            kafkaConsumerEntity.setLastTime();
            kafkaConsumerEntity.setLock(true);
            return kafkaConsumerEntity;
         }

      }

      // no idle instance available: grow the pool
      KafkaConsumerEntity kafkaConsumerEntity = expansionConsumer(100);

      if(kafkaConsumerEntity == null)
      {
         logger.error("Failed to obtain a Kafka consumer instance");
         return null;
      }

      // stamp the access time and lock the instance
      kafkaConsumerEntity.setLastTime();
      kafkaConsumerEntity.setLock(true);
      return kafkaConsumerEntity;
   }


   /**
    * Grow the consumer pool by the given count.
    * @param count number of consumers to create
    * @return the last consumer created, or null if Kafka is not configured
    */
   public static KafkaConsumerEntity expansionConsumer(int count)
   {
      // 1. Configuration
        Properties props = new Properties();

        try {
            // Kafka broker address list; it does not need to name every broker
            props.put("bootstrap.servers", CacheUtil.get("KAFKA_BOOTSTRAP_SERVERS").toString());
            // consumer group id
            props.put("group.id", CacheUtil.get("KAFKA_GROUP_ID").toString());
            // whether offsets are committed automatically
            props.put("enable.auto.commit", CacheUtil.get("KAFKA_ENABLE_AUTO_COMMIT").toString());

            // auto-commit interval, if configured
            Object obj = CacheUtil.get("KAFKA_AUTO_COMMIT_INTERVAL_MS");

            if(obj != null && !"0".equals(obj.toString()))
            {
                 props.put("auto.commit.interval.ms", obj.toString());
            }

            // maximum records per poll
            props.put("max.poll.records", Integer.parseInt(CacheUtil.get("KAFKA_MAX_POLL_RECORDS").toString()));
            // key deserializer class
            props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
            // value deserializer class
            props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
      } catch (Exception e) {
         logger.error("Kafka parameters are not configured: " + e.getMessage());
         return null;
      }

        // 2. Create the consumers
        KafkaConsumer<String,String> consumer;
        KafkaConsumerEntity kafkaConsumerEntity = null;
        for(int i = 0; i < count; i++)
        {
           consumer = new KafkaConsumer<String, String>(props);
           kafkaConsumerEntity = new KafkaConsumerEntity(consumer);
           consumerList.add(kafkaConsumerEntity);
        }

        logger.info("------------- Added " + count + " Kafka consumer instances. -------------");
        return kafkaConsumerEntity;
   }




   /**
     * Synchronous (blocking) send.
     * Suited to workloads that do not need high throughput, care about
     * message ordering, and cannot tolerate lost sends.
     * @param topic target topic
     * @param message message body
     * @return true if the broker acknowledged the send, false otherwise
     */
    public static boolean sendBlockMsg(String topic, String message)
    {
       KafkaProducerEntity kafkaProducerEntity = getProducer();

       if(kafkaProducerEntity == null)
       {
          logger.error("Failed to obtain a Kafka producer instance.");
          return false;
       }

        // get the pooled producer
        KafkaProducer<String,String> producer = kafkaProducerEntity.getProducer();

        // build the message and send it synchronously
        ProducerRecord<String,String> record = new ProducerRecord<String, String>(topic, message);
        Future<RecordMetadata> send = producer.send(record);
        try {
         // block until the broker acknowledges the send
         send.get();
      } catch (InterruptedException e) {
         Thread.currentThread().interrupt();
         logger.error("Kafka synchronous send interrupted: " + e.getMessage());
         return false;
      } catch (ExecutionException e) {
         logger.error("Kafka synchronous send failed: " + e.getMessage());
         return false;
      } finally {
         kafkaProducerEntity.release();
      }
        return true;
    }


    /**
     * Asynchronous send (fire and forget).
     * Suited to workloads that only care about throughput, do not care
     * about ordering, and can tolerate lost sends.
     * @param topic target topic
     * @param message message body
     * @return true if the message was handed to the producer
     */
    public static boolean sendSyncNioMsg(String topic, String message)
    {
       KafkaProducerEntity kafkaProducerEntity = getProducer();

       if(kafkaProducerEntity == null)
       {
          logger.error("Failed to obtain a Kafka producer instance.");
          return false;
       }

        // get the pooled producer
        KafkaProducer<String,String> producer = kafkaProducerEntity.getProducer();

        // build the message and send it without waiting for an acknowledgement
        ProducerRecord<String,String> record = new ProducerRecord<String, String>(topic, message);
        producer.send(record);
        // flush the producer's buffer
        producer.flush();

      kafkaProducerEntity.release();

       return true;
    }

    /**
     * Asynchronous send (with callback).
     * Suited to workloads that need to know whether a send succeeded
     * but do not care about ordering.
     * @param topic target topic
     * @param message message body
     * @return true if the message was handed to the producer
     */
    public static boolean sendSyncMsg(String topic, String message)
    {
       KafkaProducerEntity kafkaProducerEntity = getProducer();

       if(kafkaProducerEntity == null)
       {
          logger.error("Failed to obtain a Kafka producer instance.");
          return false;
       }

        // get the pooled producer
        KafkaProducer<String,String> producer = kafkaProducerEntity.getProducer();

        // build the message and send it; the callback handles the result
        ProducerRecord<String,String> record = new ProducerRecord<String, String>(topic, message);
        producer.send(record, new ProducerCallback(message));
        // flush the producer's buffer
        producer.flush();

       kafkaProducerEntity.release();
       return true;
    }



    /***       ---------- Consuming ----------            **/

    /**
     * Consume messages from a specific partition of a topic,
     * starting from the last committed offset.
     * @param kafkaConsumerEntity pooled consumer entity
     * @param topic topic to read from
     * @param appointPartition partition to read from
     * @return the message values that were polled
     */
    public static List<String> consumerMsg(KafkaConsumerEntity kafkaConsumerEntity, String topic, Integer appointPartition)
    {
       // 1. Result list
       List<String> resultList = new ArrayList<>();
        // 2. Get the pooled consumer
        KafkaConsumer<String,String> consumer = kafkaConsumerEntity.getConsumer();
        // 3. Consume from the given partition
        TopicPartition partition = new TopicPartition(topic, appointPartition);

        // look up the last committed offset
        long offset = 0L;
        OffsetAndMetadata offsetAndMetadata = consumer.committed(partition);
        if(offsetAndMetadata != null){
            offset = offsetAndMetadata.offset();
        }
        // assign the partition and seek to that offset
        consumer.assign(Arrays.asList(partition));
        consumer.seek(partition, offset);

      ConsumerRecords<String, String> records = consumer.poll(Duration.ofSeconds(2));

       // collect the polled values
        for (ConsumerRecord<String, String> record : records) {
            resultList.add(record.value());
        }

        return resultList;
    }

    /**
     * Called after messages were consumed successfully:
     * commit the offsets manually, then release the consumer.
     * @param kafkaConsumerEntity pooled consumer entity
     * @return true once the commit completes
     */
    public static boolean consumerCommit(KafkaConsumerEntity kafkaConsumerEntity)
    {
       KafkaConsumer<String,String> consumer = kafkaConsumerEntity.getConsumer();
       consumer.commitSync();
       kafkaConsumerEntity.release();

       return true;
    }


   /**
    * Create a topic.
    * @param zkUrl ZooKeeper address
    * @param topicName topic name
    * @param partition number of partitions
    * @param replication replication factor
    */
   public static void createKafaTopic(String zkUrl, String topicName, Integer partition, Integer replication) {
        ZkUtils zkUtils = ZkUtils.apply(zkUrl, 30000, 30000, JaasUtils.isZkSaslEnabled());
       AdminUtils.createTopic(zkUtils, topicName, partition, replication, new Properties(), AdminUtils.createTopic$default$6());
       zkUtils.close();
   }


   /**
    * Delete a topic.
    * @param ZkStr ZooKeeper address
    * @param topicName topic name
    */
    public static void deleteKafaTopic(String ZkStr, String topicName) {
        ZkUtils zkUtils = ZkUtils.apply(ZkStr, 30000, 30000, JaasUtils.isZkSaslEnabled());
       AdminUtils.deleteTopic(zkUtils, topicName);
       zkUtils.close();
   }

   /**
    * List all topics.
    * @param zkUrl ZooKeeper address
    * @return all topic names, or null on failure
    */
    public static List<String> listAllTopic(String zkUrl){
        ZkUtils zkUtils = null;
        try {
            zkUtils = ZkUtils.apply(zkUrl, 30000, 30000, JaasUtils.isZkSaslEnabled());

            List<String> topics = JavaConversions.seqAsJavaList(zkUtils.getAllTopics());
            for (String topic : topics) {
                System.out.println(topic);
            }
            return topics;
        }catch (Exception e){
            e.printStackTrace();
            return null;
        }finally {
            if (zkUtils != null){
                zkUtils.close();
            }
        }
    }

    public static void main(String[] args) {

      // ZooKeeper host:port list
      String zkurl = "10.8.6.70:2181,10.8.6.71:2181,10.8.6.72:2181";
      // create a topic
        createKafaTopic(zkurl, "sap_order_test", 10, 3);

     //  deleteKafaTopic(zkurl, "sap_order_test");
   }

}
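Putting the pieces together, a minimal end-to-end sketch. The topic name and pool sizes are illustrative, and CacheUtil is assumed to already hold the KAFKA_* settings that the expansion methods read:

// pre-create 10 producers and 10 consumers
KafkaUtil.initData(10, 10);

// produce: blocking send to an illustrative topic
boolean sent = KafkaUtil.sendBlockMsg("demo_topic", "hello kafka");

// consume: check a consumer out, read partition 0, then commit and release
KafkaConsumerEntity entity = KafkaUtil.getConsumer();
if (entity != null) {
   List<String> messages = KafkaUtil.consumerMsg(entity, "demo_topic", 0);
   for (String message : messages) {
      System.out.println(message);
   }
   KafkaUtil.consumerCommit(entity); // commits offsets and releases the entity
}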

Producer callback class

public class ProducerCallback implements Callback {

   private static Logger logger = LoggerFactory.getLogger(ProducerCallback.class);
   /**
    * The message that was sent
    */
   private String msg;
   
   
   
   public ProducerCallback(String msg) {
      super();
      this.msg = msg;
   }



   @Override
   public void onCompletion(RecordMetadata recordMetadata, Exception exception) {

      if(exception != null)
      {
         logger.error("Kafka producer send failed for message [" + msg + "]: " + exception.getMessage());
         // a failed send could be recovered here, e.g. by reparsing
         // the message and replaying the original business operation
      }
      // on success there is nothing to do
   }


}
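For completeness, this is how the callback is wired into a send (the topic and payload are illustrative; sendSyncMsg above does the same thing through the pool):

// send asynchronously; onCompletion fires once the broker responds
ProducerRecord<String, String> record = new ProducerRecord<String, String>("demo_topic", "payload");
producer.send(record, new ProducerCallback("payload"));
producer.flush();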