Kafka 入门案例


方法内使用:

 Map<String, String> map = new HashMap<String, String>();

        HashMap<String, HashMap<String, String>> mapHashMap = new HashMap<>();

        map.put("uid", request.getData().getmId());

        map.put("channelId", "2");

        map.put("title", request.getData().getNickName());

        mapHashMap.put(String.valueOf(request.getData().getmId()), (HashMap<String, String>) map);

        // 调用kafak

        AddIndexReqDto addIndexReqDto = new AddIndexReqDto();

        addIndexReqDto.setIndexName("engine");

        addIndexReqDto.setIndexType("flmallMember");

        addIndexReqDto.setlData(mapHashMap);

        addMemberMq.sendMessage("search-engine-flmallMember", String.valueOf(request.getData().getmId()), "engine", addIndexReqDto);

内部自己封装方法

package com.foriseland.fas.member.mq;

import com.foriseland.fjf.mq.producer.KafkaProducerGeneric;

import com.foriseland.fsoa.member.utils.JsonUtilsZ;

import org.apache.kafka.clients.producer.RecordMetadata;

import org.junit.Test;

import org.springframework.beans.factory.annotation.Autowired;

import org.springframework.stereotype.Component;

import java.util.concurrent.Future;

@Component
public class AddMemberMq extends BaseTest {

    @Autowired
    private KafkaProducerGeneric producerServer;

    /**
     * Kafka producer helper: serializes {@code obj} to JSON and publishes it.
     *
     * <p>BUG FIX: the original method carried a JUnit {@code @Test} annotation,
     * which is invalid here — JUnit test methods must take no parameters and
     * return void, and a production Spring {@code @Component} send path must not
     * be a test. The annotation has been removed.
     *
     * <p>NOTE(review): the {@code value} parameter is never used — only the JSON
     * form of {@code obj} is sent. It is kept for caller compatibility; confirm
     * whether callers expect {@code value} to be the payload.
     *
     * @param topic destination Kafka topic
     * @param key   message key
     * @param value unused (see note above)
     * @param obj   payload, serialized to JSON before sending
     * @return future resolving to the broker acknowledgement metadata
     */
    public Future<RecordMetadata> sendMessage(String topic, String key, String value, Object obj) {
        // Kafka values are transported as JSON strings in this project.
        String json = JsonUtilsZ.objectToJson(obj);
        return producerServer.sendMessage(topic, key, json);
    }
}

工具类封装转 JSON 方法 -----

public class JsonUtilsZ {

    // 定义jackson对象

    private static final ObjectMapper MAPPER = new ObjectMapper();

    public static String objectToJson(Object data) {

        try {

            String string = MAPPER.writeValueAsString(data);

            return string;

        } catch (JsonProcessingException e) {

            e.printStackTrace();

        }

        return null;

    }

方法内部封装,不用写------

/**

 * 绠�鍗曠殑鐢熶骇娑堟伅,

 *

 * @param topic

 * @param key

 * @param value

 */

public Future<RecordMetadata> sendMessage(String topic, String key, String value) {

KafkaUtils.ruleTopic(topic);

return this.kafkaProducer.send(new ProducerRecord<String, String>(topic, key, value));

}

方法内部封装,不用写------

public static  void ruleTopic(String topic) throws KafkaBaseException{

if(KafkaUtils.isRule(topic) == Boolean.FALSE){

KafkaBaseException ex = new KafkaBaseException("Kafka Topic is not effective,key be similar to [object-model-key]");

throw ex;

}

}

KafkaProducerGeneric---底层实现

package com.foriseland.fjf.mq.producer;

import java.util.Date;

import java.util.Properties;

import java.util.concurrent.Future;

import org.apache.kafka.clients.producer.Callback;

import org.apache.kafka.clients.producer.KafkaProducer;

import org.apache.kafka.clients.producer.ProducerRecord;

import org.apache.kafka.clients.producer.RecordMetadata;

import com.foriseland.fjf.mq.domain.FLKafkaProducer;

import com.foriseland.fjf.mq.domain.FLOpertionType;

import com.foriseland.fjf.mq.property.FLKafkaOtherProperties;

import com.foriseland.fjf.mq.util.KafkaUtils;

/**

 *

 * @className: KafkaProducerGeneric

 * @describe: KAFKA鐨凱roduct鏈嶅姟

 * @author: chaiyachun

 * @createTime 2018骞�3鏈�3鏃� 涓嬪崍3:57:03

 */

public class KafkaProducerGeneric {

private Properties properties;

private KafkaProducer<String, String> kafkaProducer;

private int topPartitionCount;

void init() {

if (properties == null || properties.size() == 0) {

throw new RuntimeException("KafkaProducerGeneric 閰嶇疆涓� null,鍒濆鍖栭厤缃紓甯�,璇锋鏌afkaProducerGeneric Bean鏄惁琚垵濮嬪寲!");

}

Object tpc = properties.get(FLKafkaOtherProperties.PRODUCER_PARTITION_COUNT);

this.topPartitionCount = tpc == null ? 3 : Integer.valueOf(tpc.toString());

this.kafkaProducer = new org.apache.kafka.clients.producer.KafkaProducer<>(properties);

System.out.println("[KafkaProducerGeneric init config] -> success");

}

/**

 * 绠�鍗曠殑鐢熶骇娑堟伅,

 *

 * @param topic

 * @param key

 * @param value

 */

public Future<RecordMetadata> sendMessage(String topic, String key, String value) {

KafkaUtils.ruleTopic(topic);

return this.kafkaProducer.send(new ProducerRecord<String, String>(topic, key, value));

}

public RecordMetadata sendMessage(String topic, String key, String value, boolean isTimely) {

KafkaUtils.ruleTopic(topic);

Future<RecordMetadata> result = sendMessage(topic, key, value);

return futureResult(isTimely, result);

}

/**

 * 鎸囧畾鍒嗗尯鐢熶骇娑堟伅,鍚屾鐢熶骇API

 * @param topic

 * @param key

 * @param value

 * @param partition

 */

public Future<RecordMetadata> sendMessageByPartition(String topic, String key, String value, int partition) {

KafkaUtils.ruleTopic(topic);

return this.kafkaProducer.send(new ProducerRecord<String, String>(topic, partition, key, value));

}

/**

 * @param topic

 * @param key

 * @param value

 * @param partition

 */

public RecordMetadata sendMessage(String topic, String key, String value, int partition, boolean isTimely) {

KafkaUtils.ruleTopic(topic);

Future<RecordMetadata> result = this.sendMessageByPartition(topic, key, value, partition);

return this.futureResult(isTimely, result);

}

/**

 * 鍥炶皟鍑芥暟鍙戦�佹秷鎭�

 *

 * @param topic

 * @param key

 * @param value

 * @param callback

 * @return

 */

public Future<RecordMetadata> sendMessageAndCallback(String topic, String key, String value, Callback callback) {

KafkaUtils.ruleTopic(topic);

return this.kafkaProducer.send(new ProducerRecord<String, String>(topic, key, value), callback);

}

/**

 * 鎸囧畾鍒嗗尯鍜屽洖璋冨彂閫佹秷鎭�,

 *

 * @param topic

 * @param key

 * @param value

 * @param callback

 * @return

 */

public Future<RecordMetadata> sendMessageAndCallback(String topic, String key, String value, int partition,

Callback callback) {

KafkaUtils.ruleTopic(topic);

return this.kafkaProducer.send(new ProducerRecord<String, String>(topic, partition, key, value), callback);

}

/**

 * 鎸囧畾鍒嗗尯鍜屽洖璋冨彂閫佹秷鎭苟涓斿洖璋�,

 *

 * @param topic

 * @param key

 * @param value

 * @param

 * @return

 */

public Future<RecordMetadata> sendMessageRequiredCallback(String topic, String key, String value) {

KafkaUtils.ruleTopic(topic);

return this.kafkaProducer.send(new ProducerRecord<String, String>(topic, key, value),

new CallBack(topic, key, value));

}

RecordMetadata futureResult(boolean isTimely, Future<RecordMetadata> result) {

if (isTimely) {

try {

return result.get();

} catch (Exception e) {

e.printStackTrace();

throw new RuntimeException(e);

}

}

return null;

}

public void close() {

kafkaProducer.close();

kafkaProducer = null;

System.out.println("[kafkaProducer close]");

}

// ------------------- setter getter---------------------

public Properties getProperties() {

return properties;

}

public void setProperties(Properties properties) {

this.properties = properties;

}

 

public int getTopPartitionCount() {

return topPartitionCount;

}

public void setTopPartitionCount(int topPartitionCount) {

this.topPartitionCount = topPartitionCount;

}

public static class CallBack implements Callback {

private String key;

private String value;

@SuppressWarnings("unused")

private String topic;

public CallBack(String topic, String key, String value) {

this.key = key;

this.topic = topic;

this.value = value;

}

FLKafkaProducer createKfkaProducer(Throwable exception, String topic, long offset, int partition, String key,

String value, long checksum) {

FLKafkaProducer producer = new FLKafkaProducer();

Date now = new Date();

producer.setKafkaChecksum(System.nanoTime() + "");

producer.setExceptionMessage(exception.getMessage());

producer.setKafkaProducerId(KafkaUtils.get32UUID());

producer.setKafkaOffset(offset);

producer.setOpertionType(FLOpertionType.UNRESOLVE);

producer.setKafkaTopic(topic);

producer.setResolveTime(now);

producer.setUpdateTime(now);

producer.setKafkaPartition(partition);

producer.setKafkaKey(key);

producer.setKafkaValue(value);

return producer;

}

@Override

public void onCompletion(RecordMetadata record, Exception e) {

if (record != null) {

System.err.println("record : " + record);

if (e != null) {

System.err.println("Exception : " + e.getMessage() + "..." + e.getLocalizedMessage());

e.printStackTrace();

}

// TODO: 16/7/27

System.err.printf("offset=%d , partition=%s , topic=%s  key=%s,  value=%s  \r\n", record.offset(),record.partition(), record.topic(), this.key, this.value);

}

}

}

}


猜你喜欢

转载自blog.csdn.net/chai1230/article/details/80340036