Querying the Kafka offset for data at a specified time

The listing below uses the legacy SimpleConsumer API to look up, for every partition of a topic, the offset corresponding to a given timestamp (via getOffsetsBefore).

package st;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.TreeMap;
import kafka.api.PartitionOffsetRequestInfo;
import kafka.common.TopicAndPartition;
import kafka.javaapi.OffsetResponse;
import kafka.javaapi.PartitionMetadata;
import kafka.javaapi.TopicMetadata;
import kafka.javaapi.TopicMetadataRequest;
import kafka.javaapi.consumer.SimpleConsumer;
public class KafkaOffsetSearch {

    public Map<String, String> getPartitionAndOffset() {
        int port = 6667;                              // broker port
        long timestamp = 1459209600000L;              // timestamp to query (ms)
        String topic = "topic1";                      // topic to query
        List<String> seeds = new ArrayList<String>(); // Kafka broker addresses
        seeds.add("11.11.184.172");
        seeds.add("11.11.184.174");
        seeds.add("11.11.184.183");
        seeds.add("11.11.184.167");
        seeds.add("11.11.184.177");

        KafkaOffsetSearch kos = new KafkaOffsetSearch();
        // locate the metadata (including the leader broker) of every partition
        TreeMap<Integer, PartitionMetadata> metadatas = kos.findLeader(seeds, port, topic);
        Map<String, String> map = new HashMap<String, String>();
        List<Long> offSetList = new ArrayList<>();

        for (Entry<Integer, PartitionMetadata> entry : metadatas.entrySet()) {
            int partition = entry.getKey();
            String leadBroker = entry.getValue().leader().host();
            String clientName = "Client_" + topic + "_" + partition;
            SimpleConsumer consumer = new SimpleConsumer(leadBroker, port, 100000,
                    64 * 1024, clientName);
            // ask the partition leader for the offset corresponding to the timestamp
            long readOffset = getLastOffset(consumer, topic, partition,
                    timestamp, clientName);
            offSetList.add(readOffset);
            map.put(partition + "", readOffset + "");
            System.out.println(partition + ":" + readOffset);
            if (consumer != null) consumer.close();
        }
        return map;
    }
    public static long getLastOffset(SimpleConsumer consumer, String topic,
            int partition, long whichTime, String clientName) {
        TopicAndPartition topicAndPartition = new TopicAndPartition(topic, partition);
        // request a single offset for data written before whichTime
        Map<TopicAndPartition, PartitionOffsetRequestInfo> requestInfo =
                new HashMap<TopicAndPartition, PartitionOffsetRequestInfo>();
        requestInfo.put(topicAndPartition, new PartitionOffsetRequestInfo(whichTime, 1));
        kafka.javaapi.OffsetRequest request = new kafka.javaapi.OffsetRequest(
                requestInfo, kafka.api.OffsetRequest.CurrentVersion(), clientName);
        OffsetResponse response = consumer.getOffsetsBefore(request);

        if (response.hasError()) {
            System.out.println("Error fetching offset data from the broker. Reason: "
                    + response.errorCode(topic, partition));
            return 0;
        }
        long[] offsets = response.offsets(topic, partition);
        return offsets[0];
    }
    private TreeMap<Integer, PartitionMetadata> findLeader(List<String> a_seedBrokers,
            int a_port, String a_topic) {
        TreeMap<Integer, PartitionMetadata> map = new TreeMap<Integer, PartitionMetadata>();
        for (String seed : a_seedBrokers) {
            SimpleConsumer consumer = null;
            try {
                consumer = new SimpleConsumer(seed, a_port, 100000, 64 * 1024,
                        "leaderLookup" + new Date().getTime());
                // fetch the topic metadata and record the metadata of each partition
                List<String> topics = Collections.singletonList(a_topic);
                TopicMetadataRequest req = new TopicMetadataRequest(topics);
                kafka.javaapi.TopicMetadataResponse resp = consumer.send(req);

                List<TopicMetadata> metaData = resp.topicsMetadata();
                for (TopicMetadata item : metaData) {
                    for (PartitionMetadata part : item.partitionsMetadata()) {
                        map.put(part.partitionId(), part);
                    }
                }
            } catch (Exception e) {
                System.out.println("Error communicating with broker [" + seed
                        + "] to find leader for [" + a_topic + "]. Reason: " + e);
            } finally {
                if (consumer != null)
                    consumer.close();
            }
        }
        return map;
    }
}
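
A minimal way to exercise the class above, assuming the brokers and topic hard-coded in the listing are reachable. The driver class below is illustrative and not part of the original code:

import java.util.Map;

// Hypothetical driver: prints the partition -> offset map computed by KafkaOffsetSearch.
public class KafkaOffsetSearchDemo {
    public static void main(String[] args) {
        KafkaOffsetSearch search = new KafkaOffsetSearch();
        Map<String, String> partitionToOffset = search.getPartitionAndOffset();
        for (Map.Entry<String, String> e : partitionToOffset.entrySet()) {
            System.out.println("partition " + e.getKey() + " -> offset " + e.getValue());
        }
    }
}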
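
The SimpleConsumer/getOffsetsBefore path shown above is the legacy low-level API. With the newer Java client (Kafka 0.10.1+), the same timestamp-to-offset lookup can be done with KafkaConsumer#offsetsForTimes. The sketch below is only an illustration under assumed settings (broker address, group id, and partition number are placeholders), not part of the original listing:

import java.util.Collections;
import java.util.Map;
import java.util.Properties;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.consumer.OffsetAndTimestamp;
import org.apache.kafka.common.TopicPartition;

public class OffsetsForTimesSketch {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put("bootstrap.servers", "11.11.184.172:6667");  // placeholder broker
        props.put("group.id", "offset-search");                // placeholder group id
        props.put("key.deserializer",
                "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer",
                "org.apache.kafka.common.serialization.StringDeserializer");

        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {
            TopicPartition tp = new TopicPartition("topic1", 0);  // placeholder partition
            long timestamp = 1459209600000L;  // same timestamp as the listing above
            // offsetsForTimes returns, per partition, the earliest offset whose
            // timestamp is >= the requested timestamp (or null if none exists)
            Map<TopicPartition, OffsetAndTimestamp> result =
                    consumer.offsetsForTimes(Collections.singletonMap(tp, timestamp));
            OffsetAndTimestamp oat = result.get(tp);
            if (oat != null) {
                System.out.println("partition 0 -> offset " + oat.offset());
            }
        }
    }
}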