The problem is this: I submit the job with spark-submit, and it shuts down on its own about 2 seconds after submission.
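For context, the submit command is shaped roughly like this (the main class, master, and jar name are placeholders, not my real values):

spark-submit \
    --class com.itcchina.zs57s.pcap.Main \
    --master yarn \
    --deploy-mode client \
    zs57s-pcap.jar

The relevant code: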
package com.itcchina.zs57s.pcap.receive.handle;
import ItcWorkFlowClassify.ItcWorkItem;
import ItcWorkFlowView.ItcWorkItemInfo;
import ItcWorkFlowView.ItcWorkItemsInfo;
import ItcWorkFlowView.WorkItemInfo;
import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
import com.itcchina.framework.api.IWorkerItem;
import com.itcchina.zs57s.pcap.domain.DictionaryColletion;
import com.itcchina.zs57s.pcap.domain.KeyValueClass;
import com.itcchina.zs57s.pcap.domain.Packet;
import com.itcchina.zs57s.pcap.serializable.PacketDecoder;
import kafka.serializer.StringDecoder;
import org.apache.log4j.Logger;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.broadcast.Broadcast;
import org.apache.spark.storage.StorageLevel;
import org.apache.spark.streaming.api.java.JavaDStream;
import org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import org.apache.spark.streaming.kafka.KafkaUtils;
import org.apache.spark.streaming.receiver.Receiver;
import scala.Tuple2;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Created by newbie on 7/15/16.
 */
public class KafkaReceiveWorkItem extends AbsItcWorkItem implements Serializable {

    public KafkaReceiveWorkItem(ItcWorkItemInfo itcWorkItemInfo, ItcWorkItem workItem) {
        super(itcWorkItemInfo, workItem);
    }

    private Map<String, String> kafkaParams = new HashMap<String, String>();
    private Map<String, Integer> map_topic = new HashMap<String, Integer>();

    // Fills the Kafka consumer configuration and the topic -> receiver-thread map from
    // the work-item configuration. Must run before KafkaUtils.createStream(), which
    // needs both maps populated.
    private void initialKafka() {
        kafkaParams.put("metadata.broker.list", this.getWorkItemInfo().getMapValue("pcap.receive.kafka.config.brokerdata_data_list"));
        kafkaParams.put("zookeeper.connect", this.getWorkItemInfo().getMapValue("pcap.receive.kafka.config.zookeeper_list"));
        kafkaParams.put("group.id", this.getWorkItemInfo().getMapValue("pcap.receive.kafka.config.group_id"));
        kafkaParams.put("auto.offset.reset", this.getWorkItemInfo().getMapValue("pcap.receive.kafka.config.auto_offset_reset"));
        kafkaParams.put("rebalance.max.retries", this.getWorkItemInfo().getMapValue("pcap.receive.kafka.config.rebalance_max_retries"));
        kafkaParams.put("rebalance.backoff.ms", this.getWorkItemInfo().getMapValue("pcap.receive.kafka.config.rebalance_backoff.ms"));
        map_topic.put(this.getWorkItemInfo().getMapValue("pcap.receive.kafka.config.topic"),
                Integer.parseInt(this.getWorkItemInfo().getMapValue("pcap.receive.kafka.config.thread_num")));
    }
    IWorkerItem currWorkItem = null;
    Broadcast<Map<String, Map<String, Object>>> broadcast = null;
    Map<String, Map<String, Object>> map = new HashMap<>();

    // Parses the per-task user parameters (a JSON array) into a list of KeyValueClass.
    public List<KeyValueClass> initParams(Object userParams) {
        if (userParams == null) {
            throw new NullPointerException("userParams must not be null");
        }
        Gson gson = new Gson();
        return gson.fromJson(userParams.toString(), new TypeToken<List<KeyValueClass>>() {
        }.getType());
    }
    @Override
    public void reset() {
    }
    @Override
    public void start(ItcWorkFlowView.WorkItemInfo workItemInfo) throws Exception {
        JavaStreamingContext javaStreamingContext = this.InitailStreaming();
        // Populate kafkaParams and map_topic before the receiver stream is created;
        // nothing else calls initialKafka(), so without this both maps are empty.
        this.initialKafka();
        broadcast = javaStreamingContext.sparkContext().broadcast(map);
        JavaPairReceiverInputDStream<String, Packet> javaPairReceiverInputDStream = KafkaUtils.createStream(javaStreamingContext,
                String.class, Packet.class, StringDecoder.class, PacketDecoder.class, kafkaParams, map_topic,
                StorageLevel.MEMORY_AND_DISK());
        JavaDStream<Packet> javaDStream = javaPairReceiverInputDStream.flatMap(new FlatMapFunction<Tuple2<String, Packet>, Packet>() {
            @Override
            public Iterable<Packet> call(Tuple2<String, Packet> stringPacketTuple2) throws Exception {
                List<Packet> list = new ArrayList<Packet>();
                Packet packet = stringPacketTuple2._2;
                list.add(packet);
                // NOTE: this closure runs on the executors, so put() below mutates a
                // deserialized copy of `map`; neither the driver-side map nor the
                // (immutable) broadcast variable ever sees these updates.
                if (broadcast == null || !broadcast.value().containsKey(packet.getTaskName())) {
                    List<KeyValueClass> keyValueClasses = initParams(packet.getUserParams());
                    Map<String, Object> executeParams = new HashMap<String, Object>();
                    executeParams.put(DictionaryColletion.returnVls, keyValueClasses);
                    map.put(packet.getTaskName(), executeParams);
                }
                return list;
            }
        });
        WorkItemInfo workItemInfo1 = new WorkItemInfo();
        workItemInfo1.setParams(javaDStream);
        this.getNext().start(workItemInfo1);
        // The checkpoint directory must be a writable, fault-tolerant location
        // (typically an HDFS path); the filesystem root is unlikely to be writable.
        javaStreamingContext.checkpoint("/");
        javaStreamingContext.start();
        javaStreamingContext.awaitTermination();
    }
    @Override
    public void stop() throws Exception {
    }
}
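For comparison, here is a minimal sketch of what I understand the receiver-based lifecycle should look like, assuming Spark Streaming 1.x with the spark-streaming-kafka (Kafka 0.8) artifact; the batch interval, checkpoint path, ZooKeeper address, group id, and topic below are placeholders, not values from my job. Two details kill an application right after submit if they are wrong: start() throws when no output operation has been registered on the graph, and checkpointing fails when the directory is not writable.

// Minimal sketch, assuming Spark Streaming 1.x and the Kafka 0.8 receiver API.
// All concrete values (paths, hosts, topic, batch interval) are placeholders.
import java.util.HashMap;
import java.util.Map;
import kafka.serializer.StringDecoder;
import org.apache.spark.SparkConf;
import org.apache.spark.storage.StorageLevel;
import org.apache.spark.streaming.Durations;
import org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import org.apache.spark.streaming.kafka.KafkaUtils;

public class MinimalKafkaStreamingDriver {
    public static void main(String[] args) throws Exception {
        SparkConf conf = new SparkConf().setAppName("pcap-receive-sketch");
        JavaStreamingContext jssc = new JavaStreamingContext(conf, Durations.seconds(5));

        // Must be a directory the job user can write to; "/" is not.
        jssc.checkpoint("hdfs:///tmp/pcap-checkpoint"); // placeholder path

        Map<String, String> kafkaParams = new HashMap<String, String>();
        kafkaParams.put("zookeeper.connect", "zk1:2181"); // placeholder
        kafkaParams.put("group.id", "pcap-receive");      // placeholder

        Map<String, Integer> topics = new HashMap<String, Integer>();
        topics.put("pcap", 1); // placeholder topic and receiver-thread count

        JavaPairReceiverInputDStream<String, String> stream = KafkaUtils.createStream(
                jssc, String.class, String.class, StringDecoder.class, StringDecoder.class,
                kafkaParams, topics, StorageLevel.MEMORY_AND_DISK());

        // At least one output operation must be registered before start(); with only
        // transformations (e.g. flatMap), start() throws and the driver exits at once.
        stream.print();

        jssc.start();            // starts the receiver and the batch scheduler
        jssc.awaitTermination(); // blocks the driver thread so the job keeps running
    }
}

In my class above, whether an output operation ever gets registered depends on what this.getNext().start(workItemInfo1) does with the DStream.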