1,258
社区成员
发帖
与我相关
我的任务
分享
@Repository
public class RapidCalculateTask implements Serializable {

    private static final long serialVersionUID = -7664074371811203339L;

    private static final Logger log = LoggerFactory.getLogger(RapidCalculateTask.class);

    // DAO handed to the ForeachWriter so each executor-side writer can reach storage.
    @Resource
    BeCalculateTaskDao beCalTask;

    /**
     * Starts a Spark Structured Streaming query that consumes records from Kafka
     * and dispatches each row to a {@link RapidForeachWriter} backed by {@code beCalTask}.
     *
     * <p>Kafka bootstrap servers and the topic subscription are read from the
     * properties supplied by {@code CalculateService}. This method blocks on
     * {@code awaitTermination()} until the query stops or fails.
     */
    public void startStruStram() {
        try {
            CalculateService calService = new CalculateService();
            Properties prop = calService.getProperties();
            String servers_kafka = prop.getProperty("spark.servers_kf");
            String subscribe = prop.getProperty("spark.subscribe");

            SparkConf conf = calService.getSparkConfig(prop);
            // Ship the jar containing this class to the executors so the
            // ForeachWriter can be deserialized there.
            conf.setJars(SparkContext.jarOfClass(this.getClass()).toList());
            SparkSession spark = SparkSession.builder().config(conf).getOrCreate();

            // Only the message payload is needed; cast the Kafka value bytes to String.
            Dataset<Row> lines = spark.readStream()
                    .format("kafka")
                    .option("kafka.bootstrap.servers", servers_kafka)
                    .option("subscribe", subscribe)
                    .load().selectExpr("CAST(value AS STRING)");

            StreamingQuery query = lines.writeStream()
                    .foreach(new RapidForeachWriter<Row>(beCalTask))
                    .start();
            query.awaitTermination();
        } catch (StreamingQueryException e) {
            // Log through SLF4J with the cause attached instead of printStackTrace(),
            // so the failure reaches the application's configured log output.
            log.error("Structured streaming query terminated with an error", e);
        }
    }
}
/**
 * Per-partition sink for a Structured Streaming query: Spark calls
 * {@link #open(long, long)} once per partition/epoch, {@link #process(Object)}
 * for every row, then {@link #close(Throwable)}. Instances are serialized to
 * the executors, so every field must be serializable.
 *
 * @param <Rows> row type emitted by the streaming query
 */
public class RapidForeachWriter<Rows> extends ForeachWriter<Rows> implements Serializable {

    private static final long serialVersionUID = 5549303376220035143L;
    private static final Logger log = LoggerFactory.getLogger(RapidForeachWriter.class);

    // DAO used on the executor side to persist calculation-task data.
    // NOTE(review): removed the unused JavaSparkContext field — it was never
    // referenced, and JavaSparkContext is not serializable, so retaining it
    // risks a task-serialization failure when this writer is shipped to executors.
    private BeCalculateTaskDao beCalTask;

    public RapidForeachWriter(BeCalculateTaskDao beCalTask) {
        log.info("RapidForeachWriter 检查 BeCalculateTaskDao 是否为空"+beCalTask);
        this.beCalTask = beCalTask;
    }

    @Override
    public void close(Throwable errorOrNull) {
        // No per-partition resources to release.
    }

    /**
     * Decides whether this partition should be processed.
     *
     * @return {@code true} only when the DAO is available; a null DAO means
     *         rows cannot be persisted, so the partition is skipped.
     */
    @Override
    public boolean open(long partitionId, long version) {
        log.info("检查 beCalTask 是否为空-[partitionId:"+partitionId+";version:"+version+"]"+beCalTask);
        // Bug fix: the original used beCalTask.equals(null), which can never be
        // true (equals(null) returns false by contract) and throws an NPE when
        // beCalTask IS null — only the surrounding catch made it "work". A plain
        // null comparison expresses the intent directly.
        return beCalTask != null;
    }

    @Override
    public void process(Rows value) {
        // Intentionally empty: row handling is not implemented yet.
    }
}