case "phoenix" => {
  // Phoenix output sink: for every micro-batch RDD, convert it to a DataFrame
  // of the configured bean class and upsert it into the Phoenix table.
  outputStream.foreachRDD(rdd => {
    // Reuse the already-running session (getOrCreate) instead of building a
    // new one per batch; the conf comes from the RDD's SparkContext.
    val spark = SparkSession.builder().config(rdd.sparkContext.getConf).getOrCreate()
    // createDataFrame(RDD[_], Class[_]) derives the schema from the Java
    // bean named by settings.BEAN_CLASS via reflection.
    val ds = spark.createDataFrame(rdd, Class.forName(settings.BEAN_CLASS))
    ds.write
      // Spark 3 fix: the "org.apache.phoenix.spark" DataSource shipped inside
      // phoenix-4.x (and the phoenix-5.0.0-HBase-2.0 bundle) is built against
      // the Spark 1/2 DataSource API and fails under Spark 3. Replace that jar
      // with the standalone connector from the phoenix-connectors project —
      // artifact org.apache.phoenix:phoenix5-spark3 — which registers the
      // short format name "phoenix" used here.
      .format("phoenix")
      // The connector performs Phoenix UPSERTs; Overwrite is the mode its
      // documentation uses for DataFrame saves.
      .mode(SaveMode.Overwrite)
      .options(Map(
        "table" -> settings.OUTPUT_TABLENAME,
        "zkUrl" -> settings.ZK_URL,
        // NOTE(review): with the v2 connector the znode parent can also be
        // carried inside zkUrl as "host:port:/znode" — verify which form your
        // connector version honors.
        "zookeeper.znode.parent" -> settings.ZNODE_PARENT,
        "hbase.rootdir" -> settings.ROOT_DIR,
        // 0 disables the client-side KeyValue size limit for wide rows.
        "hbase.client.keyvalue.maxsize" -> "0"))
      .save()
  })
}
This works with Spark 2.2 and phoenix-4.12.0-HBase-1.2, but it does not work with the Spark 3.0 preview and phoenix-5.0.0-HBase-2.0. How can I fix it?