Unable to run multiple streaming queries inside `foreachBatch`
import org.apache.spark.sql.SparkSession import org.apache.spark.sql._ import org.apache.spark.SparkConf import org.apache.spark.sql.functions.{col, from_json} import org.apache.spark.sql.types.{IntegerType, StringType, StructType} import java.time.{ZonedDateTime, ZoneId} import java.time.format.DateTimeFormatter object SparkStreamingKafka1 { def main(args:Array[String]):Unit={ System.setProperty("hadoop.home.dir", "C:\hadoop\") val spark = SparkSession.builder().appName("test").master("local[*]").getOrCreate() spark.sparkContext.setLogLevel("OFF") import spark.implicits._ val df = spark.readStream .format("kafka") .option("kafka.bootstrap.servers", "localhost:9092") .option("subscribe", "demo2") .option("startingOffsets", "earliest") // From starting .load() val personStringDF = df.selectExpr("CAST(value AS STRING)") val schema=new … Read more