// Author: dgadiraju
// Date: 7/9/2017 - 12:34 AM
//
// File: scala-spark-sql-create-sql-context.scala

// Build a SQLContext on top of the SparkContext (`sc` is provided by spark-shell).
// NOTE(review): SQLContext is the Spark 1.x entry point; Spark 2.x+ code would use
// SparkSession instead — kept as-is for compatibility with this material.
val sqlContext: SQLContext = new SQLContext(sc)

// Shrink the number of post-shuffle partitions (default is 200) so small
// data sets don't pay the overhead of hundreds of near-empty tasks.
sqlContext.setConf("spark.sql.shuffle.partitions", "2")

// Enable implicit conversions such as RDD.toDF and the $"column" syntax.
import sqlContext.implicits._