How to save a Spark DataFrame as a partitioned Hive table

Utilise the saveAsTable method of DataFrameWriter:

import org.apache.spark.{SparkConf, SparkContext}

val conf = new SparkConf().setAppName("Simple Application").setMaster("local")
val sc = new SparkContext(conf)

// HiveContext extends SQLContext, so a separate SQLContext is not needed;
// it is what gives access to Hive databases and tables
val hiveContext = new org.apache.spark.sql.hive.HiveContext(sc)
import hiveContext.implicits._

// Switch to the database that holds the source table
hiveContext.sql("use database")

// Query the source table into a DataFrame
val cmd =
  """
    |select
    |  col1,
    |  col2
    |from
    |  table
  """.stripMargin
val yourDf = hiveContext.sql(cmd)
yourDf.printSchema()

// Write the DataFrame as a Hive table partitioned on col2;
// each distinct value of col2 becomes its own partition directory
yourDf.write.partitionBy("col2").saveAsTable("partitionTableName")
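
For Spark 2.x and later, the same write can go through a Hive-enabled SparkSession instead of the SparkContext/HiveContext pair. The following is a minimal sketch under that assumption, reusing the placeholder database, table and column names from above; the mode("overwrite") call is an added choice so that reruns replace the existing table.

import org.apache.spark.sql.SparkSession

// Hive-enabled SparkSession replaces the SparkContext/HiveContext pair
val spark = SparkSession.builder()
  .appName("Simple Application")
  .master("local")
  .enableHiveSupport()
  .getOrCreate()

spark.sql("use database")
val yourDf = spark.sql("select col1, col2 from table")
yourDf.printSchema()

// Overwrite the table if it already exists; data lands in
// col2=<value> subdirectories, one per distinct partition value
yourDf.write
  .mode("overwrite")
  .partitionBy("col2")
  .saveAsTable("partitionTableName")

// Optional check that the partitions were created
spark.sql("show partitions partitionTableName").show()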
