import org.apache.spark.sql.SparkSession
object SimpleApp {
  def main(args: Array[String]): Unit = {
    // One SparkSession is enough; it creates and owns the SparkContext,
    // so a separate SparkConf/SparkContext is not needed.
    val spark = SparkSession.builder()
      .appName("HiveToPhoenix")
      .master("local[*]")
      // hive.metastore.uris should point at the metastore thrift service
      // (default port 9083); port 10000 is HiveServer2, used for JDBC below.
      .config("hive.metastore.uris", "thrift://11.22.333.444:9083")
      .enableHiveSupport()
      .getOrCreate()
    val jdbcDF = spark.read.format("jdbc")
      .option("url", "jdbc:hive2://11.22.333.444:10000")
      .option("dbtable", "temp.test_db")
      .option("user", "hive")
      .option("password", "1234")
      .option("driver", "org.apache.hive.jdbc.HiveDriver")
      // note: numPartitions alone does not split the read; see the sketch below
      .option("numPartitions", 5)
      .load()
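
    // For a genuinely parallel read, the JDBC source also needs a partition
    // column and bounds. A minimal sketch, assuming yymmdd (the column
    // queried below) is numeric and the bounds roughly cover the data:
    val partitionedDF = spark.read.format("jdbc")
      .option("url", "jdbc:hive2://11.22.333.444:10000")
      .option("dbtable", "temp.test_db")
      .option("user", "hive")
      .option("password", "1234")
      .option("driver", "org.apache.hive.jdbc.HiveDriver")
      .option("partitionColumn", "yymmdd")
      .option("lowerBound", "20210101")  // assumed range, adjust to the data
      .option("upperBound", "20211231")
      .option("numPartitions", 5)
      .load()
    partitionedDF.show()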
println("able to connect------------------")
jdbcDF.show()
jdbcDF.printSchema
spark.sql("SELECT * FROM temp.test_dbwhere yymmdd=20210322").show()
sc.stop()
}
}
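
Given the app name, the missing half is the write to Phoenix. Below is a minimal sketch using the phoenix-spark connector, to slot in before spark.stop(); the table name TEST_DB and ZooKeeper quorum zkhost:2181 are hypothetical placeholders, and the connector jar must be on the classpath. Note that phoenix-spark only accepts overwrite mode, and it upserts rows rather than truncating the table.

// Write the Hive rows into Phoenix via the phoenix-spark connector.
jdbcDF.write
  .format("org.apache.phoenix.spark")
  .mode("overwrite")               // the connector requires overwrite; it upserts
  .option("table", "TEST_DB")      // hypothetical Phoenix target table
  .option("zkUrl", "zkhost:2181")  // hypothetical ZooKeeper quorum
  .save()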