package com.chouqu

import org.apache.spark.sql.SparkSession

object Date02 {
  def main(args: Array[String]): Unit = {
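    // Build a Hive-enabled session. The dynamic-partition settings relax
    // Hive's defaults; the insert below uses a static partition, so they
    // are precautionary rather than required.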
    val spark: SparkSession = SparkSession
      .builder()
      .appName("Date02")
      .master("local[*]")
      .config("hive.exec.max.dynamic.partitions", 5000)
      .config("hive.exec.dynamic.partition.mode", "nonstrict")
      .enableHiveSupport()
      .getOrCreate()
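
    // Read the source table from MySQL over JDBC and register it as a
    // temporary view. Note: com.mysql.jdbc.Driver is the legacy driver
    // class name; MySQL Connector/J 8+ uses com.mysql.cj.jdbc.Driver.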
    spark.read
      .format("jdbc")
      .option("driver", "com.mysql.jdbc.Driver")
      .option("url", "jdbc:mysql://192.168.45.5:29677/ds_db01?useSSL=false")
      .option("user", "root")
      .option("password", "123456")
      .option("dbtable", "product_info")
      .load()
      .createOrReplaceTempView("product_info_yi")
spark.sql("use ods")
// spark.sql("drop table product_info")
    spark.sql(
      """
        |create table if not exists product_info(
        |product_id string,
        |product_core string,
        |product_name string,
        |bar_code string,
        |brand_id string,
        |one_category_id string,
        |two_category_id string,
        |three_category_id string,
        |supplier_id string,
        |price string,
        |average_cost string,
        |publish_status string,
        |audit_status string,
        |weight string,
        |length string,
        |height string,
        |width string,
        |color_type string,
        |production_date string,
        |shelf_life string,
        |descript string,
        |indate string,
        |modified_time string
        |)
        |partitioned by (etl_date string)
        |row format delimited fields terminated by '\t'
        |""".stripMargin)
// spark.sql("insert into table product_info_yi select row_number() over() as modified_time from product_info")
    spark.sql(
      """
        |insert into table product_info partition (etl_date = '20221024')
        |select * from product_info_yi
        |""".stripMargin)
spark.sql("show partitions ods.product_info").show()
spark.sql("select * from product_info limit 10").show()
  }
}