Reading from MySQL over JDBC, three equivalent approaches:

import java.util.Properties

import org.apache.spark.SparkConf
import org.apache.spark.sql.{DataFrame, SparkSession}

object SparkSQL02_Datasource {
  def main(args: Array[String]): Unit = {
    // Create the Spark configuration
    val conf: SparkConf = new SparkConf().setMaster("local[*]").setAppName("SparkSQL01_Demo")
    // Create the SparkSession
    val spark: SparkSession = SparkSession.builder().config(conf).getOrCreate()
    import spark.implicits._

    // Approach 1: generic load(), with each connection parameter as a separate option
    spark.read.format("jdbc")
      .option("url", "jdbc:mysql://hadoop202:3306/test")
      .option("driver", "com.mysql.jdbc.Driver")
      .option("user", "root")
      .option("password", "123456")
      .option("dbtable", "user")
      .load().show

    // Approach 2: generic load(), with the parameters passed as a single Map
    spark.read.format("jdbc")
      .options(Map(
        "url" -> "jdbc:mysql://hadoop202:3306/test?user=root&password=123456",
        "dbtable" -> "user",
        "driver" -> "com.mysql.jdbc.Driver"))
      .load().show

    // Approach 3: the jdbc() convenience method
    val props: Properties = new Properties()
    props.setProperty("user", "root")
    props.setProperty("password", "123456")
    val df: DataFrame = spark.read.jdbc("jdbc:mysql://hadoop202:3306/test", "user", props)
    df.show

    // Release resources
    spark.stop()
  }
}
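Not shown in the original post: all three approaches above read the table through a single connection. Spark's JDBC source also accepts the documented partitionColumn / lowerBound / upperBound / numPartitions options for parallel reads. A minimal sketch, reusing the spark session from above and assuming the user table has a numeric id column (hypothetical; the post never shows the table schema):

    // Sketch: partitioned JDBC read. The `id` column is an assumption,
    // not something the original post defines.
    val partitioned: DataFrame = spark.read.format("jdbc")
      .option("url", "jdbc:mysql://hadoop202:3306/test")
      .option("driver", "com.mysql.jdbc.Driver")
      .option("user", "root")
      .option("password", "123456")
      .option("dbtable", "user")
      .option("partitionColumn", "id") // hypothetical numeric column
      .option("lowerBound", "1")
      .option("upperBound", "1000")
      .option("numPartitions", "4")    // issued as 4 parallel range queries
      .load()
    partitioned.show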
Writing a Dataset to MySQL over JDBC, two approaches:

import java.util.Properties

import org.apache.spark.SparkConf
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{Dataset, SaveMode, SparkSession}

object SparkSQL03_Datasource {
  def main(args: Array[String]): Unit = {
    // Create the Spark configuration
    val conf: SparkConf = new SparkConf().setMaster("local[*]").setAppName("SparkSQL01_Demo")
    // Create the SparkSession
    val spark: SparkSession = SparkSession.builder().config(conf).getOrCreate()
    import spark.implicits._

    // Build a Dataset[User2] to write out
    val rdd: RDD[User2] = spark.sparkContext.makeRDD(List(User2("lisi", 20), User2("zs", 30)))
    val ds: Dataset[User2] = rdd.toDS

    // Approach 1: generic write, with format() specifying the output type
    ds.write
      .format("jdbc")
      .option("url", "jdbc:mysql://hadoop202:3306/test")
      .option("user", "root")
      .option("password", "123456")
      .option("dbtable", "user")
      .mode(SaveMode.Append)
      .save()

    // Approach 2: the jdbc() convenience method
    val props: Properties = new Properties()
    props.setProperty("user", "root")
    props.setProperty("password", "123456")
    ds.write.mode(SaveMode.Append).jdbc("jdbc:mysql://hadoop202:3306/test", "user", props)

    // Release resources
    spark.stop()
  }
}

case class User2(name: String, age: Long)
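A note the original post leaves implicit: both writes use SaveMode.Append, which only adds rows. With SaveMode.Overwrite, Spark by default drops and recreates the target table; the documented truncate writer option issues TRUNCATE TABLE instead, preserving the existing MySQL schema. A sketch, reusing ds from above:

    // Sketch: overwrite the table's contents while keeping its MySQL-side schema.
    // Without "truncate", SaveMode.Overwrite drops and recreates the table.
    ds.write
      .format("jdbc")
      .option("url", "jdbc:mysql://hadoop202:3306/test")
      .option("user", "root")
      .option("password", "123456")
      .option("dbtable", "user")
      .option("truncate", "true")
      .mode(SaveMode.Overwrite)
      .save()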
Original article: https://www.cnblogs.com/ttyypjt/p/14757079.html