标签:
隐式转换函数
implicit def function
例如
implicit def rddToPairRDDFunctions[K, V](rdd: RDD[(K, V)])
scala> class Person(val name: String)
defined class Person
scala> class Enginner(val name: String, val salary: Double) {
| def code = println(name + " Coding ...")
| }
defined class Enginner
scala> implicit def person2Engineer(p: Person):Enginner = {
| println("person2Enginner " + p.name)
| new Enginner(p.name, 1000)
| }
warning: there were 1 feature warning(s); re-run with -feature for details
person2Engineer: (p: Person)Enginner
scala> def toCode(p: Person) {
| p.code
| }
toCode: (p: Person)Unit
scala> toCode(new Person("Scala"))
person2Enginner Scala
Scala Coding ...
package com.dt.scala.implicits
import scala.io.Source
import java.io.File
/** Wraps a java.io.File and adds a convenience `read` returning its full contents. */
class RicherFile(val file: File) {
  /** The entire contents of the wrapped file as a single String. */
  def read = {
    val source = Source.fromFile(file.getPath())
    source.mkString
  }
}
/** A File subclass whose companion provides an implicit enrichment to RicherFile. */
class File_Implicits(path: String) extends File(path)

object File_Implicits {
  // Companion-object implicit scope: the conversion is found automatically when a
  // File_Implicits (a File) needs a RicherFile member — no explicit import required.
  // Explicit result type added: implicit defs without one trigger the -feature
  // warning shown in the transcript above and are illegal in Scala 3.
  implicit def file2RicherFile(file: File): RicherFile = new RicherFile(file)
}
/** Demo: the implicit conversion is resolved from the companion object of File_Implicits. */
object Implicits_Internals {
  def main(args: Array[String]): Unit = { // explicit `: Unit =` — procedure syntax is deprecated
    // val, not var: the reference is never reassigned.
    val file = new File_Implicits("content.txt")
    // `read` is not a File member; the compiler applies File_Implicits.file2RicherFile.
    println(file.read)
  }
}
寻找隐式值时,编译器还会到隐式参数类型的伴生对象中去查找隐式值
隐式参数
scala> class Level(val level: Int)
defined class Level
scala> def toWorker(name: String) (implicit level: Level) {
| println(name + " : " + level.level)
| }
toWorker: (name: String)(implicit level: Level)Unit
scala> implicit val level = new Level(8)
level: Level = Level@327d8933
scala> toWorker("Spark")
Spark : 8
package com.dt.scala.implicits
/** Holds the fallback implicit String injected into implicit parameter lists. */
object Context_Implicits {
  // Picked up automatically once `Context_Implicits._` is imported.
  implicit val default: String = "Flink"
}
/** Demonstrates an implicit parameter supplying the language prefix. */
object Param {
  /** Prints `content` prefixed by the (implicitly or explicitly supplied) language. */
  def print(content: String)(implicit language: String): Unit = { // `: Unit =` — procedure syntax is deprecated
    println(language + " : " + content)
  }
}
/** Demo: explicit vs. implicitly resolved value for Param.print's language. */
object Implicit_Parameters {
  def main(args: Array[String]): Unit = { // `: Unit =` — procedure syntax is deprecated
    // Passing the second parameter list explicitly overrides any implicit in scope.
    Param.print("Spark")("Scala")
    // After the import, Context_Implicits.default ("Flink") fills the implicit slot.
    import Context_Implicits._
    Param.print("Hadoop")
  }
}
/** Type-class-style contract: a binary combiner over T. */
abstract class Template[T] {
  /** Combines two values of T into a single T. */
  def add(x: T, y: T): T
}
/** Extends Template with an identity element — together forming a monoid over T. */
abstract class SubTemplate[T] extends Template[T] {
  // Fixed typo: was `uint`, but the instances below override `unit` and
  // `sum` calls `m.unit`, so the file could not compile with the old name.
  def unit: T
}
/** Demo: implicit objects as type-class instances resolved by sum's implicit parameter. */
object Implicits_Object {
  def main(args: Array[String]): Unit = { // `: Unit =` — procedure syntax is deprecated
    // Stray trailing backslashes (paste artifact) removed from the two object headers.
    // String instance: concatenation, with "" as the identity.
    implicit object StringAdd extends SubTemplate[String] {
      override def add(x: String, y: String) = x concat y
      override def unit: String = ""
    }
    // Int instance: addition, with 0 as the identity.
    implicit object IntAdd extends SubTemplate[Int] {
      override def add(x: Int, y: Int) = x + y
      override def unit: Int = 0
    }
    // Folds the list using whichever SubTemplate[T] instance the compiler finds.
    def sum[T](xs: List[T])(implicit m: SubTemplate[T]): T =
      if (xs.isEmpty) m.unit
      else m.add(xs.head, sum(xs.tail))
    println(sum(List(1, 2, 3, 4, 5)))
    println(sum(List("Scala", "Spark", "Kafka")))
  }
}
import java.io.File
import scala.io.Source
/** Implicit classes providing extension methods; renamed from the typo
  * `Context_hepler` — the demo below imports `Context_Helper._`, so the
  * misspelled name broke compilation. */
object Context_Helper {
  // Enriches java.io.File with a whole-file `read`.
  implicit class FileEnhancer(file: File) {
    def read = Source.fromFile(file.getPath).mkString
  }
  // Adds `addSAP` to Int.
  implicit class Op(x: Int) {
    def addSAP(second: Int) = x + second
  }
}
/** Demo: extension methods supplied by the implicit classes in Context_Helper. */
object Implicits_Class {
  def main(args: Array[String]): Unit = { // `: Unit =` — procedure syntax is deprecated
    import Context_Helper._
    // Int has no addSAP, so the compiler wraps 1 in the implicit Op class.
    println(1.addSAP(2))
    // java.io.File has no read; FileEnhancer supplies it.
    println(new File("context.txt").read)
  }
}
Java共享全局变量的加锁机制
Scala使用Actor实现并发编程
scala> import scala.actors.Actor
import scala.actors.Actor
scala> class HiActor extends Actor {
| def act(){
| while(true){
| receive {
| case name: String => println(name)
| }
| }
| }
| }
defined class HiActor
scala> val actor = new HiActor
actor: HiActor = HiActor@343e4e76
scala> actor.start()
res3: scala.actors.Actor = HiActor@343e4e76
scala> actor ! "Spark"
scala> Spark
scala> case class Basic(name: String, age: Int)
defined class Basic
scala> case class Worker(name: String, age: Int)
defined class Worker
scala> class BasicActor extends Actor {
| def act(){
| while(true) {
| receive {
| case Basic(name, age) => println("Basic Information " + name + " , " + age)
| case Worker(name, age) => println("Worker Information " + name + " , " + age)
| }
| }
| }
| }
defined class BasicActor
scala> val b = new BasicActor
b: BasicActor = BasicActor@469f6ecc
scala> b.start
res5: scala.actors.Actor = BasicActor@469f6ecc
scala> b ! Basic("Scala", 13)
scala> Basic Information Scala , 13
scala> b ! Worker("Hadoop", 15)
Worker Information Hadoop , 15
!? 发送消息并阻塞等待,必须等到 Actor 把该消息处理完并返回结果才继续
!! 发送消息后立即返回一个 Future,可在未来某个时间点获取处理结果
首先,我要说明下,我的笔记及作业都来自王家林老师的大数据门徒3000课程,希望大家多多关注,里面都是经典,这里是老师的电子名片:
第5课:彻底精通Scala隐式转换和并发编程及Spark源码阅读
标签:
原文地址:http://www.cnblogs.com/jkge/p/5126018.html