Spark on Scala with Examples
- RDDs
- Dataframe
- Dataframe with UDFs
- Dataframe with UDAFs
- Datasets
- Read from Different file formats
- Write Dataframes to multiple formats
libraryDependencies += "io.spray" %% "spray-json" % "1.3.6"
package com.test.serialization
import models.{Address, Patient}
import spray.json._
/** Demonstrates spray-json serialization of nested case classes,
  * printing a Patient (with its Address) as alphabetically-sorted JSON.
  */
object Hospital {

  /** JSON (de)serialization formats for the domain model.
    *
    * The field-name overload of `jsonFormat` is used so the JSON keys
    * are spelled out explicitly. `address` must be declared before
    * `patient`, because the Patient format needs an Address format in
    * implicit scope when it is derived.
    */
  object MyJsonProtocol extends DefaultJsonProtocol {
    // Explicit types on implicit vals avoid fragile type inference
    // (and are mandatory for implicits in Scala 3 / under -Xsource:3).
    implicit val address: RootJsonFormat[Address] =
      jsonFormat(Address, "country", "state", "zip")
    implicit val patient: RootJsonFormat[Patient] =
      jsonFormat(Patient, "name", "regNumber", "address")
  }
  import MyJsonProtocol._

  def main(args: Array[String]): Unit = {
    val p1 = Patient(name = "Amar", regNumber = 234, address = Address("IN", "KA", 49))
    // sortedPrint renders object keys in alphabetical order (spray-json >= 1.3.3).
    println(p1.toJson.sortedPrint)
  }
}
Patient
package com.test.serialization.models
/** A hospital patient record.
  *
  * @param name      patient's name; defaults to a masked placeholder "XXXXX"
  * @param regNumber hospital registration number
  * @param address   patient's postal address
  */
final case class Patient(name: String = "XXXXX", regNumber: Int, address: Address) {

  /** Number of characters in the patient's name. */
  def lengthOfName: Int = name.length

  /** The name prefixed with a generic honorific. */
  def getFullName: String = "Mr./Mrs. " + this.name
}
Address
package com.test.serialization.models
/** Postal address of a patient: country code, state code and numeric zip. */
final case class Address(country: String, state: String, zip: Int)
Output
{
"address": {
"country": "IN",
"state": "KA",
"zip": 49
},
"name": "Amar",
"regNumber": 234
}