Parsing JSON in Spark

----

The program below reads a JSON-lines file with Spark Core and parses each line using scala.util.parsing.json, printing the resulting map on success and a message on failure.

import org.apache.spark.{SparkConf, SparkContext}

import scala.util.parsing.json.JSON

object JSONParse {
  def main(args: Array[String]): Unit = {
    val inputFileName = "file:///Users/walker/learn/mycode/spark/test_data/people.json"

    val conf = new SparkConf().setAppName("JSONParse").setMaster("local")
    val sc = new SparkContext(conf)
    val jsonStrRDD = sc.textFile(inputFileName)
    // parseFull returns Option[Any]: Some(value) on success, None on failure
    val parsedResult = jsonStrRDD.map(line => JSON.parseFull(line))
    parsedResult.foreach {
      // Successful parse: a JSON object becomes a Map[String, Any]
      case Some(map: Map[String, Any] @unchecked) => println(map)
      // Failed parse (malformed or empty line)
      case None => println("Parsing failed")
    }
    sc.stop()
  }
}
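
Note that scala.util.parsing.json has long been deprecated (since Scala 2.11 it lives in the separate scala-parser-combinators module). On Spark 2.x and later, the usual route is Spark SQL's built-in JSON reader. A minimal sketch of that alternative, assuming the same input path:

import org.apache.spark.sql.SparkSession

object JSONParseSQL {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("JSONParseSQL")
      .master("local")
      .getOrCreate()

    // spark.read.json expects one JSON object per line (JSON Lines);
    // with the default PERMISSIVE mode, unparseable lines end up in a
    // _corrupt_record column instead of failing the job.
    val df = spark.read.json("file:///Users/walker/learn/mycode/spark/test_data/people.json")
    df.printSchema()
    df.show()

    spark.stop()
  }
}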

Original JSON data (the last two lines are deliberately malformed):

{"name":"Michael"}
{"name":"Andy", "age":30}
{"name":"Justin", "age":19}

{"name":"Justin", "age":19,hello}
{57657:12345, "age":19}
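
Run locally against this file, the three well-formed lines parse into Scala maps (parseFull reads JSON numbers as Double), while the blank line and the two malformed lines each return None. The output should therefore look roughly like this (ordering may vary across partitions):

Map(name -> Michael)
Map(name -> Andy, age -> 30.0)
Map(name -> Justin, age -> 19.0)
Parsing failed
Parsing failed
Parsing failed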

----

Reprinted from www.cnblogs.com/wooluwalker/p/12319680.html