I can not access SparkConf in the package. But I already imported import org.apache.spark.SparkConf . My code is:
import org.apache.spark.SparkContext import org.apache.spark.SparkContext._ import org.apache.spark.SparkConf import org.apache.spark.rdd.RDD import org.apache.spark._ import org.apache.spark.streaming._ import org.apache.spark.streaming.StreamingContext._ object SparkStreaming { def main(arg: Array[String]) = { val conf = new SparkConf.setMaster("local[2]").setAppName("NetworkWordCount") val ssc = new StreamingContext( conf, Seconds(1) ) val lines = ssc.socketTextStream("localhost", 9999) val words = lines.flatMap(_.split(" ")) val pairs_new = words.map( w => (w, 1) ) val wordsCount = pairs_new.reduceByKey(_ + _) wordsCount.print() ssc.start()
sbt dependencies:
name := "Spark Streaming" version := "1.0" scalaVersion := "2.10.4" libraryDependencies ++= Seq( "org.apache.spark" %% "spark-core" % "1.5.2" % "provided", "org.apache.spark" %% "spark-mllib" % "1.5.2", "org.apache.spark" %% "spark-streaming" % "1.5.2" )
But the error shows that SparkConf impossible to access.
[error] /home/cliu/Documents/github/Spark-Streaming/src/main/scala/Spark-Streaming.scala:31: object SparkConf in package spark cannot be accessed in package org.apache.spark [error] val conf = new SparkConf.setMaster("local[2]").setAppName("NetworkWordCount") [error] ^
source share