4 changes: 2 additions & 2 deletions README.md

@@ -34,7 +34,7 @@ sbt test
 ## Running the application in local mode
 
 ```
-spark-submit --class com.cloudwick.spark.WordCountRunner \
+spark-submit --class com.careerbuilder.spark.WordCountRunner \
   --master "local[*]" \
   target/scala-2.10/spark-starter_2.10-1.0.jar \
   [input_path] \
@@ -44,7 +44,7 @@ spark-submit --class com.cloudwick.spark.WordCountRunner \
 ## Running the application in the cluster
 
 ```
-spark-submit --class com.cloudwick.spark.WordCountRunner \
+spark-submit --class com.careerbuilder.spark.WordCountRunner \
   --master "local[*]" \
   [path_to_jar]/spark-starter_2.10-1.0.jar \
   [input_path] \

21 changes: 16 additions & 5 deletions build.sbt

@@ -1,4 +1,4 @@
-name := "spark-starter"
+name := "Carotene4"
 
 version := "1.0"
 
@@ -8,7 +8,9 @@ resolvers ++= Seq(
   "typesafe-repository" at "http://repo.typesafe.com/typesafe/releases"
 )
 
-val sparkVersion = "1.2.1"
+resolvers += "Artima Maven Repository" at "http://repo.artima.com/releases"
+
+val sparkVersion = "1.6.0"
 
 // build a uber jar if using any of the external streaming components
 libraryDependencies ++= Seq(
@@ -18,11 +20,20 @@ libraryDependencies ++= Seq(
   "org.apache.spark" %% "spark-streaming-kafka" % sparkVersion,
   "org.apache.spark" %% "spark-streaming-twitter" % sparkVersion,
   // Test dependencies
-  "org.scalatest" %% "scalatest" % "2.2.4" % "test",
+  // "org.scalatest" %% "scalatest" % "2.2.4" % "test",
+  "org.scalactic" %% "scalactic" % "3.0.0",
+  "org.scalatest" %% "scalatest" % "3.0.0" % "test",
   "org.xerial.snappy" % "snappy-java" % "1.1.1.7"
-)
+)
+
+assemblyMergeStrategy in assembly := {
+  case PathList("org", "apache", "spark", "unused", "UnusedStubClass.class") => MergeStrategy.discard
+  case x =>
+    val oldStrategy = (assemblyMergeStrategy in assembly).value
+    oldStrategy(x)
+}
 
 parallelExecution in Test := false
 
 fork in Test := true

2 changes: 1 addition & 1 deletion project/assembly.sbt

@@ -1 +1 @@
-addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.13.0")
+addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.14.3")

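Taken together, the sbt-assembly bump to 0.14.3 above and the new assemblyMergeStrategy block in build.sbt keep `sbt assembly` from failing on the duplicate UnusedStubClass that the Spark streaming connectors pull into the uber jar; everything else falls through to the plugin's default strategy. A minimal sketch of the same block with one further, purely illustrative case (the META-INF entry is an assumption, not part of this change):

```scala
// sbt-assembly 0.14.x merge strategy, as it would sit in build.sbt.
// The META-INF case is an illustrative assumption, not part of this PR.
assemblyMergeStrategy in assembly := {
  case PathList("org", "apache", "spark", "unused", "UnusedStubClass.class") =>
    MergeStrategy.discard
  case PathList("META-INF", "MANIFEST.MF") =>
    MergeStrategy.discard // duplicate manifests are safe to drop from a fat jar
  case x =>
    // Fall back to whatever sbt-assembly would have done by default.
    val oldStrategy = (assemblyMergeStrategy in assembly).value
    oldStrategy(x)
}
```
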
5 changes: 4 additions & 1 deletion project/plugins.sbt

@@ -1 +1,4 @@
-logLevel := Level.Warn
+logLevel := Level.Warn
+resolvers += "Artima Maven Repository" at "http://repo.artima.com/releases"
+
+addSbtPlugin("com.artima.supersafe" % "sbtplugin" % "1.1.0")

@@ -1,11 +1,11 @@
-package com.cloudwick.spark
+package com.careerbuilder.spark
 
 import org.apache.spark.SparkContext._
 import org.apache.spark.rdd.RDD
 
 /**
  * Simple word count application to illustrate spark standalone applications usage
- * @author ashrith
+ * @author oozturk
  */
 
 case class WordCount(word: String, count: Int)

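The word-count implementation itself is collapsed in the diff, so only the package rename, the author tag and the WordCount model are visible. A rough sketch of the kind of transformation that returns this case class; the object name, the `count` helper and the stop-word parameter are assumptions, not the file's actual code:

```scala
package com.careerbuilder.spark

import org.apache.spark.rdd.RDD

// Sketch only: the real implementation is collapsed in the diff above.
// `WordCountSketch`, `count` and `stopWords` are hypothetical names.
object WordCountSketch {
  def count(lines: RDD[String], stopWords: Set[String] = Set.empty): RDD[WordCount] =
    lines
      .flatMap(_.toLowerCase.split("""\W+"""))
      .filter(word => word.nonEmpty && !stopWords.contains(word))
      .map(word => (word, 1))
      .reduceByKey(_ + _)                          // pair-RDD implicits are in scope since Spark 1.3
      .map { case (word, n) => WordCount(word, n) }
}
```
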
@@ -1,10 +1,10 @@
-package com.cloudwick.spark
+package com.careerbuilder.spark
 
 import org.apache.spark.{SparkContext, SparkConf, Logging}
 
 /**
  * Main method to the Spark WordCount application.
- * @author ashrith
+ * @author oozturk
  */
 object WordCountRunner extends App with Logging {
   if (args.length < 2) {

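Here too only the argument-count guard survives the fold. A sketch of how such a runner is typically wired, assuming the two required arguments are the input and output paths shown in the README and reusing the hypothetical `WordCountSketch.count` from above:

```scala
package com.careerbuilder.spark

import org.apache.spark.{Logging, SparkConf, SparkContext}

// Sketch only: the real body is collapsed in the diff above.
object WordCountRunnerSketch extends App with Logging {
  if (args.length < 2) {
    logError("Usage: WordCountRunner <input_path> <output_path>")
    sys.exit(1)
  }

  val Array(inputPath, outputPath) = args.take(2)

  val sc = new SparkContext(new SparkConf().setAppName("WordCountRunner"))

  WordCountSketch.count(sc.textFile(inputPath))
    .map(wc => s"${wc.word}\t${wc.count}")
    .saveAsTextFile(outputPath)

  sc.stop()
}
```
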
@@ -1,8 +1,8 @@
-package com.cloudwick
+package com.careerbuilder
 
 /**
  * Pakcage object for examples
- * @author ashrith
+ * @author oozturk
  */
 package object spark {
   implicit class StringUtils(val value: String) {

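Only the header of the implicit `StringUtils` enrichment is visible before the fold. A sketch of the kind of helper such a package object usually carries; the `tokens` member is a hypothetical example, not the file's actual content:

```scala
package com.careerbuilder

// Sketch only: the real members are collapsed in the diff above.
package object spark {
  implicit class StringUtils(val value: String) {
    // Hypothetical helper: lower-case the string and split it into word tokens.
    def tokens: Seq[String] =
      value.toLowerCase.split("""\W+""").filter(_.nonEmpty).toSeq
  }
}
```
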
@@ -1,6 +1,6 @@
-package com.cloudwick.spark
+package com.careerbuilder.spark
 
-import com.cloudwick.spark.sparkspec.SparkSpec
+import com.careerbuilder.spark.WordCount
 import org.scalatest.{FlatSpec, GivenWhenThen, Matchers}
 
 /**

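The spec's assertions are folded away, but the imports (FlatSpec, GivenWhenThen, Matchers, now on ScalaTest 3.0.0) show the testing style, and SparkSpec, moved into the same package by this change, supplies the Spark fixture. A sketch of a test in that style, assuming SparkSpec exposes a shared context as `sc` and reusing the hypothetical `WordCountSketch`:

```scala
package com.careerbuilder.spark

import org.scalatest.{FlatSpec, GivenWhenThen, Matchers}

// Sketch only: assumes SparkSpec provides a shared SparkContext via `sc`.
class WordCountSpecSketch extends FlatSpec with SparkSpec with GivenWhenThen with Matchers {

  "WordCount" should "count the occurrences of each word" in {
    Given("a small in-memory corpus")
    val lines = sc.parallelize(Seq("to be or not to be"))

    When("the counts are computed")
    val counts = WordCountSketch.count(lines)
      .collect()
      .map(wc => wc.word -> wc.count)
      .toMap

    Then("every word maps to its frequency")
    counts("to") shouldBe 2
    counts("be") shouldBe 2
    counts("or") shouldBe 1
    counts("not") shouldBe 1
  }
}
```
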
@@ -1,4 +1,4 @@
-package com.cloudwick.spark.sparkspec
+package com.careerbuilder.spark
 
 import org.apache.spark.{SparkConf, SparkContext}
 import org.scalatest.{Suite, BeforeAndAfterAll}

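The trait body is collapsed as well; the imports (SparkConf, SparkContext, Suite, BeforeAndAfterAll) point at a shared-SparkContext fixture, which is also why build.sbt now sets `parallelExecution in Test := false` and `fork in Test := true`. A sketch of such a trait; the `sc` accessor and the lifecycle details are assumptions, not the repo's actual code:

```scala
package com.careerbuilder.spark

import org.apache.spark.{SparkConf, SparkContext}
import org.scalatest.{BeforeAndAfterAll, Suite}

// Sketch only: mirrors the shape suggested by the imports above.
trait SparkSpec extends BeforeAndAfterAll { this: Suite =>

  private var _sc: SparkContext = _
  def sc: SparkContext = _sc

  override def beforeAll(): Unit = {
    super.beforeAll()
    val conf = new SparkConf()
      .setMaster("local[*]")
      .setAppName(getClass.getSimpleName)
    _sc = new SparkContext(conf)
  }

  override def afterAll(): Unit = {
    if (_sc != null) {
      _sc.stop()
      _sc = null
    }
    super.afterAll()
  }
}
```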