WIP server
dszeto committed Jun 25, 2014
1 parent 79461a8 commit 261754a
Showing 9 changed files with 238 additions and 139 deletions.
5 changes: 4 additions & 1 deletion build.sbt
@@ -18,7 +18,8 @@ javacOptions in ThisBuild ++= Seq("-source", "1.7", "-target", "1.7",

lazy val root = project in file(".") aggregate(
core,
engines//,
engines,
tools
//experiment
)

@@ -28,3 +29,5 @@ lazy val core = (project in file("core"))

lazy val engines = (project in file("engines")).
dependsOn(core)

lazy val tools = project in file("tools") dependsOn(core)
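For readability only (not part of the commit), the added one-liner is equivalent to the more explicit sbt 0.13 form below; and because tools is now listed in the root aggregate, tasks such as compile and test run against it from the root project as well.

lazy val tools = (project in file("tools"))
  .dependsOn(core)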
2 changes: 1 addition & 1 deletion core/src/main/scala/Controller.scala
@@ -29,7 +29,7 @@ trait Algorithm[
def predict(model: M, feature: F): P
}

trait Server[-F <: BaseFeature, P <: BasePrediction, SP <: BaseServerParams]
trait Server[F <: BaseFeature, P <: BasePrediction, SP <: BaseServerParams]
extends BaseServer[F, P, SP] {
def init(serverParams: SP): Unit

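A standalone sketch (hypothetical Feature types, not from this commit) of what dropping the `-` variance annotation on F means: with a contravariant F, a server written against the general feature type could stand in for one expected to handle a specific feature type; with the invariant F introduced here, a server is tied to exactly one feature type.

object VarianceSketch {
  trait Feature
  case class ItemFeature(id: String) extends Feature

  trait ContraServer[-F] { def serve(f: F): String }
  trait InvServer[F] { def serve(f: F): String }

  val generalContra = new ContraServer[Feature] {
    def serve(f: Feature) = f.toString
  }
  // Compiles: contravariance lets the general server be used where a
  // ContraServer[ItemFeature] is expected.
  val specificContra: ContraServer[ItemFeature] = generalContra

  val generalInv = new InvServer[Feature] {
    def serve(f: Feature) = f.toString
  }
  // Does not compile with the invariant parameter:
  // val specificInv: InvServer[ItemFeature] = generalInv
}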
34 changes: 17 additions & 17 deletions core/src/main/scala/core/BaseController.scala
@@ -9,8 +9,8 @@ import scala.reflect.ClassTag
import org.apache.spark.SparkContext
import org.apache.spark.SparkContext._
import org.apache.spark.SparkConf
abstract

abstract
class BaseCleanser[
TD <: BaseTrainingData,
CD <: BaseCleansedData,
@@ -21,7 +21,7 @@ class BaseCleanser[
}


abstract
abstract
class LocalCleanser[
TD <: BaseTrainingData,
CD <: BaseCleansedData : Manifest,
@@ -62,15 +62,15 @@ class SparkCleanser[

abstract class BaseAlgorithm[
CD <: BaseCleansedData,
F <: BaseFeature,
F <: BaseFeature : Manifest,
P <: BasePrediction,
M <: BaseModel,
AP <: BaseAlgoParams: Manifest]
extends AbstractParameterizedDoer[AP] {

def trainBase(
sc: SparkContext,
cleansedData: BaseCleansedData): RDD[BaseModel]
sc: SparkContext,
cleansedData: BaseCleansedData): RDD[BaseModel]

def predictBase(baseModel: BaseModel, baseFeature: BaseFeature)
: BasePrediction = {
@@ -80,11 +80,14 @@ abstract class BaseAlgorithm[
}

def predict(model: M, feature: F): P

def featureClass() = manifest[F]

}

abstract class LocalAlgorithm[
CD <: BaseCleansedData,
F <: BaseFeature,
F <: BaseFeature : Manifest,
P <: BasePrediction,
M <: BaseModel : Manifest,
AP <: BaseAlgoParams: Manifest]
@@ -102,20 +105,20 @@ abstract class LocalAlgorithm[
}

def train(cleansedData: CD): M

def predict(model: M, feature: F): P
}

abstract class SparkAlgorithm[
CD <: BaseCleansedData,
F <: BaseFeature,
F <: BaseFeature : Manifest,
P <: BasePrediction,
M <: BaseModel : Manifest,
AP <: BaseAlgoParams: Manifest]
extends BaseAlgorithm[CD, F, P, M, AP] {
// train returns a local object M, and we parallelize it.
def trainBase(
sc: SparkContext,
sc: SparkContext,
cleansedData: BaseCleansedData): RDD[BaseModel] = {
val m: BaseModel = train(cleansedData.asInstanceOf[CD])
sc.parallelize(Array(m))
@@ -132,7 +135,7 @@ abstract class SparkAlgorithm[
/* Server */

abstract class BaseServer[
-F <: BaseFeature,
F <: BaseFeature,
P <: BasePrediction,
SP <: BaseServerParams: Manifest]
extends AbstractParameterizedDoer[SP] {
@@ -179,7 +182,8 @@ class BaseEngine[
: Map[String,
Class[_ <:
BaseAlgorithm[CD, F, P, _ <: BaseModel, _ <: BaseAlgoParams]]],
val serverClass: Class[_ <: BaseServer[F, P, _ <: BaseServerParams]]) {}
val serverClass: Class[_ <: BaseServer[F, P, _ <: BaseServerParams]]) {
}

class LocalEngine[
TD <: BaseTrainingData,
@@ -194,7 +198,7 @@ class LocalEngine[
LocalAlgorithm[CD, F, P, _ <: BaseModel, _ <: BaseAlgoParams]]],
serverClass: Class[_ <: BaseServer[F, P, _ <: BaseServerParams]])
extends BaseEngine(cleanserClass, algorithmClassMap, serverClass)

class SparkEngine[
TD <: BaseTrainingData,
CD <: BaseCleansedData,
@@ -208,7 +212,3 @@ class SparkEngine[
SparkAlgorithm[CD, F, P, _ <: BaseModel, _ <: BaseAlgoParams]]],
serverClass: Class[_ <: BaseServer[F, P, _ <: BaseServerParams]])
extends BaseEngine(cleanserClass, algorithmClassMap, serverClass)




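The new F : Manifest context bounds and the featureClass() accessor let generic code recover an algorithm's concrete feature type at runtime. A hypothetical use, not part of this commit and assuming json4s (which the new tools project depends on): deserializing a query into whatever feature class a given algorithm expects.

import org.json4s._
import org.json4s.native.JsonMethods._

// Hypothetical helper: parse a JSON string into the algorithm's own
// feature type F, using the Manifest exposed by featureClass().
def extractFeature[F <: BaseFeature](
    algo: BaseAlgorithm[_, F, _, _, _],
    json: String): F = {
  implicit val formats = DefaultFormats
  implicit val mf: Manifest[F] = algo.featureClass()
  parse(json).extract[F]
}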
118 changes: 0 additions & 118 deletions core/src/main/scala/tools/RunServer.scala

This file was deleted.

2 changes: 1 addition & 1 deletion engines/src/main/scala/itemrank/examples/manifest.json
@@ -4,7 +4,7 @@
"name": "Kenneth Chan Smart Item Ranking Engine",
"jars": [
"../engines/target/scala-2.10/engines_2.10-0.8.0-SNAPSHOT.jar",
"target/scala-2.10/core_2.10-0.8.0-SNAPSHOT.jar",
"../core/target/scala-2.10/core_2.10-0.8.0-SNAPSHOT.jar",
"../engines/target/scala-2.10/engines-assembly-0.8.0-SNAPSHOT-deps.jar"
],
"engineFactory": "io.prediction.engines.itemrank.ItemRankEngine",
2 changes: 1 addition & 1 deletion engines/src/main/scala/stock/examples/manifest.json
@@ -4,7 +4,7 @@
"name": "Justin Yip Always Win Stock Engine",
"jars": [
"../engines/target/scala-2.10/engines_2.10-0.8.0-SNAPSHOT.jar",
"target/scala-2.10/core_2.10-0.8.0-SNAPSHOT.jar",
"../core/target/scala-2.10/core_2.10-0.8.0-SNAPSHOT.jar",
"../engines/target/scala-2.10/engines-assembly-0.8.0-SNAPSHOT-deps.jar"
],
"engineFactory": "io.prediction.engines.stock.StockEngine",
2 changes: 2 additions & 0 deletions project/plugins.sbt
@@ -1 +1,3 @@
addSbtPlugin("net.virtual-void" % "sbt-dependency-graph" % "0.7.4")

addSbtPlugin("org.xerial.sbt" % "sbt-pack" % "0.5.1")
25 changes: 25 additions & 0 deletions tools/build.sbt
@@ -0,0 +1,25 @@
name := "tools"

libraryDependencies ++= Seq(
"ch.qos.logback" % "logback-classic" % "1.1.2",
"com.github.scopt" %% "scopt" % "3.2.0",
"com.twitter" %% "chill" % "0.3.6" exclude("com.esotericsoftware.minlog", "minlog"),
"com.twitter" %% "chill-bijection" % "0.3.6",
"com.typesafe" % "config" % "1.2.1",
"com.typesafe.akka" %% "akka-actor" % "2.3.3",
"com.typesafe.akka" %% "akka-cluster" % "2.3.3",
"com.typesafe.akka" %% "akka-contrib" % "2.3.3",
"com.typesafe.akka" %% "akka-testkit" % "2.3.3",
"io.spray" % "spray-can" % "1.3.1",
"io.spray" % "spray-routing" % "1.3.1",
"org.apache.spark" %% "spark-core" % "1.0.0" % "provided",
"org.clapper" %% "grizzled-slf4j" % "1.0.2",
"org.mongodb" %% "casbah" % "2.7.2",
"org.json4s" %% "json4s-native" % "3.2.10",
"org.json4s" %% "json4s-ext" % "3.2.10")

Revolver.settings

packSettings

packMain := Map("runserver" -> "io.prediction.tools.RunServer")
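RunServer.scala is deleted from core above and, per the packMain mapping, is expected to reappear as io.prediction.tools.RunServer in the new tools project; sbt-pack's pack task will then generate a target/pack/bin/runserver launcher script from that entry. The class itself is not in this commit (it is the "WIP" part), so the following is only a minimal sketch of what a spray-can entry point could look like given the spray and akka dependencies added above; all names, the route, and the port are placeholders.

package io.prediction.tools

import akka.actor.{Actor, ActorSystem, Props}
import akka.io.IO
import spray.can.Http
import spray.routing.HttpService

// Hypothetical sketch only, not the actual (work-in-progress) server.
class ServerActor extends Actor with HttpService {
  def actorRefFactory = context
  val route = path("") {
    get {
      complete("PredictionIO server is running")
    }
  }
  def receive = runRoute(route)
}

object RunServer {
  def main(args: Array[String]): Unit = {
    implicit val system = ActorSystem("pio-server")
    val handler = system.actorOf(Props[ServerActor], "server")
    // Bind the HTTP listener; interface and port are placeholder values.
    IO(Http) ! Http.Bind(handler, interface = "localhost", port = 8000)
  }
}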