Commit

Fixes #13 Add an ATK example
kkasravi committed Sep 27, 2015
1 parent ca59322 commit fc7b226
Showing 4 changed files with 179 additions and 5 deletions.
@@ -0,0 +1,137 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package io.gearpump.examples.atk_pipeline

import akka.actor.ActorSystem
import io.gearpump.cluster.UserConfig
import io.gearpump.cluster.client.ClientContext
import io.gearpump.cluster.main.{ArgumentsParser, CLIOption, ParseResult}
import io.gearpump.streaming.dsl.{TypedDataSource, CollectionDataSource}
import io.gearpump.streaming.dsl.plan.OpTranslator.{HandlerTask, SourceTask}
import io.gearpump.streaming.source.DataSource
import io.gearpump.streaming.{Processor, StreamApplication}
import io.gearpump.util.Graph._
import io.gearpump.util.{AkkaApp, Graph, LogUtil}
import org.slf4j.Logger

trait Scoring extends java.io.Serializable {
  protected val LOG: Logger = LogUtil.getLogger(getClass)
  val tar: String = ""
  val archiveName: String = null
  val modelName: String = null
  val ModelBytesFileName: String = null
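  // Placeholders for the archive metadata that load() is meant to fill in
  // once the ATK tar-extraction utility lands (see the TODO below).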

  def load: Unit = {
    // TODO
    // This will be replaced by an ATK utility object that loads a tar from HDFS and returns a Model.
    LOG.info(s"KMeansDataProcessor load")

    /*
    Try({
      val pt = new Path(tar)
      val uri = new URI(tar)
      val hdfsFileSystem: org.apache.hadoop.fs.FileSystem = org.apache.hadoop.fs.FileSystem.get(uri, new Configuration())
      val tempFilePath = "/tmp/kmeans.tar"
      val local = new Path(tempFilePath)
      hdfsFileSystem.copyToLocalFile(false, pt, local)
      val tmpPath = "/tmp/"
      val myTarFile: TarArchiveInputStream = new TarArchiveInputStream(new FileInputStream(new File(tempFilePath)))
      var entry: TarArchiveEntry = null
      entry = myTarFile.getNextTarEntry
      while (entry != null) {
        // Get the name of the file
        val individualFile: String = entry.getName
        // Get the size of the file and create a byte array of that size
        val content: Array[Byte] = new Array[Byte](entry.getSize.toInt)
        myTarFile.read(content, 0, content.length)
        val outputFile = new FileOutputStream(new File(tmpPath + individualFile))
        IOUtils.write(content, outputFile)
        outputFile.close()
        if (individualFile.contains(".jar")) {
          archiveName = individualFile.substring(0, individualFile.indexOf(".jar"))
        }
        else if (individualFile.contains("modelname")) {
          val s = new String(content)
          modelName = s.replaceAll("\n", "")
        }
        else {
          ModelBytesFileName = tmpPath + individualFile
        }
        entry = myTarFile.getNextTarEntry
      }
      myTarFile.close()
    }) match {
      case Success(a) =>
      case Failure(throwable) =>
        LOG.error(s"Error: $throwable")
    }
    */
  }

  def score(vector: Array[String]): Unit

}

class KMeans(override val tar: String) extends Scoring {

  def score(vector: Array[String]): Unit = {
    LOG.info("KMeansDataProcessor next")
  }

}

object PipeLine extends AkkaApp with ArgumentsParser {
  private val LOG: Logger = LogUtil.getLogger(getClass)

  override val options: Array[(String, CLIOption[Any])] = Array(
    "tar" -> CLIOption[String]("<tar file location in hdfs>", required = false, defaultValue = Some("/user/gearpump/atk/kmeans.tar"))
  )

  def application(config: ParseResult, system: ActorSystem): StreamApplication = {
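    // Config key under which the tar's HDFS location is handed to the scoring task via UserConfig.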
    val TAR = "trustedanalytics.scoring-engine.archive-tar"
    import ATKTask._
    import SourceTask._
    implicit val actorSystem = system
    val tar = config.getString("tar")
    val appConfig = UserConfig.empty.withString(TAR, tar)
    val source = new CollectionDataSource[String](Seq("one", "two", "three"))
    val sourceProcessor = Processor[HandlerTask, DataSource](source, 1, "Source", UserConfig.empty)
    val kmeans = new KMeans(tar)
    val kmeansProcessor = Processor[HandlerTask, Scoring](kmeans, 1, "ATK", appConfig)
    val app = StreamApplication("ATKPipeline", Graph(
      sourceProcessor ~> kmeansProcessor
    ), appConfig)
    app
  }

  override def main(akkaConf: Config, args: Array[String]): Unit = {
    val config = parse(args)
    val context = ClientContext(akkaConf)
    val appId = context.submit(application(config, context.system))
    context.close()
  }

}

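Note: the commented-out loader in the Scoring trait cannot compile as written, because it assigns to the vals archiveName, modelName, and ModelBytesFileName. A minimal compilable sketch of the same extraction logic, with those fields as vars inside a hypothetical TarModelLoader helper (the object name and field placement are assumptions, not part of this commit):

object TarModelLoader {
  import java.io.{File, FileInputStream, FileOutputStream}
  import java.net.URI
  import scala.util.{Failure, Success, Try}
  import org.apache.commons.compress.archivers.tar.TarArchiveInputStream
  import org.apache.commons.io.IOUtils
  import org.apache.hadoop.conf.Configuration
  import org.apache.hadoop.fs.{FileSystem, Path}

  // vars, so the extraction loop below can assign them
  var archiveName: String = _
  var modelName: String = _
  var modelBytesFileName: String = _

  def load(tar: String): Unit =
    Try {
      // Copy the tar from HDFS to a local temp file.
      val hdfs = FileSystem.get(new URI(tar), new Configuration())
      val tempFilePath = "/tmp/kmeans.tar"
      hdfs.copyToLocalFile(false, new Path(tar), new Path(tempFilePath))

      // Unpack every entry under /tmp and record the pieces of interest.
      val tarIn = new TarArchiveInputStream(new FileInputStream(new File(tempFilePath)))
      var entry = tarIn.getNextTarEntry
      while (entry != null) {
        val name = entry.getName
        val content = new Array[Byte](entry.getSize.toInt)
        // Like the original, this assumes one read fills the whole buffer.
        tarIn.read(content, 0, content.length)
        val out = new FileOutputStream(new File("/tmp/" + name))
        IOUtils.write(content, out)
        out.close()
        if (name.contains(".jar")) archiveName = name.substring(0, name.indexOf(".jar"))
        else if (name.contains("modelname")) modelName = new String(content).replaceAll("\n", "")
        else modelBytesFileName = "/tmp/" + name
        entry = tarIn.getNextTarEntry
      }
      tarIn.close()
    } match {
      case Success(_) =>
      case Failure(t) => Console.err.println(s"Error: $t")
    }
}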
@@ -23,8 +23,9 @@ import com.typesafe.config.ConfigFactory
import io.gearpump.cluster.UserConfig
import io.gearpump.cluster.client.ClientContext
import io.gearpump.cluster.main.{ArgumentsParser, CLIOption, ParseResult}
+import io.gearpump.streaming.dsl.plan.OpTranslator.{HandlerTask, SourceTask}
import io.gearpump.streaming.kafka.{KafkaSource, KafkaStorageFactory}
-import io.gearpump.streaming.source.DataSourceProcessor
+import io.gearpump.streaming.source.DataSource
import io.gearpump.streaming.{Processor, StreamApplication}
import io.gearpump.util.Graph._
import io.gearpump.util.{AkkaApp, Graph, LogUtil}
@@ -44,6 +45,7 @@ object PipeLine extends AkkaApp with ArgumentsParser {
  )

  def application(config: ParseResult, system: ActorSystem): StreamApplication = {
+    import SourceTask._
    implicit val actorSystem = system
    import Messages._
    val pipelineString =
@@ -75,7 +77,7 @@ object PipeLine extends AkkaApp with ArgumentsParser {

    val offsetStorageFactory = new KafkaStorageFactory(zookeepers, brokers)
    val source = new KafkaSource(topic, zookeepers, offsetStorageFactory)
-    val kafka = DataSourceProcessor(source, 1)
+    val kafka = Processor[HandlerTask, DataSource](source, 1, "KafkaSource", UserConfig.empty)
    val cpuProcessor = Processor[CpuProcessor](processors, "CpuProcessor")
    val memoryProcessor = Processor[MemoryProcessor](processors, "MemoryProcessor")
    val cpuPersistor = Processor[CpuPersistor](persistors, "CpuPersistor")
@@ -23,8 +23,9 @@ import io.gearpump.cluster.UserConfig
import io.gearpump.cluster.client.ClientContext
import io.gearpump.cluster.main.{ArgumentsParser, CLIOption, ParseResult}
import io.gearpump.partitioner.ShufflePartitioner
+import io.gearpump.streaming.dsl.plan.OpTranslator.{HandlerTask, SourceTask}
import io.gearpump.streaming.kafka.{KafkaSource, KafkaStorageFactory}
-import io.gearpump.streaming.source.DataSourceProcessor
+import io.gearpump.streaming.source.DataSource
import io.gearpump.streaming.{Processor, StreamApplication}
import io.gearpump.util.Graph._
import io.gearpump.util.{AkkaApp, Graph, LogUtil}
@@ -54,6 +55,7 @@ object PipeLine extends AkkaApp with ArgumentsParser {
  )

  def application(config: ParseResult, system: ActorSystem): StreamApplication = {
+    import SourceTask._
    implicit val actorSystem = system
    val readerNum = config.getInt("reader")
    val scorerNum = config.getInt("scorer")
@@ -67,7 +69,7 @@ object PipeLine extends AkkaApp with ArgumentsParser {

    val partitioner = new ShufflePartitioner()
    val source = new KafkaSource(topic, zookeepers, offsetStorageFactory)
-    val reader = DataSourceProcessor(source, readerNum)
+    val reader = Processor[HandlerTask, DataSource](source, readerNum, "KafkaSource", UserConfig.empty)
    val scorer = Processor[ScoringTask](scorerNum)
    val writer = Processor[ParquetWriterTask](writerNum)
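Both Kafka pipelines change in the same two ways: an import SourceTask._ is added inside application (apparently to bring the companion-object implicits the new factory signature needs into scope), and the removed DataSourceProcessor helper is replaced by an explicit Processor over HandlerTask and DataSource. The before/after shape, as it appears in the hunks above:

// Before this commit: only the source and a parallelism hint.
val reader = DataSourceProcessor(source, readerNum)

// After: an explicit task type, task name, and per-processor UserConfig.
// The new ATK pipeline uses the same factory to pass its tar location along.
import SourceTask._
val reader = Processor[HandlerTask, DataSource](source, readerNum, "KafkaSource", UserConfig.empty)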
35 changes: 34 additions & 1 deletion project/Build.scala
@@ -131,7 +131,7 @@ object Build extends sbt.Build {
        new File(packagePath).renameTo(new File(target))
      }
    )
-  ).aggregate(kafka_hdfs_pipeline, kafka_hbase_pipeline)
+  ).aggregate(kafka_hdfs_pipeline, kafka_hbase_pipeline, atk_pipeline)

  lazy val kafka_hdfs_pipeline = Project(
    id = "gearpump-kafka-hdfs-pipeline",
@@ -266,4 +266,37 @@ object Build extends sbt.Build {
      )
    )

+  lazy val atk_pipeline = Project(
+    id = "gearpump-atk-pipeline",
+    base = file("atk-pipeline"),
+    settings = commonSettings ++ myAssemblySettings ++
+      Seq(
+        mergeStrategy in assembly := {
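+          // Discard duplicate slf4j-api pom files so assembly deduplication
+          // does not fail on them; defer everything else to the default strategy.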
+          case PathList("META-INF", "maven", "org.slf4j", "slf4j-api", ps) if ps.startsWith("pom") => MergeStrategy.discard
+          case x =>
+            val oldStrategy = (mergeStrategy in assembly).value
+            oldStrategy(x)
+        },
+        libraryDependencies ++= Seq(
+          "com.lihaoyi" %% "upickle" % upickleVersion,
+          "com.github.intel-hadoop" %% "gearpump-core" % gearpumpVersion % "provided"
+            exclude("org.fusesource.leveldbjni", "leveldbjni-all"),
+          "com.github.intel-hadoop" %% "gearpump-core" % gearpumpVersion % "test" classifier "tests",
+          "com.github.intel-hadoop" %% "gearpump-streaming" % gearpumpVersion % "provided"
+            exclude("org.fusesource.leveldbjni", "leveldbjni-all"),
+          "com.github.intel-hadoop" %% "gearpump-streaming" % gearpumpVersion % "test" classifier "tests",
+          "com.github.intel-hadoop" %% "gearpump-external-kafka" % gearpumpVersion
+            exclude("org.fusesource.leveldbjni", "leveldbjni-all"),
+          "org.scala-lang.modules" %% "scala-parser-combinators" % "1.0.2",
+          "com.julianpeeters" % "avro-scala-macro-annotations_2.11" % "0.9.0",
+          "com.typesafe.akka" %% "akka-testkit" % akkaVersion % "test",
+          "org.scalatest" %% "scalatest" % scalaTestVersion % "test",
+          "org.scalacheck" %% "scalacheck" % scalaCheckVersion % "test",
+          "org.mockito" % "mockito-core" % mockitoVersion % "test",
+          "junit" % "junit" % junitVersion % "test"
+        ),
+        mainClass in (Compile, packageBin) := Some("io.gearpump.examples.atk_pipeline.PipeLine"),
+        target in assembly := baseDirectory.value.getParentFile / "target" / scalaVersionMajor
+      )
+  )
}
