
Commit 3e31ec5

Add some benchmarks
1 parent 3ec753b commit 3e31ec5

File tree

4 files changed: +214 -0 lines changed


benchmark/build.sbt

+28
@@ -0,0 +1,28 @@
libraryDependencies ++= {
  import Ordering.Implicits._
  if (VersionNumber(scalaVersion.value).numbers >= Seq(2, 12)) {
    Nil
  } else {
    Seq(
      "com.thoughtworks.deeplearning.etl" %% "cifar100" % "0.1.1-SNAPSHOT",
      "ch.qos.logback" % "logback-classic" % "1.2.3" % Optional,
      "org.nd4j" %% "nd4s" % "0.8.0",
      "org.nd4j" % "nd4j-api" % "0.8.0",
      "org.nd4j" % "nd4j-native-platform" % "0.8.0" % Optional
    )
  }
}

fork in Test := true

enablePlugins(JmhPlugin)

publishArtifact := false

addCompilerPlugin("com.thoughtworks.dsl" %% "compilerplugins-bangnotation" % "1.0.0-RC10")

addCompilerPlugin("com.thoughtworks.dsl" %% "compilerplugins-reseteverywhere" % "1.0.0-RC10")

libraryDependencies += "com.thoughtworks.dsl" %% "domains-scalaz" % "1.0.0-RC10"

addCompilerPlugin("com.thoughtworks.import" %% "import" % "2.0.2")
@@ -0,0 +1,181 @@
package com.thoughtworks.deeplearning.benchmark

import java.util.concurrent.{ExecutorService, Executors}

import com.thoughtworks.deeplearning.DeepLearning
import com.thoughtworks.deeplearning.etl.Cifar100
import com.thoughtworks.deeplearning.etl.Cifar100.Batch
import com.thoughtworks.deeplearning.plugins.Builtins
import com.thoughtworks.feature.Factory
import org.openjdk.jmh.annotations._
import com.thoughtworks.future._
import org.nd4j.linalg.api.ndarray.INDArray
import org.nd4j.linalg.factory.Nd4j

import scala.concurrent.{ExecutionContext, ExecutionContextExecutorService}

/**
  * @author 杨博 (Yang Bo)
  */
object benchmark {

  import $exec.`https://gist.github.com/Atry/1fb0608c655e3233e68b27ba99515f16/raw/39ba06ee597839d618f2fcfe9526744c60f2f70a/FixedLearningRate.sc`

  trait LayerOutput {
    def numberOfFeatures: Int
    type Output
    def output: Output
    def typeClassInstance: DeepLearning.Aux[Output, INDArray, INDArray]
  }
  object LayerOutput {
    def input(indArray: INDArray): LayerOutput = new LayerOutput {
      def numberOfFeatures: Int = indArray.shape().apply(1)

      type Output = INDArray
      def output = indArray

      def typeClassInstance: DeepLearning.Aux[INDArray, INDArray, INDArray] = ???
    }
  }

  @Threads(value = 1)
  @State(Scope.Benchmark)
  class FourLayer {

    @Param(Array("4"))
    protected var batchSize: Int = _

    @Param(Array("1", "2", "4"))
    protected var sizeOfThreadPool: Int = _

    @Param(Array("16", "32", "64"))
    protected var numberOfHiddenFeatures: Int = _

    @Param(Array("16", "8"))
    protected var numberOfBranches: Int = _

    private implicit var executionContext: ExecutionContextExecutorService = _

    private lazy val batches = {
      val cifar100: Cifar100 = Cifar100.load().blockingAwait
      Iterator.continually(cifar100.epochByCoarseClass(batchSize)).flatten
    }

    class Model {
      val hyperparameters = Factory[Builtins with FixedLearningRate].newInstance(learningRate = 0.0001)

      import hyperparameters._, implicits._

      object CoarseFeatures extends (INDArray => INDArrayLayer) {

        val branches = Seq.fill(numberOfBranches)(new (INDArray => INDArrayLayer) {
          object Dense1 extends (INDArray => INDArrayLayer) {
            val weight = INDArrayWeight(Nd4j.randn(Cifar100.NumberOfPixelsPerSample, numberOfHiddenFeatures))
            val bias = INDArrayWeight(Nd4j.randn(1, numberOfHiddenFeatures))

            def apply(input: INDArray) = {
              max(input dot weight + bias, 0.0)
            }
          }

          val weight = INDArrayWeight(Nd4j.randn(numberOfHiddenFeatures, numberOfHiddenFeatures))
          val bias = INDArrayWeight(Nd4j.randn(1, numberOfHiddenFeatures))

          def apply(input: INDArray) = {
            max(Dense1(input) dot weight + bias, 0.0)
          }
        })

        def apply(input: INDArray) = {
          branches.map(_.apply(input)).reduce(_ + _)
        }
      }

      object CoarseProbabilityModel {
        val weight = INDArrayWeight(Nd4j.randn(numberOfHiddenFeatures, Cifar100.NumberOfCoarseClasses))
        val bias = INDArrayWeight(Nd4j.randn(1, Cifar100.NumberOfCoarseClasses))

        def apply(input: INDArrayLayer) = {
          val scores = input dot weight + bias

          val expScores = exp(scores)
          expScores / expScores.sum(1)
        }
      }

      val fineProbabilityModel = Seq.fill(Cifar100.NumberOfCoarseClasses)(new (INDArrayLayer => INDArrayLayer) {
        object Dense2 extends (INDArrayLayer => INDArrayLayer) {

          object Dense1 extends (INDArrayLayer => INDArrayLayer) {
            val weight = INDArrayWeight(Nd4j.randn(numberOfHiddenFeatures, numberOfHiddenFeatures))
            val bias = INDArrayWeight(Nd4j.randn(1, numberOfHiddenFeatures))

            def apply(coarseFeatures: INDArrayLayer) = {
              max(coarseFeatures dot weight + bias, 0.0)
            }
          }

          val weight = INDArrayWeight(Nd4j.randn(numberOfHiddenFeatures, numberOfHiddenFeatures))
          val bias = INDArrayWeight(Nd4j.randn(1, numberOfHiddenFeatures))

          def apply(coarseFeatures: INDArrayLayer) = {
            max(Dense1(coarseFeatures) dot weight + bias, 0.0)
          }
        }

        val weight = INDArrayWeight(Nd4j.randn(numberOfHiddenFeatures, Cifar100.NumberOfFineClassesPerCoarseClass))
        val bias = INDArrayWeight(Nd4j.randn(1, Cifar100.NumberOfFineClassesPerCoarseClass))

        def apply(coarseFeatures: INDArrayLayer) = {
          val scores = Dense2(coarseFeatures) dot weight + bias

          val expScores = exp(scores)
          expScores / expScores.sum(1)
        }
      })

      def loss(coarseLabel: Int, batch: Batch): DoubleLayer = {
        def crossEntropy(prediction: INDArrayLayer, expectOutput: INDArray): DoubleLayer = {
          -(hyperparameters.log(prediction) * expectOutput).mean
        }

        val Array(batchSize, width, height, channels) = batch.pixels.shape()
        val coarseFeatures = CoarseFeatures(batch.pixels.reshape(batchSize, width * height * channels))
        val coarseProbabilities = CoarseProbabilityModel(coarseFeatures)
        val fineProbabilities = fineProbabilityModel(coarseLabel)(coarseFeatures)

        crossEntropy(coarseProbabilities, batch.coarseClasses) + crossEntropy(fineProbabilities, batch.localFineClasses)
      }

      def train(coarseLabel: Int, batch: Batch) = {
        loss(coarseLabel, batch).train
      }

    }

    private var model: Model = null

    @Setup
    final def setup(): Unit = {
      executionContext = ExecutionContext.fromExecutorService(Executors.newFixedThreadPool(sizeOfThreadPool))
      model = new Model
    }

    @TearDown
    final def tearDown(): Unit = {
      model = null
      executionContext.shutdown()
      executionContext = null
    }

    @Benchmark
    final def deepLearningDotScala(): Double = {
      val (coarseClass, batch) = batches.synchronized {
        batches.next()
      }
      model.train(coarseClass, batch).blockingAwait
    }

  }

}
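
The loss method combines two softmax cross-entropies: one over the coarse CIFAR-100 classes and one over the fine classes inside the sampled coarse class. As a reference for what the layer expressions compute, here is a minimal framework-free sketch of the same softmax and cross-entropy for a single sample, in plain Scala (the helper names are illustrative and not part of this commit):

// Illustrative only: the math that CoarseProbabilityModel and crossEntropy
// express with differentiable layers, written eagerly for one sample.
def softmax(scores: Array[Double]): Array[Double] = {
  val expScores = scores.map(math.exp)
  val total = expScores.sum
  expScores.map(_ / total) // per-sample class probabilities sum to 1
}

def crossEntropy(prediction: Array[Double], expectedOneHot: Array[Double]): Double = {
  // mean of -(log(p) * expected), matching -(log(prediction) * expectOutput).mean above
  val products = prediction.zip(expectedOneHot).map { case (p, e) => math.log(p) * e }
  -products.sum / products.length
}

Each call to the deepLearningDotScala benchmark method draws one batch, runs a forward and a backward pass through this model on the fixed-size thread pool, and blocks until the asynchronous training future completes, so each benchmark invocation corresponds to one complete training step.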

build.sbt

+3
@@ -144,6 +144,9 @@ lazy val `plugins-Builtins` =
     `plugins-CumulativeINDArrayLayers`,
     DeepLearning % "test->test"
   )
+
+lazy val benchmark = project.dependsOn(`plugins-Builtins`)
+
 publishArtifact := false

 lazy val unidoc =

project/plugins.sbt

+2
@@ -1,3 +1,5 @@
 addSbtPlugin("com.thoughtworks.sbt-best-practice" % "sbt-best-practice" % "2.5.0")

 addSbtPlugin("com.thoughtworks.example" % "sbt-example" % "2.0.2")
+
+addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.3.4")
