Commit 3024524

Rename BufferedTensor to NonInlineTensor
1 parent a5fe637 commit 3024524

File tree

2 files changed (+22 −22)


Tensors/src/main/scala/com/thoughtworks/compute/Tensors.scala

Lines changed: 17 additions & 17 deletions
@@ -422,12 +422,12 @@ trait Tensors extends OpenCL {
     }
 
     def apply[A](elements: A, padding: Float = 0.0f)(
-        implicit tensorBuilder: TensorBuilder.Aux[A, Float]): BufferedTensor = {
+        implicit tensorBuilder: TensorBuilder.Aux[A, Float]): NonInlineTensor = {
       val padding0 = padding
       new {
         val shape: Array[Int] = tensorBuilder.shape(elements).toArray
         val padding: Float = padding0
-      } with BufferedTensor {
+      } with NonInlineTensor {
         private[compute] val doBuffer = {
           Do(TryT(ResourceT(UnitContinuation.delay {
             val data = tensorBuilder.flatten(elements).toArray
@@ -454,13 +454,13 @@
       } with InlineTensor
     }
 
-    def random(shape: Array[Int], seed: Int = Random.nextInt(), padding: Float = 0.0f): BufferedTensor = {
+    def random(shape: Array[Int], seed: Int = Random.nextInt(), padding: Float = 0.0f): NonInlineTensor = {
       val shape0 = shape
       val padding0 = padding
       new {
         val padding = padding0
         val shape = shape0
-      } with BufferedTensor {
+      } with NonInlineTensor {
         private[compute] val doBuffer: Do[PendingBuffer[Float]] = {
           val size = shape.product
           allocateBuffer[Float](size).flatMap { buffer =>
@@ -475,13 +475,13 @@
     }
 
     /** Generate random numbers in normal distribution. */
-    def randomNormal(shape: Array[Int], seed: Int = Random.nextInt(), padding: Float = 0.0f): BufferedTensor = {
+    def randomNormal(shape: Array[Int], seed: Int = Random.nextInt(), padding: Float = 0.0f): NonInlineTensor = {
       val shape0 = shape
       val padding0 = padding
       new {
         val padding = padding0
         val shape = shape0
-      } with BufferedTensor {
+      } with NonInlineTensor {
         private[compute] val doBuffer: Do[PendingBuffer[Float]] = {
           val size = shape.product
           val paddingSize = if (size % 2 == 1) {
@@ -541,7 +541,7 @@
       }
     }
 
-    def join(tensors0: Seq[Tensor]): BufferedTensor = {
+    def join(tensors0: Seq[Tensor]): NonInlineTensor = {
       def force[A](seq: Seq[A]) = {
         seq match {
           case seqView: SeqView[A, _] @unchecked =>
@@ -556,7 +556,7 @@
       new {
         val shape = headTensor.shape :+ tensors.length
         val padding: Float = headTensor.padding
-      } with BufferedTensor {
+      } with NonInlineTensor {
         private[compute] val doBuffer = {
           val elements = tensors.map(_.closure)
           enqueueClosure(trees.tuple.join(elements: _*), headTensor.shape).asInstanceOf[Do[PendingBuffer[Float]]]
@@ -591,9 +591,9 @@
     /**
       * @group delayed
       */
-    def notInline: BufferedTensor
+    def nonInline: NonInlineTensor
 
-    private def reduce(programs: MonoidPrograms): BufferedTensor = {
+    private def reduce(programs: MonoidPrograms): NonInlineTensor = {
       new {
         val padding: Float = thisTensor.padding
 
@@ -683,7 +683,7 @@
             }
           }
         }.shared
-      } with BufferedTensor {
+      } with NonInlineTensor {
         def shape: Array[Int] = Tensors.ScalarShape
       }
     }
@@ -771,15 +771,15 @@
     /**
       * @group delayed
       */
-    def reshape(newShape: Array[Int]): BufferedTensor = {
+    def reshape(newShape: Array[Int]): NonInlineTensor = {
       if (newShape.product != shape.product) {
         throw new IllegalArgumentException
       }
       new {
         val padding: Float = thisTensor.padding
         val shape: Array[Int] = newShape
         private[compute] val doBuffer: Do[PendingBuffer[Float]] = thisTensor.doBuffer
-      } with BufferedTensor
+      } with NonInlineTensor
     }
 
     /**
@@ -1149,12 +1149,12 @@
         enqueueClosure(closure, shape)
       }.shared
 
-    def notInline: BufferedTensor =
+    def nonInline: NonInlineTensor =
       new {
         val padding: Float = thisInlineTensor.padding
         private[compute] val doBuffer: Do[PendingBuffer[Float]] = thisInlineTensor.doBuffer
         val shape: Array[Int] = thisInlineTensor.shape
-      } with BufferedTensor
+      } with NonInlineTensor
   }
 
   trait TransformedTensor extends InlineTensor {
@@ -1174,9 +1174,9 @@
 
   }
 
-  trait BufferedTensor extends Tensor {
+  trait NonInlineTensor extends Tensor {
 
-    def notInline: this.type = this
+    def nonInline: this.type = this
 
     @transient
     protected lazy val closure = {
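The change in Tensors.scala is a pure rename: the trait BufferedTensor becomes NonInlineTensor, and the Tensor method notInline becomes nonInline (still returning this for tensors that are already non-inline). A minimal caller-side migration sketch follows; only Tensor, NonInlineTensor, randomNormal, and nonInline come from the diff above, and everything else (the method name, the shape, the `tensors` parameter) is invented for illustration:

import com.thoughtworks.compute.Tensors

// Hypothetical migration example; `tensors` stands for any concrete
// instance mixing in the Tensors trait (e.g. an OpenCL-backed singleton).
def migrated(tensors: Tensors): Unit = {
  import tensors._
  // Before this commit:
  //   val w: BufferedTensor = Tensor.randomNormal(Array(4, 4))
  //   val w2 = w.notInline
  // After this commit:
  val w: NonInlineTensor = Tensor.randomNormal(Array(4, 4))
  val w2: NonInlineTensor = w.nonInline // identity on a NonInlineTensor
}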

benchmarks/src/jmh/scala/com/thoughtworks/compute/benchmarks.scala

Lines changed: 5 additions & 5 deletions
@@ -105,9 +105,9 @@ object benchmarks {
       }
 
       def doBenchmark(): Do[() => Array[Float]] = {
-        val weight: BufferedTensor = Tensor.randomNormal(Array(inputDepth, outputDepth))
+        val weight: NonInlineTensor = Tensor.randomNormal(Array(inputDepth, outputDepth))
 
-        val input: BufferedTensor = Tensor.randomNormal(Array(batchSize, inputDepth))
+        val input: NonInlineTensor = Tensor.randomNormal(Array(batchSize, inputDepth))
 
         weight.doCache.flatMap { weight =>
           input.doCache.map { input =>
@@ -233,7 +233,7 @@
     trait Benchmarks extends BenchmarkTensors {
 
       def doBenchmark(): Do[() => Float] = {
-        val input: BufferedTensor = Tensor.randomNormal(Array.fill(numberOfDimensions)(size))
+        val input: NonInlineTensor = Tensor.randomNormal(Array.fill(numberOfDimensions)(size))
 
         input.doCache.map { input =>
           { () =>
@@ -365,7 +365,7 @@
 
     trait Benchmarks extends BenchmarkTensors {
 
-      final case class ConvolutionalLayer(weight: BufferedTensor, bias: BufferedTensor) {
+      final case class ConvolutionalLayer(weight: NonInlineTensor, bias: NonInlineTensor) {
         def forward(input: Tensor): Tensor = {
           convolute(input, weight, bias)
         }
@@ -467,7 +467,7 @@
       }
 
       def doBenchmark(): Do[() => Array[Float]] = {
-        val input: BufferedTensor = Tensor.randomNormal(Array(batchSize, imageHeight, imageWidth, depth))
+        val input: NonInlineTensor = Tensor.randomNormal(Array(batchSize, imageHeight, imageWidth, depth))
         val layers = (for (i <- (0 until numberOfLayers).view) yield {
           ConvolutionalLayer(weight = Tensor.randomNormal(Array(kernelHeight, kernelWidth, depth, depth)),
                              bias = Tensor.randomNormal(Array(depth)))
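The benchmark changes are annotation-only: each cached random tensor is now declared as a NonInlineTensor. The context lines also show the pattern these benchmarks depend on: doCache materializes each random tensor's buffer once, and the nested flatMap/map keeps both cached tensors in scope for the measured closure. A hedged sketch of that shape, assuming the scalaz syntax imports the benchmark file already relies on; the shapes and the placeholder workload are invented:

import com.thoughtworks.compute.Tensors
import com.thoughtworks.raii.asynchronous.Do
import scalaz.syntax.all._ // assumed: map/flatMap syntax on Do, as in benchmarks.scala

// Sketch only: mirrors the doBenchmark() shape from the diff above.
def doBenchmarkSketch(tensors: Tensors): Do[() => Array[Float]] = {
  import tensors._
  val weight: NonInlineTensor = Tensor.randomNormal(Array(8, 8))
  val input: NonInlineTensor = Tensor.randomNormal(Array(8, 8))
  // doCache evaluates each random tensor into a device buffer exactly once;
  // nesting flatMap/map keeps both cached tensors alive for the inner closure.
  weight.doCache.flatMap { weight =>
    input.doCache.map { input =>
      () => Array.fill(64)(0.0f) // placeholder for the real kernel run + readback
    }
  }
}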
