@@ -422,12 +422,12 @@ trait Tensors extends OpenCL {
   }
 
   def apply[A](elements: A, padding: Float = 0.0f)(
-      implicit tensorBuilder: TensorBuilder.Aux[A, Float]): BufferedTensor = {
+      implicit tensorBuilder: TensorBuilder.Aux[A, Float]): NonInlineTensor = {
     val padding0 = padding
     new {
       val shape: Array[Int] = tensorBuilder.shape(elements).toArray
       val padding: Float = padding0
-    } with BufferedTensor {
+    } with NonInlineTensor {
       private[compute] val doBuffer = {
         Do(TryT(ResourceT(UnitContinuation.delay {
           val data = tensorBuilder.flatten(elements).toArray
@@ -454,13 +454,13 @@ trait Tensors extends OpenCL {
     } with InlineTensor
   }
 
-  def random(shape: Array[Int], seed: Int = Random.nextInt(), padding: Float = 0.0f): BufferedTensor = {
+  def random(shape: Array[Int], seed: Int = Random.nextInt(), padding: Float = 0.0f): NonInlineTensor = {
     val shape0 = shape
     val padding0 = padding
     new {
       val padding = padding0
       val shape = shape0
-    } with BufferedTensor {
+    } with NonInlineTensor {
       private[compute] val doBuffer: Do[PendingBuffer[Float]] = {
         val size = shape.product
         allocateBuffer[Float](size).flatMap { buffer =>
@@ -475,13 +475,13 @@ trait Tensors extends OpenCL {
   }
 
   /** Generate random numbers in normal distribution. */
-  def randomNormal(shape: Array[Int], seed: Int = Random.nextInt(), padding: Float = 0.0f): BufferedTensor = {
+  def randomNormal(shape: Array[Int], seed: Int = Random.nextInt(), padding: Float = 0.0f): NonInlineTensor = {
     val shape0 = shape
     val padding0 = padding
     new {
       val padding = padding0
       val shape = shape0
-    } with BufferedTensor {
+    } with NonInlineTensor {
       private[compute] val doBuffer: Do[PendingBuffer[Float]] = {
         val size = shape.product
         val paddingSize = if (size % 2 == 1) {
@@ -541,7 +541,7 @@ trait Tensors extends OpenCL {
     }
   }
 
-  def join(tensors0: Seq[Tensor]): BufferedTensor = {
+  def join(tensors0: Seq[Tensor]): NonInlineTensor = {
     def force[A](seq: Seq[A]) = {
       seq match {
         case seqView: SeqView[A, _] @unchecked =>
@@ -556,7 +556,7 @@ trait Tensors extends OpenCL {
     new {
       val shape = headTensor.shape :+ tensors.length
       val padding: Float = headTensor.padding
-    } with BufferedTensor {
+    } with NonInlineTensor {
       private[compute] val doBuffer = {
         val elements = tensors.map(_.closure)
         enqueueClosure(trees.tuple.join(elements: _*), headTensor.shape).asInstanceOf[Do[PendingBuffer[Float]]]
@@ -591,9 +591,9 @@ trait Tensors extends OpenCL {
     /**
       * @group delayed
       */
-    def notInline: BufferedTensor
+    def nonInline: NonInlineTensor
 
-    private def reduce(programs: MonoidPrograms): BufferedTensor = {
+    private def reduce(programs: MonoidPrograms): NonInlineTensor = {
       new {
         val padding: Float = thisTensor.padding
 
@@ -683,7 +683,7 @@ trait Tensors extends OpenCL {
           }
         }
       }.shared
-    } with BufferedTensor {
+    } with NonInlineTensor {
       def shape: Array[Int] = Tensors.ScalarShape
     }
   }
@@ -771,15 +771,15 @@ trait Tensors extends OpenCL {
     /**
       * @group delayed
       */
-    def reshape(newShape: Array[Int]): BufferedTensor = {
+    def reshape(newShape: Array[Int]): NonInlineTensor = {
       if (newShape.product != shape.product) {
         throw new IllegalArgumentException
       }
       new {
         val padding: Float = thisTensor.padding
         val shape: Array[Int] = newShape
         private[compute] val doBuffer: Do[PendingBuffer[Float]] = thisTensor.doBuffer
-      } with BufferedTensor
+      } with NonInlineTensor
     }
 
     /**
@@ -1149,12 +1149,12 @@ trait Tensors extends OpenCL {
         enqueueClosure(closure, shape)
       }.shared
 
-    def notInline: BufferedTensor =
+    def nonInline: NonInlineTensor =
      new {
        val padding: Float = thisInlineTensor.padding
        private[compute] val doBuffer: Do[PendingBuffer[Float]] = thisInlineTensor.doBuffer
        val shape: Array[Int] = thisInlineTensor.shape
      } with BufferedTensor
   }
 
   trait TransformedTensor extends InlineTensor {
@@ -1174,9 +1174,9 @@ trait Tensors extends OpenCL {
 
   }
 
-  trait BufferedTensor extends Tensor {
+  trait NonInlineTensor extends Tensor {
 
-    def notInline: this.type = this
+    def nonInline: this.type = this
 
     @transient
     protected lazy val closure = {