5 files changed: +114 −57 lines

@@ -537,39 +537,44 @@ extension Tensor: Mergeable where Scalar: TensorFlowFloatingPoint {
}

/// Concatenates two values.
- @differentiable(reverse) public func concatenate<T: Mergeable>(
+ @differentiable(reverse)
+ public func concatenate<T: Mergeable>(
  _ first: T,
  _ second: T
) -> T {
  T.concatenate(first, second)
}

/// Adds two values and produces their sum.
- @differentiable(reverse) public func sum<T: Mergeable>(
+ @differentiable(reverse)
+ public func sum<T: Mergeable>(
  _ first: T,
  _ second: T
) -> T {
  T.sum(first, second)
}

/// Averages two values.
- @differentiable(reverse) public func average<T: Mergeable>(
+ @differentiable(reverse)
+ public func average<T: Mergeable>(
  _ first: T,
  _ second: T
) -> T {
  T.average(first, second)
}

/// Multiplies two values.
- @differentiable(reverse) public func multiply<T: Mergeable>(
+ @differentiable(reverse)
+ public func multiply<T: Mergeable>(
  _ first: T,
  _ second: T
) -> T {
  T.multiply(first, second)
}

/// Stack two values.
- @differentiable(reverse) public func stack<T: Mergeable>(
+ @differentiable(reverse)
+ public func stack<T: Mergeable>(
  _ first: T,
  _ second: T
) -> T {
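These are free generic functions over `Mergeable`, so moving `@differentiable(reverse)` onto its own line is purely a formatting change and does not affect call sites. A minimal usage sketch, assuming `Tensor`'s `Mergeable` conformance shown in this hunk (concatenation along an axis, elementwise `sum`) and the swift-apis `gradient(at:_:in:)` operator:

```swift
import TensorFlow

// Hedged sketch: call the free Mergeable functions and differentiate through them.
let a = Tensor<Float>([1, 2, 3])
let b = Tensor<Float>([4, 5, 6])

print(concatenate(a, b))  // expected: [1.0, 2.0, 3.0, 4.0, 5.0, 6.0]
print(sum(a, b))          // expected: [5.0, 7.0, 9.0]

// The functions are @differentiable(reverse), so gradients flow through them.
let (da, db) = gradient(at: a, b) { x, y in
  sum(x, y).sum()  // merge elementwise, then reduce to a scalar
}
// If Mergeable.sum is elementwise addition (as for Tensor), da == db == [1.0, 1.0, 1.0].
```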
@@ -357,7 +357,8 @@ public func huberLoss<Scalar: TensorFlowFloatingPoint>(
/// Workaround for TF-1030 so that we can use sum as a default argument for reductions.
/// `Tensor<Scalar>.sum()` is the preferred way to do this.
// TODO(TF-1030): Remove this and replace with `{ $0.sum() }`.
- @differentiable(reverse) public func _sum<Scalar: TensorFlowFloatingPoint>(
+ @differentiable(reverse)
+ public func _sum<Scalar: TensorFlowFloatingPoint>(
  _ value: Tensor<Scalar>
) -> Tensor<Scalar> {
  return value.sum()
@@ -366,7 +367,8 @@ public func huberLoss<Scalar: TensorFlowFloatingPoint>(
/// Workaround for TF-1030 so that we can use mean as a default argument for reductions.
/// `Tensor<Scalar>.mean()` is the preferred way to do this.
// TODO(TF-1030): Remove this and replace with `{ $0.mean() }`.
- @differentiable(reverse) public func _mean<Scalar: TensorFlowFloatingPoint>(
+ @differentiable(reverse)
+ public func _mean<Scalar: TensorFlowFloatingPoint>(
  _ value: Tensor<Scalar>
) -> Tensor<Scalar> {
  return value.mean()
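`_sum` and `_mean` exist only because TF-1030 prevents using a closure literal such as `{ $0.sum() }` as a differentiable default argument; the losses in this file take a `reduction` parameter that defaults to one of them. A hedged sketch of that pattern (`myMeanSquaredError` below is illustrative, not the library's actual API):

```swift
import TensorFlow

// Illustrative only: a loss with a differentiable reduction parameter that
// defaults to _mean, mirroring how the library's losses use these workarounds.
@differentiable(reverse)
func myMeanSquaredError<Scalar: TensorFlowFloatingPoint>(
  predicted: Tensor<Scalar>,
  expected: Tensor<Scalar>,
  reduction: @differentiable(reverse) (Tensor<Scalar>) -> Tensor<Scalar> = _mean
) -> Tensor<Scalar> {
  reduction((predicted - expected).squared())
}

let predicted = Tensor<Float>([1, 2, 3])
let expected = Tensor<Float>([2, 2, 2])
print(myMeanSquaredError(predicted: predicted, expected: expected))  // default _mean: 2/3
print(myMeanSquaredError(
  predicted: predicted, expected: expected, reduction: _sum))        // explicit _sum: 2.0
```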
@@ -235,7 +235,8 @@ func logdet<T: TensorFlowFloatingPoint>(_ matrix: Tensor<T>) -> Tensor<T> {
///
/// - Parameter input: A tensor of shape `[..., M, M]`.
@inlinable
- @differentiable(reverse) public func cholesky<T: TensorFlowFloatingPoint>(_ x: Tensor<T>) -> Tensor<T> {
+ @differentiable(reverse)
+ public func cholesky<T: TensorFlowFloatingPoint>(_ x: Tensor<T>) -> Tensor<T> {
  _Raw.cholesky(x)
}
@@ -325,7 +326,8 @@ extension Tensor where Scalar: TensorFlowFloatingPoint {
/// - Precondition: `matrix` must be a tensor with shape `[..., M, M]`.
/// - Precondition: `rhs` must be a tensor with shape `[..., M, K]`.
@inlinable
- @differentiable(reverse) public func triangularSolve<T: TensorFlowFloatingPoint>(
+ @differentiable(reverse)
+ public func triangularSolve<T: TensorFlowFloatingPoint>(
  matrix: Tensor<T>,
  rhs: Tensor<T>,
  lower: Bool = true,
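A hedged sketch combining the two functions touched in this file to solve a symmetric positive-definite system, using only the parameters visible in the diff (`matrix:`, `rhs:`, `lower:`) plus `Tensor.transposed()`:

```swift
import TensorFlow

// Hedged sketch: solve A x = b for symmetric positive-definite A via
// Cholesky factorization followed by two triangular solves.
let A = Tensor<Float>([[4, 2],
                       [2, 3]])  // SPD matrix, shape [2, 2]
let b = Tensor<Float>([[2],
                       [5]])     // shape [2, 1]

let factor = cholesky(A)                                    // A = L·Lᵀ, L lower triangular
let y = triangularSolve(matrix: factor, rhs: b)             // solve L·y = b (lower defaults to true)
let x = triangularSolve(matrix: factor.transposed(), rhs: y, lower: false)  // solve Lᵀ·x = y

// Both functions are @differentiable(reverse), so the whole solve can sit
// inside a gradient computation.
let dA = gradient(at: A) { m -> Tensor<Float> in
  let L = cholesky(m)
  let y = triangularSolve(matrix: L, rhs: b)
  return triangularSolve(matrix: L.transposed(), rhs: y, lower: false).sum()
}
print(x, dA)
```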