Commit c0e7fb4

Remove bogus whitespace changes
1 parent e8fb8a5 commit c0e7fb4

25 files changed: +246 -123 lines changed

Documentation/X10/SUMMARY.md

Lines changed: 2 additions & 1 deletion
@@ -22,7 +22,8 @@ public struct MyModel: Layer {
   public var dense3 = Dense<Float>(inputSize: 4, outputSize: 4)
   public var flatten = Flatten<Float>()
 
-  @differentiable(reverse) public func callAsFunction(_ input: Tensor<Float>) -> Tensor<Float> {
+  @differentiable(reverse)
+  public func callAsFunction(_ input: Tensor<Float>) -> Tensor<Float> {
     let layer1 = dense1(input)
     let layer2 = layer1.reshaped(to: [1, 4])
     let layer3 = dense2(layer2)

README.md

Lines changed: 2 additions & 1 deletion
@@ -34,7 +34,8 @@ struct Model: Layer {
     var layer2 = Dense<Float>(inputSize: hiddenSize, outputSize: hiddenSize, activation: relu)
     var layer3 = Dense<Float>(inputSize: hiddenSize, outputSize: 3, activation: identity)
 
-    @differentiable(reverse) func callAsFunction(_ input: Tensor<Float>) -> Tensor<Float> {
+    @differentiable(reverse)
+    func callAsFunction(_ input: Tensor<Float>) -> Tensor<Float> {
         return input.sequenced(through: layer1, layer2, layer3)
     }
 }
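
For reference, the split attribute compiles identically to the one-line form. A minimal training-loop sketch against this README model (assuming the README's `Model` and `hiddenSize` are in scope, and using toy tensors that are not part of the diff) might look like:

import TensorFlow

var model = Model()
let optimizer = SGD(for: model, learningRate: 0.02)
// Toy batch: 4 features per example, 3 classes (shapes assumed for illustration).
let x = Tensor<Float>(randomNormal: [8, 4])
let y = Tensor<Int32>(zeros: [8])

// Differentiate the loss with respect to the model, then take one SGD step.
let (loss, grad) = valueWithGradient(at: model) { model -> Tensor<Float> in
  softmaxCrossEntropy(logits: model(x), labels: y)
}
optimizer.update(&model, along: grad)
print("loss: \(loss)")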

Sources/TensorFlow/Layer.swift

Lines changed: 14 additions & 7 deletions
@@ -72,7 +72,8 @@ extension Module where Input: TensorProtocol, Output: DifferentiableTensorProtoc
   ///
   /// - Parameter output: The output to the layer.
   /// - Returns: The annotated output.
-  @differentiable(reverse) public func annotated(_ output: Output) -> Output {
+  @differentiable(reverse)
+  public func annotated(_ output: Output) -> Output {
     let annotated = output.annotate("type=\(Self.self)")
     return annotated
   }
@@ -152,7 +153,8 @@ public protocol Layer: Module where Input: Differentiable {
   ///
   /// - Parameter input: The input to the layer.
   /// - Returns: The output.
-  @differentiable(reverse) func callAsFunction(_ input: Input) -> Output
+  @differentiable(reverse)
+  func callAsFunction(_ input: Input) -> Output
 }
 
 
@@ -254,7 +256,8 @@ extension Differentiable {
   ///   - l1: The first layer.
   ///   - l2: The second layer.
   /// - Returns: The final layer's output after sequential application.
-  @differentiable(reverse) public func sequenced<L1: Layer, L2: Layer>(through l1: L1, _ l2: L2) -> L2.Output
+  @differentiable(reverse)
+  public func sequenced<L1: Layer, L2: Layer>(through l1: L1, _ l2: L2) -> L2.Output
   where L1.Input == Self, L1.Output == L2.Input {
     let o1 = l1(self)
     return l2(o1)
@@ -268,7 +271,8 @@ extension Differentiable {
   ///   - l2: The second layer.
   ///   - l3: The third layer.
   /// - Returns: The final layer's output after sequential application.
-  @differentiable(reverse) public func sequenced<L1: Layer, L2: Layer, L3: Layer>(through l1: L1, _ l2: L2, _ l3: L3)
+  @differentiable(reverse)
+  public func sequenced<L1: Layer, L2: Layer, L3: Layer>(through l1: L1, _ l2: L2, _ l3: L3)
     -> L3.Output
   where L1.Input == Self, L1.Output == L2.Input, L2.Output == L3.Input {
     let o1 = l1(self)
@@ -285,7 +289,8 @@ extension Differentiable {
   ///   - l3: The third layer.
   ///   - l4: The fourth layer.
   /// - Returns: The final layer's output after sequential application.
-  @differentiable(reverse) public func sequenced<L1: Layer, L2: Layer, L3: Layer, L4: Layer>(
+  @differentiable(reverse)
+  public func sequenced<L1: Layer, L2: Layer, L3: Layer, L4: Layer>(
     through l1: L1, _ l2: L2, _ l3: L3, _ l4: L4
   ) -> L4.Output
   where
@@ -308,7 +313,8 @@ extension Differentiable {
   ///   - l4: The third layer.
   ///   - l5: The fifth layer.
   /// - Returns: The final layer's output after sequential application.
-  @differentiable(reverse) public func sequenced<L1: Layer, L2: Layer, L3: Layer, L4: Layer, L5: Layer>(
+  @differentiable(reverse)
+  public func sequenced<L1: Layer, L2: Layer, L3: Layer, L4: Layer, L5: Layer>(
     through l1: L1, _ l2: L2, _ l3: L3, _ l4: L4, _ l5: L5
   ) -> L5.Output
   where
@@ -333,7 +339,8 @@ extension Differentiable {
   ///   - l5: The fifth layer.
   ///   - l6: The sixth layer.
   /// - Returns: The final layer's output after sequential application.
-  @differentiable(reverse) public func sequenced<L1: Layer, L2: Layer, L3: Layer, L4: Layer, L5: Layer, L6: Layer>(
+  @differentiable(reverse)
+  public func sequenced<L1: Layer, L2: Layer, L3: Layer, L4: Layer, L5: Layer, L6: Layer>(
     through l1: L1, _ l2: L2, _ l3: L3, _ l4: L4, _ l5: L5, _ l6: L6
   ) -> L6.Output
   where
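
The `sequenced(through:)` overloads touched above chain up to six layers in a single differentiable call. A minimal usage sketch (layer sizes are illustrative, not from the diff):

import TensorFlow

let dense1 = Dense<Float>(inputSize: 4, outputSize: 8, activation: relu)
let dense2 = Dense<Float>(inputSize: 8, outputSize: 3, activation: identity)
let input = Tensor<Float>(randomNormal: [2, 4])
// Equivalent to dense2(dense1(input)), but expressed as one chained call.
let output = input.sequenced(through: dense1, dense2)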

Sources/TensorFlow/Layers/Convolutional.swift

Lines changed: 22 additions & 11 deletions
@@ -79,7 +79,8 @@ public struct Conv1D<Scalar: TensorFlowFloatingPoint>: Layer {
   /// - Returns: The output of shape [batch size, output width, output channel count].
   ///
   /// - Note: Padding size equals zero when using `.valid`.
-  @differentiable(reverse) public func callAsFunction(_ input: Tensor<Scalar>) -> Tensor<Scalar> {
+  @differentiable(reverse)
+  public func callAsFunction(_ input: Tensor<Scalar>) -> Tensor<Scalar> {
     let conv = conv1D(
       input,
       filter: filter,
@@ -200,7 +201,8 @@ public struct Conv2D<Scalar: TensorFlowFloatingPoint>: Layer {
   ///   [batch count, output height, output width, output channel count].
   ///
   /// - Note: Padding size equals zero when using `.valid`.
-  @differentiable(reverse) public func callAsFunction(_ input: Tensor<Scalar>) -> Tensor<Scalar> {
+  @differentiable(reverse)
+  public func callAsFunction(_ input: Tensor<Scalar>) -> Tensor<Scalar> {
     let conv = conv2D(
       input,
       filter: filter,
@@ -434,7 +436,8 @@ public struct TransposedConv1D<Scalar: TensorFlowFloatingPoint>: Layer {
   ///
   /// - Parameter input: The input to the layer.
   /// - Returns: The output.
-  @differentiable(reverse) public func callAsFunction(_ input: Tensor<Scalar>) -> Tensor<Scalar> {
+  @differentiable(reverse)
+  public func callAsFunction(_ input: Tensor<Scalar>) -> Tensor<Scalar> {
     let batchSize = input.shape[0]
     let w = (input.shape[1] - (1 * paddingIndex)) * stride + (filter.shape[0] * paddingIndex)
     let c = filter.shape[2]
@@ -535,7 +538,8 @@ public struct TransposedConv2D<Scalar: TensorFlowFloatingPoint>: Layer {
   ///
   /// - Parameter input: The input to the layer.
   /// - Returns: The output.
-  @differentiable(reverse) public func callAsFunction(_ input: Tensor<Scalar>) -> Tensor<Scalar> {
+  @differentiable(reverse)
+  public func callAsFunction(_ input: Tensor<Scalar>) -> Tensor<Scalar> {
     let batchSize = input.shape[0]
     let h = (input.shape[1] - (1 * paddingIndex)) * strides.0 + (filter.shape[0] * paddingIndex)
     let w = (input.shape[2] - (1 * paddingIndex)) * strides.1 + (filter.shape[1] * paddingIndex)
@@ -637,7 +641,8 @@ public struct TransposedConv3D<Scalar: TensorFlowFloatingPoint>: Layer {
   ///
   /// - Parameter input: The input to the layer.
   /// - Returns: The output.
-  @differentiable(reverse) public func callAsFunction(_ input: Tensor<Scalar>) -> Tensor<Scalar> {
+  @differentiable(reverse)
+  public func callAsFunction(_ input: Tensor<Scalar>) -> Tensor<Scalar> {
     let batchSize = input.shape[0]
     let w = (input.shape[1] - (1 * paddingIndex)) * strides.0 + (filter.shape[0] * paddingIndex)
     let h = (input.shape[2] - (1 * paddingIndex)) * strides.1 + (filter.shape[1] * paddingIndex)
@@ -744,7 +749,8 @@ public struct DepthwiseConv2D<Scalar: TensorFlowFloatingPoint>: Layer {
   ///   [batch count, input height, input width, input channel count]
   /// - Returns: The output of shape,
   ///   [batch count, output height, output width, input channel count * channel multiplier]
-  @differentiable(reverse) public func callAsFunction(_ input: Tensor<Scalar>) -> Tensor<Scalar> {
+  @differentiable(reverse)
+  public func callAsFunction(_ input: Tensor<Scalar>) -> Tensor<Scalar> {
     let conv = depthwiseConv2D(
       input,
       filter: filter,
@@ -817,7 +823,8 @@ public struct ZeroPadding1D<Scalar: TensorFlowFloatingPoint>: ParameterlessLayer
   ///
   /// - Parameter input: The input to the layer.
   /// - Returns: The output.
-  @differentiable(reverse) public func callAsFunction(_ input: Tensor<Scalar>) -> Tensor<Scalar> {
+  @differentiable(reverse)
+  public func callAsFunction(_ input: Tensor<Scalar>) -> Tensor<Scalar> {
     input.padded(forSizes: [(0, 0), padding, (0, 0)])
   }
 }
@@ -850,7 +857,8 @@ public struct ZeroPadding2D<Scalar: TensorFlowFloatingPoint>: ParameterlessLayer
   ///
   /// - Parameter input: The input to the layer.
   /// - Returns: The output.
-  @differentiable(reverse) public func callAsFunction(_ input: Tensor<Scalar>) -> Tensor<Scalar> {
+  @differentiable(reverse)
+  public func callAsFunction(_ input: Tensor<Scalar>) -> Tensor<Scalar> {
     input.padded(forSizes: [(0, 0), padding.0, padding.1, (0, 0)])
   }
 }
@@ -883,7 +891,8 @@ public struct ZeroPadding3D<Scalar: TensorFlowFloatingPoint>: ParameterlessLayer
   ///
   /// - Parameter input: The input to the layer.
   /// - Returns: The output.
-  @differentiable(reverse) public func callAsFunction(_ input: Tensor<Scalar>) -> Tensor<Scalar> {
+  @differentiable(reverse)
+  public func callAsFunction(_ input: Tensor<Scalar>) -> Tensor<Scalar> {
     input.padded(forSizes: [(0, 0), padding.0, padding.1, padding.2, (0, 0)])
   }
 }
@@ -951,7 +960,8 @@ public struct SeparableConv1D<Scalar: TensorFlowFloatingPoint>: Layer {
   ///
   /// - Parameter input: The input to the layer.
   /// - Returns: The output.
-  @differentiable(reverse) public func callAsFunction(_ input: Tensor<Scalar>) -> Tensor<Scalar> {
+  @differentiable(reverse)
+  public func callAsFunction(_ input: Tensor<Scalar>) -> Tensor<Scalar> {
     let depthwise = depthwiseConv2D(
       input.expandingShape(at: 1),
       filter: depthwiseFilter.expandingShape(at: 1),
@@ -1072,7 +1082,8 @@ public struct SeparableConv2D<Scalar: TensorFlowFloatingPoint>: Layer {
   ///
   /// - Parameter input: The input to the layer.
   /// - Returns: The output.
-  @differentiable(reverse) public func callAsFunction(_ input: Tensor<Scalar>) -> Tensor<Scalar> {
+  @differentiable(reverse)
+  public func callAsFunction(_ input: Tensor<Scalar>) -> Tensor<Scalar> {
     let depthwise = depthwiseConv2D(
       input,
       filter: depthwiseFilter,
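
All of these layers take NHWC input, as the doc comments above note. A minimal Conv2D sketch (filter and input shapes are illustrative, not from the diff):

import TensorFlow

// filterShape is (height, width, input channels, output channels).
let conv = Conv2D<Float>(filterShape: (3, 3, 3, 16), strides: (1, 1), padding: .same, activation: relu)
let images = Tensor<Float>(randomNormal: [1, 28, 28, 3])
let features = conv(images)  // shape: [1, 28, 28, 16] with `.same` padding and stride 1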

Sources/TensorFlow/Layers/Core.swift

Lines changed: 6 additions & 3 deletions
@@ -28,7 +28,8 @@ public struct Flatten<Scalar: TensorFlowFloatingPoint>: ParameterlessLayer {
   ///
   /// - Parameter input: The input to the layer.
   /// - Returns: The output.
-  @differentiable(reverse) public func callAsFunction(_ input: Tensor<Scalar>) -> Tensor<Scalar> {
+  @differentiable(reverse)
+  public func callAsFunction(_ input: Tensor<Scalar>) -> Tensor<Scalar> {
     let batchSize = input.shape[0]
     let remaining = input.shape[1..<input.rank].contiguousSize
     return input.reshaped(to: [batchSize, remaining])
@@ -65,7 +66,8 @@ public struct Reshape<Scalar: TensorFlowFloatingPoint>: ParameterlessLayer {
   ///
   /// - Parameter input: The input to the layer.
   /// - Returns: The output.
-  @differentiable(reverse) public func callAsFunction(_ input: Tensor<Scalar>) -> Tensor<Scalar> {
+  @differentiable(reverse)
+  public func callAsFunction(_ input: Tensor<Scalar>) -> Tensor<Scalar> {
     return input.reshaped(toShape: shape)
   }
 }
@@ -81,7 +83,8 @@ public struct Function<Input: Differentiable, Output: Differentiable>: Parameter
     self.body = body
   }
 
-  @differentiable(reverse) public func callAsFunction(_ input: Input) -> Output {
+  @differentiable(reverse)
+  public func callAsFunction(_ input: Input) -> Output {
     body(input)
   }
 }
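
As the bodies above show, Flatten collapses everything after the batch dimension, and Function lifts a differentiable closure into a parameterless layer. A minimal sketch (tensor shapes assumed for illustration):

import TensorFlow

let flatten = Flatten<Float>()
let batch = Tensor<Float>(randomNormal: [2, 3, 4])
let flat = flatten(batch)  // shape: [2, 12]

// Wrap an ad-hoc differentiable transform as a layer.
let scale = Function<Tensor<Float>, Tensor<Float>> { $0 * 2 }
let doubled = scale(flat)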

Sources/TensorFlow/Layers/Dense.swift

Lines changed: 2 additions & 1 deletion
@@ -77,7 +77,8 @@ public struct Dense<Scalar: TensorFlowFloatingPoint>: Layer {
   ///
   /// - Parameter input: The input to the layer.
   /// - Returns: The output.
-  @differentiable(reverse) public func callAsFunction(_ input: Tensor<Scalar>) -> Tensor<Scalar> {
+  @differentiable(reverse)
+  public func callAsFunction(_ input: Tensor<Scalar>) -> Tensor<Scalar> {
     if batched {
       let hidden = matmul(input.expandingShape(at: 1), weight).squeezingShape(at: 1)
       return activation(useBias ? hidden + bias : hidden)
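
The `batched` branch above applies a per-example weight slice via a rank-3 weight. A minimal sketch, assuming Dense's weight/bias initializer and these shapes (both are assumptions, not shown in the diff):

import TensorFlow

// [batch, in, out] weight triggers the batched matmul path above.
let weight = Tensor<Float>(randomNormal: [2, 4, 3])
let bias = Tensor<Float>(zeros: [2, 3])
let dense = Dense<Float>(weight: weight, bias: bias, activation: relu)
let x = Tensor<Float>(randomNormal: [2, 4])
let y = dense(x)  // shape: [2, 3]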

Sources/TensorFlow/Layers/Dropout.swift

Lines changed: 8 additions & 4 deletions
@@ -54,7 +54,8 @@ public struct Dropout<Scalar: TensorFlowFloatingPoint>: ParameterlessLayer {
   ///
   /// - Parameter input: The input to the layer.
   /// - Returns: The output.
-  @differentiable(reverse) public func callAsFunction(_ input: Tensor<Scalar>) -> Tensor<Scalar> {
+  @differentiable(reverse)
+  public func callAsFunction(_ input: Tensor<Scalar>) -> Tensor<Scalar> {
     switch Context.local.learningPhase {
     case .training:
       return input.droppingOut(probability: probability)
@@ -80,7 +81,8 @@ public struct GaussianNoise<Scalar: TensorFlowFloatingPoint>: ParameterlessLayer
   }
 
   /// Returns a tensor obtained by adding noise to `input`
-  @differentiable(reverse) public func callAsFunction(_ input: Tensor<Scalar>) -> Tensor<Scalar> {
+  @differentiable(reverse)
+  public func callAsFunction(_ input: Tensor<Scalar>) -> Tensor<Scalar> {
     switch Context.local.learningPhase {
     case .training:
       let noise = Tensor<Scalar>(
@@ -116,7 +118,8 @@ public struct GaussianDropout<Scalar: TensorFlowFloatingPoint>: ParameterlessLay
   }
 
   /// Applies multiplicative 1-centered Gaussian noise to the input during training only.
-  @differentiable(reverse) public func callAsFunction(_ input: Tensor<Scalar>) -> Tensor<Scalar> {
+  @differentiable(reverse)
+  public func callAsFunction(_ input: Tensor<Scalar>) -> Tensor<Scalar> {
     switch Context.local.learningPhase {
     case .training:
       let noise = Tensor<Scalar>(
@@ -155,7 +158,8 @@ public struct AlphaDropout<Scalar: TensorFlowFloatingPoint>: ParameterlessLayer
   }
 
   /// Adds noise to `input` during training, and is a no-op during inference.
-  @differentiable(reverse) public func callAsFunction(_ input: Tensor<Scalar>) -> Tensor<Scalar> {
+  @differentiable(reverse)
+  public func callAsFunction(_ input: Tensor<Scalar>) -> Tensor<Scalar> {
     switch Context.local.learningPhase {
     case .training:
       let alpha = 1.6732632423543772848170429916717
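
Each of these layers switches on `Context.local.learningPhase`, as the bodies above show, so they are no-ops at inference time. A minimal sketch of toggling the phase (tensor shapes are illustrative):

import TensorFlow

let dropout = Dropout<Float>(probability: 0.5)
let x = Tensor<Float>(ones: [2, 4])

// Active only while the thread-local context is in training mode.
Context.local.learningPhase = .training
let noisy = dropout(x)        // roughly half the units dropped and the rest rescaled

Context.local.learningPhase = .inference
let passthrough = dropout(x)  // identity: returns `x` unchanged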

Sources/TensorFlow/Layers/Morphological.swift

Lines changed: 4 additions & 2 deletions
@@ -69,7 +69,8 @@ public struct Dilation2D<Scalar: TensorFlowFloatingPoint>: Layer {
   ///   [batch count, output height, output width, output channel count].
   ///
   /// - Note: Padding size equals zero when using `.valid`.
-  @differentiable(reverse) public func callAsFunction(_ input: Tensor<Scalar>) -> Tensor<Scalar> {
+  @differentiable(reverse)
+  public func callAsFunction(_ input: Tensor<Scalar>) -> Tensor<Scalar> {
     let dilated = dilation2D(
       input,
       filter: filter,
@@ -138,7 +139,8 @@ public struct Erosion2D<Scalar: TensorFlowFloatingPoint>: Layer {
   ///   [batch count, output height, output width, output channel count].
   ///
   /// - Note: Padding size equals zero when using `.valid`.
-  @differentiable(reverse) public func callAsFunction(_ input: Tensor<Scalar>) -> Tensor<Scalar> {
+  @differentiable(reverse)
+  public func callAsFunction(_ input: Tensor<Scalar>) -> Tensor<Scalar> {
     let eroded = erosion2D(
       input,
       filter: filter,
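
A minimal sketch of the morphological layers, assuming the initializer mirrors the convolutional layers' style with a rank-3 (height, width, depth) filterShape; the exact signature is an assumption, so check the source:

import TensorFlow

// Grayscale dilation: windowed max of input + filter over each 3x3 neighborhood.
let dilate = Dilation2D<Float>(filterShape: (3, 3, 1), strides: (1, 1), padding: .same)
let image = Tensor<Float>(randomNormal: [1, 8, 8, 1])
let dilated = dilate(image)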

Sources/TensorFlow/Layers/Normalization.swift

Lines changed: 6 additions & 3 deletions
@@ -241,7 +241,8 @@ public struct LayerNorm<Scalar: TensorFlowFloatingPoint>: Layer {
   ///
   /// - Parameter input: The input to the layer.
   /// - Returns: The output.
-  @differentiable(reverse) public func callAsFunction(_ input: Tensor<Scalar>) -> Tensor<Scalar> {
+  @differentiable(reverse)
+  public func callAsFunction(_ input: Tensor<Scalar>) -> Tensor<Scalar> {
     // Note: `withoutDerivative(at:)` is currently needed in the following to prevent the resulting
     // tensor for `epsilon` from being scalarized on the backwards pass, breaking X10 traces.
     let epsilon = withoutDerivative(at: input) { Tensor(self.epsilon, deviceAndPrecisionLike: $0) }
@@ -341,7 +342,8 @@ public struct GroupNorm<Scalar: TensorFlowFloatingPoint>: Layer {
   /// - Returns: The output.
   /// - Precondition: The axis cannot be batch axis.
   /// - Precondition: The numbers of features of the input and the offset must be same.
-  @differentiable(reverse) public func callAsFunction(_ input: Tensor<Scalar>) -> Tensor<Scalar> {
+  @differentiable(reverse)
+  public func callAsFunction(_ input: Tensor<Scalar>) -> Tensor<Scalar> {
     let positiveAxis = (input.rank + axis) % input.rank
     precondition(positiveAxis != 0, "The axis cannot be batch axis.")
     precondition(
@@ -446,7 +448,8 @@ public struct InstanceNorm<Scalar: TensorFlowFloatingPoint>: Layer {
   ///
   /// - Parameter input: The input to the layer.
   /// - Returns: The output.
-  @differentiable(reverse) public func callAsFunction(_ input: Tensor<Scalar>) -> Tensor<Scalar> {
+  @differentiable(reverse)
+  public func callAsFunction(_ input: Tensor<Scalar>) -> Tensor<Scalar> {
     delegate(input)
   }
 }
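
A minimal LayerNorm sketch (feature count and shapes are illustrative, not from the diff):

import TensorFlow

// Normalizes over the last axis; featureCount must match that axis's size.
let norm = LayerNorm<Float>(featureCount: 4, axis: -1)
let x = Tensor<Float>(randomNormal: [2, 4])
let normalized = norm(x)  // per-example zero mean / unit variance, then learned scale + offset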
