%install '.package(path: "$cwd/FastaiNotebook_02_fully_connected")' FastaiNotebook_02_fully_connected
Installing packages: .package(path: "/home/ubuntu/fastai_docs/dev_swift/FastaiNotebook_02_fully_connected") FastaiNotebook_02_fully_connected With SwiftPM flags: [] Working in: /tmp/tmpxtwrx9n4 Fetching https://github.com/mxcl/Path.swift Fetching https://github.com/JustHTTP/Just Completed resolution in 2.04s Cloning https://github.com/JustHTTP/Just Resolving https://github.com/JustHTTP/Just at 0.7.1 Cloning https://github.com/mxcl/Path.swift Resolving https://github.com/mxcl/Path.swift at 0.16.2 Compile Swift Module 'Just' (1 sources) Compile Swift Module 'Path' (9 sources) Compile Swift Module 'FastaiNotebook_02_fully_connected' (5 sources) Compile Swift Module 'jupyterInstalledPackages' (1 sources) Linking ./.build/x86_64-unknown-linux/debug/libjupyterInstalledPackages.so Initializing Swift... Loading library... Installation complete!
import FastaiNotebook_02_fully_connected
// export
import Foundation
import TensorFlow
import Path
// Load MNIST via the exported loadMNIST helper, normalize both splits with
// the *training* statistics, and reshape flat vectors into NHWC images.
var (xTrain, yTrain, xValid, yValid) = loadMNIST(path: Path.home/".fastai"/"data"/"mnist_tst")
// Mean/std are computed on the training set only and reused for the
// validation set, so both splits live on the same scale.
let (trainMean, trainStd) = (xTrain.mean(), xTrain.standardDeviation())
xTrain = normalize(xTrain, mean: trainMean, std: trainStd)
xValid = normalize(xValid, mean: trainMean, std: trainStd)
// Reshape from [N, 784] to [N, 28, 28, 1] (NHWC) for the conv layers below.
xTrain = xTrain.reshaped(to: [xTrain.shape[0], 28, 28, 1])
xValid = xValid.reshaped(to: [xValid.shape[0], 28, 28, 1])
print(xTrain.shape, xValid.shape)
TensorShape(dimensions: [60000, 28, 28, 1]) TensorShape(dimensions: [10000, 28, 28, 1])
// Basic dimensions of the problem and the first conv layer to probe.
let images = xTrain.shape[0]
// BUG FIX: the class count must come from the *labels*, not from the
// normalized pixel values — `xValid.max() + 1` would yield a float near
// the largest normalized pixel, not the number of classes.
let classes = yValid.max() + 1
let channels = 32
// First conv layer: 5x5 kernel, 1 input channel, `channels` output filters.
var layer1 = FAConv2D<Float>(filterShape: (5, 5, 1, channels)) //Conv2D(1, nh, 5)
// A small batch of 100 validation images to probe activation statistics.
let x = xValid[0..<100]
x.shape
▿ TensorShape
▿ dimensions : 4 elements
- 0 : 100
- 1 : 28
- 2 : 28
- 3 : 1
extension Tensor where Scalar: TensorFlowFloatingPoint {
    /// Convenience probe used throughout this notebook: the element-wise
    /// mean and standard deviation of the tensor, as a labeled tuple.
    func stats() -> (mean: Tensor, std: Tensor) {
        let m = mean()
        let s = standardDeviation()
        return (mean: m, std: s)
    }
}
// Inspect the layer's default initialization: the filter gets a small-std
// random init, the bias starts at exactly zero (see the recorded output).
(filter: layer1.filter.stats(), bias: layer1.bias.stats())
▿ 2 elements
▿ filter : 2 elements
- mean : -0.00026652578
- std : 0.04966226
▿ bias : 2 elements
- mean : 0.0
- std : 0.0
// Push the batch through the default-initialized layer: the recorded output
// shows std ~0.28, i.e. the signal shrinks relative to the unit-std input.
let result = layer1.applied(to: x)
result.stats()
▿ 2 elements - mean : -0.00050843204 - std : 0.27653128
// export
extension Tensor where Scalar: TensorFlowFloatingPoint {
    /// Kaiming (He) normal initialization, assuming a Leaky ReLU
    /// nonlinearity with slope `negativeSlope`. `shape` is interpreted as
    /// [spatial..., channelsIn, channelsOut] (HWIO for 2-D convs).
    init(kaimingNormal shape: TensorShape, negativeSlope: Double = 1.0) {
        // Leaky-ReLU gain: sqrt(2 / (1 + slope^2)).
        let gainValue = Scalar(sqrt(2.0 / (1.0 + pow(negativeSlope, 2))))
        // Everything except the last two dims (in/out channels) is spatial;
        // their product is the receptive-field size.
        let spatialDims = shape.count - 2
        let receptive = shape[0..<spatialDims].contiguousSize
        let fan = shape[shape.count - 2] * receptive
        let stddev = gainValue / sqrt(Scalar(fan))
        self.init(
            randomNormal: shape,
            stddev: stddev,
            generator: &PhiloxRandomNumberGenerator.global
        )
    }
}
// Re-initialize with Kaiming-normal. negativeSlope 1.0 gives gain 1 (the
// linear case); the recorded output shows the post-conv std is now ~1.1.
layer1.filter = Tensor(kaimingNormal: layer1.filter.shape, negativeSlope: 1.0)
layer1.applied(to: x).stats()
▿ 2 elements - mean : 0.0077752783 - std : 1.1046494
// export
/// Leaky ReLU: identity on the positive part, `negativeSlope * x` on the
/// negative part. The default slope of 0.0 reduces this to a plain ReLU.
func leakyRelu<T: TensorFlowFloatingPoint>(
    _ x: Tensor<T>,
    negativeSlope: Double = 0.0
) -> Tensor<T> {
    let positivePart = max(0, x)
    let negativePart = min(0, x)
    return positivePart + T(negativeSlope) * negativePart
}
// Kaiming-normal with slope 0 (ReLU gain sqrt(2)), then measure stats
// *after* the ReLU: the recorded std is ~0.86 — below 1, since the ReLU
// discards the negative part of the activations.
layer1.filter = Tensor(kaimingNormal: layer1.filter.shape, negativeSlope: 0.0)
leakyRelu(layer1.applied(to: x)).stats()
▿ 2 elements - mean : 0.46370196 - std : 0.8628286
// A fresh conv layer with the library's *default* initialization, for
// comparison: the recorded post-ReLU std (~0.17) is far smaller than with
// the Kaiming variants above.
var layer1 = FAConv2D<Float>(filterShape: (5, 5, 1, channels)) //Conv2D(1, nh, 5)
leakyRelu(layer1.applied(to: x)).stats()
▿ 2 elements - mean : 0.08939172 - std : 0.17101486
// Filter layout is [kH, kW, channelsIn, channelsOut] = [5, 5, 1, 32].
layer1.filter.shape
▿ TensorShape
▿ dimensions : 4 elements
- 0 : 5
- 1 : 5
- 2 : 1
- 3 : 32
// Reproduce the fan computation by hand for this filter.
// All dims except the last two are spatial; their product (5*5 = 25)
// is the receptive-field size.
let spatialDimCount = layer1.filter.rank - 2
let receptiveField = layer1.filter.shape[0..<spatialDimCount].contiguousSize
receptiveField
25
// Dims 2 and 3 of the HWIO filter are input and output channel counts.
let filtersIn = layer1.filter.shape[2]
let filtersOut = layer1.filter.shape[3]
print(filtersIn, filtersOut)
1 32
// fanIn governs the scale of the forward signal, fanOut the backward
// (gradient) signal.
let fanIn = filtersIn * receptiveField
let fanOut = filtersOut * receptiveField
print(fanIn, fanOut)
25 800
/// Initialization gain for a Leaky ReLU with slope `negativeSlope`:
/// sqrt(2 / (1 + slope^2)). Slope 0 gives the ReLU gain sqrt(2);
/// slope 1 gives 1 (the linear case).
func gain(_ negativeSlope: Double) -> Double {
    let denominator = 1.0 + pow(negativeSlope, 2.0)
    return (2.0 / denominator).squareRoot()
}
// Gains for various slopes. Note that gain(sqrt(5)) ≈ 0.577 = 1/sqrt(3),
// which is also the std of a uniform distribution on [-1, 1) — checked
// empirically just below. This is the "why sqrt(5)?" puzzle of the notebook.
(gain(1.0), gain(0.0), gain(0.01), gain(0.1), gain(sqrt(5.0)))
▿ 5 elements - .0 : 1.0 - .1 : 1.4142135623730951 - .2 : 1.4141428569978354 - .3 : 1.4071950894605838 - .4 : 0.5773502691896257
// Empirical std of 10000 samples from U[-1, 1) ...
(2 * Tensor<Float>(randomUniform: [10000]) - 1).standardDeviation()
0.57633716
// ... versus the analytic value 1/sqrt(3).
1.0 / sqrt(3.0)
0.5773502691896258
//export
extension Tensor where Scalar: TensorFlowFloatingPoint {
    /// Kaiming (He) *uniform* initialization, assuming a Leaky ReLU with
    /// slope `negativeSlope`. Samples from U(-bound, bound) where
    /// bound = sqrt(3) * gain / sqrt(fanIn); the sqrt(3) factor compensates
    /// for the 1/sqrt(3) std of a unit uniform distribution.
    init(kaimingUniform shape: TensorShape, negativeSlope: Double = 1.0) {
        // Leaky-ReLU gain: sqrt(2 / (1 + slope^2)).
        let gainValue = Scalar(sqrt(2.0 / (1.0 + pow(negativeSlope, 2))))
        // Last two dims are channelsIn/channelsOut; the rest are spatial.
        let spatialDims = shape.count - 2
        let receptive = shape[0..<spatialDims].contiguousSize
        let fan = shape[shape.count - 2] * receptive
        let bound = sqrt(Scalar(3.0)) * gainValue / sqrt(Scalar(fan))
        // Map U[0, 1) to U[-1, 1) and scale by the bound.
        let unit = Tensor(
            randomUniform: shape,
            generator: &PhiloxRandomNumberGenerator.global
        )
        self = bound * (2 * unit - 1)
    }
}
// Kaiming-uniform with the ReLU gain (slope 0): the recorded post-ReLU
// std is ~0.93, close to 1.
layer1.filter = Tensor(kaimingUniform: layer1.filter.shape, negativeSlope: 0.0)
leakyRelu(layer1.applied(to: x)).stats()
▿ 2 elements - mean : 0.52419895 - std : 0.9307874
// Same init with negativeSlope = sqrt(5): the activations shrink markedly
// (std ~0.34 in the recorded run below).
layer1.filter = Tensor(kaimingUniform: layer1.filter.shape, negativeSlope: sqrt(5.0))
leakyRelu(layer1.applied(to: x)).stats()
▿ 2 elements - mean : 0.19034371 - std : 0.33603325
// A small 4-layer CNN for probing initialization: three stride-2 same-padded
// ReLU convs (28 -> 14 -> 7 -> 4 spatially), a final 3x3 valid stride-2 conv
// down to 1x1 with a single filter, then a flatten — so a [N, 28, 28, 1]
// batch maps to a [N, 1] output.
public struct Model: Layer {
// 28x28x1 -> 14x14x8
public var conv1 = FAConv2D<Float>(
filterShape: (5, 5, 1, 8), strides: (2, 2), padding: .same, activation: relu
)
// 14x14x8 -> 7x7x16
public var conv2 = FAConv2D<Float>(
filterShape: (3, 3, 8, 16), strides: (2, 2), padding: .same, activation: relu
)
// 7x7x16 -> 4x4x32
public var conv3 = FAConv2D<Float>(
filterShape: (3, 3, 16, 32), strides: (2, 2), padding: .same, activation: relu
)
// 4x4x32 -> 1x1x1 (no activation on the final layer)
public var conv4 = FAConv2D<Float>(
filterShape: (3, 3, 32, 1), strides: (2, 2), padding: .valid
)
public var flatten = Flatten<Float>()
// Differentiable forward pass: feed the input through the layers in order.
@differentiable
public func applied(to input: Tensor<Float>) -> Tensor<Float> {
return input.sequenced(through: conv1, conv2, conv3, conv4, flatten)
}
}
// Regression-style target: the first 100 validation labels as floats.
let y = Tensor<Float>(yValid[0..<100])
var model = Model()
// With the layers' default initialization the predictions are tightly
// bunched (std ~0.07 in the recorded output).
let prediction = model.applied(to: x)
prediction.stats()
▿ 2 elements - mean : 0.07328143 - std : 0.070444785
// Gradient of an MSE loss w.r.t. the model parameters, to see how the
// initialization affects gradient scale in the first conv layer.
let gradients = gradient(at: model) { model in
meanSquaredError(predicted: model.applied(to: x), expected: y)
}
gradients.conv1.filter.stats()
▿ 2 elements - mean : -0.1473091 - std : 0.27992472
// Re-initialize every conv filter with Kaiming-uniform (default
// negativeSlope of 1.0, i.e. gain 1) via writable key paths, then probe
// the predictions again.
for keyPath in [\Model.conv1, \Model.conv2, \Model.conv3, \Model.conv4] {
model[keyPath: keyPath].filter = Tensor(kaimingUniform: model[keyPath: keyPath].filter.shape)
}
let prediction = model.applied(to: x)
prediction.stats()
▿ 2 elements - mean : 0.45652133 - std : 0.2774702
// Same gradient probe after re-initialization: the recorded first-layer
// gradient std grows from ~0.28 to ~0.60.
let gradients = gradient(at: model) { model in
meanSquaredError(predicted: model.applied(to: x), expected: y)
}
gradients.conv1.filter.stats()
▿ 2 elements - mean : -0.03488037 - std : 0.60459524
// Export the `// export`-marked cells of this notebook to a Swift source file.
notebookToScript(fname: (Path.cwd / "02a_why_sqrt5.ipynb").string)