%install '.package(path: "$cwd/FastaiNotebook_05_anneal")' FastaiNotebook_05_anneal
Installing packages:
    .package(path: "/home/ubuntu/fastai_docs/dev_swift/FastaiNotebook_05_anneal")
        FastaiNotebook_05_anneal
With SwiftPM flags: []
Working in: /tmp/tmpm75u2_zd
Fetching https://github.com/mxcl/Path.swift
Fetching https://github.com/JustHTTP/Just
Completed resolution in 2.69s
Cloning https://github.com/JustHTTP/Just
Resolving https://github.com/JustHTTP/Just at 0.7.1
Cloning https://github.com/mxcl/Path.swift
Resolving https://github.com/mxcl/Path.swift at 0.16.2
Compile Swift Module 'Just' (1 sources)
Compile Swift Module 'Path' (9 sources)
Compile Swift Module 'FastaiNotebook_05_anneal' (8 sources)
Compile Swift Module 'jupyterInstalledPackages' (1 sources)
Linking ./.build/x86_64-unknown-linux/debug/libjupyterInstalledPackages.so
Initializing Swift...
Loading library...
Installation complete!
import FastaiNotebook_05_anneal
%include "EnableIPythonDisplay.swift"
IPythonDisplay.shell.enable_matplotlib("inline")
('inline', 'module://ipykernel.pylab.backend_inline')
// export
import Path
import TensorFlow
import Python
let data = mnistDataBunch(flat: true)
let (n,m) = (60000,784)
let c = 10
let nHid = 50
var opt = SGD<BasicModel, Float>(learningRate: 1e-2)
func modelInit() -> BasicModel {return BasicModel(nIn: m, nHid: nHid, nOut: c)}
let learner = Learner(data: data, lossFunction: softmaxCrossEntropy, optimizer: opt, initializingWith: modelInit)
let recorder = learner.makeRecorder()
Check that the callbacks from the previous notebooks still load.
learner.delegates = [learner.makeTrainEvalDelegate(), learner.makeShowProgress(),
learner.makeNormalize(mean: mnistStats.mean, std: mnistStats.std),
learner.makeAvgMetric(metrics: [accuracy]), recorder]
learner.fit(2)
Epoch 0: [0.49177492, 0.8759]
Epoch 1: [0.37792677, 0.8985]
Make an extension to set all of them up quickly.
// export
//TODO: when recorder can be accessed as a property, remove it from the return
extension Learner where Opt.Scalar: PythonConvertible {
    public func makeDefaultDelegates(metrics: [(Tensor<Float>, Tensor<Int32>) -> Tensor<Float>] = []) -> Recorder {
        let recorder = makeRecorder()
        delegates = [makeTrainEvalDelegate(), makeShowProgress(), recorder]
        if !metrics.isEmpty { delegates.append(makeAvgMetric(metrics: metrics)) }
        return recorder
    }
}
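With this extension, the three delegate-setup lines from the earlier cell collapse to a single call (this is exactly the pattern used for the LR finder below); only `makeNormalize` still has to be appended separately:

let learner = Learner(data: data, lossFunction: softmaxCrossEntropy, optimizer: opt, initializingWith: modelInit)
let recorder = learner.makeDefaultDelegates(metrics: [accuracy])
learner.delegates.append(learner.makeNormalize(mean: mnistStats.mean, std: mnistStats.std))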
extension Learner {
    public class TestControlFlow: Delegate {
        public override var order: Int { return 3 }
        // Thresholds: skip the rest of a batch after `waitForSkipBatch` iterations,
        // skip the rest of an epoch after `waitForSkipEpoch` iterations,
        // and stop training entirely after `waitForEndTrain` epochs.
        var waitForSkipBatch, waitForSkipEpoch, waitForEndTrain: Int

        public init(nIter: Int, nBatch: Int, nEpoch: Int) {
            (waitForSkipBatch, waitForSkipEpoch, waitForEndTrain) = (nIter, nBatch, nEpoch)
        }

        public override func learnerDidProduceNewGradient(learner: Learner) throws {
            if learner.currentIter >= waitForSkipBatch { throw LearnerAction.skipBatch }
        }

        public override func batchDidFinish(learner: Learner) throws {
            if learner.currentIter >= waitForSkipBatch {
                print("batchDidFinish properly executed after skip batch at iter \(learner.currentIter)")
            }
            if learner.currentIter >= waitForSkipEpoch { throw LearnerAction.skipEpoch }
        }

        public override func epochDidFinish(learner: Learner) throws {
            print("epochDidFinish properly executed after skip epoch (number \(learner.currentEpoch))")
            if learner.currentEpoch >= waitForEndTrain { throw LearnerAction.stop }
        }

        public override func trainingDidFinish(learner: Learner) {
            print("trainingDidFinish properly executed after stop")
        }
    }
}
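This delegate exercises the control-flow mechanism: a delegate signals the training loop by throwing a `LearnerAction`, which the loop catches at the matching level. As a reminder, a minimal sketch of that enum, assuming the definition from the earlier callbacks notebook:

// Sketch (assumed from notebook 04): control-flow signals are ordinary Swift
// errors, caught by the training loop at the batch, epoch, or fit level.
enum LearnerAction: Error {
    case skipBatch  // caught in the batch loop: abandon the current batch
    case skipEpoch  // caught in the epoch loop: abandon the current epoch
    case stop       // caught in fit: end training entirely
}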
let learner = Learner(data: data, lossFunction: softmaxCrossEntropy, optimizer: opt, initializingWith: modelInit)
learner.delegates = [type(of: learner).TestControlFlow(nIter: 5, nBatch: 7, nEpoch: 2),
                     learner.makeTrainEvalDelegate()]
learner.fit(5)
batchDidFinish properly executed after skip batch at iter 5
batchDidFinish properly executed after skip batch at iter 6
batchDidFinish properly executed after skip batch at iter 7
epochDidFinish properly executed after skip epoch (number 0)
batchDidFinish properly executed after skip batch at iter 5
batchDidFinish properly executed after skip batch at iter 6
batchDidFinish properly executed after skip batch at iter 7
epochDidFinish properly executed after skip epoch (number 1)
batchDidFinish properly executed after skip batch at iter 5
batchDidFinish properly executed after skip batch at iter 6
batchDidFinish properly executed after skip batch at iter 7
epochDidFinish properly executed after skip epoch (number 2)
trainingDidFinish properly executed after stop
Check that the orders were taken into account: even though `TestControlFlow` (order 3) was registered first, it should now come after the train/eval delegate (order 0).
(learner.delegates[0].order,learner.delegates[1].order)
▿ 2 elements
  - .0 : 0
  - .1 : 3
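A plausible sketch of the mechanism, assuming the `didSet` observer from the callbacks notebook: the delegates array re-sorts itself whenever it is assigned.

// Sketch (assumed from notebook 04): inside Learner, delegates stay sorted by order.
public var delegates: [Delegate] = [] {
    didSet { delegates.sort { $0.order < $1.order } }
}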
// export
extension Learner where Opt.Scalar: BinaryFloatingPoint {
    public class LRFinder: Delegate {
        public typealias ScheduleFunc = (Float) -> Float

        // A schedule mapping training progress (0 to 1) to a learning rate.
        private var scheduler: ScheduleFunc
        private var numIter: Int
        private var minLoss: Float? = nil

        public init(start: Float = 1e-5, end: Float = 10, numIter: Int = 100) {
            scheduler = makeAnnealer(start: start, end: end, schedule: expSchedule)
            self.numIter = numIter
        }

        override public func batchWillStart(learner: Learner) {
            learner.optimizer.learningRate = Opt.Scalar(scheduler(Float(learner.currentIter) / Float(numIter)))
        }

        override public func batchDidFinish(learner: Learner) throws {
            let loss = learner.currentLoss.scalarized()
            if minLoss == nil { minLoss = loss }
            else {
                if loss < minLoss! { minLoss = loss }
                // Stop once the loss blows up, or after numIter batches.
                if loss > 4 * minLoss! { throw LearnerAction.stop }
                if learner.currentIter >= numIter { throw LearnerAction.stop }
            }
        }

        override public func validationWillStart(learner: Learner) throws {
            // Skip validation during the LR range test.
            throw LearnerAction.skipEpoch
        }
    }

    public func makeLRFinder(start: Float = 1e-5, end: Float = 10, numIter: Int = 100) -> LRFinder {
        return LRFinder(start: start, end: end, numIter: numIter)
    }
}
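The finder sweeps the learning rate exponentially from `start` to `end`: with an exponential schedule, each batch multiplies the LR by a constant factor. A hypothetical sketch of the schedule assumed above (the real `makeAnnealer`/`expSchedule` live in FastaiNotebook_05_anneal):

import Foundation

// Sketch of the exponential schedule assumed above: geometric interpolation
// between start and end as pct runs from 0 to 1.
func expScheduleSketch(start: Float, end: Float, pct: Float) -> Float {
    return start * Float(pow(Double(end / start), Double(pct)))
}

expScheduleSketch(start: 1e-5, end: 10, pct: 0.5)  // 1e-2, the geometric midpoint

With the defaults, the midpoint of the sweep is the geometric mean of 1e-5 and 10, i.e. 1e-2, which is why the resulting loss curve is read against a log-scale LR axis.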
let learner = Learner(data: data, lossFunction: softmaxCrossEntropy, optimizer: opt, initializingWith: modelInit)
let recorder = learner.makeDefaultDelegates()
learner.delegates.append(learner.makeNormalize(mean: mnistStats.mean, std: mnistStats.std))
learner.delegates.append(learner.makeLRFinder())
learner.fit(2)
recorder.plotLRFinder()
// export
//TODO: when Recorder is a property of Learner, don't return it.
extension Learner where Opt.Scalar: PythonConvertible & BinaryFloatingPoint {
    public func lrFind(start: Float = 1e-5, end: Float = 10, numIter: Int = 100) -> Recorder {
        // Count the batches in one training epoch to derive an epoch count;
        // the LRFinder delegate stops the run itself, so surplus epochs never execute.
        let epochCount = data.train.count(where: { _ in true }) / numIter + 1
        let recorder = makeDefaultDelegates()
        delegates.append(makeLRFinder(start: start, end: end, numIter: numIter))
        try! self.fit(epochCount)
        return recorder
    }
}
let recorder = learner.lrFind()
recorder.plotLRFinder()
notebookToScript(fname: (Path.cwd / "05b_early_stopping.ipynb").string)