%install '.package(url: "https://github.com/mxcl/Path.swift", from: "0.16.1")' Path
%install '.package(url: "https://github.com/JustHTTP/Just", from: "0.7.1")' Just
Installing packages: .package(url: "https://github.com/mxcl/Path.swift", from: "0.16.1") Path .package(url: "https://github.com/JustHTTP/Just", from: "0.7.1") Just With SwiftPM flags: [] Working in: /tmp/tmpinjts7mc Fetching https://github.com/mxcl/Path.swift Fetching https://github.com/JustHTTP/Just Completed resolution in 0.99s Cloning https://github.com/JustHTTP/Just Resolving https://github.com/JustHTTP/Just at 0.7.1 Cloning https://github.com/mxcl/Path.swift Resolving https://github.com/mxcl/Path.swift at 0.16.2 Compile Swift Module 'Just' (1 sources) Compile Swift Module 'Path' (9 sources) Compile Swift Module 'jupyterInstalledPackages' (1 sources) Linking ./.build/x86_64-unknown-linux/debug/libjupyterInstalledPackages.so Initializing Swift... Loading library... Installation complete!
// export
import Foundation
import Just
import Path
//export
/// Runs an external executable and captures its standard output.
/// - Parameters:
///   - launchPath: Absolute path to the executable (e.g. "/bin/ls").
///   - arguments: Arguments passed to the executable.
/// - Returns: The process's stdout decoded as UTF-8, or `nil` if the
///   process could not be launched or the output was not valid UTF-8.
public func shellCommand(_ launchPath: String, _ arguments: [String]) -> String?
{
    let task = Process()
    task.executableURL = URL(fileURLWithPath: launchPath)
    task.arguments = arguments
    let pipe = Pipe()
    task.standardOutput = pipe
    do {
        try task.run()
    } catch {
        // Launch failed (bad path, permissions, ...): report and return nil
        // instead of falling through and returning an empty string.
        print("Unexpected error: \(error).")
        return nil
    }
    let data = pipe.fileHandleForReading.readDataToEndOfFile()
    // Reap the child so it doesn't linger as a zombie.
    task.waitUntilExit()
    return String(data: data, encoding: .utf8)
}
// Smoke test: list the working directory with the helper above.
if let res = shellCommand("/bin/ls", ["-lh"]){print(res)}
total 11M -rw-rw-r-- 1 ubuntu ubuntu 11K Apr 17 11:08 00_load_data.ipynb -rw-rw-r-- 1 ubuntu ubuntu 25K Apr 17 11:08 01_matmul.ipynb -rw-rw-r-- 1 ubuntu ubuntu 12K Apr 17 11:08 01a_fastai_layers.ipynb -rw-rw-r-- 1 ubuntu ubuntu 32K Apr 16 13:02 01b_sequential_layer.ipynb -rw-rw-r-- 1 ubuntu ubuntu 10K Apr 17 11:08 01c_array_differentiable.ipynb -rw-rw-r-- 1 ubuntu ubuntu 26K Apr 17 11:08 02_fully_connected.ipynb -rw-rw-r-- 1 ubuntu ubuntu 18K Apr 15 18:31 02a_why_sqrt5.ipynb -rw-rw-r-- 1 ubuntu ubuntu 16K Apr 15 18:36 02b_initializing.ipynb -rw-rw-r-- 1 ubuntu ubuntu 23K Apr 15 21:38 03_minibatch_training.ipynb -rw-rw-r-- 1 ubuntu ubuntu 24K Apr 17 11:08 04_callbacks.ipynb -rw-rw-r-- 1 ubuntu ubuntu 75K Apr 17 11:08 05_anneal.ipynb -rw-rw-r-- 1 ubuntu ubuntu 59K Apr 15 20:04 05b_early_stopping.ipynb -rw-rw-r-- 1 ubuntu ubuntu 143K Apr 16 13:20 06_cuda.ipynb -rw-rw-r-- 1 ubuntu ubuntu 31K Apr 16 13:23 07_batchnorm.ipynb -rw-rw-r-- 1 ubuntu ubuntu 586K Apr 17 11:08 08_data_block.ipynb -rw-rw-r-- 1 ubuntu ubuntu 6.9K Apr 16 17:35 08a_heterogeneous_dictionary.ipynb -rw-rw-r-- 1 ubuntu ubuntu 21K Apr 17 11:08 09_optimizer.ipynb drwxrwxr-x 3 ubuntu ubuntu 4.0K Apr 10 03:35 FastaiNotebook_00_load_data drwxrwxr-x 3 ubuntu ubuntu 4.0K Apr 12 17:38 FastaiNotebook_01_matmul drwxrwxr-x 3 ubuntu ubuntu 4.0K Apr 12 17:38 FastaiNotebook_01a_fastai_layers drwxrwxr-x 3 ubuntu ubuntu 4.0K Apr 17 11:08 FastaiNotebook_01c_array_differentiable drwxrwxr-x 3 ubuntu ubuntu 4.0K Apr 17 11:08 FastaiNotebook_02_fully_connected drwxrwxr-x 3 ubuntu ubuntu 4.0K Apr 17 11:08 FastaiNotebook_02a_why_sqrt5 drwxrwxr-x 3 ubuntu ubuntu 4.0K Apr 17 11:08 FastaiNotebook_03_minibatch_training drwxrwxr-x 3 ubuntu ubuntu 4.0K Apr 17 11:08 FastaiNotebook_04_callbacks drwxrwxr-x 3 ubuntu ubuntu 4.0K Apr 17 11:08 FastaiNotebook_05_anneal drwxrwxr-x 3 ubuntu ubuntu 4.0K Apr 17 11:08 FastaiNotebook_05b_early_stopping drwxrwxr-x 3 ubuntu ubuntu 4.0K Apr 17 11:08 FastaiNotebook_06_cuda drwxrwxr-x 3 ubuntu ubuntu 
4.0K Apr 17 11:08 FastaiNotebook_07_batchnorm drwxrwxr-x 3 ubuntu ubuntu 4.0K Apr 16 15:38 FastaiNotebook_08_data_block drwxrwxr-x 3 ubuntu ubuntu 4.0K Apr 16 17:09 FastaiNotebook_08a_heterogeneous_dictionary drwxrwxr-x 3 ubuntu ubuntu 4.0K Apr 10 03:35 FastaiNotebooks -rw-rw-r-- 1 ubuntu ubuntu 3.0K Apr 10 03:35 create_packages.ipynb -rw-rw-r-- 1 ubuntu ubuntu 1.2K Apr 10 13:11 create_packages.py -rw-rw-r-- 1 ubuntu ubuntu 52K Apr 11 13:05 image_dataset_ops.ipynb -rw-rw-r-- 1 ubuntu ubuntu 13K Apr 10 03:35 learner.ipynb -rw-rw-r-- 1 ubuntu ubuntu 4.9K Apr 10 03:35 swift_nn.ipynb -rw-rw-r-- 1 ubuntu ubuntu 4.4K Apr 17 11:08 test_array_differentiable.ipynb -rw-rw-r-- 1 ubuntu ubuntu 9.5M Apr 17 11:10 train-images-idx3-ubyte.gz
//export
/// Downloads `url` to `dest` (defaulting to the URL's last path component
/// in the current working directory), skipping the download if the file
/// already exists unless `force` is true.
/// - Parameters:
///   - url: Remote file URL to fetch.
///   - dest: Optional destination file path.
///   - force: Re-download even if the destination already exists.
public func downloadFile(_ url: String, dest: String?=nil, force: Bool=false){
    // Compute the destination once instead of duplicating the expression.
    let destName = dest ?? (Path.cwd/url.split(separator: "/").last!).string
    let destURL = URL(fileURLWithPath: destName)
    // `Path(destName)` is nil for a relative path; treat that as "missing"
    // rather than force-unwrapping and crashing.
    let alreadyThere = Path(destName)?.exists ?? false
    if force || !alreadyThere {
        print("Downloading \(url)...")
        if let cts = Just.get(url).content {
            do {try cts.write(to: destURL)}
            catch {print("Can't write to \(destURL).\n\(error)")}
        } else {print("Can't reach \(url)")}
    }
}
// Download the MNIST training images into the current directory.
downloadFile("https://storage.googleapis.com/cvdf-datasets/mnist/train-images-idx3-ubyte.gz")
//export
import TensorFlow
//export
// Scalar types constructible from a single raw byte; used below to turn
// MNIST file bytes into tensor scalars.
protocol ConvertableFromByte {
init(_ d:UInt8)
}
//export
// Conformances for the scalar types the MNIST loaders need.
extension Float : ConvertableFromByte{}
extension Int : ConvertableFromByte{}
extension Int32 : ConvertableFromByte{}
// Reads the file at `fn`, skips a `skip`-byte header, and converts the
// remaining bytes into a 1-D tensor of `T`.
func readData<T:ConvertableFromByte & TensorFlowScalar>(_ fn:String, _ skip:Int) -> Tensor<T> {
    let fileURL = URL(fileURLWithPath: fn)
    let payload = try! Data(contentsOf: fileURL).dropFirst(skip)
    let scalars = payload.map { T($0) }
    return Tensor(scalars)
}
//export
/// Loads one MNIST split (images or labels) as a tensor, downloading and
/// gunzipping the IDX file first if it is not already under `path`.
/// - Parameters:
///   - training: true for the 60k training split, false for the 10k test split.
///   - labels: true to load labels, false to load images.
///   - path: Directory where the raw IDX files are cached.
///   - flat: For images, [N, 784] if true, [N, 28, 28] if false (ignored for labels).
func loadMNIST<T:ConvertableFromByte & TensorFlowScalar>(training: Bool, labels: Bool, path: Path, flat: Bool) -> Tensor<T> {
    // MNIST file naming: {train|t10k}-{images|labels}-idx{1|3}-ubyte
    let split = training ? "train" : "t10k"
    let kind = labels ? "labels" : "images"
    let batch = training ? 60000 : 10000
    let shape: TensorShape = labels ? [batch] : (flat ? [batch, 784] : [batch, 28, 28])
    // IDX header size: 8 bytes for label files, 16 for image files.
    let dropK = labels ? 8 : 16
    let baseUrl = "https://storage.googleapis.com/cvdf-datasets/mnist/"
    let fname = split + "-" + kind + "-idx\(labels ? 1 : 3)-ubyte"
    let file = path/fname
    if !file.exists {
        // Fetch the gzipped file and decompress it in place.
        downloadFile("\(baseUrl)\(fname).gz", dest:(path/"\(fname).gz").string)
        _ = shellCommand("/bin/gunzip", ["-fq", (path/"\(fname).gz").string])
    }
    // Reuse readData instead of duplicating its read/drop/convert logic here.
    let raw: Tensor<T> = readData(file.string, dropK)
    // Labels are already 1-D; only images need reshaping.
    return labels ? raw : raw.reshaped(to: shape)
}
/// Loads the full MNIST dataset (downloading it if necessary) and returns
/// (training images, training labels, test images, test labels).
/// Image pixels are scaled to [0, 1]; pass `flat: true` for [N, 784] images.
public func loadMNIST(path:Path, flat:Bool = false) -> (Tensor<Float>, Tensor<Int32>, Tensor<Float>, Tensor<Int32>) {
    try! path.mkdir(.p)
    let trainImages: Tensor<Float> = loadMNIST(training: true, labels: false, path: path, flat: flat) / 255.0
    let trainLabels: Tensor<Int32> = loadMNIST(training: true, labels: true, path: path, flat: flat)
    let testImages: Tensor<Float> = loadMNIST(training: false, labels: false, path: path, flat: flat) / 255.0
    let testLabels: Tensor<Int32> = loadMNIST(training: false, labels: true, path: path, flat: flat)
    return (trainImages, trainLabels, testImages, testLabels)
}
//export
// Default on-disk cache location for the MNIST data (~/.fastai/data/mnist_tst).
public let mnistPath = Path.home/".fastai"/"data"/"mnist_tst"
// Load MNIST with images kept as [N, 28, 28].
let (xTrain, yTrain, xValid, yValid) = loadMNIST(path: mnistPath)
xTrain.shape
▿ TensorShape
▿ dimensions : 3 elements
- 0 : 60000
- 1 : 28
- 2 : 28
// Reload with flattened [N, 784] images (notebook cell re-binds the names).
let (xTrain, yTrain, xValid, yValid) = loadMNIST(path: mnistPath, flat: true)
xTrain.shape
▿ TensorShape
▿ dimensions : 2 elements
- 0 : 60000
- 1 : 784
//export
import Dispatch
/// Runs `function` once and prints its wall-clock duration in milliseconds.
public func time(_ function: () -> ()) {
    let begin = DispatchTime.now().uptimeNanoseconds
    function()
    let elapsedNs = DispatchTime.now().uptimeNanoseconds - begin
    // Nanoseconds -> milliseconds.
    print("\(Double(elapsedNs) / 1e6) ms")
}
// Time a single load of the validation images.
time {var valImgs: Tensor<Float> = loadMNIST(training:false, labels: false, path: mnistPath, flat:false)}
372.649852 ms
//export
/// Runs `function` once as a warmup, then `repeating` more times, printing
/// the mean wall-clock duration of the timed runs in milliseconds.
/// - Parameters:
///   - repeating: Number of timed runs; values < 1 perform only the warmup.
///   - function: The closure to benchmark.
public func time(repeating: Int, _ function: () -> ()) {
    // Warmup run so one-time costs (caches, lazy init) don't skew the mean.
    function()
    // `1...repeating` traps for repeating < 1 — bail out gracefully instead.
    guard repeating > 0 else { return }
    var times: [Double] = []
    for _ in 1...repeating {
        let start = DispatchTime.now()
        function()
        let end = DispatchTime.now()
        // Nanoseconds -> milliseconds.
        times.append(Double(end.uptimeNanoseconds - start.uptimeNanoseconds) / 1e6)
    }
    print("\(times.reduce(0.0, +)/Double(times.count)) ms")
}
// Average load time over 10 runs (after one warmup call).
time(repeating:10) {var valImgs: Tensor<Float> = loadMNIST(training:false, labels: false, path: mnistPath, flat:false)}
274.26179950000005 ms
// export
/// Extracts the "// export"-tagged cells of a Jupyter notebook into a Swift
/// source file under FastaiNotebooks/Sources/FastaiNotebooks, next to the
/// notebook, replacing the .ipynb extension with .swift.
/// - Parameter fname: Path to the .ipynb file to convert.
public func notebookToScript(fname: String){
    let urlFname = URL(fileURLWithPath: fname)
    // Use URL.lastPathComponent: Foundation's String has no such property.
    let last = urlFname.lastPathComponent
    let outFname = (urlFname.deletingLastPathComponent().appendingPathComponent("FastaiNotebooks", isDirectory: true)
        .appendingPathComponent("Sources", isDirectory: true)
        .appendingPathComponent("FastaiNotebooks", isDirectory: true).appendingPathComponent(last)
        .deletingPathExtension().appendingPathExtension("swift"))
    do{
        let data = try Data(contentsOf: urlFname)
        // `try` (not `try!`) so malformed JSON lands in the catch below
        // instead of crashing the process.
        let jsonData = try JSONSerialization.jsonObject(with: data, options: .allowFragments) as! [String: Any]
        let cells = jsonData["cells"] as! [[String:Any]]
        var module = """
/*
THIS FILE WAS AUTOGENERATED! DO NOT EDIT!
file to edit: \(last)
*/
"""
        for cell in cells{
            if let source = cell["source"] as? [String]{
                if source.isEmpty {continue}
                // Keep only cells whose first line is exactly "// export".
                if source[0].range(of: #"^\s*//\s*export\s*$"#, options: .regularExpression) != nil{
                    module.append("\n" + source[1...].joined() + "\n")
                }
            }
        }
        // `try` (not `try?`) so a failed write is reported, not swallowed.
        try module.write(to: outFname, atomically: false, encoding: .utf8)
    } catch {print("Error processing \(fname): \(error)")}
}
// export
/// Converts every notebook named like "NN_something.ipynb" directly under
/// `path` into a Swift source file via `notebookToScript`.
/// - Parameter path: Directory to scan for notebooks.
public func exportNotebooks(_ path: Path){
    for entry in try! path.ls(){
        guard entry.kind == Entry.Kind.file else { continue }
        let name = entry.path.basename()
        if name.range(of: #"^\d*_.*ipynb$"#, options: .regularExpression) != nil {
            print("Converting \(name)")
            // Bug fix: pass the full path, not just the basename, so the
            // notebook is found even when `path` is not the cwd.
            notebookToScript(fname: entry.path.string)
        }
    }
}
// Export this notebook's "// export" cells into the FastaiNotebooks package.
notebookToScript(fname: (Path.cwd / "00_load_data.ipynb").string)