A neural network, and dynamic tensor automatic differentiation implementation for Rust.
* Arrays do not track gradients by default: `tracked()`, or `start_tracking()` must be used (see the documentation for details).
* Fully-connected neural network:
```rust
for _ in 0..8 {
    let mut input = vec![0.0; input_size * batch_size];
    let mut target = vec![0.0; output_size * batch_size];
    for j in 0..batch_size {
        let x: Float = rng.gen_range(-1.0..1.0);
        let y: Float = rng.gen_range(-1.0..1.0);
        input[input_size * j] = x;
        input[input_size * j + 1] = y;
        target[output_size * j] = x.exp();
        target[output_size * j + 1] = x.exp() + y.sin();
    }
    let input = Arrays::new((vec![batch_size, input_size], input));
    let target = Arrays::new((vec![batch_size, output_size], target));

    let _result = model.forward(input.clone());
    let loss = model.backward(target.clone());

    println!("loss: {}", loss);
}
```
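The `Arrays::new((vec![batch_size, input_size], input))` calls pair a dimension list with a flat buffer. The loop's indexing (`input_size * j + 1` for the second feature of sample `j`) implies a row-major layout; here is a plain-Rust sketch of that indexing, with a hypothetical `flat_index` helper used for illustration only:
```rust
// Layout sketch: for dimensions [batch_size, input_size], the flat buffer is
// row-major, so element (j, k) lives at index input_size * j + k.
// `flat_index` is a hypothetical helper, not part of the library.
fn flat_index(input_size: usize, j: usize, k: usize) -> usize {
    input_size * j + k
}

fn main() {
    let (batch_size, input_size) = (4, 2);
    // A buffer filled with 0.0, 1.0, ..., mirroring the shape of `input` above.
    let data: Vec<f64> = (0..batch_size * input_size).map(|i| i as f64).collect();
    // The two features of sample j = 2:
    assert_eq!(data[flat_index(input_size, 2, 0)], 4.0);
    assert_eq!(data[flat_index(input_size, 2, 1)], 5.0);
}
```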
* Dynamic computational graph:
```rust
let a = arr![5.0].tracked();
let b = arr![2.0].tracked();
let mut c = arr![0.0].tracked();
for _ in 0..10 {
    c = &c + &(&a * &b);
    if c[0] > 50.0 {
        c = &c * &a;
    }
}
assert_eq!(c, arr![195300.0]);
c.backward(None);
assert_eq!(c.gradient(), arr![1.0]);
assert_eq!(b.gradient(), arr![97650.0]);
assert_eq!(a.gradient(), arr![232420.0]);
```
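Those gradient values can be sanity-checked without corgi: the autodiff tape records whichever branches actually executed, so a forward-mode replay of the same loop with dual numbers reproduces them exactly. A minimal standalone sketch (plain Rust; `Dual` is an illustrative type, not part of the library):
```rust
// Standalone forward-mode check of the gradients above (no corgi needed).
// A dual number carries a value, and the derivative with respect to one input.
use std::ops::{Add, Mul};

#[derive(Clone, Copy)]
struct Dual {
    v: f64, // value
    d: f64, // derivative
}

impl Add for Dual {
    type Output = Dual;
    fn add(self, o: Dual) -> Dual {
        Dual { v: self.v + o.v, d: self.d + o.d }
    }
}

impl Mul for Dual {
    type Output = Dual;
    // Product rule: (xy)' = x'y + xy'.
    fn mul(self, o: Dual) -> Dual {
        Dual { v: self.v * o.v, d: self.d * o.v + self.v * o.d }
    }
}

// Replays the loop from the example, taking the same branches.
fn forward(a: Dual, b: Dual) -> Dual {
    let mut c = Dual { v: 0.0, d: 0.0 };
    for _ in 0..10 {
        c = c + a * b;
        if c.v > 50.0 {
            c = c * a;
        }
    }
    c
}

fn main() {
    // Seed d = 1.0 on whichever input is being differentiated.
    let dc_da = forward(Dual { v: 5.0, d: 1.0 }, Dual { v: 2.0, d: 0.0 });
    let dc_db = forward(Dual { v: 5.0, d: 0.0 }, Dual { v: 2.0, d: 1.0 });
    assert_eq!(dc_da.v, 195300.0); // matches c
    assert_eq!(dc_da.d, 232420.0); // matches a.gradient()
    assert_eq!(dc_db.d, 97650.0);  // matches b.gradient()
}
```
Note that a finite-difference check would fail here: at the nominal inputs, iteration 5 lands on `c[0] == 50.0` exactly, so any perturbation flips the `> 50.0` branch and changes the control flow.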
* Custom operation (still needs some work):
```rust
// note: proper implementations should handle both the tracked, and untracked cases
let op: array::ForwardOp = Arc::new(|x: &[&Array]| {
    Arrays::new((x[0].dimensions(), x[0].values().iter().zip(x[1].values()).map(|(x, y)| x * y).collect::<Vec<Float>>()))
});

let op_clone = Arc::clone(&op);
let backward_op: array::BackwardOp = Arc::new(move |c: &mut Vec<Array>, x: &mut Vec<bool>, delta: &Array| {
    // product rule: the gradient for each tracked input is delta times the other input
    vec![
        if x[0] { Some(Array::op(&vec![&c[1], delta], Arc::clone(&op_clone), None)) } else { None },
        if x[1] { Some(Array::op(&vec![&c[0], delta], Arc::clone(&op_clone), None)) } else { None },
    ]
});

let a = arr![1.0, 2.0, 3.0].tracked();
let b = arr![3.0, 2.0, 1.0].tracked();

let mut product = Array::op(&vec![&a, &b], op, Some(backward_op));
assert_eq!(product, arr![3.0, 4.0, 3.0]);
product.backward(None);
assert_eq!(b.gradient(), arr![1.0, 2.0, 3.0]);
assert_eq!(a.gradient(), arr![3.0, 2.0, 1.0]);
```
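The backward closure above is the product rule: input 0's gradient multiplies `delta` by the other operand `c[1]` (and symmetrically for input 1), while the `x` flags suppress gradients for untracked inputs. Following the same pattern, here is a hedged sketch of a second operation, elementwise addition, where each tracked input simply receives `delta`; the signatures are assumed from the example above, not taken from the crate's documentation:
```rust
// Sketch only: a second custom operation (elementwise addition), assuming the
// same ForwardOp/BackwardOp signatures as the multiplication example above.
let add_op: array::ForwardOp = Arc::new(|x: &[&Array]| {
    Arrays::new((x[0].dimensions(), x[0].values().iter().zip(x[1].values()).map(|(x, y)| x + y).collect::<Vec<Float>>()))
});

// d(a + b)/da = d(a + b)/db = 1, so each tracked input just receives `delta`.
let add_backward: array::BackwardOp = Arc::new(|_: &mut Vec<Array>, x: &mut Vec<bool>, delta: &Array| {
    vec![
        if x[0] { Some(delta.clone()) } else { None },
        if x[1] { Some(delta.clone()) } else { None },
    ]
});

let a = arr![1.0, 2.0].tracked();
let b = arr![3.0, 4.0].tracked();

let mut sum = Array::op(&vec![&a, &b], add_op, Some(add_backward));
assert_eq!(sum, arr![4.0, 6.0]);
sum.backward(None);
assert_eq!(a.gradient(), arr![1.0, 1.0]);
assert_eq!(b.gradient(), arr![1.0, 1.0]);
```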
* Originally worked around the ergonomics of the `arr!` macro (which however, currently still needs more work).
* Uses `Arc`, and `Mutex` internally.
* For more information, see the documentation for `tracked()`, and `untracked()` in `array.rs`.
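As a quick illustration of the distinction, here is a sketch inferred from the examples above (the exact semantics of mixing tracked, and untracked operands are an assumption; `array.rs` is authoritative):
```rust
// Sketch: only tracked arrays accumulate gradients.
let a = arr![2.0].tracked();   // participates in the backward pass
let b = arr![3.0].untracked(); // treated as a constant
let mut c = &a * &b;
c.backward(None);
assert_eq!(a.gradient(), arr![3.0]); // dc/da = b
// `b` accumulates no gradient, since it is untracked.
```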