# autograd


This library provides differentiable operations and tensors. The current backend is rust-ndarray.

## Examples

Here we compute the partial derivatives of `z = 2x^2 + 3y + 1`.

```rust
extern crate autograd as ag;

// `x` and `y` are scalar placeholders
let ref x = ag::placeholder(&[]);
let ref y = ag::placeholder(&[]);
let ref z = 2.*x*x + 3.*y + 1.;

// dz/dy
let ref gy = ag::grad(&[z], &[y])[0];

// dz/dx
let ref gx = ag::grad(&[z], &[x])[0];

// ddz/dx (differentiates `z` again)
let ref ggx = ag::grad(&[gx], &[x])[0];

// evaluation of symbolic gradients
println!("{:?}", gy.eval(&[]));   // => Ok(3.)
println!("{:?}", ggx.eval(&[]));  // => Ok(4.)

// dz/dx requires the placeholder `x` to be filled
println!("{:?}", gx.eval(&[(x, &ag::ndarray::arr0(2.))]));  // => Ok(8.)
```
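These outputs match the analytic derivatives: ∂z/∂y = 3, ∂²z/∂x² = 4, and ∂z/∂x = 4x, which is 8 at x = 2.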

Another example: a multi-layer perceptron for MNIST digit classification.

```rust
use self::ag::gradient_descent_ops::Optimizer;

// -- graph def --
let ref x = ag::placeholder(&[-1, 28*28]);
let ref y = ag::placeholder(&[-1]);
let ref w = ag::variable(ag::ndarray_ext::glorot_uniform(&[28*28, 10]));
let ref b = ag::variable(ag::ndarray_ext::zeros(&[1, 10]));
let ref z = ag::matmul(x, w) + b;
let ref loss = ag::reduce_mean(&ag::sparse_softmax_cross_entropy(z, y), &[0, 1], false);
let ref params = [w, b];
let ref grads = ag::grad(&[loss], params);
let ref predictions = ag::argmax(z, -1, true);
let ref accuracy = ag::reduce_mean(&ag::equal(predictions, y), &[0], false);
let mut adam = ag::gradient_descent_ops::Adam::default();
let ref update_ops = adam.compute_updates(params, grads);

// -- dataset --
let ((x_train, y_train), (x_test, y_test)) = dataset::load();

// -- training loop --
for epoch in 0..max_epoch {
    ...
    ag::run(update_ops, &[(x, &x_batch), (y, &y_batch)]);
}
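// Hypothetical post-training check (an illustrative assumption, not part of
// the original example): the `accuracy` tensor defined above could be
// evaluated on the held-out split with the same feed format used for
// `gx.eval` in the first example.
println!("test accuracy: {:?}", accuracy.eval(&[(x, &x_test), (y, &y_test)]));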

```

For more, see the documentation or the examples.