# mvnc

Wrapper around the Movidius Neural Computing stick C API.

## Changes in version 0.3.0

In order to increase safety, mutable borrows are now required for `Graph::load_tensor` and `Graph::get_result`.

Instead of returning a `Slot` indicator, a unique id is returned for each loaded tensor, allowing for more reliable association of results.

Minor changes include additional Error values.

## Version history

Version | Description
------- | --------------------------------------------------
0.3.0   | Improved safety and documentation
0.2.1   | Moved repository to github (version not published)
0.2.0   | Complete crate including Slot indicator
0.1.3   | Implemented graph module
0.1.2   | Implemented device module
0.1.1   | Added internal IntoResult trait and readme.md
0.1.0   | Implemented log module

## Build instructions

The `libmvnc.so` library from the Movidius™ Neural Compute SDK must be present.

## Example

The most recent version of this example can be found at [examples/mnist.rs](https://github.com/WiebeCnossen/mvnc/blob/master/examples/mnist.rs).

```rust
extern crate half;
extern crate mvnc;
extern crate rand;

use std::fs::File;
use std::io::{self, Read};

use half::{consts, f16};

use mvnc::{Device, Graph};
use mvnc::graph::Blocking;
use mvnc::log;

use rand::{Rng, ThreadRng};

pub fn main() { log::setloglevel(&log::LogLevel::Verbose).expect("Setting log level failed"); for i in 0.. { if let Some(devicename) = Device::getname(i) { println!("Device {} = '{}'", i, devicename); if let Err(error) = runmnist(&device_name) { println!("{:?}", error); } } else { println!("Finished; # devices = {}", i); break; } } }

/// Reads the compiled MNIST network graph from disk.
///
/// Returns the raw graph bytes, or the underlying `io::Error` if the file
/// cannot be opened or read.
fn read_graph() -> Result<Vec<u8>, io::Error> {
    let mut data = vec![];
    File::open("./examples/mnist.graph")?
        .read_to_end(&mut data)
        // read_to_end yields the byte count; we only need the buffer.
        .map(|_| data)
}

fn randominput(rng: &mut ThreadRng) -> Vec { (0..768).map(|| f16::from_f32(rng.gen())).collect() }

fn runmnist(devicename: &str) -> Result<(), Error> { let mut rng = rand::threadrng(); let device = Device::open(devicename)?;

let data = read_graph()?;
let mut graph = Graph::allocate(&device, &data)?;

graph.set_blocking(&Blocking::Block)?;
println!("Blocking -> {:?}", graph.get_blocking()?);
for _ in 0..10 {
    exec_block(&mut graph, &mut rng)?;
}

graph.set_blocking(&Blocking::DontBlock)?;
println!("Blocking -> {:?}", graph.get_blocking()?);
for _ in 0..10 {
    exec_dont_block(&mut graph, &mut rng)?;
}

println!(
    "Thermal throttling level = {:?}",
    device.get_thermal_throttling_level()?
);

Ok(())

}

fn execblock(graph: &mut Graph, rng: &mut ThreadRng) -> Result<(), Error> { graph.loadtensor(&randominput(rng))?; let (id, digit) = graph .getresult::() .map(|(id, output)| (id, mostprobabledigit(output)))?; let timetaken: f32 = graph.gettimetaken()?.iter().cloned().sum(); printresult(id, digit, time_taken); Ok(()) }

fn execdontblock(graph: &mut Graph, rng: &mut ThreadRng) -> Result<(), Error> { loop { match graph.loadtensor(&randominput(rng)) { Ok(_) => (), Err(mvnc::Error::Busy) => break, // All buffers filled Err(e) => return Err(e.into()), } }

loop {
    let result = graph
        .get_result::<f16>()
        .map(|(id, output)| (id, most_probable_digit(output)));
    match result {
        Ok((id, digit)) => {
            let time_taken: f32 = graph.get_time_taken()?.iter().cloned().sum();
            print_result(id, digit, time_taken);
        }
        Err(mvnc::Error::Idle) => return Ok(()), // No calculations pending
        Err(mvnc::Error::NoData) => (),          // Calculation not ready
        Err(e) => return Err(e.into()),
    }
}

}

/// Returns the index of the maximum value in `output`, i.e. the digit the
/// network considers most probable. Returns 0 for an empty slice.
fn most_probable_digit(output: &[f16]) -> usize {
    let mut max = consts::MIN;
    let mut digit = 0;
    for (i, &prob) in output.iter().enumerate() {
        if prob > max {
            max = prob;
            digit = i;
        }
    }
    digit
}

/// Prints one inference result: the run id, the elapsed time in
/// milliseconds, and the recognized digit.
fn print_result(id: usize, digit: usize, time_taken: f32) {
    println!(
        "Run {:2} in {:.2}ms, most probable digit = {}",
        id, time_taken, digit
    );
}

[derive(Debug)]

enum Error { MvncError(mvnc::Error), IoError(io::Error), }

impl From for Error { fn from(error: mvnc::Error) -> Error { Error::MvncError(error) } }

impl From for Error { fn from(error: io::Error) -> Error { Error::IoError(error) } } ```