This crate provides TensorFlow Lite APIs for Rust.
Please read the API documentation on [docs.rs](https://docs.rs/tflite).
The following example shows how to use the TensorFlow Lite interpreter when provided a TensorFlow Lite FlatBuffer file. The example also demonstrates how to run inference on input data.
```rust
extern crate failure;
extern crate tflite;

use std::fs::File;
use std::io::Read;
use std::path::Path;

use failure::Fallible;

use tflite::ops::builtin::BuiltinOpResolver;
use tflite::{FlatBufferModel, InterpreterBuilder};

fn test_mnist<P: AsRef<Path>>(model_path: P) -> Fallible<()> {
    // Load the FlatBuffer model from disk and resolve the built-in operators.
    let model = FlatBufferModel::build_from_file(model_path)?;
    let resolver = BuiltinOpResolver::default();
    let builder = InterpreterBuilder::new(&model, &resolver)?;
    let mut interpreter = builder.build()?;

    interpreter.allocate_tensors()?;

    // The MNIST model has exactly one input and one output tensor.
    let inputs = interpreter.inputs().to_vec();
    assert_eq!(inputs.len(), 1);
    let input_index = inputs[0];

    let outputs = interpreter.outputs().to_vec();
    assert_eq!(outputs.len(), 1);
    let output_index = outputs[0];

    // Input is a single 28x28 grayscale image; output is one score per digit.
    let input_tensor = interpreter.tensor_info(input_index)?;
    assert_eq!(input_tensor.dims, vec![1, 28, 28, 1]);

    let output_tensor = interpreter.tensor_info(output_index)?;
    assert_eq!(output_tensor.dims, vec![1, 10]);
    // data/mnist10.bin holds the digits 0..9 as consecutive raw images,
    // each exactly the size of the input tensor.
    let mut input_file = File::open("data/mnist10.bin")?;
    for i in 0..10 {
        input_file.read_exact(interpreter.tensor_data_mut(input_index)?)?;

        interpreter.invoke()?;

        // The predicted digit is the index of the highest output score.
        let output: &[u8] = interpreter.tensor_data(output_index)?;
        let guess = output
            .iter()
            .enumerate()
            .max_by(|x, y| x.1.cmp(y.1))
            .unwrap()
            .0;

        println!("{}: {:?}", i, output);
        assert_eq!(i, guess);
    }
    Ok(())
}
#[test]
fn mobilenetv1_mnist() {
    test_mnist("data/MNISTnet_uint8_quant.tflite").unwrap();
}

#[test]
fn mobilenetv2_mnist() {
    test_mnist("data/MNISTnet_v2_uint8_quant.tflite").unwrap();
}
```
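The interpreter can also be constructed from a model that is already in memory, for example one embedded in the binary with `include_bytes!`. Below is a minimal sketch, assuming the crate version you use exposes `FlatBufferModel::build_from_buffer`; the `interpreter_from_buffer` helper is illustrative, not part of the crate's API.

```rust
extern crate failure;
extern crate tflite;

use std::fs;

use failure::Fallible;

use tflite::ops::builtin::BuiltinOpResolver;
use tflite::{FlatBufferModel, InterpreterBuilder};

fn interpreter_from_buffer(model_path: &str) -> Fallible<()> {
    // Any Vec<u8> containing a valid FlatBuffer works here, whether read
    // from disk or embedded with include_bytes!(...).to_vec().
    let buf = fs::read(model_path)?;
    let model = FlatBufferModel::build_from_buffer(buf)?;

    let resolver = BuiltinOpResolver::default();
    let builder = InterpreterBuilder::new(&model, &resolver)?;
    let mut interpreter = builder.build()?;
    interpreter.allocate_tensors()?;

    // From here on, feeding inputs and reading outputs works exactly as in
    // the file-based example above.
    Ok(())
}
```

Building from a buffer avoids a filesystem dependency at run time, which can be convenient for embedded targets.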