Rust implementation of a real-coded genetic algorithm for solving optimization problems and for training neural networks. The latter is also known as neuroevolution.
Features:
* automatically computes statistics for single and multiple runs for EA and NE
* EA settings and results can be saved to JSON (see the sketch after this list)
* allows defining user-specified objective functions for EA and NE (see examples below)
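The exact JSON helpers depend on the crate version; as a minimal sketch, assuming the settings type implements serde's `Serialize`, saving can be done directly with `serde_json`:

```rust
// Assumption: EASettings (or GASettings) derives serde::Serialize.
let settings = EASettings::new(20u32, 10u32, 10u32);   // pop_size, gen_count, problem_dim.
let json = serde_json::to_string_pretty(&settings).expect("failed to serialize settings");
std::fs::write("ea_settings.json", json).expect("failed to write settings file");
```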
```rust
let pop_size = 20u32;       // population size.
let problem_dim = 10u32;    // number of optimization parameters.

let problem = RosenbrockProblem{};  // objective function.
let gen_count = 10u32;      // number of generations.
let settings = GASettings::new(pop_size, gen_count, problem_dim);
let mut ga: GA<RosenbrockProblem> = GA::new(&problem);      // initialize the GA.
let res = ga.run(settings).expect("Error during GA run");   // run and fetch the results.

// get and print results of the current run.
println!("\n\nGA results: {:?}", res);

// make multiple runs and get combined results.
let res = ga.run_multiple(settings, 10u32).expect("Error during multiple GA runs");
println!("\n\nResults of multiple GA runs: {:?}", res);
```
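The `RosenbrockProblem` above is provided by the crate. For illustration only, a user-defined objective of the same shape could be sketched as follows; the struct name `MyRosenbrock` is hypothetical, and the trait signature follows the `DummyProblem` example further below:

```rust
// Hypothetical user-defined objective; the crate's own RosenbrockProblem may differ in details.
pub struct MyRosenbrock;

impl Problem for MyRosenbrock {
    fn compute<T: Individual>(&self, ind: &mut T) -> f32 {
        // real-coded representation of the individual.
        let xs = ind.to_vec().unwrap();
        // classic Rosenbrock function: sum of 100*(x[i+1] - x[i]^2)^2 + (1 - x[i])^2.
        let mut sum = 0f32;
        for i in 0..xs.len() - 1 {
            sum += 100.0 * (xs[i + 1] - xs[i] * xs[i]).powi(2) + (1.0 - xs[i]).powi(2);
        }
        sum
    }
}
```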
```rust
let (pop_size, gen_count, param_count) = (20, 20, 100); // param_count does not matter here as the NN structure is defined by the problem.
let settings = EASettings::new(pop_size, gen_count, param_count);
let problem = SymbolicRegressionProblem::new_f();

let mut ne: NE<SymbolicRegressionProblem> = NE::new(&problem);   // initialize the NE runner.
```
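Running the NE and collecting statistics works along the same lines as for the GA; a hedged sketch continuing the snippet above, assuming `NE` exposes the same `run`/`run_multiple` interface:

```rust
// Continues the NE snippet above; assumes run/run_multiple mirror the GA interface.
let res = ne.run(settings).expect("Error during NE run");
println!("\n\nNE results: {:?}", res);

// combined statistics over multiple runs.
let res = ne.run_multiple(settings, 10u32).expect("Error during multiple NE runs");
println!("\n\nResults of multiple NE runs: {:?}", res);
```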
```rust
const INPUT_SIZE: usize = 20;
const OUTPUT_SIZE: usize = 2;

let mut rng = rand::thread_rng();   // needed for weights initialization when the NN is built.
let mut net: MultilayeredNetwork = MultilayeredNetwork::new(INPUT_SIZE, OUTPUT_SIZE);
net.add_hidden_layer(30 as usize, ActivationFunctionType::Sigmoid)
   .add_hidden_layer(20 as usize, ActivationFunctionType::Sigmoid)
   .build(&mut rng, NeuralArchitecture::Multilayered);   // `build` finishes creation of the neural network.
let (ws, bs) = net.get_weights();   // `ws` and `bs` are `Vec` arrays containing weights and biases for each layer.
assert!(ws.len() == 3);   // number of elements equals the number of hidden layers + 1 output layer.
assert!(bs.len() == 3);   // same for biases.
```
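Once built, the network can be evaluated on an input vector. A minimal sketch continuing the snippet above, assuming the network's `compute` method takes an input slice and returns one value per output neuron (as it is used in `compute_with_net` below):

```rust
// Continues the snippet above; `compute` usage is an assumption based on the NeuroProblem example below.
let input = vec![0.5f32; INPUT_SIZE];   // any INPUT_SIZE-dimensional input.
let output = net.compute(&input);
assert!(output.len() == OUTPUT_SIZE);
```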
```rust
// Dummy problem returning random fitness.
pub struct DummyProblem;

impl Problem for DummyProblem {
    // Function to evaluate a specific individual.
    fn compute<T: Individual>(&self, ind: &mut T) -> f32 {
        // use `to_vec` to get the real-coded representation of an individual.
        let v = ind.to_vec().unwrap();
        let mut rng: StdRng = StdRng::from_seed(&[0]);
        rng.gen::<f32>()
    }
}
```
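A `Problem` implementation like this plugs into the GA in the same way as in the first example; a rough usage sketch (the settings values are arbitrary):

```rust
// Hedged usage sketch for the dummy problem above.
let problem = DummyProblem;
let settings = GASettings::new(20u32, 10u32, 10u32);   // pop_size, gen_count, problem_dim.
let mut ga: GA<DummyProblem> = GA::new(&problem);
let res = ga.run(settings).expect("Error during GA run");
println!("GA results: {:?}", res);
```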
```rust
// Dummy problem returning random fitness.
struct RandomNEProblem {}

impl RandomNEProblem {
    fn new() -> RandomNEProblem {
        RandomNEProblem{}
    }
}

impl NeuroProblem for RandomNEProblem {
    // return number of NN inputs.
    fn get_inputs_num(&self) -> usize {1}
    // return number of NN outputs.
    fn get_outputs_num(&self) -> usize {1}
    // return NN with random weights and a fixed structure. For now the structure should be the same
    // all the time to make sure that crossover is possible. Likely to change in the future.
    fn get_default_net(&self) -> MultilayeredNetwork {
        let mut rng = rand::thread_rng();
        let mut net: MultilayeredNetwork = MultilayeredNetwork::new(self.get_inputs_num(), self.get_outputs_num());
        net.add_hidden_layer(5 as usize, ActivationFunctionType::Sigmoid)
           .build(&mut rng, NeuralArchitecture::Multilayered);
        net
    }
    // Function to evaluate performance of a given NN.
    fn compute_with_net<T: NeuralNetwork>(&self, nn: &mut T) -> f32 {
        let mut rng: StdRng = StdRng::from_seed(&[0]);
        // random input vector of the right size.
        let input = (0..self.get_inputs_num())
                        .map(|_| rng.gen::<f32>())
                        .collect::<Vec<f32>>();
        // compute the NN output for the random input and use its first element as fitness.
        let output = nn.compute(&input);
        output[0]
    }
}
```
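Likewise, the `NeuroProblem` above can be handed to the NE runner exactly as in the symbolic regression example; a rough usage sketch:

```rust
// Hedged usage sketch for the dummy neuro-problem above.
let problem = RandomNEProblem::new();
let settings = EASettings::new(20, 20, 100);   // pop_size, gen_count, param_count.
let mut ne: NE<RandomNEProblem> = NE::new(&problem);
let res = ne.run(settings).expect("Error during NE run");
println!("NE results: {:?}", res);
```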