diff --git a/Cargo.lock b/Cargo.lock
index 88cb66d..c52b3fa 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -53,9 +53,9 @@ checksum = "a26ae43d7bcc3b814de94796a5e736d4029efb0ee900c12e2d54c993ad1a1e07"
 
 [[package]]
 name = "genetic-rs"
-version = "0.5.4"
+version = "0.6.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a68bb62a836f6ea3261d77cfec4012316e206f53e7d0eab519f5f3630e86001f"
+checksum = "372d080448bae68a4a8963e6acadd81621510cdf535c8eb5ecc39ab605a17e88"
 dependencies = [
  "genetic-rs-common",
  "genetic-rs-macros",
@@ -63,20 +63,21 @@ dependencies = [
 
 [[package]]
 name = "genetic-rs-common"
-version = "0.5.4"
+version = "0.6.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3be7aaffd4e4dc82d11819d40794f089c37d02595a401f229ed2877d1a4c401d"
+checksum = "94a87c5bbc9d445ab0684eb5109b5781578c02a63f8ed2d286ca75b94848f43f"
 dependencies = [
  "rand",
  "rayon",
  "replace_with",
+ "tracing",
 ]
 
 [[package]]
 name = "genetic-rs-macros"
-version = "0.5.4"
+version = "0.6.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4e73b1f36ea3e799232e1a3141a2765fa6ee9ed7bb3fed96ccfb3bf272d1832e"
+checksum = "f5d928bc6dae6aef04ff1156a4555d3313f5a6cf607235b5931c62710d066c5a"
 dependencies = [
  "genetic-rs-common",
  "proc-macro2",
@@ -86,12 +87,13 @@ dependencies = [
 
 [[package]]
 name = "getrandom"
-version = "0.2.12"
+version = "0.3.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "190092ea657667030ac6a35e305e62fc4dd69fd98ac98631e5d3a2b1575a12b5"
+checksum = "73fea8450eea4bac3940448fb7ae50d91f034f941199fcd9d909a5a07aa455f0"
 dependencies = [
  "cfg-if",
  "libc",
+ "r-efi",
  "wasi",
 ]
 
@@ -171,22 +173,28 @@ dependencies = [
  "proc-macro2",
 ]
 
+[[package]]
+name = "r-efi"
+version = "5.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "74765f6d916ee2faa39bc8e68e4f3ed8949b48cccdac59983d287a7cb71ce9c5"
+
 [[package]]
 name = "rand"
-version = "0.8.5"
+version = "0.9.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404"
+checksum = "3779b94aeb87e8bd4e834cee3650289ee9e0d5677f976ecdb6d219e5f4f6cd94"
 dependencies = [
- "libc",
  "rand_chacha",
  "rand_core",
+ "zerocopy",
 ]
 
 [[package]]
 name = "rand_chacha"
-version = "0.3.1"
+version = "0.9.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88"
+checksum = "d3022b5f1df60f26e1ffddd6c66e8aa15de382ae63b3a0c1bfc0e4d3e3f325cb"
 dependencies = [
  "ppv-lite86",
  "rand_core",
@@ -194,9 +202,9 @@
 
 [[package]]
 name = "rand_core"
-version = "0.6.4"
+version = "0.9.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c"
+checksum = "99d9a13982dcf210057a8a78572b2217b667c3beacbf3a0d8b454f6f82837d38"
 dependencies = [
  "getrandom",
 ]
@@ -324,6 +332,38 @@ checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b"
 
 [[package]]
 name = "wasi"
-version = "0.11.0+wasi-snapshot-preview1"
+version = "0.14.2+wasi-0.2.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9683f9a5a998d873c0d21fcbe3c083009670149a8fab228644b8bd36b2c48cb3"
+dependencies = [
+ "wit-bindgen-rt",
+]
+
+[[package]]
+name = "wit-bindgen-rt"
+version = "0.39.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
"9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" +checksum = "6f42320e61fe2cfd34354ecb597f86f413484a798ba44a8ca1165c58d42da6c1" +dependencies = [ + "bitflags", +] + +[[package]] +name = "zerocopy" +version = "0.8.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2586fea28e186957ef732a5f8b3be2da217d65c5969d4b1e17f973ebbe876879" +dependencies = [ + "zerocopy-derive", +] + +[[package]] +name = "zerocopy-derive" +version = "0.8.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a996a8f63c5c4448cd959ac1bab0aaa3306ccfd060472f85943ee0750f0169be" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] diff --git a/Cargo.toml b/Cargo.toml index c9e0def..ead6c31 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -20,12 +20,12 @@ rustdoc-args = ["--cfg", "docsrs"] [features] default = [] serde = ["dep:serde", "dep:serde-big-array"] -tracing = ["dep:tracing"] +tracing = ["dep:tracing", "genetic-rs/tracing"] [dependencies] atomic_float = "1.1.0" bitflags = "2.9.0" -genetic-rs = { version = "0.5.4", features = ["rayon", "derive"] } +genetic-rs = { version = "0.6.0", features = ["rayon", "derive"] } lazy_static = "1.5.0" rayon = "1.10.0" replace_with = "0.1.7" diff --git a/examples/readme_ex.rs b/examples/readme_ex.rs index ed01512..07b96b9 100644 --- a/examples/readme_ex.rs +++ b/examples/readme_ex.rs @@ -1,5 +1,4 @@ use neat::*; -use rand::prelude::*; // derive some traits so that we can use this agent with `genetic-rs`. #[derive(Debug, Clone, PartialEq, CrossoverReproduction, DivisionReproduction, RandomlyMutable)] @@ -11,7 +10,7 @@ impl Prunable for MyAgentGenome {} impl GenerateRandom for MyAgentGenome { // allows us to use `Vec::gen_random` for the initial population. - fn gen_random(rng: &mut impl rand::Rng) -> Self { + fn gen_random(rng: &mut impl Rng) -> Self { Self { brain: NeuralNetwork::new(MutationSettings::default(), rng), } @@ -24,13 +23,13 @@ fn inverse_error(expected: f32, actual: f32) -> f32 { } fn fitness(agent: &MyAgentGenome) -> f32 { - let mut rng = rand::thread_rng(); + let mut rng = rand::rng(); let mut fit = 0.; for _ in 0..10 { // run the test multiple times for consistency - let inputs = [rng.gen(), rng.gen(), rng.gen()]; + let inputs = [rng.random(), rng.random(), rng.random()]; // try to force the network to learn to do some basic logic let expected0: f32 = (inputs[0] >= 0.5 && inputs[1] < 0.5).into(); diff --git a/src/activation.rs b/src/activation.rs index 3c3f821..3044061 100644 --- a/src/activation.rs +++ b/src/activation.rs @@ -105,6 +105,16 @@ impl Default for ActivationRegistry { } } +impl fmt::Debug for ActivationRegistry { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let keys: Vec<_> = self.fns.keys().collect(); + + f.debug_struct("ActivationRegistry") + .field("fns", &keys) + .finish() + } +} + /// A trait that represents an activation method. pub trait Activation { /// The activation function. diff --git a/src/neuralnet.rs b/src/neuralnet.rs index b2ce69c..24bc7c4 100644 --- a/src/neuralnet.rs +++ b/src/neuralnet.rs @@ -8,7 +8,6 @@ use std::{ use atomic_float::AtomicF32; use genetic_rs::prelude::*; -use rand::Rng; use replace_with::replace_with_or_abort; use crate::{ @@ -24,6 +23,9 @@ use serde::{Deserialize, Serialize}; #[cfg(feature = "serde")] use serde_big_array::BigArray; +#[cfg(feature = "tracing")] +use tracing::*; + /// The mutation settings for [`NeuralNetwork`]. 
 /// Does not affect [`NeuralNetwork::mutate`], only [`NeuralNetwork::divide`] and [`NeuralNetwork::crossover`].
 #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
@@ -78,6 +80,7 @@ pub struct NeuralNetwork<const I: usize, const O: usize> {
 impl<const I: usize, const O: usize> NeuralNetwork<I, O> {
     // TODO option to set default output layer activations
     /// Creates a new random neural network with the given settings.
+    #[cfg_attr(feature = "tracing", instrument)]
     pub fn new(mutation_settings: MutationSettings, rng: &mut impl Rng) -> Self {
         let mut output_layer = Vec::with_capacity(O);
 
@@ -94,20 +97,20 @@ impl<const I: usize, const O: usize> NeuralNetwork<I, O> {
         for _ in 0..I {
             let mut already_chosen = Vec::new();
-            let conns = rng.gen_range(1..=O);
+            let conns = rng.random_range(1..=O);
             total_connections += conns;
 
             let outputs = (0..conns)
                 .map(|_| {
-                    let mut j = rng.gen_range(0..O);
+                    let mut j = rng.random_range(0..O);
                     while already_chosen.contains(&j) {
-                        j = rng.gen_range(0..O);
+                        j = rng.random_range(0..O);
                     }
 
                     output_layer[j].input_count += 1;
                     already_chosen.push(j);
 
-                    (NeuronLocation::Output(j), rng.gen())
+                    (NeuronLocation::Output(j), rng.random())
                 })
                 .collect();
@@ -131,6 +134,7 @@ impl<const I: usize, const O: usize> NeuralNetwork<I, O> {
     }
 
     /// Runs the neural network, propagating values from input to output layer.
+    #[cfg_attr(feature = "tracing", instrument)]
     pub fn predict(&self, inputs: [f32; I]) -> [f32; O] {
         let cache = Arc::new(NeuralNetCache::from(self));
         cache.prime_inputs(inputs);
@@ -181,6 +185,7 @@ impl<const I: usize, const O: usize> NeuralNetwork<I, O> {
     }
 
     /// Split a [`Connection`] into two of the same weight, joined by a new [`Neuron`] in the hidden layer(s).
+    #[cfg_attr(feature = "tracing", instrument)]
     pub fn split_connection(&mut self, connection: Connection, rng: &mut impl Rng) {
         let newloc = NeuronLocation::Hidden(self.hidden_layers.len());
 
@@ -210,6 +215,7 @@ impl<const I: usize, const O: usize> NeuralNetwork<I, O> {
     }
 
     /// Returns false if the connection is cyclic.
+    #[cfg_attr(feature = "tracing", instrument)]
     pub fn is_connection_safe(&self, connection: Connection) -> bool {
         let mut visited = HashSet::from([connection.from]);
 
@@ -217,6 +223,7 @@ impl<const I: usize, const O: usize> NeuralNetwork<I, O> {
     }
 
     // TODO maybe parallelize
+    #[cfg_attr(feature = "tracing", instrument)]
     fn dfs(&self, visited: &mut HashSet<NeuronLocation>, current: NeuronLocation) -> bool {
         if !visited.insert(current) {
             return false;
         }
@@ -257,17 +264,17 @@ impl<const I: usize, const O: usize> NeuralNetwork<I, O> {
     /// Get a random valid location within the network.
     pub fn random_location(&self, rng: &mut impl Rng) -> NeuronLocation {
         if self.hidden_layers.is_empty() {
-            return match rng.gen_range(0..2) {
-                0 => NeuronLocation::Input(rng.gen_range(0..self.input_layer.len())),
-                1 => NeuronLocation::Output(rng.gen_range(0..self.output_layer.len())),
+            return match rng.random_range(0..2) {
+                0 => NeuronLocation::Input(rng.random_range(0..self.input_layer.len())),
+                1 => NeuronLocation::Output(rng.random_range(0..self.output_layer.len())),
                 _ => unreachable!(),
             };
         }
 
-        match rng.gen_range(0..3) {
-            0 => NeuronLocation::Input(rng.gen_range(0..self.input_layer.len())),
-            1 => NeuronLocation::Hidden(rng.gen_range(0..self.hidden_layers.len())),
-            2 => NeuronLocation::Output(rng.gen_range(0..self.output_layer.len())),
+        match rng.random_range(0..3) {
+            0 => NeuronLocation::Input(rng.random_range(0..self.input_layer.len())),
+            1 => NeuronLocation::Hidden(rng.random_range(0..self.hidden_layers.len())),
+            2 => NeuronLocation::Output(rng.random_range(0..self.output_layer.len())),
             _ => unreachable!(),
         }
     }
@@ -280,18 +287,18 @@ impl<const I: usize, const O: usize> NeuralNetwork<I, O> {
     ) -> Option<NeuronLocation> {
         let components: Vec<_> = scope.iter().collect();
 
-        match components[rng.gen_range(0..components.len())] {
-            NeuronScope::INPUT => Some(NeuronLocation::Input(rng.gen_range(0..I))),
+        match components[rng.random_range(0..components.len())] {
+            NeuronScope::INPUT => Some(NeuronLocation::Input(rng.random_range(0..I))),
             NeuronScope::HIDDEN => {
                 if self.hidden_layers.is_empty() {
                     None
                 } else {
                     Some(NeuronLocation::Hidden(
-                        rng.gen_range(0..self.hidden_layers.len()),
+                        rng.random_range(0..self.hidden_layers.len()),
                     ))
                 }
             }
-            NeuronScope::OUTPUT => Some(NeuronLocation::Output(rng.gen_range(0..O))),
+            NeuronScope::OUTPUT => Some(NeuronLocation::Output(rng.random_range(0..O))),
             _ => unreachable!(),
         }
     }
@@ -316,6 +323,7 @@ impl<const I: usize, const O: usize> NeuralNetwork<I, O> {
 
     /// Remove a connection and any hanging neurons caused by the deletion.
     /// Returns whether a hanging neuron (i.e. a neuron with no inputs) was removed.
+    #[cfg_attr(feature = "tracing", instrument)]
     pub fn remove_connection(&mut self, connection: Connection) -> bool {
         if self.get_neuron(connection.to).input_count == 0 {
             println!("erroneous network: {self:#?}");
@@ -367,6 +375,7 @@ impl<const I: usize, const O: usize> NeuralNetwork<I, O> {
         }
     }
 
+    #[cfg_attr(feature = "tracing", instrument)]
     unsafe fn downshift_connections(&mut self, i: usize) {
         let removed_connections = AtomicUsize::new(0);
         self.input_layer.par_iter_mut().for_each(|n| {
@@ -416,6 +425,7 @@ impl<const I: usize, const O: usize> NeuralNetwork<I, O> {
     /// Recalculates the [`input_count`][`Neuron::input_count`] field for all neurons in the network,
     /// as well as the [`total_connections`][`NeuralNetwork::total_connections`] field on the NeuralNetwork.
     /// Deletes any hidden layer neurons with an [`input_count`][`Neuron::input_count`] of 0.
+    #[cfg_attr(feature = "tracing", instrument)]
     pub fn recalculate_connections(&mut self) {
         // TODO optimization/parallelization.
         unsafe { self.clear_input_counts() };
@@ -460,17 +470,17 @@ impl<const I: usize, const O: usize> NeuralNetwork<I, O> {
     pub fn mutate_weights(&mut self, rate: f32) {
         self.map_weights(|w| {
             // TODO maybe `Send`able rng.
-            let mut rng = rand::thread_rng();
+            let mut rng = rand::rng();
 
-            if rng.gen::<f32>() <= rate {
-                *w += rng.gen_range(-rate..rate);
+            if rng.random::<f32>() <= rate {
+                *w += rng.random_range(-rate..rate);
             }
         });
     }
 
     /// Creates a random valid connection, if one can be made.
     pub fn add_random_connection(&mut self, rng: &mut impl Rng) -> Option<(Connection, f32)> {
-        let weight = rng.gen::<f32>();
+        let weight = rng.random::<f32>();
 
         // TODO make this not look nested and gross
         if let Some(from) = self.random_location_in_scope(rng, !NeuronScope::OUTPUT) {
@@ -505,6 +515,7 @@ impl<const I: usize, const O: usize> NeuralNetwork<I, O> {
     }
 
     /// Splits a random connection in the network, if there are any.
+    #[cfg_attr(feature = "tracing", instrument)]
     pub fn split_random_connection(&mut self, rng: &mut impl Rng) -> bool {
         if let Some((conn, _)) = self.random_connection(rng) {
             self.split_connection(conn, rng);
@@ -516,24 +527,26 @@ impl<const I: usize, const O: usize> NeuralNetwork<I, O> {
 }
 
 impl<const I: usize, const O: usize> RandomlyMutable for NeuralNetwork<I, O> {
+    #[cfg_attr(feature = "tracing", instrument)]
     fn mutate(&mut self, rate: f32, rng: &mut impl Rng) {
         self.mutate_weights(rate);
 
-        if rng.gen::<f32>() <= rate && self.total_connections > 0 {
+        if rng.random::<f32>() <= rate && self.total_connections > 0 {
             self.split_random_connection(rng);
         }
 
-        if rng.gen::<f32>() <= rate || self.total_connections == 0 {
+        if rng.random::<f32>() <= rate || self.total_connections == 0 {
             self.add_random_connection(rng);
         }
 
-        if rng.gen::<f32>() <= rate && self.total_connections > 0 {
+        if rng.random::<f32>() <= rate && self.total_connections > 0 {
             self.remove_random_connection(rng);
         }
     }
 }
 
 impl<const I: usize, const O: usize> DivisionReproduction for NeuralNetwork<I, O> {
+    #[cfg_attr(feature = "tracing", instrument)]
     fn divide(&self, rng: &mut impl Rng) -> Self {
         let mut child = self.clone();
 
@@ -546,11 +559,12 @@ impl<const I: usize, const O: usize> DivisionReproduction for NeuralNetwork<I, O> {
 }
 
 impl<const I: usize, const O: usize> CrossoverReproduction for NeuralNetwork<I, O> {
-    fn crossover(&self, other: &Self, rng: &mut impl rand::Rng) -> Self {
+    #[cfg_attr(feature = "tracing", instrument)]
+    fn crossover(&self, other: &Self, rng: &mut impl Rng) -> Self {
         let mut output_layer = self.output_layer.clone();
 
         for (i, n) in output_layer.iter_mut().enumerate() {
-            if rng.gen::<f32>() >= 0.5 {
+            if rng.random::<f32>() >= 0.5 {
                 *n = other.output_layer[i].clone();
             }
         }
@@ -573,7 +587,7 @@ impl<const I: usize, const O: usize> CrossoverReproduction for NeuralNetwork<I, O> {
-            if rng.gen::<f32>() >= 0.5 {
+            if rng.random::<f32>() >= 0.5 {
                 if let Some(n) = smaller.hidden_layers.get(i) {
                     let mut n = n.clone();
@@ -600,14 +614,14 @@ impl<const I: usize, const O: usize> CrossoverReproduction for NeuralNetwork<I, O> {
-            if rng.gen::<f32>() >= 0.5 {
+            if rng.random::<f32>() >= 0.5 {
                 *n = other.input_layer[i].clone();
             }
             n.prune_invalid_outputs(hidden_len, O);
         }
 
         // crossover mutation settings just in case.
-        let mutation_settings = if rng.gen::<f32>() >= 0.5 {
+        let mutation_settings = if rng.random::<f32>() >= 0.5 {
             self.mutation_settings.clone()
         } else {
             other.mutation_settings.clone()
         };
@@ -672,6 +686,7 @@ pub struct Neuron {
 
 impl Neuron {
     /// Creates a new neuron with a specified activation function and outputs.
+    #[cfg_attr(feature = "tracing", instrument)]
     pub fn new_with_activation(
         outputs: Vec<(NeuronLocation, f32)>,
         activation_fn: ActivationFn,
@@ -680,13 +695,14 @@ impl Neuron {
         Self {
             input_count: 0,
             outputs,
-            bias: rng.gen(),
+            bias: rng.random(),
             activation_fn,
         }
     }
 
     /// Creates a new neuron with the given output locations.
     /// Chooses a random activation function within the specified scope.
+    #[cfg_attr(feature = "tracing", instrument)]
     pub fn new(
         outputs: Vec<(NeuronLocation, f32)>,
         current_scope: NeuronScope,
@@ -717,7 +733,7 @@ impl Neuron {
 
         Self::new_with_activation(
             outputs,
-            activations.remove(rng.gen_range(0..activations.len())),
+            activations.remove(rng.random_range(0..activations.len())),
             rng,
         )
     }
@@ -771,7 +787,7 @@ impl Neuron {
         while i < self.outputs.len() {
             let o = &mut self.outputs[i];
             if o.0 == loc {
-                o.1 += rng.gen_range(-max..max);
+                o.1 += rng.random_range(-max..max);
                 return Some(o.1);
             }
 
@@ -789,9 +805,10 @@ impl Neuron {
             panic!("cannot sample outputs from a neuron with no outputs");
         }
 
-        self.outputs[rng.gen_range(0..self.outputs.len())]
+        self.outputs[rng.random_range(0..self.outputs.len())]
     }
 
+    #[cfg_attr(feature = "tracing", instrument)]
     pub(crate) fn handle_removed(&mut self, i: usize) -> usize {
         // TODO par_iter_mut or something instead of replace
         let removed = AtomicUsize::new(0);
diff --git a/src/tests.rs b/src/tests.rs
index bf048c7..a0f8a45 100644
--- a/src/tests.rs
+++ b/src/tests.rs
@@ -1,5 +1,4 @@
 use crate::{activation::NeuronScope, *};
-use rand::prelude::*;
 
 // no support for tuple structs derive in genetic-rs yet :(
 #[derive(Debug, Clone, PartialEq)]
@@ -14,13 +13,13 @@ impl RandomlyMutable for Agent {
 }
 
 impl DivisionReproduction for Agent {
-    fn divide(&self, rng: &mut impl rand::Rng) -> Self {
+    fn divide(&self, rng: &mut impl Rng) -> Self {
         Self(self.0.divide(rng))
     }
 }
 
 impl CrossoverReproduction for Agent {
-    fn crossover(&self, other: &Self, rng: &mut impl rand::Rng) -> Self {
+    fn crossover(&self, other: &Self, rng: &mut impl Rng) -> Self {
         Self(self.0.crossover(&other.0, rng))
     }
 }
@@ -35,7 +34,7 @@ struct GuessTheNumber(f32);
 
 impl GuessTheNumber {
     fn new(rng: &mut impl Rng) -> Self {
-        Self(rng.gen())
+        Self(rng.random())
     }
 
     fn guess(&self, n: f32) -> Option {
@@ -53,7 +52,7 @@ impl GuessTheNumber {
 }
 
 fn fitness(agent: &Agent) -> f32 {
-    let mut rng = rand::thread_rng();
+    let mut rng = rand::rng();
 
     let mut fitness = 0.;
 
@@ -124,7 +123,7 @@ fn crossover() {
 #[cfg(feature = "serde")]
 #[test]
 fn serde() {
-    let mut rng = rand::thread_rng();
+    let mut rng = rand::rng();
 
     let net: NeuralNetwork<5, 10> = NeuralNetwork::new(MutationSettings::default(), &mut rng);
     let text = serde_json::to_string(&net).unwrap();
@@ -174,7 +173,7 @@ fn neural_net_cache_sync() {
 }
 
 fn small_test_network() -> NeuralNetwork<1, 1> {
-    let mut rng = rand::thread_rng();
+    let mut rng = rand::rng();
 
     let input = Neuron::new(
         vec![
@@ -221,7 +220,7 @@ fn remove_neuron() {
 
 #[test]
 fn recalculate_connections() {
-    let mut rng = rand::thread_rng();
+    let mut rng = rand::rng();
 
     let input = Neuron::new(
         vec![
@@ -324,7 +323,7 @@ fn remove_connection() {
 
 #[test]
 fn random_location_in_scope() {
-    let mut rng = rand::thread_rng();
+    let mut rng = rand::rng();
     let mut network = small_test_network();
 
     assert_eq!(
@@ -352,7 +351,7 @@ fn random_location_in_scope() {
 
 #[test]
 fn split_connection() {
-    let mut rng = rand::thread_rng();
+    let mut rng = rand::rng();
     let mut network = small_test_network();
 
     network.split_connection(