Skip to content

Commit

Permalink
fix(CI): various minor changes for satisfying CI (#432)
Browse files Browse the repository at this point in the history
- Run cargo fmt
- Cleanup comparator impl
- Make some types public instead of pub(self)
- Disable incorrect clippy lint
- Apply fixes suggested by clippy

<!-- If applicable - remember to add the PR to the EA Rust project (ONLY
IF THERE IS NO LINKED ISSUE) -->

## Description

<!-- Please describe the motivation & changes introduced by this PR -->

## Linked issues

<!-- Please use the "Resolves #<issue_no>" syntax in case this PR should be
linked to an issue -->

## Important implementation details

<!-- if any, optional section -->
  • Loading branch information
kkafar authored Oct 9, 2023
1 parent cd9a3e9 commit 31e393c
Show file tree
Hide file tree
Showing 9 changed files with 70 additions and 55 deletions.
8 changes: 4 additions & 4 deletions examples/jssp/logging.rs
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,6 @@ use log4rs::{
encode::pattern::PatternEncoder,
};


#[derive(serde::Serialize, Clone, Debug)]
pub struct OutputData {
pub solution_string: String,
Expand All @@ -19,7 +18,10 @@ pub struct OutputData {
pub total_time: u128,
}

pub fn init_logging(event_log_files: &HashMap<String, PathBuf>, metadata_log_file: &PathBuf) -> Result<log4rs::Handle, log::SetLoggerError> {
pub fn init_logging(
event_log_files: &HashMap<String, PathBuf>,
metadata_log_file: &PathBuf,
) -> Result<log4rs::Handle, log::SetLoggerError> {
let log_pattern = String::from("[{l}] {m}{n}");
let csv_log_pattern = String::from("{m}{n}");
let csv_encoder = Box::new(PatternEncoder::new(&csv_log_pattern));
Expand All @@ -33,7 +35,6 @@ pub fn init_logging(event_log_files: &HashMap<String, PathBuf>, metadata_log_fil
// Register console appender
cfg_builder = cfg_builder.appender(Appender::builder().build("main", Box::new(stdout_appender)));


// Register appenders & loggers for given events
if !event_log_files.is_empty() {
let csv_encoder = Box::new(PatternEncoder::new(&csv_log_pattern));
Expand Down Expand Up @@ -70,7 +71,6 @@ pub fn init_logging(event_log_files: &HashMap<String, PathBuf>, metadata_log_fil
.build("metadata", log::LevelFilter::Info),
);


let config = cfg_builder
.build(
log4rs::config::Root::builder()
Expand Down
1 change: 1 addition & 0 deletions examples/jssp/problem/individual.rs
Original file line number Diff line number Diff line change
Expand Up @@ -358,6 +358,7 @@ impl PartialEq for JsspIndividual {
impl Eq for JsspIndividual {}

impl PartialOrd for JsspIndividual {
// Orders individuals by fitness alone; all other fields are ignored.
// NOTE(review): the allow silences clippy's demand to delegate to `Ord::cmp`
// even though the type implements `Eq`/(presumably) `Ord` — kept, presumably,
// because the fitness comparison itself may be partial (e.g. float-backed
// and able to return `None`); confirm before removing the attribute.
#[allow(clippy::incorrect_partial_ord_impl_on_ord_type)]
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
self.fitness.partial_cmp(&other.fitness)
}
Expand Down
27 changes: 12 additions & 15 deletions examples/jssp/problem/probe.rs
Original file line number Diff line number Diff line change
Expand Up @@ -115,27 +115,24 @@ impl Probe<JsspIndividual> for JsspProbe {
metadata: &ecrs::ga::GAMetadata,
_population: &[JsspIndividual],
best_individual: &JsspIndividual,
) {
) {
let mut ops = best_individual.operations.clone();
ops.sort_unstable_by(|a, b| {
if a.finish_time == b.finish_time {
if a.duration == 0 && b.duration != 0 {
return Ordering::Greater;
} else if a.duration != 0 && b.duration == 0 {
return Ordering::Less;
} else if a.machine < b.machine {
return Ordering::Less;
} else {
return Ordering::Greater;
}
} else if a.finish_time < b.finish_time {
return Ordering::Less;
if a.finish_time < b.finish_time {
Ordering::Less
} else if a.finish_time > b.finish_time {
Ordering::Greater
} else if a.duration != 0 && b.duration != 0 {
a.machine.cmp(&b.machine)
} else if a.duration != 0 && b.duration == 0 {
Ordering::Less
} else {
return Ordering::Greater;
Ordering::Greater
}
});
let n = ops.len();
let solution_string = ops.into_iter()
let solution_string = ops
.into_iter()
.filter(|op| op.id != 0 && op.id != n + 1)
.map(|op| op.id.to_string())
.join("_");
Expand Down
2 changes: 1 addition & 1 deletion src/aco/tsp/goodness.rs
Original file line number Diff line number Diff line change
Expand Up @@ -78,7 +78,7 @@ mod tests {
let beta = 3.0;
let pheromone = FMatrix::from_vec(2, 2, vec![4.0, 2.0, 8.0, 0.5]);

let goodness = vec![16.0, 32.0, 4096.0, 128.0];
let goodness = [16.0, 32.0, 4096.0, 128.0];

let mut g_op = CanonicalGoodness::new(alpha, beta, heuristic);
for (a, b) in goodness.iter().zip(g_op.apply(&pheromone).iter()) {
Expand Down
68 changes: 41 additions & 27 deletions src/ga/builder.rs
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ type FitnessFn<S, R> = fn(&S) -> R;

/// Error type for internal use
#[derive(Debug, Clone)]
enum ConfigError {
pub enum ConfigError {
MissingParam(String),
MissingOperator(String),
MissingPopulationFactory,
Expand All @@ -53,7 +53,7 @@ impl Error for ConfigError {}
// TODO: We should really consider creating a macro here, so that we
// don't have to write it by hand...
#[derive(Debug, Clone)]
pub(self) struct GAParamsOpt {
pub struct GAParamsOpt {
pub selection_rate: Option<f64>,
pub mutation_rate: Option<f64>,
pub population_size: Option<usize>,
Expand Down Expand Up @@ -88,24 +88,28 @@ impl TryFrom<GAParamsOpt> for GAParams {

fn try_from(params_opt: GAParamsOpt) -> Result<Self, Self::Error> {
let Some(selection_rate) = params_opt.selection_rate else {
return Err(ConfigError::MissingParam("Unspecified selection rate".to_owned()));
};
return Err(ConfigError::MissingParam("Unspecified selection rate".to_owned()));
};

let Some(mutation_rate) = params_opt.mutation_rate else {
return Err(ConfigError::MissingParam("Unspecified mutation rate".to_owned()));
};
return Err(ConfigError::MissingParam("Unspecified mutation rate".to_owned()));
};

let Some(population_size) = params_opt.population_size else {
return Err(ConfigError::MissingParam("Unspecified population size".to_owned()));
};
return Err(ConfigError::MissingParam(
"Unspecified population size".to_owned(),
));
};

let Some(generation_limit) = params_opt.generation_limit else {
return Err(ConfigError::MissingParam("Unspecified generation_limit".to_owned()));
};
return Err(ConfigError::MissingParam(
"Unspecified generation_limit".to_owned(),
));
};

let Some(max_duration) = params_opt.max_duration else {
return Err(ConfigError::MissingParam("Unspecified max duration".to_owned()));
};
return Err(ConfigError::MissingParam("Unspecified max duration".to_owned()));
};

Ok(GAParams {
selection_rate,
Expand All @@ -121,7 +125,7 @@ impl TryFrom<GAParamsOpt> for GAParams {
/// inside `Option` type, so that builders can incrementally fill it up.
// TODO: We should really consider creating a macro here, so that we
// don't have to write it by hand...
pub(self) struct GAConfigOpt<IndividualT, MutOpT, CrossOpT, SelOpT, ReplOpT, PopGenT, FitnessT, ProbeT>
pub struct GAConfigOpt<IndividualT, MutOpT, CrossOpT, SelOpT, ReplOpT, PopGenT, FitnessT, ProbeT>
where
IndividualT: IndividualTrait,
MutOpT: MutationOperator<IndividualT>,
Expand Down Expand Up @@ -192,32 +196,42 @@ where
let params = GAParams::try_from(config_opt.params)?;

let Some(fitness_fn) = config_opt.fitness_fn else {
return Err(ConfigError::MissingOperator("No fitness function specified".to_owned()));
};
return Err(ConfigError::MissingOperator(
"No fitness function specified".to_owned(),
));
};

let Some(mutation_operator) = config_opt.mutation_operator else {
return Err(ConfigError::MissingOperator("No mutation operator specified".to_owned()));
};
return Err(ConfigError::MissingOperator(
"No mutation operator specified".to_owned(),
));
};

let Some(crossover_operator) = config_opt.crossover_operator else {
return Err(ConfigError::MissingOperator("No crossover operator specified".to_owned()));
};
return Err(ConfigError::MissingOperator(
"No crossover operator specified".to_owned(),
));
};

let Some(selection_operator) = config_opt.selection_operator else {
return Err(ConfigError::MissingOperator("No selection operator specified".to_owned()));
};
return Err(ConfigError::MissingOperator(
"No selection operator specified".to_owned(),
));
};

let Some(replacement_operator) = config_opt.replacement_operator else {
return Err(ConfigError::MissingOperator("No replacement operator specified".to_owned()));
};
return Err(ConfigError::MissingOperator(
"No replacement operator specified".to_owned(),
));
};

let Some(population_factory) = config_opt.population_factory else {
return Err(ConfigError::MissingPopulationFactory);
};
return Err(ConfigError::MissingPopulationFactory);
};

let Some(probe) = config_opt.probe else {
return Err(ConfigError::NoProbe);
};
return Err(ConfigError::NoProbe);
};

Ok(GAConfig {
params,
Expand Down
1 change: 1 addition & 0 deletions src/ga/individual.rs
Original file line number Diff line number Diff line change
Expand Up @@ -146,6 +146,7 @@ impl<T: Chromosome> PartialEq<Self> for Individual<T> {
impl<T: Chromosome> Eq for Individual<T> {}

impl<T: Chromosome> PartialOrd<Self> for Individual<T> {
// Compares two individuals solely by their fitness value.
// NOTE(review): clippy's `incorrect_partial_ord_impl_on_ord_type` is
// deliberately allowed here rather than rewriting as `Some(self.cmp(other))`;
// the fitness type's `partial_cmp` may legitimately yield `None` (e.g. for
// NaN-capable fitness), so delegating to a total order could change
// behavior — verify before "fixing" this to satisfy the lint.
#[allow(clippy::incorrect_partial_ord_impl_on_ord_type)]
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
self.fitness.partial_cmp(&other.fitness)
}
Expand Down
2 changes: 1 addition & 1 deletion src/ga/operators/crossover.rs
Original file line number Diff line number Diff line change
Expand Up @@ -933,7 +933,7 @@ mod test {
child
.chromosome()
.iter()
.zip(vec![1, 3, 2, 4, 6, 5].iter())
.zip([1, 3, 2, 4, 6, 5].iter())
.for_each(|(x, x_expected)| assert_eq!(x, x_expected))
}

Expand Down
8 changes: 5 additions & 3 deletions src/ga/operators/selection.rs
Original file line number Diff line number Diff line change
Expand Up @@ -583,9 +583,11 @@ where
weights.push((-idv.fitness() / temp).exp())
}

let Ok(indices) = rand::seq::index::sample_weighted(&mut self.rng, population.len(), |i| weights[i], count) else {
panic!("Some error occured while generating indices. This is most likely an library implementation error. Please file an issue: https://github.com/kkafar/evolutionary-algorithms");
};
let Ok(indices) =
rand::seq::index::sample_weighted(&mut self.rng, population.len(), |i| weights[i], count)
else {
panic!("Some error occured while generating indices. This is most likely an library implementation error. Please file an issue: https://github.com/kkafar/evolutionary-algorithms");
};

for i in indices {
selected.push(&population[i]);
Expand Down
8 changes: 4 additions & 4 deletions src/test_functions/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -120,7 +120,7 @@ pub fn brad(x: &Vec<f64>) -> f64 {
let x1 = x[0];
let x2 = x[1];
let x3 = x[2];
let y = vec![
let y = [
0.14, 0.18, 0.22, 0.25, 0.29, 0.32, 0.35, 0.39, 0.37, 0.58, 0.73, 0.96, 1.34, 2.10, 4.39,
];
let mut res = 0_f64;
Expand Down Expand Up @@ -1252,7 +1252,7 @@ pub fn hartmann_3d(x: &Vec<f64>) -> f64 {
x.len(),
"Hartmann 3-dimensional function takes only a three dimensional vector as a parameter."
);
let alfa = vec![1.0, 1.2, 3.0, 3.2];
let alfa = [1.0, 1.2, 3.0, 3.2];
let a = vec![
vec![3.0, 10.0, 30.0],
vec![0.1, 10.0, 35.0],
Expand Down Expand Up @@ -1285,7 +1285,7 @@ pub fn hartmann_4d(x: &Vec<f64>) -> f64 {
x.len(),
"Hartmann 4-dimensional function takes only a three dimensional vector as a parameter."
);
let alfa = vec![1.0, 1.2, 3.0, 3.2];
let alfa = [1.0, 1.2, 3.0, 3.2];
let a = vec![
vec![10.0, 3.0, 17.0, 3.5, 1.7, 8.0],
vec![0.05, 10.0, 17.0, 0.1, 8.0, 14.0],
Expand Down Expand Up @@ -1320,7 +1320,7 @@ pub fn hartmann_6d(x: &Vec<f64>) -> f64 {
x.len(),
"Hartmann 6-dimensional function takes only a six dimensional vector as a parameter."
);
let alfa = vec![1.0, 1.2, 3.0, 3.2];
let alfa = [1.0, 1.2, 3.0, 3.2];
let a = vec![
vec![10.0, 3.0, 17.0, 3.5, 1.7, 8.0],
vec![0.05, 10.0, 17.0, 0.1, 8.0, 14.0],
Expand Down

0 comments on commit 31e393c

Please sign in to comment.