Commit

Merge branch 'release/0.13.0'
lucidfrontier45 committed Jul 26, 2024
2 parents 0c81297 + e430d3e commit 7d87715
Showing 19 changed files with 73 additions and 39 deletions.
2 changes: 1 addition & 1 deletion Cargo.toml
@@ -7,7 +7,7 @@ categories = ["algorithms"]
repository = "https://github.com/lucidfrontier45/localsearch"
license-file = "LICENSE"
readme = "README.md"
version = "0.12.1"
version = "0.13.0"
edition = "2021"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
10 changes: 8 additions & 2 deletions README.md
@@ -108,11 +108,17 @@ fn main() {
pb.set_position(op.iter as u64);
};

-let res = opt.optimize(&model, None, n_iter, time_limit, Some(&callback), ());
+let res = opt.run(&model, None, n_iter, time_limit, Some(&callback), ());
pb.finish();
dbg!(res);
}

```

-Further details can be found at API document, example and test codes.
+In addition, you can also add `preprocess_initial_solution` and `postprocess_final_solution` to your model.
+`preprocess_initial_solution` is called before the optimization iterations start.
+If no initial solution is supplied, `generate_initial_solution` is called and the generated solution is then passed to `preprocess_initial_solution`.
+`postprocess_final_solution` is called after the optimization iterations finish.
+
+Further details can be found at API document, example and test codes.
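
As an illustration of the hook pattern described above, here is a minimal standalone sketch. It does not use the crate's actual `OptModel` trait; the simplified `Model` trait, the `TspLikeModel` type, and the `rotate_to_start_at_zero` helper are hypothetical stand-ins that mirror the identity-default `preprocess_solution`/`postprocess_solution` methods added to `src/model.rs` in this commit.

```rust
/// Simplified stand-in for the crate's model trait (hypothetical, for illustration only).
trait Model {
    type Solution;

    fn initial_solution(&self) -> Self::Solution;

    /// Called on the starting solution before optimization; identity by default.
    fn preprocess_solution(&self, solution: Self::Solution) -> Self::Solution {
        solution
    }

    /// Called on the best solution after optimization; identity by default.
    fn postprocess_solution(&self, solution: Self::Solution) -> Self::Solution {
        solution
    }
}

/// Toy model whose solutions are city orderings; it overrides both hooks to
/// rotate every tour so that it starts at city 0.
struct TspLikeModel {
    n_cities: usize,
}

fn rotate_to_start_at_zero(tour: &mut Vec<usize>) {
    if let Some(pos) = tour.iter().position(|&city| city == 0) {
        tour.rotate_left(pos);
    }
}

impl Model for TspLikeModel {
    type Solution = Vec<usize>;

    fn initial_solution(&self) -> Self::Solution {
        (0..self.n_cities).rev().collect()
    }

    fn preprocess_solution(&self, mut solution: Self::Solution) -> Self::Solution {
        // Canonicalize the supplied or generated starting tour.
        rotate_to_start_at_zero(&mut solution);
        solution
    }

    fn postprocess_solution(&self, mut solution: Self::Solution) -> Self::Solution {
        // Report the final tour in the same canonical form.
        rotate_to_start_at_zero(&mut solution);
        solution
    }
}

fn main() {
    let model = TspLikeModel { n_cities: 4 };
    let start = model.preprocess_solution(model.initial_solution());
    println!("{start:?}"); // [0, 3, 2, 1]
    let finished = model.postprocess_solution(vec![2, 1, 0, 3]);
    println!("{finished:?}"); // [0, 3, 2, 1]
}
```

Because the defaults are no-ops, existing models keep working unchanged; a model only overrides a hook when it needs to canonicalize or repair solutions.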
2 changes: 1 addition & 1 deletion examples/quadratic_model.rs
@@ -89,7 +89,7 @@ fn main() {
pb.set_position(op.iter as u64);
};

-let res = opt.optimize(&model, None, n_iter, time_limit, Some(&callback), ());
+let res = opt.run(&model, None, n_iter, time_limit, Some(&callback), ());
pb.finish();
dbg!(res);
}
10 changes: 5 additions & 5 deletions examples/tsp_model.rs
@@ -264,7 +264,7 @@ fn main() {

println!("run hill climbing");
let optimizer = HillClimbingOptimizer::new(1000, 200);
-let (final_solution, final_score, _) = optimizer.optimize(
+let (final_solution, final_score, _) = optimizer.run(
&tsp_model,
initial_solution.clone(),
n_iter,
@@ -283,7 +283,7 @@ fn main() {
println!("run tabu search");
let tabu_list = DequeTabuList::new(20);
let optimizer = TabuSearchOptimizer::new(patience, 200, 10);
-let (final_solution, final_score, _) = optimizer.optimize(
+let (final_solution, final_score, _) = optimizer.run(
&tsp_model,
initial_solution.clone(),
n_iter,
@@ -301,7 +301,7 @@ fn main() {

println!("run annealing");
let optimizer = SimulatedAnnealingOptimizer::new(patience, 200);
-let (final_solution, final_score, _) = optimizer.optimize(
+let (final_solution, final_score, _) = optimizer.run(
&tsp_model,
initial_solution.clone(),
n_iter,
@@ -319,7 +319,7 @@ fn main() {

println!("run epsilon greedy");
let optimizer = EpsilonGreedyOptimizer::new(patience, 200, 10, 0.3);
-let (final_solution, final_score, _) = optimizer.optimize(
+let (final_solution, final_score, _) = optimizer.run(
&tsp_model,
initial_solution.clone(),
n_iter,
@@ -337,7 +337,7 @@ fn main() {

println!("run relative annealing");
let optimizer = RelativeAnnealingOptimizer::new(patience, 200, 10, 1e1);
-let (final_solution, final_score, _) = optimizer.optimize(
+let (final_solution, final_score, _) = optimizer.run(
&tsp_model,
initial_solution,
n_iter,
10 changes: 10 additions & 0 deletions src/model.rs
@@ -28,4 +28,14 @@ pub trait OptModel: Sync + Send {

/// Evaluate the given solution
fn evaluate_solution(&self, solution: &Self::SolutionType) -> Self::ScoreType;

/// Preprocess the solution
fn preprocess_solution(&self, solution: Self::SolutionType) -> Self::SolutionType {
solution
}

/// Postprocess the solution
fn postprocess_solution(&self, solution: Self::SolutionType) -> Self::SolutionType {
solution
}
}
33 changes: 32 additions & 1 deletion src/optim/base.rs
@@ -15,7 +15,7 @@ pub trait LocalSearchOptimizer<M: OptModel> {
fn optimize<F>(
&self,
model: &M,
-initial_solution: Option<M::SolutionType>,
+initial_solution: M::SolutionType,
n_iter: usize,
time_limit: Duration,
callback: Option<&F>,
@@ -24,6 +24,37 @@ pub trait LocalSearchOptimizer<M: OptModel> {
where
M: OptModel,
F: OptCallbackFn<M::SolutionType, M::ScoreType>;

/// Start optimization
fn run<F>(
&self,
model: &M,
initial_solution: Option<M::SolutionType>,
n_iter: usize,
time_limit: Duration,
callback: Option<&F>,
extra_in: Self::ExtraIn,
) -> (M::SolutionType, M::ScoreType, Self::ExtraOut)
where
M: OptModel,
F: OptCallbackFn<M::SolutionType, M::ScoreType>,
{
let initial_solution = model.preprocess_solution(initial_solution.unwrap_or_else(|| {
let mut rng = rand::thread_rng();
model.generate_random_solution(&mut rng).unwrap()
}));

let (solution, score, extra) = self.optimize(
model,
initial_solution,
n_iter,
time_limit,
callback,
extra_in,
);

(model.postprocess_solution(solution), score, extra)
}
}

trait_set! {
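
The new `run` method above is a provided trait method: it resolves the optional initial solution, applies the model's preprocessing, delegates to the required `optimize`, and postprocesses the result. The following standalone sketch mirrors that wrapper pattern with simplified, hypothetical traits (`Model`, `Optimizer`) and toy types (`Quadratic`, `StepSearch`); it is not the crate's real `OptModel`/`LocalSearchOptimizer` API, and it avoids the `rand` dependency by generating a fixed starting point.

```rust
trait Model {
    type Solution: Clone;
    type Score: PartialOrd;

    fn generate_initial_solution(&self) -> Self::Solution;
    fn evaluate(&self, solution: &Self::Solution) -> Self::Score;

    /// Identity by default, like `preprocess_solution` in `src/model.rs`.
    fn preprocess_solution(&self, solution: Self::Solution) -> Self::Solution {
        solution
    }

    /// Identity by default, like `postprocess_solution` in `src/model.rs`.
    fn postprocess_solution(&self, solution: Self::Solution) -> Self::Solution {
        solution
    }
}

trait Optimizer<M: Model> {
    /// Required method: the core search loop always starts from a concrete solution.
    fn optimize(&self, model: &M, initial: M::Solution, n_iter: usize) -> (M::Solution, M::Score);

    /// Provided method: handles the optional initial solution and the model's
    /// pre/post hooks, then delegates to `optimize`.
    fn run(&self, model: &M, initial: Option<M::Solution>, n_iter: usize) -> (M::Solution, M::Score) {
        let initial =
            model.preprocess_solution(initial.unwrap_or_else(|| model.generate_initial_solution()));
        let (solution, score) = self.optimize(model, initial, n_iter);
        (model.postprocess_solution(solution), score)
    }
}

/// Toy model: minimize (x - 3)^2 over integers.
struct Quadratic;

impl Model for Quadratic {
    type Solution = i64;
    type Score = i64;

    fn generate_initial_solution(&self) -> i64 {
        10
    }

    fn evaluate(&self, x: &i64) -> i64 {
        (x - 3) * (x - 3)
    }
}

/// Toy optimizer: greedily tries +/-1 moves.
struct StepSearch;

impl Optimizer<Quadratic> for StepSearch {
    fn optimize(&self, model: &Quadratic, initial: i64, n_iter: usize) -> (i64, i64) {
        let mut best = initial;
        for _ in 0..n_iter {
            for candidate in [best - 1, best + 1] {
                if model.evaluate(&candidate) < model.evaluate(&best) {
                    best = candidate;
                }
            }
        }
        (best, model.evaluate(&best))
    }
}

fn main() {
    // Passing `None` exercises the generated-initial-solution path, mirroring how the
    // updated examples and tests call `opt.run(&model, None, ...)`.
    let (x, score) = StepSearch.run(&Quadratic, None, 20);
    println!("best x = {x}, score = {score}"); // best x = 3, score = 0
}
```

Callers migrate by switching `opt.optimize(...)` to `opt.run(...)`, as the examples and tests changed in this commit do.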
2 changes: 1 addition & 1 deletion src/optim/epsilon_greedy.rs
@@ -51,7 +51,7 @@ impl<M: OptModel> LocalSearchOptimizer<M> for EpsilonGreedyOptimizer {
fn optimize<F>(
&self,
model: &M,
-initial_solution: Option<M::SolutionType>,
+initial_solution: M::SolutionType,
n_iter: usize,
time_limit: Duration,
callback: Option<&F>,
8 changes: 2 additions & 6 deletions src/optim/generic.rs
@@ -67,7 +67,7 @@ where
fn optimize<F>(
&self,
model: &M,
-initial_solution: Option<M::SolutionType>,
+initial_solution: M::SolutionType,
n_iter: usize,
time_limit: Duration,
callback: Option<&F>,
@@ -78,11 +78,7 @@
{
let start_time = Instant::now();
let mut rng = rand::thread_rng();
-let mut current_solution = if let Some(s) = initial_solution {
-s
-} else {
-model.generate_random_solution(&mut rng).unwrap()
-};
+let mut current_solution = initial_solution;
let mut current_score = model.evaluate_solution(&current_solution);
let best_solution = Rc::new(RefCell::new(current_solution.clone()));
let mut best_score = current_score;
2 changes: 1 addition & 1 deletion src/optim/hill_climbing.rs
@@ -32,7 +32,7 @@ impl<M: OptModel> LocalSearchOptimizer<M> for HillClimbingOptimizer {
fn optimize<F>(
&self,
model: &M,
-initial_solution: Option<M::SolutionType>,
+initial_solution: M::SolutionType,
n_iter: usize,
time_limit: Duration,
callback: Option<&F>,
2 changes: 1 addition & 1 deletion src/optim/logistic_annealing.rs
@@ -56,7 +56,7 @@ impl<M: OptModel<ScoreType = NotNan<f64>>> LocalSearchOptimizer<M> for LogisticA
fn optimize<F>(
&self,
model: &M,
-initial_solution: Option<M::SolutionType>,
+initial_solution: M::SolutionType,
n_iter: usize,
time_limit: Duration,
callback: Option<&F>,
2 changes: 1 addition & 1 deletion src/optim/relative_annealing.rs
@@ -57,7 +57,7 @@ impl<M: OptModel<ScoreType = NotNan<f64>>> LocalSearchOptimizer<M> for RelativeA
fn optimize<F>(
&self,
model: &M,
-initial_solution: Option<M::SolutionType>,
+initial_solution: M::SolutionType,
n_iter: usize,
time_limit: Duration,
callback: Option<&F>,
8 changes: 2 additions & 6 deletions src/optim/simulated_annealing.rs
@@ -44,7 +44,7 @@ impl<M: OptModel<ScoreType = NotNan<f64>>> LocalSearchOptimizer<M> for Simulated
fn optimize<F>(
&self,
model: &M,
-initial_solution: Option<M::SolutionType>,
+initial_solution: M::SolutionType,
n_iter: usize,
time_limit: Duration,
callback: Option<&F>,
@@ -56,11 +56,7 @@ impl<M: OptModel<ScoreType = NotNan<f64>>> LocalSearchOptimizer<M> for Simulated
let start_time = Instant::now();
let (max_temperature, min_temperature) = max_min_temperatures;
let mut rng = rand::thread_rng();
-let mut current_solution = if let Some(s) = initial_solution {
-s
-} else {
-model.generate_random_solution(&mut rng).unwrap()
-};
+let mut current_solution = initial_solution;
let mut current_score = model.evaluate_solution(&current_solution);
let best_solution = Rc::new(RefCell::new(current_solution.clone()));
let mut best_score = current_score;
9 changes: 2 additions & 7 deletions src/optim/tabu_search.rs
@@ -89,7 +89,7 @@ impl<M: OptModel, T: TabuList<Item = (M::SolutionType, M::TransitionType)>> Loca
fn optimize<F>(
&self,
model: &M,
-initial_solution: Option<M::SolutionType>,
+initial_solution: M::SolutionType,
n_iter: usize,
time_limit: Duration,
callback: Option<&F>,
@@ -99,12 +99,7 @@
F: OptCallbackFn<M::SolutionType, M::ScoreType>,
{
let start_time = Instant::now();
-let mut rng = rand::thread_rng();
-let mut current_solution = if let Some(s) = initial_solution {
-s
-} else {
-model.generate_random_solution(&mut rng).unwrap()
-};
+let mut current_solution = initial_solution;
let mut current_score = model.evaluate_solution(&current_solution);
let best_solution = Rc::new(RefCell::new(current_solution.clone()));
let mut best_score = current_score;
2 changes: 1 addition & 1 deletion src/tests/test_epsilon_greedy.rs
@@ -11,7 +11,7 @@ fn test() {
let model = QuadraticModel::new(3, vec![2.0, 0.0, -3.5], (-10.0, 10.0));
let opt = EpsilonGreedyOptimizer::new(1000, 10, 200, 0.1);
let null_closure = None::<&fn(_)>;
-let (final_solution, final_score, _) = opt.optimize(
+let (final_solution, final_score, _) = opt.run(
&model,
None,
10000,
2 changes: 1 addition & 1 deletion src/tests/test_hill_climbing.rs
@@ -11,7 +11,7 @@ fn test() {
let model = QuadraticModel::new(3, vec![2.0, 0.0, -3.5], (-10.0, 10.0));
let opt = HillClimbingOptimizer::new(1000, 10);
let null_closure = None::<&fn(_)>;
-let (final_solution, final_score, _) = opt.optimize(
+let (final_solution, final_score, _) = opt.run(
&model,
None,
10000,
2 changes: 1 addition & 1 deletion src/tests/test_logistic_annealing.rs
@@ -11,7 +11,7 @@ fn test() {
let model = QuadraticModel::new(3, vec![2.0, 0.0, -3.5], (-10.0, 10.0));
let opt = LogisticAnnealingOptimizer::new(5000, 10, 200, 1e1);
let null_closure = None::<&fn(_)>;
-let (final_solution, final_score, _) = opt.optimize(
+let (final_solution, final_score, _) = opt.run(
&model,
None,
10000,
2 changes: 1 addition & 1 deletion src/tests/test_relative_annealing.rs
@@ -11,7 +11,7 @@ fn test() {
let model = QuadraticModel::new(3, vec![2.0, 0.0, -3.5], (-10.0, 10.0));
let opt = RelativeAnnealingOptimizer::new(5000, 10, 200, 1e1);
let null_closure = None::<&fn(_)>;
-let (final_solution, final_score, _) = opt.optimize(
+let (final_solution, final_score, _) = opt.run(
&model,
None,
10000,
2 changes: 1 addition & 1 deletion src/tests/test_simulated_annealing.rs
@@ -11,7 +11,7 @@ fn test() {
let model = QuadraticModel::new(3, vec![2.0, 0.0, -3.5], (-10.0, 10.0));
let opt = SimulatedAnnealingOptimizer::new(10000, 10);
let null_closure = None::<&fn(_)>;
-let (final_solution, final_score, _) = opt.optimize(
+let (final_solution, final_score, _) = opt.run(
&model,
None,
5000,
2 changes: 1 addition & 1 deletion src/tests/test_tabu_search.rs
@@ -42,7 +42,7 @@ fn test() {
let opt = TabuSearchOptimizer::new(1000, 25, 5);
let tabu_list = MyTabuList::new(10);
let null_closure = None::<&fn(_)>;
-let (final_solution, final_score, _) = opt.optimize(
+let (final_solution, final_score, _) = opt.run(
&model,
None,
10000,
