diff --git a/CHANGELOG.md b/CHANGELOG.md
index 9b83e062d..85827b7f3 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -13,6 +13,7 @@ are already published. So, I stick to it for now.
 * update rust version
 * apply some minor code refactorings
 * apply some performance optimizations
+* refactor dbscan clustering api
 
 ## [1.24.0] 2024-07-13
 
diff --git a/examples/data/config/config.full.json b/examples/data/config/config.full.json
index b62bb19ad..6aaf635cd 100644
--- a/examples/data/config/config.full.json
+++ b/examples/data/config/config.full.json
@@ -179,8 +179,7 @@
             "probability": 1,
             "type": "cluster",
             "min": 8,
-            "max": 16,
-            "minItems": 4
+            "max": 16
           }
         ]
       },
diff --git a/vrp-cli/src/extensions/solve/config.rs b/vrp-cli/src/extensions/solve/config.rs
index 7295f9e4d..b75f85ae5 100644
--- a/vrp-cli/src/extensions/solve/config.rs
+++ b/vrp-cli/src/extensions/solve/config.rs
@@ -265,7 +265,7 @@ pub enum RuinMethod {
     /// Clustered jobs removal method.
    #[serde(rename(deserialize = "cluster"))]
    #[serde(rename_all = "camelCase")]
-    Cluster { probability: Float, min: usize, max: usize, min_items: usize },
+    Cluster { probability: Float, min: usize, max: usize },
 }
 
 /// Specifies recreate methods with their probability weight and specific parameters.
@@ -685,9 +685,9 @@ fn create_ruin_method(
         RuinMethod::WorstJob { probability, min, max, skip: worst_skip } => {
             (Arc::new(WorstJobRemoval::new(*worst_skip, get_limits(*min, *max))), *probability)
         }
-        RuinMethod::Cluster { probability, min, max, min_items } => (
+        RuinMethod::Cluster { probability, min, max } => (
             // TODO: remove unwrap
-            Arc::new(ClusterRemoval::new(problem.clone(), environment, *min_items, get_limits(*min, *max)).unwrap()),
+            Arc::new(ClusterRemoval::new(problem.clone(), environment, get_limits(*min, *max)).unwrap()),
             *probability,
         ),
         RuinMethod::CloseRoute { probability } => (Arc::new(CloseRouteRemoval::new(limits)), *probability),
diff --git a/vrp-core/src/models/problem/jobs.rs b/vrp-core/src/models/problem/jobs.rs
index 28aa40e07..f804c98ee 100644
--- a/vrp-core/src/models/problem/jobs.rs
+++ b/vrp-core/src/models/problem/jobs.rs
@@ -234,7 +234,7 @@ const MAX_NEIGHBOURS: usize = 256;
 pub struct Jobs {
     jobs: Vec<Job>,
     index: HashMap<usize, JobIndex>,
-    _clusters: Vec<HashSet<Job>>,
+    clusters: Vec<HashSet<Job>>,
 }
 
 impl Jobs {
@@ -246,10 +246,10 @@
         logger: &InfoLogger,
     ) -> GenericResult<Jobs> {
         let index = create_index(fleet, jobs.clone(), transport, logger);
-        let _clusters =
+        let clusters =
             create_job_clusters(&jobs, fleet, Some(3), None, |profile, job| neighbors(&index, profile, job))?;
 
-        Ok(Jobs { jobs, index, _clusters })
+        Ok(Jobs { jobs, index, clusters })
     }
 
     /// Returns all jobs in the original order as a slice.
@@ -263,6 +263,11 @@ impl Jobs {
         neighbors(&self.index, profile, job)
     }
 
+    /// Returns job clusters based on their neighborhood approximation.
+    pub fn clusters(&self) -> &[HashSet<Job>] {
+        &self.clusters
+    }
+
     /// Returns job rank as relative cost from any vehicle's start position.
     /// Returns `None` if a job is not found in index.
     pub fn rank(&self, profile: &Profile, job: &Job) -> Option<Cost> {
diff --git a/vrp-core/src/solver/search/ruin/cluster_removal.rs b/vrp-core/src/solver/search/ruin/cluster_removal.rs
index 05d1a3bd9..523c87cb2 100644
--- a/vrp-core/src/solver/search/ruin/cluster_removal.rs
+++ b/vrp-core/src/solver/search/ruin/cluster_removal.rs
@@ -3,9 +3,7 @@
 mod cluster_removal_test;
 
 use super::*;
-use crate::construction::clustering::dbscan::create_job_clusters;
 use crate::construction::heuristics::InsertionContext;
-use crate::models::common::Timestamp;
 use crate::models::problem::Job;
 use crate::models::Problem;
 use crate::solver::search::{get_route_jobs, JobRemovalTracker, TabuList};
@@ -21,18 +19,14 @@ pub struct ClusterRemoval {
 
 impl ClusterRemoval {
     /// Creates a new instance of `ClusterRemoval`.
-    pub fn new(
-        problem: Arc<Problem>,
-        environment: Arc<Environment>,
-        min_items: usize,
-        limits: RemovalLimits,
-    ) -> GenericResult<Self> {
-        let clusters =
-            create_job_clusters(problem.jobs.all(), problem.fleet.as_ref(), Some(min_items), None, |profile, job| {
-                problem.jobs.neighbors(profile, job, Timestamp::default())
-            })?;
-        let mut clusters =
-            clusters.into_iter().map(|cluster| cluster.into_iter().collect::<Vec<_>>()).collect::<Vec<_>>();
+    pub fn new(problem: Arc<Problem>, environment: Arc<Environment>, limits: RemovalLimits) -> GenericResult<Self> {
+        let mut clusters = problem
+            .jobs
+            .clusters()
+            .iter()
+            .cloned()
+            .map(|cluster| cluster.into_iter().collect::<Vec<_>>())
+            .collect::<Vec<_>>();
 
         clusters.shuffle(&mut environment.random.get_rng());
 
@@ -42,7 +36,7 @@
     /// Creates a new instance of `ClusterRemoval` with default parameters.
     pub fn new_with_defaults(problem: Arc<Problem>, environment: Arc<Environment>) -> GenericResult<Self> {
         let limits = RemovalLimits::new(problem.as_ref());
-        Self::new(problem, environment, 3, limits)
+        Self::new(problem, environment, limits)
     }
 }
 
diff --git a/vrp-core/tests/unit/solver/search/ruin/cluster_removal_test.rs b/vrp-core/tests/unit/solver/search/ruin/cluster_removal_test.rs
index 4d006ae0f..f853771b8 100644
--- a/vrp-core/tests/unit/solver/search/ruin/cluster_removal_test.rs
+++ b/vrp-core/tests/unit/solver/search/ruin/cluster_removal_test.rs
@@ -29,22 +29,22 @@ fn can_handle_empty_problem() {
     let problem = Arc::new(ProblemBuilder::default().build());
     let limits = RemovalLimits::new(&problem);
 
-    let removal = ClusterRemoval::new(problem, Arc::new(Environment::default()), 3, limits).unwrap();
+    let removal = ClusterRemoval::new(problem, Arc::new(Environment::default()), limits).unwrap();
 
     assert!(removal.clusters.is_empty());
 }
 
-parameterized_test! {can_ruin_jobs, (limit, cluster_size, expected), {
-    can_ruin_jobs_impl(limit, cluster_size, expected);
+parameterized_test! {can_ruin_jobs, (limit, expected), {
+    can_ruin_jobs_impl(limit, expected);
 }}
 
 can_ruin_jobs! {
-    case_01: (4, 3, 4),
-    case_02: (5, 3, 4),
-    case_03: (8, 3, 4),
+    case_01: (4, 4),
+    case_02: (5, 4),
+    case_03: (8, 4),
 }
 
-fn can_ruin_jobs_impl(limit: usize, min_items: usize, expected: usize) {
+fn can_ruin_jobs_impl(limit: usize, expected: usize) {
     let limits = RemovalLimits { removed_activities_range: limit..limit, affected_routes_range: 8..8 };
     let (problem, solution) = generate_matrix_routes(
         8,
@@ -59,7 +59,7 @@ fn can_ruin_jobs_impl(limit: usize, min_items: usize, expected: usize) {
     let environment = Arc::new(Environment::default());
     let insertion_ctx = InsertionContext::new_from_solution(problem.clone(), (solution, None), environment.clone());
 
-    let insertion_ctx = ClusterRemoval::new(problem, environment, min_items, limits)
+    let insertion_ctx = ClusterRemoval::new(problem, environment, limits)
         .expect("cannot create clusters")
         .run(&create_default_refinement_ctx(insertion_ctx.problem.clone()), insertion_ctx);
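Below is a minimal usage sketch (not part of the diff) of the refactored `ClusterRemoval` API: the `min_items` argument is gone because DBSCAN job clusters are now computed once inside `Jobs` and exposed via `Jobs::clusters()`. The helper name and the `vrp_core` import paths are assumptions for illustration; only the calls shown in the diff above come from the source.

```rust
// Hypothetical helper illustrating the refactored API; import paths are assumptions.
use std::sync::Arc;

use vrp_core::models::Problem;
use vrp_core::prelude::{Environment, GenericResult};
use vrp_core::solver::search::{ClusterRemoval, RemovalLimits};

/// Builds the cluster ruin operator the same way the updated call sites above do.
fn build_cluster_removal(problem: Arc<Problem>) -> GenericResult<ClusterRemoval> {
    let environment = Arc::new(Environment::default());
    // Default removal limits derived from the problem, mirroring `new_with_defaults`.
    let limits = RemovalLimits::new(problem.as_ref());

    // Before: ClusterRemoval::new(problem, environment, /* min_items: */ 3, limits)
    // After:  clustering is precomputed in `Jobs`, so only the removal limits are passed.
    ClusterRemoval::new(problem, environment, limits)
}
```

The clustering parameter that previously lived on `ClusterRemoval` (the removed `min_items`) is now fixed when the clusters are built in `Jobs::new` (the `Some(3)` argument to `create_job_clusters` in the hunk above).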