
Expose clustering api from Jobs
reinterpretcat committed Oct 1, 2024
1 parent 036df17 commit ea87a60
Showing 6 changed files with 30 additions and 31 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -13,6 +13,7 @@ are already published. So, I stick to it for now.
* update rust version
* apply some minor code refactorings
* apply some performance optimizations
+* refactor dbscan clustering api


## [1.24.0] 2024-07-13
3 changes: 1 addition & 2 deletions examples/data/config/config.full.json
@@ -179,8 +179,7 @@
"probability": 1,
"type": "cluster",
"min": 8,
"max": 16,
"minItems": 4
"max": 16
}
]
},
6 changes: 3 additions & 3 deletions vrp-cli/src/extensions/solve/config.rs
@@ -265,7 +265,7 @@ pub enum RuinMethod {
/// Clustered jobs removal method.
#[serde(rename(deserialize = "cluster"))]
#[serde(rename_all = "camelCase")]
-Cluster { probability: Float, min: usize, max: usize, min_items: usize },
+Cluster { probability: Float, min: usize, max: usize },
}

/// Specifies recreate methods with their probability weight and specific parameters.
@@ -685,9 +685,9 @@ fn create_ruin_method(
RuinMethod::WorstJob { probability, min, max, skip: worst_skip } => {
(Arc::new(WorstJobRemoval::new(*worst_skip, get_limits(*min, *max))), *probability)
}
-RuinMethod::Cluster { probability, min, max, min_items } => (
+RuinMethod::Cluster { probability, min, max } => (
// TODO: remove unwrap
-Arc::new(ClusterRemoval::new(problem.clone(), environment, *min_items, get_limits(*min, *max)).unwrap()),
+Arc::new(ClusterRemoval::new(problem.clone(), environment, get_limits(*min, *max)).unwrap()),
*probability,
),
RuinMethod::CloseRoute { probability } => (Arc::new(CloseRouteRemoval::new(limits)), *probability),
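For illustration, a minimal standalone sketch of how the trimmed cluster ruin config deserializes after this change. The enum below mirrors the updated variant rather than importing vrp-cli's own type; the "type" tag, the f64 stand-in for Float, and the serde/serde_json dependencies are assumptions, not the crate's confirmed API.

use serde::Deserialize;

// Standalone mirror of the updated variant above; illustrative only.
#[derive(Debug, Deserialize)]
#[serde(tag = "type")]
enum RuinMethod {
    #[serde(rename = "cluster")]
    Cluster { probability: f64, min: usize, max: usize },
}

fn main() -> Result<(), serde_json::Error> {
    // Matches the trimmed config.full.json fragment above: no "minItems" anymore.
    let raw = r#"{ "type": "cluster", "probability": 1, "min": 8, "max": 16 }"#;
    let method: RuinMethod = serde_json::from_str(raw)?;
    println!("{method:?}");
    Ok(())
}

With serde's default behaviour, an older config still carrying "minItems" would simply have that field ignored unless the real type opts into deny_unknown_fields.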
11 changes: 8 additions & 3 deletions vrp-core/src/models/problem/jobs.rs
@@ -234,7 +234,7 @@ const MAX_NEIGHBOURS: usize = 256;
pub struct Jobs {
jobs: Vec<Job>,
index: HashMap<usize, JobIndex>,
-_clusters: Vec<HashSet<Job>>,
+clusters: Vec<HashSet<Job>>,
}

impl Jobs {
@@ -246,10 +246,10 @@ impl Jobs {
logger: &InfoLogger,
) -> GenericResult<Jobs> {
let index = create_index(fleet, jobs.clone(), transport, logger);
-let _clusters =
+let clusters =
create_job_clusters(&jobs, fleet, Some(3), None, |profile, job| neighbors(&index, profile, job))?;

-Ok(Jobs { jobs, index, _clusters })
+Ok(Jobs { jobs, index, clusters })
}

/// Returns all jobs in the original order as a slice.
@@ -263,6 +263,11 @@ impl Jobs {
neighbors(&self.index, profile, job)
}

+/// Returns job clusters based on their neighborhood approximation.
+pub fn clusters(&self) -> &[HashSet<Job>] {
+&self.clusters
+}
+
/// Returns job rank as relative cost from any vehicle's start position.
/// Returns `None` if a job is not found in index.
pub fn rank(&self, profile: &Profile, job: &Job) -> Option<Cost> {
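A short sketch of how downstream code might consume the newly exposed accessor; only the clusters() signature comes from the diff above, while the helper itself and the vrp_core::models::problem import path are assumptions based on the file location.

use vrp_core::models::problem::Jobs;

/// Illustrative helper: size of the largest precomputed DBSCAN cluster, or 0 if there are none.
fn largest_cluster_size(jobs: &Jobs) -> usize {
    jobs.clusters().iter().map(|cluster| cluster.len()).max().unwrap_or(0)
}

Since the clusters are now built once in Jobs::new (with a hard-coded minimum cluster size of 3), consumers such as ClusterRemoval below reuse them instead of recomputing DBSCAN per operator.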
24 changes: 9 additions & 15 deletions vrp-core/src/solver/search/ruin/cluster_removal.rs
@@ -3,9 +3,7 @@
mod cluster_removal_test;

use super::*;
-use crate::construction::clustering::dbscan::create_job_clusters;
use crate::construction::heuristics::InsertionContext;
-use crate::models::common::Timestamp;
use crate::models::problem::Job;
use crate::models::Problem;
use crate::solver::search::{get_route_jobs, JobRemovalTracker, TabuList};
@@ -21,18 +19,14 @@ pub struct ClusterRemoval {

impl ClusterRemoval {
/// Creates a new instance of `ClusterRemoval`.
-pub fn new(
-problem: Arc<Problem>,
-environment: Arc<Environment>,
-min_items: usize,
-limits: RemovalLimits,
-) -> GenericResult<Self> {
-let clusters =
-create_job_clusters(problem.jobs.all(), problem.fleet.as_ref(), Some(min_items), None, |profile, job| {
-problem.jobs.neighbors(profile, job, Timestamp::default())
-})?;
-let mut clusters =
-clusters.into_iter().map(|cluster| cluster.into_iter().collect::<Vec<_>>()).collect::<Vec<_>>();
+pub fn new(problem: Arc<Problem>, environment: Arc<Environment>, limits: RemovalLimits) -> GenericResult<Self> {
+let mut clusters = problem
+.jobs
+.clusters()
+.iter()
+.cloned()
+.map(|cluster| cluster.into_iter().collect::<Vec<_>>())
+.collect::<Vec<_>>();

clusters.shuffle(&mut environment.random.get_rng());

@@ -42,7 +36,7 @@ impl ClusterRemoval {
/// Creates a new instance of `ClusterRemoval` with default parameters.
pub fn new_with_defaults(problem: Arc<Problem>, environment: Arc<Environment>) -> GenericResult<Self> {
let limits = RemovalLimits::new(problem.as_ref());
-Self::new(problem, environment, 3, limits)
+Self::new(problem, environment, limits)
}
}

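A hedged call-site sketch of the simplified three-argument constructor; only the signature change comes from the diff, while the module paths, the Environment default, and the expect-based error handling are assumptions for illustration.

use std::sync::Arc;
use vrp_core::models::Problem;
use vrp_core::solver::search::{ClusterRemoval, RemovalLimits};
use vrp_core::utils::Environment;

// Illustrative only: builds the ruin operator for an already constructed problem.
fn build_cluster_removal(problem: Arc<Problem>) -> ClusterRemoval {
    let environment = Arc::new(Environment::default());
    let limits = RemovalLimits::new(problem.as_ref());
    // `min_items` is gone from the signature: the operator reuses the clusters precomputed inside `Jobs`.
    ClusterRemoval::new(problem, environment, limits).expect("cannot create cluster removal")
}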
16 changes: 8 additions & 8 deletions vrp-core/tests/unit/solver/search/ruin/cluster_removal_test.rs
@@ -29,22 +29,22 @@ fn can_handle_empty_problem() {
let problem = Arc::new(ProblemBuilder::default().build());
let limits = RemovalLimits::new(&problem);

-let removal = ClusterRemoval::new(problem, Arc::new(Environment::default()), 3, limits).unwrap();
+let removal = ClusterRemoval::new(problem, Arc::new(Environment::default()), limits).unwrap();

assert!(removal.clusters.is_empty());
}

-parameterized_test! {can_ruin_jobs, (limit, cluster_size, expected), {
-can_ruin_jobs_impl(limit, cluster_size, expected);
+parameterized_test! {can_ruin_jobs, (limit, expected), {
+can_ruin_jobs_impl(limit, expected);
}}

can_ruin_jobs! {
-case_01: (4, 3, 4),
-case_02: (5, 3, 4),
-case_03: (8, 3, 4),
+case_01: (4, 4),
+case_02: (5, 4),
+case_03: (8, 4),
}

-fn can_ruin_jobs_impl(limit: usize, min_items: usize, expected: usize) {
+fn can_ruin_jobs_impl(limit: usize, expected: usize) {
let limits = RemovalLimits { removed_activities_range: limit..limit, affected_routes_range: 8..8 };
let (problem, solution) = generate_matrix_routes(
8,
@@ -59,7 +59,7 @@ fn can_ruin_jobs_impl(limit: usize, min_items: usize, expected: usize) {
let environment = Arc::new(Environment::default());
let insertion_ctx = InsertionContext::new_from_solution(problem.clone(), (solution, None), environment.clone());

-let insertion_ctx = ClusterRemoval::new(problem, environment, min_items, limits)
+let insertion_ctx = ClusterRemoval::new(problem, environment, limits)
.expect("cannot create clusters")
.run(&create_default_refinement_ctx(insertion_ctx.problem.clone()), insertion_ctx);

