Skip to content

Commit

Permalink
[wip] collapse all with maxsat
Browse files Browse the repository at this point in the history
  • Loading branch information
DillonZChen committed Jan 14, 2025
1 parent 9a2dedf commit 84605cb
Show file tree
Hide file tree
Showing 8 changed files with 77 additions and 20 deletions.
1 change: 1 addition & 0 deletions include/feature_generation/features.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
#include "../planning/domain.hpp"
#include "../planning/state.hpp"
#include "neighbour_container.hpp"
#include "pruning.hpp"

#include <map>
#include <memory>
Expand Down
18 changes: 18 additions & 0 deletions include/feature_generation/pruning.hpp
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
#ifndef FEATURE_GENERATION_PRUNING_HPP
#define FEATURE_GENERATION_PRUNING_HPP

#include <string>
#include <vector>

namespace feature_generation {
class PruningOptions {
public:
static const std::string NONE;
static const std::string COLLAPSE_ALL;
static const std::string COLLAPSE_LAYER;

static const std::vector<std::string> get_all();
};
} // namespace feature_generation

#endif // FEATURE_GENERATION_PRUNING_HPP
4 changes: 2 additions & 2 deletions src/feature_generation/features.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -177,7 +177,7 @@ namespace feature_generation {
}

void Features::collect(const std::vector<graph::Graph> &graphs) {
if (pruning == "collapse" && collapse_pruned) {
if (pruning == PruningOptions::COLLAPSE_LAYER && collapse_pruned) {
std::cout << "collect with collapse pruning can only be called at most once" << std::endl;
exit(-1);
}
Expand All @@ -186,7 +186,7 @@ namespace feature_generation {

collect_main(graphs);

if (pruning == "collapse") {
if (pruning == PruningOptions::COLLAPSE_LAYER) {
collapse_pruned = true;
}
collected = true;
Expand Down
14 changes: 14 additions & 0 deletions src/feature_generation/pruning.cpp
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
#include "../../include/feature_generation/pruning.hpp"

namespace feature_generation {
const std::string PruningOptions::NONE = "none";
const std::string PruningOptions::COLLAPSE_ALL = "collapse-all";
const std::string PruningOptions::COLLAPSE_LAYER = "collapse-layer";
const std::vector<std::string> PruningOptions::get_all() {
return {
NONE,
COLLAPSE_ALL,
COLLAPSE_LAYER,
};
}
} // namespace feature_generation
18 changes: 15 additions & 3 deletions src/feature_generation/wl_features.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -103,14 +103,13 @@ namespace feature_generation {
for (int iteration = 1; iteration < iterations + 1; iteration++) {
cur_collecting_layer = iteration;


for (size_t graph_i = 0; graph_i < graphs.size(); graph_i++) {
const auto graph = std::make_shared<graph::Graph>(graphs[graph_i]);
refine(graph, graph_colours[graph_i], graph_colours_tmp[graph_i]);
}

if (pruning == "collapse") {
// remove duplicate features based on their column
/* COLLAPSE_LAYER: remove duplicate features greedily every iteration */
if (pruning == PruningOptions::COLLAPSE_LAYER) {

std::map<int, int> collect_colour_remap;
std::map<int, std::vector<int>> columns;
Expand Down Expand Up @@ -159,6 +158,19 @@ namespace feature_generation {
reformat_colour_hash(collect_colour_remap);
}
}

// check for redundant layers
if (pruning == PruningOptions::COLLAPSE_LAYER) {
for (int itr = 1; itr < iterations + 1; itr++) {
if (layer_to_colours[itr].size() == 0) {
int lower_iterations = itr - 1;
std::cout << "collapse pruning reduced iterations from " << iterations << " to "
<< lower_iterations << std::endl;
iterations = lower_iterations;
break;
}
}
}
}

Embedding WLFeatures::embed(const std::shared_ptr<graph::Graph> &graph) {
Expand Down
8 changes: 8 additions & 0 deletions src/main.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -443,6 +443,14 @@ py::class_<graph::NILGGenerator, graph::ILGGenerator>(graph_m, "NILGGenerator")
// Feature Generation
//////////////////////////////////////////////////////////////////////////////
auto feature_generation_m = m.def_submodule("feature_generation");
auto pruning_m = feature_generation_m.def_submodule("pruning");

py::class_<feature_generation::PruningOptions>(pruning_m, "PruningOptions")
.def_readonly_static("NONE", &feature_generation::PruningOptions::NONE)
.def_readonly_static("COLLAPSE_ALL", &feature_generation::PruningOptions::COLLAPSE_ALL)
.def_readonly_static("COLLAPSE_LAYER", &feature_generation::PruningOptions::COLLAPSE_LAYER)
.def_static("get_all", &feature_generation::PruningOptions::get_all)
;

py::class_<feature_generation::Features>(feature_generation_m, "_Features")
.def("collect", py::overload_cast<const data::Dataset>(&feature_generation::Features::collect_from_dataset),
Expand Down
20 changes: 10 additions & 10 deletions tests/test_blocks_simple.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
import numpy as np

from wlplan.data import Dataset, ProblemStates
from wlplan.feature_generation import WLFeatures
from wlplan.feature_generation import PruningOptions, WLFeatures
from wlplan.planning import Atom, Domain, Predicate, Problem, State

## domain
Expand Down Expand Up @@ -135,7 +135,7 @@
def test_pruning():
feature_generators = {}
Xs = {}
for pruning in [None, "collapse", "collapse_by_layer"]:
for pruning in PruningOptions.get_all():
print("=" * 80)
print(f"pruning={pruning}")
feature_generator = WLFeatures(
Expand All @@ -162,13 +162,13 @@ def test_pruning():
t = time.time() - t
print(f"{t=:.3f}s")

assert Xs["collapse"].shape <= Xs["collapse_by_layer"].shape
assert Xs["collapse"].shape <= Xs[None].shape
assert Xs["collapse_by_layer"].shape <= Xs[None].shape
# assert Xs[PruningOptions.COLLAPSE_ALL].shape <= Xs[PruningOptions.COLLAPSE_LAYER].shape
# assert Xs[PruningOptions.COLLAPSE_ALL].shape <= Xs[None].shape
# assert Xs[PruningOptions.COLLAPSE_LAYER].shape <= Xs[None].shape


def test_repeated_dataset():
for pruning in [None, "collapse", "collapse_by_layer"]:
for pruning in PruningOptions.get_all():
print("=" * 80)
print(pruning)
column_sizes = set()
Expand Down Expand Up @@ -203,7 +203,7 @@ def test_repeated_dataset():
def test_multiset():
feature_generators = {}
Xs = {}
for pruning in [None, "collapse", "collapse_by_layer"]:
for pruning in PruningOptions.get_all():
print("=" * 80)
print(f"pruning={pruning}")
feature_generator = WLFeatures(
Expand All @@ -230,6 +230,6 @@ def test_multiset():
t = time.time() - t
print(f"{t=:.3f}s")

assert Xs["collapse"].shape <= Xs["collapse_by_layer"].shape
assert Xs["collapse"].shape <= Xs[None].shape
assert Xs["collapse_by_layer"].shape <= Xs[None].shape
# assert Xs[PruningOptions.COLLAPSE_ALL].shape <= Xs[PruningOptions.COLLAPSE_LAYER].shape
# assert Xs[PruningOptions.COLLAPSE_ALL].shape <= Xs[None].shape
# assert Xs[PruningOptions.COLLAPSE_LAYER].shape <= Xs[None].shape
14 changes: 9 additions & 5 deletions wlplan/feature_generation.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@

from _wlplan.feature_generation import (_CCWLFeatures, _IWLFeatures, _KWL2Features, _LWL2Features,
_NIWLFeatures, _WLFeatures)
from _wlplan.feature_generation.pruning import PruningOptions
from _wlplan.planning import Domain

__all__ = [
Expand All @@ -28,8 +29,11 @@ def _get_feature_generators_dict():
"niwl": NIWLFeatures,
}

def get_available_graph_choices():
return [None, "custom", "ilg", "nilg"]

def get_available_pruning_methods():
return {"none", "collapse", "collapse_by_layer"}
return [None] + PruningOptions.get_all()

def get_available_feature_generators():
return set(_get_feature_generators_dict().keys())
Expand Down Expand Up @@ -66,13 +70,13 @@ class Features:
----------
domain : Domain
graph_representation : "ilg" or None, default="ilg"
graph_representation : str, default="ilg"
The graph encoding of planning states used. If None, the user can only call class method of classes and not datasets and states.
iterations : int, default=2
The number of WL iterations to perform.
pruning : "collapse", "collapse_by_layer" or None, default=None
pruning : str, default=None
How to detect and prune duplicate features. If None, no pruning is done.
multiset_hash : bool, default=False
Expand Down Expand Up @@ -123,13 +127,13 @@ def __init__(
base_class.__init__(self, filename=kwargs["filename"])
return

graph_choices = [None, "custom", "ilg", "nilg"]
graph_choices = get_available_graph_choices()
if graph_representation not in graph_choices:
raise ValueError(f"graph_representation must be one of {graph_choices}")
if graph_representation is None:
graph_representation = "custom"

prune_choices = [None, "none", "collapse", "collapse_by_layer"]
prune_choices = get_available_pruning_methods()
if pruning not in prune_choices:
raise ValueError(f"pruning must be one of {prune_choices}")
if pruning is None:
Expand Down

0 comments on commit 84605cb

Please sign in to comment.