diff --git a/references.bib b/references.bib
index b48c9ec..e20eb3a 100644
--- a/references.bib
+++ b/references.bib
@@ -43,7 +43,7 @@ @inproceedings{braun2024cake
 Anote={./images/braun2024cake.png},
 Key = {Student Paper Highlight at AISTATS 2024},
 Note={Access to pre-trained models has recently emerged as a standard across numerous machine learning domains. Unfortunately, access to the original data the models were trained on may not equally be granted. This makes it tremendously challenging to fine-tune, compress models, adapt continually, or to do any other type of data-driven update. We posit that original data access may however not be required. Specifically, we propose Contrastive Abductive Knowledge Extraction (CAKE), a model-agnostic knowledge distillation procedure that mimics deep classifiers without access to the original data. To this end, CAKE generates pairs of noisy synthetic samples and diffuses them contrastively toward a model's decision boundary. We empirically corroborate CAKE's effectiveness using several benchmark datasets and various architectural choices, paving the way for broad application.},
-Url={}
+Url={https://proceedings.mlr.press/v238/braun24b/braun24b.pdf}
 }