Commit

update
kristiankersting committed May 7, 2024
1 parent 87ec581 commit cca5db2
Showing 2 changed files with 3 additions and 3 deletions.
2 changes: 1 addition & 1 deletion build/bib-list.js
@@ -1978,7 +1978,7 @@ var bibtexify = (function($) {
// adds links to the PDF or url of the item
links: function(entryData) {
var itemStr = '';
- if (entryData.url && (entryData.url.match(/.*\.pdf/) || entryData.url.match(/\/pdf\//)) ) {
+ if (entryData.url && ( entryData.url.match(/.*\.pdf/) || entryData.url.match(/\/pdf\//) || entryData.url.match(/pdf\?id=/)) ) {
itemStr += '&nbsp;<span style="color:white;font-size:12px;background-color:#FFCECE;cursor: pointer;"> <a title="PDF of this paper" href="'+ entryData.url +'" target="_blank"><font color="black">pdf<\/font><\/a>&nbsp;<\/span>';
// itemStr += '<button type="button" class="btn btn-danger btn-xs disabled" style="border: none;cursor: pointer;font-size:12px;background-color: #900"> <a title="PDF of this article" href="' +
// entryData.url + '" target="_blank"><font color="white">.pdf (draft)<\/font><\/a><\/button>';
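The new pdf\?id= alternative covers viewer-style links (for example OpenReview's pdf?id=<key> URLs), which contain neither a ".pdf" suffix nor a "/pdf/" path segment and so were previously not rendered with a pdf badge. Below is a minimal sketch of the updated check in isolation; the isPdfUrl helper name and the sample URLs are illustrative, not part of the repository:

// Sketch of the three URL shapes the updated condition accepts.
function isPdfUrl(url) {
  return Boolean(
    url && (
      url.match(/.*\.pdf/) ||   // direct links containing ".pdf"
      url.match(/\/pdf\//)  ||  // path-segment links such as .../pdf/...
      url.match(/pdf\?id=/)     // viewer links such as .../pdf?id=<key>
    )
  );
}

// Illustrative checks:
isPdfUrl('https://proceedings.mlr.press/v238/braun24b/braun24b.pdf'); // true
isPdfUrl('https://openreview.net/pdf?id=abc123');                     // true (newly matched)
isPdfUrl('https://example.org/paper.html');                           // false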
4 changes: 2 additions & 2 deletions references.bib
@@ -41,12 +41,12 @@ @inproceedings{braun2024cake
year={2024},
Keywords={Deep Learning, model-agnostic transfer, Knowledge distillation, Student-Teacher},
Anote={./images/braun2024cake.png},
- Key = {Student Paper Highlight at AISTATS 2024},
+ Key = {Outstanding Student Paper Highlight at AISTATS 2024},
Note={Access to pre-trained models has recently emerged as a standard across numerous machine learning domains. Unfortunately, access to the original data the models were trained on may not equally be granted. This makes it tremendously challenging to fine-tune, compress models, adapt continually, or to do any other type of data-driven update. We posit that original data access may however not be required. Specifically, we propose Contrastive Abductive Knowledge Extraction (CAKE), a model-agnostic knowledge distillation procedure that mimics deep classifiers without access to the original data. To this end, CAKE generates pairs of noisy synthetic samples and diffuses them contrastively toward a model’s decision boundary. We empirically corroborate CAKE's effectiveness using several benchmark datasets and various architectural choices, paving the way for broad application.},
Url={https://proceedings.mlr.press/v238/braun24b/braun24b.pdf}
}


@inproceedings{wuest2024pix2code,
Anote={./images/wuest_pix2code.png},
title={Pix2Code: Learning to Compose Neural Visual Concepts as Programs},
