Expose transformer model config
Riccorl authored May 19, 2023
1 parent 8d599d4 commit 77cc2a1
Showing 2 changed files with 4 additions and 4 deletions.
2 changes: 1 addition & 1 deletion setup.py
@@ -12,7 +12,7 @@

 setuptools.setup(
     name="transformers_embedder",
-    version="3.0.9",
+    version="3.0.10",
     author="Riccardo Orlando",
     author_email="orlandoricc@gmail.com",
     description="Word level transformer based embeddings",
6 changes: 3 additions & 3 deletions transformers_embedder/embedder.py
@@ -75,7 +75,7 @@ def __init__(
     ) -> None:
         super().__init__()
         if isinstance(model, str):
-            config = tr.AutoConfig.from_pretrained(
+            self.config = tr.AutoConfig.from_pretrained(
                 model,
                 output_hidden_states=True,
                 output_attentions=True,
@@ -84,11 +84,11 @@ def __init__(
             )
             if from_pretrained:
                 self.transformer_model = tr.AutoModel.from_pretrained(
-                    model, config=config, *args, **kwargs
+                    model, config=self.config, *args, **kwargs
                 )
             else:
                 self.transformer_model = tr.AutoModel.from_config(
-                    config, *args, **kwargs
+                    self.config, *args, **kwargs
                 )
         else:
             self.transformer_model = model
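For context, a minimal sketch of what this change enables: the AutoConfig is now kept on the embedder instance as self.config, so callers can read model properties directly instead of reloading the configuration. The class name TransformersEmbedder, the from_pretrained argument, and the example model identifier below are assumptions inferred from the diff, not taken verbatim from this commit.

# Hypothetical usage sketch; class name and constructor signature are
# inferred from the diff above, not confirmed by this commit.
from transformers_embedder.embedder import TransformersEmbedder  # assumed import path

# Build the embedder from a Hugging Face model identifier (assumed example model).
embedder = TransformersEmbedder("bert-base-cased", from_pretrained=True)

# After this commit the AutoConfig is stored as `self.config`, so downstream
# code can inspect model properties without a second AutoConfig.from_pretrained call.
print(embedder.config.hidden_size)        # e.g. 768 for bert-base-cased
print(embedder.config.num_hidden_layers)  # e.g. 12 for bert-base-cased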
