diff --git a/src/backend/base/langflow/components/embeddings/HuggingFaceEmbeddings.py b/src/backend/base/langflow/components/embeddings/HuggingFaceEmbeddings.py
deleted file mode 100644
index bf2bdfd0f47f..000000000000
--- a/src/backend/base/langflow/components/embeddings/HuggingFaceEmbeddings.py
+++ /dev/null
@@ -1,36 +0,0 @@
-from langchain_community.embeddings.huggingface import HuggingFaceEmbeddings
-
-from langflow.base.models.model import LCModelComponent
-from langflow.field_typing import Embeddings
-from langflow.io import BoolInput, DictInput, MessageTextInput, Output
-
-
-class HuggingFaceEmbeddingsComponent(LCModelComponent):
-    display_name = "Hugging Face Embeddings"
-    description = "Generate embeddings using HuggingFace models."
-    documentation = (
-        "https://python.langchain.com/docs/modules/data_connection/text_embedding/integrations/sentence_transformers"
-    )
-    icon = "HuggingFace"
-    name = "HuggingFaceEmbeddings"
-
-    inputs = [
-        MessageTextInput(name="cache_folder", display_name="Cache Folder", advanced=True),
-        DictInput(name="encode_kwargs", display_name="Encode Kwargs", advanced=True),
-        DictInput(name="model_kwargs", display_name="Model Kwargs", advanced=True),
-        MessageTextInput(name="model_name", display_name="Model Name", value="sentence-transformers/all-mpnet-base-v2"),
-        BoolInput(name="multi_process", display_name="Multi Process", advanced=True),
-    ]
-
-    outputs = [
-        Output(display_name="Embeddings", name="embeddings", method="build_embeddings"),
-    ]
-
-    def build_embeddings(self) -> Embeddings:
-        return HuggingFaceEmbeddings(
-            cache_folder=self.cache_folder,
-            encode_kwargs=self.encode_kwargs,
-            model_kwargs=self.model_kwargs,
-            model_name=self.model_name,
-            multi_process=self.multi_process,
-        )
diff --git a/src/backend/base/langflow/components/embeddings/HuggingFaceInferenceAPIEmbeddings.py b/src/backend/base/langflow/components/embeddings/HuggingFaceInferenceAPIEmbeddings.py
index 54b6c312c13e..b2b15c6ef37f 100644
--- a/src/backend/base/langflow/components/embeddings/HuggingFaceInferenceAPIEmbeddings.py
+++ b/src/backend/base/langflow/components/embeddings/HuggingFaceInferenceAPIEmbeddings.py
@@ -7,14 +7,14 @@
 
 
 class HuggingFaceInferenceAPIEmbeddingsComponent(LCModelComponent):
-    display_name = "Hugging Face API Embeddings"
+    display_name = "HuggingFace Embeddings"
     description = "Generate embeddings using Hugging Face Inference API models."
     documentation = "https://github.com/huggingface/text-embeddings-inference"
     icon = "HuggingFace"
     name = "HuggingFaceInferenceAPIEmbeddings"
 
     inputs = [
-        SecretStrInput(name="api_key", display_name="API Key", advanced=True),
+        SecretStrInput(name="api_key", display_name="API Key"),
         MessageTextInput(name="api_url", display_name="API URL", advanced=True, value="http://localhost:8080"),
         MessageTextInput(name="model_name", display_name="Model Name", value="BAAI/bge-large-en-v1.5"),
     ]
diff --git a/src/backend/base/langflow/components/embeddings/__init__.py b/src/backend/base/langflow/components/embeddings/__init__.py
index 520e3e13f6f0..eddac5e5d817 100644
--- a/src/backend/base/langflow/components/embeddings/__init__.py
+++ b/src/backend/base/langflow/components/embeddings/__init__.py
@@ -3,7 +3,6 @@
 from .AstraVectorize import AstraVectorizeComponent
 from .AzureOpenAIEmbeddings import AzureOpenAIEmbeddingsComponent
 from .CohereEmbeddings import CohereEmbeddingsComponent
-from .HuggingFaceEmbeddings import HuggingFaceEmbeddingsComponent
 from .HuggingFaceInferenceAPIEmbeddings import HuggingFaceInferenceAPIEmbeddingsComponent
 from .OllamaEmbeddings import OllamaEmbeddingsComponent
 from .OpenAIEmbeddings import OpenAIEmbeddingsComponent
@@ -15,7 +14,6 @@
     "AstraVectorizeComponent",
     "AzureOpenAIEmbeddingsComponent",
     "CohereEmbeddingsComponent",
-    "HuggingFaceEmbeddingsComponent",
     "HuggingFaceInferenceAPIEmbeddingsComponent",
     "OllamaEmbeddingsComponent",
     "OpenAIEmbeddingsComponent",
diff --git a/src/backend/base/langflow/components/models/HuggingFaceModel.py b/src/backend/base/langflow/components/models/HuggingFaceModel.py
index 069d63d18301..83c27fbf8959 100644
--- a/src/backend/base/langflow/components/models/HuggingFaceModel.py
+++ b/src/backend/base/langflow/components/models/HuggingFaceModel.py
@@ -1,6 +1,4 @@
 from tenacity import retry, stop_after_attempt, wait_fixed
-
-from langchain_community.chat_models.huggingface import ChatHuggingFace
 from langchain_community.llms.huggingface_endpoint import HuggingFaceEndpoint
 
 from langflow.base.models.model import LCModelComponent
@@ -9,53 +7,54 @@
 
 
 class HuggingFaceEndpointsComponent(LCModelComponent):
-    display_name: str = "Hugging Face API"
+    display_name: str = "HuggingFace"
    description: str = "Generate text using Hugging Face Inference APIs."
     icon = "HuggingFace"
     name = "HuggingFaceModel"
 
     inputs = LCModelComponent._base_inputs + [
-        SecretStrInput(name="endpoint_url", display_name="Endpoint URL", password=True),
         StrInput(
             name="model_id",
-            display_name="Model Id",
-            info="Id field of endpoint_url response.",
+            display_name="Model ID",
+            value="openai-community/gpt2",
         ),
         DropdownInput(
             name="task",
             display_name="Task",
-            options=["text2text-generation", "text-generation", "summarization"],
+            options=["text2text-generation", "text-generation", "summarization", "translation"],
+            value="text-generation",
         ),
-        SecretStrInput(name="huggingfacehub_api_token", display_name="API token", password=True),
+        SecretStrInput(name="huggingfacehub_api_token", display_name="API Token", password=True),
         DictInput(name="model_kwargs", display_name="Model Keyword Arguments", advanced=True),
-        IntInput(name="retry_attempts", display_name="Retry Attempts", value=1),
+        IntInput(name="retry_attempts", display_name="Retry Attempts", value=1, advanced=True),
     ]
 
-    def create_huggingface_endpoint(self, endpoint_url, task, huggingfacehub_api_token, model_kwargs):
-        @retry(stop=stop_after_attempt(self.retry_attempts), wait=wait_fixed(2))
+    def create_huggingface_endpoint(
+        self, model_id: str, task: str, huggingfacehub_api_token: str, model_kwargs: dict
+    ) -> HuggingFaceEndpoint:
+        retry_attempts = self.retry_attempts  # Access the retry attempts input
+        endpoint_url = f"https://api-inference.huggingface.co/models/{model_id}"
+
+        @retry(stop=stop_after_attempt(retry_attempts), wait=wait_fixed(2))
         def _attempt_create():
-            try:
-                return HuggingFaceEndpoint(  # type: ignore
-                    endpoint_url=endpoint_url,
-                    task=task,
-                    huggingfacehub_api_token=huggingfacehub_api_token,
-                    model_kwargs=model_kwargs,
-                )
-            except Exception as e:
-                raise ValueError("Could not connect to HuggingFace Endpoints API.") from e
+            return HuggingFaceEndpoint(
+                endpoint_url=endpoint_url,
+                task=task,
+                huggingfacehub_api_token=huggingfacehub_api_token,
+                model_kwargs=model_kwargs,
+            )
 
         return _attempt_create()
 
     def build_model(self) -> LanguageModel:  # type: ignore[type-var]
-        endpoint_url = self.endpoint_url
+        model_id = self.model_id
         task = self.task
         huggingfacehub_api_token = self.huggingfacehub_api_token
         model_kwargs = self.model_kwargs or {}
         try:
-            llm = self.create_huggingface_endpoint(endpoint_url, task, huggingfacehub_api_token, model_kwargs)
+            llm = self.create_huggingface_endpoint(model_id, task, huggingfacehub_api_token, model_kwargs)
         except Exception as e:
             raise ValueError("Could not connect to HuggingFace Endpoints API.") from e
 
-        output = ChatHuggingFace(llm=llm, model_id=self.model_id)
-        return output  # type: ignore
+        return llm
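For context, a minimal standalone sketch of what the reworked `HuggingFaceModel` component now does: derive the Inference API endpoint URL from the model ID and return the `HuggingFaceEndpoint` directly instead of wrapping it in `ChatHuggingFace`. This is an approximation for review, not the component itself; the model ID and token values below are placeholders.

```python
from tenacity import retry, stop_after_attempt, wait_fixed
from langchain_community.llms.huggingface_endpoint import HuggingFaceEndpoint

# Placeholder values; the real component reads these from its inputs.
MODEL_ID = "openai-community/gpt2"  # default model ID from the diff
API_TOKEN = "hf_..."                # placeholder Hugging Face token


@retry(stop=stop_after_attempt(1), wait=wait_fixed(2))
def build_endpoint() -> HuggingFaceEndpoint:
    # The endpoint URL is now derived from the model ID rather than taken as a separate input.
    return HuggingFaceEndpoint(
        endpoint_url=f"https://api-inference.huggingface.co/models/{MODEL_ID}",
        task="text-generation",
        huggingfacehub_api_token=API_TOKEN,
        model_kwargs={},
    )


llm = build_endpoint()  # returned as-is; no ChatHuggingFace wrapper anymore
print(llm.invoke("Hello, world"))
```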