diff --git a/.vscode/launch.json b/.vscode/launch.json index bd9f47334487..1f66413d183f 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -1,7 +1,6 @@ { "version": "0.2.0", "configurations": [ - { "name": "Debug Backend", "type": "debugpy", @@ -18,7 +17,7 @@ "--loop", "asyncio", "--reload-include", - "src/backend/*" + "./src/backend/*" ], "jinja": true, "justMyCode": false, diff --git a/poetry.lock b/poetry.lock index b71793db0428..440c65ca63fe 100644 --- a/poetry.lock +++ b/poetry.lock @@ -4745,19 +4745,19 @@ tenacity = ">=8.1.0,<8.4.0 || >8.4.0,<9.0.0" [[package]] name = "langchain-anthropic" -version = "0.1.20" +version = "0.1.21" description = "An integration package connecting AnthropicMessages and LangChain" optional = false python-versions = "<4.0,>=3.8.1" files = [ - {file = "langchain_anthropic-0.1.20-py3-none-any.whl", hash = "sha256:3a0d89ac6856be98beb3ec63813393bf29af3c5134247979c055938e741b7d9d"}, - {file = "langchain_anthropic-0.1.20.tar.gz", hash = "sha256:cb9607fecfc0f0de49b79dd0fc066790e2877873ef753abd98d2ae38d6e0f5b2"}, + {file = "langchain_anthropic-0.1.21-py3-none-any.whl", hash = "sha256:74094162e0badd9f5d275a1e2c4019303fbb45638091c202dcaab5f88fda97aa"}, + {file = "langchain_anthropic-0.1.21.tar.gz", hash = "sha256:04131e024f79c6a60837a4f3b1399b90440da7a9b0c984b92704e66c317f6c5b"}, ] [package.dependencies] anthropic = ">=0.28.0,<1" defusedxml = ">=0.7.1,<0.8.0" -langchain-core = ">=0.2.17,<0.3" +langchain-core = ">=0.2.24,<0.3" [[package]] name = "langchain-astradb" diff --git a/src/backend/base/langflow/components/helpers/Memory.py b/src/backend/base/langflow/components/helpers/Memory.py index 25a6d89e3e5e..0173f9cfb8e4 100644 --- a/src/backend/base/langflow/components/helpers/Memory.py +++ b/src/backend/base/langflow/components/helpers/Memory.py @@ -1,13 +1,13 @@ +from langchain.memory import ConversationBufferMemory + from langflow.custom import Component +from langflow.field_typing import BaseChatMemory from 
langflow.helpers.data import data_to_text from langflow.inputs import HandleInput from langflow.io import DropdownInput, IntInput, MessageTextInput, MultilineInput, Output -from langflow.memory import get_messages, LCBuiltinChatMemory +from langflow.memory import LCBuiltinChatMemory, get_messages from langflow.schema import Data from langflow.schema.message import Message -from langflow.field_typing import BaseChatMemory -from langchain.memory import ConversationBufferMemory - from langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_USER @@ -118,5 +118,5 @@ def build_lc_memory(self) -> BaseChatMemory: if self.memory: chat_memory = self.memory else: - chat_memory = LCBuiltinChatMemory(flow_id=self.graph.flow_id, session_id=self.session_id) + chat_memory = LCBuiltinChatMemory(flow_id=self.flow_id, session_id=self.session_id) return ConversationBufferMemory(chat_memory=chat_memory) diff --git a/src/backend/base/langflow/custom/__init__.py b/src/backend/base/langflow/custom/__init__.py index c6ce56c3ec64..0ba9d831da8c 100644 --- a/src/backend/base/langflow/custom/__init__.py +++ b/src/backend/base/langflow/custom/__init__.py @@ -1,4 +1,4 @@ -from langflow.custom.custom_component import CustomComponent from langflow.custom.custom_component.component import Component +from langflow.custom.custom_component.custom_component import CustomComponent __all__ = ["CustomComponent", "Component"] diff --git a/src/backend/base/langflow/custom/code_parser/code_parser.py b/src/backend/base/langflow/custom/code_parser/code_parser.py index 8fc276efd544..a5d8c5595449 100644 --- a/src/backend/base/langflow/custom/code_parser/code_parser.py +++ b/src/backend/base/langflow/custom/code_parser/code_parser.py @@ -384,7 +384,7 @@ def parse_global_vars(self, node: ast.Assign) -> None: def execute_and_inspect_classes(self, code: str): custom_component_class = eval_custom_component_code(code) - custom_component = custom_component_class() + custom_component = 
custom_component_class(_code=code) dunder_class = custom_component.__class__ # Get the base classes at two levels of inheritance bases = [] diff --git a/src/backend/base/langflow/custom/custom_component/__init__.py b/src/backend/base/langflow/custom/custom_component/__init__.py index 295992f6089a..e69de29bb2d1 100644 --- a/src/backend/base/langflow/custom/custom_component/__init__.py +++ b/src/backend/base/langflow/custom/custom_component/__init__.py @@ -1,3 +0,0 @@ -from .custom_component import CustomComponent - -__all__ = ["CustomComponent"] diff --git a/src/backend/base/langflow/custom/custom_component/base_component.py b/src/backend/base/langflow/custom/custom_component/base_component.py index 5cb36ec396c3..2403d590a956 100644 --- a/src/backend/base/langflow/custom/custom_component/base_component.py +++ b/src/backend/base/langflow/custom/custom_component/base_component.py @@ -97,7 +97,7 @@ def build_template_config(self) -> dict: return {} cc_class = eval_custom_component_code(self._code) - component_instance = cc_class() + component_instance = cc_class(_code=self._code) template_config = self.get_template_config(component_instance) return template_config diff --git a/src/backend/base/langflow/custom/custom_component/component.py b/src/backend/base/langflow/custom/custom_component/component.py index 7c495e823749..5a222e8a9610 100644 --- a/src/backend/base/langflow/custom/custom_component/component.py +++ b/src/backend/base/langflow/custom/custom_component/component.py @@ -6,9 +6,7 @@ import yaml from pydantic import BaseModel -from langflow.graph.edge.schema import EdgeData from langflow.helpers.custom import format_type -from langflow.inputs.inputs import InputTypes from langflow.schema.artifact import get_artifact_type, post_process_raw from langflow.schema.data import Data from langflow.schema.message import Message @@ -20,13 +18,15 @@ from .custom_component import CustomComponent if TYPE_CHECKING: + from langflow.graph.edge.schema import EdgeData from 
langflow.graph.vertex.base import Vertex + from langflow.inputs.inputs import InputTypes BACKWARDS_COMPATIBLE_ATTRIBUTES = ["user_id", "vertex", "tracing_service"] class Component(CustomComponent): - inputs: List[InputTypes] = [] + inputs: List["InputTypes"] = [] outputs: List[Output] = [] code_class_base_inheritance: ClassVar[str] = "Component" _output_logs: dict[str, Log] = {} @@ -41,7 +41,7 @@ def __init__(self, **kwargs): config[key] = value else: inputs[key] = value - self._inputs: dict[str, InputTypes] = {} + self._inputs: dict[str, "InputTypes"] = {} self._outputs: dict[str, Output] = {} self._results: dict[str, Any] = {} self._attributes: dict[str, Any] = {} @@ -64,6 +64,20 @@ def __init__(self, **kwargs): self.map_outputs(self.outputs) # Set output types self._set_output_types() + self.set_class_code() + + def set_class_code(self): + # Get the source code of the calling class + if self._code: + return + try: + module = inspect.getmodule(self.__class__) + if module is None: + raise ValueError("Could not find module for class") + class_code = inspect.getsource(module) + self._code = class_code + except OSError: + raise ValueError(f"Could not find source code for {self.__class__.__name__}") def set(self, **kwargs): """ @@ -174,7 +188,7 @@ def map_outputs(self, outputs: List[Output]): raise ValueError("Output name cannot be None.") self._outputs[output.name] = output - def map_inputs(self, inputs: List[InputTypes]): + def map_inputs(self, inputs: List["InputTypes"]): """ Maps the given inputs to the component. 
@@ -272,7 +286,7 @@ def _add_edge(self, component, key, output, _input): "target": self._id, "data": { "sourceHandle": { - "dataType": self.name, + "dataType": component.name or component.__class__.__name__, "id": component._id, "name": output.name, "output_types": output.types, @@ -366,6 +380,21 @@ def to_frontend_node(self): self._map_parameters_on_template(frontend_node_dict["template"]) frontend_node = ComponentFrontendNode.from_dict(frontend_node_dict) + if not self._code: + self.set_class_code() + code_field = Input( + dynamic=True, + required=True, + placeholder="", + multiline=True, + value=self._code, + password=False, + name="code", + advanced=True, + field_type="code", + is_list=False, + ) + frontend_node.template.add_field(code_field) for output in frontend_node.outputs: if output.types: @@ -379,7 +408,7 @@ def to_frontend_node(self): data = { "data": { "node": frontend_node.to_dict(keep_name=False), - "type": self.__class__.__name__, + "type": self.name or self.__class__.__name__, } } return data diff --git a/src/backend/base/langflow/custom/utils.py b/src/backend/base/langflow/custom/utils.py index 9fdbc789ed6e..c92d208c48dc 100644 --- a/src/backend/base/langflow/custom/utils.py +++ b/src/backend/base/langflow/custom/utils.py @@ -283,7 +283,7 @@ def get_component_instance(custom_component: CustomComponent, user_id: Optional[ ) from exc try: - custom_instance = custom_class(_user_id=user_id) + custom_instance = custom_class(_user_id=user_id, _code=custom_component._code) return custom_instance except Exception as exc: logger.error(f"Error while instantiating custom component: {str(exc)}") @@ -339,7 +339,7 @@ def run_build_config( raise exc -def add_code_field(frontend_node: CustomComponentFrontendNode, raw_code, field_config): +def add_code_field(frontend_node: CustomComponentFrontendNode, raw_code): code_field = Input( dynamic=True, required=True, @@ -364,7 +364,7 @@ def build_custom_component_template_from_inputs( cc_instance = 
get_component_instance(custom_component, user_id=user_id) field_config = cc_instance.get_template_config(cc_instance) frontend_node = ComponentFrontendNode.from_inputs(**field_config) - frontend_node = add_code_field(frontend_node, custom_component._code, field_config.get("code", {})) + frontend_node = add_code_field(frontend_node, custom_component._code) # But we now need to calculate the return_type of the methods in the outputs for output in frontend_node.outputs: if output.types: @@ -408,7 +408,7 @@ def build_custom_component_template( add_extra_fields(frontend_node, field_config, entrypoint_args) - frontend_node = add_code_field(frontend_node, custom_component._code, field_config.get("code", {})) + frontend_node = add_code_field(frontend_node, custom_component._code) add_base_classes(frontend_node, custom_component.get_function_entrypoint_return_type) add_output_types(frontend_node, custom_component.get_function_entrypoint_return_type) diff --git a/src/backend/base/langflow/graph/graph/base.py b/src/backend/base/langflow/graph/graph/base.py index 393f657315ed..901f4288810d 100644 --- a/src/backend/base/langflow/graph/graph/base.py +++ b/src/backend/base/langflow/graph/graph/base.py @@ -182,12 +182,17 @@ def add_component_edge(self, source_id: str, output_input_tuple: Tuple[str, str] if not isinstance(target_vertex, ComponentVertex): raise ValueError(f"Target vertex {target_id} is not a component vertex.") output_name, input_name = output_input_tuple + if source_vertex._custom_component is None: + raise ValueError(f"Source vertex {source_id} does not have a custom component.") + if target_vertex._custom_component is None: + raise ValueError(f"Target vertex {target_id} does not have a custom component.") edge_data: EdgeData = { "source": source_id, "target": target_id, "data": { "sourceHandle": { - "dataType": source_vertex.base_name, + "dataType": source_vertex._custom_component.name + or source_vertex._custom_component.__class__.__name__, "id": 
source_vertex.id, "name": output_name, "output_types": source_vertex.get_output(output_name).types, @@ -676,7 +681,7 @@ def metadata(self): "flow_name": self.flow_name, } - def build_graph_maps(self, edges: Optional[List[ContractEdge]] = None, vertices: Optional[List[Vertex]] = None): + def build_graph_maps(self, edges: Optional[List[ContractEdge]] = None, vertices: Optional[List["Vertex"]] = None): """ Builds the adjacency maps for the graph. """ @@ -738,7 +743,7 @@ def get_edge(self, source_id: str, target_id: str) -> Optional[ContractEdge]: return edge return None - def build_parent_child_map(self, vertices: List[Vertex]): + def build_parent_child_map(self, vertices: List["Vertex"]): parent_child_map = defaultdict(list) for vertex in vertices: parent_child_map[vertex.id] = [child.id for child in self.get_successors(vertex)] @@ -834,7 +839,7 @@ def __eq__(self, other: object) -> bool: # both graphs have the same vertices and edges # but the data of the vertices might be different - def update_edges_from_vertex(self, vertex: Vertex, other_vertex: Vertex) -> None: + def update_edges_from_vertex(self, vertex: "Vertex", other_vertex: "Vertex") -> None: """Updates the edges of a vertex in the Graph.""" new_edges = [] for edge in self.edges: @@ -844,13 +849,13 @@ def update_edges_from_vertex(self, vertex: Vertex, other_vertex: Vertex) -> None new_edges += other_vertex.edges self.edges = new_edges - def vertex_data_is_identical(self, vertex: Vertex, other_vertex: Vertex) -> bool: + def vertex_data_is_identical(self, vertex: "Vertex", other_vertex: "Vertex") -> bool: data_is_equivalent = vertex == other_vertex if not data_is_equivalent: return False return self.vertex_edges_are_identical(vertex, other_vertex) - def vertex_edges_are_identical(self, vertex: Vertex, other_vertex: Vertex) -> bool: + def vertex_edges_are_identical(self, vertex: "Vertex", other_vertex: "Vertex") -> bool: same_length = len(vertex.edges) == len(other_vertex.edges) if not same_length: return 
False @@ -909,7 +914,7 @@ def update(self, other: "Graph") -> "Graph": self.increment_update_count() return self - def update_vertex_from_another(self, vertex: Vertex, other_vertex: Vertex) -> None: + def update_vertex_from_another(self, vertex: "Vertex", other_vertex: "Vertex") -> None: """ Updates a vertex from another vertex. @@ -947,12 +952,12 @@ def _add_vertex(self, vertex: Vertex) -> None: self.vertices.append(vertex) self.vertex_map[vertex.id] = vertex - def add_vertex(self, vertex: Vertex) -> None: + def add_vertex(self, vertex: "Vertex") -> None: """Adds a new vertex to the graph.""" self._add_vertex(vertex) self._update_edges(vertex) - def _update_edges(self, vertex: Vertex) -> None: + def _update_edges(self, vertex: "Vertex") -> None: """Updates the edges of a vertex.""" # Vertex has edges, so we need to update the edges for edge in vertex.edges: @@ -987,19 +992,19 @@ def _build_vertex_params(self) -> None: for vertex in self.vertices: vertex._build_params() - def _validate_vertex(self, vertex: Vertex) -> bool: + def _validate_vertex(self, vertex: "Vertex") -> bool: """Validates a vertex.""" # All vertices that do not have edges are invalid return len(self.get_vertex_edges(vertex.id)) > 0 - def get_vertex(self, vertex_id: str, silent: bool = False) -> Vertex: + def get_vertex(self, vertex_id: str, silent: bool = False) -> "Vertex": """Returns a vertex by id.""" try: return self.vertex_map[vertex_id] except KeyError: raise ValueError(f"Vertex {vertex_id} not found") - def get_root_of_group_node(self, vertex_id: str) -> Vertex: + def get_root_of_group_node(self, vertex_id: str) -> "Vertex": """Returns the root of a group node.""" if vertex_id in self.top_level_vertices: # Get all vertices with vertex_id as .parent_node_id @@ -1157,9 +1162,9 @@ def get_vertex_edges( or (edge.target_id == vertex_id and is_target is not False) ] - def get_vertices_with_target(self, vertex_id: str) -> List[Vertex]: + def get_vertices_with_target(self, vertex_id: str) -> 
List["Vertex"]: """Returns the vertices connected to a vertex.""" - vertices: List[Vertex] = [] + vertices: List["Vertex"] = [] for edge in self.edges: if edge.target_id == vertex_id: vertex = self.get_vertex(edge.source_id) @@ -1246,7 +1251,7 @@ async def _execute_tasks(self, tasks: List[asyncio.Task], lock: asyncio.Lock) -> """Executes tasks in parallel, handling exceptions for each task.""" results = [] completed_tasks = await asyncio.gather(*tasks, return_exceptions=True) - vertices: List[Vertex] = [] + vertices: List["Vertex"] = [] for i, result in enumerate(completed_tasks): task_name = tasks[i].get_name() @@ -1273,7 +1278,7 @@ async def _execute_tasks(self, tasks: List[asyncio.Task], lock: asyncio.Lock) -> no_duplicate_results = list(set(results)) return no_duplicate_results - def topological_sort(self) -> List[Vertex]: + def topological_sort(self) -> List["Vertex"]: """ Performs a topological sort of the vertices in the graph. @@ -1306,7 +1311,7 @@ def dfs(vertex): return list(reversed(sorted_vertices)) - def generator_build(self) -> Generator[Vertex, None, None]: + def generator_build(self) -> Generator["Vertex", None, None]: """Builds each vertex in the graph and yields it.""" sorted_vertices = self.topological_sort() logger.debug("There are %s vertices in the graph", len(sorted_vertices)) @@ -1316,7 +1321,7 @@ def get_predecessors(self, vertex): """Returns the predecessors of a vertex.""" return [self.get_vertex(source_id) for source_id in self.predecessor_map.get(vertex.id, [])] - def get_all_successors(self, vertex: Vertex, recursive=True, flat=True): + def get_all_successors(self, vertex: "Vertex", recursive=True, flat=True): # Recursively get the successors of the current vertex # successors = vertex.successors # if not successors: @@ -1353,13 +1358,13 @@ def get_all_successors(self, vertex: Vertex, recursive=True, flat=True): successors_result.append([successor]) return successors_result - def get_successors(self, vertex: Vertex) -> List[Vertex]: + 
def get_successors(self, vertex: "Vertex") -> List["Vertex"]: """Returns the successors of a vertex.""" return [self.get_vertex(target_id) for target_id in self.successor_map.get(vertex.id, [])] - def get_vertex_neighbors(self, vertex: Vertex) -> Dict[Vertex, int]: + def get_vertex_neighbors(self, vertex: "Vertex") -> Dict["Vertex", int]: """Returns the neighbors of a vertex.""" - neighbors: Dict[Vertex, int] = {} + neighbors: Dict["Vertex", int] = {} for edge in self.edges: if edge.source_id == vertex.id: neighbor = self.get_vertex(edge.target_id) @@ -1387,7 +1392,8 @@ def _build_edges(self) -> List[ContractEdge]: for edge in self._edges: new_edge = self.build_edge(edge) edges.add(new_edge) - + if self.vertices and not edges: + raise ValueError("Graph has vertices but no edges") return list(edges) def build_edge(self, edge: EdgeData) -> ContractEdge: @@ -1401,7 +1407,7 @@ def build_edge(self, edge: EdgeData) -> ContractEdge: new_edge = ContractEdge(source, target, edge) return new_edge - def _get_vertex_class(self, node_type: str, node_base_type: str, node_id: str) -> Type[Vertex]: + def _get_vertex_class(self, node_type: str, node_base_type: str, node_id: str) -> Type["Vertex"]: """Returns the node class based on the node type.""" # First we check for the node_base_type node_name = node_id.split("-")[0] @@ -1416,15 +1422,11 @@ def _get_vertex_class(self, node_type: str, node_base_type: str, node_id: str) - if node_type in lazy_load_vertex_dict.VERTEX_TYPE_MAP: return lazy_load_vertex_dict.VERTEX_TYPE_MAP[node_type] - return ( - lazy_load_vertex_dict.VERTEX_TYPE_MAP[node_base_type] - if node_base_type in lazy_load_vertex_dict.VERTEX_TYPE_MAP - else Vertex - ) + return Vertex - def _build_vertices(self) -> List[Vertex]: + def _build_vertices(self) -> List["Vertex"]: """Builds the vertices of the graph.""" - vertices: List[Vertex] = [] + vertices: List["Vertex"] = [] for frontend_data in self._vertices: try: vertex_instance = self.get_vertex(frontend_data["id"]) @@ 
-1452,6 +1454,7 @@ def prepare(self, stop_component_id: Optional[str] = None, start_component_id: O raise ValueError("You can only provide one of stop_component_id or start_component_id") self.validate_stream() self.edges = self._build_edges() + if stop_component_id or start_component_id: try: first_layer = self.sort_vertices(stop_component_id, start_component_id) @@ -1485,7 +1488,7 @@ def __repr__(self): def layered_topological_sort( self, - vertices: List[Vertex], + vertices: List["Vertex"], filter_graphs: bool = False, ) -> List[List[str]]: """Performs a layered topological sort of the vertices in the graph.""" diff --git a/src/backend/base/langflow/graph/graph/constants.py b/src/backend/base/langflow/graph/graph/constants.py index 740a7e443218..51da1182a0ce 100644 --- a/src/backend/base/langflow/graph/graph/constants.py +++ b/src/backend/base/langflow/graph/graph/constants.py @@ -19,7 +19,7 @@ def _import_vertex_types(): class VertexTypesDict(LazyLoadDictBase): def __init__(self): self._all_types_dict = None - self._types = _import_vertex_types() + self._types = _import_vertex_types @property def VERTEX_TYPE_MAP(self): @@ -33,14 +33,15 @@ def _build_dict(self): } def get_type_dict(self): + types = self._types() return { - **{t: self._types.CustomComponentVertex for t in ["CustomComponent"]}, - **{t: self._types.ComponentVertex for t in ["Component"]}, - **{t: self._types.InterfaceVertex for t in CHAT_COMPONENTS}, + **{t: types.CustomComponentVertex for t in ["CustomComponent"]}, + **{t: types.ComponentVertex for t in ["Component"]}, + **{t: types.InterfaceVertex for t in CHAT_COMPONENTS}, } def get_custom_component_vertex_type(self): - return self._types.CustomComponentVertex + return self._types().CustomComponentVertex lazy_load_vertex_dict = VertexTypesDict() diff --git a/src/backend/base/langflow/graph/vertex/base.py b/src/backend/base/langflow/graph/vertex/base.py index b47dcf86e7c2..d7ffc875dfc4 100644 --- a/src/backend/base/langflow/graph/vertex/base.py 
+++ b/src/backend/base/langflow/graph/vertex/base.py @@ -14,7 +14,7 @@ from langflow.graph.schema import INPUT_COMPONENTS, OUTPUT_COMPONENTS, InterfaceComponentTypes, ResultData from langflow.graph.utils import UnbuiltObject, UnbuiltResult, log_transaction from langflow.graph.vertex.schema import NodeData -from langflow.interface.initialize import loading +from langflow.interface import initialize from langflow.interface.listing import lazy_load_dict from langflow.schema.artifact import ArtifactType from langflow.schema.data import Data @@ -451,10 +451,10 @@ async def _build( raise ValueError(f"Base type for vertex {self.display_name} not found") if not self._custom_component: - custom_component, custom_params = await loading.instantiate_class(user_id=user_id, vertex=self) + custom_component, custom_params = await initialize.loading.instantiate_class(user_id=user_id, vertex=self) else: custom_component = self._custom_component - custom_params = loading.get_params(self.params) + custom_params = initialize.loading.get_params(self.params) await self._build_results(custom_component, custom_params, fallback_to_env_vars) @@ -669,7 +669,7 @@ def _extend_params_list_with_result(self, key, result): async def _build_results(self, custom_component, custom_params, fallback_to_env_vars=False): try: - result = await loading.get_instance_results( + result = await initialize.loading.get_instance_results( custom_component=custom_component, custom_params=custom_params, vertex=self, diff --git a/src/backend/base/langflow/graph/vertex/types.py b/src/backend/base/langflow/graph/vertex/types.py index 4c033227a3ef..3ee3a06991cb 100644 --- a/src/backend/base/langflow/graph/vertex/types.py +++ b/src/backend/base/langflow/graph/vertex/types.py @@ -7,7 +7,7 @@ from loguru import logger from langflow.graph.schema import CHAT_COMPONENTS, RECORDS_COMPONENTS, InterfaceComponentTypes, ResultData -from langflow.graph.utils import UnbuiltObject, log_transaction, log_vertex_build, serialize_field 
+from langflow.graph.utils import UnbuiltObject, log_transaction, serialize_field from langflow.graph.vertex.base import Vertex from langflow.graph.vertex.schema import NodeData from langflow.inputs.inputs import InputTypes @@ -15,6 +15,7 @@ from langflow.schema.artifact import ArtifactType from langflow.schema.message import Message from langflow.schema.schema import INPUT_FIELD_NAME +from langflow.graph.utils import log_vertex_build from langflow.template.field.base import UNDEFINED, Output from langflow.utils.schemas import ChatOutputResponse, DataOutputResponse from langflow.utils.util import unescape_string diff --git a/src/backend/base/langflow/inputs/__init__.py b/src/backend/base/langflow/inputs/__init__.py index 00842931cfe6..a68597da9676 100644 --- a/src/backend/base/langflow/inputs/__init__.py +++ b/src/backend/base/langflow/inputs/__init__.py @@ -1,23 +1,24 @@ from .inputs import ( BoolInput, DataInput, + DefaultPromptField, DictInput, DropdownInput, - MultiselectInput, FileInput, FloatInput, HandleInput, + Input, IntInput, MessageInput, MessageTextInput, MultilineInput, MultilineSecretInput, + MultiselectInput, NestedDictInput, PromptInput, SecretStrInput, StrInput, TableInput, - DefaultPromptField, ) __all__ = [ @@ -39,5 +40,6 @@ "StrInput", "MessageTextInput", "TableInput", + "Input", "DefaultPromptField", ] diff --git a/src/backend/base/langflow/interface/initialize/__init__.py b/src/backend/base/langflow/interface/initialize/__init__.py index e69de29bb2d1..a37cc6bcb44b 100644 --- a/src/backend/base/langflow/interface/initialize/__init__.py +++ b/src/backend/base/langflow/interface/initialize/__init__.py @@ -0,0 +1,3 @@ +from . 
import loading + +__all__ = ["loading"] diff --git a/src/backend/base/langflow/template/template/base.py b/src/backend/base/langflow/template/template/base.py index d0372b0b6547..97142cadb28e 100644 --- a/src/backend/base/langflow/template/template/base.py +++ b/src/backend/base/langflow/template/template/base.py @@ -9,7 +9,7 @@ class Template(BaseModel): type_name: str = Field(serialization_alias="_type") - fields: list[Union[Input, InputTypes]] + fields: list[InputTypes] def process_fields( self,
diff --git a/src/backend/tests/unit/graph/graph/test_base.py b/src/backend/tests/unit/graph/graph/test_base.py index 68ecdff7a676..45be6c609ac7 100644 --- a/src/backend/tests/unit/graph/graph/test_base.py +++ b/src/backend/tests/unit/graph/graph/test_base.py @@ -21,13 +21,23 @@ async def test_graph_not_prepared(): graph = Graph() graph.add_component("chat_input", chat_input) graph.add_component("chat_output", chat_output) - graph.add_component_edge("chat_input", (chat_input.outputs[0].name, chat_input.inputs[0].name), "chat_output") with pytest.raises(ValueError): await graph.astep() @pytest.mark.asyncio async def test_graph(): chat_input = ChatInput() chat_output = ChatOutput() graph = Graph() graph.add_component("chat_input", chat_input) graph.add_component("chat_output", chat_output) with pytest.raises(ValueError, match="Graph has vertices but no edges"): graph.prepare() @pytest.mark.asyncio async def test_graph_with_edge(): chat_input = ChatInput() chat_output = ChatOutput() graph = Graph() diff --git a/src/backend/tests/unit/initial_setup/starter_projects/test_memory_chatbot.py b/src/backend/tests/unit/initial_setup/starter_projects/test_memory_chatbot.py index afe24441cef2..a5fe1b700314 100644 --- a/src/backend/tests/unit/initial_setup/starter_projects/test_memory_chatbot.py +++ b/src/backend/tests/unit/initial_setup/starter_projects/test_memory_chatbot.py @@ -94,16 +94,16 @@ def test_memory_chatbot_dump_components_and_edges(memory_chatbot_graph: Graph): assert nodes[0]["data"]["type"] == "ChatInput" assert nodes[0]["id"] == "chat_input" - assert nodes[1]["data"]["type"] == "MemoryComponent" + assert
nodes[1]["data"]["type"] == "Memory" assert nodes[1]["id"] == "chat_memory" assert nodes[2]["data"]["type"] == "ChatOutput" assert nodes[2]["id"] == "chat_output" - assert nodes[3]["data"]["type"] == "OpenAIModelComponent" + assert nodes[3]["data"]["type"] == "OpenAIModel" assert nodes[3]["id"] == "openai" - assert nodes[4]["data"]["type"] == "PromptComponent" + assert nodes[4]["data"]["type"] == "Prompt" assert nodes[4]["id"] == "prompt" # Check edges diff --git a/src/backend/tests/unit/initial_setup/starter_projects/test_vector_store_rag.py b/src/backend/tests/unit/initial_setup/starter_projects/test_vector_store_rag.py index 95fc5a4e74d6..a005b97deeb7 100644 --- a/src/backend/tests/unit/initial_setup/starter_projects/test_vector_store_rag.py +++ b/src/backend/tests/unit/initial_setup/starter_projects/test_vector_store_rag.py @@ -134,16 +134,16 @@ def test_vector_store_rag_dump_components_and_edges(ingestion_graph, rag_graph): ingestion_nodes = sorted(ingestion_nodes, key=lambda x: x["id"]) # Check components in the ingestion graph - assert ingestion_nodes[0]["data"]["type"] == "FileComponent" + assert ingestion_nodes[0]["data"]["type"] == "File" assert ingestion_nodes[0]["id"] == "file-123" - assert ingestion_nodes[1]["data"]["type"] == "OpenAIEmbeddingsComponent" + assert ingestion_nodes[1]["data"]["type"] == "OpenAIEmbeddings" assert ingestion_nodes[1]["id"] == "openai-embeddings-123" - assert ingestion_nodes[2]["data"]["type"] == "SplitTextComponent" + assert ingestion_nodes[2]["data"]["type"] == "SplitText" assert ingestion_nodes[2]["id"] == "text-splitter-123" - assert ingestion_nodes[3]["data"]["type"] == "AstraVectorStoreComponent" + assert ingestion_nodes[3]["data"]["type"] == "AstraDB" assert ingestion_nodes[3]["id"] == "vector-store-123" # Check edges in the ingestion graph @@ -178,19 +178,19 @@ def test_vector_store_rag_dump_components_and_edges(ingestion_graph, rag_graph): assert rag_nodes[1]["data"]["type"] == "ChatOutput" assert rag_nodes[1]["id"] 
== "chatoutput-123" - assert rag_nodes[2]["data"]["type"] == "OpenAIModelComponent" + assert rag_nodes[2]["data"]["type"] == "OpenAIModel" assert rag_nodes[2]["id"] == "openai-123" - assert rag_nodes[3]["data"]["type"] == "OpenAIEmbeddingsComponent" + assert rag_nodes[3]["data"]["type"] == "OpenAIEmbeddings" assert rag_nodes[3]["id"] == "openai-embeddings-124" - assert rag_nodes[4]["data"]["type"] == "ParseDataComponent" + assert rag_nodes[4]["data"]["type"] == "ParseData" assert rag_nodes[4]["id"] == "parse-data-123" - assert rag_nodes[5]["data"]["type"] == "PromptComponent" + assert rag_nodes[5]["data"]["type"] == "Prompt" assert rag_nodes[5]["id"] == "prompt-123" - assert rag_nodes[6]["data"]["type"] == "AstraVectorStoreComponent" + assert rag_nodes[6]["data"]["type"] == "AstraDB" assert rag_nodes[6]["id"] == "rag-vector-store-123" # Check edges in the RAG graph diff --git a/src/backend/tests/unit/test_custom_component.py b/src/backend/tests/unit/test_custom_component.py index 5c1cf5369d4e..5d91cc9dc14b 100644 --- a/src/backend/tests/unit/test_custom_component.py +++ b/src/backend/tests/unit/test_custom_component.py @@ -9,7 +9,12 @@ from langflow.custom.code_parser.code_parser import CodeParser, CodeSyntaxError from langflow.custom.custom_component.base_component import BaseComponent, ComponentCodeNullError from langflow.custom.utils import build_custom_component_template -from langflow.services.database.models.flow import Flow, FlowCreate +from langflow.services.database.models.flow import FlowCreate + + +@pytest.fixture +def client(): + pass @pytest.fixture @@ -168,7 +173,7 @@ def test_code_parser_parse_classes(): """ Test the parse_classes method of the CodeParser class. 
""" - parser = CodeParser("class Test: pass") + parser = CodeParser("from langflow.custom import Component\n\nclass Test(Component): pass") tree = parser.get_tree() for node in ast.walk(tree): if isinstance(node, ast.ClassDef): @@ -177,6 +182,18 @@ def test_code_parser_parse_classes(): assert parser.data["classes"][0]["name"] == "Test" +def test_code_parser_parse_classes_raises(): + """ + Test the parse_classes method of the CodeParser class. + """ + parser = CodeParser("class Test: pass") + tree = parser.get_tree() + with pytest.raises(TypeError): + for node in ast.walk(tree): + if isinstance(node, ast.ClassDef): + parser.parse_classes(node) + + def test_code_parser_parse_global_vars(): """ Test the parse_global_vars method of the CodeParser class. @@ -487,11 +504,6 @@ def test_list_flows_return_type(component): assert isinstance(flows, list) -def test_list_flows_flow_objects(component): - flows = component.list_flows() - assert all(isinstance(flow, Flow) for flow in flows) - - def test_build_config_return_type(component): config = component.build_config() assert isinstance(config, dict) diff --git a/src/backend/tests/unit/test_custom_component_with_client.py b/src/backend/tests/unit/test_custom_component_with_client.py new file mode 100644 index 000000000000..3be2ce657f20 --- /dev/null +++ b/src/backend/tests/unit/test_custom_component_with_client.py @@ -0,0 +1,23 @@ +import pytest + +from langflow.custom.custom_component.custom_component import CustomComponent +from langflow.services.database.models.flow import Flow + + +@pytest.fixture +def component(client, active_user): + return CustomComponent( + user_id=active_user.id, + field_config={ + "fields": { + "llm": {"type": "str"}, + "url": {"type": "str"}, + "year": {"type": "int"}, + } + }, + ) + + +def test_list_flows_flow_objects(component): + flows = component.list_flows() + assert all(isinstance(flow, Flow) for flow in flows) diff --git a/src/backend/tests/unit/test_database.py 
b/src/backend/tests/unit/test_database.py index b97a4fd712cc..aebdef327129 100644 --- a/src/backend/tests/unit/test_database.py +++ b/src/backend/tests/unit/test_database.py @@ -1,26 +1,26 @@ import json +from collections import namedtuple from uuid import UUID, uuid4 -from langflow.graph.utils import log_transaction, log_vertex_build import orjson import pytest from fastapi.testclient import TestClient from sqlmodel import Session from langflow.api.v1.schemas import FlowListCreate, ResultDataResponse -from langflow.initial_setup.setup import load_starter_projects, load_flows_from_directory +from langflow.graph.utils import log_transaction, log_vertex_build +from langflow.initial_setup.setup import load_flows_from_directory, load_starter_projects from langflow.services.database.models.base import orjson_dumps from langflow.services.database.models.flow import Flow, FlowCreate, FlowUpdate from langflow.services.database.models.transactions.crud import get_transactions_by_flow_id -from langflow.services.database.utils import session_getter, migrate_transactions_from_monitor_service_to_database +from langflow.services.database.utils import migrate_transactions_from_monitor_service_to_database, session_getter from langflow.services.deps import get_db_service, get_monitor_service, session_scope from langflow.services.monitor.schema import TransactionModel from langflow.services.monitor.utils import ( + add_row_to_table, drop_and_create_table_if_schema_mismatch, new_duckdb_locked_connection, - add_row_to_table, ) -from collections import namedtuple @pytest.fixture(scope="module") @@ -46,7 +46,7 @@ def test_create_flow(client: TestClient, json_flow: str, active_user, logged_in_ assert response.json()["name"] == flow.name assert response.json()["data"] == flow.data # flow is optional so we can create a flow without a flow - flow = FlowCreate(name="Test Flow") + flow = FlowCreate(name=str(uuid4())) response = client.post("api/v1/flows/", 
json=flow.model_dump(exclude_unset=True), headers=logged_in_headers) assert response.status_code == 201 assert response.json()["name"] == flow.name diff --git a/src/frontend/package-lock.json b/src/frontend/package-lock.json index 231dc6f2f87e..db0e15228cd9 100644 --- a/src/frontend/package-lock.json +++ b/src/frontend/package-lock.json @@ -45,6 +45,7 @@ "cmdk": "^1.0.0", "dompurify": "^3.1.5", "dotenv": "^16.4.5", + "elkjs": "^0.9.3", "emoji-regex": "^10.3.0", "esbuild": "^0.21.5", "file-saver": "^2.0.5", @@ -7424,6 +7425,11 @@ "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.806.tgz", "integrity": "sha512-nkoEX2QIB8kwCOtvtgwhXWy2IHVcOLQZu9Qo36uaGB835mdX/h8uLRlosL6QIhLVUnAiicXRW00PwaPZC74Nrg==" }, + "node_modules/elkjs": { + "version": "0.9.3", + "resolved": "https://registry.npmjs.org/elkjs/-/elkjs-0.9.3.tgz", + "integrity": "sha512-f/ZeWvW/BCXbhGEf1Ujp29EASo/lk1FDnETgNKwJrsVvGZhUWCZyg3xLJjAsxfOmt8KjswHmI5EwCQcPMpOYhQ==" + }, "node_modules/emoji-regex": { "version": "10.3.0", "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-10.3.0.tgz", diff --git a/src/frontend/package.json b/src/frontend/package.json index baf6af68f21c..1b179f2b9fc4 100644 --- a/src/frontend/package.json +++ b/src/frontend/package.json @@ -40,6 +40,7 @@ "cmdk": "^1.0.0", "dompurify": "^3.1.5", "dotenv": "^16.4.5", + "elkjs": "^0.9.3", "emoji-regex": "^10.3.0", "esbuild": "^0.21.5", "file-saver": "^2.0.5", diff --git a/src/frontend/src/CustomNodes/utils/get-handle-id.tsx b/src/frontend/src/CustomNodes/utils/get-handle-id.tsx new file mode 100644 index 000000000000..934efe2f248c --- /dev/null +++ b/src/frontend/src/CustomNodes/utils/get-handle-id.tsx @@ -0,0 +1,30 @@ +import { sourceHandleType, targetHandleType } from "@/types/flow"; +import { scapedJSONStringfy } from "@/utils/reactflowUtils"; + +export function getRightHandleId({ + output_types, + id, + dataType, + name, +}: sourceHandleType): string { + return scapedJSONStringfy({ + 
dataType, + id, + output_types, + name, + }); +} + +export function getLeftHandleId({ + inputTypes, + type, + fieldName, + id, +}: targetHandleType): string { + return scapedJSONStringfy({ + inputTypes, + id, + type, + fieldName, + }); +} diff --git a/src/frontend/src/constants/constants.ts b/src/frontend/src/constants/constants.ts index 7b8663ed930c..d1d97bd39ca8 100644 --- a/src/frontend/src/constants/constants.ts +++ b/src/frontend/src/constants/constants.ts @@ -879,3 +879,6 @@ export const LANGFLOW_ACCESS_TOKEN_EXPIRE_SECONDS = 60 * 60 - 60 * 60 * 0.1; export const LANGFLOW_ACCESS_TOKEN_EXPIRE_SECONDS_ENV = Number(process.env.ACCESS_TOKEN_EXPIRE_SECONDS) - Number(process.env.ACCESS_TOKEN_EXPIRE_SECONDS) * 0.1; + +export const NODE_WIDTH = 384; +export const NODE_HEIGHT = NODE_WIDTH * 3; diff --git a/src/frontend/src/hooks/flows/use-add-flow.ts b/src/frontend/src/hooks/flows/use-add-flow.ts index d0aa221c4fd2..5d9587db3eef 100644 --- a/src/frontend/src/hooks/flows/use-add-flow.ts +++ b/src/frontend/src/hooks/flows/use-add-flow.ts @@ -34,7 +34,7 @@ const useAddFlow = () => { return new Promise(async (resolve, reject) => { const flow = cloneDeep(params?.flow) ?? undefined; let flowData = flow - ? processDataFromFlow(flow) + ? 
await processDataFromFlow(flow) : { nodes: [], edges: [], viewport: { zoom: 1, x: 0, y: 0 } }; flowData?.nodes.forEach((node) => { updateGroupRecursion( diff --git a/src/frontend/src/types/utils/reactflowUtils.ts b/src/frontend/src/types/utils/reactflowUtils.ts index fda6b75a83be..1eb2430a89e0 100644 --- a/src/frontend/src/types/utils/reactflowUtils.ts +++ b/src/frontend/src/types/utils/reactflowUtils.ts @@ -6,6 +6,10 @@ export type unselectAllNodesType = { data: Node[]; }; +export type addEscapedHandleIdsToEdgesType = { + edges: Edge[]; +}; + export type updateEdgesHandleIdsType = { nodes: NodeType[]; edges: Edge[]; diff --git a/src/frontend/src/utils/layoutUtils.ts b/src/frontend/src/utils/layoutUtils.ts new file mode 100644 index 000000000000..b998d98a93d6 --- /dev/null +++ b/src/frontend/src/utils/layoutUtils.ts @@ -0,0 +1,76 @@ +import { NODE_HEIGHT, NODE_WIDTH } from "@/constants/constants"; +import { NodeType } from "@/types/flow"; +import ELK, { ElkNode } from "elkjs/lib/elk.bundled.js"; +import { cloneDeep } from "lodash"; +import { Edge } from "reactflow"; + +const layoutOptions = { + "elk.algorithm": "layered", + "elk.direction": "RIGHT", + "elk.layered.spacing.edgeNodeBetweenLayers": "40", + "elk.spacing.nodeNode": "40", + "elk.layered.nodePlacement.strategy": "SIMPLE", +}; +const elk = new ELK(); + +// uses elkjs to give each node a layouted position +export const getLayoutedNodes = async (nodes: NodeType[], edges: Edge[]) => { + const graph = { + id: "root", + layoutOptions, + children: cloneDeep(nodes).map((n) => { + const targetPorts = edges + .filter((e) => e.source === n.id) + .map((e) => ({ + id: e.sourceHandle, + properties: { + side: "EAST", + }, + })); + + const sourcePorts = edges + .filter((e) => e.target === n.id) + .map((e) => ({ + id: e.targetHandle, + properties: { + side: "WEST", + }, + })); + return { + id: n.id, + width: NODE_WIDTH, + height: NODE_HEIGHT, + // ⚠️ we need to tell elk that the ports are fixed, in order to reduce edge 
crossings + properties: { + "org.eclipse.elk.portConstraints": "FIXED_ORDER", + }, + // we are also passing the id, so we can also handle edges without a sourceHandle or targetHandle option + ports: [{ id: n.id }, ...targetPorts, ...sourcePorts], + }; + }) as ElkNode[], + edges: edges.map((e) => ({ + id: e.id, + sources: [e.sourceHandle || e.source], + targets: [e.targetHandle || e.target], + })), + }; + + const layoutedGraph = await elk.layout(graph); + + const layoutedNodes = nodes.map((node) => { + const layoutedNode = layoutedGraph.children?.find( + (lgNode) => lgNode.id === node.id, + ); + + return { + ...node, + position: { + x: layoutedNode?.x ?? 0, + y: layoutedNode?.y ?? 0, + }, + type: "genericNode", + }; + }); + + return layoutedNodes; +}; diff --git a/src/frontend/src/utils/reactflowUtils.ts b/src/frontend/src/utils/reactflowUtils.ts index 58541adb5d17..9eb93843ee3b 100644 --- a/src/frontend/src/utils/reactflowUtils.ts +++ b/src/frontend/src/utils/reactflowUtils.ts @@ -1,3 +1,7 @@ +import { + getLeftHandleId, + getRightHandleId, +} from "@/CustomNodes/utils/get-handle-id"; import { cloneDeep } from "lodash"; import { Connection, @@ -34,11 +38,13 @@ import { targetHandleType, } from "../types/flow"; import { + addEscapedHandleIdsToEdgesType, findLastNodeType, generateFlowType, unselectAllNodesType, updateEdgesHandleIdsType, } from "../types/utils/reactflowUtils"; +import { getLayoutedNodes } from "./layoutUtils"; import { createRandomKey, toTitleCase } from "./utils"; const uid = new ShortUniqueId(); @@ -274,7 +280,7 @@ export function updateTemplate( export const processFlows = (DbData: FlowType[], skipUpdate = true) => { let savedComponents: { [key: string]: APIClassType } = {}; - DbData.forEach((flow: FlowType) => { + DbData.forEach(async (flow: FlowType) => { try { if (!flow.data) { return; @@ -290,15 +296,24 @@ export const processFlows = (DbData: FlowType[], skipUpdate = true) => { ] = cloneDeep((flow.data.nodes[0].data as NodeDataType).node!); 
return; } - processDataFromFlow(flow, !skipUpdate); + await processDataFromFlow(flow, !skipUpdate).catch((e) => { + console.error(e); + }); } catch (e) { - console.log(e); + console.error(e); } }); return { data: savedComponents, flows: DbData }; }; -export const processDataFromFlow = (flow: FlowType, refreshIds = true) => { +const needsLayout = (nodes: NodeType[]) => { + return nodes.some((node) => !node.position); +}; + +export async function processDataFromFlow( + flow: FlowType, + refreshIds = true, +): Promise { let data = flow?.data ? flow.data : null; if (data) { processFlowEdges(flow); @@ -308,9 +323,14 @@ export const processDataFromFlow = (flow: FlowType, refreshIds = true) => { updateEdges(data.edges); // updateNodes(data.nodes, data.edges); if (refreshIds) updateIds(data); // Assuming updateIds is defined elsewhere + // add layout to nodes if not present + if (needsLayout(data.nodes)) { + const layoutedNodes = await getLayoutedNodes(data.nodes, data.edges); + data.nodes = layoutedNodes; + } } return data; -}; +} export function updateIds( { edges, nodes }: { edges: Edge[]; nodes: Node[] }, @@ -341,6 +361,7 @@ export function updateIds( concatedEdges.forEach((edge: Edge) => { edge.source = idsMap[edge.source]; edge.target = idsMap[edge.target]; + const sourceHandleObject: sourceHandleType = scapeJSONParse( edge.sourceHandle!, ); @@ -475,6 +496,26 @@ export function addVersionToDuplicates(flow: FlowType, flows: FlowType[]) { return newName; } +export function addEscapedHandleIdsToEdges({ + edges, +}: addEscapedHandleIdsToEdgesType): Edge[] { + let newEdges = cloneDeep(edges); + newEdges.forEach((edge) => { + let escapedSourceHandle = edge.sourceHandle; + let escapedTargetHandle = edge.targetHandle; + if (!escapedSourceHandle) { + let sourceHandle = edge.data?.sourceHandle; + escapedSourceHandle = getRightHandleId(sourceHandle); + edge.sourceHandle = escapedSourceHandle; + } + if (!escapedTargetHandle) { + let targetHandle = edge.data?.targetHandle; + 
escapedTargetHandle = getLeftHandleId(targetHandle); + edge.targetHandle = escapedTargetHandle; + } + }); + return newEdges; +} export function updateEdgesHandleIds({ edges, nodes, @@ -740,6 +781,13 @@ export function checkOldEdgesHandles(edges: Edge[]): boolean { ); } +export function checkEdgeWithoutEscapedHandleIds(edges: Edge[]): boolean { + return edges.some( + (edge) => + (!edge.sourceHandle || !edge.targetHandle) && edge.data?.sourceHandle, + ); +} + export function checkOldNodesOutput(nodes: NodeType[]): boolean { return nodes.some((node) => !node.data.node?.outputs); } @@ -1231,7 +1279,10 @@ export function updateEdgesIds( export function processFlowEdges(flow: FlowType) { if (!flow.data || !flow.data.edges) return; - if (checkOldEdgesHandles(flow.data.edges)) { + if (checkEdgeWithoutEscapedHandleIds(flow.data.edges)) { + const newEdges = addEscapedHandleIdsToEdges({ edges: flow.data.edges }); + flow.data.edges = newEdges; + } else if (checkOldEdgesHandles(flow.data.edges)) { const newEdges = updateEdgesHandleIds(flow.data); flow.data.edges = newEdges; }