diff --git a/src/backend/langflow/alembic/env.py b/src/backend/langflow/alembic/env.py index 479db05bb..bc470f05b 100644 --- a/src/backend/langflow/alembic/env.py +++ b/src/backend/langflow/alembic/env.py @@ -62,12 +62,26 @@ def run_migrations_online() -> None: and associate a connection with the context. """ - from langflow.services.deps import get_db_service try: + from langflow.services.database.factory import DatabaseServiceFactory + from langflow.services.deps import get_db_service + from langflow.services.manager import ( + initialize_settings_service, + service_manager, + ) + from langflow.services.schema import ServiceType + + initialize_settings_service() + service_manager.register_factory( + DatabaseServiceFactory(), [ServiceType.SETTINGS_SERVICE] + ) connectable = get_db_service().engine except Exception as e: logger.error(f"Error getting database engine: {e}") + url = os.getenv("LANGFLOW_DATABASE_URL") + url = url or config.get_main_option("sqlalchemy.url") + config.set_main_option("sqlalchemy.url", url) connectable = engine_from_config( config.get_section(config.config_ini_section, {}), prefix="sqlalchemy.", diff --git a/src/backend/langflow/alembic/script.py.mako b/src/backend/langflow/alembic/script.py.mako index 2fbdc930d..bc9bca83a 100644 --- a/src/backend/langflow/alembic/script.py.mako +++ b/src/backend/langflow/alembic/script.py.mako @@ -23,10 +23,12 @@ depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)} def upgrade() -> None: conn = op.get_bind() inspector = Inspector.from_engine(conn) # type: ignore + table_names = inspector.get_table_names() ${upgrades if upgrades else "pass"} def downgrade() -> None: conn = op.get_bind() inspector = Inspector.from_engine(conn) # type: ignore + table_names = inspector.get_table_names() ${downgrades if downgrades else "pass"} diff --git a/src/backend/langflow/alembic/versions/63b9c451fd30_add_icon_and_icon_bg_color_to_flow.py 
b/src/backend/langflow/alembic/versions/63b9c451fd30_add_icon_and_icon_bg_color_to_flow.py new file mode 100644 index 000000000..3deb66346 --- /dev/null +++ b/src/backend/langflow/alembic/versions/63b9c451fd30_add_icon_and_icon_bg_color_to_flow.py @@ -0,0 +1,56 @@ +"""Add icon and icon_bg_color to Flow + +Revision ID: 63b9c451fd30 +Revises: bc2f01c40e4a +Create Date: 2024-03-06 10:53:47.148658 + +""" + +from typing import Sequence, Union + +import sqlalchemy as sa +import sqlmodel +from alembic import op +from sqlalchemy.engine.reflection import Inspector + +# revision identifiers, used by Alembic. +revision: str = "63b9c451fd30" +down_revision: Union[str, None] = "bc2f01c40e4a" +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + conn = op.get_bind() + inspector = Inspector.from_engine(conn) # type: ignore + table_names = inspector.get_table_names() + column_names = [column["name"] for column in inspector.get_columns("flow")] + # ### commands auto generated by Alembic - please adjust! ### + with op.batch_alter_table("flow", schema=None) as batch_op: + if "icon" not in column_names: + batch_op.add_column( + sa.Column("icon", sqlmodel.sql.sqltypes.AutoString(), nullable=True) + ) + if "icon_bg_color" not in column_names: + batch_op.add_column( + sa.Column( + "icon_bg_color", sqlmodel.sql.sqltypes.AutoString(), nullable=True + ) + ) + + # ### end Alembic commands ### + + +def downgrade() -> None: + conn = op.get_bind() + inspector = Inspector.from_engine(conn) # type: ignore + table_names = inspector.get_table_names() + column_names = [column["name"] for column in inspector.get_columns("flow")] + # ### commands auto generated by Alembic - please adjust! 
### + with op.batch_alter_table("flow", schema=None) as batch_op: + if "icon" in column_names: + batch_op.drop_column("icon") + if "icon_bg_color" in column_names: + batch_op.drop_column("icon_bg_color") + + # ### end Alembic commands ### diff --git a/src/backend/langflow/api/v1/base.py b/src/backend/langflow/api/v1/base.py index 2380b019e..cc16c6d1b 100644 --- a/src/backend/langflow/api/v1/base.py +++ b/src/backend/langflow/api/v1/base.py @@ -86,9 +86,7 @@ def validate_prompt(template: str): # Check if there are invalid characters in the input_variables input_variables = check_input_variables(input_variables) if any(var in INVALID_NAMES for var in input_variables): - raise ValueError( - f"Invalid input variables. None of the variables can be named {', '.join(input_variables)}. " - ) + raise ValueError(f"Invalid input variables. None of the variables can be named {', '.join(input_variables)}. ") try: PromptTemplate(template=template, input_variables=input_variables) @@ -123,9 +121,7 @@ def fix_variable(var, invalid_chars, wrong_variables): # Handle variables starting with a number if var[0].isdigit(): invalid_chars.append(var[0]) - new_var, invalid_chars, wrong_variables = fix_variable( - var[1:], invalid_chars, wrong_variables - ) + new_var, invalid_chars, wrong_variables = fix_variable(var[1:], invalid_chars, wrong_variables) # Temporarily replace {{ and }} to avoid treating them as invalid new_var = new_var.replace("{{", "ᴛᴇᴍᴘᴏᴘᴇɴ").replace("}}", "ᴛᴇᴍᴘᴄʟᴏsᴇ") @@ -152,9 +148,7 @@ def check_variable(var, invalid_chars, wrong_variables, empty_variables): return wrong_variables, empty_variables -def check_for_errors( - input_variables, fixed_variables, wrong_variables, empty_variables -): +def check_for_errors(input_variables, fixed_variables, wrong_variables, empty_variables): if any(var for var in input_variables if var not in fixed_variables): error_message = ( f"Error: Input variables contain invalid characters or formats. 
\n" @@ -179,17 +173,11 @@ def check_input_variables(input_variables): if is_json_like(var): continue - new_var, wrong_variables, empty_variables = fix_variable( - var, invalid_chars, wrong_variables - ) - wrong_variables, empty_variables = check_variable( - var, INVALID_CHARACTERS, wrong_variables, empty_variables - ) + new_var, wrong_variables, empty_variables = fix_variable(var, invalid_chars, wrong_variables) + wrong_variables, empty_variables = check_variable(var, INVALID_CHARACTERS, wrong_variables, empty_variables) fixed_variables.append(new_var) variables_to_check.append(var) - check_for_errors( - variables_to_check, fixed_variables, wrong_variables, empty_variables - ) + check_for_errors(variables_to_check, fixed_variables, wrong_variables, empty_variables) return fixed_variables diff --git a/src/backend/langflow/api/v1/callback.py b/src/backend/langflow/api/v1/callback.py index 38737623c..b326311ac 100644 --- a/src/backend/langflow/api/v1/callback.py +++ b/src/backend/langflow/api/v1/callback.py @@ -33,9 +33,7 @@ class AsyncStreamingLLMCallbackHandleSIO(AsyncCallbackHandler): resp = ChatResponse(message=token, type="stream", intermediate_steps="") await self.socketio_service.emit_token(to=self.sid, data=resp.model_dump()) - async def on_tool_start( - self, serialized: Dict[str, Any], input_str: str, **kwargs: Any - ) -> Any: + async def on_tool_start(self, serialized: Dict[str, Any], input_str: str, **kwargs: Any) -> Any: """Run when tool starts running.""" resp = ChatResponse( message="", @@ -73,9 +71,7 @@ class AsyncStreamingLLMCallbackHandleSIO(AsyncCallbackHandler): try: # This is to emulate the stream of tokens for resp in resps: - await self.socketio_service.emit_token( - to=self.sid, data=resp.model_dump() - ) + await self.socketio_service.emit_token(to=self.sid, data=resp.model_dump()) except Exception as exc: logger.error(f"Error sending response: {exc}") @@ -101,9 +97,7 @@ class AsyncStreamingLLMCallbackHandleSIO(AsyncCallbackHandler): resp = 
PromptResponse( prompt=text, ) - await self.socketio_service.emit_message( - to=self.sid, data=resp.model_dump() - ) + await self.socketio_service.emit_message(to=self.sid, data=resp.model_dump()) async def on_agent_action(self, action: AgentAction, **kwargs: Any): log = f"Thought: {action.log}" @@ -113,9 +107,7 @@ class AsyncStreamingLLMCallbackHandleSIO(AsyncCallbackHandler): logs = log.split("\n") for log in logs: resp = ChatResponse(message="", type="stream", intermediate_steps=log) - await self.socketio_service.emit_token( - to=self.sid, data=resp.model_dump() - ) + await self.socketio_service.emit_token(to=self.sid, data=resp.model_dump()) else: resp = ChatResponse(message="", type="stream", intermediate_steps=log) await self.socketio_service.emit_token(to=self.sid, data=resp.model_dump()) diff --git a/src/backend/langflow/api/v1/chat.py b/src/backend/langflow/api/v1/chat.py index b72504da6..ecec8f200 100644 --- a/src/backend/langflow/api/v1/chat.py +++ b/src/backend/langflow/api/v1/chat.py @@ -101,12 +101,8 @@ async def build_vertex( cache = chat_service.get_cache(flow_id) if not cache: # If there's no cache - logger.warning( - f"No cache found for {flow_id}. Building graph starting at {vertex_id}" - ) - graph = build_and_cache_graph( - flow_id=flow_id, session=next(get_session()), chat_service=chat_service - ) + logger.warning(f"No cache found for {flow_id}. 
Building graph starting at {vertex_id}") + graph = build_and_cache_graph(flow_id=flow_id, session=next(get_session()), chat_service=chat_service) else: graph = cache.get("result") result_data_response = ResultDataResponse(results={}) @@ -126,9 +122,7 @@ async def build_vertex( else: raise ValueError(f"No result found for vertex {vertex_id}") next_vertices_ids = vertex.successors_ids - next_vertices_ids = [ - v for v in next_vertices_ids if graph.should_run_vertex(v) - ] + next_vertices_ids = [v for v in next_vertices_ids if graph.should_run_vertex(v)] result_data_response = ResultDataResponse(**result_dict.model_dump()) @@ -211,9 +205,7 @@ async def build_vertex_stream( else: graph = cache.get("result") else: - session_data = await session_service.load_session( - session_id, flow_id=flow_id - ) + session_data = await session_service.load_session(session_id, flow_id=flow_id) graph, artifacts = session_data if session_data else (None, None) if not graph: raise ValueError(f"No graph found for {flow_id}.") diff --git a/src/backend/langflow/api/v1/endpoints.py b/src/backend/langflow/api/v1/endpoints.py index b77685d65..127a51627 100644 --- a/src/backend/langflow/api/v1/endpoints.py +++ b/src/backend/langflow/api/v1/endpoints.py @@ -48,12 +48,11 @@ def get_all( all_types_dict = get_all_types_dict(settings_service) return all_types_dict except Exception as exc: + logger.exception(exc) raise HTTPException(status_code=500, detail=str(exc)) from exc -@router.post( - "/run/{flow_id}", response_model=RunResponse, response_model_exclude_none=True -) +@router.post("/run/{flow_id}", response_model=RunResponse, response_model_exclude_none=True) async def run_flow_with_caching( session: Annotated[Session, Depends(get_session)], flow_id: str, @@ -112,9 +111,7 @@ async def run_flow_with_caching( outputs = [] if session_id: - session_data = await session_service.load_session( - session_id, flow_id=flow_id - ) + session_data = await session_service.load_session(session_id, 
flow_id=flow_id) graph, artifacts = session_data if session_data else (None, None) task_result: Any = None if not graph: @@ -133,11 +130,7 @@ async def run_flow_with_caching( else: # Get the flow that matches the flow_id and belongs to the user # flow = session.query(Flow).filter(Flow.id == flow_id).filter(Flow.user_id == api_key_user.id).first() - flow = session.exec( - select(Flow) - .where(Flow.id == flow_id) - .where(Flow.user_id == api_key_user.id) - ).first() + flow = session.exec(select(Flow).where(Flow.id == flow_id).where(Flow.user_id == api_key_user.id)).first() if flow is None: raise ValueError(f"Flow {flow_id} not found") @@ -161,18 +154,12 @@ async def run_flow_with_caching( # StatementError('(builtins.ValueError) badly formed hexadecimal UUID string') if "badly formed hexadecimal UUID string" in str(exc): # This means the Flow ID is not a valid UUID which means it can't find the flow - raise HTTPException( - status_code=status.HTTP_404_NOT_FOUND, detail=str(exc) - ) from exc + raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=str(exc)) from exc except ValueError as exc: if f"Flow {flow_id} not found" in str(exc): - raise HTTPException( - status_code=status.HTTP_404_NOT_FOUND, detail=str(exc) - ) from exc + raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=str(exc)) from exc else: - raise HTTPException( - status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=str(exc) - ) from exc + raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=str(exc)) from exc @router.post( @@ -201,8 +188,7 @@ async def process( """ # Raise a depreciation warning logger.warning( - "The /process endpoint is deprecated and will be removed in a future version. " - "Please use /run instead." + "The /process endpoint is deprecated and will be removed in a future version. " "Please use /run instead." 
) raise HTTPException( status_code=status.HTTP_400_BAD_REQUEST, @@ -274,16 +260,12 @@ async def custom_component( built_frontend_node = build_custom_component_template(component, user_id=user.id) - built_frontend_node = update_frontend_node_with_template_values( - built_frontend_node, raw_code.frontend_node - ) + built_frontend_node = update_frontend_node_with_template_values(built_frontend_node, raw_code.frontend_node) return built_frontend_node @router.post("/custom_component/reload", status_code=HTTPStatus.OK) -async def reload_custom_component( - path: str, user: User = Depends(get_current_active_user) -): +async def reload_custom_component(path: str, user: User = Depends(get_current_active_user)): from langflow.interface.custom.utils import build_custom_component_template try: diff --git a/src/backend/langflow/api/v1/flows.py b/src/backend/langflow/api/v1/flows.py index 371e320f4..dd60d5fed 100644 --- a/src/backend/langflow/api/v1/flows.py +++ b/src/backend/langflow/api/v1/flows.py @@ -10,6 +10,7 @@ from sqlmodel import Session, select from langflow.api.utils import remove_api_keys, validate_is_component from langflow.api.v1.schemas import FlowListCreate, FlowListRead +from langflow.initial_setup.setup import STARTER_FOLDER_NAME from langflow.services.auth.utils import get_current_active_user from langflow.services.database.models.flow import ( Flow, @@ -19,6 +20,7 @@ from langflow.services.database.models.flow import ( ) from langflow.services.database.models.user.model import User from langflow.services.deps import get_session, get_settings_service +from langflow.services.settings.service import SettingsService # build router router = APIRouter(prefix="/flows", tags=["Flows"]) @@ -49,15 +51,33 @@ def read_flows( *, current_user: User = Depends(get_current_active_user), session: Session = Depends(get_session), + settings_service: "SettingsService" = Depends(get_settings_service), ): """Read all flows.""" try: - flows = current_user.flows + auth_settings = 
settings_service.auth_settings + if auth_settings.AUTO_LOGIN: + flows = session.exec( + select(Flow).where( + (Flow.user_id == None) | (Flow.user_id == current_user.id) # noqa + ) + ).all() + else: + flows = current_user.flows + flows = validate_is_component(flows) + flow_ids = [flow.id for flow in flows] # with the session get the flows that DO NOT have a user_id try: - example_flows = session.exec(select(Flow).where(Flow.user_id == None)).all() - flows.extend(example_flows) + example_flows = session.exec( + select(Flow).where( + Flow.user_id == None, # noqa + Flow.folder == STARTER_FOLDER_NAME, + ) + ).all() + for example_flow in example_flows: + if example_flow.id not in flow_ids: + flows.append(example_flow) except Exception as e: logger.error(e) except Exception as e: @@ -71,13 +91,18 @@ def read_flow( session: Session = Depends(get_session), flow_id: UUID, current_user: User = Depends(get_current_active_user), + settings_service: "SettingsService" = Depends(get_settings_service), ): """Read a flow.""" - if user_flow := ( - session.exec( - select(Flow).where(Flow.id == flow_id, Flow.user_id == current_user.id) - ).first() - ): + auth_settings = settings_service.auth_settings + stmt = select(Flow).where(Flow.id == flow_id) + if auth_settings.AUTO_LOGIN: + # If auto login is enable user_id can be current_user.id or None + # so write an OR + stmt = stmt.where( + (Flow.user_id == current_user.id) | (Flow.user_id == None) # noqa + ) # noqa + if user_flow := session.exec(stmt).first(): return user_flow else: raise HTTPException(status_code=404, detail="Flow not found") @@ -94,7 +119,12 @@ def update_flow( ): """Update a flow.""" - db_flow = read_flow(session=session, flow_id=flow_id, current_user=current_user) + db_flow = read_flow( + session=session, + flow_id=flow_id, + current_user=current_user, + settings_service=settings_service, + ) if not db_flow: raise HTTPException(status_code=404, detail="Flow not found") flow_data = flow.model_dump(exclude_unset=True) @@ 
-116,9 +146,15 @@ def delete_flow( session: Session = Depends(get_session), flow_id: UUID, current_user: User = Depends(get_current_active_user), + settings_service=Depends(get_settings_service), ): """Delete a flow.""" - flow = read_flow(session=session, flow_id=flow_id, current_user=current_user) + flow = read_flow( + session=session, + flow_id=flow_id, + current_user=current_user, + settings_service=settings_service, + ) if not flow: raise HTTPException(status_code=404, detail="Flow not found") session.delete(flow) diff --git a/src/backend/langflow/api/v1/schemas.py b/src/backend/langflow/api/v1/schemas.py index 17274bcfc..70a60de5b 100644 --- a/src/backend/langflow/api/v1/schemas.py +++ b/src/backend/langflow/api/v1/schemas.py @@ -158,15 +158,13 @@ class StreamData(BaseModel): data: dict def __str__(self) -> str: - return ( - f"event: {self.event}\ndata: {orjson_dumps(self.data, indent_2=False)}\n\n" - ) + return f"event: {self.event}\ndata: {orjson_dumps(self.data, indent_2=False)}\n\n" class CustomComponentCode(BaseModel): code: str field: Optional[str] = None - field_value: Optional[str] = None + field_value: Optional[Any] = None frontend_node: Optional[dict] = None diff --git a/src/backend/langflow/api/v1/validate.py b/src/backend/langflow/api/v1/validate.py index 6b64faf20..02c17686b 100644 --- a/src/backend/langflow/api/v1/validate.py +++ b/src/backend/langflow/api/v1/validate.py @@ -41,9 +41,7 @@ def post_validate_prompt(prompt_request: ValidatePromptRequest): add_new_variables_to_template(input_variables, prompt_request) - remove_old_variables_from_template( - old_custom_fields, input_variables, prompt_request - ) + remove_old_variables_from_template(old_custom_fields, input_variables, prompt_request) update_input_variables_field(input_variables, prompt_request) @@ -58,19 +56,12 @@ def post_validate_prompt(prompt_request: ValidatePromptRequest): def get_old_custom_fields(prompt_request): try: - if ( - len(prompt_request.frontend_node.custom_fields) == 1 
- and prompt_request.name == "" - ): + if len(prompt_request.frontend_node.custom_fields) == 1 and prompt_request.name == "": # If there is only one custom field and the name is empty string # then we are dealing with the first prompt request after the node was created - prompt_request.name = list( - prompt_request.frontend_node.custom_fields.keys() - )[0] + prompt_request.name = list(prompt_request.frontend_node.custom_fields.keys())[0] - old_custom_fields = prompt_request.frontend_node.custom_fields[ - prompt_request.name - ] + old_custom_fields = prompt_request.frontend_node.custom_fields[prompt_request.name] if old_custom_fields is None: old_custom_fields = [] @@ -87,40 +78,26 @@ def add_new_variables_to_template(input_variables, prompt_request): template_field = DefaultPromptField(name=variable, display_name=variable) if variable in prompt_request.frontend_node.template: # Set the new field with the old value - template_field.value = prompt_request.frontend_node.template[variable][ - "value" - ] + template_field.value = prompt_request.frontend_node.template[variable]["value"] prompt_request.frontend_node.template[variable] = template_field.to_dict() # Check if variable is not already in the list before appending - if ( - variable - not in prompt_request.frontend_node.custom_fields[prompt_request.name] - ): - prompt_request.frontend_node.custom_fields[prompt_request.name].append( - variable - ) + if variable not in prompt_request.frontend_node.custom_fields[prompt_request.name]: + prompt_request.frontend_node.custom_fields[prompt_request.name].append(variable) except Exception as exc: logger.exception(exc) raise HTTPException(status_code=500, detail=str(exc)) from exc -def remove_old_variables_from_template( - old_custom_fields, input_variables, prompt_request -): +def remove_old_variables_from_template(old_custom_fields, input_variables, prompt_request): for variable in old_custom_fields: if variable not in input_variables: try: # Remove the variable from 
custom_fields associated with the given name - if ( - variable - in prompt_request.frontend_node.custom_fields[prompt_request.name] - ): - prompt_request.frontend_node.custom_fields[ - prompt_request.name - ].remove(variable) + if variable in prompt_request.frontend_node.custom_fields[prompt_request.name]: + prompt_request.frontend_node.custom_fields[prompt_request.name].remove(variable) # Remove the variable from the template prompt_request.frontend_node.template.pop(variable, None) @@ -132,6 +109,4 @@ def remove_old_variables_from_template( def update_input_variables_field(input_variables, prompt_request): if "input_variables" in prompt_request.frontend_node.template: - prompt_request.frontend_node.template["input_variables"][ - "value" - ] = input_variables + prompt_request.frontend_node.template["input_variables"]["value"] = input_variables diff --git a/src/backend/langflow/base/data/utils.py b/src/backend/langflow/base/data/utils.py index 2219310ef..03f6de046 100644 --- a/src/backend/langflow/base/data/utils.py +++ b/src/backend/langflow/base/data/utils.py @@ -35,9 +35,7 @@ def retrieve_file_paths( glob = "**/*" if recursive else "*" paths = walk_level(path_obj, depth) if depth else path_obj.glob(glob) - file_paths = [ - Text(p) for p in paths if p.is_file() and match_types(p) and is_not_hidden(p) - ] + file_paths = [Text(p) for p in paths if p.is_file() and match_types(p) and is_not_hidden(p)] return file_paths @@ -70,16 +68,12 @@ def get_elements( if use_multithreading: records = parallel_load_records(file_paths, silent_errors, max_concurrency) else: - records = [ - parse_file_to_record(file_path, silent_errors) for file_path in file_paths - ] + records = [parse_file_to_record(file_path, silent_errors) for file_path in file_paths] records = list(filter(None, records)) return records -def parallel_load_records( - file_paths: List[str], silent_errors: bool, max_concurrency: int -) -> List[Optional[Record]]: +def parallel_load_records(file_paths: List[str], 
silent_errors: bool, max_concurrency: int) -> List[Optional[Record]]: with futures.ThreadPoolExecutor(max_workers=max_concurrency) as executor: loaded_files = executor.map( lambda file_path: parse_file_to_record(file_path, silent_errors), diff --git a/src/backend/langflow/base/io/chat.py b/src/backend/langflow/base/io/chat.py index 8695dbcbc..b9c721190 100644 --- a/src/backend/langflow/base/io/chat.py +++ b/src/backend/langflow/base/io/chat.py @@ -59,8 +59,8 @@ class ChatComponent(CustomComponent): ) else: record = Record( - text=message, data={ + "text": message, "session_id": session_id, "sender": sender, "sender_name": sender_name, diff --git a/src/backend/langflow/components/chains/ConversationChain.py b/src/backend/langflow/components/chains/ConversationChain.py index 6e1e319d6..774632412 100644 --- a/src/backend/langflow/components/chains/ConversationChain.py +++ b/src/backend/langflow/components/chains/ConversationChain.py @@ -31,7 +31,7 @@ class ConversationChainComponent(CustomComponent): chain = ConversationChain(llm=llm) else: chain = ConversationChain(llm=llm, memory=memory) - result = chain.invoke(inputs) + result = chain.invoke({"input": input_value}) if hasattr(result, "content") and isinstance(result.content, str): result = result.content elif isinstance(result, str): diff --git a/src/backend/langflow/components/utilities/APIRequest.py b/src/backend/langflow/components/data/APIRequest.py similarity index 60% rename from src/backend/langflow/components/utilities/APIRequest.py rename to src/backend/langflow/components/data/APIRequest.py index 2e73979ff..abdede217 100644 --- a/src/backend/langflow/components/utilities/APIRequest.py +++ b/src/backend/langflow/components/data/APIRequest.py @@ -1,8 +1,8 @@ import asyncio -from typing import List, Optional, Union -import httpx +from typing import List, Optional -import requests +import httpx +import json from langflow import CustomComponent from langflow.schema import Record @@ -27,10 +27,12 @@ class 
APIRequest(CustomComponent): "headers": { "display_name": "Headers", "info": "The headers to send with the request.", + "input_types": ["dict"], }, - "record": { - "display_name": "Record", - "info": "The record to send with the request (for POST, PATCH, PUT).", + "body": { + "display_name": "Body", + "info": "The body to send with the request (for POST, PATCH, PUT).", + "input_types": ["dict"], }, "timeout": { "display_name": "Timeout", @@ -42,36 +44,34 @@ class APIRequest(CustomComponent): async def make_request( self, - session: requests.Session, + client: httpx.AsyncClient, method: str, url: str, headers: Optional[dict] = None, - record: Optional[Record] = None, + body: Optional[dict] = None, timeout: int = 5, ) -> Record: method = method.upper() if method not in ["GET", "POST", "PATCH", "PUT"]: raise ValueError(f"Unsupported method: {method}") - data = record.text if record else None + data = body if body else None + data = json.dumps(data) try: - async with httpx.AsyncClient() as client: - response = await client.request( - method, url, headers=headers, content=data, timeout=timeout - ) - try: - response_json = response.json() - result = orjson_dumps(response_json, indent_2=False) - except Exception: - result = response.text - return Record( - text=result, - data={ - "source": url, - "headers": headers, - "status_code": response.status_code, - }, - ) + response = await client.request(method, url, headers=headers, content=data, timeout=timeout) + try: + response_json = response.json() + result = orjson_dumps(response_json, indent_2=False) + except Exception: + result = response.text + return Record( + text=result, + data={ + "source": url, + "headers": headers, + "status_code": response.status_code, + }, + ) except httpx.TimeoutException: return Record( text="Request Timed Out", @@ -88,22 +88,15 @@ class APIRequest(CustomComponent): method: str, url: List[str], headers: Optional[dict] = None, - record: Optional[Union[Record, List[Record]]] = None, + body: 
Optional[dict] = None, timeout: int = 5, ) -> List[Record]: if headers is None: headers = {} urls = url if isinstance(url, list) else [url] - records = ( - record - if isinstance(record, list) - else [record] if record else [None] * len(urls) - ) - - results = await asyncio.gather( - *[ - self.make_request(method, u, headers, doc, timeout) - for u, doc in zip(urls, records) - ] - ) + bodies = body if isinstance(body, list) else [body] if body else [None] * len(urls) + async with httpx.AsyncClient() as client: + results = await asyncio.gather( + *[self.make_request(client, method, u, headers, rec, timeout) for u, rec in zip(urls, bodies)] + ) return results diff --git a/src/backend/langflow/components/data/Directory.py b/src/backend/langflow/components/data/Directory.py index 327f270cc..f05b11e2c 100644 --- a/src/backend/langflow/components/data/Directory.py +++ b/src/backend/langflow/components/data/Directory.py @@ -57,20 +57,13 @@ class DirectoryComponent(CustomComponent): if types is None: types = [] resolved_path = self.resolve_path(path) - file_paths = retrieve_file_paths( - resolved_path, types, load_hidden, recursive, depth - ) + file_paths = retrieve_file_paths(resolved_path, types, load_hidden, recursive, depth) loaded_records = [] if use_multithreading: - loaded_records = parallel_load_records( - file_paths, silent_errors, max_concurrency - ) + loaded_records = parallel_load_records(file_paths, silent_errors, max_concurrency) else: - loaded_records = [ - parse_file_to_record(file_path, silent_errors) - for file_path in file_paths - ] + loaded_records = [parse_file_to_record(file_path, silent_errors) for file_path in file_paths] loaded_records = list(filter(None, loaded_records)) self.status = loaded_records return loaded_records diff --git a/src/backend/langflow/components/data/FileLoader.py b/src/backend/langflow/components/data/FileLoader.py index 2f74d9d04..d513298c6 100644 --- a/src/backend/langflow/components/data/FileLoader.py +++ 
b/src/backend/langflow/components/data/FileLoader.py @@ -11,9 +11,7 @@ class FileLoaderComponent(CustomComponent): beta = True def build_config(self): - loader_options = ["Automatic"] + [ - loader_info["name"] for loader_info in LOADERS_INFO - ] + loader_options = ["Automatic"] + [loader_info["name"] for loader_info in LOADERS_INFO] file_types = [] suffixes = [] @@ -105,9 +103,7 @@ class FileLoaderComponent(CustomComponent): if isinstance(selected_loader_info, dict): loader_import: str = selected_loader_info["import"] else: - raise ValueError( - f"Loader info for {loader} is not a dict\nLoader info:\n{selected_loader_info}" - ) + raise ValueError(f"Loader info for {loader} is not a dict\nLoader info:\n{selected_loader_info}") module_name, class_name = loader_import.rsplit(".", 1) try: @@ -115,9 +111,7 @@ class FileLoaderComponent(CustomComponent): loader_module = __import__(module_name, fromlist=[class_name]) loader_instance = getattr(loader_module, class_name) except ImportError as e: - raise ValueError( - f"Loader {loader} could not be imported\nLoader info:\n{selected_loader_info}" - ) from e + raise ValueError(f"Loader {loader} could not be imported\nLoader info:\n{selected_loader_info}") from e result = loader_instance(file_path=file_path) docs = result.load() diff --git a/src/backend/langflow/components/data/URL.py b/src/backend/langflow/components/data/URL.py index 08eafeaa3..8368e72be 100644 --- a/src/backend/langflow/components/data/URL.py +++ b/src/backend/langflow/components/data/URL.py @@ -1,6 +1,6 @@ -from typing import Any, Dict, Optional +from typing import Any, Dict -from langchain_community.document_loaders.url import UnstructuredURLLoader +from langchain_community.document_loaders.web_base import WebBaseLoader from langflow import CustomComponent from langflow.schema import Record @@ -8,7 +8,7 @@ from langflow.schema import Record class URLComponent(CustomComponent): display_name = "URL" - description = "Load a URL." 
+ description = "Load URLs and convert them to records." def build_config(self) -> Dict[str, Any]: return { @@ -18,9 +18,8 @@ class URLComponent(CustomComponent): async def build( self, urls: list[str], - ) -> Optional[Record]: - - loader = UnstructuredURLLoader(urls=urls) + ) -> Record: + loader = WebBaseLoader(web_paths=urls) docs = loader.load() records = self.to_records(docs) return records diff --git a/src/backend/langflow/components/utilities/GetNotified.py b/src/backend/langflow/components/experimental/GetNotified.py similarity index 100% rename from src/backend/langflow/components/utilities/GetNotified.py rename to src/backend/langflow/components/experimental/GetNotified.py diff --git a/src/backend/langflow/components/utilities/ListFlows.py b/src/backend/langflow/components/experimental/ListFlows.py similarity index 100% rename from src/backend/langflow/components/utilities/ListFlows.py rename to src/backend/langflow/components/experimental/ListFlows.py diff --git a/src/backend/langflow/components/utilities/Notify.py b/src/backend/langflow/components/experimental/Notify.py similarity index 91% rename from src/backend/langflow/components/utilities/Notify.py rename to src/backend/langflow/components/experimental/Notify.py index 2155b079e..3b5662355 100644 --- a/src/backend/langflow/components/utilities/Notify.py +++ b/src/backend/langflow/components/experimental/Notify.py @@ -18,9 +18,7 @@ class NotifyComponent(CustomComponent): }, } - def build( - self, name: str, record: Optional[Record] = None, append: bool = False - ) -> Record: + def build(self, name: str, record: Optional[Record] = None, append: bool = False) -> Record: if record and not isinstance(record, Record): if isinstance(record, str): record = Record(text=record) diff --git a/src/backend/langflow/components/utilities/RunFlow.py b/src/backend/langflow/components/experimental/RunFlow.py similarity index 94% rename from src/backend/langflow/components/utilities/RunFlow.py rename to 
src/backend/langflow/components/experimental/RunFlow.py index d0e49ac90..94ba88044 100644 --- a/src/backend/langflow/components/utilities/RunFlow.py +++ b/src/backend/langflow/components/experimental/RunFlow.py @@ -39,10 +39,7 @@ class RunFlowComponent(CustomComponent): records.append(record) return records - async def build( - self, input_value: Text, flow_name: str, tweaks: NestedDict - ) -> Record: - + async def build(self, input_value: Text, flow_name: str, tweaks: NestedDict) -> Record: results: List[Optional[ResultData]] = await self.run_flow( input_value=input_value, flow_name=flow_name, tweaks=tweaks ) diff --git a/src/backend/langflow/components/utilities/RunnableExecutor.py b/src/backend/langflow/components/experimental/RunnableExecutor.py similarity index 100% rename from src/backend/langflow/components/utilities/RunnableExecutor.py rename to src/backend/langflow/components/experimental/RunnableExecutor.py diff --git a/src/backend/langflow/components/utilities/SQLExecutor.py b/src/backend/langflow/components/experimental/SQLExecutor.py similarity index 76% rename from src/backend/langflow/components/utilities/SQLExecutor.py rename to src/backend/langflow/components/experimental/SQLExecutor.py index 530391c31..e1b4e699f 100644 --- a/src/backend/langflow/components/utilities/SQLExecutor.py +++ b/src/backend/langflow/components/experimental/SQLExecutor.py @@ -11,7 +11,10 @@ class SQLExecutorComponent(CustomComponent): def build_config(self): return { - "database": {"display_name": "Database"}, + "database_url": { + "display_name": "Database URL", + "info": "The URL of the database.", + }, "include_columns": { "display_name": "Include Columns", "info": "Include columns in the result.", @@ -26,15 +29,24 @@ class SQLExecutorComponent(CustomComponent): }, } + def clean_up_uri(self, uri: str) -> str: + if uri.startswith("postgresql://"): + uri = uri.replace("postgresql://", "postgres://") + return uri.strip() + def build( self, query: str, - database: 
SQLDatabase, + database_url: str, include_columns: bool = False, passthrough: bool = False, add_error: bool = False, ) -> Text: error = None + try: + database = SQLDatabase.from_uri(database_url) + except Exception as e: + raise ValueError(f"An error occurred while connecting to the database: {e}") try: tool = QuerySQLDataBaseTool(db=database) result = tool.run(query, include_columns=include_columns) diff --git a/src/backend/langflow/components/io/__init__.py b/src/backend/langflow/components/experimental/__init__.py similarity index 100% rename from src/backend/langflow/components/io/__init__.py rename to src/backend/langflow/components/experimental/__init__.py diff --git a/src/backend/langflow/components/helpers/ClearMessageHistory.py b/src/backend/langflow/components/helpers/ClearMessageHistory.py new file mode 100644 index 000000000..6d264422f --- /dev/null +++ b/src/backend/langflow/components/helpers/ClearMessageHistory.py @@ -0,0 +1,24 @@ +from langflow import CustomComponent +from langflow.memory import delete_messages, get_messages + + +class ClearMessageHistoryComponent(CustomComponent): + display_name = "Clear Message History" + description = "A component to clear the message history." 
+ + def build_config(self): + return { + "session_id": { + "display_name": "Session ID", + "info": "The session ID to clear the message history.", + } + } + + def build( + self, + session_id: str, + ) -> None: + delete_messages(session_id=session_id) + records = get_messages(session_id=session_id) + self.records = records + return records diff --git a/src/backend/langflow/components/utilities/CustomComponent.py b/src/backend/langflow/components/helpers/CustomComponent.py similarity index 100% rename from src/backend/langflow/components/utilities/CustomComponent.py rename to src/backend/langflow/components/helpers/CustomComponent.py diff --git a/src/backend/langflow/components/utilities/DocumentToRecord.py b/src/backend/langflow/components/helpers/DocumentToRecord.py similarity index 100% rename from src/backend/langflow/components/utilities/DocumentToRecord.py rename to src/backend/langflow/components/helpers/DocumentToRecord.py diff --git a/src/backend/langflow/components/helpers/ExtractDataFromRecord.py b/src/backend/langflow/components/helpers/ExtractDataFromRecord.py new file mode 100644 index 000000000..2b28545b5 --- /dev/null +++ b/src/backend/langflow/components/helpers/ExtractDataFromRecord.py @@ -0,0 +1,16 @@ +from langflow import CustomComponent +from langflow.schema import Record + + +class ExtractKeyFromRecordComponent(CustomComponent): + display_name = "Extract Key From Record" + description = "Extracts a key from a record." 
+ + field_config = { + "record": {"display_name": "Record"}, + } + + def build(self, record: Record, key: str, silent_error: bool = True) -> dict: + data = getattr(record, key) + self.status = data + return data diff --git a/src/backend/langflow/components/helpers/IDGenerator.py b/src/backend/langflow/components/helpers/IDGenerator.py new file mode 100644 index 000000000..35b9a3d42 --- /dev/null +++ b/src/backend/langflow/components/helpers/IDGenerator.py @@ -0,0 +1,26 @@ +import uuid +from typing import Any, Text + +from langflow import CustomComponent + + +class UUIDGeneratorComponent(CustomComponent): + documentation: str = "http://docs.langflow.org/components/custom" + display_name = "Unique ID Generator" + description = "Generates a unique ID." + + def update_build_config(self, build_config: dict, field_name: Text, field_value: Any): + if field_name == "unique_id": + build_config[field_name]["value"] = str(uuid.uuid4()) + return build_config + + def build_config(self): + return { + "unique_id": { + "display_name": "Value", + "refresh": True, + } + } + + def build(self, unique_id: str) -> str: + return unique_id diff --git a/src/backend/langflow/components/helpers/MergeRecords.py b/src/backend/langflow/components/helpers/MergeRecords.py new file mode 100644 index 000000000..9c280d12a --- /dev/null +++ b/src/backend/langflow/components/helpers/MergeRecords.py @@ -0,0 +1,25 @@ +from langflow import CustomComponent +from langflow.schema import Record + + +class MergeRecordsComponent(CustomComponent): + display_name = "Merge Records" + description = "Merges records." 
+ + field_config = { + "records": {"display_name": "Records"}, + } + + def build(self, records: list[Record]) -> Record: + if not records: + return records + if len(records) == 1: + return records[0] + merged_record = None + for record in records: + if merged_record is None: + merged_record = record + else: + merged_record += record + self.status = merged_record + return merged_record diff --git a/src/backend/langflow/components/memories/MessageHistory.py b/src/backend/langflow/components/helpers/MessageHistory.py similarity index 100% rename from src/backend/langflow/components/memories/MessageHistory.py rename to src/backend/langflow/components/helpers/MessageHistory.py diff --git a/src/backend/langflow/components/utilities/PythonFunction.py b/src/backend/langflow/components/helpers/PythonFunction.py similarity index 100% rename from src/backend/langflow/components/utilities/PythonFunction.py rename to src/backend/langflow/components/helpers/PythonFunction.py diff --git a/src/backend/langflow/components/helpers/RecordComponent1.py b/src/backend/langflow/components/helpers/RecordComponent1.py new file mode 100644 index 000000000..dce557826 --- /dev/null +++ b/src/backend/langflow/components/helpers/RecordComponent1.py @@ -0,0 +1,47 @@ +from typing import Any + +from langflow import CustomComponent +from langflow.schema import Record +from langflow.template.field.base import TemplateField + + +class RecordComponent(CustomComponent): + display_name = "Record Numbers" + description = "A component to create a record from key-value pairs." 
+ field_order = ["n_keys"] + + def update_build_config(self, build_config: dict, field_name: str, field_value: Any): + if field_value is None: + return + elif int(field_value) == 0: + keep = ["n_keys", "code"] + for key in build_config.copy(): + if key in keep: + continue + del build_config[key] + build_config[field_name]["value"] = int(field_value) + + # Add new fields depending on the field value + for i in range(int(field_value)): + field = TemplateField( + name=f"Key and Value {i}", + field_type="dict", + display_name="", + info="The key for the record.", + input_types=["Text"], + ) + build_config[field.name] = field.to_dict() + + def build_config(self): + return { + "n_keys": { + "display_name": "Number of Fields", + "refresh": True, + "info": "The number of keys to create in the record.", + }, + } + + def build(self, n_keys: int, **kwargs) -> Record: + data = {k: v for d in kwargs.values() for k, v in d.items()} + record = Record(data=data) + return record diff --git a/src/backend/langflow/components/helpers/RecordComponent2.py b/src/backend/langflow/components/helpers/RecordComponent2.py new file mode 100644 index 000000000..b476efb4d --- /dev/null +++ b/src/backend/langflow/components/helpers/RecordComponent2.py @@ -0,0 +1,51 @@ +from typing import Any, List + +from langflow import CustomComponent +from langflow.schema import Record +from langflow.template.field.base import TemplateField + + +class RecordComponent2(CustomComponent): + display_name = "Record Text" + description = "A component to create a record from key-value pairs." 
+ field_order = ["keys"] + + def update_build_config(self, build_config: dict, field_name: str, field_value: Any): + if field_value is None: + field_value = [] + if field_name is None: + return build_config + elif len(field_value) == 0: + keep = ["keys", "code"] + for key in build_config.copy(): + if key in keep: + continue + del build_config[key] + build_config[field_name]["value"] = field_value + + # Add new fields depending on the field value + for val in field_value: + if not isinstance(val, str) or val == "": + continue + field = TemplateField( + name=val, + field_type="str", + display_name="", + info="The key for the record.", + ) + build_config[field.name] = field.to_dict() + + def build_config(self): + return { + "keys": { + "display_name": "Keys", + "refresh": True, + "info": "The keys to create in the record.", + "input_types": [], + }, + } + + def build(self, keys: List[str], **kwargs) -> Record: + record = Record(data=kwargs) + self.status = record + return record diff --git a/src/backend/langflow/components/utilities/RecordsAsText.py b/src/backend/langflow/components/helpers/RecordsAsText.py similarity index 87% rename from src/backend/langflow/components/utilities/RecordsAsText.py rename to src/backend/langflow/components/helpers/RecordsAsText.py index 18bf8be8c..f7750bdba 100644 --- a/src/backend/langflow/components/utilities/RecordsAsText.py +++ b/src/backend/langflow/components/helpers/RecordsAsText.py @@ -6,7 +6,7 @@ from langflow.schema import Record class RecordsAsTextComponent(CustomComponent): display_name = "Records to Text" - description = "Converts Records a list of Records to text using a template." + description = "Converts Records into a single piece of text using a template." def build_config(self): return { @@ -16,7 +16,7 @@ class RecordsAsTextComponent(CustomComponent): "template": { "display_name": "Template", - "info": "The template to use for formatting the records. 
It must contain the keys {text} and {data}.", + "info": "The template to use for formatting the records. It can contain the keys {text}, {data} or any other key in the Record.", }, } diff --git a/src/backend/langflow/components/utilities/TextToRecord.py b/src/backend/langflow/components/helpers/TextToRecord.py similarity index 100% rename from src/backend/langflow/components/utilities/TextToRecord.py rename to src/backend/langflow/components/helpers/TextToRecord.py diff --git a/src/backend/langflow/components/utilities/__init__.py b/src/backend/langflow/components/helpers/__init__.py similarity index 100% rename from src/backend/langflow/components/utilities/__init__.py rename to src/backend/langflow/components/helpers/__init__.py diff --git a/src/backend/langflow/components/prompts/Prompt.py b/src/backend/langflow/components/prompts/Prompt.py index 975998919..7a6da307d 100644 --- a/src/backend/langflow/components/prompts/Prompt.py +++ b/src/backend/langflow/components/prompts/Prompt.py @@ -1,6 +1,7 @@ from langchain_core.prompts import PromptTemplate from langflow import CustomComponent +from langflow.components.prompts.base.utils import dict_values_to_string from langflow.field_typing import Prompt, TemplateField, Text @@ -21,16 +22,13 @@ class PromptComponent(CustomComponent): **kwargs, ) -> Text: prompt_template = PromptTemplate.from_template(Text(template)) - - attributes_to_check = ["text", "page_content"] - for key, value in kwargs.copy().items(): - for attribute in attributes_to_check: - if hasattr(value, attribute): - kwargs[key] = getattr(value, attribute) - + kwargs = dict_values_to_string(kwargs) + kwargs = { + k: "\n".join(v) if isinstance(v, list) else v for k, v in kwargs.items() + } try: formated_prompt = prompt_template.format(**kwargs) except Exception as exc: raise ValueError(f"Error formatting prompt: {exc}") from exc - self.status = f'Prompt: "{formated_prompt}"' + self.status = f'Prompt:\n"{formated_prompt}"' return formated_prompt diff --git 
a/src/backend/langflow/components/prompts/base/__init__.py b/src/backend/langflow/components/prompts/base/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/src/backend/langflow/components/prompts/base/utils.py b/src/backend/langflow/components/prompts/base/utils.py new file mode 100644 index 000000000..1f41ebda1 --- /dev/null +++ b/src/backend/langflow/components/prompts/base/utils.py @@ -0,0 +1,55 @@ +from langchain_core.documents import Document + +from langflow.schema import Record + + +def dict_values_to_string(d: dict) -> dict: + """ + Converts the values of a dictionary to strings. + + Args: + d (dict): The dictionary whose values need to be converted. + + Returns: + dict: The dictionary with values converted to strings. + """ + # Do something similar to the above + for key, value in d.items(): + # it could be a list of records or documents or strings + if isinstance(value, list): + for i, item in enumerate(value): + if isinstance(item, Record): + d[key][i] = record_to_string(item) + elif isinstance(item, Document): + d[key][i] = document_to_string(item) + elif isinstance(value, Record): + d[key] = record_to_string(value) + elif isinstance(value, Document): + d[key] = document_to_string(value) + return d + + +def record_to_string(record: Record) -> str: + """ + Convert a record to a string. + + Args: + record (Record): The record to convert. + + Returns: + str: The record as a string. + """ + return record.text + + +def document_to_string(document: Document) -> str: + """ + Convert a document to a string. + + Args: + document (Document): The document to convert. + + Returns: + str: The document as a string. 
+ """ + return document.page_content diff --git a/src/backend/langflow/components/textsplitters/CharacterTextSplitter.py b/src/backend/langflow/components/textsplitters/CharacterTextSplitter.py index d165f47fd..96576a4a3 100644 --- a/src/backend/langflow/components/textsplitters/CharacterTextSplitter.py +++ b/src/backend/langflow/components/textsplitters/CharacterTextSplitter.py @@ -1,8 +1,9 @@ from typing import List from langchain.text_splitter import CharacterTextSplitter -from langchain_core.documents.base import Document + from langflow import CustomComponent +from langflow.schema.schema import Record class CharacterTextSplitterComponent(CustomComponent): @@ -11,7 +12,7 @@ class CharacterTextSplitterComponent(CustomComponent): def build_config(self): return { - "documents": {"display_name": "Documents"}, + "inputs": {"display_name": "Input", "input_types": ["Document", "Record"]}, "chunk_overlap": {"display_name": "Chunk Overlap", "default": 200}, "chunk_size": {"display_name": "Chunk Size", "default": 1000}, "separator": {"display_name": "Separator", "default": "\n"}, @@ -19,17 +20,24 @@ class CharacterTextSplitterComponent(CustomComponent): def build( self, - documents: List[Document], + inputs: List[Record], chunk_overlap: int = 200, chunk_size: int = 1000, separator: str = "\n", - ) -> List[Document]: + ) -> List[Record]: # separator may come escaped from the frontend separator = separator.encode().decode("unicode_escape") + documents = [] + for _input in inputs: + if isinstance(_input, Record): + documents.append(_input.to_lc_document()) + else: + documents.append(_input) docs = CharacterTextSplitter( chunk_overlap=chunk_overlap, chunk_size=chunk_size, separator=separator, ).split_documents(documents) - self.status = docs - return docs + records = self.to_records(docs) + self.status = records + return records diff --git a/src/backend/langflow/components/textsplitters/LanguageRecursiveTextSplitter.py 
b/src/backend/langflow/components/textsplitters/LanguageRecursiveTextSplitter.py index d1494f4d0..3f521e0ba 100644 --- a/src/backend/langflow/components/textsplitters/LanguageRecursiveTextSplitter.py +++ b/src/backend/langflow/components/textsplitters/LanguageRecursiveTextSplitter.py @@ -1,9 +1,9 @@ -from typing import Optional +from typing import List, Optional from langchain.text_splitter import Language -from langchain_core.documents import Document from langflow import CustomComponent +from langflow.schema.schema import Record class LanguageRecursiveTextSplitterComponent(CustomComponent): @@ -14,10 +14,7 @@ class LanguageRecursiveTextSplitterComponent(CustomComponent): def build_config(self): options = [x.value for x in Language] return { - "documents": { - "display_name": "Documents", - "info": "The documents to split.", - }, + "inputs": {"display_name": "Input", "input_types": ["Document", "Record"]}, "separator_type": { "display_name": "Separator Type", "info": "The type of separator to use.", @@ -47,11 +44,11 @@ class LanguageRecursiveTextSplitterComponent(CustomComponent): def build( self, - documents: list[Document], + inputs: List[Record], chunk_size: Optional[int] = 1000, chunk_overlap: Optional[int] = 200, separator_type: str = "Python", - ) -> list[Document]: + ) -> list[Record]: """ Split text into chunks of a specified length. 
@@ -77,6 +74,12 @@ class LanguageRecursiveTextSplitterComponent(CustomComponent): chunk_size=chunk_size, chunk_overlap=chunk_overlap, ) - + documents = [] + for _input in inputs: + if isinstance(_input, Record): + documents.append(_input.to_lc_document()) + else: + documents.append(_input) docs = splitter.split_documents(documents) - return docs + records = self.to_records(docs) + return records diff --git a/src/backend/langflow/components/textsplitters/RecursiveCharacterTextSplitter.py b/src/backend/langflow/components/textsplitters/RecursiveCharacterTextSplitter.py index d07ae3ebe..6b9cb865b 100644 --- a/src/backend/langflow/components/textsplitters/RecursiveCharacterTextSplitter.py +++ b/src/backend/langflow/components/textsplitters/RecursiveCharacterTextSplitter.py @@ -1,10 +1,11 @@ from typing import Optional +from langchain.text_splitter import RecursiveCharacterTextSplitter from langchain_core.documents import Document from langflow import CustomComponent -from langflow.utils.util import build_loader_repr_from_documents -from langchain.text_splitter import RecursiveCharacterTextSplitter +from langflow.schema import Record +from langflow.utils.util import build_loader_repr_from_records class RecursiveCharacterTextSplitterComponent(CustomComponent): @@ -14,9 +15,10 @@ class RecursiveCharacterTextSplitterComponent(CustomComponent): def build_config(self): return { - "documents": { - "display_name": "Documents", - "info": "The documents to split.", + "inputs": { + "display_name": "Input", + "info": "The texts to split.", + "input_types": ["Document", "Record"], }, "separators": { "display_name": "Separators", @@ -40,11 +42,11 @@ class RecursiveCharacterTextSplitterComponent(CustomComponent): def build( self, - documents: list[Document], + inputs: list[Document], separators: Optional[list[str]] = None, chunk_size: Optional[int] = 1000, chunk_overlap: Optional[int] = 200, - ) -> list[Document]: + ) -> list[Record]: """ Split text into chunks of a specified length. 
@@ -75,7 +77,13 @@ class RecursiveCharacterTextSplitterComponent(CustomComponent): chunk_size=chunk_size, chunk_overlap=chunk_overlap, ) - + documents = [] + for _input in inputs: + if isinstance(_input, Record): + documents.append(_input.to_lc_document()) + else: + documents.append(_input) docs = splitter.split_documents(documents) - self.repr_value = build_loader_repr_from_documents(docs) - return docs + records = self.to_records(docs) + self.repr_value = build_loader_repr_from_records(records) + return records diff --git a/src/backend/langflow/components/utilities/IDGenerator.py b/src/backend/langflow/components/utilities/IDGenerator.py deleted file mode 100644 index ceb937a6c..000000000 --- a/src/backend/langflow/components/utilities/IDGenerator.py +++ /dev/null @@ -1,19 +0,0 @@ -import uuid -from typing import Text - -from langflow import CustomComponent - - -class UUIDGeneratorComponent(CustomComponent): - documentation: str = "http://docs.langflow.org/components/custom" - display_name = "Unique ID Generator" - description = "Generates a unique ID." - - def generate(self, *args, **kwargs): - return Text(uuid.uuid4().hex) - - def build_config(self): - return {"unique_id": {"display_name": "Value", "value": self.generate}} - - def build(self, unique_id: str) -> str: - return unique_id diff --git a/src/backend/langflow/components/utilities/SharedState.py b/src/backend/langflow/components/utilities/SharedState.py deleted file mode 100644 index 95de17774..000000000 --- a/src/backend/langflow/components/utilities/SharedState.py +++ /dev/null @@ -1,41 +0,0 @@ -from typing import Optional - -from langflow import CustomComponent -from langflow.schema import Record - - -class SharedState(CustomComponent): - display_name = "Shared State" - description = "A component to share state between components." 
- - def build_config(self): - return { - "name": {"display_name": "Name", "info": "The name of the state."}, - "record": {"display_name": "Record", "info": "The record to store."}, - "append": { - "display_name": "Append", - "info": "If True, the record will be appended to the state.", - }, - } - - def build( - self, name: str, record: Optional[Record] = None, append: bool = False - ) -> Record: - if record: - if append: - self.append_state(name, record) - else: - self.update_state(name, record) - - state = self.get_state(name) - if state and not isinstance(state, Record): - if isinstance(state, str): - state = Record(text=state) - elif isinstance(state, dict): - state = Record(data=state) - else: - state = Record(text=str(state)) - elif not state: - state = Record(text="") - self.status = state - return state diff --git a/src/backend/langflow/components/utilities/ShouldRunNext.py b/src/backend/langflow/components/utilities/ShouldRunNext.py deleted file mode 100644 index b9ae3b048..000000000 --- a/src/backend/langflow/components/utilities/ShouldRunNext.py +++ /dev/null @@ -1,49 +0,0 @@ -# Implement ShouldRunNext component -from typing import Text -from langchain_core.prompts import PromptTemplate - -from langflow import CustomComponent -from langflow.field_typing import BaseLanguageModel, Prompt - - -class ShouldRunNext(CustomComponent): - display_name = "Should Run Next" - description = "Decides whether to run the next component." - - def build_config(self): - return { - "prompt": { - "display_name": "Prompt", - "info": "The prompt to use for the decision. 
It should generate a boolean response (True or False).", - }, - "llm": { - "display_name": "LLM", - "info": "The language model to use for the decision.", - }, - } - - def build(self, template: Prompt, llm: BaseLanguageModel, **kwargs) -> dict: - # This is a simple component that always returns True - prompt_template = PromptTemplate.from_template(Text(template)) - - attributes_to_check = ["text", "page_content"] - for key, value in kwargs.items(): - for attribute in attributes_to_check: - if hasattr(value, attribute): - kwargs[key] = getattr(value, attribute) - - chain = prompt_template | llm - result = chain.invoke(kwargs) - if hasattr(result, "content") and isinstance(result.content, str): - result = result.content - elif isinstance(result, str): - result = result - else: - result = result.get("response") - - if result.lower() not in ["true", "false"]: - raise ValueError("The prompt should generate a boolean response (True or False).") - # The string should be the words true or false - # if not raise an error - bool_result = result.lower() == "true" - return {"condition": bool_result, "result": kwargs} diff --git a/src/backend/langflow/components/vectorstores/Chroma.py b/src/backend/langflow/components/vectorstores/Chroma.py index b1756e777..063ff7cf3 100644 --- a/src/backend/langflow/components/vectorstores/Chroma.py +++ b/src/backend/langflow/components/vectorstores/Chroma.py @@ -2,11 +2,12 @@ from typing import List, Optional, Union import chromadb # type: ignore from langchain.embeddings.base import Embeddings -from langchain.schema import BaseRetriever, Document +from langchain.schema import BaseRetriever from langchain_community.vectorstores import VectorStore from langchain_community.vectorstores.chroma import Chroma from langflow import CustomComponent +from langflow.schema.schema import Record class ChromaComponent(CustomComponent): @@ -31,7 +32,7 @@ class ChromaComponent(CustomComponent): "collection_name": {"display_name": "Collection Name", "value": 
"langflow"}, "index_directory": {"display_name": "Persist Directory"}, "code": {"advanced": True, "display_name": "Code"}, - "documents": {"display_name": "Documents", "is_list": True}, + "inputs": {"display_name": "Input", "input_types": ["Document", "Record"]}, "embedding": {"display_name": "Embedding"}, "chroma_server_cors_allow_origins": { "display_name": "Server CORS Allow Origins", @@ -55,7 +56,7 @@ class ChromaComponent(CustomComponent): embedding: Embeddings, chroma_server_ssl_enabled: bool, index_directory: Optional[str] = None, - documents: Optional[List[Document]] = None, + inputs: Optional[List[Record]] = None, chroma_server_cors_allow_origins: Optional[str] = None, chroma_server_host: Optional[str] = None, chroma_server_port: Optional[int] = None, @@ -97,6 +98,12 @@ class ChromaComponent(CustomComponent): if index_directory is not None: index_directory = self.resolve_path(index_directory) + documents = [] + for _input in inputs: + if isinstance(_input, Record): + documents.append(_input.to_lc_document()) + else: + documents.append(_input) if documents is not None and embedding is not None: if len(documents) == 0: raise ValueError("If documents are provided, there must be at least one document.") diff --git a/src/backend/langflow/components/vectorstores/ChromaSearch.py b/src/backend/langflow/components/vectorstores/ChromaSearch.py index 3a6d283b3..baa550472 100644 --- a/src/backend/langflow/components/vectorstores/ChromaSearch.py +++ b/src/backend/langflow/components/vectorstores/ChromaSearch.py @@ -35,7 +35,6 @@ class ChromaSearchComponent(LCVectorStoreComponent): # "persist": {"display_name": "Persist"}, "index_directory": {"display_name": "Index Directory"}, "code": {"show": False, "display_name": "Code"}, - "documents": {"display_name": "Documents", "is_list": True}, "embedding": { "display_name": "Embedding", "info": "Embedding model to vectorize inputs (make sure to use same as index)", @@ -93,8 +92,7 @@ class 
ChromaSearchComponent(LCVectorStoreComponent): if chroma_server_host is not None: chroma_settings = chromadb.config.Settings( - chroma_server_cors_allow_origins=chroma_server_cors_allow_origins - or None, + chroma_server_cors_allow_origins=chroma_server_cors_allow_origins or None, chroma_server_host=chroma_server_host, chroma_server_port=chroma_server_port or None, chroma_server_grpc_port=chroma_server_grpc_port or None, diff --git a/src/backend/langflow/components/vectorstores/FAISS.py b/src/backend/langflow/components/vectorstores/FAISS.py index a0324456e..7cdadccdb 100644 --- a/src/backend/langflow/components/vectorstores/FAISS.py +++ b/src/backend/langflow/components/vectorstores/FAISS.py @@ -5,7 +5,8 @@ from langchain_community.vectorstores import VectorStore from langchain_community.vectorstores.faiss import FAISS from langflow import CustomComponent -from langflow.field_typing import Document, Embeddings +from langflow.field_typing import Embeddings +from langflow.schema.schema import Record class FAISSComponent(CustomComponent): @@ -15,7 +16,7 @@ class FAISSComponent(CustomComponent): def build_config(self): return { - "documents": {"display_name": "Documents"}, + "inputs": {"display_name": "Input", "input_types": ["Document", "Record"]}, "embedding": {"display_name": "Embedding"}, "folder_path": { "display_name": "Folder Path", @@ -27,10 +28,16 @@ class FAISSComponent(CustomComponent): def build( self, embedding: Embeddings, - documents: List[Document], + inputs: List[Record], folder_path: str, index_name: str = "langflow_index", ) -> Union[VectorStore, FAISS, BaseRetriever]: + documents = [] + for _input in inputs: + if isinstance(_input, Record): + documents.append(_input.to_lc_document()) + else: + documents.append(_input) vector_store = FAISS.from_documents(documents=documents, embedding=embedding) if not folder_path: raise ValueError("Folder path is required to save the FAISS index.") diff --git 
a/src/backend/langflow/components/vectorstores/FAISSSearch.py b/src/backend/langflow/components/vectorstores/FAISSSearch.py index f6ddf4f7a..27cdc606c 100644 --- a/src/backend/langflow/components/vectorstores/FAISSSearch.py +++ b/src/backend/langflow/components/vectorstores/FAISSSearch.py @@ -14,7 +14,6 @@ class FAISSSearchComponent(LCVectorStoreComponent): def build_config(self): return { - "documents": {"display_name": "Documents"}, "embedding": {"display_name": "Embedding"}, "folder_path": { "display_name": "Folder Path", @@ -34,9 +33,7 @@ class FAISSSearchComponent(LCVectorStoreComponent): if not folder_path: raise ValueError("Folder path is required to save the FAISS index.") path = self.resolve_path(folder_path) - vector_store = FAISS.load_local( - folder_path=Text(path), embeddings=embedding, index_name=index_name - ) + vector_store = FAISS.load_local(folder_path=Text(path), embeddings=embedding, index_name=index_name) if not vector_store: raise ValueError("Failed to load the FAISS index.") diff --git a/src/backend/langflow/components/vectorstores/MongoDBAtlasVector.py b/src/backend/langflow/components/vectorstores/MongoDBAtlasVector.py index e15368f7d..f45d55584 100644 --- a/src/backend/langflow/components/vectorstores/MongoDBAtlasVector.py +++ b/src/backend/langflow/components/vectorstores/MongoDBAtlasVector.py @@ -3,7 +3,8 @@ from typing import List, Optional from langchain_community.vectorstores.mongodb_atlas import MongoDBAtlasVectorSearch from langflow import CustomComponent -from langflow.field_typing import Document, Embeddings, NestedDict +from langflow.field_typing import Embeddings, NestedDict +from langflow.schema.schema import Record class MongoDBAtlasComponent(CustomComponent): @@ -13,7 +14,7 @@ class MongoDBAtlasComponent(CustomComponent): def build_config(self): return { - "documents": {"display_name": "Documents"}, + "inputs": {"display_name": "Input", "input_types": ["Document", "Record"]}, "embedding": {"display_name": "Embedding"}, 
"collection_name": {"display_name": "Collection Name"}, "db_name": {"display_name": "Database Name"}, @@ -25,7 +26,7 @@ class MongoDBAtlasComponent(CustomComponent): def build( self, embedding: Embeddings, - documents: List[Document], + inputs: List[Record], collection_name: str = "", db_name: str = "", index_name: str = "", @@ -42,6 +43,12 @@ class MongoDBAtlasComponent(CustomComponent): collection = mongo_client[db_name][collection_name] except Exception as e: raise ValueError(f"Failed to connect to MongoDB Atlas: {e}") + documents = [] + for _input in inputs: + if isinstance(_input, Record): + documents.append(_input.to_lc_document()) + else: + documents.append(_input) if documents: vector_store = MongoDBAtlasVectorSearch.from_documents( documents=documents, diff --git a/src/backend/langflow/components/vectorstores/Pinecone.py b/src/backend/langflow/components/vectorstores/Pinecone.py index 54222b133..c71048266 100644 --- a/src/backend/langflow/components/vectorstores/Pinecone.py +++ b/src/backend/langflow/components/vectorstores/Pinecone.py @@ -7,7 +7,8 @@ from langchain_community.vectorstores import VectorStore from langchain_community.vectorstores.pinecone import Pinecone from langflow import CustomComponent -from langflow.field_typing import Document, Embeddings +from langflow.field_typing import Embeddings +from langflow.schema.schema import Record class PineconeComponent(CustomComponent): @@ -17,7 +18,7 @@ class PineconeComponent(CustomComponent): def build_config(self): return { - "documents": {"display_name": "Documents"}, + "inputs": {"display_name": "Input", "input_types": ["Document", "Record"]}, "embedding": {"display_name": "Embedding"}, "index_name": {"display_name": "Index Name"}, "namespace": {"display_name": "Namespace"}, @@ -44,7 +45,7 @@ class PineconeComponent(CustomComponent): self, embedding: Embeddings, pinecone_env: str, - documents: List[Document], + inputs: List[Record], text_key: str = "text", pool_threads: int = 4, index_name: 
Optional[str] = None, @@ -59,6 +60,12 @@ class PineconeComponent(CustomComponent): pinecone.init(api_key=pinecone_api_key, environment=pinecone_env) # type: ignore if not index_name: raise ValueError("Index Name is required.") + documents = [] + for _input in inputs: + if isinstance(_input, Record): + documents.append(_input.to_lc_document()) + else: + documents.append(_input) if documents: return Pinecone.from_documents( documents=documents, diff --git a/src/backend/langflow/components/vectorstores/Qdrant.py b/src/backend/langflow/components/vectorstores/Qdrant.py index 23ee70b11..e1773268b 100644 --- a/src/backend/langflow/components/vectorstores/Qdrant.py +++ b/src/backend/langflow/components/vectorstores/Qdrant.py @@ -3,8 +3,10 @@ from typing import Optional, Union from langchain.schema import BaseRetriever from langchain_community.vectorstores import VectorStore from langchain_community.vectorstores.qdrant import Qdrant + from langflow import CustomComponent -from langflow.field_typing import Document, Embeddings, NestedDict +from langflow.field_typing import Embeddings, NestedDict +from langflow.schema.schema import Record class QdrantComponent(CustomComponent): @@ -14,17 +16,23 @@ class QdrantComponent(CustomComponent): def build_config(self): return { - "documents": {"display_name": "Documents"}, + "inputs": {"display_name": "Input", "input_types": ["Document", "Record"]}, "embedding": {"display_name": "Embedding"}, "api_key": {"display_name": "API Key", "password": True, "advanced": True}, "collection_name": {"display_name": "Collection Name"}, - "content_payload_key": {"display_name": "Content Payload Key", "advanced": True}, + "content_payload_key": { + "display_name": "Content Payload Key", + "advanced": True, + }, "distance_func": {"display_name": "Distance Function", "advanced": True}, "grpc_port": {"display_name": "gRPC Port", "advanced": True}, "host": {"display_name": "Host", "advanced": True}, "https": {"display_name": "HTTPS", "advanced": True}, 
"location": {"display_name": "Location", "advanced": True}, - "metadata_payload_key": {"display_name": "Metadata Payload Key", "advanced": True}, + "metadata_payload_key": { + "display_name": "Metadata Payload Key", + "advanced": True, + }, "path": {"display_name": "Path", "advanced": True}, "port": {"display_name": "Port", "advanced": True}, "prefer_grpc": {"display_name": "Prefer gRPC", "advanced": True}, @@ -38,7 +46,7 @@ class QdrantComponent(CustomComponent): self, embedding: Embeddings, collection_name: str, - documents: Optional[Document] = None, + inputs: Optional[Record] = None, api_key: Optional[str] = None, content_payload_key: str = "page_content", distance_func: str = "Cosine", @@ -55,6 +63,12 @@ class QdrantComponent(CustomComponent): timeout: Optional[int] = None, url: Optional[str] = None, ) -> Union[VectorStore, Qdrant, BaseRetriever]: + documents = [] + for _input in inputs: + if isinstance(_input, Record): + documents.append(_input.to_lc_document()) + else: + documents.append(_input) if documents is None: from qdrant_client import QdrantClient diff --git a/src/backend/langflow/components/vectorstores/Redis.py b/src/backend/langflow/components/vectorstores/Redis.py index b2d7e4542..599a697a0 100644 --- a/src/backend/langflow/components/vectorstores/Redis.py +++ b/src/backend/langflow/components/vectorstores/Redis.py @@ -3,9 +3,10 @@ from typing import Optional, Union from langchain.embeddings.base import Embeddings from langchain_community.vectorstores import VectorStore from langchain_community.vectorstores.redis import Redis -from langchain_core.documents import Document from langchain_core.retrievers import BaseRetriever + from langflow import CustomComponent +from langflow.schema.schema import Record class RedisComponent(CustomComponent): @@ -28,7 +29,7 @@ class RedisComponent(CustomComponent): return { "index_name": {"display_name": "Index Name", "value": "your_index"}, "code": {"show": False, "display_name": "Code"}, - "documents": 
{"display_name": "Documents", "is_list": True}, + "inputs": {"display_name": "Input", "input_types": ["Document", "Record"]}, "embedding": {"display_name": "Embedding"}, "schema": {"display_name": "Schema", "file_types": [".yaml"]}, "redis_server_url": { @@ -44,7 +45,7 @@ class RedisComponent(CustomComponent): redis_server_url: str, redis_index_name: str, schema: Optional[str] = None, - documents: Optional[Document] = None, + inputs: Optional[Record] = None, ) -> Union[VectorStore, BaseRetriever]: """ Builds the Vector Store or BaseRetriever object. @@ -58,7 +59,13 @@ class RedisComponent(CustomComponent): Returns: - VectorStore: The Vector Store object. """ - if documents is None: + documents = [] + for _input in inputs: + if isinstance(_input, Record): + documents.append(_input.to_lc_document()) + else: + documents.append(_input) + if not documents: if schema is None: raise ValueError("If no documents are provided, a schema must be provided.") redis_vs = Redis.from_existing_index( diff --git a/src/backend/langflow/components/vectorstores/RedisSearch.py b/src/backend/langflow/components/vectorstores/RedisSearch.py index 4089d4f47..b2b420d3e 100644 --- a/src/backend/langflow/components/vectorstores/RedisSearch.py +++ b/src/backend/langflow/components/vectorstores/RedisSearch.py @@ -33,7 +33,6 @@ class RedisSearchComponent(RedisComponent, LCVectorStoreComponent): "input_value": {"display_name": "Input"}, "index_name": {"display_name": "Index Name", "value": "your_index"}, "code": {"show": False, "display_name": "Code"}, - "documents": {"display_name": "Documents", "is_list": True}, "embedding": {"display_name": "Embedding"}, "schema": {"display_name": "Schema", "file_types": [".yaml"]}, "redis_server_url": { diff --git a/src/backend/langflow/components/vectorstores/SupabaseVectorStore.py b/src/backend/langflow/components/vectorstores/SupabaseVectorStore.py index 2ec6dfabc..5d32388d9 100644 --- a/src/backend/langflow/components/vectorstores/SupabaseVectorStore.py +++ 
b/src/backend/langflow/components/vectorstores/SupabaseVectorStore.py @@ -3,10 +3,12 @@ from typing import List, Union from langchain.schema import BaseRetriever from langchain_community.vectorstores import VectorStore from langchain_community.vectorstores.supabase import SupabaseVectorStore -from langflow import CustomComponent -from langflow.field_typing import Document, Embeddings, NestedDict from supabase.client import Client, create_client +from langflow import CustomComponent +from langflow.field_typing import Embeddings, NestedDict +from langflow.schema.schema import Record + class SupabaseComponent(CustomComponent): display_name = "Supabase" @@ -14,7 +16,7 @@ class SupabaseComponent(CustomComponent): def build_config(self): return { - "documents": {"display_name": "Documents"}, + "inputs": {"display_name": "Input", "input_types": ["Document", "Record"]}, "embedding": {"display_name": "Embedding"}, "query_name": {"display_name": "Query Name"}, "search_kwargs": {"display_name": "Search Kwargs", "advanced": True}, @@ -26,7 +28,7 @@ class SupabaseComponent(CustomComponent): def build( self, embedding: Embeddings, - documents: List[Document], + inputs: List[Record], query_name: str = "", search_kwargs: NestedDict = {}, supabase_service_key: str = "", @@ -34,6 +36,12 @@ class SupabaseComponent(CustomComponent): table_name: str = "", ) -> Union[VectorStore, SupabaseVectorStore, BaseRetriever]: supabase: Client = create_client(supabase_url, supabase_key=supabase_service_key) + documents = [] + for _input in inputs: + if isinstance(_input, Record): + documents.append(_input.to_lc_document()) + else: + documents.append(_input) return SupabaseVectorStore.from_documents( documents=documents, embedding=embedding, diff --git a/src/backend/langflow/components/vectorstores/SupabaseVectorStoreSearch.py b/src/backend/langflow/components/vectorstores/SupabaseVectorStoreSearch.py index 5fd4dbd18..ca8113c56 100644 --- 
a/src/backend/langflow/components/vectorstores/SupabaseVectorStoreSearch.py +++ b/src/backend/langflow/components/vectorstores/SupabaseVectorStoreSearch.py @@ -38,9 +38,7 @@ class SupabaseSearchComponent(LCVectorStoreComponent): supabase_url: str = "", table_name: str = "", ) -> List[Record]: - supabase: Client = create_client( - supabase_url, supabase_key=supabase_service_key - ) + supabase: Client = create_client(supabase_url, supabase_key=supabase_service_key) vector_store = SupabaseVectorStore( client=supabase, embedding=embedding, diff --git a/src/backend/langflow/components/vectorstores/Vectara.py b/src/backend/langflow/components/vectorstores/Vectara.py index 0a396918c..cd25b2dd9 100644 --- a/src/backend/langflow/components/vectorstores/Vectara.py +++ b/src/backend/langflow/components/vectorstores/Vectara.py @@ -8,7 +8,8 @@ from langchain_community.vectorstores.vectara import Vectara from langchain_core.vectorstores import VectorStore from langflow import CustomComponent -from langflow.field_typing import BaseRetriever, Document +from langflow.field_typing import BaseRetriever +from langflow.schema.schema import Record class VectaraComponent(CustomComponent): @@ -28,8 +29,9 @@ class VectaraComponent(CustomComponent): "display_name": "Vectara API Key", "password": True, }, - "documents": { - "display_name": "Documents", + "inputs": { + "display_name": "Input", + "input_types": ["Document", "Record"], "info": "If provided, will be upserted to corpus (optional)", }, "files_url": { @@ -44,11 +46,18 @@ class VectaraComponent(CustomComponent): vectara_corpus_id: str, vectara_api_key: str, files_url: Optional[List[str]] = None, - documents: Optional[Document] = None, + inputs: Optional[Record] = None, ) -> Union[VectorStore, BaseRetriever]: source = "Langflow" - if documents is not None: + documents = [] + for _input in inputs: + if isinstance(_input, Record): + documents.append(_input.to_lc_document()) + else: + documents.append(_input) + + if documents: return 
Vectara.from_documents( documents=documents, # type: ignore embedding=FakeEmbeddings(size=768), diff --git a/src/backend/langflow/components/vectorstores/VectaraSearch.py b/src/backend/langflow/components/vectorstores/VectaraSearch.py index ae2d442be..3220d1561 100644 --- a/src/backend/langflow/components/vectorstores/VectaraSearch.py +++ b/src/backend/langflow/components/vectorstores/VectaraSearch.py @@ -11,9 +11,7 @@ from langflow.schema import Record class VectaraSearchComponent(VectaraComponent, LCVectorStoreComponent): display_name: str = "Vectara Search" description: str = "Search a Vectara Vector Store for similar documents." - documentation = ( - "https://python.langchain.com/docs/integrations/vectorstores/vectara" - ) + documentation = "https://python.langchain.com/docs/integrations/vectorstores/vectara" beta = True icon = "Vectara" @@ -33,10 +31,6 @@ class VectaraSearchComponent(VectaraComponent, LCVectorStoreComponent): "display_name": "Vectara API Key", "password": True, }, - "documents": { - "display_name": "Documents", - "info": "If provided, will be upserted to corpus (optional)", - }, "files_url": { "display_name": "Files Url", "info": "Make vectara object using url of files (optional)", diff --git a/src/backend/langflow/components/vectorstores/Weaviate.py b/src/backend/langflow/components/vectorstores/Weaviate.py index 3d804255a..8bc46d17b 100644 --- a/src/backend/langflow/components/vectorstores/Weaviate.py +++ b/src/backend/langflow/components/vectorstores/Weaviate.py @@ -2,10 +2,11 @@ from typing import Optional, Union import weaviate # type: ignore from langchain.embeddings.base import Embeddings -from langchain.schema import BaseRetriever, Document +from langchain.schema import BaseRetriever from langchain_community.vectorstores import VectorStore, Weaviate from langflow import CustomComponent +from langflow.schema.schema import Record class WeaviateVectorStoreComponent(CustomComponent): @@ -30,7 +31,7 @@ class 
WeaviateVectorStoreComponent(CustomComponent): "advanced": True, "value": "text", }, - "documents": {"display_name": "Documents", "is_list": True}, + "inputs": {"display_name": "Input", "input_types": ["Document", "Record"]}, "embedding": {"display_name": "Embedding"}, "attributes": { "display_name": "Attributes", @@ -55,7 +56,7 @@ class WeaviateVectorStoreComponent(CustomComponent): index_name: Optional[str] = None, text_key: str = "text", embedding: Optional[Embeddings] = None, - documents: Optional[Document] = None, + inputs: Optional[Record] = None, attributes: Optional[list] = None, ) -> Union[VectorStore, BaseRetriever]: if api_key: @@ -78,8 +79,14 @@ class WeaviateVectorStoreComponent(CustomComponent): return pascal_case_word index_name = _to_pascal_case(index_name) if index_name else None + documents = [] + for _input in inputs: + if isinstance(_input, Record): + documents.append(_input.to_lc_document()) + else: + documents.append(_input) - if documents is not None and embedding is not None: + if documents and embedding is not None: return Weaviate.from_documents( client=client, index_name=index_name, diff --git a/src/backend/langflow/components/vectorstores/WeaviateSearch.py b/src/backend/langflow/components/vectorstores/WeaviateSearch.py index 6eee202c9..5713ca26f 100644 --- a/src/backend/langflow/components/vectorstores/WeaviateSearch.py +++ b/src/backend/langflow/components/vectorstores/WeaviateSearch.py @@ -11,9 +11,7 @@ from langflow.schema import Record class WeaviateSearchVectorStore(WeaviateVectorStoreComponent, LCVectorStoreComponent): display_name: str = "Weaviate Search" description: str = "Search a Weaviate Vector Store for similar documents." 
- documentation = ( - "https://python.langchain.com/docs/integrations/vectorstores/weaviate" - ) + documentation = "https://python.langchain.com/docs/integrations/vectorstores/weaviate" beta = True icon = "Weaviate" @@ -39,7 +37,6 @@ class WeaviateSearchVectorStore(WeaviateVectorStoreComponent, LCVectorStoreCompo "advanced": True, "value": "text", }, - "documents": {"display_name": "Documents", "is_list": True}, "embedding": {"display_name": "Embedding"}, "attributes": { "display_name": "Attributes", diff --git a/src/backend/langflow/components/vectorstores/base/model.py b/src/backend/langflow/components/vectorstores/base/model.py index 6c2c7d453..de5b5840f 100644 --- a/src/backend/langflow/components/vectorstores/base/model.py +++ b/src/backend/langflow/components/vectorstores/base/model.py @@ -37,14 +37,8 @@ class LCVectorStoreComponent(CustomComponent): """ docs: List[Document] = [] - if ( - input_value - and isinstance(input_value, str) - and hasattr(vector_store, "search") - ): - docs = vector_store.search( - query=input_value, search_type=search_type.lower() - ) + if input_value and isinstance(input_value, str) and hasattr(vector_store, "search"): + docs = vector_store.search(query=input_value, search_type=search_type.lower()) else: raise ValueError("Invalid inputs provided.") return docs_to_records(docs) diff --git a/src/backend/langflow/components/vectorstores/pgvector.py b/src/backend/langflow/components/vectorstores/pgvector.py index 2baf6dae6..7ab20b8df 100644 --- a/src/backend/langflow/components/vectorstores/pgvector.py +++ b/src/backend/langflow/components/vectorstores/pgvector.py @@ -3,9 +3,10 @@ from typing import Optional, Union from langchain.embeddings.base import Embeddings from langchain_community.vectorstores import VectorStore from langchain_community.vectorstores.pgvector import PGVector -from langchain_core.documents import Document from langchain_core.retrievers import BaseRetriever + from langflow import CustomComponent +from 
langflow.schema.schema import Record class PGVectorComponent(CustomComponent): @@ -26,7 +27,7 @@ class PGVectorComponent(CustomComponent): """ return { "code": {"show": False}, - "documents": {"display_name": "Documents", "is_list": True}, + "inputs": {"display_name": "Input", "input_types": ["Document", "Record"]}, "embedding": {"display_name": "Embedding"}, "pg_server_url": { "display_name": "PostgreSQL Server Connection String", @@ -40,7 +41,7 @@ class PGVectorComponent(CustomComponent): embedding: Embeddings, pg_server_url: str, collection_name: str, - documents: Optional[Document] = None, + inputs: Optional[Record] = None, ) -> Union[VectorStore, BaseRetriever]: """ Builds the Vector Store or BaseRetriever object. @@ -55,6 +56,12 @@ class PGVectorComponent(CustomComponent): - VectorStore: The Vector Store object. """ + documents = [] + for _input in inputs: + if isinstance(_input, Record): + documents.append(_input.to_lc_document()) + else: + documents.append(_input) try: if documents is None: vector_store = PGVector.from_existing_index( diff --git a/src/backend/langflow/components/vectorstores/pgvectorSearch.py b/src/backend/langflow/components/vectorstores/pgvectorSearch.py index f40e5ed26..04666fe74 100644 --- a/src/backend/langflow/components/vectorstores/pgvectorSearch.py +++ b/src/backend/langflow/components/vectorstores/pgvectorSearch.py @@ -15,9 +15,7 @@ class PGVectorSearchComponent(PGVectorComponent, LCVectorStoreComponent): display_name: str = "PGVector Search" description: str = "Search a PGVector Store for similar documents." 
- documentation = ( - "https://python.langchain.com/docs/integrations/vectorstores/pgvector" - ) + documentation = "https://python.langchain.com/docs/integrations/vectorstores/pgvector" def build_config(self): """ diff --git a/src/backend/langflow/graph/edge/base.py b/src/backend/langflow/graph/edge/base.py index c49ec714c..53c4892f5 100644 --- a/src/backend/langflow/graph/edge/base.py +++ b/src/backend/langflow/graph/edge/base.py @@ -12,9 +12,7 @@ if TYPE_CHECKING: class SourceHandle(BaseModel): - baseClasses: List[str] = Field( - ..., description="List of base classes for the source handle." - ) + baseClasses: List[str] = Field(..., description="List of base classes for the source handle.") dataType: str = Field(..., description="Data type for the source handle.") id: str = Field(..., description="Unique identifier for the source handle.") @@ -22,9 +20,7 @@ class SourceHandle(BaseModel): class TargetHandle(BaseModel): fieldName: str = Field(..., description="Field name for the target handle.") id: str = Field(..., description="Unique identifier for the target handle.") - inputTypes: Optional[List[str]] = Field( - None, description="List of input types for the target handle." 
- ) + inputTypes: Optional[List[str]] = Field(None, description="List of input types for the target handle.") type: str = Field(..., description="Type of the target handle.") @@ -53,24 +49,16 @@ class Edge: def validate_handles(self, source, target) -> None: if self.target_handle.inputTypes is None: - self.valid_handles = ( - self.target_handle.type in self.source_handle.baseClasses - ) + self.valid_handles = self.target_handle.type in self.source_handle.baseClasses else: self.valid_handles = ( - any( - baseClass in self.target_handle.inputTypes - for baseClass in self.source_handle.baseClasses - ) + any(baseClass in self.target_handle.inputTypes for baseClass in self.source_handle.baseClasses) or self.target_handle.type in self.source_handle.baseClasses ) if not self.valid_handles: logger.debug(self.source_handle) logger.debug(self.target_handle) - raise ValueError( - f"Edge between {source.vertex_type} and {target.vertex_type} " - f"has invalid handles" - ) + raise ValueError(f"Edge between {source.vertex_type} and {target.vertex_type} " f"has invalid handles") def __setstate__(self, state): self.source_id = state["source_id"] @@ -87,11 +75,7 @@ class Edge: # Both lists contain strings and sometimes a string contains the value we are # looking for e.g. 
comgin_out=["Chain"] and target_reqs=["LLMChain"] # so we need to check if any of the strings in source_types is in target_reqs - self.valid = any( - output in target_req - for output in self.source_types - for target_req in self.target_reqs - ) + self.valid = any(output in target_req for output in self.source_types for target_req in self.target_reqs) # Get what type of input the target node is expecting self.matched_type = next( @@ -102,10 +86,7 @@ class Edge: if no_matched_type: logger.debug(self.source_types) logger.debug(self.target_reqs) - raise ValueError( - f"Edge between {source.vertex_type} and {target.vertex_type} " - f"has no matched type" - ) + raise ValueError(f"Edge between {source.vertex_type} and {target.vertex_type} " f"has no matched type") def __repr__(self) -> str: return ( @@ -118,10 +99,7 @@ class Edge: def __eq__(self, __o: object) -> bool: # Create a better way to compare edges - return ( - self._source_handle == __o._source_handle - and self._target_handle == __o._target_handle - ) + return self._source_handle == __o._source_handle and self._target_handle == __o._target_handle class ContractEdge(Edge): @@ -178,9 +156,7 @@ class ContractEdge(Edge): return f"{self.source_id} -[{self.target_param}]-> {self.target_id}" -def log_transaction( - edge: ContractEdge, source: "Vertex", target: "Vertex", status, error=None -): +def log_transaction(edge: ContractEdge, source: "Vertex", target: "Vertex", status, error=None): try: monitor_service = get_monitor_service() clean_params = build_clean_params(target) diff --git a/src/backend/langflow/graph/graph/base.py b/src/backend/langflow/graph/graph/base.py index 32c1369d8..a7d5a1d42 100644 --- a/src/backend/langflow/graph/graph/base.py +++ b/src/backend/langflow/graph/graph/base.py @@ -76,9 +76,7 @@ class Graph: """Returns the state of the graph.""" return self.state_manager.get_state(name, run_id=self._run_id) - def update_state( - self, name: str, record: Union[str, Record], caller: Optional[str] = 
None - ) -> None: + def update_state(self, name: str, record: Union[str, Record], caller: Optional[str] = None) -> None: """Updates the state of the graph.""" if caller: # If there is a caller which is a vertex_id, I want to activate @@ -110,12 +108,9 @@ class Graph: def reset_activated_vertices(self): self.activated_vertices = [] - def append_state( - self, name: str, record: Union[str, Record], caller: Optional[str] = None - ) -> None: + def append_state(self, name: str, record: Union[str, Record], caller: Optional[str] = None) -> None: """Appends the state of the graph.""" if caller: - self.activate_state_vertices(name, caller) self.state_manager.append_state(name, record, run_id=self._run_id) @@ -161,10 +156,7 @@ class Graph: """Runs the graph with the given inputs.""" for vertex_id in self._is_input_vertices: vertex = self.get_vertex(vertex_id) - if input_components and ( - vertex_id not in input_components - or vertex.display_name not in input_components - ): + if input_components and (vertex_id not in input_components or vertex.display_name not in input_components): continue if vertex is None: raise ValueError(f"Vertex {vertex_id} not found") @@ -187,11 +179,7 @@ class Graph: if vertex is None: raise ValueError(f"Vertex {vertex_id} not found") - if ( - not vertex.result - and not stream - and hasattr(vertex, "consume_async_generator") - ): + if not vertex.result and not stream and hasattr(vertex, "consume_async_generator"): await vertex.consume_async_generator() if vertex.display_name in outputs or vertex.id in outputs: vertex_outputs.append(vertex.result) @@ -269,9 +257,7 @@ class Graph: def build_parent_child_map(self): parent_child_map = defaultdict(list) for vertex in self.vertices: - parent_child_map[vertex.id] = [ - child.id for child in self.get_successors(vertex) - ] + parent_child_map[vertex.id] = [child.id for child in self.get_successors(vertex)] return parent_child_map def increment_run_count(self): @@ -320,9 +306,12 @@ class Graph: return 
cls(vertices, edges, flow_id) except KeyError as exc: logger.exception(exc) - raise ValueError( - f"Invalid payload. Expected keys 'nodes' and 'edges'. Found {list(payload.keys())}" - ) from exc + if "nodes" not in payload and "edges" not in payload: + logger.exception(exc) + raise ValueError( + f"Invalid payload. Expected keys 'nodes' and 'edges'. Found {list(payload.keys())}" + ) from exc + raise ValueError(f"Error while creating graph from payload: {exc}") from exc def __eq__(self, other: object) -> bool: if not isinstance(other, Graph): @@ -453,11 +442,7 @@ class Graph: """Updates the edges of a vertex.""" # Vertex has edges, so we need to update the edges for edge in vertex.edges: - if ( - edge not in self.edges - and edge.source_id in self.vertex_map - and edge.target_id in self.vertex_map - ): + if edge not in self.edges and edge.source_id in self.vertex_map and edge.target_id in self.vertex_map: self.edges.append(edge) def _build_graph(self) -> None: @@ -482,11 +467,7 @@ class Graph: return self.vertices.remove(vertex) self.vertex_map.pop(vertex_id) - self.edges = [ - edge - for edge in self.edges - if edge.source_id != vertex_id and edge.target_id != vertex_id - ] + self.edges = [edge for edge in self.edges if edge.source_id != vertex_id and edge.target_id != vertex_id] def _build_vertex_params(self) -> None: """Identifies and handles the LLM vertex within the graph.""" @@ -507,9 +488,7 @@ class Graph: return for vertex in self.vertices: if not self._validate_vertex(vertex): - raise ValueError( - f"{vertex.display_name} is not connected to any other components" - ) + raise ValueError(f"{vertex.display_name} is not connected to any other components") def _validate_vertex(self, vertex: Vertex) -> bool: """Validates a vertex.""" @@ -571,9 +550,7 @@ class Graph: name=f"{vertex.display_name} Run {vertex_task_run_count.get(vertex_id, 0)}", ) tasks.append(task) - vertex_task_run_count[vertex_id] = ( - vertex_task_run_count.get(vertex_id, 0) + 1 - ) + 
vertex_task_run_count[vertex_id] = vertex_task_run_count.get(vertex_id, 0) + 1 logger.debug(f"Running layer {layer_index} with {len(tasks)} tasks") await self._execute_tasks(tasks) logger.debug("Graph processing complete") @@ -615,9 +592,7 @@ class Graph: def dfs(vertex): if state[vertex] == 1: # We have a cycle - raise ValueError( - "Graph contains a cycle, cannot perform topological sort" - ) + raise ValueError("Graph contains a cycle, cannot perform topological sort") if state[vertex] == 0: state[vertex] = 1 for edge in vertex.edges: @@ -641,10 +616,7 @@ class Graph: def get_predecessors(self, vertex): """Returns the predecessors of a vertex.""" - return [ - self.get_vertex(source_id) - for source_id in self.predecessor_map.get(vertex.id, []) - ] + return [self.get_vertex(source_id) for source_id in self.predecessor_map.get(vertex.id, [])] def get_all_successors(self, vertex, recursive=True, flat=True): # Recursively get the successors of the current vertex @@ -685,10 +657,7 @@ class Graph: def get_successors(self, vertex): """Returns the successors of a vertex.""" - return [ - self.get_vertex(target_id) - for target_id in self.successor_map.get(vertex.id, []) - ] + return [self.get_vertex(target_id) for target_id in self.successor_map.get(vertex.id, [])] def get_vertex_neighbors(self, vertex: Vertex) -> Dict[Vertex, int]: """Returns the neighbors of a vertex.""" @@ -734,9 +703,7 @@ class Graph: edges_added.add((source.id, target.id)) return edges - def _get_vertex_class( - self, node_type: str, node_base_type: str, node_id: str - ) -> Type[Vertex]: + def _get_vertex_class(self, node_type: str, node_base_type: str, node_id: str) -> Type[Vertex]: """Returns the node class based on the node type.""" # First we check for the node_base_type node_name = node_id.split("-")[0] @@ -769,18 +736,14 @@ class Graph: vertex_type: str = vertex_data["type"] # type: ignore vertex_base_type: str = vertex_data["node"]["template"]["_type"] # type: ignore - VertexClass = 
self._get_vertex_class( - vertex_type, vertex_base_type, vertex_data["id"] - ) + VertexClass = self._get_vertex_class(vertex_type, vertex_base_type, vertex_data["id"]) vertex_instance = VertexClass(vertex, graph=self) vertex_instance.set_top_level(self.top_level_vertices) vertices.append(vertex_instance) return vertices - def get_children_by_vertex_type( - self, vertex: Vertex, vertex_type: str - ) -> List[Vertex]: + def get_children_by_vertex_type(self, vertex: Vertex, vertex_type: str) -> List[Vertex]: """Returns the children of a vertex based on the vertex type.""" children = [] vertex_types = [vertex.data["type"]] @@ -792,9 +755,7 @@ class Graph: def __repr__(self): vertex_ids = [vertex.id for vertex in self.vertices] - edges_repr = "\n".join( - [f"{edge.source_id} --> {edge.target_id}" for edge in self.edges] - ) + edges_repr = "\n".join([f"{edge.source_id} --> {edge.target_id}" for edge in self.edges]) return f"Graph:\nNodes: {vertex_ids}\nConnections:\n{edges_repr}" def sort_up_to_vertex(self, vertex_id: str, is_start: bool = False) -> List[Vertex]: @@ -862,8 +823,7 @@ class Graph: vertex.id for vertex in vertices # if filter_graphs then only vertex.is_input will be considered - if self.in_degree_map[vertex.id] == 0 - and (not filter_graphs or vertex.is_input) + if self.in_degree_map[vertex.id] == 0 and (not filter_graphs or vertex.is_input) ) layers: List[List[str]] = [] visited = set(queue) @@ -937,9 +897,7 @@ class Graph: return refined_layers - def sort_chat_inputs_first( - self, vertices_layers: List[List[str]] - ) -> List[List[str]]: + def sort_chat_inputs_first(self, vertices_layers: List[List[str]]) -> List[List[str]]: chat_inputs_first = [] for layer in vertices_layers: for vertex_id in layer: @@ -980,9 +938,7 @@ class Graph: first_layer = vertices_layers[0] # save the only the rest self.vertices_layers = vertices_layers[1:] - self.vertices_to_run = { - vertex_id for vertex_id in chain.from_iterable(vertices_layers) - } + self.vertices_to_run = 
{vertex_id for vertex_id in chain.from_iterable(vertices_layers)} # Return just the first layer return first_layer @@ -993,15 +949,11 @@ class Graph: self.vertices_to_run.remove(vertex_id) return should_run - def sort_interface_components_first( - self, vertices_layers: List[List[str]] - ) -> List[List[str]]: + def sort_interface_components_first(self, vertices_layers: List[List[str]]) -> List[List[str]]: """Sorts the vertices in the graph so that vertices containing ChatInput or ChatOutput come first.""" def contains_interface_component(vertex): - return any( - component.value in vertex for component in InterfaceComponentTypes - ) + return any(component.value in vertex for component in InterfaceComponentTypes) # Sort each inner list so that vertices containing ChatInput or ChatOutput come first sorted_vertices = [ @@ -1013,22 +965,16 @@ class Graph: ] return sorted_vertices - def sort_by_avg_build_time( - self, vertices_layers: List[List[str]] - ) -> List[List[str]]: + def sort_by_avg_build_time(self, vertices_layers: List[List[str]]) -> List[List[str]]: """Sorts the vertices in the graph so that vertices with the lowest average build time come first.""" def sort_layer_by_avg_build_time(vertices_ids: List[str]) -> List[str]: """Sorts the vertices in the graph so that vertices with the lowest average build time come first.""" if len(vertices_ids) == 1: return vertices_ids - vertices_ids.sort( - key=lambda vertex_id: self.get_vertex(vertex_id).avg_build_time - ) + vertices_ids.sort(key=lambda vertex_id: self.get_vertex(vertex_id).avg_build_time) return vertices_ids - sorted_vertices = [ - sort_layer_by_avg_build_time(layer) for layer in vertices_layers - ] + sorted_vertices = [sort_layer_by_avg_build_time(layer) for layer in vertices_layers] return sorted_vertices diff --git a/src/backend/langflow/graph/graph/constants.py b/src/backend/langflow/graph/graph/constants.py index 2badbf0eb..0d0e69c77 100644 --- a/src/backend/langflow/graph/graph/constants.py +++ 
b/src/backend/langflow/graph/graph/constants.py @@ -47,10 +47,7 @@ class VertexTypesDict(LazyLoadDictBase): **{t: types.DocumentLoaderVertex for t in documentloader_creator.to_list()}, **{t: types.TextSplitterVertex for t in textsplitter_creator.to_list()}, **{t: types.OutputParserVertex for t in output_parser_creator.to_list()}, - **{ - t: types.CustomComponentVertex - for t in custom_component_creator.to_list() - }, + **{t: types.CustomComponentVertex for t in custom_component_creator.to_list()}, **{t: types.RetrieverVertex for t in retriever_creator.to_list()}, **{t: types.ChatVertex for t in CHAT_COMPONENTS}, **{t: types.RoutingVertex for t in ROUTING_COMPONENTS}, diff --git a/src/backend/langflow/graph/vertex/base.py b/src/backend/langflow/graph/vertex/base.py index 66fdd44c6..711b01bad 100644 --- a/src/backend/langflow/graph/vertex/base.py +++ b/src/backend/langflow/graph/vertex/base.py @@ -313,7 +313,16 @@ class Vertex: params[param_key] = [] params[param_key].append(self.graph.get_vertex(edge.source_id)) elif edge.target_id == self.id: - params[param_key] = self.graph.get_vertex(edge.source_id) + if isinstance(template_dict[param_key].get("value"), dict): + # we don't know the key of the dict but we need to set the value + # to the vertex that is the source of the edge + param_dict = template_dict[param_key]["value"] + params[param_key] = { + key: self.graph.get_vertex(edge.source_id) + for key in param_dict.keys() + } + else: + params[param_key] = self.graph.get_vertex(edge.source_id) for key, value in template_dict.items(): if key in params: @@ -493,9 +502,24 @@ class Vertex: await self._build_node_and_update_params(key, value, user_id) elif isinstance(value, list) and self._is_list_of_nodes(value): await self._build_list_of_nodes_and_update_params(key, value, user_id) + elif isinstance(value, dict): + await self._build_dict_and_update_params(key, value, user_id) elif key not in self.params or self.updated_raw_params: self.params[key] = value + async def 
_build_dict_and_update_params( + self, key, nodes_dict: Dict[str, "Vertex"], user_id=None + ): + """ + Iterates over a dictionary of nodes, builds each and updates the params dictionary. + """ + for sub_key, value in nodes_dict.items(): + if not self._is_node(value): + self.params[key][sub_key] = value + else: + built = await value.get_result(requester=self, user_id=user_id) + self.params[key][sub_key] = built + def _is_node(self, value): """ Checks if the provided value is an instance of Vertex. diff --git a/src/backend/langflow/graph/vertex/types.py b/src/backend/langflow/graph/vertex/types.py index 1bfd7cacf..99791729f 100644 --- a/src/backend/langflow/graph/vertex/types.py +++ b/src/backend/langflow/graph/vertex/types.py @@ -1,7 +1,6 @@ import ast import json -from typing import (AsyncIterator, Callable, Dict, Iterator, List, Optional, - Union) +from typing import AsyncIterator, Callable, Dict, Iterator, List, Optional, Union import yaml from langchain_core.messages import AIMessage @@ -398,7 +397,7 @@ class ChatVertex(Vertex): self.will_stream = stream_url is not None if artifacts: - self.artifacts = artifacts.model_dump() + self.artifacts = artifacts.model_dump(exclude_none=True) if isinstance(self._built_object, (AsyncIterator, Iterator)): if self.params["return_record"]: self._built_object = Record(text=message, data=self.artifacts) diff --git a/src/backend/langflow/helpers/record.py b/src/backend/langflow/helpers/record.py index 9e1f2eb34..dd480e4e7 100644 --- a/src/backend/langflow/helpers/record.py +++ b/src/backend/langflow/helpers/record.py @@ -30,8 +30,5 @@ def records_to_text(template: str, records: list[Record]) -> list[str]: records = [records] # Check if there are any format strings in the template - formated_records = [ - template.format(text=record.text, data=record.data, **record.data) - for record in records - ] + formated_records = [template.format(**record.data) for record in records] return "\n".join(formated_records) diff --git 
a/src/backend/langflow/initial_setup/__init__.py b/src/backend/langflow/initial_setup/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/src/backend/langflow/initial_setup/setup.py b/src/backend/langflow/initial_setup/setup.py new file mode 100644 index 000000000..f1e3fd9ce --- /dev/null +++ b/src/backend/langflow/initial_setup/setup.py @@ -0,0 +1,146 @@ +from datetime import datetime +from pathlib import Path + +import orjson +from emoji import demojize, purely_emoji +from loguru import logger +from sqlmodel import select + +from langflow.services.database.models.flow.model import Flow, FlowCreate +from langflow.services.deps import session_scope + +STARTER_FOLDER_NAME = "Starter Projects" + + +# In the folder ./starter_projects we have a few JSON files that represent +# starter projects. We want to load these into the database so that users +# can use them as a starting point for their own projects. + + +def load_starter_projects(): + starter_projects = [] + folder = Path(__file__).parent / "starter_projects" + for file in folder.glob("*.json"): + project = orjson.loads(file.read_text()) + starter_projects.append(project) + logger.info(f"Loaded starter project {file}") + return starter_projects + + +def get_project_data(project): + project_name = project.get("name") + project_description = project.get("description") + project_is_component = project.get("is_component") + project_updated_at = project.get("updated_at") + if not project_updated_at: + project_updated_at = datetime.utcnow().isoformat() + updated_at_datetime = datetime.strptime(project_updated_at, "%Y-%m-%dT%H:%M:%S.%f") + project_data = project.get("data") + project_icon = project.get("icon") + project_icon_bg_color = project.get("icon_bg_color") + return ( + project_name, + project_description, + project_is_component, + updated_at_datetime, + project_data, + project_icon, + project_icon_bg_color, + ) + + +def update_existing_project( + existing_project, + project_name, + 
project_description, + project_is_component, + updated_at_datetime, + project_data, + project_icon, + project_icon_bg_color, +): + logger.info(f"Updating starter project {project_name}") + existing_project.data = project_data + existing_project.folder = STARTER_FOLDER_NAME + existing_project.description = project_description + existing_project.is_component = project_is_component + existing_project.updated_at = updated_at_datetime + existing_project.icon = project_icon + existing_project.icon_bg_color = project_icon_bg_color + + +def create_new_project( + session, + project_name, + project_description, + project_is_component, + updated_at_datetime, + project_data, + project_icon, + project_icon_bg_color, +): + logger.info(f"Creating starter project {project_name}") + new_project = FlowCreate( + name=project_name, + description=project_description, + icon=project_icon if not purely_emoji(project_icon) else demojize(project_icon), + icon_bg_color=project_icon_bg_color, + data=project_data, + is_component=project_is_component, + updated_at=updated_at_datetime, + folder=STARTER_FOLDER_NAME, + ) + db_flow = Flow.model_validate(new_project, from_attributes=True) + session.add(db_flow) + + +def get_all_flows_similar_to_project(session, project_name): + flows = session.exec( + select(Flow).where( + Flow.name == project_name, + Flow.folder == STARTER_FOLDER_NAME, + ) + ).all() + return flows + + +def delete_start_projects(session): + flows = session.exec( + select(Flow).where( + Flow.folder == STARTER_FOLDER_NAME, + ) + ).all() + for flow in flows: + session.delete(flow) + + +def create_or_update_starter_projects(): + with session_scope() as session: + starter_projects = load_starter_projects() + delete_start_projects(session) + for project in starter_projects: + ( + project_name, + project_description, + project_is_component, + updated_at_datetime, + project_data, + project_icon, + project_icon_bg_color, + ) = get_project_data(project) + if project_name and project_data: + 
for existing_project in get_all_flows_similar_to_project( + session, project_name + ): + session.delete(existing_project) + + create_new_project( + session, + project_name, + project_description, + project_is_component, + updated_at_datetime, + project_data, + project_icon, + project_icon_bg_color, + ) diff --git a/src/backend/langflow/initial_setup/starter_projects/Langflow Basic Prompting.json b/src/backend/langflow/initial_setup/starter_projects/Langflow Basic Prompting.json new file mode 100644 index 000000000..879cb1468 --- /dev/null +++ b/src/backend/langflow/initial_setup/starter_projects/Langflow Basic Prompting.json @@ -0,0 +1,1199 @@ +{ + "id": "4ac1ae80-b818-4fdf-b72c-f22dace784a5", + "icon": "📝", + "icon_bg_color": "#FFD700", + "data": { + "nodes": [ + { + "id": "ChatInput-WcFzs", + "type": "genericNode", + "position": { + "x": 86.66131544226482, + "y": 69.51987428063671 + }, + "data": { + "type": "ChatInput", + "node": { + "template": { + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from typing import Optional, Union\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.field_typing import Text\nfrom langflow.schema import Record\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Used to get user input from the chat.\"\n\n def build(\n self,\n sender: Optional[str] = \"User\",\n sender_name: Optional[str] = \"User\",\n input_value: Optional[str] = None,\n session_id: Optional[str] = None,\n return_record: Optional[bool] = False,\n ) -> Union[Text, Record]:\n return super().build(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n session_id=session_id,\n return_record=return_record,\n )\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": false, + "dynamic": true, + "info": "", + "title_case": false + }, + "input_value": { + "type": "str", + "required": 
false, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "input_value", + "display_name": "Message", + "advanced": false, + "input_types": [ + "Text" + ], + "dynamic": false, + "info": "", + "title_case": false, + "value": "Write a press release " + }, + "return_record": { + "type": "bool", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "return_record", + "display_name": "Return Record", + "advanced": false, + "dynamic": false, + "info": "Return the message as a record containing the sender, sender_name, and session_id.", + "title_case": false + }, + "sender": { + "type": "str", + "required": false, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "value": "User", + "fileTypes": [], + "file_path": "", + "password": false, + "options": [ + "Machine", + "User" + ], + "name": "sender", + "display_name": "Sender Type", + "advanced": false, + "dynamic": false, + "info": "", + "title_case": false, + "input_types": [ + "Text" + ] + }, + "sender_name": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": "User", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "sender_name", + "display_name": "Sender Name", + "advanced": false, + "dynamic": false, + "info": "", + "title_case": false, + "input_types": [ + "Text" + ] + }, + "session_id": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "session_id", + "display_name": "Session ID", + "advanced": false, + "dynamic": false, + "info": "If provided, the message will be stored in the memory.", + "title_case": false, + "input_types": [ + "Text" + ] + 
}, + "_type": "CustomComponent" + }, + "description": "Used to get user input from the chat.", + "base_classes": [ + "object", + "Text", + "Record", + "str" + ], + "display_name": "Chat Input", + "documentation": "", + "custom_fields": { + "sender": null, + "sender_name": null, + "input_value": null, + "session_id": null, + "return_record": null + }, + "output_types": [ + "Text", + "Record" + ], + "field_formatters": {}, + "frozen": false, + "field_order": [], + "beta": true + }, + "id": "ChatInput-WcFzs" + }, + "selected": false, + "width": 384, + "height": 667, + "positionAbsolute": { + "x": 86.66131544226482, + "y": 69.51987428063671 + }, + "dragging": false + }, + { + "id": "Prompt-QtWOn", + "type": "genericNode", + "position": { + "x": 731.5380376186406, + "y": 273.5294585628963 + }, + "data": { + "type": "Prompt", + "node": { + "template": { + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from langchain_core.prompts import PromptTemplate\n\nfrom langflow import CustomComponent\nfrom langflow.field_typing import Prompt, TemplateField, Text\n\n\nclass PromptComponent(CustomComponent):\n display_name: str = \"Prompt\"\n description: str = \"A component for creating prompts using templates\"\n beta = True\n\n def build_config(self):\n return {\n \"template\": TemplateField(display_name=\"Template\"),\n \"code\": TemplateField(advanced=True),\n }\n\n def build(\n self,\n template: Prompt,\n **kwargs,\n ) -> Text:\n prompt_template = PromptTemplate.from_template(Text(template))\n\n attributes_to_check = [\"text\", \"page_content\"]\n for key, value in kwargs.copy().items():\n for attribute in attributes_to_check:\n if hasattr(value, attribute):\n kwargs[key] = getattr(value, attribute)\n\n try:\n formated_prompt = prompt_template.format(**kwargs)\n except Exception as exc:\n raise ValueError(f\"Error formatting prompt: {exc}\") from exc\n self.status = f'Prompt: 
\"{formated_prompt}\"'\n return formated_prompt\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "title_case": false + }, + "template": { + "type": "prompt", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": "{request}\n\n- {topic_1}\n- {topic_2}\n\n\nAnswer:\n\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "template", + "display_name": "Template", + "advanced": false, + "input_types": [ + "Text" + ], + "dynamic": false, + "info": "", + "title_case": false + }, + "_type": "CustomComponent", + "request": { + "field_type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "request", + "display_name": "request", + "advanced": false, + "input_types": [ + "Document", + "BaseOutputParser", + "Text", + "Record" + ], + "dynamic": false, + "info": "", + "title_case": false, + "type": "str" + }, + "topic_1": { + "field_type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "topic_1", + "display_name": "topic_1", + "advanced": false, + "input_types": [ + "Document", + "BaseOutputParser", + "Text", + "Record" + ], + "dynamic": false, + "info": "", + "title_case": false, + "type": "str" + }, + "topic_2": { + "field_type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "topic_2", + "display_name": "topic_2", + "advanced": false, + "input_types": [ + "Document", + "BaseOutputParser", + "Text", + "Record" + ], + "dynamic": false, + "info": "", + "title_case": false, + "type": "str" + } + }, + 
"description": "A component for creating prompts using templates", + "icon": null, + "is_input": null, + "is_output": null, + "is_composition": null, + "base_classes": [ + "object", + "Text", + "str" + ], + "name": "", + "display_name": "Prompt", + "documentation": "", + "custom_fields": { + "template": [ + "request", + "topic_1", + "topic_2" + ] + }, + "output_types": [ + "Text" + ], + "full_path": null, + "field_formatters": {}, + "frozen": false, + "field_order": [], + "beta": true, + "error": null + }, + "id": "Prompt-QtWOn", + "description": "A component for creating prompts using templates", + "display_name": "Prompt" + }, + "selected": false, + "width": 384, + "height": 571, + "dragging": false + }, + { + "id": "TextInput-xUQ9w", + "type": "genericNode", + "position": { + "x": 91.73477837172948, + "y": 787.6263883143245 + }, + "data": { + "type": "TextInput", + "node": { + "template": { + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from typing import Optional\n\nfrom langflow.base.io.text import TextComponent\nfrom langflow.field_typing import Text\n\n\nclass TextInput(TextComponent):\n display_name = \"Text Input\"\n description = \"Used to pass text input to the next component.\"\n\n def build(self, input_value: Optional[str] = \"\") -> Text:\n return super().build(input_value=input_value)\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": false, + "dynamic": true, + "info": "", + "title_case": false + }, + "input_value": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "Cars", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "input_value", + "display_name": "Value", + "advanced": false, + "dynamic": false, + "info": "", + "title_case": false, + "input_types": [ + "Text" + ] + }, + "_type": "CustomComponent" + }, + 
"description": "Used to pass text input to the next component.", + "base_classes": [ + "object", + "Text", + "str" + ], + "display_name": "Topic 1", + "documentation": "", + "custom_fields": { + "input_value": null + }, + "output_types": [ + "Text" + ], + "field_formatters": {}, + "frozen": false, + "field_order": [ + "input_value" + ], + "beta": true + }, + "id": "TextInput-xUQ9w" + }, + "selected": false, + "width": 384, + "height": 289, + "positionAbsolute": { + "x": 91.73477837172948, + "y": 787.6263883143245 + }, + "dragging": false + }, + { + "id": "TextInput-l4zQt", + "type": "genericNode", + "position": { + "x": 93.56470545178581, + "y": 1125.2986229040628 + }, + "data": { + "type": "TextInput", + "node": { + "template": { + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from typing import Optional\n\nfrom langflow.base.io.text import TextComponent\nfrom langflow.field_typing import Text\n\n\nclass TextInput(TextComponent):\n display_name = \"Text Input\"\n description = \"Used to pass text input to the next component.\"\n\n def build(self, input_value: Optional[str] = \"\") -> Text:\n return super().build(input_value=input_value)\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": false, + "dynamic": true, + "info": "", + "title_case": false + }, + "input_value": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "Bottle", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "input_value", + "display_name": "Value", + "advanced": false, + "dynamic": false, + "info": "", + "title_case": false, + "input_types": [ + "Text" + ] + }, + "_type": "CustomComponent" + }, + "description": "Used to pass text input to the next component.", + "base_classes": [ + "object", + "Text", + "str" + ], + "display_name": "Topic 2", + "documentation": "", + 
"custom_fields": { + "input_value": null + }, + "output_types": [ + "Text" + ], + "field_formatters": {}, + "frozen": false, + "field_order": [ + "input_value" + ], + "beta": true + }, + "id": "TextInput-l4zQt" + }, + "selected": false, + "width": 384, + "height": 289, + "positionAbsolute": { + "x": 93.56470545178581, + "y": 1125.2986229040628 + }, + "dragging": false + }, + { + "id": "TextOutput-fTp5e", + "type": "genericNode", + "position": { + "x": 1242.6494961686594, + "y": 100.3023112016921 + }, + "data": { + "type": "TextOutput", + "node": { + "template": { + "input_value": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": "", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "input_value", + "display_name": "Value", + "advanced": false, + "dynamic": false, + "info": "", + "title_case": false, + "input_types": [ + "Text" + ] + }, + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from typing import Optional\n\nfrom langflow.base.io.text import TextComponent\nfrom langflow.field_typing import Text\n\n\nclass TextOutput(TextComponent):\n display_name = \"Text Output\"\n description = \"Used to pass text output to the next component.\"\n\n field_config = {\n \"input_value\": {\"display_name\": \"Value\"},\n }\n\n def build(self, input_value: Optional[Text] = \"\") -> Text:\n return super().build(input_value=input_value)\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": false, + "dynamic": true, + "info": "", + "title_case": false + }, + "_type": "CustomComponent" + }, + "description": "Used to pass text output to the next component.", + "base_classes": [ + "object", + "Text", + "str" + ], + "display_name": "Prompt Output", + "documentation": "", + "custom_fields": { + "input_value": null + }, + "output_types": [ + "Text" + ], + 
"field_formatters": {}, + "frozen": false, + "field_order": [ + "input_value" + ], + "beta": true + }, + "id": "TextOutput-fTp5e" + }, + "selected": false, + "width": 384, + "height": 297, + "positionAbsolute": { + "x": 1242.6494961686594, + "y": 100.3023112016921 + }, + "dragging": false + }, + { + "id": "ChatOutput-AVN8s", + "type": "genericNode", + "position": { + "x": 2299.2806014585203, + "y": 449.2461295937437 + }, + "data": { + "type": "ChatOutput", + "node": { + "template": { + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from typing import Optional, Union\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.field_typing import Text\nfrom langflow.schema import Record\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Used to send a message to the chat.\"\n\n def build(\n self,\n sender: Optional[str] = \"Machine\",\n sender_name: Optional[str] = \"AI\",\n input_value: Optional[str] = None,\n session_id: Optional[str] = None,\n return_record: Optional[bool] = False,\n ) -> Union[Text, Record]:\n return super().build(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n session_id=session_id,\n return_record=return_record,\n )\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": false, + "dynamic": true, + "info": "", + "title_case": false + }, + "input_value": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "input_value", + "display_name": "Message", + "advanced": false, + "input_types": [ + "Text" + ], + "dynamic": false, + "info": "", + "title_case": false + }, + "return_record": { + "type": "bool", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": false, + 
"fileTypes": [], + "file_path": "", + "password": false, + "name": "return_record", + "display_name": "Return Record", + "advanced": false, + "dynamic": false, + "info": "Return the message as a record containing the sender, sender_name, and session_id.", + "title_case": false + }, + "sender": { + "type": "str", + "required": false, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "value": "Machine", + "fileTypes": [], + "file_path": "", + "password": false, + "options": [ + "Machine", + "User" + ], + "name": "sender", + "display_name": "Sender Type", + "advanced": false, + "dynamic": false, + "info": "", + "title_case": false, + "input_types": [ + "Text" + ] + }, + "sender_name": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": "AI", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "sender_name", + "display_name": "Sender Name", + "advanced": false, + "dynamic": false, + "info": "", + "title_case": false, + "input_types": [ + "Text" + ] + }, + "session_id": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "session_id", + "display_name": "Session ID", + "advanced": false, + "dynamic": false, + "info": "If provided, the message will be stored in the memory.", + "title_case": false, + "input_types": [ + "Text" + ] + }, + "_type": "CustomComponent" + }, + "description": "Used to send a message to the chat.", + "base_classes": [ + "object", + "Text", + "Record", + "str" + ], + "display_name": "Chat Output", + "documentation": "", + "custom_fields": { + "sender": null, + "sender_name": null, + "input_value": null, + "session_id": null, + "return_record": null + }, + "output_types": [ + "Text", + "Record" + ], + "field_formatters": {}, + "frozen": false, + "field_order": [], + "beta": true + }, + "id": 
"ChatOutput-AVN8s" + }, + "selected": false, + "width": 384, + "height": 667, + "positionAbsolute": { + "x": 2299.2806014585203, + "y": 449.2461295937437 + }, + "dragging": false + }, + { + "id": "OpenAIModel-IRzsd", + "type": "genericNode", + "position": { + "x": 1735.1051821296949, + "y": 246.4955882724468 + }, + "data": { + "type": "OpenAIModel", + "node": { + "template": { + "input_value": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "input_value", + "display_name": "Input", + "advanced": false, + "dynamic": false, + "info": "", + "title_case": false, + "input_types": [ + "Text" + ] + }, + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from typing import Optional\n\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.components.models.base.model import LCModelComponent\nfrom langflow.field_typing import NestedDict, Text\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI Model\"\n description = \"Generates text using OpenAI's models.\"\n icon = \"OpenAI\"\n\n def build_config(self):\n return {\n \"input_value\": {\"display_name\": \"Input\"},\n \"max_tokens\": {\n \"display_name\": \"Max Tokens\",\n \"advanced\": False,\n \"required\": False,\n },\n \"model_kwargs\": {\n \"display_name\": \"Model Kwargs\",\n \"advanced\": True,\n \"required\": False,\n },\n \"model_name\": {\n \"display_name\": \"Model Name\",\n \"advanced\": False,\n \"required\": False,\n \"options\": [\n \"gpt-4-turbo-preview\",\n \"gpt-4-0125-preview\",\n \"gpt-4-1106-preview\",\n \"gpt-4-vision-preview\",\n \"gpt-3.5-turbo-0125\",\n \"gpt-3.5-turbo-1106\",\n ],\n },\n \"openai_api_base\": {\n \"display_name\": \"OpenAI API Base\",\n \"advanced\": False,\n \"required\": False,\n \"info\": (\n \"The base 
URL of the OpenAI API. Defaults to https://api.openai.com/v1.\\n\\n\"\n \"You can change this to use other APIs like JinaChat, LocalAI and Prem.\"\n ),\n },\n \"openai_api_key\": {\n \"display_name\": \"OpenAI API Key\",\n \"advanced\": False,\n \"required\": False,\n \"password\": True,\n },\n \"temperature\": {\n \"display_name\": \"Temperature\",\n \"advanced\": False,\n \"required\": False,\n \"value\": 0.7,\n },\n \"stream\": {\n \"display_name\": \"Stream\",\n \"info\": \"Stream the response from the model.\",\n },\n }\n\n def build(\n self,\n input_value: Text,\n max_tokens: Optional[int] = 256,\n model_kwargs: NestedDict = {},\n model_name: str = \"gpt-4-1106-preview\",\n openai_api_base: Optional[str] = None,\n openai_api_key: Optional[str] = None,\n temperature: float = 0.7,\n stream: bool = False,\n ) -> Text:\n if not openai_api_base:\n openai_api_base = \"https://api.openai.com/v1\"\n if openai_api_key:\n secret_key = SecretStr(openai_api_key)\n else:\n secret_key = None\n output = ChatOpenAI(\n max_tokens=max_tokens,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=secret_key,\n temperature=temperature,\n )\n\n return self.get_result(output=output, stream=stream, input_value=input_value)\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": false, + "dynamic": true, + "info": "", + "title_case": false + }, + "max_tokens": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": 256, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "max_tokens", + "display_name": "Max Tokens", + "advanced": false, + "dynamic": false, + "info": "", + "title_case": false + }, + "model_kwargs": { + "type": "NestedDict", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": {}, + "fileTypes": [], + "file_path": "", + "password": false, + "name": 
"model_kwargs", + "display_name": "Model Kwargs", + "advanced": true, + "dynamic": false, + "info": "", + "title_case": false + }, + "model_name": { + "type": "str", + "required": false, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "value": "gpt-4-1106-preview", + "fileTypes": [], + "file_path": "", + "password": false, + "options": [ + "gpt-4-turbo-preview", + "gpt-4-0125-preview", + "gpt-4-1106-preview", + "gpt-4-vision-preview", + "gpt-3.5-turbo-0125", + "gpt-3.5-turbo-1106" + ], + "name": "model_name", + "display_name": "Model Name", + "advanced": false, + "dynamic": false, + "info": "", + "title_case": false, + "input_types": [ + "Text" + ] + }, + "openai_api_base": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "openai_api_base", + "display_name": "OpenAI API Base", + "advanced": false, + "dynamic": false, + "info": "The base URL of the OpenAI API. 
Defaults to https://api.openai.com/v1.\n\nYou can change this to use other APIs like JinaChat, LocalAI and Prem.", + "title_case": false, + "input_types": [ + "Text" + ] + }, + "openai_api_key": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": true, + "name": "openai_api_key", + "display_name": "OpenAI API Key", + "advanced": false, + "dynamic": false, + "info": "", + "title_case": false, + "input_types": [ + "Text" + ] + }, + "stream": { + "type": "bool", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": true, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "stream", + "display_name": "Stream", + "advanced": false, + "dynamic": false, + "info": "Stream the response from the model.", + "title_case": false + }, + "temperature": { + "type": "float", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": "0.2", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "temperature", + "display_name": "Temperature", + "advanced": false, + "dynamic": false, + "info": "", + "rangeSpec": { + "min": -1, + "max": 1, + "step": 0.1 + }, + "title_case": false + }, + "_type": "CustomComponent" + }, + "description": "Generates text using OpenAI's models.", + "icon": "OpenAI", + "base_classes": [ + "object", + "Text", + "str" + ], + "display_name": "OpenAI Model", + "documentation": "", + "custom_fields": { + "input_value": null, + "max_tokens": null, + "model_kwargs": null, + "model_name": null, + "openai_api_base": null, + "openai_api_key": null, + "temperature": null, + "stream": null + }, + "output_types": [ + "Text" + ], + "field_formatters": {}, + "frozen": false, + "field_order": [], + "beta": true + }, + "id": "OpenAIModel-IRzsd" + }, + "selected": false, + "width": 384, + "height": 847, + "positionAbsolute": { + 
"x": 1735.1051821296949, + "y": 246.4955882724468 + }, + "dragging": false + } + ], + "edges": [ + { + "source": "ChatInput-WcFzs", + "sourceHandle": "{œbaseClassesœ:[œobjectœ,œTextœ,œRecordœ,œstrœ],œdataTypeœ:œChatInputœ,œidœ:œChatInput-WcFzsœ}", + "target": "Prompt-QtWOn", + "targetHandle": "{œfieldNameœ:œrequestœ,œidœ:œPrompt-QtWOnœ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ,œTextœ,œRecordœ],œtypeœ:œstrœ}", + "data": { + "targetHandle": { + "fieldName": "request", + "id": "Prompt-QtWOn", + "inputTypes": [ + "Document", + "BaseOutputParser", + "Text", + "Record" + ], + "type": "str" + }, + "sourceHandle": { + "baseClasses": [ + "object", + "Text", + "Record", + "str" + ], + "dataType": "ChatInput", + "id": "ChatInput-WcFzs" + } + }, + "style": { + "stroke": "#555" + }, + "className": "stroke-foreground stroke-connection", + "id": "reactflow__edge-ChatInput-WcFzs{œbaseClassesœ:[œobjectœ,œTextœ,œRecordœ,œstrœ],œdataTypeœ:œChatInputœ,œidœ:œChatInput-WcFzsœ}-Prompt-QtWOn{œfieldNameœ:œrequestœ,œidœ:œPrompt-QtWOnœ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ,œTextœ,œRecordœ],œtypeœ:œstrœ}" + }, + { + "source": "Prompt-QtWOn", + "sourceHandle": "{œbaseClassesœ:[œobjectœ,œTextœ,œstrœ],œdataTypeœ:œPromptœ,œidœ:œPrompt-QtWOnœ}", + "target": "TextOutput-fTp5e", + "targetHandle": "{œfieldNameœ:œinput_valueœ,œidœ:œTextOutput-fTp5eœ,œinputTypesœ:[œTextœ],œtypeœ:œstrœ}", + "data": { + "targetHandle": { + "fieldName": "input_value", + "id": "TextOutput-fTp5e", + "inputTypes": [ + "Text" + ], + "type": "str" + }, + "sourceHandle": { + "baseClasses": [ + "object", + "Text", + "str" + ], + "dataType": "Prompt", + "id": "Prompt-QtWOn" + } + }, + "style": { + "stroke": "#555" + }, + "className": "stroke-foreground stroke-connection", + "id": "reactflow__edge-Prompt-QtWOn{œbaseClassesœ:[œobjectœ,œTextœ,œstrœ],œdataTypeœ:œPromptœ,œidœ:œPrompt-QtWOnœ}-TextOutput-fTp5e{œfieldNameœ:œinput_valueœ,œidœ:œTextOutput-fTp5eœ,œinputTypesœ:[œTextœ],œtypeœ:œstrœ}" + }, + { + "source": 
"TextOutput-fTp5e", + "sourceHandle": "{œbaseClassesœ:[œobjectœ,œTextœ,œstrœ],œdataTypeœ:œTextOutputœ,œidœ:œTextOutput-fTp5eœ}", + "target": "OpenAIModel-IRzsd", + "targetHandle": "{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-IRzsdœ,œinputTypesœ:[œTextœ],œtypeœ:œstrœ}", + "data": { + "targetHandle": { + "fieldName": "input_value", + "id": "OpenAIModel-IRzsd", + "inputTypes": [ + "Text" + ], + "type": "str" + }, + "sourceHandle": { + "baseClasses": [ + "object", + "Text", + "str" + ], + "dataType": "TextOutput", + "id": "TextOutput-fTp5e" + } + }, + "style": { + "stroke": "#555" + }, + "className": "stroke-foreground stroke-connection", + "id": "reactflow__edge-TextOutput-fTp5e{œbaseClassesœ:[œobjectœ,œTextœ,œstrœ],œdataTypeœ:œTextOutputœ,œidœ:œTextOutput-fTp5eœ}-OpenAIModel-IRzsd{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-IRzsdœ,œinputTypesœ:[œTextœ],œtypeœ:œstrœ}" + }, + { + "source": "OpenAIModel-IRzsd", + "sourceHandle": "{œbaseClassesœ:[œobjectœ,œTextœ,œstrœ],œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-IRzsdœ}", + "target": "ChatOutput-AVN8s", + "targetHandle": "{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-AVN8sœ,œinputTypesœ:[œTextœ],œtypeœ:œstrœ}", + "data": { + "targetHandle": { + "fieldName": "input_value", + "id": "ChatOutput-AVN8s", + "inputTypes": [ + "Text" + ], + "type": "str" + }, + "sourceHandle": { + "baseClasses": [ + "object", + "Text", + "str" + ], + "dataType": "OpenAIModel", + "id": "OpenAIModel-IRzsd" + } + }, + "style": { + "stroke": "#555" + }, + "className": "stroke-foreground stroke-connection", + "id": "reactflow__edge-OpenAIModel-IRzsd{œbaseClassesœ:[œobjectœ,œTextœ,œstrœ],œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-IRzsdœ}-ChatOutput-AVN8s{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-AVN8sœ,œinputTypesœ:[œTextœ],œtypeœ:œstrœ}" + }, + { + "source": "TextInput-l4zQt", + "sourceHandle": "{œbaseClassesœ:[œobjectœ,œTextœ,œstrœ],œdataTypeœ:œTextInputœ,œidœ:œTextInput-l4zQtœ}", + "target": "Prompt-QtWOn", + "targetHandle": 
"{œfieldNameœ:œtopic_2œ,œidœ:œPrompt-QtWOnœ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ,œTextœ,œRecordœ],œtypeœ:œstrœ}", + "data": { + "targetHandle": { + "fieldName": "topic_2", + "id": "Prompt-QtWOn", + "inputTypes": [ + "Document", + "BaseOutputParser", + "Text", + "Record" + ], + "type": "str" + }, + "sourceHandle": { + "baseClasses": [ + "object", + "Text", + "str" + ], + "dataType": "TextInput", + "id": "TextInput-l4zQt" + } + }, + "style": { + "stroke": "#555" + }, + "className": "stroke-foreground stroke-connection", + "id": "reactflow__edge-TextInput-l4zQt{œbaseClassesœ:[œobjectœ,œTextœ,œstrœ],œdataTypeœ:œTextInputœ,œidœ:œTextInput-l4zQtœ}-Prompt-QtWOn{œfieldNameœ:œtopic_2œ,œidœ:œPrompt-QtWOnœ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ,œTextœ,œRecordœ],œtypeœ:œstrœ}" + }, + { + "source": "TextInput-xUQ9w", + "sourceHandle": "{œbaseClassesœ:[œobjectœ,œTextœ,œstrœ],œdataTypeœ:œTextInputœ,œidœ:œTextInput-xUQ9wœ}", + "target": "Prompt-QtWOn", + "targetHandle": "{œfieldNameœ:œtopic_1œ,œidœ:œPrompt-QtWOnœ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ,œTextœ,œRecordœ],œtypeœ:œstrœ}", + "data": { + "targetHandle": { + "fieldName": "topic_1", + "id": "Prompt-QtWOn", + "inputTypes": [ + "Document", + "BaseOutputParser", + "Text", + "Record" + ], + "type": "str" + }, + "sourceHandle": { + "baseClasses": [ + "object", + "Text", + "str" + ], + "dataType": "TextInput", + "id": "TextInput-xUQ9w" + } + }, + "style": { + "stroke": "#555" + }, + "className": "stroke-foreground stroke-connection", + "id": "reactflow__edge-TextInput-xUQ9w{œbaseClassesœ:[œobjectœ,œTextœ,œstrœ],œdataTypeœ:œTextInputœ,œidœ:œTextInput-xUQ9wœ}-Prompt-QtWOn{œfieldNameœ:œtopic_1œ,œidœ:œPrompt-QtWOnœ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ,œTextœ,œRecordœ],œtypeœ:œstrœ}" + } + ], + "viewport": { + "x": 81.87154098468557, + "y": 266.8627952720353, + "zoom": 0.315125847895746 + } + }, + "description": "Use a language model to generate text based on a prompt. 
\n\nIn this project, you'll be able to generate text based on a request and some topics.\n\nThe Topic 1 and Topic 2 components are actually Text Input, while the Prompt Output component is a Text Output. Changing the name of the component makes them easier to identify when interacting with them.", + "name": "Basic Prompting", + "last_tested_version": "0.6.8", + "is_component": false +} \ No newline at end of file diff --git a/src/backend/langflow/initial_setup/starter_projects/Langflow Data Ingestion.json b/src/backend/langflow/initial_setup/starter_projects/Langflow Data Ingestion.json new file mode 100644 index 000000000..e40ffda57 --- /dev/null +++ b/src/backend/langflow/initial_setup/starter_projects/Langflow Data Ingestion.json @@ -0,0 +1,1087 @@ +{ + "name": "Data Ingestion", + "icon": ":inbox_tray:", + "icon_bg_color": "#FFD700", + "description": "This project is the starting point to insert data into a Vector Store. \n\nWe use the Vector Store Chroma but you can replace it with any other Vector Store. 
\n\nYou start by deciding what type of data you want to load, then you pick a place where you want to store the vectors and run it.\n\nThis will create a vector store in your local environment which you can query using the Chroma Search component.", + "data": { + "nodes": [ + { + "id": "RecursiveCharacterTextSplitter-jwfyG", + "type": "genericNode", + "position": { + "x": 1042.4388767006992, + "y": 633.2204634490822 + }, + "data": { + "type": "RecursiveCharacterTextSplitter", + "node": { + "template": { + "inputs": { + "type": "Document", + "required": true, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "inputs", + "display_name": "Input", + "advanced": false, + "input_types": [ + "Document", + "Record" + ], + "dynamic": false, + "info": "The texts to split.", + "title_case": false + }, + "chunk_overlap": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": 200, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "chunk_overlap", + "display_name": "Chunk Overlap", + "advanced": false, + "dynamic": false, + "info": "The amount of overlap between chunks.", + "title_case": false + }, + "chunk_size": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": 1000, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "chunk_size", + "display_name": "Chunk Size", + "advanced": false, + "dynamic": false, + "info": "The maximum length of each chunk.", + "title_case": false + }, + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from typing import Optional\n\nfrom langchain.text_splitter import RecursiveCharacterTextSplitter\nfrom langchain_core.documents import Document\n\nfrom langflow import CustomComponent\nfrom 
langflow.schema import Record\nfrom langflow.utils.util import build_loader_repr_from_documents\n\n\nclass RecursiveCharacterTextSplitterComponent(CustomComponent):\n display_name: str = \"Recursive Character Text Splitter\"\n description: str = \"Split text into chunks of a specified length.\"\n documentation: str = (\n \"https://docs.langflow.org/components/text-splitters#recursivecharactertextsplitter\"\n )\n\n def build_config(self):\n return {\n \"inputs\": {\n \"display_name\": \"Input\",\n \"info\": \"The texts to split.\",\n \"input_types\": [\"Document\", \"Record\"],\n },\n \"separators\": {\n \"display_name\": \"Separators\",\n \"info\": 'The characters to split on.\\nIf left empty defaults to [\"\\\\n\\\\n\", \"\\\\n\", \" \", \"\"].',\n \"is_list\": True,\n },\n \"chunk_size\": {\n \"display_name\": \"Chunk Size\",\n \"info\": \"The maximum length of each chunk.\",\n \"field_type\": \"int\",\n \"value\": 1000,\n },\n \"chunk_overlap\": {\n \"display_name\": \"Chunk Overlap\",\n \"info\": \"The amount of overlap between chunks.\",\n \"field_type\": \"int\",\n \"value\": 200,\n },\n \"code\": {\"show\": False},\n }\n\n def build(\n self,\n inputs: list[Document],\n separators: Optional[list[str]] = None,\n chunk_size: Optional[int] = 1000,\n chunk_overlap: Optional[int] = 200,\n ) -> list[Record]:\n \"\"\"\n Split text into chunks of a specified length.\n\n Args:\n separators (list[str]): The characters to split on.\n chunk_size (int): The maximum length of each chunk.\n chunk_overlap (int): The amount of overlap between chunks.\n length_function (function): The function to use to calculate the length of the text.\n\n Returns:\n list[str]: The chunks of text.\n \"\"\"\n\n if separators == \"\":\n separators = None\n elif separators:\n # check if the separators list has escaped characters\n # if there are escaped characters, unescape them\n separators = [x.encode().decode(\"unicode-escape\") for x in separators]\n\n # Make sure chunk_size and 
chunk_overlap are ints\n if isinstance(chunk_size, str):\n chunk_size = int(chunk_size)\n if isinstance(chunk_overlap, str):\n chunk_overlap = int(chunk_overlap)\n splitter = RecursiveCharacterTextSplitter(\n separators=separators,\n chunk_size=chunk_size,\n chunk_overlap=chunk_overlap,\n )\n documents = []\n for _input in inputs:\n if isinstance(_input, Record):\n documents.append(_input.to_lc_document())\n else:\n documents.append(_input)\n docs = splitter.split_documents(documents)\n self.repr_value = build_loader_repr_from_documents(docs)\n return self.to_records(docs)\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": false, + "dynamic": true, + "info": "", + "title_case": false + }, + "separators": { + "type": "str", + "required": false, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "separators", + "display_name": "Separators", + "advanced": false, + "dynamic": false, + "info": "The characters to split on.\nIf left empty defaults to [\"\\n\\n\", \"\\n\", \" \", \"\"].", + "title_case": false, + "input_types": [ + "Text" + ], + "value": [ + "\\n" + ] + }, + "_type": "CustomComponent" + }, + "description": "Split text into chunks of a specified length.", + "base_classes": [ + "Record" + ], + "display_name": "Recursive Character Text Splitter", + "documentation": "https://docs.langflow.org/components/text-splitters#recursivecharactertextsplitter", + "custom_fields": { + "inputs": null, + "separators": null, + "chunk_size": null, + "chunk_overlap": null + }, + "output_types": [ + "Record" + ], + "field_formatters": {}, + "frozen": false, + "beta": true + }, + "id": "RecursiveCharacterTextSplitter-jwfyG" + }, + "selected": false, + "width": 384, + "height": 509, + "positionAbsolute": { + "x": 1042.4388767006992, + "y": 633.2204634490822 + }, + "dragging": false + }, + { + "id": "Chroma-aFGHF", + "type": "genericNode", + 
"position": { + "x": 1641.280676720732, + "y": 356.94961598422196 + }, + "data": { + "type": "Chroma", + "node": { + "template": { + "embedding": { + "type": "Embeddings", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "embedding", + "display_name": "Embedding", + "advanced": false, + "dynamic": false, + "info": "", + "title_case": false + }, + "inputs": { + "type": "Record", + "required": false, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "inputs", + "display_name": "Input", + "advanced": false, + "input_types": [ + "Document", + "Record" + ], + "dynamic": false, + "info": "", + "title_case": false + }, + "chroma_server_cors_allow_origins": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "chroma_server_cors_allow_origins", + "display_name": "Server CORS Allow Origins", + "advanced": true, + "dynamic": false, + "info": "", + "title_case": false, + "input_types": [ + "Text" + ] + }, + "chroma_server_grpc_port": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "chroma_server_grpc_port", + "display_name": "Server gRPC Port", + "advanced": true, + "dynamic": false, + "info": "", + "title_case": false + }, + "chroma_server_host": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "chroma_server_host", + "display_name": "Server Host", + "advanced": true, + "dynamic": false, + "info": "", + "title_case": false, + "input_types": [ + "Text" + ] + }, + 
"chroma_server_port": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "chroma_server_port", + "display_name": "Server Port", + "advanced": true, + "dynamic": false, + "info": "", + "title_case": false + }, + "chroma_server_ssl_enabled": { + "type": "bool", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "chroma_server_ssl_enabled", + "display_name": "Server SSL Enabled", + "advanced": true, + "dynamic": false, + "info": "", + "title_case": false + }, + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from typing import List, Optional, Union\n\nimport chromadb # type: ignore\nfrom langchain.embeddings.base import Embeddings\nfrom langchain.schema import BaseRetriever\nfrom langchain_community.vectorstores import VectorStore\nfrom langchain_community.vectorstores.chroma import Chroma\n\nfrom langflow import CustomComponent\nfrom langflow.schema.schema import Record\n\n\nclass ChromaComponent(CustomComponent):\n \"\"\"\n A custom component for implementing a Vector Store using Chroma.\n \"\"\"\n\n display_name: str = \"Chroma\"\n description: str = \"Implementation of Vector Store using Chroma\"\n documentation = \"https://python.langchain.com/docs/integrations/vectorstores/chroma\"\n beta: bool = True\n icon = \"Chroma\"\n\n def build_config(self):\n \"\"\"\n Builds the configuration for the component.\n\n Returns:\n - dict: A dictionary containing the configuration options for the component.\n \"\"\"\n return {\n \"collection_name\": {\"display_name\": \"Collection Name\", \"value\": \"langflow\"},\n \"index_directory\": {\"display_name\": \"Persist Directory\"},\n \"code\": {\"advanced\": True, \"display_name\": 
\"Code\"},\n \"inputs\": {\"display_name\": \"Input\", \"input_types\": [\"Document\", \"Record\"]},\n \"embedding\": {\"display_name\": \"Embedding\"},\n \"chroma_server_cors_allow_origins\": {\n \"display_name\": \"Server CORS Allow Origins\",\n \"advanced\": True,\n },\n \"chroma_server_host\": {\"display_name\": \"Server Host\", \"advanced\": True},\n \"chroma_server_port\": {\"display_name\": \"Server Port\", \"advanced\": True},\n \"chroma_server_grpc_port\": {\n \"display_name\": \"Server gRPC Port\",\n \"advanced\": True,\n },\n \"chroma_server_ssl_enabled\": {\n \"display_name\": \"Server SSL Enabled\",\n \"advanced\": True,\n },\n }\n\n def build(\n self,\n collection_name: str,\n embedding: Embeddings,\n chroma_server_ssl_enabled: bool,\n index_directory: Optional[str] = None,\n inputs: Optional[List[Record]] = None,\n chroma_server_cors_allow_origins: Optional[str] = None,\n chroma_server_host: Optional[str] = None,\n chroma_server_port: Optional[int] = None,\n chroma_server_grpc_port: Optional[int] = None,\n ) -> Union[VectorStore, BaseRetriever]:\n \"\"\"\n Builds the Vector Store or BaseRetriever object.\n\n Args:\n - collection_name (str): The name of the collection.\n - index_directory (Optional[str]): The directory to persist the Vector Store to.\n - chroma_server_ssl_enabled (bool): Whether to enable SSL for the Chroma server.\n - embedding (Optional[Embeddings]): The embeddings to use for the Vector Store.\n - documents (Optional[Document]): The documents to use for the Vector Store.\n - chroma_server_cors_allow_origins (Optional[str]): The CORS allow origins for the Chroma server.\n - chroma_server_host (Optional[str]): The host for the Chroma server.\n - chroma_server_port (Optional[int]): The port for the Chroma server.\n - chroma_server_grpc_port (Optional[int]): The gRPC port for the Chroma server.\n\n Returns:\n - Union[VectorStore, BaseRetriever]: The Vector Store or BaseRetriever object.\n \"\"\"\n\n # Chroma settings\n chroma_settings = 
None\n\n if chroma_server_host is not None:\n chroma_settings = chromadb.config.Settings(\n chroma_server_cors_allow_origins=chroma_server_cors_allow_origins\n or None,\n chroma_server_host=chroma_server_host,\n chroma_server_port=chroma_server_port or None,\n chroma_server_grpc_port=chroma_server_grpc_port or None,\n chroma_server_ssl_enabled=chroma_server_ssl_enabled,\n )\n\n # If documents, then we need to create a Chroma instance using .from_documents\n\n # Check index_directory and expand it if it is a relative path\n if index_directory is not None:\n index_directory = self.resolve_path(index_directory)\n\n documents = []\n for _input in inputs:\n if isinstance(_input, Record):\n documents.append(_input.to_lc_document())\n else:\n documents.append(_input)\n if documents is not None and embedding is not None:\n if len(documents) == 0:\n raise ValueError(\n \"If documents are provided, there must be at least one document.\"\n )\n chroma = Chroma.from_documents(\n documents=documents, # type: ignore\n persist_directory=index_directory,\n collection_name=collection_name,\n embedding=embedding,\n client_settings=chroma_settings,\n )\n else:\n chroma = Chroma(\n persist_directory=index_directory,\n client_settings=chroma_settings,\n embedding_function=embedding,\n )\n return chroma\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "title_case": false + }, + "collection_name": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": "langflow_contrib", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "collection_name", + "display_name": "Collection Name", + "advanced": false, + "dynamic": false, + "info": "", + "title_case": false, + "input_types": [ + "Text" + ] + }, + "index_directory": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": 
false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "index_directory", + "display_name": "Persist Directory", + "advanced": false, + "dynamic": false, + "info": "", + "title_case": false, + "input_types": [ + "Text" + ], + "value": "./chroma_langflow" + }, + "_type": "CustomComponent" + }, + "description": "Implementation of Vector Store using Chroma", + "icon": "Chroma", + "base_classes": [ + "Serializable", + "VectorStore", + "object", + "Runnable", + "BaseRetriever", + "RunnableSerializable", + "Generic" + ], + "display_name": "Chroma", + "documentation": "https://python.langchain.com/docs/integrations/vectorstores/chroma", + "custom_fields": { + "collection_name": null, + "embedding": null, + "chroma_server_ssl_enabled": null, + "index_directory": null, + "inputs": null, + "chroma_server_cors_allow_origins": null, + "chroma_server_host": null, + "chroma_server_port": null, + "chroma_server_grpc_port": null + }, + "output_types": [ + "VectorStore", + "BaseRetriever" + ], + "field_formatters": {}, + "frozen": false, + "beta": true + }, + "id": "Chroma-aFGHF" + }, + "selected": true, + "width": 384, + "height": 495, + "positionAbsolute": { + "x": 1641.280676720732, + "y": 356.94961598422196 + }, + "dragging": false + }, + { + "id": "OpenAIEmbeddings-rbMk3", + "type": "genericNode", + "position": { + "x": 1053.9472627140208, + "y": -2.5921878249999963 + }, + "data": { + "type": "OpenAIEmbeddings", + "node": { + "template": { + "allowed_special": { + "type": "str", + "required": true, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "value": [], + "fileTypes": [], + "file_path": "", + "password": false, + "name": "allowed_special", + "display_name": "Allowed Special", + "advanced": true, + "dynamic": false, + "info": "", + "title_case": false, + "input_types": [ + "Text" + ] + }, + "chunk_size": { + "type": "int", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + 
"value": 1000, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "chunk_size", + "display_name": "Chunk Size", + "advanced": true, + "dynamic": false, + "info": "", + "title_case": false + }, + "client": { + "type": "Any", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "client", + "display_name": "Client", + "advanced": true, + "dynamic": false, + "info": "", + "title_case": false + }, + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from typing import Any, Callable, Dict, List, Optional, Union\n\nfrom langchain_openai.embeddings.base import OpenAIEmbeddings\nfrom langflow import CustomComponent\nfrom langflow.field_typing import NestedDict\nfrom pydantic.v1.types import SecretStr\n\n\nclass OpenAIEmbeddingsComponent(CustomComponent):\n display_name = \"OpenAIEmbeddings\"\n description = \"OpenAI embedding models\"\n\n def build_config(self):\n return {\n \"allowed_special\": {\n \"display_name\": \"Allowed Special\",\n \"advanced\": True,\n \"field_type\": \"str\",\n \"is_list\": True,\n },\n \"default_headers\": {\n \"display_name\": \"Default Headers\",\n \"advanced\": True,\n \"field_type\": \"dict\",\n },\n \"default_query\": {\n \"display_name\": \"Default Query\",\n \"advanced\": True,\n \"field_type\": \"NestedDict\",\n },\n \"disallowed_special\": {\n \"display_name\": \"Disallowed Special\",\n \"advanced\": True,\n \"field_type\": \"str\",\n \"is_list\": True,\n },\n \"chunk_size\": {\"display_name\": \"Chunk Size\", \"advanced\": True},\n \"client\": {\"display_name\": \"Client\", \"advanced\": True},\n \"deployment\": {\"display_name\": \"Deployment\", \"advanced\": True},\n \"embedding_ctx_length\": {\n \"display_name\": \"Embedding Context Length\",\n \"advanced\": True,\n },\n \"max_retries\": {\"display_name\": \"Max Retries\", 
\"advanced\": True},\n \"model\": {\n \"display_name\": \"Model\",\n \"advanced\": False,\n \"options\": [\"text-embedding-3-small\", \"text-embedding-3-large\", \"text-embedding-ada-002\"],\n },\n \"model_kwargs\": {\"display_name\": \"Model Kwargs\", \"advanced\": True},\n \"openai_api_base\": {\"display_name\": \"OpenAI API Base\", \"password\": True, \"advanced\": True},\n \"openai_api_key\": {\"display_name\": \"OpenAI API Key\", \"password\": True},\n \"openai_api_type\": {\"display_name\": \"OpenAI API Type\", \"advanced\": True, \"password\": True},\n \"openai_api_version\": {\n \"display_name\": \"OpenAI API Version\",\n \"advanced\": True,\n },\n \"openai_organization\": {\n \"display_name\": \"OpenAI Organization\",\n \"advanced\": True,\n },\n \"openai_proxy\": {\"display_name\": \"OpenAI Proxy\", \"advanced\": True},\n \"request_timeout\": {\"display_name\": \"Request Timeout\", \"advanced\": True},\n \"show_progress_bar\": {\n \"display_name\": \"Show Progress Bar\",\n \"advanced\": True,\n },\n \"skip_empty\": {\"display_name\": \"Skip Empty\", \"advanced\": True},\n \"tiktoken_model_name\": {\"display_name\": \"TikToken Model Name\"},\n \"tikToken_enable\": {\"display_name\": \"TikToken Enable\", \"advanced\": True},\n }\n\n def build(\n self,\n default_headers: Optional[Dict[str, str]] = None,\n default_query: Optional[NestedDict] = {},\n allowed_special: List[str] = [],\n disallowed_special: List[str] = [\"all\"],\n chunk_size: int = 1000,\n client: Optional[Any] = None,\n deployment: str = \"text-embedding-3-small\",\n embedding_ctx_length: int = 8191,\n max_retries: int = 6,\n model: str = \"text-embedding-3-small\",\n model_kwargs: NestedDict = {},\n openai_api_base: Optional[str] = None,\n openai_api_key: Optional[str] = \"\",\n openai_api_type: Optional[str] = None,\n openai_api_version: Optional[str] = None,\n openai_organization: Optional[str] = None,\n openai_proxy: Optional[str] = None,\n request_timeout: Optional[float] = None,\n 
show_progress_bar: bool = False,\n skip_empty: bool = False,\n tiktoken_enable: bool = True,\n tiktoken_model_name: Optional[str] = None,\n ) -> Union[OpenAIEmbeddings, Callable]:\n # This is to avoid errors with Vector Stores (e.g Chroma)\n if disallowed_special == [\"all\"]:\n disallowed_special = \"all\" # type: ignore\n\n api_key = SecretStr(openai_api_key) if openai_api_key else None\n\n return OpenAIEmbeddings(\n tiktoken_enabled=tiktoken_enable,\n default_headers=default_headers,\n default_query=default_query,\n allowed_special=set(allowed_special),\n disallowed_special=\"all\",\n chunk_size=chunk_size,\n client=client,\n deployment=deployment,\n embedding_ctx_length=embedding_ctx_length,\n max_retries=max_retries,\n model=model,\n model_kwargs=model_kwargs,\n base_url=openai_api_base,\n api_key=api_key,\n openai_api_type=openai_api_type,\n api_version=openai_api_version,\n organization=openai_organization,\n openai_proxy=openai_proxy,\n timeout=request_timeout,\n show_progress_bar=show_progress_bar,\n skip_empty=skip_empty,\n tiktoken_model_name=tiktoken_model_name,\n )\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": false, + "dynamic": true, + "info": "", + "title_case": false + }, + "default_headers": { + "type": "dict", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "default_headers", + "display_name": "Default Headers", + "advanced": true, + "dynamic": false, + "info": "", + "title_case": false + }, + "default_query": { + "type": "NestedDict", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": {}, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "default_query", + "display_name": "Default Query", + "advanced": true, + "dynamic": false, + "info": "", + "title_case": false + }, + "deployment": { + "type": "str", + 
"required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": "text-embedding-3-small", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "deployment", + "display_name": "Deployment", + "advanced": true, + "dynamic": false, + "info": "", + "title_case": false, + "input_types": [ + "Text" + ] + }, + "disallowed_special": { + "type": "str", + "required": true, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "value": [ + "all" + ], + "fileTypes": [], + "file_path": "", + "password": false, + "name": "disallowed_special", + "display_name": "Disallowed Special", + "advanced": true, + "dynamic": false, + "info": "", + "title_case": false, + "input_types": [ + "Text" + ] + }, + "embedding_ctx_length": { + "type": "int", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": 8191, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "embedding_ctx_length", + "display_name": "Embedding Context Length", + "advanced": true, + "dynamic": false, + "info": "", + "title_case": false + }, + "max_retries": { + "type": "int", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": 6, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "max_retries", + "display_name": "Max Retries", + "advanced": true, + "dynamic": false, + "info": "", + "title_case": false + }, + "model": { + "type": "str", + "required": true, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "value": "text-embedding-3-small", + "fileTypes": [], + "file_path": "", + "password": false, + "options": [ + "text-embedding-3-small", + "text-embedding-3-large", + "text-embedding-ada-002" + ], + "name": "model", + "display_name": "Model", + "advanced": false, + "dynamic": false, + "info": "", + "title_case": false, + "input_types": [ + "Text" + ] + }, + "model_kwargs": { + 
"type": "NestedDict", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": {}, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "model_kwargs", + "display_name": "Model Kwargs", + "advanced": true, + "dynamic": false, + "info": "", + "title_case": false + }, + "openai_api_base": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": true, + "name": "openai_api_base", + "display_name": "OpenAI API Base", + "advanced": true, + "dynamic": false, + "info": "", + "title_case": false, + "input_types": [ + "Text" + ], + "value": "" + }, + "openai_api_key": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": "", + "fileTypes": [], + "file_path": "", + "password": true, + "name": "openai_api_key", + "display_name": "OpenAI API Key", + "advanced": false, + "dynamic": false, + "info": "", + "title_case": false, + "input_types": [ + "Text" + ] + }, + "openai_api_type": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": true, + "name": "openai_api_type", + "display_name": "OpenAI API Type", + "advanced": true, + "dynamic": false, + "info": "", + "title_case": false, + "input_types": [ + "Text" + ], + "value": "" + }, + "openai_api_version": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "openai_api_version", + "display_name": "OpenAI API Version", + "advanced": true, + "dynamic": false, + "info": "", + "title_case": false, + "input_types": [ + "Text" + ] + }, + "openai_organization": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": 
true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "openai_organization", + "display_name": "OpenAI Organization", + "advanced": true, + "dynamic": false, + "info": "", + "title_case": false, + "input_types": [ + "Text" + ] + }, + "openai_proxy": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "openai_proxy", + "display_name": "OpenAI Proxy", + "advanced": true, + "dynamic": false, + "info": "", + "title_case": false, + "input_types": [ + "Text" + ] + }, + "request_timeout": { + "type": "float", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "request_timeout", + "display_name": "Request Timeout", + "advanced": true, + "dynamic": false, + "info": "", + "rangeSpec": { + "min": -1, + "max": 1, + "step": 0.1 + }, + "title_case": false + }, + "show_progress_bar": { + "type": "bool", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "show_progress_bar", + "display_name": "Show Progress Bar", + "advanced": true, + "dynamic": false, + "info": "", + "title_case": false + }, + "skip_empty": { + "type": "bool", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "skip_empty", + "display_name": "Skip Empty", + "advanced": true, + "dynamic": false, + "info": "", + "title_case": false + }, + "tiktoken_enable": { + "type": "bool", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": true, + "fileTypes": [], + "file_path": "", + "password": false, + "name": 
"tiktoken_enable", + "advanced": false, + "dynamic": false, + "info": "", + "title_case": false + }, + "tiktoken_model_name": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "tiktoken_model_name", + "display_name": "TikToken Model Name", + "advanced": false, + "dynamic": false, + "info": "", + "title_case": false, + "input_types": [ + "Text" + ] + }, + "_type": "CustomComponent" + }, + "description": "OpenAI embedding models", + "base_classes": [ + "Embeddings", + "OpenAIEmbeddings", + "Callable" + ], + "display_name": "OpenAIEmbeddings", + "documentation": "", + "custom_fields": { + "default_headers": null, + "default_query": null, + "allowed_special": null, + "disallowed_special": null, + "chunk_size": null, + "client": null, + "deployment": null, + "embedding_ctx_length": null, + "max_retries": null, + "model": null, + "model_kwargs": null, + "openai_api_base": null, + "openai_api_key": null, + "openai_api_type": null, + "openai_api_version": null, + "openai_organization": null, + "openai_proxy": null, + "request_timeout": null, + "show_progress_bar": null, + "skip_empty": null, + "tiktoken_enable": null, + "tiktoken_model_name": null + }, + "output_types": [ + "OpenAIEmbeddings", + "Callable" + ], + "field_formatters": {}, + "frozen": false, + "beta": true + }, + "id": "OpenAIEmbeddings-rbMk3" + }, + "selected": false, + "width": 384, + "height": 573, + "positionAbsolute": { + "x": 1053.9472627140208, + "y": -2.5921878249999963 + }, + "dragging": false + }, + { + "id": "URL-5zjQH", + "type": "genericNode", + "position": { + "x": 567.0838444398559, + "y": 596.6568151511171 + }, + "data": { + "type": "URL", + "node": { + "template": { + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from typing import Any, Dict\n\nfrom 
langchain_community.document_loaders.web_base import WebBaseLoader\n\nfrom langflow import CustomComponent\nfrom langflow.schema import Record\n\n\nclass URLComponent(CustomComponent):\n display_name = \"URL\"\n description = \"Load URLs and convert them to records.\"\n\n def build_config(self) -> Dict[str, Any]:\n return {\n \"urls\": {\"display_name\": \"URL\"},\n }\n\n async def build(\n self,\n urls: list[str],\n ) -> Record:\n\n loader = WebBaseLoader(web_paths=urls)\n docs = loader.load()\n records = self.to_records(docs)\n return records\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": false, + "dynamic": true, + "info": "", + "title_case": false + }, + "urls": { + "type": "str", + "required": true, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "urls", + "display_name": "URL", + "advanced": false, + "dynamic": false, + "info": "", + "title_case": false, + "input_types": [ + "Text" + ], + "value": [ + "https://raw.githubusercontent.com/logspace-ai/langflow/dev/CONTRIBUTING.md" + ] + }, + "_type": "CustomComponent" + }, + "description": "Load URLs and convert them to records.", + "base_classes": [ + "Record" + ], + "display_name": "URL", + "documentation": "", + "custom_fields": { + "urls": null + }, + "output_types": [ + "Record" + ], + "field_formatters": {}, + "frozen": false, + "beta": true + }, + "id": "URL-5zjQH" + }, + "selected": false, + "width": 384, + "height": 289, + "dragging": false, + "positionAbsolute": { + "x": 567.0838444398559, + "y": 596.6568151511171 + } + } + ], + "edges": [ + { + "source": "RecursiveCharacterTextSplitter-jwfyG", + "sourceHandle": "{œbaseClassesœ:[œRecordœ],œdataTypeœ:œRecursiveCharacterTextSplitterœ,œidœ:œRecursiveCharacterTextSplitter-jwfyGœ}", + "target": "Chroma-aFGHF", + "targetHandle": 
"{œfieldNameœ:œinputsœ,œidœ:œChroma-aFGHFœ,œinputTypesœ:[œDocumentœ,œRecordœ],œtypeœ:œRecordœ}", + "data": { + "targetHandle": { + "fieldName": "inputs", + "id": "Chroma-aFGHF", + "inputTypes": [ + "Document", + "Record" + ], + "type": "Record" + }, + "sourceHandle": { + "baseClasses": [ + "Record" + ], + "dataType": "RecursiveCharacterTextSplitter", + "id": "RecursiveCharacterTextSplitter-jwfyG" + } + }, + "style": { + "stroke": "#555" + }, + "className": "stroke-gray-900 stroke-connection", + "id": "reactflow__edge-RecursiveCharacterTextSplitter-jwfyG{œbaseClassesœ:[œRecordœ],œdataTypeœ:œRecursiveCharacterTextSplitterœ,œidœ:œRecursiveCharacterTextSplitter-jwfyGœ}-Chroma-aFGHF{œfieldNameœ:œinputsœ,œidœ:œChroma-aFGHFœ,œinputTypesœ:[œDocumentœ,œRecordœ],œtypeœ:œRecordœ}" + }, + { + "source": "OpenAIEmbeddings-rbMk3", + "sourceHandle": "{œbaseClassesœ:[œEmbeddingsœ,œOpenAIEmbeddingsœ,œCallableœ],œdataTypeœ:œOpenAIEmbeddingsœ,œidœ:œOpenAIEmbeddings-rbMk3œ}", + "target": "Chroma-aFGHF", + "targetHandle": "{œfieldNameœ:œembeddingœ,œidœ:œChroma-aFGHFœ,œinputTypesœ:null,œtypeœ:œEmbeddingsœ}", + "data": { + "targetHandle": { + "fieldName": "embedding", + "id": "Chroma-aFGHF", + "inputTypes": null, + "type": "Embeddings" + }, + "sourceHandle": { + "baseClasses": [ + "Embeddings", + "OpenAIEmbeddings", + "Callable" + ], + "dataType": "OpenAIEmbeddings", + "id": "OpenAIEmbeddings-rbMk3" + } + }, + "style": { + "stroke": "#555" + }, + "className": "stroke-gray-900 stroke-connection", + "id": "reactflow__edge-OpenAIEmbeddings-rbMk3{œbaseClassesœ:[œEmbeddingsœ,œOpenAIEmbeddingsœ,œCallableœ],œdataTypeœ:œOpenAIEmbeddingsœ,œidœ:œOpenAIEmbeddings-rbMk3œ}-Chroma-aFGHF{œfieldNameœ:œembeddingœ,œidœ:œChroma-aFGHFœ,œinputTypesœ:null,œtypeœ:œEmbeddingsœ}" + }, + { + "source": "URL-5zjQH", + "sourceHandle": "{œbaseClassesœ:[œRecordœ],œdataTypeœ:œURLœ,œidœ:œURL-5zjQHœ}", + "target": "RecursiveCharacterTextSplitter-jwfyG", + "targetHandle": 
"{œfieldNameœ:œinputsœ,œidœ:œRecursiveCharacterTextSplitter-jwfyGœ,œinputTypesœ:[œDocumentœ,œRecordœ],œtypeœ:œDocumentœ}", + "data": { + "targetHandle": { + "fieldName": "inputs", + "id": "RecursiveCharacterTextSplitter-jwfyG", + "inputTypes": [ + "Document", + "Record" + ], + "type": "Document" + }, + "sourceHandle": { + "baseClasses": [ + "Record" + ], + "dataType": "URL", + "id": "URL-5zjQH" + } + }, + "style": { + "stroke": "#555" + }, + "className": "stroke-foreground stroke-connection", + "id": "reactflow__edge-URL-5zjQH{œbaseClassesœ:[œRecordœ],œdataTypeœ:œURLœ,œidœ:œURL-5zjQHœ}-RecursiveCharacterTextSplitter-jwfyG{œfieldNameœ:œinputsœ,œidœ:œRecursiveCharacterTextSplitter-jwfyGœ,œinputTypesœ:[œDocumentœ,œRecordœ],œtypeœ:œDocumentœ}" + } + ], + "viewport": { + "x": -160.3219973143573, + "y": 117.63775645863632, + "zoom": 0.48903173672366845 + } + }, + "is_component": false, + "updated_at": "2024-03-05T21:59:59.738081", + "folder": null, + "id": "7f90dc54-717d-49fe-a43f-c4dc055daa4e", + "user_id": "9365dbda-e8cf-4e95-8c84-49f8b6edb44f" +} \ No newline at end of file diff --git a/src/backend/langflow/interface/custom/code_parser/code_parser.py b/src/backend/langflow/interface/custom/code_parser/code_parser.py index 846edc716..eaff42fab 100644 --- a/src/backend/langflow/interface/custom/code_parser/code_parser.py +++ b/src/backend/langflow/interface/custom/code_parser/code_parser.py @@ -95,9 +95,7 @@ class CodeParser: elif isinstance(node, ast.ImportFrom): for alias in node.names: if alias.asname: - self.data["imports"].append( - (node.module, f"{alias.name} as {alias.asname}") - ) + self.data["imports"].append((node.module, f"{alias.name} as {alias.asname}")) else: self.data["imports"].append((node.module, alias.name)) @@ -146,9 +144,7 @@ class CodeParser: return_type = None if node.returns: return_type_str = ast.unparse(node.returns) - eval_env = self.construct_eval_env( - return_type_str, tuple(self.data["imports"]) - ) + eval_env = 
self.construct_eval_env(return_type_str, tuple(self.data["imports"])) try: return_type = eval(return_type_str, eval_env) @@ -190,22 +186,14 @@ class CodeParser: num_defaults = len(node.args.defaults) num_missing_defaults = num_args - num_defaults missing_defaults = [None] * num_missing_defaults - default_values = [ - ast.unparse(default).strip("'") if default else None - for default in node.args.defaults - ] + default_values = [ast.unparse(default).strip("'") if default else None for default in node.args.defaults] # Now check all default values to see if there # are any "None" values in the middle - default_values = [ - None if value == "None" else value for value in default_values - ] + default_values = [None if value == "None" else value for value in default_values] defaults = missing_defaults + default_values - args = [ - self.parse_arg(arg, default) - for arg, default in zip(node.args.args, defaults) - ] + args = [self.parse_arg(arg, default) for arg, default in zip(node.args.args, defaults)] return args def parse_varargs(self, node: ast.FunctionDef) -> List[Dict[str, Any]]: @@ -223,17 +211,11 @@ class CodeParser: """ Parses the keyword-only arguments of a function or method node. """ - kw_defaults = [None] * ( - len(node.args.kwonlyargs) - len(node.args.kw_defaults) - ) + [ - ast.unparse(default) if default else None - for default in node.args.kw_defaults + kw_defaults = [None] * (len(node.args.kwonlyargs) - len(node.args.kw_defaults)) + [ + ast.unparse(default) if default else None for default in node.args.kw_defaults ] - args = [ - self.parse_arg(arg, default) - for arg, default in zip(node.args.kwonlyargs, kw_defaults) - ] + args = [self.parse_arg(arg, default) for arg, default in zip(node.args.kwonlyargs, kw_defaults)] return args def parse_kwargs(self, node: ast.FunctionDef) -> List[Dict[str, Any]]: @@ -337,9 +319,7 @@ class CodeParser: Extracts global variables from the code. 
""" global_var = { - "targets": [ - t.id if hasattr(t, "id") else ast.dump(t) for t in node.targets - ], + "targets": [t.id if hasattr(t, "id") else ast.dump(t) for t in node.targets], "value": ast.unparse(node.value), } self.data["global_vars"].append(global_var) diff --git a/src/backend/langflow/interface/custom/custom_component/component.py b/src/backend/langflow/interface/custom/custom_component/component.py index a889fa7b9..ce40b0f74 100644 --- a/src/backend/langflow/interface/custom/custom_component/component.py +++ b/src/backend/langflow/interface/custom/custom_component/component.py @@ -21,9 +21,7 @@ class ComponentFunctionEntrypointNameNullError(HTTPException): class Component: ERROR_CODE_NULL: ClassVar[str] = "Python code must be provided." - ERROR_FUNCTION_ENTRYPOINT_NAME_NULL: ClassVar[str] = ( - "The name of the entrypoint function must be provided." - ) + ERROR_FUNCTION_ENTRYPOINT_NAME_NULL: ClassVar[str] = "The name of the entrypoint function must be provided." code: Optional[str] = None _function_entrypoint_name: str = "build" diff --git a/src/backend/langflow/interface/custom/custom_component/custom_component.py b/src/backend/langflow/interface/custom/custom_component/custom_component.py index dde147d77..f0c3bfa80 100644 --- a/src/backend/langflow/interface/custom/custom_component/custom_component.py +++ b/src/backend/langflow/interface/custom/custom_component/custom_component.py @@ -1,7 +1,15 @@ import operator from pathlib import Path -from typing import (TYPE_CHECKING, Any, Callable, ClassVar, List, Optional, - Sequence, Union) +from typing import ( + TYPE_CHECKING, + Any, + Callable, + ClassVar, + List, + Optional, + Sequence, + Union, +) from uuid import UUID import yaml @@ -12,13 +20,17 @@ from sqlmodel import select from langflow.interface.custom.code_parser.utils import ( extract_inner_type_from_generic_alias, - extract_union_types_from_generic_alias) + extract_union_types_from_generic_alias, +) from 
langflow.interface.custom.custom_component.component import Component from langflow.schema import Record from langflow.services.database.models.flow import Flow from langflow.services.database.utils import session_getter -from langflow.services.deps import (get_credential_service, get_db_service, - get_storage_service) +from langflow.services.deps import ( + get_credential_service, + get_db_service, + get_storage_service, +) from langflow.services.storage.service import StorageService from langflow.utils import validate @@ -65,17 +77,13 @@ class CustomComponent(Component): def update_state(self, name: str, value: Any): try: - self.vertex.graph.update_state( - name=name, record=value, caller=self.vertex.id - ) + self.vertex.graph.update_state(name=name, record=value, caller=self.vertex.id) except Exception as e: raise ValueError(f"Error updating state: {e}") def append_state(self, name: str, value: Any): try: - self.vertex.graph.append_state( - name=name, record=value, caller=self.vertex.id - ) + self.vertex.graph.append_state(name=name, record=value, caller=self.vertex.id) except Exception as e: raise ValueError(f"Error appending state: {e}") @@ -134,9 +142,7 @@ class CustomComponent(Component): def tree(self): return self.get_code_tree(self.code or "") - def to_records( - self, data: Any, text_key: str = "text", data_key: str = "data" - ) -> List[Record]: + def to_records(self, data: Any, keys: Optional[List[str]] = None, silent_errors: bool = False) -> List[Record]: """ Converts input data into a list of Record objects. @@ -144,8 +150,9 @@ class CustomComponent(Component): data (Any): The input data to be converted. It can be a single item or a sequence of items. If the input data is a Langchain Document, text_key and data_key are ignored. - text_key (str, optional): The key to access the text value in each item. Defaults to "text". - data_key (str, optional): The key to access the data value in each item. Defaults to "data". 
+ keys (List[str], optional): The keys to access the text and data values in each item. + It should be a list of strings where the first element is the text key and the second element is the data key. + Defaults to None, in which case the default keys "text" and "data" are used. Returns: List[Record]: A list of Record objects. @@ -158,33 +165,33 @@ class CustomComponent(Component): if not isinstance(data, Sequence): data = [data] for item in data: + data_dict = {} if isinstance(item, Document): - item = {"text": item.page_content, "data": item.metadata} + data_dict = item.metadata + data_dict["text"] = item.page_content elif isinstance(item, BaseModel): model_dump = item.model_dump() - if text_key not in model_dump: - raise ValueError(f"Key '{text_key}' not found in BaseModel item.") - if data_key not in model_dump: - raise ValueError(f"Key '{data_key}' not found in BaseModel item.") - item = {"text": model_dump[text_key], "data": model_dump[data_key]} + for key in keys: + if silent_errors: + data_dict[key] = model_dump.get(key, "") + else: + try: + data_dict[key] = model_dump[key] + except KeyError: + raise ValueError(f"Key {key} not found in {item}") + elif isinstance(item, str): - item = {"text": item, "data": {}} + data_dict = {"text": item} elif isinstance(item, dict): - if text_key not in item: - raise ValueError(f"Key '{text_key}' not found in dictionary item.") - if data_key not in item: - raise ValueError(f"Key '{data_key}' not found in dictionary item.") - item = {"text": item[text_key], "data": item[data_key]} + data_dict = item.copy() else: raise ValueError(f"Invalid data type: {type(item)}") - records.append(Record(**item)) + records.append(Record(data=data_dict)) return records - def create_references_from_records( - self, records: List[Record], include_data: bool = False - ) -> str: + def create_references_from_records(self, records: List[Record], include_data: bool = False) -> str: """ Create references from a list of records. 
@@ -223,20 +230,14 @@ class CustomComponent(Component): if not self.code: return {} - component_classes = [ - cls - for cls in self.tree["classes"] - if self.code_class_base_inheritance in cls["bases"] - ] + component_classes = [cls for cls in self.tree["classes"] if self.code_class_base_inheritance in cls["bases"]] if not component_classes: return {} # Assume the first Component class is the one we're interested in component_class = component_classes[0] build_methods = [ - method - for method in component_class["methods"] - if method["name"] == self.function_entrypoint_name + method for method in component_class["methods"] if method["name"] == self.function_entrypoint_name ] return build_methods[0] if build_methods else {} @@ -293,9 +294,7 @@ class CustomComponent(Component): # Retrieve and decrypt the credential by name for the current user db_service = get_db_service() with session_getter(db_service) as session: - return credential_service.get_credential( - user_id=self._user_id or "", name=name, session=session - ) + return credential_service.get_credential(user_id=self._user_id or "", name=name, session=session) return get_credential @@ -305,9 +304,7 @@ class CustomComponent(Component): credential_service = get_credential_service() db_service = get_db_service() with session_getter(db_service) as session: - return credential_service.list_credentials( - user_id=self._user_id, session=session - ) + return credential_service.list_credentials(user_id=self._user_id, session=session) def index(self, value: int = 0): """Returns a function that returns the value at the given index in the iterable.""" @@ -346,11 +343,7 @@ class CustomComponent(Component): if not self._flows_records: self.list_flows() if not flow_id and self._flows_records: - flow_ids = [ - flow.data["id"] - for flow in self._flows_records - if flow.data["name"] == flow_name - ] + flow_ids = [flow.data["id"] for flow in self._flows_records if flow.data["name"] == flow_name] if not flow_ids: raise 
ValueError(f"Flow {flow_name} not found") elif len(flow_ids) > 1: @@ -372,9 +365,7 @@ class CustomComponent(Component): db_service = get_db_service() with get_session(db_service) as session: flows = session.exec( - select(Flow) - .where(Flow.user_id == self._user_id) - .where(Flow.is_component == False) + select(Flow).where(Flow.user_id == self._user_id).where(Flow.is_component == False) # noqa ).all() flows_records = [flow.to_record() for flow in flows] diff --git a/src/backend/langflow/interface/custom/directory_reader/directory_reader.py b/src/backend/langflow/interface/custom/directory_reader/directory_reader.py index 448e3c485..5acc15131 100644 --- a/src/backend/langflow/interface/custom/directory_reader/directory_reader.py +++ b/src/backend/langflow/interface/custom/directory_reader/directory_reader.py @@ -80,13 +80,9 @@ class DirectoryReader: except Exception as e: logger.error(f"Error while loading component: {e}") continue - items.append( - {"name": menu["name"], "path": menu["path"], "components": components} - ) + items.append({"name": menu["name"], "path": menu["path"], "components": components}) filtered = [menu for menu in items if menu["components"]] - logger.debug( - f'Filtered components {"with errors" if with_errors else ""}: {len(filtered)}' - ) + logger.debug(f'Filtered components {"with errors" if with_errors else ""}: {len(filtered)}') return {"menu": filtered} def validate_code(self, file_content): @@ -119,9 +115,7 @@ class DirectoryReader: Walk through the directory path and return a list of all .py files. """ if not (safe_path := self.get_safe_path()): - raise CustomComponentPathValueError( - f"The path needs to start with '{self.base_path}'." 
- ) + raise CustomComponentPathValueError(f"The path needs to start with '{self.base_path}'.") file_list = [] safe_path_obj = Path(safe_path) @@ -131,11 +125,7 @@ class DirectoryReader: # any folders below [folder] will be ignored # basically the parent folder of the file should be a # folder in the safe_path - if ( - file_path.is_file() - and file_path.parent.parent == safe_path_obj - and not file_path.name.startswith("__") - ): + if file_path.is_file() and file_path.parent.parent == safe_path_obj and not file_path.name.startswith("__"): file_list.append(str(file_path)) return file_list @@ -173,9 +163,7 @@ class DirectoryReader: for node in ast.walk(module): if isinstance(node, ast.FunctionDef): for arg in node.args.args: - if self._is_type_hint_in_arg_annotation( - arg.annotation, type_hint_name - ): + if self._is_type_hint_in_arg_annotation(arg.annotation, type_hint_name): return True except SyntaxError: # Returns False if the code is not valid Python @@ -193,16 +181,14 @@ class DirectoryReader: and annotation.value.id == type_hint_name ) - def is_type_hint_used_but_not_imported( - self, type_hint_name: str, code: str - ) -> bool: + def is_type_hint_used_but_not_imported(self, type_hint_name: str, code: str) -> bool: """ Check if a type hint is used but not imported in the given code. 
""" try: - return self._is_type_hint_used_in_args( + return self._is_type_hint_used_in_args(type_hint_name, code) and not self._is_type_hint_imported( type_hint_name, code - ) and not self._is_type_hint_imported(type_hint_name, code) + ) except SyntaxError: # Returns True if there's something wrong with the code # TODO : Find a better way to handle this @@ -223,9 +209,9 @@ class DirectoryReader: return False, "Syntax error" elif not self.validate_build(file_content): return False, "Missing build function" - elif self._is_type_hint_used_in_args( + elif self._is_type_hint_used_in_args("Optional", file_content) and not self._is_type_hint_imported( "Optional", file_content - ) and not self._is_type_hint_imported("Optional", file_content): + ): return ( False, "Type hint 'Optional' is used but not imported in the code.", @@ -241,18 +227,14 @@ class DirectoryReader: from the .py files in the directory. """ response = {"menu": []} - logger.debug( - "-------------------- Building component menu list --------------------" - ) + logger.debug("-------------------- Building component menu list --------------------") for file_path in file_paths: menu_name = os.path.basename(os.path.dirname(file_path)) filename = os.path.basename(file_path) validation_result, result_content = self.process_file(file_path) if not validation_result: - logger.error( - f"Error while processing file {file_path}: {result_content}" - ) + logger.error(f"Error while processing file {file_path}: {result_content}") menu_result = self.find_menu(response, menu_name) or { "name": menu_name, @@ -265,9 +247,7 @@ class DirectoryReader: # first check if it's already CamelCase if "_" in component_name: - component_name_camelcase = " ".join( - word.title() for word in component_name.split("_") - ) + component_name_camelcase = " ".join(word.title() for word in component_name.split("_")) else: component_name_camelcase = component_name @@ -275,9 +255,7 @@ class DirectoryReader: try: output_types = 
self.get_output_types_from_code(result_content) except Exception as exc: - logger.exception( - f"Error while getting output types from code: {str(exc)}" - ) + logger.exception(f"Error while getting output types from code: {str(exc)}") output_types = [component_name_camelcase] else: output_types = [component_name_camelcase] @@ -293,9 +271,7 @@ class DirectoryReader: if menu_result not in response["menu"]: response["menu"].append(menu_result) - logger.debug( - "-------------------- Component menu list built --------------------" - ) + logger.debug("-------------------- Component menu list built --------------------") return response @staticmethod diff --git a/src/backend/langflow/interface/custom/utils.py b/src/backend/langflow/interface/custom/utils.py index 513669f75..389f24ffd 100644 --- a/src/backend/langflow/interface/custom/utils.py +++ b/src/backend/langflow/interface/custom/utils.py @@ -32,18 +32,14 @@ class UpdateBuildConfigError(Exception): pass -def add_output_types( - frontend_node: CustomComponentFrontendNode, return_types: List[str] -): +def add_output_types(frontend_node: CustomComponentFrontendNode, return_types: List[str]): """Add output types to the frontend node""" for return_type in return_types: if return_type is None: raise HTTPException( status_code=400, detail={ - "error": ( - "Invalid return type. Please check your code and try again." - ), + "error": ("Invalid return type. 
Please check your code and try again."), "traceback": traceback.format_exc(), }, ) @@ -72,20 +68,17 @@ def reorder_fields(frontend_node: CustomComponentFrontendNode, field_order: List if field.name not in field_order: reordered_fields.append(field) frontend_node.template.fields = reordered_fields + frontend_node.field_order = field_order -def add_base_classes( - frontend_node: CustomComponentFrontendNode, return_types: List[str] -): +def add_base_classes(frontend_node: CustomComponentFrontendNode, return_types: List[str]): """Add base classes to the frontend node""" for return_type_instance in return_types: if return_type_instance is None: raise HTTPException( status_code=400, detail={ - "error": ( - "Invalid return type. Please check your code and try again." - ), + "error": ("Invalid return type. Please check your code and try again."), "traceback": traceback.format_exc(), }, ) @@ -162,14 +155,10 @@ def add_new_custom_field( # If options is a list, then it's a dropdown # If options is None, then it's a list of strings is_list = isinstance(field_config.get("options"), list) - field_config["is_list"] = ( - is_list or field_config.get("is_list", False) or field_contains_list - ) + field_config["is_list"] = is_list or field_config.get("is_list", False) or field_contains_list if "name" in field_config: - warnings.warn( - "The 'name' key in field_config is used to build the object and can't be changed." 
- ) + warnings.warn("The 'name' key in field_config is used to build the object and can't be changed.") required = field_config.pop("required", field_required) placeholder = field_config.pop("placeholder", "") @@ -208,9 +197,7 @@ def add_extra_fields(frontend_node, field_config, function_args): ]: continue - field_name, field_type, field_value, field_required = get_field_properties( - extra_field - ) + field_name, field_type, field_value, field_required = get_field_properties(extra_field) config = _field_config.pop(field_name, {}) frontend_node = add_new_custom_field( frontend_node, @@ -220,15 +207,13 @@ def add_extra_fields(frontend_node, field_config, function_args): field_required, config, ) - if "kwargs" in function_args_names and not all( - key in function_args_names for key in field_config.keys() - ): + if "kwargs" in function_args_names and not all(key in function_args_names for key in field_config.keys()): for field_name, field_config in _field_config.copy().items(): + if "name" not in field_config or field_name == "code": + continue config = _field_config.get(field_name, {}) config = config.model_dump() if isinstance(config, BaseModel) else config - field_name, field_type, field_value, field_required = get_field_properties( - extra_field=config - ) + field_name, field_type, field_value, field_required = get_field_properties(extra_field=config) frontend_node = add_new_custom_field( frontend_node, field_name, @@ -266,9 +251,7 @@ def run_build_config( raise HTTPException( status_code=400, detail={ - "error": ( - "Invalid type convertion. Please check your code and try again." - ), + "error": ("Invalid type convertion. 
Please check your code and try again."), "traceback": traceback.format_exc(), }, ) from exc @@ -380,16 +363,10 @@ def build_custom_component_template( add_extra_fields(frontend_node, field_config, entrypoint_args) - frontend_node = add_code_field( - frontend_node, custom_component.code, field_config.get("code", {}) - ) + frontend_node = add_code_field(frontend_node, custom_component.code, field_config.get("code", {})) - add_base_classes( - frontend_node, custom_component.get_function_entrypoint_return_type - ) - add_output_types( - frontend_node, custom_component.get_function_entrypoint_return_type - ) + add_base_classes(frontend_node, custom_component.get_function_entrypoint_return_type) + add_output_types(frontend_node, custom_component.get_function_entrypoint_return_type) reorder_fields(frontend_node, custom_instance._get_field_order()) @@ -400,9 +377,7 @@ def build_custom_component_template( raise HTTPException( status_code=400, detail={ - "error": ( - "Invalid type convertion. Please check your code and try again." - ), + "error": ("Invalid type convertion. 
Please check your code and try again."), "traceback": traceback.format_exc(), }, ) from exc @@ -428,9 +403,7 @@ def build_custom_components(settings_service): if not settings_service.settings.COMPONENTS_PATH: return {} - logger.info( - f"Building custom components from {settings_service.settings.COMPONENTS_PATH}" - ) + logger.info(f"Building custom components from {settings_service.settings.COMPONENTS_PATH}") custom_components_from_file = {} processed_paths = set() for path in settings_service.settings.COMPONENTS_PATH: @@ -441,9 +414,7 @@ def build_custom_components(settings_service): custom_component_dict = build_custom_component_list_from_path(path_str) if custom_component_dict: category = next(iter(custom_component_dict)) - logger.info( - f"Loading {len(custom_component_dict[category])} component(s) from category {category}" - ) + logger.info(f"Loading {len(custom_component_dict[category])} component(s) from category {category}") custom_components_from_file = merge_nested_dicts_with_renaming( custom_components_from_file, custom_component_dict ) @@ -464,14 +435,10 @@ def update_field_dict( if "refresh" in field_dict: if call: try: - custom_component_instance.update_build_config( - build_config, update_field, update_field_value - ) + custom_component_instance.update_build_config(build_config, update_field, update_field_value) except Exception as exc: logger.error(f"Error while running update_build_config: {str(exc)}") - raise UpdateBuildConfigError( - f"Error while running update_build_config: {str(exc)}" - ) from exc + raise UpdateBuildConfigError(f"Error while running update_build_config: {str(exc)}") from exc field_dict["refresh"] = True # Let's check if "range_spec" is a RangeSpec object diff --git a/src/backend/langflow/interface/initialize/loading.py b/src/backend/langflow/interface/initialize/loading.py index 7bd8e34f4..fd6f27db6 100644 --- a/src/backend/langflow/interface/initialize/loading.py +++ b/src/backend/langflow/interface/initialize/loading.py @@ 
-144,13 +144,9 @@ async def instantiate_based_on_type( return class_object(**params) -async def instantiate_custom_component( - node_type, class_object, params, user_id, vertex -): +async def instantiate_custom_component(node_type, class_object, params, user_id, vertex): params_copy = params.copy() - class_object: Type["CustomComponent"] = eval_custom_component_code( - params_copy.pop("code") - ) + class_object: Type["CustomComponent"] = eval_custom_component_code(params_copy.pop("code")) custom_component: "CustomComponent" = class_object( user_id=user_id, parameters=params_copy, @@ -226,9 +222,7 @@ def instantiate_memory(node_type, class_object, params): # I want to catch a specific attribute error that happens # when the object does not have a cursor attribute except Exception as exc: - if "object has no attribute 'cursor'" in str( - exc - ) or 'object has no field "conn"' in str(exc): + if "object has no attribute 'cursor'" in str(exc) or 'object has no field "conn"' in str(exc): raise AttributeError( ( "Failed to build connection to database." 
@@ -271,9 +265,7 @@ def instantiate_agent(node_type, class_object: Type[agent_module.Agent], params: if class_method := getattr(class_object, method, None): agent = class_method(**params) tools = params.get("tools", []) - return AgentExecutor.from_agent_and_tools( - agent=agent, tools=tools, handle_parsing_errors=True - ) + return AgentExecutor.from_agent_and_tools(agent=agent, tools=tools, handle_parsing_errors=True) return load_agent_executor(class_object, params) @@ -329,11 +321,7 @@ def instantiate_embedding(node_type, class_object, params: Dict): try: return class_object(**params) except ValidationError: - params = { - key: value - for key, value in params.items() - if key in class_object.model_fields - } + params = {key: value for key, value in params.items() if key in class_object.model_fields} return class_object(**params) @@ -345,9 +333,7 @@ def instantiate_vectorstore(class_object: Type[VectorStore], params: Dict): if "texts" in params: params["documents"] = params.pop("texts") if "documents" in params: - params["documents"] = [ - doc for doc in params["documents"] if isinstance(doc, Document) - ] + params["documents"] = [doc for doc in params["documents"] if isinstance(doc, Document)] if initializer := vecstore_initializer.get(class_object.__name__): vecstore = initializer(class_object, params) else: @@ -362,9 +348,7 @@ def instantiate_vectorstore(class_object: Type[VectorStore], params: Dict): return vecstore -def instantiate_documentloader( - node_type: str, class_object: Type[BaseLoader], params: Dict -): +def instantiate_documentloader(node_type: str, class_object: Type[BaseLoader], params: Dict): if "file_filter" in params: # file_filter will be a string but we need a function # that will be used to filter the files using file_filter @@ -373,17 +357,13 @@ def instantiate_documentloader( # in x and if it is, we will return True file_filter = params.pop("file_filter") extensions = file_filter.split(",") - params["file_filter"] = lambda x: any( - 
extension.strip() in x for extension in extensions - ) + params["file_filter"] = lambda x: any(extension.strip() in x for extension in extensions) metadata = params.pop("metadata", None) if metadata and isinstance(metadata, str): try: metadata = orjson.loads(metadata) except json.JSONDecodeError as exc: - raise ValueError( - "The metadata you provided is not a valid JSON string." - ) from exc + raise ValueError("The metadata you provided is not a valid JSON string.") from exc if node_type == "WebBaseLoader": if web_path := params.pop("web_path", None): @@ -416,16 +396,12 @@ def instantiate_textsplitter( "Try changing the chunk_size of the Text Splitter." ) from exc - if ( - "separator_type" in params and params["separator_type"] == "Text" - ) or "separator_type" not in params: + if ("separator_type" in params and params["separator_type"] == "Text") or "separator_type" not in params: params.pop("separator_type", None) # separators might come in as an escaped string like \\n # so we need to convert it to a string if "separators" in params: - params["separators"] = ( - params["separators"].encode().decode("unicode-escape") - ) + params["separators"] = params["separators"].encode().decode("unicode-escape") text_splitter = class_object(**params) else: from langchain.text_splitter import Language @@ -452,8 +428,7 @@ def replace_zero_shot_prompt_with_prompt_template(nodes): tools = [ tool for tool in nodes - if tool["type"] != "chatOutputNode" - and "Tool" in tool["data"]["node"]["base_classes"] + if tool["type"] != "chatOutputNode" and "Tool" in tool["data"]["node"]["base_classes"] ] node["data"] = build_prompt_template(prompt=node["data"], tools=tools) break @@ -467,9 +442,7 @@ def load_agent_executor(agent_class: type[agent_module.Agent], params, **kwargs) # agent has hidden args for memory. 
might need to be support # memory = params["memory"] # if allowed_tools is not a list or set, make it a list - if not isinstance(allowed_tools, (list, set)) and isinstance( - allowed_tools, BaseTool - ): + if not isinstance(allowed_tools, (list, set)) and isinstance(allowed_tools, BaseTool): allowed_tools = [allowed_tools] tool_names = [tool.name for tool in allowed_tools] # Agent class requires an output_parser but Agent classes @@ -497,10 +470,7 @@ def build_prompt_template(prompt, tools): format_instructions = prompt["node"]["template"]["format_instructions"]["value"] tool_strings = "\n".join( - [ - f"{tool['data']['node']['name']}: {tool['data']['node']['description']}" - for tool in tools - ] + [f"{tool['data']['node']['name']}: {tool['data']['node']['description']}" for tool in tools] ) tool_names = ", ".join([tool["data"]["node"]["name"] for tool in tools]) format_instructions = format_instructions.format(tool_names=tool_names) diff --git a/src/backend/langflow/interface/types.py b/src/backend/langflow/interface/types.py index 9a1375869..e0e7c6c59 100644 --- a/src/backend/langflow/interface/types.py +++ b/src/backend/langflow/interface/types.py @@ -66,6 +66,4 @@ def get_all_types_dict(settings_service): """Get all types dictionary combining native and custom components.""" native_components = build_langchain_types_dict() custom_components_from_file = build_custom_components(settings_service) - return merge_nested_dicts_with_renaming( - native_components, custom_components_from_file - ) + return merge_nested_dicts_with_renaming(native_components, custom_components_from_file) diff --git a/src/backend/langflow/interface/utils.py b/src/backend/langflow/interface/utils.py index 30c55f1ef..8e7f476f5 100644 --- a/src/backend/langflow/interface/utils.py +++ b/src/backend/langflow/interface/utils.py @@ -43,9 +43,7 @@ def try_setting_streaming_options(langchain_object): llm = None if hasattr(langchain_object, "llm"): llm = langchain_object.llm - elif 
hasattr(langchain_object, "llm_chain") and hasattr( - langchain_object.llm_chain, "llm" - ): + elif hasattr(langchain_object, "llm_chain") and hasattr(langchain_object.llm_chain, "llm"): llm = langchain_object.llm_chain.llm if isinstance(llm, BaseLanguageModel): @@ -71,9 +69,7 @@ def extract_input_variables_from_prompt(prompt: str) -> list[str]: # Extract the variable name from either the single or double brace match if match.group(1): # Match found in double braces - variable_name = ( - "{{" + match.group(1) + "}}" - ) # Re-add single braces for JSON strings + variable_name = "{{" + match.group(1) + "}}" # Re-add single braces for JSON strings else: # Match found in single braces variable_name = match.group(2) if variable_name is not None: @@ -109,9 +105,7 @@ def set_langchain_cache(settings): if cache_type := os.getenv("LANGFLOW_LANGCHAIN_CACHE"): try: - cache_class = import_class( - f"langchain.cache.{cache_type or settings.LANGCHAIN_CACHE}" - ) + cache_class = import_class(f"langchain.cache.{cache_type or settings.LANGCHAIN_CACHE}") logger.debug(f"Setting up LLM caching with {cache_class.__name__}") set_llm_cache(cache_class()) diff --git a/src/backend/langflow/main.py b/src/backend/langflow/main.py index 4b4c19a25..76724521f 100644 --- a/src/backend/langflow/main.py +++ b/src/backend/langflow/main.py @@ -8,7 +8,9 @@ from fastapi import FastAPI, Request from fastapi.middleware.cors import CORSMiddleware from fastapi.responses import FileResponse from fastapi.staticfiles import StaticFiles + from langflow.api import router +from langflow.initial_setup.setup import create_or_update_starter_projects from langflow.interface.utils import setup_llm_caching from langflow.services.plugins.langfuse_plugin import LangfuseInstance from langflow.services.utils import initialize_services, teardown_services @@ -21,6 +23,7 @@ def get_lifespan(fix_migration=False, socketio_server=None): initialize_services(fix_migration=fix_migration, socketio_server=socketio_server) 
setup_llm_caching() LangfuseInstance.update() + create_or_update_starter_projects() yield teardown_services() @@ -114,6 +117,7 @@ def setup_app(static_files_dir: Optional[Path] = None, backend_only: bool = Fals if __name__ == "__main__": import uvicorn + from langflow.__main__ import get_number_of_workers configure() diff --git a/src/backend/langflow/memory.py b/src/backend/langflow/memory.py index c8f73f25e..b0ec04f1c 100644 --- a/src/backend/langflow/memory.py +++ b/src/backend/langflow/memory.py @@ -40,8 +40,8 @@ def get_messages( for row in messages_df.itertuples(): record = Record( - text=row.message, data={ + "text": row.message, "sender": row.sender, "sender_name": row.sender_name, "session_id": row.session_id, @@ -81,3 +81,14 @@ def add_messages(records: Union[list[Record], Record]): except Exception as e: logger.exception(e) raise e + + +def delete_messages(session_id: str): + """ + Delete messages from the monitor service based on the provided session ID. + + Args: + session_id (str): The session ID associated with the messages to delete. 
+ """ + monitor_service = get_monitor_service() + monitor_service.delete_messages(session_id) diff --git a/src/backend/langflow/processing/process.py b/src/backend/langflow/processing/process.py index 8ef4ae50f..f034656be 100644 --- a/src/backend/langflow/processing/process.py +++ b/src/backend/langflow/processing/process.py @@ -126,9 +126,7 @@ async def process_runnable(runnable: Runnable, inputs: Union[dict, List[dict]]): elif isinstance(inputs, dict) and hasattr(runnable, "ainvoke"): result = await runnable.ainvoke(inputs) else: - raise ValueError( - f"Runnable {runnable} does not support inputs of type {type(inputs)}" - ) + raise ValueError(f"Runnable {runnable} does not support inputs of type {type(inputs)}") # Check if the result is a list of AIMessages if isinstance(result, list) and all(isinstance(r, AIMessage) for r in result): result = [r.content for r in result] @@ -137,9 +135,7 @@ async def process_runnable(runnable: Runnable, inputs: Union[dict, List[dict]]): return result -async def process_inputs_dict( - built_object: Union[Chain, VectorStore, Runnable], inputs: dict -): +async def process_inputs_dict(built_object: Union[Chain, VectorStore, Runnable], inputs: dict): if isinstance(built_object, Chain): if inputs is None: raise ValueError("Inputs must be provided for a Chain") @@ -174,9 +170,7 @@ async def process_inputs_list(built_object: Runnable, inputs: List[dict]): return await process_runnable(built_object, inputs) -async def generate_result( - built_object: Union[Chain, VectorStore, Runnable], inputs: Union[dict, List[dict]] -): +async def generate_result(built_object: Union[Chain, VectorStore, Runnable], inputs: Union[dict, List[dict]]): if isinstance(inputs, dict): result = await process_inputs_dict(built_object, inputs) elif isinstance(inputs, List) and isinstance(built_object, Runnable): @@ -215,9 +209,7 @@ async def run_graph( else: graph_data = graph._graph_data if not session_id and session_service is not None: - session_id = 
session_service.generate_key( - session_id=flow_id, data_graph=graph_data - ) + session_id = session_service.generate_key(session_id=flow_id, data_graph=graph_data) if inputs is None: inputs = {} @@ -232,18 +224,14 @@ async def run_graph( return outputs, session_id -def validate_input( - graph_data: Dict[str, Any], tweaks: Dict[str, Dict[str, Any]] -) -> List[Dict[str, Any]]: +def validate_input(graph_data: Dict[str, Any], tweaks: Dict[str, Dict[str, Any]]) -> List[Dict[str, Any]]: if not isinstance(graph_data, dict) or not isinstance(tweaks, dict): raise ValueError("graph_data and tweaks should be dictionaries") nodes = graph_data.get("data", {}).get("nodes") or graph_data.get("nodes") if not isinstance(nodes, list): - raise ValueError( - "graph_data should contain a list of nodes under 'data' key or directly under 'nodes' key" - ) + raise ValueError("graph_data should contain a list of nodes under 'data' key or directly under 'nodes' key") return nodes @@ -252,9 +240,7 @@ def apply_tweaks(node: Dict[str, Any], node_tweaks: Dict[str, Any]) -> None: template_data = node.get("data", {}).get("node", {}).get("template") if not isinstance(template_data, dict): - logger.warning( - f"Template data for node {node.get('id')} should be a dictionary" - ) + logger.warning(f"Template data for node {node.get('id')} should be a dictionary") return for tweak_name, tweak_value in node_tweaks.items(): @@ -269,9 +255,7 @@ def apply_tweaks_on_vertex(vertex: Vertex, node_tweaks: Dict[str, Any]) -> None: vertex.params[tweak_name] = tweak_value -def process_tweaks( - graph_data: Dict[str, Any], tweaks: Dict[str, Dict[str, Any]] -) -> Dict[str, Any]: +def process_tweaks(graph_data: Dict[str, Any], tweaks: Dict[str, Dict[str, Any]]) -> Dict[str, Any]: """ This function is used to tweak the graph data using the node id and the tweaks dict. 
@@ -307,8 +291,6 @@ def process_tweaks_on_graph(graph: Graph, tweaks: Dict[str, Dict[str, Any]]): if node_tweaks := tweaks.get(node_id): apply_tweaks_on_vertex(vertex, node_tweaks) else: - logger.warning( - "Each node should be a Vertex with an 'id' attribute of type str" - ) + logger.warning("Each node should be a Vertex with an 'id' attribute of type str") return graph diff --git a/src/backend/langflow/schema/schema.py b/src/backend/langflow/schema/schema.py index 639c9da96..1b3e6b10e 100644 --- a/src/backend/langflow/schema/schema.py +++ b/src/backend/langflow/schema/schema.py @@ -1,6 +1,6 @@ -from typing import Any, Optional +import copy -from langchain_core.documents import Document +from langchain_core.documents import Document # Assumed import from pydantic import BaseModel @@ -9,12 +9,11 @@ class Record(BaseModel): Represents a record with text and optional data. Attributes: - text (str): The text of the record. data (dict, optional): Additional data associated with the record. """ - text: Optional[str] = "" data: dict = {} + _default_value: str = "" @classmethod def from_document(cls, document: Document) -> "Record": @@ -27,7 +26,22 @@ class Record(BaseModel): Returns: Record: The converted Record. """ - return cls(text=document.page_content, data=document.metadata) + data = document.metadata + data["text"] = document.page_content + return cls(data=data) + + def __add__(self, other: "Record") -> "Record": + """ + Concatenates the text of two records and combines their data. + + Args: + other (Record): The other record to concatenate with. + + Returns: + Record: The concatenated record. + """ + combined_data = {**self.data, **other.data} + return Record(data=combined_data) def to_lc_document(self) -> Document: """ @@ -38,20 +52,63 @@ class Record(BaseModel): """ return Document(page_content=self.text, metadata=self.data) - def __call__(self, *args: Any, **kwds: Any) -> Any: + def __getattr__(self, key): """ - Returns the text of the record. 
+ Allows attribute-like access to the data dictionary. + """ + try: + if key == "data" or key.startswith("_"): + return super().__getattr__(key) - Returns: - Any: The text of the record. + return self.data.get(key, self._default_value) + except KeyError: + # Fallback to default behavior to raise AttributeError for undefined attributes + raise AttributeError( + f"'{type(self).__name__}' object has no attribute '{key}'" + ) + + def __setattr__(self, key, value): """ - return self.text + Allows attribute-like setting of values in the data dictionary, + while still allowing direct assignment to class attributes. + """ + if key == "data" or key.startswith("_"): + super().__setattr__(key, value) + else: + self.data[key] = value + + def __delattr__(self, key): + """ + Allows attribute-like deletion from the data dictionary. + """ + if key == "data" or key.startswith("_"): + super().__delattr__(key) + else: + del self.data[key] + + def __deepcopy__(self, memo): + """ + Custom deepcopy implementation to handle copying of the Record object. + """ + cls = self.__class__ + result = cls.__new__(cls) + memo[id(self)] = result + for k, v in self.__dict__.items(): + setattr(result, k, copy.deepcopy(v, memo)) + return result def __str__(self) -> str: """ - Returns the text of the record. - - Returns: - str: The text and data of the record. + Returns a string representation of the Record, including text and data. """ - return self.model_dump_json(indent=2) + # Assuming a method to dump model data as JSON string exists. + # If it doesn't, you might need to implement it or use json.dumps() directly. 
+ # build the string considering all keys in the data dictionary + prefix = "Record(" + suffix = ")" + text = ", ".join([f"{k}={v}" for k, v in self.data.items()]) + return prefix + text + suffix + + # check which attributes the Record has by checking the keys in the data dictionary + def __dir__(self): + return super().__dir__() + list(self.data.keys()) diff --git a/src/backend/langflow/services/chat/utils.py b/src/backend/langflow/services/chat/utils.py index f0e584f4c..0562b9df2 100644 --- a/src/backend/langflow/services/chat/utils.py +++ b/src/backend/langflow/services/chat/utils.py @@ -22,9 +22,7 @@ async def process_graph( if build_result is None: # Raise user facing error - raise ValueError( - "There was an error loading the langchain_object. Please, check all the nodes and try again." - ) + raise ValueError("There was an error loading the langchain_object. Please, check all the nodes and try again.") # Generate result and thought try: @@ -50,7 +48,5 @@ async def process_graph( raise e -async def run_build_result( - build_result: Any, chat_inputs: ChatMessage, client_id: str, session_id: str -): +async def run_build_result(build_result: Any, chat_inputs: ChatMessage, client_id: str, session_id: str): return build_result(inputs=chat_inputs.message) diff --git a/src/backend/langflow/services/database/models/flow/model.py b/src/backend/langflow/services/database/models/flow/model.py index c7ee08141..b91cc409d 100644 --- a/src/backend/langflow/services/database/models/flow/model.py +++ b/src/backend/langflow/services/database/models/flow/model.py @@ -4,9 +4,11 @@ from datetime import datetime from typing import TYPE_CHECKING, Dict, Optional from uuid import UUID, uuid4 +from emoji import purely_emoji from pydantic import field_serializer, field_validator from sqlmodel import JSON, Column, Field, Relationship, SQLModel +from langflow.interface.custom.attributes import validate_icon from langflow.schema.schema import Record if TYPE_CHECKING: @@ -16,13 +18,49 @@ if 
TYPE_CHECKING: class FlowBase(SQLModel): name: str = Field(index=True) description: Optional[str] = Field(index=True, nullable=True, default=None) + icon: Optional[str] = Field(default=None, nullable=True) + icon_bg_color: Optional[str] = Field(default=None, nullable=True) data: Optional[Dict] = Field(default=None, nullable=True) is_component: Optional[bool] = Field(default=False, nullable=True) - updated_at: Optional[datetime] = Field( - default_factory=datetime.utcnow, nullable=True - ) + updated_at: Optional[datetime] = Field(default_factory=datetime.utcnow, nullable=True) folder: Optional[str] = Field(default=None, nullable=True) + @field_validator("icon_bg_color") + def validate_icon_bg_color(cls, v): + if v is not None and not isinstance(v, str): + raise ValueError("Icon background color must be a string") + # validate that is is a hex color + if v and not v.startswith("#"): + raise ValueError("Icon background color must start with #") + + # validate that it is a valid hex color + if v and len(v) != 7: + raise ValueError("Icon background color must be 7 characters long") + return v + + @field_validator("icon") + def validate_icon_atr(cls, v): + # const emojiRegex = /\p{Emoji}/u; + # const isEmoji = emojiRegex.test(data?.node?.icon!); + # emoji pattern in Python + if v is None: + return v + + emoji = validate_icon(v) + + if purely_emoji(emoji): + # this is indeed an emoji + return emoji + # otherwise it should be a valid lucide icon + if v is not None and not isinstance(v, str): + raise ValueError("Icon must be a string") + # is should be lowercase and contain only letters and hyphens + if v and not v.islower(): + raise ValueError("Icon must be lowercase") + if v and not v.replace("-", "").isalpha(): + raise ValueError("Icon must contain only letters and hyphens") + return v + @field_validator("data") def validate_json(v): if not v: @@ -58,7 +96,7 @@ class FlowBase(SQLModel): class Flow(FlowBase, table=True): id: UUID = Field(default_factory=uuid4, 
primary_key=True, unique=True) data: Optional[Dict] = Field(default=None, sa_column=Column(JSON)) - user_id: UUID = Field(index=True, foreign_key="user.id", nullable=True) + user_id: Optional[UUID] = Field(index=True, foreign_key="user.id", nullable=True) user: "User" = Relationship(back_populates="flows") def to_record(self): diff --git a/src/backend/langflow/services/database/service.py b/src/backend/langflow/services/database/service.py index 3cecdd340..ebaccf600 100644 --- a/src/backend/langflow/services/database/service.py +++ b/src/backend/langflow/services/database/service.py @@ -36,10 +36,7 @@ class DatabaseService(Service): def _create_engine(self) -> "Engine": """Create the engine for the database.""" settings_service = get_settings_service() - if ( - settings_service.settings.DATABASE_URL - and settings_service.settings.DATABASE_URL.startswith("sqlite") - ): + if settings_service.settings.DATABASE_URL and settings_service.settings.DATABASE_URL.startswith("sqlite"): connect_args = {"check_same_thread": False} else: connect_args = {} @@ -51,9 +48,7 @@ class DatabaseService(Service): def __exit__(self, exc_type, exc_value, traceback): if exc_type is not None: # If an exception has been raised - logger.error( - f"Session rollback because of exception: {exc_type.__name__} {exc_value}" - ) + logger.error(f"Session rollback because of exception: {exc_type.__name__} {exc_value}") self._session.rollback() else: self._session.commit() @@ -70,9 +65,7 @@ class DatabaseService(Service): settings_service = get_settings_service() if settings_service.auth_settings.AUTO_LOGIN: with Session(self.engine) as session: - flows = session.exec( - select(models.Flow).where(models.Flow.user_id is None) - ).all() + flows = session.exec(select(models.Flow).where(models.Flow.user_id is None)).all() if flows: logger.debug("Migrating flows to default superuser") username = settings_service.auth_settings.SUPERUSER @@ -102,9 +95,7 @@ class DatabaseService(Service): expected_columns = 
list(model.model_fields.keys()) try: - available_columns = [ - col["name"] for col in inspector.get_columns(table) - ] + available_columns = [col["name"] for col in inspector.get_columns(table)] except sa.exc.NoSuchTableError: logger.error(f"Missing table: {table}") return False @@ -161,20 +152,18 @@ class DatabaseService(Service): try: command.check(alembic_cfg) except Exception as exc: - if isinstance( - exc, (util.exc.CommandError, util.exc.AutogenerateDiffsDetected) - ): + if isinstance(exc, (util.exc.CommandError, util.exc.AutogenerateDiffsDetected)): command.upgrade(alembic_cfg, "head") time.sleep(3) try: command.check(alembic_cfg) - except util.exc.AutogenerateDiffsDetected as e: + except util.exc.AutogenerateDiffsDetected as exc: logger.error(f"AutogenerateDiffsDetected: {exc}") if not fix: raise RuntimeError( "Something went wrong running migrations. Please, run `langflow migration --fix`" - ) from e + ) from exc if fix: self.try_downgrade_upgrade_until_success(alembic_cfg) @@ -199,10 +188,7 @@ class DatabaseService(Service): # We will check that all models are in the database # and that the database is up to date with all columns sql_models = [models.Flow, models.User, models.ApiKey] - return [ - TableResults(sql_model.__tablename__, self.check_table(sql_model)) - for sql_model in sql_models - ] + return [TableResults(sql_model.__tablename__, self.check_table(sql_model)) for sql_model in sql_models] def check_table(self, model): results = [] @@ -211,9 +197,7 @@ class DatabaseService(Service): expected_columns = list(model.__fields__.keys()) available_columns = [] try: - available_columns = [ - col["name"] for col in inspector.get_columns(table_name) - ] + available_columns = [col["name"] for col in inspector.get_columns(table_name)] results.append(Result(name=table_name, type="table", success=True)) except sa.exc.NoSuchTableError: logger.error(f"Missing table: {table_name}") @@ -244,9 +228,7 @@ class DatabaseService(Service): try: 
table.create(self.engine, checkfirst=True) except OperationalError as oe: - logger.warning( - f"Table {table} already exists, skipping. Exception: {oe}" - ) + logger.warning(f"Table {table} already exists, skipping. Exception: {oe}") except Exception as exc: logger.error(f"Error creating table {table}: {exc}") raise RuntimeError(f"Error creating table {table}") from exc @@ -258,9 +240,7 @@ class DatabaseService(Service): if table not in table_names: logger.error("Something went wrong creating the database and tables.") logger.error("Please check your database settings.") - raise RuntimeError( - "Something went wrong creating the database and tables." - ) + raise RuntimeError("Something went wrong creating the database and tables.") logger.debug("Database and tables created successfully") diff --git a/src/backend/langflow/services/deps.py b/src/backend/langflow/services/deps.py index 19f3dcbf0..7d2338b04 100644 --- a/src/backend/langflow/services/deps.py +++ b/src/backend/langflow/services/deps.py @@ -1,3 +1,4 @@ +from contextlib import contextmanager from typing import TYPE_CHECKING, Generator from langflow.services import ServiceType, service_manager @@ -54,6 +55,19 @@ def get_session() -> Generator["Session", None, None]: yield from db_service.get_session() +@contextmanager +def session_scope(): + session = next(get_session()) + try: + yield session + session.commit() + except: + session.rollback() + raise + finally: + session.close() + + def get_cache_service() -> "BaseCacheService": return service_manager.get(ServiceType.CACHE_SERVICE) # type: ignore diff --git a/src/backend/langflow/services/monitor/schema.py b/src/backend/langflow/services/monitor/schema.py index 2c1e34cd5..dc3e2454c 100644 --- a/src/backend/langflow/services/monitor/schema.py +++ b/src/backend/langflow/services/monitor/schema.py @@ -60,11 +60,11 @@ class MessageModel(BaseModel): "The record does not have the required fields 'sender' and 'sender_name' in the data." 
) return cls( - sender=record.data["sender"], - sender_name=record.data["sender_name"], + sender=record.sender, + sender_name=record.sender_name, message=record.text, - session_id=record.data.get("session_id", ""), - artifacts=record.data.get("artifacts", {}), + session_id=record.session_id, + artifacts=record.artifacts or {}, ) diff --git a/src/backend/langflow/services/monitor/service.py b/src/backend/langflow/services/monitor/service.py index 6702a89eb..fdae04a4e 100644 --- a/src/backend/langflow/services/monitor/service.py +++ b/src/backend/langflow/services/monitor/service.py @@ -44,7 +44,9 @@ class MonitorService(Service): def ensure_tables_exist(self): for table_name, model in self.table_map.items(): - drop_and_create_table_if_schema_mismatch(str(self.db_path), table_name, model) + drop_and_create_table_if_schema_mismatch( + str(self.db_path), table_name, model + ) def add_row( self, @@ -105,6 +107,12 @@ class MonitorService(Service): with duckdb.connect(str(self.db_path)) as conn: conn.execute(query) + def delete_messages(self, session_id: str): + query = f"DELETE FROM messages WHERE session_id = '{session_id}'" + + with duckdb.connect(str(self.db_path)) as conn: + conn.execute(query) + def add_message(self, message: MessageModel): self.add_row("messages", message) diff --git a/src/backend/langflow/services/settings/base.py b/src/backend/langflow/services/settings/base.py index 3a1ec956e..f2e78b8cb 100644 --- a/src/backend/langflow/services/settings/base.py +++ b/src/backend/langflow/services/settings/base.py @@ -58,12 +58,10 @@ class Settings(BaseSettings): STORE: Optional[bool] = True STORE_URL: Optional[str] = "https://api.langflow.store" - DOWNLOAD_WEBHOOK_URL: Optional[str] = ( - "https://api.langflow.store/flows/trigger/ec611a61-8460-4438-b187-a4f65e5559d4" - ) - LIKE_WEBHOOK_URL: Optional[str] = ( - "https://api.langflow.store/flows/trigger/64275852-ec00-45c1-984e-3bff814732da" - ) + DOWNLOAD_WEBHOOK_URL: Optional[ + str + ] = 
"https://api.langflow.store/flows/trigger/ec611a61-8460-4438-b187-a4f65e5559d4" + LIKE_WEBHOOK_URL: Optional[str] = "https://api.langflow.store/flows/trigger/64275852-ec00-45c1-984e-3bff814732da" STORAGE_TYPE: str = "local" @@ -95,9 +93,7 @@ class Settings(BaseSettings): @validator("DATABASE_URL", pre=True) def set_database_url(cls, value, values): if not value: - logger.debug( - "No database_url provided, trying LANGFLOW_DATABASE_URL env variable" - ) + logger.debug("No database_url provided, trying LANGFLOW_DATABASE_URL env variable") if langflow_database_url := os.getenv("LANGFLOW_DATABASE_URL"): value = langflow_database_url logger.debug("Using LANGFLOW_DATABASE_URL env variable.") @@ -107,9 +103,7 @@ class Settings(BaseSettings): # so we need to migrate to the new format # if there is a database in that location if not values["CONFIG_DIR"]: - raise ValueError( - "CONFIG_DIR not set, please set it or provide a DATABASE_URL" - ) + raise ValueError("CONFIG_DIR not set, please set it or provide a DATABASE_URL") new_path = f"{values['CONFIG_DIR']}/langflow.db" if Path("./langflow.db").exists(): @@ -133,22 +127,15 @@ class Settings(BaseSettings): if os.getenv("LANGFLOW_COMPONENTS_PATH"): logger.debug("Adding LANGFLOW_COMPONENTS_PATH to components_path") langflow_component_path = os.getenv("LANGFLOW_COMPONENTS_PATH") - if ( - Path(langflow_component_path).exists() - and langflow_component_path not in value - ): + if Path(langflow_component_path).exists() and langflow_component_path not in value: if isinstance(langflow_component_path, list): for path in langflow_component_path: if path not in value: value.append(path) - logger.debug( - f"Extending {langflow_component_path} to components_path" - ) + logger.debug(f"Extending {langflow_component_path} to components_path") elif langflow_component_path not in value: value.append(langflow_component_path) - logger.debug( - f"Appending {langflow_component_path} to components_path" - ) + logger.debug(f"Appending 
{langflow_component_path} to components_path") if not value: value = [BASE_COMPONENTS_PATH] @@ -160,9 +147,7 @@ class Settings(BaseSettings): logger.debug(f"Components path: {value}") return value - model_config = SettingsConfigDict( - validate_assignment=True, extra="ignore", env_prefix="LANGFLOW_" - ) + model_config = SettingsConfigDict(validate_assignment=True, extra="ignore", env_prefix="LANGFLOW_") # @model_validator() # @classmethod diff --git a/src/backend/langflow/services/socket/utils.py b/src/backend/langflow/services/socket/utils.py index a45b85cd6..c1f012e18 100644 --- a/src/backend/langflow/services/socket/utils.py +++ b/src/backend/langflow/services/socket/utils.py @@ -96,9 +96,7 @@ async def build_vertex( ) # Emit the vertex build response - response = VertexBuildResponse( - valid=valid, params=params, id=vertex.id, data=result_dict - ) + response = VertexBuildResponse(valid=valid, params=params, id=vertex.id, data=result_dict) await sio.emit("vertex_build", data=response.model_dump(), to=sid) except Exception as exc: diff --git a/src/backend/langflow/services/task/service.py b/src/backend/langflow/services/task/service.py index e7f87d3f9..4d9a4412f 100644 --- a/src/backend/langflow/services/task/service.py +++ b/src/backend/langflow/services/task/service.py @@ -74,9 +74,7 @@ class TaskService(Service): result = await result return task.id, result - async def launch_task( - self, task_func: Callable[..., Any], *args: Any, **kwargs: Any - ) -> Any: + async def launch_task(self, task_func: Callable[..., Any], *args: Any, **kwargs: Any) -> Any: logger.debug(f"Launching task {task_func} with args {args} and kwargs {kwargs}") logger.debug(f"Using backend {self.backend}") task = self.backend.launch_task(task_func, *args, **kwargs) diff --git a/src/backend/langflow/services/utils.py b/src/backend/langflow/services/utils.py index 34f1a042d..4c3533e97 100644 --- a/src/backend/langflow/services/utils.py +++ b/src/backend/langflow/services/utils.py @@ -92,16 
+92,12 @@ def get_or_create_super_user(session: Session, username, password, is_default): ) return None else: - logger.debug( - "User with superuser credentials exists but is not a superuser." - ) + logger.debug("User with superuser credentials exists but is not a superuser.") return None if user: if verify_password(password, user.password): - raise ValueError( - "User with superuser credentials exists but is not a superuser." - ) + raise ValueError("User with superuser credentials exists but is not a superuser.") else: raise ValueError("Incorrect superuser credentials") @@ -130,21 +126,15 @@ def setup_superuser(settings_service, session: Session): username = settings_service.auth_settings.SUPERUSER password = settings_service.auth_settings.SUPERUSER_PASSWORD - is_default = (username == DEFAULT_SUPERUSER) and ( - password == DEFAULT_SUPERUSER_PASSWORD - ) + is_default = (username == DEFAULT_SUPERUSER) and (password == DEFAULT_SUPERUSER_PASSWORD) try: - user = get_or_create_super_user( - session=session, username=username, password=password, is_default=is_default - ) + user = get_or_create_super_user(session=session, username=username, password=password, is_default=is_default) if user is not None: logger.debug("Superuser created successfully.") except Exception as exc: logger.exception(exc) - raise RuntimeError( - "Could not create superuser. Please create a superuser manually." - ) from exc + raise RuntimeError("Could not create superuser. Please create a superuser manually.") from exc finally: settings_service.auth_settings.reset_credentials() @@ -158,9 +148,7 @@ def teardown_superuser(settings_service, session): if not settings_service.auth_settings.AUTO_LOGIN: try: - logger.debug( - "AUTO_LOGIN is set to False. Removing default superuser if exists." - ) + logger.debug("AUTO_LOGIN is set to False. 
Removing default superuser if exists.") username = DEFAULT_SUPERUSER from langflow.services.database.models.user.model import User @@ -210,9 +198,7 @@ def initialize_session_service(): initialize_settings_service() - service_manager.register_factory( - cache_factory.CacheServiceFactory(), dependencies=[ServiceType.SETTINGS_SERVICE] - ) + service_manager.register_factory(cache_factory.CacheServiceFactory(), dependencies=[ServiceType.SETTINGS_SERVICE]) service_manager.register_factory( session_service_factory.SessionServiceFactory(), @@ -229,9 +215,7 @@ def initialize_services(fix_migration: bool = False, socketio_server=None): service_manager.register_factory(factory, dependencies=dependencies) except Exception as exc: logger.exception(exc) - raise RuntimeError( - "Could not initialize services. Please check your settings." - ) from exc + raise RuntimeError("Could not initialize services. Please check your settings.") from exc # Test cache connection service_manager.get(ServiceType.CACHE_SERVICE) @@ -241,9 +225,7 @@ def initialize_services(fix_migration: bool = False, socketio_server=None): except Exception as exc: logger.error(exc) raise exc - setup_superuser( - service_manager.get(ServiceType.SETTINGS_SERVICE), next(get_session()) - ) + setup_superuser(service_manager.get(ServiceType.SETTINGS_SERVICE), next(get_session())) try: get_db_service().migrate_flows_if_auto_login() except Exception as exc: diff --git a/src/backend/langflow/template/field/base.py b/src/backend/langflow/template/field/base.py index 455d5779b..bf6d461f7 100644 --- a/src/backend/langflow/template/field/base.py +++ b/src/backend/langflow/template/field/base.py @@ -84,10 +84,10 @@ class TemplateField(BaseModel): if self.field_type in ["str", "Text"]: if "input_types" not in result: result["input_types"] = ["Text"] - else: - result["input_types"].append("Text") if self.field_type == "Text": result["type"] = "str" + else: + result["type"] = self.field_type return result 
@field_serializer("file_path") diff --git a/src/backend/langflow/template/frontend_node/base.py b/src/backend/langflow/template/frontend_node/base.py index bcbbb36c1..7bd68ddf9 100644 --- a/src/backend/langflow/template/frontend_node/base.py +++ b/src/backend/langflow/template/frontend_node/base.py @@ -74,6 +74,9 @@ class FrontendNode(BaseModel): frozen: bool = False """Whether the frontend node is frozen.""" + field_order: list[str] = [] + """Order of the fields in the frontend node.""" + beta: bool = False error: Optional[str] = None @@ -171,9 +174,7 @@ class FrontendNode(BaseModel): return _type @staticmethod - def handle_special_field( - field, key: str, _type: str, SPECIAL_FIELD_HANDLERS - ) -> str: + def handle_special_field(field, key: str, _type: str, SPECIAL_FIELD_HANDLERS) -> str: """Handles special field by using the respective handler if present.""" handler = SPECIAL_FIELD_HANDLERS.get(key) return handler(field) if handler else _type @@ -184,11 +185,7 @@ class FrontendNode(BaseModel): if "dict" in _type.lower() and field.name == "dict_": field.field_type = "file" field.file_types = [".json", ".yaml", ".yml"] - elif ( - _type.startswith("Dict") - or _type.startswith("Mapping") - or _type.startswith("dict") - ): + elif _type.startswith("Dict") or _type.startswith("Mapping") or _type.startswith("dict"): field.field_type = "dict" return _type @@ -199,9 +196,7 @@ class FrontendNode(BaseModel): field.value = value["default"] @staticmethod - def handle_specific_field_values( - field: TemplateField, key: str, name: Optional[str] = None - ) -> None: + def handle_specific_field_values(field: TemplateField, key: str, name: Optional[str] = None) -> None: """Handles specific field values for certain fields.""" if key == "headers": field.value = """{"Authorization": "Bearer "}""" @@ -209,9 +204,7 @@ class FrontendNode(BaseModel): FrontendNode._handle_api_key_specific_field_values(field, key, name) @staticmethod - def _handle_model_specific_field_values( - field: 
TemplateField, key: str, name: Optional[str] = None - ) -> None: + def _handle_model_specific_field_values(field: TemplateField, key: str, name: Optional[str] = None) -> None: """Handles specific field values related to models.""" model_dict = { "OpenAI": constants.OPENAI_MODELS, @@ -224,9 +217,7 @@ class FrontendNode(BaseModel): field.is_list = True @staticmethod - def _handle_api_key_specific_field_values( - field: TemplateField, key: str, name: Optional[str] = None - ) -> None: + def _handle_api_key_specific_field_values(field: TemplateField, key: str, name: Optional[str] = None) -> None: """Handles specific field values related to API keys.""" if "api_key" in key and "OpenAI" in str(name): field.display_name = "OpenAI API Key" @@ -266,10 +257,7 @@ class FrontendNode(BaseModel): @staticmethod def should_be_password(key: str, show: bool) -> bool: """Determines whether the field should be a password field.""" - return ( - any(text in key.lower() for text in {"password", "token", "api", "key"}) - and show - ) + return any(text in key.lower() for text in {"password", "token", "api", "key"}) and show @staticmethod def should_be_multiline(key: str) -> bool: diff --git a/src/backend/langflow/template/frontend_node/memories.py b/src/backend/langflow/template/frontend_node/memories.py index f1c326810..93ea561dd 100644 --- a/src/backend/langflow/template/frontend_node/memories.py +++ b/src/backend/langflow/template/frontend_node/memories.py @@ -80,9 +80,7 @@ class MemoryFrontendNode(FrontendNode): field.show = True field.advanced = False field.value = "" - field.info = ( - INPUT_KEY_INFO if field.name == "input_key" else OUTPUT_KEY_INFO - ) + field.info = INPUT_KEY_INFO if field.name == "input_key" else OUTPUT_KEY_INFO if field.name == "memory_key": field.value = "chat_history" diff --git a/src/backend/langflow/template/template/base.py b/src/backend/langflow/template/template/base.py index 9bc375b0f..d7632e239 100644 --- a/src/backend/langflow/template/template/base.py 
+++ b/src/backend/langflow/template/template/base.py @@ -1,14 +1,14 @@ from typing import Callable, Union +from pydantic import BaseModel, model_serializer + from langflow.template.field.base import TemplateField from langflow.utils.constants import DIRECT_TYPES -from pydantic import BaseModel, model_serializer class Template(BaseModel): type_name: str fields: list[TemplateField] - field_order: list[str] = [] def process_fields( self, @@ -30,7 +30,6 @@ class Template(BaseModel): for field in self.fields: result[field.name] = field.model_dump(by_alias=True, exclude_none=True) result["_type"] = result.pop("type_name") - result.pop("field_order", None) return result # For backwards compatibility diff --git a/src/backend/langflow/utils/util.py b/src/backend/langflow/utils/util.py index 7e1206222..ad6660cc5 100644 --- a/src/backend/langflow/utils/util.py +++ b/src/backend/langflow/utils/util.py @@ -5,8 +5,8 @@ from functools import wraps from typing import Any, Dict, List, Optional, Union from docstring_parser import parse -from langchain_core.documents import Document +from langflow.schema.schema import Record from langflow.template.frontend_node.constants import FORCE_SHOW_FIELDS from langflow.utils import constants @@ -15,8 +15,12 @@ def remove_ansi_escape_codes(text): return re.sub(r"\x1b\[[0-9;]*[a-zA-Z]", "", text) -def build_template_from_function(name: str, type_to_loader_dict: Dict, add_function: bool = False): - classes = [item.__annotations__["return"].__name__ for item in type_to_loader_dict.values()] +def build_template_from_function( + name: str, type_to_loader_dict: Dict, add_function: bool = False +): + classes = [ + item.__annotations__["return"].__name__ for item in type_to_loader_dict.values() + ] # Raise error if name is not in chains if name not in classes: @@ -37,8 +41,10 @@ def build_template_from_function(name: str, type_to_loader_dict: Dict, add_funct for name_, value_ in value.__repr_args__(): if name_ == "default_factory": try: - 
variables[class_field_items]["default"] = get_default_factory( - module=_class.__base__.__module__, function=value_ + variables[class_field_items]["default"] = ( + get_default_factory( + module=_class.__base__.__module__, function=value_ + ) ) except Exception: variables[class_field_items]["default"] = None @@ -46,7 +52,9 @@ def build_template_from_function(name: str, type_to_loader_dict: Dict, add_funct variables[class_field_items][name_] = value_ variables[class_field_items]["placeholder"] = ( - docs.params[class_field_items] if class_field_items in docs.params else "" + docs.params[class_field_items] + if class_field_items in docs.params + else "" ) # Adding function to base classes to allow # the output to be a function @@ -61,7 +69,9 @@ def build_template_from_function(name: str, type_to_loader_dict: Dict, add_funct } -def build_template_from_class(name: str, type_to_cls_dict: Dict, add_function: bool = False): +def build_template_from_class( + name: str, type_to_cls_dict: Dict, add_function: bool = False +): classes = [item.__name__ for item in type_to_cls_dict.values()] # Raise error if name is not in chains @@ -85,8 +95,11 @@ def build_template_from_class(name: str, type_to_cls_dict: Dict, add_function: b for name_, value_ in value.__repr_args__(): if name_ == "default_factory": try: - variables[class_field_items]["default"] = get_default_factory( - module=_class.__base__.__module__, function=value_ + variables[class_field_items]["default"] = ( + get_default_factory( + module=_class.__base__.__module__, + function=value_, + ) ) except Exception: variables[class_field_items]["default"] = None @@ -94,7 +107,9 @@ def build_template_from_class(name: str, type_to_cls_dict: Dict, add_function: b variables[class_field_items][name_] = value_ variables[class_field_items]["placeholder"] = ( - docs.params[class_field_items] if class_field_items in docs.params else "" + docs.params[class_field_items] + if class_field_items in docs.params + else "" ) base_classes = 
get_base_classes(_class) # Adding function to base classes to allow @@ -126,7 +141,9 @@ def build_template_from_method( # Check if the method exists in this class if not hasattr(_class, method_name): - raise ValueError(f"Method {method_name} not found in class {class_name}") + raise ValueError( + f"Method {method_name} not found in class {class_name}" + ) # Get the method method = getattr(_class, method_name) @@ -145,8 +162,14 @@ def build_template_from_method( "_type": _type, **{ name: { - "default": param.default if param.default != param.empty else None, - "type": param.annotation if param.annotation != param.empty else None, + "default": ( + param.default if param.default != param.empty else None + ), + "type": ( + param.annotation + if param.annotation != param.empty + else None + ), "required": param.default == param.empty, } for name, param in params.items() @@ -233,7 +256,9 @@ def sync_to_async(func): return async_wrapper -def format_dict(dictionary: Dict[str, Any], class_name: Optional[str] = None) -> Dict[str, Any]: +def format_dict( + dictionary: Dict[str, Any], class_name: Optional[str] = None +) -> Dict[str, Any]: """ Formats a dictionary by removing certain keys and modifying the values of other keys. @@ -243,7 +268,7 @@ def format_dict(dictionary: Dict[str, Any], class_name: Optional[str] = None) -> """ for key, value in dictionary.items(): - if key == "_type": + if key in ["_type"]: continue _type: Union[str, type] = get_type(value) @@ -319,7 +344,9 @@ def check_list_type(_type: str, value: Dict[str, Any]) -> str: The modified type string. 
""" if any(list_type in _type for list_type in ["List", "Sequence", "Set"]): - _type = _type.replace("List[", "").replace("Sequence[", "").replace("Set[", "")[:-1] + _type = ( + _type.replace("List[", "").replace("Sequence[", "").replace("Set[", "")[:-1] + ) value["list"] = True else: value["list"] = False @@ -422,7 +449,9 @@ def set_headers_value(value: Dict[str, Any]) -> None: value["value"] = """{"Authorization": "Bearer "}""" -def add_options_to_field(value: Dict[str, Any], class_name: Optional[str], key: str) -> None: +def add_options_to_field( + value: Dict[str, Any], class_name: Optional[str], key: str +) -> None: """ Adds options to the field based on the class name and key. """ @@ -439,10 +468,20 @@ def add_options_to_field(value: Dict[str, Any], class_name: Optional[str], key: value["value"] = options_map[class_name][0] -def build_loader_repr_from_documents(documents: List[Document]) -> str: - if documents: - avg_length = sum(len(doc.page_content) for doc in documents) / len(documents) - return f"""{len(documents)} documents - \nAvg. Document Length (characters): {int(avg_length)} - Documents: {documents[:3]}...""" - return "0 documents" +def build_loader_repr_from_records(records: List[Record]) -> str: + """ + Builds a string representation of the loader based on the given records. + + Args: + records (List[Record]): A list of records. + + Returns: + str: A string representation of the loader. + + """ + if records: + avg_length = sum(len(doc.text) for doc in records) / len(records) + return f"""{len(records)} records + \nAvg. 
Record Length (characters): {int(avg_length)} + Records: {records[:3]}...""" + return "0 records" diff --git a/src/backend/langflow/utils/validate.py b/src/backend/langflow/utils/validate.py index 9611d3a56..e7bd4ae05 100644 --- a/src/backend/langflow/utils/validate.py +++ b/src/backend/langflow/utils/validate.py @@ -43,9 +43,7 @@ def validate_code(code): # Evaluate the function definition for node in tree.body: if isinstance(node, ast.FunctionDef): - code_obj = compile( - ast.Module(body=[node], type_ignores=[]), "", "exec" - ) + code_obj = compile(ast.Module(body=[node], type_ignores=[]), "", "exec") try: exec(code_obj) except Exception as e: @@ -89,23 +87,15 @@ def execute_function(code, function_name, *args, **kwargs): exec_globals, locals(), ) - exec_globals[alias.asname or alias.name] = importlib.import_module( - alias.name - ) + exec_globals[alias.asname or alias.name] = importlib.import_module(alias.name) except ModuleNotFoundError as e: - raise ModuleNotFoundError( - f"Module {alias.name} not found. Please install it and try again." - ) from e + raise ModuleNotFoundError(f"Module {alias.name} not found. 
Please install it and try again.") from e function_code = next( - node - for node in module.body - if isinstance(node, ast.FunctionDef) and node.name == function_name + node for node in module.body if isinstance(node, ast.FunctionDef) and node.name == function_name ) function_code.parent = None - code_obj = compile( - ast.Module(body=[function_code], type_ignores=[]), "", "exec" - ) + code_obj = compile(ast.Module(body=[function_code], type_ignores=[]), "", "exec") try: exec(code_obj, exec_globals, locals()) except Exception as exc: @@ -132,23 +122,15 @@ def create_function(code, function_name): if isinstance(node, ast.Import): for alias in node.names: try: - exec_globals[alias.asname or alias.name] = importlib.import_module( - alias.name - ) + exec_globals[alias.asname or alias.name] = importlib.import_module(alias.name) except ModuleNotFoundError as e: - raise ModuleNotFoundError( - f"Module {alias.name} not found. Please install it and try again." - ) from e + raise ModuleNotFoundError(f"Module {alias.name} not found. 
Please install it and try again.") from e function_code = next( - node - for node in module.body - if isinstance(node, ast.FunctionDef) and node.name == function_name + node for node in module.body if isinstance(node, ast.FunctionDef) and node.name == function_name ) function_code.parent = None - code_obj = compile( - ast.Module(body=[function_code], type_ignores=[]), "", "exec" - ) + code_obj = compile(ast.Module(body=[function_code], type_ignores=[]), "", "exec") with contextlib.suppress(Exception): exec(code_obj, exec_globals, locals()) exec_globals[function_name] = locals()[function_name] @@ -210,13 +192,9 @@ def prepare_global_scope(code, module): if isinstance(node, ast.Import): for alias in node.names: try: - exec_globals[alias.asname or alias.name] = importlib.import_module( - alias.name - ) + exec_globals[alias.asname or alias.name] = importlib.import_module(alias.name) except ModuleNotFoundError as e: - raise ModuleNotFoundError( - f"Module {alias.name} not found. Please install it and try again." - ) from e + raise ModuleNotFoundError(f"Module {alias.name} not found. 
Please install it and try again.") from e elif isinstance(node, ast.ImportFrom) and node.module is not None: try: imported_module = importlib.import_module(node.module) @@ -237,11 +215,7 @@ def extract_class_code(module, class_name): :param class_name: Name of the class to extract :return: AST node of the specified class """ - class_code = next( - node - for node in module.body - if isinstance(node, ast.ClassDef) and node.name == class_name - ) + class_code = next(node for node in module.body if isinstance(node, ast.ClassDef) and node.name == class_name) class_code.parent = None return class_code @@ -254,9 +228,7 @@ def compile_class_code(class_code): :param class_code: AST node of the class :return: Compiled code object of the class """ - code_obj = compile( - ast.Module(body=[class_code], type_ignores=[]), "", "exec" - ) + code_obj = compile(ast.Module(body=[class_code], type_ignores=[]), "", "exec") return code_obj @@ -300,9 +272,7 @@ def get_default_imports(code_string): langflow_imports = list(CUSTOM_COMPONENT_SUPPORTED_TYPES.keys()) necessary_imports = find_names_in_code(code_string, langflow_imports) langflow_module = importlib.import_module("langflow.field_typing") - default_imports.update( - {name: getattr(langflow_module, name) for name in necessary_imports} - ) + default_imports.update({name: getattr(langflow_module, name) for name in necessary_imports}) return default_imports diff --git a/src/frontend/src/CustomNodes/GenericNode/components/parameterComponent/index.tsx b/src/frontend/src/CustomNodes/GenericNode/components/parameterComponent/index.tsx index 7a6c306f1..075ded804 100644 --- a/src/frontend/src/CustomNodes/GenericNode/components/parameterComponent/index.tsx +++ b/src/frontend/src/CustomNodes/GenericNode/components/parameterComponent/index.tsx @@ -23,14 +23,16 @@ import { OUTPUT_HANDLER_HOVER, TOOLTIP_EMPTY, } from "../../../../constants/constants"; -import { postCustomComponentUpdate } from "../../../../controllers/API"; import useAlertStore 
from "../../../../stores/alertStore"; import useFlowStore from "../../../../stores/flowStore"; import useFlowsManagerStore from "../../../../stores/flowsManagerStore"; import { useTypesStore } from "../../../../stores/typesStore"; -import { APIClassType } from "../../../../types/api"; +import { APIClassType, ResponseErrorTypeAPI } from "../../../../types/api"; import { ParameterComponentType } from "../../../../types/components"; -import { NodeDataType } from "../../../../types/flow"; +import { + handleUpdateValues, + throttledHandleUpdateValues, +} from "../../../../utils/parameterUtils"; import { convertObjToArray, convertValuesToNumbers, @@ -86,81 +88,65 @@ export default function ParameterComponent({ const takeSnapshot = useFlowsManagerStore((state) => state.takeSnapshot); - const handleUpdateValues = async ( - name: string, - data: NodeDataType, - delayAnimation: boolean = true - ) => { - setIsLoading(true); - const code = data.node?.template["code"]?.value; - if (!code) { - console.error("Code not found in the template"); - return; - } - - await postCustomComponentUpdate( - code, - name, - data.node?.template[name]?.value - ) - .then((res) => { - if (res.status === 200 && data.node?.template) { - setNode(data.id, (oldNode) => { - let newNode = cloneDeep(oldNode); - - newNode.data = { - ...newNode.data, - }; - - newNode.data.node.template = res.data.template; - - return newNode; - }); - } - }) - .catch((error) => { - console.error("Error occurred while updating the node:", error); - setErrorData({ - title: "Error while updating the Component", - list: [error.response.data.detail.error ?? 
"Unknown error"], - }); - }); - - renderTooltips(); - if (delayAnimation) { - try { - // Wait for at least 500 milliseconds - await new Promise((resolve) => setTimeout(resolve, 500)); - // Continue with the request - // If the request takes longer than 500 milliseconds, it will not wait an additional 500 milliseconds - } catch (error) { - console.error("Error occurred while waiting for refresh:", error); - } finally { - setIsLoading(false); - } - } else setIsLoading(false); - }; - useEffect(() => { - function fetchData() { + async function fetchData() { if ( data.node?.template[name]?.refresh && - Object.keys(data.node?.template[name]?.options ?? {}).length === 0 + // options can be undefined but not an empty array + (data.node?.template[name]?.options?.length ?? 0) === 0 ) { - handleUpdateValues(name, data, false); + setIsLoading(true); + try { + let newTemplate = await handleUpdateValues(name, data); + if (newTemplate) { + setNode(data.id, (oldNode) => { + let newNode = cloneDeep(oldNode); + newNode.data = { + ...newNode.data, + }; + newNode.data.node.template = newTemplate; + return newNode; + }); + } + } catch (error) { + let responseError = error as ResponseErrorTypeAPI; + setErrorData({ + title: "Error while updating the Component", + list: [responseError.response.data.detail.error ?? 
"Unknown error"], + }); + } + setIsLoading(false); + renderTooltips(); } } fetchData(); }, []); - const handleOnNewValue = ( + const handleOnNewValue = async ( newValue: string | string[] | boolean | Object[] - ): void => { + ): Promise => { if (data.node!.template[name].value !== newValue) { takeSnapshot(); } + const shouldUpdate = + data.node?.template[name].refresh && + data.node!.template[name].value !== newValue; data.node!.template[name].value = newValue; // necessary to enable ctrl+z inside the input - + let newTemplate; + if (shouldUpdate) { + setIsLoading(true); + try { + newTemplate = await throttledHandleUpdateValues(name, data); + } catch (error) { + let responseError = error as ResponseErrorTypeAPI; + setErrorData({ + title: "Error while updating the Component", + list: [responseError.response.data.detail.error ?? "Unknown error"], + }); + } + setIsLoading(false); + // this de + } setNode(data.id, (oldNode) => { let newNode = cloneDeep(oldNode); @@ -168,7 +154,9 @@ export default function ParameterComponent({ ...newNode.data, }; - newNode.data.node.template[name].value = newValue; + if (data.node?.template[name].refresh && newTemplate) { + newNode.data.node.template = newTemplate; + } else newNode.data.node.template[name].value = newValue; return newNode; }); @@ -300,11 +288,14 @@ export default function ParameterComponent({ refHtml.current = {TOOLTIP_EMPTY}; } } + // If optionalHandle is an empty list, then it is not an optional handle + if (optionalHandle && optionalHandle.length === 0) { + optionalHandle = null; + } useEffect(() => { renderTooltips(); }, [tooltipTitle, flow]); - return !showNode ? ( left && LANGFLOW_SUPPORTED_TYPES.has(type ?? "") && !optionalHandle ? ( <> @@ -449,7 +440,14 @@ export default function ParameterComponent({ !data.node?.template[name].options ? (
{data.node?.template[name].list ? ( -
+
- {data.node?.template[name].refresh && ( + {/* {data.node?.template[name].refresh && (
- )} + )} */}
) : data.node?.template[name].multiline ? ( ) : (
-
+
{ const valueToNumbers = convertValuesToNumbers(newValue); setErrorDuplicateKey(hasDuplicateKeys(valueToNumbers)); - handleOnNewValue(valueToNumbers); + // if data.node?.template[name].list is true, then the value is an array of objects + // else we need to get the first object of the array + + if (data.node?.template[name].list) { + handleOnNewValue(valueToNumbers); + } else handleOnNewValue(valueToNumbers[0]); }} + isList={data.node?.template[name].list ?? false} />
) : ( diff --git a/src/frontend/src/CustomNodes/GenericNode/index.tsx b/src/frontend/src/CustomNodes/GenericNode/index.tsx index a78362f16..4b0b752e1 100644 --- a/src/frontend/src/CustomNodes/GenericNode/index.tsx +++ b/src/frontend/src/CustomNodes/GenericNode/index.tsx @@ -12,10 +12,10 @@ import { RUN_TIMESTAMP_PREFIX, STATUS_BUILD, STATUS_BUILDING, - priorityFields, } from "../../constants/constants"; import { BuildStatus } from "../../constants/enums"; import NodeToolbarComponent from "../../pages/FlowPage/components/nodeToolbarComponent"; +import useAlertStore from "../../stores/alertStore"; import { useDarkStore } from "../../stores/darkStore"; import useFlowStore from "../../stores/flowStore"; import useFlowsManagerStore from "../../stores/flowsManagerStore"; @@ -24,7 +24,7 @@ import { validationStatusType } from "../../types/components"; import { NodeDataType } from "../../types/flow"; import { handleKeyDown, scapedJSONStringfy } from "../../utils/reactflowUtils"; import { nodeColors, nodeIconsLucide } from "../../utils/styleUtils"; -import { classNames, cn, getFieldTitle } from "../../utils/utils"; +import { classNames, cn, getFieldTitle, sortFields } from "../../utils/utils"; import ParameterComponent from "./components/parameterComponent"; export default function GenericNode({ @@ -43,6 +43,7 @@ export default function GenericNode({ const flowPool = useFlowStore((state) => state.flowPool); const buildFlow = useFlowStore((state) => state.buildFlow); const setNode = useFlowStore((state) => state.setNode); + const setErrorData = useAlertStore((state) => state.setErrorData); const name = nodeIconsLucide[data.type] ? 
data.type : types[data.type]; const [inputName, setInputName] = useState(false); const [nodeName, setNodeName] = useState(data.node!.display_name); @@ -64,6 +65,18 @@ export default function GenericNode({ const takeSnapshot = useFlowsManagerStore((state) => state.takeSnapshot); + if (!data.node!.template) { + setErrorData({ + title: `Error in component ${data.node!.display_name}`, + list: [ + `The component ${data.node!.display_name} has no template.`, + `Please contact the developer of the component to fix this issue.`, + ], + }); + takeSnapshot(); + deleteNode(data.id); + } + function countHandles(): void { let count = Object.keys(data.node!.template) .filter((templateField) => templateField.charAt(0) !== "_") @@ -157,7 +170,7 @@ export default function GenericNode({ const iconElement = data?.node?.icon; const iconColor = nodeColors[types[data.type]]; const iconName = - iconElement || (data.node?.flow ? "group_components" : name); + iconElement || (data.node?.flow ? "group_components" : name); const iconClassName = `generic-node-icon ${ !showNode ? " absolute inset-x-6 h-12 w-12 " : "" }`; @@ -636,15 +649,7 @@ export default function GenericNode({ <> {Object.keys(data.node!.template) .filter((templateField) => templateField.charAt(0) !== "_") - .sort((a, b) => { - if (priorityFields.has(a.toLowerCase())) { - return -1; - } else if (priorityFields.has(b.toLowerCase())) { - return 1; - } else { - return a.localeCompare(b); - } - }) + .sort((a, b) => sortFields(a, b, data.node?.field_order ?? [])) .map((templateField: string, idx) => (
{data.node!.template[templateField].show && diff --git a/src/frontend/src/assets/undraw_transfer_files_re_a2a9.svg b/src/frontend/src/assets/undraw_transfer_files_re_a2a9.svg new file mode 100644 index 000000000..c5930b9ea --- /dev/null +++ b/src/frontend/src/assets/undraw_transfer_files_re_a2a9.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/src/frontend/src/components/AccordionComponent/index.tsx b/src/frontend/src/components/AccordionComponent/index.tsx index 9a3c4a212..3d9aac314 100644 --- a/src/frontend/src/components/AccordionComponent/index.tsx +++ b/src/frontend/src/components/AccordionComponent/index.tsx @@ -50,9 +50,7 @@ export default function AccordionComponent({ {trigger} -
- {children} -
+
{children}
diff --git a/src/frontend/src/components/CrashErrorComponent/index.tsx b/src/frontend/src/components/CrashErrorComponent/index.tsx index 0cf376195..57dc83d83 100644 --- a/src/frontend/src/components/CrashErrorComponent/index.tsx +++ b/src/frontend/src/components/CrashErrorComponent/index.tsx @@ -26,7 +26,7 @@ export default function CrashErrorComponent({

- Please report errors with detailed tracebacks on the{" "} + Please report errors with detailed tracebacks on the{" "} {" "} page.

- Thank you! + Thank you!

diff --git a/src/frontend/src/components/NewFLowCard2/index.tsx b/src/frontend/src/components/NewFLowCard2/index.tsx new file mode 100644 index 000000000..096cd2314 --- /dev/null +++ b/src/frontend/src/components/NewFLowCard2/index.tsx @@ -0,0 +1,26 @@ +import { Card, CardContent } from "../ui/card"; +import useFlowsManagerStore from "../../stores/flowsManagerStore"; +import { useNavigate } from "react-router-dom"; +import IconComponent from "../genericIconComponent"; +import { cn } from "../../utils/utils"; + +export default function NewFlowCardComponent() { + const addFlow = useFlowsManagerStore((state) => state.addFlow); + const navigate = useNavigate(); + return ( + { + addFlow(true).then((id) => { + navigate("/flow/" + id); + }); + }} className="pt-4 w-80 h-72 cursor-pointer"> + + + +
Create from scratch
+
+
+ ) +} \ No newline at end of file diff --git a/src/frontend/src/components/NewFlowCardComponent/index.tsx b/src/frontend/src/components/NewFlowCardComponent/index.tsx new file mode 100644 index 000000000..1fe5b4b46 --- /dev/null +++ b/src/frontend/src/components/NewFlowCardComponent/index.tsx @@ -0,0 +1,33 @@ +import { useNavigate } from "react-router-dom"; +import useFlowsManagerStore from "../../stores/flowsManagerStore"; +import { cn } from "../../utils/utils"; +import IconComponent from "../genericIconComponent"; +import { Card, CardContent } from "../ui/card"; + +export default function NewFlowCardComponent({}: {}) { + const addFlow = useFlowsManagerStore((state) => state.addFlow); + const navigate = useNavigate(); + + return ( + + + + + + ); +} diff --git a/src/frontend/src/components/codeTabsComponent/index.tsx b/src/frontend/src/components/codeTabsComponent/index.tsx index 8ddb09552..a83a36fc2 100644 --- a/src/frontend/src/components/codeTabsComponent/index.tsx +++ b/src/frontend/src/components/codeTabsComponent/index.tsx @@ -126,7 +126,7 @@ export default function CodeTabsComponent({ {tab.code} diff --git a/src/frontend/src/components/exampleComponent/index.tsx b/src/frontend/src/components/exampleComponent/index.tsx new file mode 100644 index 000000000..347797c47 --- /dev/null +++ b/src/frontend/src/components/exampleComponent/index.tsx @@ -0,0 +1,100 @@ +import { useNavigate } from "react-router-dom"; +import useFlowsManagerStore from "../../stores/flowsManagerStore"; +import { FlowType } from "../../types/flow"; +import { updateIds } from "../../utils/reactflowUtils"; +import { cn } from "../../utils/utils"; +import ShadTooltip from "../ShadTooltipComponent"; +import IconComponent from "../genericIconComponent"; +import { Button } from "../ui/button"; +import { + Card, + CardDescription, + CardFooter, + CardHeader, + CardTitle, +} from "../ui/card"; + +export default function CollectionCardComponent({ + flow, +}: { + flow: FlowType; + authorized?: 
boolean; +}) { + const addFlow = useFlowsManagerStore((state) => state.addFlow); + const navigate = useNavigate(); + const emojiRegex = /\p{Emoji}/u; + const isEmoji = (str: string) => emojiRegex.test(str); + + return ( + +
+ +
+ + {flow.icon && isEmoji(flow.icon) && ( +
+
{flow.icon}
+
+ )} + {(!flow.icon || !isEmoji(flow.icon)) && ( +
+ +
+ )} + +
{flow.name}
+
+
+
+ + +
{flow.description}
+
+
+
+
+ + +
+
+ +
+
+
+
+ ); +} diff --git a/src/frontend/src/components/keypairListComponent/index.tsx b/src/frontend/src/components/keypairListComponent/index.tsx index 034628643..9f452c988 100644 --- a/src/frontend/src/components/keypairListComponent/index.tsx +++ b/src/frontend/src/components/keypairListComponent/index.tsx @@ -12,10 +12,11 @@ export default function KeypairListComponent({ disabled, editNode = false, duplicateKey, + isList = true, }: KeyPairListComponentType): JSX.Element { useEffect(() => { if (disabled && value.length > 0 && value[0] !== "") { - onChange([""]); + onChange([{ "": "" }]); } }, [disabled]); @@ -79,6 +80,7 @@ export default function KeypairListComponent({ : "keypair" + (index + 100).toString() } type="text" + disabled={disabled} value={obj[key]} className={editNode ? "input-edit-node" : ""} placeholder="Type a value..." @@ -87,7 +89,7 @@ export default function KeypairListComponent({ } /> - {index === ref.current.length - 1 ? ( + {isList && index === ref.current.length - 1 ? ( - ) : ( + ) : isList ? ( + ) : ( + "" )}
); diff --git a/src/frontend/src/components/newChatView/chatMessage/index.tsx b/src/frontend/src/components/newChatView/chatMessage/index.tsx index 075d5e418..760a9d140 100644 --- a/src/frontend/src/components/newChatView/chatMessage/index.tsx +++ b/src/frontend/src/components/newChatView/chatMessage/index.tsx @@ -134,7 +134,7 @@ export default function ChatMessage({ )}
{!chat.isSend ? ( -
+
{hidden && chat.thought && chat.thought !== "" && (
)} {chat.thought && chat.thought !== "" && !hidden &&

} -
-
-
+
+
+
{useMemo( () => chatMessage === "" && lockChat ? ( @@ -169,7 +169,7 @@ export default function ChatMessage({ +//@ts-ignore +import { ReactComponent as TransferFiles } from "../../assets/undraw_transfer_files_re_a2a9.svg" +import useFlowsManagerStore from "../../stores/flowsManagerStore"; +import { FlowType } from "../../types/flow" +import { updateIds } from "../../utils/reactflowUtils"; +import ShadTooltip from "../ShadTooltipComponent" +import { Card, CardContent, CardDescription, CardFooter, CardTitle } from "../ui/card" + +export default function UndrawCardComponent({ + flow +}: { flow: FlowType }) { + const addFlow = useFlowsManagerStore((state) => state.addFlow); + const navigate = useNavigate(); + return ( + { + updateIds(flow.data!); + addFlow(true, flow).then((id) => { + navigate("/flow/" + id); + }); + }} className="pt-4 w-80 h-72 cursor-pointer"> + +
+
+
+ + {flow.name} + +
{flow.description}
+
+
+
+ ) +} \ No newline at end of file diff --git a/src/frontend/src/constants/alerts_constants.tsx b/src/frontend/src/constants/alerts_constants.tsx index 205933b4b..0710e3d00 100644 --- a/src/frontend/src/constants/alerts_constants.tsx +++ b/src/frontend/src/constants/alerts_constants.tsx @@ -58,4 +58,3 @@ export const FLOW_BUILD_SUCCESS_ALERT = `Flow built successfully`; export const SAVE_SUCCESS_ALERT = "Changes saved successfully!"; // Generic Node - diff --git a/src/frontend/src/constants/constants.ts b/src/frontend/src/constants/constants.ts index 3f4584102..9e40d56ca 100644 --- a/src/frontend/src/constants/constants.ts +++ b/src/frontend/src/constants/constants.ts @@ -727,3 +727,15 @@ export const STATUS_BUILD = "Build to validate status."; export const STATUS_BUILDING = "Building..."; export const SAVED_HOVER = "Last saved at "; export const RUN_TIMESTAMP_PREFIX = "Last Run: "; +export const STARTER_FOLDER_NAME = "Starter Projects"; +export const PRIORITY_SIDEBAR_ORDER = [ + "saved_components", + "inputs", + "outputs", + "prompts", + "data", + "prompt", + "models", + "helpers", + "experimental", +]; diff --git a/src/frontend/src/controllers/API/index.ts b/src/frontend/src/controllers/API/index.ts index 6ee4141a0..a8ee9c6e7 100644 --- a/src/frontend/src/controllers/API/index.ts +++ b/src/frontend/src/controllers/API/index.ts @@ -370,7 +370,7 @@ export async function postCustomComponent( export async function postCustomComponentUpdate( code: string, field: string, - field_value: string + field_value: any ): Promise> { return await api.post(`${BASE_URL_API}custom_component/update`, { code, diff --git a/src/frontend/src/modals/ApiModal/index.tsx b/src/frontend/src/modals/ApiModal/index.tsx index 81d6defee..9c451beeb 100644 --- a/src/frontend/src/modals/ApiModal/index.tsx +++ b/src/frontend/src/modals/ApiModal/index.tsx @@ -98,6 +98,9 @@ const ApiModal = forwardRef( let arrNodesWithValues: string[] = []; flow["data"]!["nodes"].forEach((node) => { + if 
(!node["data"]["node"]["template"]) { + return; + } Object.keys(node["data"]["node"]["template"]) .filter( (templateField) => diff --git a/src/frontend/src/modals/DeleteConfirmationModal/index.tsx b/src/frontend/src/modals/DeleteConfirmationModal/index.tsx index a1f757985..b2087f40d 100644 --- a/src/frontend/src/modals/DeleteConfirmationModal/index.tsx +++ b/src/frontend/src/modals/DeleteConfirmationModal/index.tsx @@ -35,8 +35,7 @@ export default function DeleteConfirmationModal({ - Confirm deletion of {description ?? "component"}? -

+ Confirm deletion of {description ?? "component"}?

Note: This action is irreversible.
diff --git a/src/frontend/src/modals/EditNodeModal/index.tsx b/src/frontend/src/modals/EditNodeModal/index.tsx index e789e1ab8..619a6f7e2 100644 --- a/src/frontend/src/modals/EditNodeModal/index.tsx +++ b/src/frontend/src/modals/EditNodeModal/index.tsx @@ -165,8 +165,19 @@ const EditNodeModal = forwardRef( ) ) ?? false; return ( -
- Attention: API keys in specified fields are automatically removed upon sharing. + Attention: API keys in specified fields are automatically + removed upon sharing. diff --git a/src/frontend/src/pages/FlowPage/components/extraSidebarComponent/utils.tsx b/src/frontend/src/pages/FlowPage/components/extraSidebarComponent/utils.tsx index f8b5a0af9..ee0509cd0 100644 --- a/src/frontend/src/pages/FlowPage/components/extraSidebarComponent/utils.tsx +++ b/src/frontend/src/pages/FlowPage/components/extraSidebarComponent/utils.tsx @@ -1,16 +1,10 @@ +import { PRIORITY_SIDEBAR_ORDER } from "../../../../constants/constants"; + export function sortKeys(a: string, b: string) { // Define the order of specific keys - const order = [ - "saved_components", - "inputs", - "outputs", - "prompts", - "data", - "models", - "utilities", - ]; - const indexA = order.indexOf(a.toLowerCase()); - const indexB = order.indexOf(b.toLowerCase()); + + const indexA = PRIORITY_SIDEBAR_ORDER.indexOf(a.toLowerCase()); + const indexB = PRIORITY_SIDEBAR_ORDER.indexOf(b.toLowerCase()); // Check if both keys are in the predefined order if (indexA !== -1 && indexB !== -1) { diff --git a/src/frontend/src/pages/MainPage/components/components/index.tsx b/src/frontend/src/pages/MainPage/components/components/index.tsx index 86ee65d0c..65189fa8a 100644 --- a/src/frontend/src/pages/MainPage/components/components/index.tsx +++ b/src/frontend/src/pages/MainPage/components/components/index.tsx @@ -24,6 +24,7 @@ export default function ComponentsComponent({ const uploadFlow = useFlowsManagerStore((state) => state.uploadFlow); const removeFlow = useFlowsManagerStore((state) => state.removeFlow); const isLoading = useFlowsManagerStore((state) => state.isLoading); + const setExamples = useFlowsManagerStore((state) => state.setExamples); const flows = useFlowsManagerStore((state) => state.flows); const setSuccessData = useAlertStore((state) => state.setSuccessData); const setErrorData = useAlertStore((state) => 
state.setErrorData); @@ -35,7 +36,7 @@ export default function ComponentsComponent({ useEffect(() => { if (isLoading) return; - const all = flows + let all = flows .filter((f) => (f.is_component ?? false) === is_component) .sort((a, b) => { if (a?.updated_at && b?.updated_at) { diff --git a/src/frontend/src/pages/MainPage/index.tsx b/src/frontend/src/pages/MainPage/index.tsx index cc4115e7e..e2d30fe4f 100644 --- a/src/frontend/src/pages/MainPage/index.tsx +++ b/src/frontend/src/pages/MainPage/index.tsx @@ -1,7 +1,9 @@ import { Group, ToyBrick } from "lucide-react"; -import { useEffect } from "react"; +import { useEffect, useState } from "react"; import { Outlet, useLocation, useNavigate } from "react-router-dom"; import DropdownButton from "../../components/DropdownButtonComponent"; +import NewFlowCardComponent from "../../components/NewFLowCard2";; +import ExampleCardComponent from "../../components/exampleComponent"; import IconComponent from "../../components/genericIconComponent"; import PageLayout from "../../components/pageLayout"; import SidebarNav from "../../components/sidebarComponent"; @@ -11,9 +13,11 @@ import { MY_COLLECTION_DESC, USER_PROJECTS_HEADER, } from "../../constants/constants"; +import BaseModal from "../../modals/baseModal"; import useAlertStore from "../../stores/alertStore"; import useFlowsManagerStore from "../../stores/flowsManagerStore"; import { downloadFlows } from "../../utils/reactflowUtils"; +import UndrawCardComponent from "../../components/undrawCards"; export default function HomePage(): JSX.Element { const addFlow = useFlowsManagerStore((state) => state.addFlow); const uploadFlow = useFlowsManagerStore((state) => state.uploadFlow); @@ -25,6 +29,8 @@ export default function HomePage(): JSX.Element { const setErrorData = useAlertStore((state) => state.setErrorData); const location = useLocation(); const pathname = location.pathname; + const [openModal, setOpenModal] = useState(false); + const examples = 
useFlowsManagerStore((state) => state.examples); const is_component = pathname === "/components"; const dropdownOptions = [ { @@ -98,11 +104,7 @@ export default function HomePage(): JSX.Element { { - addFlow(true).then((id) => { - navigate("/flow/" + id); - }); - }} + onFirstBtnClick={() => setOpenModal(true)} options={dropdownOptions} />
@@ -116,6 +118,28 @@ export default function HomePage(): JSX.Element {
+ + + + Get Started + + {/* + +
+ {examples.map((example, idx) => { + return ; + })} + +
+
+
); } diff --git a/src/frontend/src/stores/flowStore.ts b/src/frontend/src/stores/flowStore.ts index cbf183289..16f6b75f4 100644 --- a/src/frontend/src/stores/flowStore.ts +++ b/src/frontend/src/stores/flowStore.ts @@ -514,7 +514,6 @@ const useFlowStore = create((set, get) => ({ get().setIsBuilding(false); }, onBuildStart: (idList) => { - console.log("onBuildStart", idList); useFlowStore.getState().updateBuildStatus(idList, BuildStatus.BUILDING); }, validateNodes: validateSubgraph, @@ -536,7 +535,6 @@ const useFlowStore = create((set, get) => ({ runId: string; } | null ) => { - console.log("updateVerticesBuild", vertices); set({ verticesBuild: vertices }); }, verticesBuild: null, @@ -563,7 +561,6 @@ const useFlowStore = create((set, get) => ({ }); }, updateBuildStatus: (nodeIdList: string[], status: BuildStatus) => { - console.log("updateBuildStatus", nodeIdList, status); const newFlowBuildStatus = { ...get().flowBuildStatus }; nodeIdList.forEach((id) => { newFlowBuildStatus[id] = { @@ -573,7 +570,6 @@ const useFlowStore = create((set, get) => ({ const timestamp_string = new Date(Date.now()).toLocaleString(); newFlowBuildStatus[id].timestamp = timestamp_string; } - console.log("updateBuildStatus", newFlowBuildStatus); }); set({ flowBuildStatus: newFlowBuildStatus }); }, diff --git a/src/frontend/src/stores/flowsManagerStore.ts b/src/frontend/src/stores/flowsManagerStore.ts index fac806c5f..2c9fca428 100644 --- a/src/frontend/src/stores/flowsManagerStore.ts +++ b/src/frontend/src/stores/flowsManagerStore.ts @@ -2,6 +2,7 @@ import { AxiosError } from "axios"; import { cloneDeep } from "lodash"; import { Edge, Node, Viewport, XYPosition } from "reactflow"; import { create } from "zustand"; +import { STARTER_FOLDER_NAME } from "../constants/constants"; import { deleteFlowFromDatabase, readFlowsFromDatabase, @@ -37,6 +38,10 @@ const past = {}; const future = {}; const useFlowsManagerStore = create((set, get) => ({ + examples: [], + setExamples: (examples: FlowType[]) => 
{ + set({ examples }); + }, currentFlowId: "", setCurrentFlowId: (currentFlowId: string) => { set((state) => ({ @@ -62,7 +67,16 @@ const useFlowsManagerStore = create((set, get) => ({ .then((dbData) => { if (dbData) { const { data, flows } = processFlows(dbData, false); - get().setFlows(flows); + get().setExamples( + flows.filter( + (f) => f.folder === STARTER_FOLDER_NAME && !f.user_id + ) + ); + get().setFlows( + flows.filter( + (f) => !(f.folder === STARTER_FOLDER_NAME && !f.user_id) + ) + ); useTypesStore.setState((state) => ({ data: { ...state.data, ["saved_components"]: data }, })); diff --git a/src/frontend/src/types/api/index.ts b/src/frontend/src/types/api/index.ts index bcbb31730..fc8346a5b 100644 --- a/src/frontend/src/types/api/index.ts +++ b/src/frontend/src/types/api/index.ts @@ -29,6 +29,7 @@ export type APIClassType = { official?: boolean; frozen?: boolean; flow?: FlowType; + field_order?: string[]; [key: string]: | Array | string @@ -158,3 +159,17 @@ export type VertexDataTypeAPI = { timedelta?: number; duration?: string; }; + +export type CodeErrorDataTypeAPI = { + error: string | undefined; + traceback: string | undefined; +}; + +// the error above is inside this error.response.data.detail.error +// which comes from a request to the API +// to type the error we need to know the structure of the object + +// error that has a response, that has a data, that has a detail, that has an error +export type ResponseErrorTypeAPI = { + response: { data: { detail: CodeErrorDataTypeAPI } }; +}; diff --git a/src/frontend/src/types/components/index.ts b/src/frontend/src/types/components/index.ts index aef92b1a4..14fb959ff 100644 --- a/src/frontend/src/types/components/index.ts +++ b/src/frontend/src/types/components/index.ts @@ -71,6 +71,7 @@ export type KeyPairListComponentType = { editNode?: boolean; duplicateKey?: boolean; editNodeModal?: boolean; + isList?: boolean; }; export type DictComponentType = { diff --git a/src/frontend/src/types/flow/index.ts 
b/src/frontend/src/types/flow/index.ts index 967d4e424..8b259186a 100644 --- a/src/frontend/src/types/flow/index.ts +++ b/src/frontend/src/types/flow/index.ts @@ -13,6 +13,10 @@ export type FlowType = { updated_at?: string; date_created?: string; parent?: string; + folder?: string; + user_id?: string; + icon?: string; + icon_bg_color?: string; }; export type NodeType = { diff --git a/src/frontend/src/types/zustand/flowsManager/index.ts b/src/frontend/src/types/zustand/flowsManager/index.ts index 471d4cf17..87fbf9a22 100644 --- a/src/frontend/src/types/zustand/flowsManager/index.ts +++ b/src/frontend/src/types/zustand/flowsManager/index.ts @@ -44,6 +44,8 @@ export type FlowsManagerStoreType = { undo: () => void; redo: () => void; takeSnapshot: () => void; + examples: Array; + setExamples: (examples: FlowType[]) => void; }; export type UseUndoRedoOptions = { diff --git a/src/frontend/src/utils/parameterUtils.ts b/src/frontend/src/utils/parameterUtils.ts new file mode 100644 index 000000000..9635ce96d --- /dev/null +++ b/src/frontend/src/utils/parameterUtils.ts @@ -0,0 +1,35 @@ +import { throttle } from "lodash"; +import { postCustomComponentUpdate } from "../controllers/API"; +import { ResponseErrorTypeAPI } from "../types/api"; +import { NodeDataType } from "../types/flow"; + +export const handleUpdateValues = async (name: string, data: NodeDataType) => { + const code = data.node?.template["code"]?.value; + if (!code) { + console.error("Code not found in the template"); + return; + } + try { + let newTemplate = await postCustomComponentUpdate( + code, + name, + data.node?.template[name]?.value + ) + .then((res) => { + console.log("res", res); + if (res.status === 200 && data.node?.template) { + return res.data.template; + } + }) + .catch((error) => { + throw error; + }); + return newTemplate; + } catch (error) { + console.error("Error occurred while updating the node:", error); + let errorType = error as ResponseErrorTypeAPI; + throw errorType; + } +}; + +export 
const throttledHandleUpdateValues = throttle(handleUpdateValues, 10); diff --git a/src/frontend/src/utils/styleUtils.ts b/src/frontend/src/utils/styleUtils.ts index 340d30ffd..7ebcda3c2 100644 --- a/src/frontend/src/utils/styleUtils.ts +++ b/src/frontend/src/utils/styleUtils.ts @@ -42,15 +42,14 @@ import { EyeOff, File, FileDown, + SquarePen, FileSearch, FileSearch2, FileSliders, FileText, FileType2, FileUp, - Fingerprint, FlaskConical, - FolderOpen, FolderPlus, FormInput, Forward, @@ -67,7 +66,6 @@ import { Key, Laptop2, Layers, - Lightbulb, Link, Loader2, Lock, @@ -91,11 +89,12 @@ import { Pin, Play, Plus, + PlusCircle, + PlusSquare, PocketKnife, Redo, RefreshCcw, Repeat, - Rocket, Save, SaveAll, Scissors, @@ -127,10 +126,8 @@ import { UserPlus2, Users2, Variable, - Wand, Wand2, Workflow, - Wrench, X, XCircle, Zap, @@ -235,7 +232,8 @@ export const nodeColors: { [char: string]: string } = { textsplitters: "#B47CB5", toolkits: "#DB2C2C", wrappers: "#E6277A", - utilities: "#31A3CC", + helpers: "#31A3CC", + experimental: "#E6277A", langchain_utilities: "#31A3CC", output_parsers: "#E6A627", str: "#31a3cc", @@ -266,7 +264,8 @@ export const nodeNames: { [char: string]: string } = { wrappers: "Wrappers", textsplitters: "Text Splitters", retrievers: "Retrievers", - utilities: "Helpers", + helpers: "Helpers", + experimental: "Experimental", langchain_utilities: "Utilities", output_parsers: "Output Parsers", custom_components: "Custom", @@ -358,7 +357,8 @@ export const nodeIconsLucide: iconsType = { toolkits: Package2, textsplitters: Scissors, wrappers: Gift, - utilities: Wand2, + helpers: Wand2, + experimental: FlaskConical, langchain_utilities: PocketKnife, WolframAlphaAPIWrapper: SvgWolfram, output_parsers: Compass, @@ -376,6 +376,7 @@ export const nodeIconsLucide: iconsType = { XCircle, Info, CheckCircle2, + SquarePen, Zap, MessagesSquare, ExternalLink, @@ -400,6 +401,8 @@ export const nodeIconsLucide: iconsType = { Circle, CircleDot, Clipboard, + PlusCircle, + 
PlusSquare, Code2, Variable, Snowflake, diff --git a/src/frontend/src/utils/utils.ts b/src/frontend/src/utils/utils.ts index 22fe93185..9651169eb 100644 --- a/src/frontend/src/utils/utils.ts +++ b/src/frontend/src/utils/utils.ts @@ -1,5 +1,6 @@ import clsx, { ClassValue } from "clsx"; import { twMerge } from "tailwind-merge"; +import { priorityFields } from "../constants/constants"; import { ADJECTIVES, DESCRIPTIONS, NOUNS } from "../flow_constants"; import { APIDataType, @@ -143,7 +144,7 @@ export function groupByFamily( // se existir o flow for (const node of flow) { // para cada node do flow - if (node!.data!.node!.flow) break; // não faz nada se o node for um group + if (node!.data!.node!.flow || !node!.data!.node!.template) break; // não faz nada se o node for um group const nodeData = node.data; const foundNode = checkedNodes.get(nodeData.type); // verifica se o tipo do node já foi checado @@ -642,3 +643,42 @@ export function getFieldTitle( ? template[templateField].display_name! : template[templateField].name ?? 
templateField; } + +export function sortFields(a, b, fieldOrder) { + // Early return for empty fields + if (!a && !b) return 0; + if (!a) return 1; + if (!b) return -1; + + // Normalize the case to ensure case-insensitive comparison + const normalizedFieldA = a.toLowerCase(); + const normalizedFieldB = b.toLowerCase(); + + const aIsPriority = priorityFields.has(normalizedFieldA); + const bIsPriority = priorityFields.has(normalizedFieldB); + + // Sort by priority + if (aIsPriority && !bIsPriority) return -1; + if (!aIsPriority && bIsPriority) return 1; + + // Check if either field is in the fieldOrder array + const indexOfA = fieldOrder.indexOf(normalizedFieldA); + const indexOfB = fieldOrder.indexOf(normalizedFieldB); + + // If both fields are in fieldOrder, sort by their order in the array + if (indexOfA !== -1 && indexOfB !== -1) { + return indexOfA - indexOfB; + } + + // If only one of the fields is in fieldOrder, that field comes first + if (indexOfA !== -1) { + return -1; + } + if (indexOfB !== -1) { + return 1; + } + + // Default case for fields not in priorityFields and not found in fieldOrder + // You might want to sort them alphabetically or in another specific manner + return a.localeCompare(b); +} diff --git a/src/frontend/tests/end-to-end/assets/flow_group_test.json b/src/frontend/tests/end-to-end/assets/flow_group_test.json index d8d9a2a87..9df38cfb3 100644 --- a/src/frontend/tests/end-to-end/assets/flow_group_test.json +++ b/src/frontend/tests/end-to-end/assets/flow_group_test.json @@ -1 +1,509 @@ -{"id":"8404c1fc-1bce-43b4-a8bc-3febea587fc8","data":{"nodes":[{"id":"PythonFunctionTool-RfJui","type":"genericNode","position":{"x":117.54690105175428,"y":-84.2465475108354},"data":{"type":"PythonFunctionTool","node":{"template":{"code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"\ndef python_function(text: str) -> str:\n \"\"\"This is a default python function that returns the input text\"\"\"\n return 
text\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":false,"dynamic":false,"info":"","title_case":false},"description":{"type":"str","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"Returns the Text you send. This is a testing tool.","fileTypes":[],"file_path":"","password":false,"name":"description","advanced":false,"dynamic":false,"info":"","title_case":false,"input_types":["Text"]},"name":{"type":"str","required":true,"placeholder":"","list":false,"show":true,"multiline":false,"value":"PythonFunction","fileTypes":[],"file_path":"","password":false,"name":"name","advanced":false,"dynamic":false,"info":"","title_case":false,"input_types":["Text"]},"return_direct":{"type":"bool","required":true,"placeholder":"","list":false,"show":true,"multiline":false,"value":false,"fileTypes":[],"file_path":"","password":false,"name":"return_direct","advanced":false,"dynamic":false,"info":"","title_case":false},"_type":"PythonFunctionTool"},"description":"Python function to be executed.","base_classes":["BaseTool","Tool"],"display_name":"PythonFunctionTool","documentation":"","custom_fields":{},"output_types":[],"field_formatters":{},"pinned":false,"beta":false},"id":"PythonFunctionTool-RfJui"},"selected":true,"width":384,"height":466,"positionAbsolute":{"x":117.54690105175428,"y":-84.2465475108354},"dragging":false},{"id":"AgentInitializer-tPdJw","type":"genericNode","position":{"x":677.68677055088,"y":127.19859565276168},"data":{"type":"AgentInitializer","node":{"template":{"llm":{"type":"BaseLanguageModel","required":true,"placeholder":"","list":false,"show":true,"multiline":false,"fileTypes":[],"file_path":"","password":false,"name":"llm","display_name":"Language 
Model","advanced":false,"dynamic":false,"info":"","title_case":false},"memory":{"type":"BaseChatMemory","required":false,"placeholder":"","list":false,"show":true,"multiline":false,"fileTypes":[],"file_path":"","password":false,"name":"memory","display_name":"Memory","advanced":false,"dynamic":false,"info":"","title_case":false},"tools":{"type":"Tool","required":true,"placeholder":"","list":true,"show":true,"multiline":false,"fileTypes":[],"file_path":"","password":false,"name":"tools","display_name":"Tools","advanced":false,"dynamic":false,"info":"","title_case":false},"agent":{"type":"str","required":true,"placeholder":"","list":true,"show":true,"multiline":false,"value":"zero-shot-react-description","fileTypes":[],"file_path":"","password":false,"options":["zero-shot-react-description","react-docstore","self-ask-with-search","conversational-react-description","chat-zero-shot-react-description","chat-conversational-react-description","structured-chat-zero-shot-react-description","openai-functions","openai-multi-functions","JsonAgent","CSVAgent","VectorStoreAgent","VectorStoreRouterAgent","SQLAgent"],"name":"agent","display_name":"Agent Type","advanced":false,"dynamic":false,"info":"","title_case":false,"input_types":["Text"]},"code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"from typing import Callable, List, Optional, Union\n\nfrom langchain.agents import AgentExecutor, AgentType, initialize_agent, types\nfrom langflow import CustomComponent\nfrom langflow.field_typing import BaseChatMemory, BaseLanguageModel, Tool\n\n\nclass AgentInitializerComponent(CustomComponent):\n display_name: str = \"Agent Initializer\"\n description: str = \"Initialize a Langchain Agent.\"\n documentation: str = \"https://python.langchain.com/docs/modules/agents/agent_types/\"\n\n def build_config(self):\n agents = list(types.AGENT_TO_CLASS.keys())\n # field_type and required are optional\n return {\n \"agent\": {\"options\": 
agents, \"value\": agents[0], \"display_name\": \"Agent Type\"},\n \"max_iterations\": {\"display_name\": \"Max Iterations\", \"value\": 10},\n \"memory\": {\"display_name\": \"Memory\"},\n \"tools\": {\"display_name\": \"Tools\"},\n \"llm\": {\"display_name\": \"Language Model\"},\n \"code\": {\"advanced\": True},\n }\n\n def build(\n self,\n agent: str,\n llm: BaseLanguageModel,\n tools: List[Tool],\n max_iterations: int,\n memory: Optional[BaseChatMemory] = None,\n ) -> Union[AgentExecutor, Callable]:\n agent = AgentType(agent)\n if memory:\n return initialize_agent(\n tools=tools,\n llm=llm,\n agent=agent,\n memory=memory,\n return_intermediate_steps=True,\n handle_parsing_errors=True,\n max_iterations=max_iterations,\n )\n return initialize_agent(\n tools=tools,\n llm=llm,\n agent=agent,\n return_intermediate_steps=True,\n handle_parsing_errors=True,\n max_iterations=max_iterations,\n )\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","title_case":false},"max_iterations":{"type":"int","required":true,"placeholder":"","list":false,"show":true,"multiline":false,"value":10,"fileTypes":[],"file_path":"","password":false,"name":"max_iterations","display_name":"Max Iterations","advanced":false,"dynamic":false,"info":"","title_case":false},"_type":"CustomComponent"},"description":"Initialize a Langchain Agent.","base_classes":["Runnable","Chain","Serializable","object","AgentExecutor","Generic","RunnableSerializable","Callable"],"display_name":"Agent 
Initializer","documentation":"https://python.langchain.com/docs/modules/agents/agent_types/","custom_fields":{"agent":null,"llm":null,"tools":null,"max_iterations":null,"memory":null},"output_types":["AgentExecutor","Callable"],"field_formatters":{},"pinned":false,"beta":true},"id":"AgentInitializer-tPdJw"},"selected":false,"width":384,"height":522},{"id":"ChatOpenAISpecs-stxRM","type":"genericNode","position":{"x":18.226716205350385,"y":432.6122491402193},"data":{"type":"ChatOpenAISpecs","node":{"template":{"code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"from typing import Optional, Union\n\nfrom langchain.llms import BaseLLM\nfrom langchain_community.chat_models.openai import ChatOpenAI\nfrom langflow import CustomComponent\nfrom langflow.field_typing import BaseLanguageModel, NestedDict\n\n\nclass ChatOpenAIComponent(CustomComponent):\n display_name = \"ChatOpenAI\"\n description = \"`OpenAI` Chat large language models API.\"\n icon = \"OpenAI\"\n\n def build_config(self):\n return {\n \"max_tokens\": {\n \"display_name\": \"Max Tokens\",\n \"advanced\": False,\n \"required\": False,\n },\n \"model_kwargs\": {\n \"display_name\": \"Model Kwargs\",\n \"advanced\": True,\n \"required\": False,\n },\n \"model_name\": {\n \"display_name\": \"Model Name\",\n \"advanced\": False,\n \"required\": False,\n \"options\": [\n \"gpt-4-turbo-preview\",\n \"gpt-4-0125-preview\",\n \"gpt-4-1106-preview\",\n \"gpt-4-vision-preview\",\n \"gpt-3.5-turbo-0125\",\n \"gpt-3.5-turbo-1106\",\n ],\n },\n \"openai_api_base\": {\n \"display_name\": \"OpenAI API Base\",\n \"advanced\": False,\n \"required\": False,\n \"info\": (\n \"The base URL of the OpenAI API. 
Defaults to https://api.openai.com/v1.\\n\\n\"\n \"You can change this to use other APIs like JinaChat, LocalAI and Prem.\"\n ),\n },\n \"openai_api_key\": {\n \"display_name\": \"OpenAI API Key\",\n \"advanced\": False,\n \"required\": False,\n \"password\": True,\n },\n \"temperature\": {\n \"display_name\": \"Temperature\",\n \"advanced\": False,\n \"required\": False,\n \"value\": 0.7,\n },\n }\n\n def build(\n self,\n max_tokens: Optional[int] = 256,\n model_kwargs: NestedDict = {},\n model_name: str = \"gpt-4-1106-preview\",\n openai_api_base: Optional[str] = None,\n openai_api_key: Optional[str] = None,\n temperature: float = 0.7,\n ) -> Union[BaseLanguageModel, BaseLLM]:\n if not openai_api_base:\n openai_api_base = \"https://api.openai.com/v1\"\n return ChatOpenAI(\n max_tokens=max_tokens,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=openai_api_key,\n temperature=temperature,\n )\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":false,"dynamic":true,"info":"","title_case":false},"max_tokens":{"type":"int","required":false,"placeholder":"","list":false,"show":true,"multiline":false,"value":256,"fileTypes":[],"file_path":"","password":false,"name":"max_tokens","display_name":"Max Tokens","advanced":false,"dynamic":false,"info":"","title_case":false},"model_kwargs":{"type":"NestedDict","required":false,"placeholder":"","list":false,"show":true,"multiline":false,"value":{},"fileTypes":[],"file_path":"","password":false,"name":"model_kwargs","display_name":"Model Kwargs","advanced":true,"dynamic":false,"info":"","title_case":false},"model_name":{"type":"str","required":false,"placeholder":"","list":true,"show":true,"multiline":false,"value":"gpt-4-1106-preview","fileTypes":[],"file_path":"","password":false,"options":["gpt-4-turbo-preview","gpt-4-0125-preview","gpt-4-1106-preview","gpt-4-vision-preview","gpt-3.5-turbo-0125","gpt-3.5-turbo-1106"],"name":"model_name","display_name":"Model 
Name","advanced":false,"dynamic":false,"info":"","title_case":false,"input_types":["Text"]},"openai_api_base":{"type":"str","required":false,"placeholder":"","list":false,"show":true,"multiline":false,"fileTypes":[],"file_path":"","password":false,"name":"openai_api_base","display_name":"OpenAI API Base","advanced":false,"dynamic":false,"info":"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1.\n\nYou can change this to use other APIs like JinaChat, LocalAI and Prem.","title_case":false,"input_types":["Text"]},"openai_api_key":{"type":"str","required":false,"placeholder":"","list":false,"show":true,"multiline":false,"fileTypes":[],"file_path":"","password":true,"name":"openai_api_key","display_name":"OpenAI API Key","advanced":false,"dynamic":false,"info":"","title_case":false,"input_types":["Text"]},"temperature":{"type":"float","required":false,"placeholder":"","list":false,"show":true,"multiline":false,"value":0.7,"fileTypes":[],"file_path":"","password":false,"name":"temperature","display_name":"Temperature","advanced":false,"dynamic":false,"info":"","rangeSpec":{"min":-1,"max":1,"step":0.1},"title_case":false},"_type":"CustomComponent"},"description":"`OpenAI` Chat large language models 
API.","icon":"OpenAI","base_classes":["Runnable","BaseLLM","Serializable","BaseLanguageModel","object","Generic","RunnableSerializable"],"display_name":"ChatOpenAI","documentation":"","custom_fields":{"max_tokens":null,"model_kwargs":null,"model_name":null,"openai_api_base":null,"openai_api_key":null,"temperature":null},"output_types":["BaseLanguageModel","BaseLLM"],"field_formatters":{},"pinned":false,"beta":true},"id":"ChatOpenAISpecs-stxRM"},"selected":false,"width":384,"height":666,"positionAbsolute":{"x":18.226716205350385,"y":432.6122491402193},"dragging":false}],"edges":[{"source":"ChatOpenAISpecs-stxRM","sourceHandle":"{œbaseClassesœ:[œRunnableœ,œBaseLLMœ,œSerializableœ,œBaseLanguageModelœ,œobjectœ,œGenericœ,œRunnableSerializableœ],œdataTypeœ:œChatOpenAISpecsœ,œidœ:œChatOpenAISpecs-stxRMœ}","target":"AgentInitializer-tPdJw","targetHandle":"{œfieldNameœ:œllmœ,œidœ:œAgentInitializer-tPdJwœ,œinputTypesœ:null,œtypeœ:œBaseLanguageModelœ}","data":{"targetHandle":{"fieldName":"llm","id":"AgentInitializer-tPdJw","inputTypes":null,"type":"BaseLanguageModel"},"sourceHandle":{"baseClasses":["Runnable","BaseLLM","Serializable","BaseLanguageModel","object","Generic","RunnableSerializable"],"dataType":"ChatOpenAISpecs","id":"ChatOpenAISpecs-stxRM"}},"style":{"stroke":"#555"},"className":"stroke-foreground 
stroke-connection","id":"reactflow__edge-ChatOpenAISpecs-stxRM{œbaseClassesœ:[œRunnableœ,œBaseLLMœ,œSerializableœ,œBaseLanguageModelœ,œobjectœ,œGenericœ,œRunnableSerializableœ],œdataTypeœ:œChatOpenAISpecsœ,œidœ:œChatOpenAISpecs-stxRMœ}-AgentInitializer-tPdJw{œfieldNameœ:œllmœ,œidœ:œAgentInitializer-tPdJwœ,œinputTypesœ:null,œtypeœ:œBaseLanguageModelœ}"},{"source":"PythonFunctionTool-RfJui","sourceHandle":"{œbaseClassesœ:[œBaseToolœ,œToolœ],œdataTypeœ:œPythonFunctionToolœ,œidœ:œPythonFunctionTool-RfJuiœ}","target":"AgentInitializer-tPdJw","targetHandle":"{œfieldNameœ:œtoolsœ,œidœ:œAgentInitializer-tPdJwœ,œinputTypesœ:null,œtypeœ:œToolœ}","data":{"targetHandle":{"fieldName":"tools","id":"AgentInitializer-tPdJw","inputTypes":null,"type":"Tool"},"sourceHandle":{"baseClasses":["BaseTool","Tool"],"dataType":"PythonFunctionTool","id":"PythonFunctionTool-RfJui"}},"style":{"stroke":"#555"},"className":"stroke-foreground stroke-connection","id":"reactflow__edge-PythonFunctionTool-RfJui{œbaseClassesœ:[œBaseToolœ,œToolœ],œdataTypeœ:œPythonFunctionToolœ,œidœ:œPythonFunctionTool-RfJuiœ}-AgentInitializer-tPdJw{œfieldNameœ:œtoolsœ,œidœ:œAgentInitializer-tPdJwœ,œinputTypesœ:null,œtypeœ:œToolœ}"}],"viewport":{"x":37.63043052737157,"y":71.47518177614131,"zoom":0.5140569133280332}},"description":"Uncover Business Opportunities with NLP.","name":"Untitled document (20)","last_tested_version":"0.7.0a0","is_component":false} \ No newline at end of file +{ + "id": "8404c1fc-1bce-43b4-a8bc-3febea587fc8", + "data": { + "nodes": [ + { + "id": "PythonFunctionTool-RfJui", + "type": "genericNode", + "position": { "x": 117.54690105175428, "y": -84.2465475108354 }, + "data": { + "type": "PythonFunctionTool", + "node": { + "template": { + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "\ndef python_function(text: str) -> str:\n \"\"\"This is a default python function that returns the input text\"\"\"\n return 
text\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": false, + "dynamic": false, + "info": "", + "title_case": false + }, + "description": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "Returns the Text you send. This is a testing tool.", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "description", + "advanced": false, + "dynamic": false, + "info": "", + "title_case": false, + "input_types": ["Text"] + }, + "name": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": "PythonFunction", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "name", + "advanced": false, + "dynamic": false, + "info": "", + "title_case": false, + "input_types": ["Text"] + }, + "return_direct": { + "type": "bool", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "return_direct", + "advanced": false, + "dynamic": false, + "info": "", + "title_case": false + }, + "_type": "PythonFunctionTool" + }, + "description": "Python function to be executed.", + "base_classes": ["BaseTool", "Tool"], + "display_name": "PythonFunctionTool", + "documentation": "", + "custom_fields": {}, + "output_types": [], + "field_formatters": {}, + "pinned": false, + "beta": false + }, + "id": "PythonFunctionTool-RfJui" + }, + "selected": true, + "width": 384, + "height": 466, + "positionAbsolute": { "x": 117.54690105175428, "y": -84.2465475108354 }, + "dragging": false + }, + { + "id": "AgentInitializer-tPdJw", + "type": "genericNode", + "position": { "x": 677.68677055088, "y": 127.19859565276168 }, + "data": { + "type": "AgentInitializer", + "node": { + "template": { + "llm": { + "type": "BaseLanguageModel", + "required": true, + "placeholder": "", + 
"list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "llm", + "display_name": "Language Model", + "advanced": false, + "dynamic": false, + "info": "", + "title_case": false + }, + "memory": { + "type": "BaseChatMemory", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "memory", + "display_name": "Memory", + "advanced": false, + "dynamic": false, + "info": "", + "title_case": false + }, + "tools": { + "type": "Tool", + "required": true, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "tools", + "display_name": "Tools", + "advanced": false, + "dynamic": false, + "info": "", + "title_case": false + }, + "agent": { + "type": "str", + "required": true, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "value": "zero-shot-react-description", + "fileTypes": [], + "file_path": "", + "password": false, + "options": [ + "zero-shot-react-description", + "react-docstore", + "self-ask-with-search", + "conversational-react-description", + "chat-zero-shot-react-description", + "chat-conversational-react-description", + "structured-chat-zero-shot-react-description", + "openai-functions", + "openai-multi-functions", + "JsonAgent", + "CSVAgent", + "VectorStoreAgent", + "VectorStoreRouterAgent", + "SQLAgent" + ], + "name": "agent", + "display_name": "Agent Type", + "advanced": false, + "dynamic": false, + "info": "", + "title_case": false, + "input_types": ["Text"] + }, + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from typing import Callable, List, Optional, Union\n\nfrom langchain.agents import AgentExecutor, AgentType, initialize_agent, types\nfrom langflow import CustomComponent\nfrom 
langflow.field_typing import BaseChatMemory, BaseLanguageModel, Tool\n\n\nclass AgentInitializerComponent(CustomComponent):\n display_name: str = \"Agent Initializer\"\n description: str = \"Initialize a Langchain Agent.\"\n documentation: str = \"https://python.langchain.com/docs/modules/agents/agent_types/\"\n\n def build_config(self):\n agents = list(types.AGENT_TO_CLASS.keys())\n # field_type and required are optional\n return {\n \"agent\": {\"options\": agents, \"value\": agents[0], \"display_name\": \"Agent Type\"},\n \"max_iterations\": {\"display_name\": \"Max Iterations\", \"value\": 10},\n \"memory\": {\"display_name\": \"Memory\"},\n \"tools\": {\"display_name\": \"Tools\"},\n \"llm\": {\"display_name\": \"Language Model\"},\n \"code\": {\"advanced\": True},\n }\n\n def build(\n self,\n agent: str,\n llm: BaseLanguageModel,\n tools: List[Tool],\n max_iterations: int,\n memory: Optional[BaseChatMemory] = None,\n ) -> Union[AgentExecutor, Callable]:\n agent = AgentType(agent)\n if memory:\n return initialize_agent(\n tools=tools,\n llm=llm,\n agent=agent,\n memory=memory,\n return_intermediate_steps=True,\n handle_parsing_errors=True,\n max_iterations=max_iterations,\n )\n return initialize_agent(\n tools=tools,\n llm=llm,\n agent=agent,\n return_intermediate_steps=True,\n handle_parsing_errors=True,\n max_iterations=max_iterations,\n )\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "title_case": false + }, + "max_iterations": { + "type": "int", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": 10, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "max_iterations", + "display_name": "Max Iterations", + "advanced": false, + "dynamic": false, + "info": "", + "title_case": false + }, + "_type": "CustomComponent" + }, + "description": "Initialize a Langchain Agent.", + "base_classes": [ + 
"Runnable", + "Chain", + "Serializable", + "object", + "AgentExecutor", + "Generic", + "RunnableSerializable", + "Callable" + ], + "display_name": "Agent Initializer", + "documentation": "https://python.langchain.com/docs/modules/agents/agent_types/", + "custom_fields": { + "agent": null, + "llm": null, + "tools": null, + "max_iterations": null, + "memory": null + }, + "output_types": ["AgentExecutor", "Callable"], + "field_formatters": {}, + "pinned": false, + "beta": true + }, + "id": "AgentInitializer-tPdJw" + }, + "selected": false, + "width": 384, + "height": 522 + }, + { + "id": "ChatOpenAISpecs-stxRM", + "type": "genericNode", + "position": { "x": 18.226716205350385, "y": 432.6122491402193 }, + "data": { + "type": "ChatOpenAISpecs", + "node": { + "template": { + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from typing import Optional, Union\n\nfrom langchain.llms import BaseLLM\nfrom langchain_community.chat_models.openai import ChatOpenAI\nfrom langflow import CustomComponent\nfrom langflow.field_typing import BaseLanguageModel, NestedDict\n\n\nclass ChatOpenAIComponent(CustomComponent):\n display_name = \"ChatOpenAI\"\n description = \"`OpenAI` Chat large language models API.\"\n icon = \"OpenAI\"\n\n def build_config(self):\n return {\n \"max_tokens\": {\n \"display_name\": \"Max Tokens\",\n \"advanced\": False,\n \"required\": False,\n },\n \"model_kwargs\": {\n \"display_name\": \"Model Kwargs\",\n \"advanced\": True,\n \"required\": False,\n },\n \"model_name\": {\n \"display_name\": \"Model Name\",\n \"advanced\": False,\n \"required\": False,\n \"options\": [\n \"gpt-4-turbo-preview\",\n \"gpt-4-0125-preview\",\n \"gpt-4-1106-preview\",\n \"gpt-4-vision-preview\",\n \"gpt-3.5-turbo-0125\",\n \"gpt-3.5-turbo-1106\",\n ],\n },\n \"openai_api_base\": {\n \"display_name\": \"OpenAI API Base\",\n \"advanced\": False,\n \"required\": False,\n \"info\": (\n \"The base 
URL of the OpenAI API. Defaults to https://api.openai.com/v1.\\n\\n\"\n \"You can change this to use other APIs like JinaChat, LocalAI and Prem.\"\n ),\n },\n \"openai_api_key\": {\n \"display_name\": \"OpenAI API Key\",\n \"advanced\": False,\n \"required\": False,\n \"password\": True,\n },\n \"temperature\": {\n \"display_name\": \"Temperature\",\n \"advanced\": False,\n \"required\": False,\n \"value\": 0.7,\n },\n }\n\n def build(\n self,\n max_tokens: Optional[int] = 256,\n model_kwargs: NestedDict = {},\n model_name: str = \"gpt-4-1106-preview\",\n openai_api_base: Optional[str] = None,\n openai_api_key: Optional[str] = None,\n temperature: float = 0.7,\n ) -> Union[BaseLanguageModel, BaseLLM]:\n if not openai_api_base:\n openai_api_base = \"https://api.openai.com/v1\"\n return ChatOpenAI(\n max_tokens=max_tokens,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=openai_api_key,\n temperature=temperature,\n )\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": false, + "dynamic": true, + "info": "", + "title_case": false + }, + "max_tokens": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": 256, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "max_tokens", + "display_name": "Max Tokens", + "advanced": false, + "dynamic": false, + "info": "", + "title_case": false + }, + "model_kwargs": { + "type": "NestedDict", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": {}, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "model_kwargs", + "display_name": "Model Kwargs", + "advanced": true, + "dynamic": false, + "info": "", + "title_case": false + }, + "model_name": { + "type": "str", + "required": false, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "value": "gpt-4-1106-preview", + 
"fileTypes": [], + "file_path": "", + "password": false, + "options": [ + "gpt-4-turbo-preview", + "gpt-4-0125-preview", + "gpt-4-1106-preview", + "gpt-4-vision-preview", + "gpt-3.5-turbo-0125", + "gpt-3.5-turbo-1106" + ], + "name": "model_name", + "display_name": "Model Name", + "advanced": false, + "dynamic": false, + "info": "", + "title_case": false, + "input_types": ["Text"] + }, + "openai_api_base": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "openai_api_base", + "display_name": "OpenAI API Base", + "advanced": false, + "dynamic": false, + "info": "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1.\n\nYou can change this to use other APIs like JinaChat, LocalAI and Prem.", + "title_case": false, + "input_types": ["Text"] + }, + "openai_api_key": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": true, + "name": "openai_api_key", + "display_name": "OpenAI API Key", + "advanced": false, + "dynamic": false, + "info": "", + "title_case": false, + "input_types": ["Text"] + }, + "temperature": { + "type": "float", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": 0.7, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "temperature", + "display_name": "Temperature", + "advanced": false, + "dynamic": false, + "info": "", + "rangeSpec": { "min": -1, "max": 1, "step": 0.1 }, + "title_case": false + }, + "_type": "CustomComponent" + }, + "description": "`OpenAI` Chat large language models API.", + "icon": "OpenAI", + "base_classes": [ + "Runnable", + "BaseLLM", + "Serializable", + "BaseLanguageModel", + "object", + "Generic", + "RunnableSerializable" + ], + "display_name": "ChatOpenAI", + "documentation": "", + 
"custom_fields": { + "max_tokens": null, + "model_kwargs": null, + "model_name": null, + "openai_api_base": null, + "openai_api_key": null, + "temperature": null + }, + "output_types": ["BaseLanguageModel", "BaseLLM"], + "field_formatters": {}, + "pinned": false, + "beta": true + }, + "id": "ChatOpenAISpecs-stxRM" + }, + "selected": false, + "width": 384, + "height": 666, + "positionAbsolute": { "x": 18.226716205350385, "y": 432.6122491402193 }, + "dragging": false + } + ], + "edges": [ + { + "source": "ChatOpenAISpecs-stxRM", + "sourceHandle": "{œbaseClassesœ:[œRunnableœ,œBaseLLMœ,œSerializableœ,œBaseLanguageModelœ,œobjectœ,œGenericœ,œRunnableSerializableœ],œdataTypeœ:œChatOpenAISpecsœ,œidœ:œChatOpenAISpecs-stxRMœ}", + "target": "AgentInitializer-tPdJw", + "targetHandle": "{œfieldNameœ:œllmœ,œidœ:œAgentInitializer-tPdJwœ,œinputTypesœ:null,œtypeœ:œBaseLanguageModelœ}", + "data": { + "targetHandle": { + "fieldName": "llm", + "id": "AgentInitializer-tPdJw", + "inputTypes": null, + "type": "BaseLanguageModel" + }, + "sourceHandle": { + "baseClasses": [ + "Runnable", + "BaseLLM", + "Serializable", + "BaseLanguageModel", + "object", + "Generic", + "RunnableSerializable" + ], + "dataType": "ChatOpenAISpecs", + "id": "ChatOpenAISpecs-stxRM" + } + }, + "style": { "stroke": "#555" }, + "className": "stroke-foreground stroke-connection", + "id": "reactflow__edge-ChatOpenAISpecs-stxRM{œbaseClassesœ:[œRunnableœ,œBaseLLMœ,œSerializableœ,œBaseLanguageModelœ,œobjectœ,œGenericœ,œRunnableSerializableœ],œdataTypeœ:œChatOpenAISpecsœ,œidœ:œChatOpenAISpecs-stxRMœ}-AgentInitializer-tPdJw{œfieldNameœ:œllmœ,œidœ:œAgentInitializer-tPdJwœ,œinputTypesœ:null,œtypeœ:œBaseLanguageModelœ}" + }, + { + "source": "PythonFunctionTool-RfJui", + "sourceHandle": "{œbaseClassesœ:[œBaseToolœ,œToolœ],œdataTypeœ:œPythonFunctionToolœ,œidœ:œPythonFunctionTool-RfJuiœ}", + "target": "AgentInitializer-tPdJw", + "targetHandle": 
"{œfieldNameœ:œtoolsœ,œidœ:œAgentInitializer-tPdJwœ,œinputTypesœ:null,œtypeœ:œToolœ}", + "data": { + "targetHandle": { + "fieldName": "tools", + "id": "AgentInitializer-tPdJw", + "inputTypes": null, + "type": "Tool" + }, + "sourceHandle": { + "baseClasses": ["BaseTool", "Tool"], + "dataType": "PythonFunctionTool", + "id": "PythonFunctionTool-RfJui" + } + }, + "style": { "stroke": "#555" }, + "className": "stroke-foreground stroke-connection", + "id": "reactflow__edge-PythonFunctionTool-RfJui{œbaseClassesœ:[œBaseToolœ,œToolœ],œdataTypeœ:œPythonFunctionToolœ,œidœ:œPythonFunctionTool-RfJuiœ}-AgentInitializer-tPdJw{œfieldNameœ:œtoolsœ,œidœ:œAgentInitializer-tPdJwœ,œinputTypesœ:null,œtypeœ:œToolœ}" + } + ], + "viewport": { + "x": 37.63043052737157, + "y": 71.47518177614131, + "zoom": 0.5140569133280332 + } + }, + "description": "Uncover Business Opportunities with NLP.", + "name": "Untitled document (20)", + "last_tested_version": "0.7.0a0", + "is_component": false +} diff --git a/tests/test_endpoints.py b/tests/test_endpoints.py index e133844d2..6a2f9cff4 100644 --- a/tests/test_endpoints.py +++ b/tests/test_endpoints.py @@ -29,10 +29,7 @@ def poll_task_status(client, headers, href, max_attempts=20, sleep_time=1): href, headers=headers, ) - if ( - task_status_response.status_code == 200 - and task_status_response.json()["status"] == "SUCCESS" - ): + if task_status_response.status_code == 200 and task_status_response.json()["status"] == "SUCCESS": return task_status_response.json() time.sleep(sleep_time) return None # Return None if task did not complete in time @@ -126,11 +123,7 @@ def created_api_key(active_user): ) db_manager = get_db_service() with session_getter(db_manager) as session: - if ( - existing_api_key := session.query(ApiKey) - .filter(ApiKey.api_key == api_key.api_key) - .first() - ): + if existing_api_key := session.query(ApiKey).filter(ApiKey.api_key == api_key.api_key).first(): return existing_api_key session.add(api_key) session.commit() @@ -296,11 
+289,7 @@ def test_get_all(client: TestClient, logged_in_headers): dir_reader = DirectoryReader(settings.COMPONENTS_PATH[0]) files = dir_reader.get_files() # json_response is a dict of dicts - all_names = [ - component_name - for _, components in response.json().items() - for component_name in components - ] + all_names = [component_name for _, components in response.json().items() for component_name in components] json_response = response.json() # We need to test the custom nodes assert len(all_names) > len(files) @@ -425,19 +414,13 @@ def test_various_prompts(client, prompt, expected_input_variables): def test_get_vertices_flow_not_found(client, logged_in_headers): - response = client.get( - "/api/v1/build/nonexistent_id/vertices", headers=logged_in_headers - ) - assert ( - response.status_code == 500 - ) # Or whatever status code you've set for invalid ID + response = client.get("/api/v1/build/nonexistent_id/vertices", headers=logged_in_headers) + assert response.status_code == 500 # Or whatever status code you've set for invalid ID def test_get_vertices(client, added_flow_with_prompt_and_history, logged_in_headers): flow_id = added_flow_with_prompt_and_history["id"] - response = client.get( - f"/api/v1/build/{flow_id}/vertices", headers=logged_in_headers - ) + response = client.get(f"/api/v1/build/{flow_id}/vertices", headers=logged_in_headers) assert response.status_code == 200 assert "ids" in response.json() # The response should contain the list in this order @@ -453,19 +436,13 @@ def test_get_vertices(client, added_flow_with_prompt_and_history, logged_in_head def test_build_vertex_invalid_flow_id(client, logged_in_headers): - response = client.post( - "/api/v1/build/nonexistent_id/vertices/vertex_id", headers=logged_in_headers - ) + response = client.post("/api/v1/build/nonexistent_id/vertices/vertex_id", headers=logged_in_headers) assert response.status_code == 500 -def test_build_vertex_invalid_vertex_id( - client, added_flow_with_prompt_and_history, 
logged_in_headers -): +def test_build_vertex_invalid_vertex_id(client, added_flow_with_prompt_and_history, logged_in_headers): flow_id = added_flow_with_prompt_and_history["id"] - response = client.post( - f"/api/v1/build/{flow_id}/vertices/invalid_vertex_id", headers=logged_in_headers - ) + response = client.post(f"/api/v1/build/{flow_id}/vertices/invalid_vertex_id", headers=logged_in_headers) assert response.status_code == 500