Update import statements for langchain_community packages

This commit is contained in:
Gabriel Luiz Freitas Almeida 2024-01-30 14:46:28 -03:00
commit 43e3baaa84
26 changed files with 94 additions and 96 deletions

View file

@@ -3,7 +3,7 @@ from typing import List, Optional
from langchain.agents.agent import AgentExecutor
from langchain.agents.agent_toolkits.conversational_retrieval.openai_functions import _get_default_system_message
from langchain.agents.openai_functions_agent.base import OpenAIFunctionsAgent
from langchain.chat_models import ChatOpenAI
from langchain_community.chat_models import ChatOpenAI
from langchain.memory.token_buffer import ConversationTokenBufferMemory
from langchain.prompts import SystemMessagePromptTemplate
from langchain.prompts.chat import MessagesPlaceholder

View file

@@ -1,6 +1,6 @@
from typing import Optional
from langchain.chat_models.anthropic import ChatAnthropic
from langchain_community.chat_models.anthropic import ChatAnthropic
from langchain.llms.base import BaseLanguageModel
from pydantic.v1 import SecretStr

View file

@@ -1,7 +1,7 @@
from typing import Optional
from langflow import CustomComponent
from langchain.llms.base import BaseLanguageModel
from langchain.chat_models.azure_openai import AzureChatOpenAI
from langchain_community.chat_models.azure_openai import AzureChatOpenAI
class AzureChatOpenAIComponent(CustomComponent):

View file

@@ -1,6 +1,6 @@
from typing import Optional
from langchain.chat_models.baidu_qianfan_endpoint import QianfanChatEndpoint
from langchain_community.chat_models.baidu_qianfan_endpoint import QianfanChatEndpoint
from langchain.llms.base import BaseLLM
from pydantic.v1 import SecretStr

View file

@@ -1,7 +1,9 @@
from langflow import CustomComponent
from langchain.vectorstores import VectorStore
from typing import Union, Callable
from typing import Callable, Union
from langchain.agents.agent_toolkits.vectorstore.toolkit import VectorStoreInfo
from langchain_community.vectorstores import VectorStore
from langflow import CustomComponent
class VectorStoreInfoComponent(CustomComponent):

View file

@@ -3,9 +3,8 @@ from typing import List, Optional, Union
import chromadb # type: ignore
from langchain.embeddings.base import Embeddings
from langchain.schema import BaseRetriever, Document
from langchain.vectorstores.chroma import Chroma
from langchain.vectorstores.base import VectorStore
from langchain_community.vectorstores import VectorStore
from langchain_community.vectorstores.chroma import Chroma
from langflow import CustomComponent

View file

@@ -1,12 +1,10 @@
from langflow import CustomComponent
from langchain_community.vectorstores.faiss import FAISS
from typing import List, Union
from langchain.schema import BaseRetriever
from langchain.vectorstores.base import VectorStore
from langflow.field_typing import (
Document,
Embeddings,
)
from langchain_community.vectorstores import VectorStore
from langchain_community.vectorstores.faiss import FAISS
from langflow import CustomComponent
from langflow.field_typing import Document, Embeddings
class FAISSComponent(CustomComponent):

View file

@@ -1,6 +1,8 @@
from typing import List, Optional
from langchain_community.vectorstores import MongoDBAtlasVectorSearch
from langflow import CustomComponent
from langchain.vectorstores import MongoDBAtlasVectorSearch
from typing import Optional, List
from langflow.field_typing import (
Document,
Embeddings,

View file

@@ -3,7 +3,7 @@ from typing import List, Optional, Union
import pinecone # type: ignore
from langchain.schema import BaseRetriever
from langchain.vectorstores.base import VectorStore
from langchain_community.vectorstores import VectorStore
from langchain_community.vectorstores.pinecone import Pinecone
from langflow import CustomComponent

View file

@@ -1,9 +1,10 @@
from langflow import CustomComponent
from langchain_community.vectorstores.qdrant import Qdrant
from typing import Optional, List, Union
from langflow.field_typing import Document, Embeddings, NestedDict
from typing import List, Optional, Union
from langchain.schema import BaseRetriever
from langchain.vectorstores.base import VectorStore
from langchain_community.vectorstores import VectorStore
from langchain_community.vectorstores.qdrant import Qdrant
from langflow import CustomComponent
from langflow.field_typing import Document, Embeddings, NestedDict
class QdrantComponent(CustomComponent):

View file

@@ -1,10 +1,10 @@
from typing import Optional
from langflow import CustomComponent
from langchain.vectorstores.redis import Redis
from langchain.schema import Document
from langchain.vectorstores.base import VectorStore
from langchain.embeddings.base import Embeddings
from langchain.schema import Document
from langchain_community.vectorstores import VectorStore
from langchain_community.vectorstores.redis import Redis
from langflow import CustomComponent
class RedisComponent(CustomComponent):

View file

@@ -1,13 +1,10 @@
from langflow import CustomComponent
from typing import List, Union
from langchain_community.vectorstores.supabase import SupabaseVectorStore
from langflow.field_typing import (
Document,
Embeddings,
NestedDict,
)
from langchain.schema import BaseRetriever
from langchain.vectorstores.base import VectorStore
from langchain_community.vectorstores import VectorStore
from langchain_community.vectorstores.supabase import SupabaseVectorStore
from langflow import CustomComponent
from langflow.field_typing import Document, Embeddings, NestedDict
from supabase.client import Client, create_client

View file

@@ -1,14 +1,13 @@
from typing import Optional, Union, List
from langflow import CustomComponent
import tempfile
import urllib.request
import urllib
import urllib.request
from typing import List, Optional, Union
from langchain.vectorstores import Vectara
from langchain.schema import Document
from langchain.vectorstores.base import VectorStore
from langchain.schema import BaseRetriever
from langchain.embeddings import FakeEmbeddings
from langchain.schema import BaseRetriever, Document
from langchain_community.vectorstores import Vectara, VectorStore
from langflow import CustomComponent
class VectaraComponent(CustomComponent):

View file

@@ -3,8 +3,7 @@ from typing import Optional, Union
import weaviate # type: ignore
from langchain.embeddings.base import Embeddings
from langchain.schema import BaseRetriever, Document
from langchain.vectorstores import Weaviate
from langchain.vectorstores.base import VectorStore
from langchain_community.vectorstores import VectorStore, Weaviate
from langflow import CustomComponent

View file

@@ -1,10 +1,10 @@
from typing import Optional, List
from langflow import CustomComponent
from typing import List, Optional
from langchain.vectorstores.pgvector import PGVector
from langchain.schema import Document
from langchain.vectorstores.base import VectorStore
from langchain.embeddings.base import Embeddings
from langchain.schema import Document
from langchain_community.vectorstores import VectorStore
from langchain_community.vectorstores.pgvector import PGVector
from langflow import CustomComponent
class PostgresqlVectorComponent(CustomComponent):

View file

@@ -12,7 +12,7 @@ from langchain.schema.language_model import BaseLanguageModel
from langchain.schema.memory import BaseMemory
from langchain.text_splitter import TextSplitter
from langchain.tools import Tool
from langchain.vectorstores.base import VectorStore
from langchain_community.vectorstores import VectorStore
# Type alias for more complex dicts
NestedDict = Dict[str, Union[str, Dict]]

View file

@@ -2,14 +2,10 @@ from typing import Any, Optional
from langchain.agents import AgentExecutor, ZeroShotAgent
from langchain.agents.agent_toolkits import (
SQLDatabaseToolkit,
VectorStoreInfo,
VectorStoreRouterToolkit,
VectorStoreToolkit,
)
from langchain.agents.agent_toolkits.json.prompt import JSON_PREFIX, JSON_SUFFIX
from langchain.agents.agent_toolkits.json.toolkit import JsonToolkit
from langchain.agents.agent_toolkits.sql.prompt import SQL_PREFIX, SQL_SUFFIX
from langchain.agents.agent_toolkits.vectorstore.prompt import PREFIX as VECTORSTORE_PREFIX
from langchain.agents.agent_toolkits.vectorstore.prompt import ROUTER_PREFIX as VECTORSTORE_ROUTER_PREFIX
from langchain.agents.mrkl.prompt import FORMAT_INSTRUCTIONS
@@ -17,9 +13,14 @@ from langchain.base_language import BaseLanguageModel
from langchain.chains.llm import LLMChain
from langchain.sql_database import SQLDatabase
from langchain.tools.sql_database.prompt import QUERY_CHECKER
from langchain_community.agent_toolkits import SQLDatabaseToolkit
from langchain_community.agent_toolkits.json.prompt import JSON_PREFIX, JSON_SUFFIX
from langchain_community.agent_toolkits.json.toolkit import JsonToolkit
from langchain_community.agent_toolkits.sql.prompt import SQL_PREFIX, SQL_SUFFIX
from langchain_experimental.agents.agent_toolkits.pandas.prompt import PREFIX as PANDAS_PREFIX
from langchain_experimental.agents.agent_toolkits.pandas.prompt import SUFFIX_WITH_DF as PANDAS_SUFFIX
from langchain_experimental.tools.python.tool import PythonAstREPLTool
from langflow.interface.base import CustomAgentExecutor

View file

@@ -1,9 +1,10 @@
import inspect
from typing import Any
from langchain import document_loaders, embeddings, llms, memory, requests, text_splitter
from langchain.agents import agent_toolkits
from langchain.chat_models import AzureChatOpenAI, ChatAnthropic, ChatOpenAI, ChatVertexAI
from langchain import llms, memory, requests, text_splitter
from langchain_community.chat_models import AzureChatOpenAI, ChatAnthropic, ChatOpenAI, ChatVertexAI
from langchain_community import agent_toolkits, document_loaders, embeddings
from langflow.interface.agents.custom import CUSTOM_AGENTS
from langflow.interface.chains.custom import CUSTOM_CHAINS
from langflow.interface.importing.utils import import_class
@@ -24,14 +25,14 @@ llm_type_to_cls_dict["vertexai-chat"] = ChatVertexAI  # type: ignore
# Toolkits
toolkit_type_to_loader_dict: dict[str, Any] = {
toolkit_name: import_class(f"langchain.agents.agent_toolkits.{toolkit_name}")
toolkit_name: import_class(f"langchain_community.agent_toolkits.{toolkit_name}")
# if toolkit_name is lower case it is a loader
for toolkit_name in agent_toolkits.__all__
if toolkit_name.islower()
}
toolkit_type_to_cls_dict: dict[str, Any] = {
toolkit_name: import_class(f"langchain.agents.agent_toolkits.{toolkit_name}")
toolkit_name: import_class(f"langchain_community.agent_toolkits.{toolkit_name}")
# if toolkit_name is not lower case it is a class
for toolkit_name in agent_toolkits.__all__
if not toolkit_name.islower()
@@ -47,13 +48,14 @@ wrapper_type_to_cls_dict: dict[str, Any] = {wrapper.__name__: wrapper for wrappe
# Embeddings
embedding_type_to_cls_dict: dict[str, Any] = {
embedding_name: import_class(f"langchain.embeddings.{embedding_name}") for embedding_name in embeddings.__all__
embedding_name: import_class(f"langchain_community.embeddings.{embedding_name}")
for embedding_name in embeddings.__all__
}
# Document Loaders
documentloaders_type_to_cls_dict: dict[str, Any] = {
documentloader_name: import_class(f"langchain.document_loaders.{documentloader_name}")
documentloader_name: import_class(f"langchain_community.document_loaders.{documentloader_name}")
for documentloader_name in document_loaders.__all__
}

View file

@@ -6,9 +6,9 @@ from typing import Any, Type
from langchain.agents import Agent
from langchain.base_language import BaseLanguageModel
from langchain.chains.base import Chain
from langchain.chat_models.base import BaseChatModel
from langchain.prompts import PromptTemplate
from langchain.tools import BaseTool
from langchain_core.language_models.chat_models import BaseChatModel
from langflow.interface.custom.custom_component import CustomComponent
from langflow.interface.wrappers.base import wrapper_creator
from langflow.utils import validate
@@ -71,7 +71,7 @@ def import_output_parser(output_parser: str) -> Any:
def import_chat_llm(llm: str) -> BaseChatModel:
"""Import chat llm from llm name"""
return import_class(f"langchain.chat_models.{llm}")
return import_class(f"langchain_community.chat_models.{llm}")
def import_retriever(retriever: str) -> Any:
@@ -148,17 +148,17 @@ def import_chain(chain: str) -> Type[Chain]:
def import_embedding(embedding: str) -> Any:
"""Import embedding from embedding name"""
return import_class(f"langchain.embeddings.{embedding}")
return import_class(f"langchain_community.embeddings.{embedding}")
def import_vectorstore(vectorstore: str) -> Any:
"""Import vectorstore from vectorstore name"""
return import_class(f"langchain.vectorstores.{vectorstore}")
return import_class(f"langchain_community.vectorstores.{vectorstore}")
def import_documentloader(documentloader: str) -> Any:
"""Import documentloader from documentloader name"""
return import_class(f"langchain.document_loaders.{documentloader}")
return import_class(f"langchain_community.document_loaders.{documentloader}")
def import_textsplitter(textsplitter: str) -> Any:
@@ -169,8 +169,8 @@ def import_textsplitter(textsplitter: str) -> Any:
def import_utility(utility: str) -> Any:
"""Import utility from utility name"""
if utility == "SQLDatabase":
return import_class(f"langchain.sql_database.{utility}")
return import_class(f"langchain.utilities.{utility}")
return import_class(f"langchain_community.sql_database.{utility}")
return import_class(f"langchain_community.utilities.{utility}")
def get_function(code):

View file

@@ -10,7 +10,7 @@ from langchain.agents.tools import BaseTool
from langchain.chains.base import Chain
from langchain.document_loaders.base import BaseLoader
from langchain.schema import Document
from langchain.vectorstores.base import VectorStore
from langchain_community.vectorstores import VectorStore
from loguru import logger
from pydantic import ValidationError

View file

@@ -1,18 +1,18 @@
from typing import Any, Callable, Dict, Type
from langchain.vectorstores import (
Pinecone,
ElasticsearchStore,
Qdrant,
Chroma,
FAISS,
Weaviate,
SupabaseVectorStore,
MongoDBAtlasVectorSearch,
)
from langchain.schema import Document
import os
from typing import Any, Callable, Dict, Type
import orjson
from langchain.schema import Document
from langchain_community.vectorstores import (
FAISS,
Chroma,
ElasticsearchStore,
MongoDBAtlasVectorSearch,
Pinecone,
Qdrant,
SupabaseVectorStore,
Weaviate,
)
def docs_in_params(params: dict) -> bool:
@@ -27,8 +27,8 @@ def initialize_mongodb(class_object: Type[MongoDBAtlasVectorSearch], params: dic
MONGODB_ATLAS_CLUSTER_URI = params.pop("mongodb_atlas_cluster_uri")
if not MONGODB_ATLAS_CLUSTER_URI:
raise ValueError("Mongodb atlas cluster uri must be provided in the params")
from pymongo import MongoClient
import certifi
from pymongo import MongoClient
client: MongoClient = MongoClient(MONGODB_ATLAS_CLUSTER_URI, tlsCAFile=certifi.where())
db_name = params.pop("db_name", None)

View file

@@ -1,14 +1,13 @@
from typing import Any, ClassVar, Dict, List, Optional, Type
from langchain import retrievers
from loguru import logger
from langflow.interface.base import LangChainTypeCreator
from langflow.interface.importing.utils import import_class
from langflow.services.deps import get_settings_service
from langflow.template.frontend_node.retrievers import RetrieverFrontendNode
from loguru import logger
from langflow.utils.util import build_template_from_method, build_template_from_class
from langflow.utils.util import build_template_from_class, build_template_from_method
class RetrieverCreator(LangChainTypeCreator):
@@ -27,7 +26,7 @@ class RetrieverCreator(LangChainTypeCreator):
def type_to_loader_dict(self) -> Dict:
if self.type_dict is None:
self.type_dict: dict[str, Any] = {
retriever_name: import_class(f"langchain.retrievers.{retriever_name}")
retriever_name: import_class(f"langchain_community.retrievers.{retriever_name}")
for retriever_name in retrievers.__all__
}
return self.type_dict

View file

@@ -1,6 +1,6 @@
from typing import Dict, List, Optional, Type
from langchain import utilities
from langchain_community import utilities
from loguru import logger
from langflow.custom.customs import get_custom_nodes
@@ -30,7 +30,7 @@ class UtilityCreator(LangChainTypeCreator):
self.type_dict = {}
for utility_name in utilities.__all__:
try:
imported = import_class(f"langchain.utilities.{utility_name}")
imported = import_class(f"langchain_community.utilities.{utility_name}")
self.type_dict[utility_name] = imported
except Exception:
pass

View file

@@ -1,13 +1,12 @@
from typing import Any, Dict, List, Optional, Type
from langchain import vectorstores
from loguru import logger
from langflow.interface.base import LangChainTypeCreator
from langflow.interface.importing.utils import import_class
from langflow.services.deps import get_settings_service
from langflow.template.frontend_node.vectorstores import VectorStoreFrontendNode
from loguru import logger
from langflow.utils.util import build_template_from_method
@@ -22,7 +21,7 @@ class VectorstoreCreator(LangChainTypeCreator):
def type_to_loader_dict(self) -> Dict:
if self.type_dict is None:
self.type_dict: dict[str, Any] = {
vectorstore_name: import_class(f"langchain.vectorstores.{vectorstore_name}")
vectorstore_name: import_class(f"langchain_community.vectorstores.{vectorstore_name}")
for vectorstore_name in vectorstores.__all__
}
return self.type_dict

View file

@@ -1,9 +1,9 @@
from typing import ClassVar, Dict, List, Optional
from langchain.utilities import requests, sql_database
from langchain_community.utilities import requests, sql_database
from loguru import logger
from langflow.interface.base import LangChainTypeCreator
from loguru import logger
from langflow.utils.util import build_template_from_class, build_template_from_method

View file

@@ -4,7 +4,7 @@ from typing import Any, Coroutine, Dict, List, Optional, Tuple, Union
from langchain.agents import AgentExecutor
from langchain.chains.base import Chain
from langchain.schema import AgentAction, Document
from langchain.vectorstores.base import VectorStore
from langchain_community.vectorstores import VectorStore
from langchain_core.messages import AIMessage
from langchain_core.runnables.base import Runnable
from langflow.graph.graph.base import Graph