refactor(langflow): reorder imports in multiple files

This commit reorders imports in multiple files to follow PEP8 guidelines
and improve code readability. No functional changes were made.
This commit is contained in:
Gabriel Almeida 2023-05-02 19:49:32 -03:00 committed by Gabriel Luiz Freitas Almeida
commit 9d3098f3e2
15 changed files with 28 additions and 20 deletions

View file

@@ -1,4 +1,4 @@
from langflow.interface.loading import load_flow_from_json
from langflow.cache import cache_manager
from langflow.interface.loading import load_flow_from_json
__all__ = ["load_flow_from_json", "cache_manager"]

View file

@@ -1,4 +1,5 @@
from typing import Any
from langchain.callbacks.base import AsyncCallbackHandler
from langflow.api.schemas import ChatResponse

View file

@@ -1,9 +1,12 @@
import asyncio
from typing import Dict, List
from collections import defaultdict
from fastapi import WebSocket
import json
from collections import defaultdict
from typing import Dict, List
from fastapi import WebSocket
from langflow.api.schemas import ChatMessage, ChatResponse, FileResponse
from langflow.cache import cache_manager
from langflow.cache.manager import Subject
from langflow.interface.run import (
get_result_and_steps,
@@ -11,7 +14,6 @@ from langflow.interface.run import (
)
from langflow.interface.utils import pil_to_base64, try_setting_streaming_options
from langflow.utils.logger import logger
from langflow.cache import cache_manager
class ChatHistory(Subject):

View file

@@ -1,4 +1,5 @@
from typing import Any, Union
from pydantic import BaseModel, validator

View file

@@ -2,13 +2,13 @@ import base64
import contextlib
import functools
import hashlib
import json
import os
import tempfile
from collections import OrderedDict
from pathlib import Path
from typing import Any, Dict
import dill # type: ignore
CACHE: Dict[str, Any] = {}

View file

@@ -1,7 +1,8 @@
from contextlib import contextmanager
from typing import Any, Awaitable, Callable, List, Optional
from PIL import Image
import pandas as pd
from PIL import Image
class Subject:

View file

@@ -26,9 +26,9 @@ from langchain.agents.agent_toolkits.vectorstore.prompt import (
)
from langchain.agents.mrkl.prompt import FORMAT_INSTRUCTIONS
from langchain.agents.mrkl.prompt import FORMAT_INSTRUCTIONS as SQL_FORMAT_INSTRUCTIONS
from langchain.base_language import BaseLanguageModel
from langchain.llms.base import BaseLLM
from langchain.memory.chat_memory import BaseChatMemory
from langchain.base_language import BaseLanguageModel
from langchain.sql_database import SQLDatabase
from langchain.tools.python.tool import PythonAstREPLTool
from langchain.tools.sql_database.prompt import QUERY_CHECKER

View file

@@ -17,6 +17,7 @@ from langchain.callbacks.base import BaseCallbackManager
from langchain.chains.loading import load_chain_from_config
from langchain.llms.base import BaseLLM
from langchain.llms.loading import load_llm_from_config
from pydantic import ValidationError
from langflow.interface.agents.custom import CUSTOM_AGENTS
from langflow.interface.importing.utils import import_by_type
@@ -25,7 +26,6 @@ from langflow.interface.toolkits.base import toolkits_creator
from langflow.interface.types import get_type_list
from langflow.interface.utils import load_file_into_dict
from langflow.utils import util, validate
from pydantic import ValidationError
def instantiate_class(node_type: str, base_type: str, params: Dict) -> Any:

View file

@@ -1,6 +1,7 @@
import contextlib
import io
from typing import Any, Dict
from chromadb.errors import NotEnoughElementsException # type: ignore
from langflow.cache.base import compute_dict_hash, load_cache, memoize_dict

View file

@@ -1,14 +1,15 @@
import base64
from io import BytesIO
import json
import os
from PIL.Image import Image
from io import BytesIO
import yaml
from langchain.callbacks.manager import AsyncCallbackManager
from langchain.chat_models import AzureChatOpenAI, ChatOpenAI
from langchain.llms import AzureOpenAI, OpenAI
from langflow.api.callback import StreamingLLMCallbackHandler
from PIL.Image import Image
import yaml
from langflow.api.callback import StreamingLLMCallbackHandler
def load_file_into_dict(file_path: str) -> dict:

View file

@@ -1,9 +1,9 @@
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from langflow.api.chat import router as chat_router
from langflow.api.endpoints import router as endpoints_router
from langflow.api.validate import router as validate_router
from langflow.api.chat import router as chat_router
def create_app():

View file

@@ -1,7 +1,7 @@
from functools import wraps
import importlib
import inspect
import re
from functools import wraps
from typing import Dict, Optional
from docstring_parser import parse # type: ignore

View file

@@ -1,11 +1,10 @@
import json
from pathlib import Path
from typing import AsyncGenerator
from httpx import AsyncClient
import pytest
from fastapi.testclient import TestClient
from httpx import AsyncClient
def pytest_configure():

View file

@@ -1,8 +1,9 @@
import pytest
from PIL import Image
import pandas as pd
from io import StringIO
import pandas as pd
import pytest
from langflow.cache.manager import CacheManager
from PIL import Image
@pytest.fixture

View file

@@ -1,5 +1,6 @@
import json
from unittest.mock import patch
from fastapi.testclient import TestClient