Small bugfixes and improvements (#514)
# Pull Request Description ## Summary This Pull Request includes several code improvements, refactoring, feature additions, and documentation updates mainly focused on enhancing the functionality and maintainability of the codebase. ## Details ### Refactoring and Improvements: - **Commit (5ea20aa) by @ogabrielluiz**: Unnecessary indentation in the `getPythonCode` function within `constants.tsx` has been removed to improve code readability. - **Commit (6886828) by @ogabrielluiz**: - Changed the `load_flow_from_json` function signature in `process.py` to accept either a JSON file path or a JSON object. - Updated the import statement for `Chain` in `base.py` to be more explicit by importing it from `langchain.chains.base` instead of `langflow.graph.vertex.types`. - Removed a print statement from the `process_tweaks` function in `process.py`. - Added an optional `tweaks` parameter to the `load_flow_from_json` function. - Changed the return type of the `build` method in the `Graph` class from `List[Vertex]` to `Chain`. - **Commit (83c28dc) by @ogabrielluiz**: The `process_tweaks` function in `process.py` has been refactored to improve readability and maintainability. An `apply_tweaks` function was added to apply tweaks to the node, and a `validate_input` function was added for input validation. The function now raises a `ValueError` if the input is not in the expected format. - **Commit (11185af) by @ogabrielluiz**: Refactored the `fix_memory_inputs` function in `process.py` to improve readability by reducing nesting. The function now checks if the `langchain_object` has a `memory` attribute and if it is not `None` before proceeding. The try-except block was also refactored to reduce nesting. - **Commit (f180fa6) by @ogabrielluiz**: The input parameter in `process.py` has been changed to accept a `Path` object in addition to a string or dictionary, improving the flexibility of the function and allowing for easier file handling. 
### Features: - **Commit (8a5525f) by @ogabrielluiz**: The `getPythonCode` function in `constants.tsx` now accepts a `tweaks` parameter which is used to customize the behavior of the flow. A `buildTweaks` function generates the `tweaks` object which is passed to the `load_flow_from_json` function. - **Commit (c5d6f48) by @ogabrielluiz**: Added a `build_and_install` target to the `Makefile` to build the package and install it without running it. This is useful for building and installing on a remote server. Additionally, fixed the path to the `langflow.main` module in the backend target of the `Makefile`. ### Documentation: - **Commit (76a1265) by @ogabrielluiz**: Updated the Langflow API usage example in `README.md` to reflect the new API endpoint URL. Added a new optional `tweaks` parameter to the `run_flow` function for flow customization and updated the example code accordingly. ### Other: - **Commit (0dcd93e) by @lucaseduoli**: Added Discord and Twitter icons, and GitHub stars. - **Commit (0dcd93e) by @lucaseduoli**: Fixed the Twitter link.
This commit is contained in:
commit
fa2d2fbc97
10 changed files with 209 additions and 97 deletions
7
Makefile
7
Makefile
|
|
@@ -44,13 +44,18 @@ install_backend:
|
|||
|
||||
backend:
|
||||
make install_backend
|
||||
poetry run uvicorn langflow.main:app --port 7860 --reload --log-level debug
|
||||
poetry run uvicorn src.backend.langflow.main:app --port 7860 --reload --log-level debug
|
||||
|
||||
build_and_run:
|
||||
echo 'Removing dist folder'
|
||||
rm -rf dist
|
||||
make build && poetry run pip install dist/*.tar.gz && poetry run langflow
|
||||
|
||||
build_and_install:
|
||||
echo 'Removing dist folder'
|
||||
rm -rf dist
|
||||
make build && poetry run pip install dist/*.tar.gz
|
||||
|
||||
build_frontend:
|
||||
cd src/frontend && CI='' npm run build
|
||||
cp -r src/frontend/build src/backend/langflow/frontend
|
||||
|
|
|
|||
44
README.md
44
README.md
|
|
@@ -50,11 +50,11 @@ Alternatively, click the **"Open in Cloud Shell"** button below to launch Google
|
|||
|
||||
Langflow integrates with langchain-serve to provide a one-command deployment to Jina AI Cloud.
|
||||
|
||||
Start by installing `langchain-serve` with
|
||||
Start by installing `langchain-serve` with
|
||||
|
||||
```bash
|
||||
pip install -U langchain-serve
|
||||
```
|
||||
```
|
||||
|
||||
Then, run:
|
||||
|
||||
|
|
@@ -109,24 +109,38 @@ You can use Langflow directly on your browser, or use the API endpoints on Jina
|
|||
<summary>Show API usage (with python)</summary>
|
||||
|
||||
```python
|
||||
import json
|
||||
import requests
|
||||
import requests
|
||||
|
||||
FLOW_PATH = "Time_traveller.json"
|
||||
BASE_API_URL = "https://langflow-e3dd8820ec.wolf.jina.ai/api/v1/predict"
|
||||
FLOW_ID = "864c4f98-2e59-468b-8e13-79cd8da07468"
|
||||
# You can tweak the flow by adding a tweaks dictionary
|
||||
# e.g {"OpenAI-XXXXX": {"model_name": "gpt-4"}}
|
||||
TWEAKS = {
|
||||
"ChatOpenAI-g4jEr": {},
|
||||
"ConversationChain-UidfJ": {}
|
||||
}
|
||||
|
||||
# HOST = 'http://localhost:7860'
|
||||
HOST = 'https://langflow-f1ed20e309.wolf.jina.ai'
|
||||
API_URL = f'{HOST}/predict'
|
||||
def run_flow(message: str, flow_id: str, tweaks: dict = None) -> dict:
|
||||
"""
|
||||
Run a flow with a given message and optional tweaks.
|
||||
|
||||
def predict(message):
|
||||
with open(FLOW_PATH, "r") as f:
|
||||
json_data = json.load(f)
|
||||
payload = {'exported_flow': json_data, 'message': message}
|
||||
response = requests.post(API_URL, json=payload)
|
||||
return response.json()
|
||||
:param message: The message to send to the flow
|
||||
:param flow_id: The ID of the flow to run
|
||||
:param tweaks: Optional tweaks to customize the flow
|
||||
:return: The JSON response from the flow
|
||||
"""
|
||||
api_url = f"{BASE_API_URL}/{flow_id}"
|
||||
|
||||
payload = {"message": message}
|
||||
|
||||
predict('Take me to 1920s Bangalore')
|
||||
if tweaks:
|
||||
payload["tweaks"] = tweaks
|
||||
|
||||
response = requests.post(api_url, json=payload)
|
||||
return response.json()
|
||||
|
||||
# Setup any tweaks you want to apply to the flow
|
||||
print(run_flow("Your message", flow_id=FLOW_ID, tweaks=TWEAKS))
|
||||
```
|
||||
|
||||
```json
|
||||
|
|
|
|||
14
poetry.lock
generated
14
poetry.lock
generated
|
|
@@ -3812,14 +3812,14 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.3.1)", "pytest-
|
|||
|
||||
[[package]]
|
||||
name = "pluggy"
|
||||
version = "1.1.0"
|
||||
version = "1.0.0"
|
||||
description = "plugin and hook calling mechanisms for python"
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
python-versions = ">=3.6"
|
||||
files = [
|
||||
{file = "pluggy-1.1.0-py3-none-any.whl", hash = "sha256:d81d19a3a88d82ed06998353ce5d5c02587ef07ee2d808ae63904ab0ccef0087"},
|
||||
{file = "pluggy-1.1.0.tar.gz", hash = "sha256:c500b592c5512df35622e4faf2135aa0b7e989c7d31344194b4afb9d5e47b1bf"},
|
||||
{file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"},
|
||||
{file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
|
|
@@ -5036,14 +5036,14 @@ files = [
|
|||
|
||||
[[package]]
|
||||
name = "setuptools"
|
||||
version = "67.8.0"
|
||||
version = "68.0.0"
|
||||
description = "Easily download, build, install, upgrade, and uninstall Python packages"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "setuptools-67.8.0-py3-none-any.whl", hash = "sha256:5df61bf30bb10c6f756eb19e7c9f3b473051f48db77fddbe06ff2ca307df9a6f"},
|
||||
{file = "setuptools-67.8.0.tar.gz", hash = "sha256:62642358adc77ffa87233bc4d2354c4b2682d214048f500964dbe760ccedf102"},
|
||||
{file = "setuptools-68.0.0-py3-none-any.whl", hash = "sha256:11e52c67415a381d10d6b462ced9cfb97066179f0e871399e006c4ab101fc85f"},
|
||||
{file = "setuptools-68.0.0.tar.gz", hash = "sha256:baf1fdb41c6da4cd2eae722e135500da913332ab3f2f5c7d33af9b492acb5235"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
|
|
|
|||
|
|
@@ -1,6 +1,6 @@
|
|||
[tool.poetry]
|
||||
name = "langflow"
|
||||
version = "0.1.3"
|
||||
version = "0.1.4"
|
||||
description = "A Python package with a built-in web application"
|
||||
authors = ["Logspace <contact@logspace.ai>"]
|
||||
maintainers = [
|
||||
|
|
|
|||
|
|
@@ -11,6 +11,7 @@ from langflow.graph.vertex.types import (
|
|||
from langflow.interface.tools.constants import FILE_TOOLS
|
||||
from langflow.utils import payload
|
||||
from langflow.utils.logger import logger
|
||||
from langchain.chains.base import Chain
|
||||
|
||||
|
||||
class Graph:
|
||||
|
|
@@ -99,7 +100,7 @@ class Graph:
|
|||
]
|
||||
return connected_nodes
|
||||
|
||||
def build(self) -> List[Vertex]:
|
||||
def build(self) -> Chain:
|
||||
"""Builds the graph."""
|
||||
# Get root node
|
||||
root_node = payload.get_root_node(self)
|
||||
|
|
|
|||
|
|
@@ -1,5 +1,6 @@
|
|||
import contextlib
|
||||
import io
|
||||
from pathlib import Path
|
||||
from langchain.schema import AgentAction
|
||||
import json
|
||||
from langflow.interface.run import (
|
||||
|
|
@@ -10,8 +11,7 @@ from langflow.interface.run import (
|
|||
from langflow.utils.logger import logger
|
||||
from langflow.graph import Graph
|
||||
|
||||
|
||||
from typing import Any, Dict, List, Tuple
|
||||
from typing import Any, Dict, List, Optional, Tuple, Union
|
||||
|
||||
|
||||
def fix_memory_inputs(langchain_object):
|
||||
|
|
@@ -20,22 +20,23 @@ def fix_memory_inputs(langchain_object):
|
|||
object's input variables. If so, it does nothing. Otherwise, it gets a possible new memory key using the
|
||||
get_memory_key function and updates the memory keys using the update_memory_keys function.
|
||||
"""
|
||||
if hasattr(langchain_object, "memory") and langchain_object.memory is not None:
|
||||
try:
|
||||
if langchain_object.memory.memory_key in langchain_object.input_variables:
|
||||
return
|
||||
except AttributeError:
|
||||
input_variables = (
|
||||
langchain_object.prompt.input_variables
|
||||
if hasattr(langchain_object, "prompt")
|
||||
else langchain_object.input_keys
|
||||
)
|
||||
if langchain_object.memory.memory_key in input_variables:
|
||||
return
|
||||
if not hasattr(langchain_object, "memory") or langchain_object.memory is None:
|
||||
return
|
||||
try:
|
||||
if langchain_object.memory.memory_key in langchain_object.input_variables:
|
||||
return
|
||||
except AttributeError:
|
||||
input_variables = (
|
||||
langchain_object.prompt.input_variables
|
||||
if hasattr(langchain_object, "prompt")
|
||||
else langchain_object.input_keys
|
||||
)
|
||||
if langchain_object.memory.memory_key in input_variables:
|
||||
return
|
||||
|
||||
possible_new_mem_key = get_memory_key(langchain_object)
|
||||
if possible_new_mem_key is not None:
|
||||
update_memory_keys(langchain_object, possible_new_mem_key)
|
||||
possible_new_mem_key = get_memory_key(langchain_object)
|
||||
if possible_new_mem_key is not None:
|
||||
update_memory_keys(langchain_object, possible_new_mem_key)
|
||||
|
||||
|
||||
def format_actions(actions: List[Tuple[AgentAction, str]]) -> str:
|
||||
|
|
@@ -131,57 +132,108 @@ def process_graph_cached(data_graph: Dict[str, Any], message: str):
|
|||
return {"result": str(result), "thought": thought.strip()}
|
||||
|
||||
|
||||
def load_flow_from_json(path: str, build=True):
|
||||
"""Load flow from json file"""
|
||||
# This is done to avoid circular imports
|
||||
def load_flow_from_json(
|
||||
input: Union[Path, str, dict], tweaks: Optional[dict] = None, build=True
|
||||
):
|
||||
"""
|
||||
Load flow from a JSON file or a JSON object.
|
||||
|
||||
with open(path, "r", encoding="utf-8") as f:
|
||||
flow_graph = json.load(f)
|
||||
data_graph = flow_graph["data"]
|
||||
nodes = data_graph["nodes"]
|
||||
# Substitute ZeroShotPrompt with PromptTemplate
|
||||
# nodes = replace_zero_shot_prompt_with_prompt_template(nodes)
|
||||
# Add input variables
|
||||
# nodes = payload.extract_input_variables(nodes)
|
||||
:param input: JSON file path or JSON object
|
||||
:param tweaks: Optional tweaks to be processed
|
||||
:param build: If True, build the graph, otherwise return the graph object
|
||||
:return: Langchain object or Graph object depending on the build parameter
|
||||
"""
|
||||
# If input is a file path, load JSON from the file
|
||||
if isinstance(input, (str, Path)):
|
||||
with open(input, "r", encoding="utf-8") as f:
|
||||
flow_graph = json.load(f)
|
||||
# If input is a dictionary, assume it's a JSON object
|
||||
elif isinstance(input, dict):
|
||||
flow_graph = input
|
||||
else:
|
||||
raise TypeError(
|
||||
"Input must be either a file path (str) or a JSON object (dict)"
|
||||
)
|
||||
|
||||
# Nodes, edges and root node
|
||||
edges = data_graph["edges"]
|
||||
graph_data = flow_graph["data"]
|
||||
if tweaks is not None:
|
||||
graph_data = process_tweaks(graph_data, tweaks)
|
||||
nodes = graph_data["nodes"]
|
||||
edges = graph_data["edges"]
|
||||
graph = Graph(nodes, edges)
|
||||
|
||||
if build:
|
||||
langchain_object = graph.build()
|
||||
|
||||
if hasattr(langchain_object, "verbose"):
|
||||
langchain_object.verbose = True
|
||||
|
||||
if hasattr(langchain_object, "return_intermediate_steps"):
|
||||
# https://github.com/hwchase17/langchain/issues/2068
|
||||
# Deactivating until we have a frontend solution
|
||||
# to display intermediate steps
|
||||
langchain_object.return_intermediate_steps = False
|
||||
|
||||
fix_memory_inputs(langchain_object)
|
||||
return langchain_object
|
||||
|
||||
return graph
|
||||
|
||||
|
||||
def process_tweaks(graph_data: Dict, tweaks: Dict):
|
||||
"""This function is used to tweak the graph data using the node id and the tweaks dict"""
|
||||
# the tweaks dict is a dict of dicts
|
||||
# the key is the node id and the value is a dict of the tweaks
|
||||
# the dict of tweaks contains the name of a certain parameter and the value to be tweaked
|
||||
def validate_input(
|
||||
graph_data: Dict[str, Any], tweaks: Dict[str, Dict[str, Any]]
|
||||
) -> List[Dict[str, Any]]:
|
||||
if not isinstance(graph_data, dict) or not isinstance(tweaks, dict):
|
||||
raise ValueError("graph_data and tweaks should be dictionaries")
|
||||
|
||||
nodes = graph_data.get("data", {}).get("nodes") or graph_data.get("nodes")
|
||||
|
||||
if not isinstance(nodes, list):
|
||||
raise ValueError(
|
||||
"graph_data should contain a list of nodes under 'data' key or directly under 'nodes' key"
|
||||
)
|
||||
|
||||
return nodes
|
||||
|
||||
|
||||
def apply_tweaks(node: Dict[str, Any], node_tweaks: Dict[str, Any]) -> None:
|
||||
template_data = node.get("data", {}).get("node", {}).get("template")
|
||||
|
||||
if not isinstance(template_data, dict):
|
||||
logger.warning(
|
||||
f"Template data for node {node.get('id')} should be a dictionary"
|
||||
)
|
||||
return
|
||||
|
||||
for tweak_name, tweak_value in node_tweaks.items():
|
||||
if tweak_name and tweak_value and tweak_name in template_data:
|
||||
template_data[tweak_name]["value"] = tweak_value
|
||||
|
||||
|
||||
def process_tweaks(
|
||||
graph_data: Dict[str, Any], tweaks: Dict[str, Dict[str, Any]]
|
||||
) -> Dict[str, Any]:
|
||||
"""
|
||||
This function is used to tweak the graph data using the node id and the tweaks dict.
|
||||
|
||||
:param graph_data: The dictionary containing the graph data. It must contain a 'data' key with
|
||||
'nodes' as its child or directly contain 'nodes' key. Each node should have an 'id' and 'data'.
|
||||
:param tweaks: A dictionary where the key is the node id and the value is a dictionary of the tweaks.
|
||||
The inner dictionary contains the name of a certain parameter as the key and the value to be tweaked.
|
||||
|
||||
:return: The modified graph_data dictionary.
|
||||
|
||||
:raises ValueError: If the input is not in the expected format.
|
||||
"""
|
||||
nodes = validate_input(graph_data, tweaks)
|
||||
|
||||
# We need to process the graph data to add the tweaks
|
||||
if "data" not in graph_data and "nodes" in graph_data:
|
||||
nodes = graph_data["nodes"]
|
||||
else:
|
||||
nodes = graph_data["data"]["nodes"]
|
||||
for node in nodes:
|
||||
node_id = node["id"]
|
||||
if node_id in tweaks:
|
||||
node_tweaks = tweaks[node_id]
|
||||
template_data = node["data"]["node"]["template"]
|
||||
for tweak_name, tweake_value in node_tweaks.items():
|
||||
if tweak_name in template_data:
|
||||
template_data[tweak_name]["value"] = tweake_value
|
||||
print(
|
||||
f"Something changed in node {node_id} with tweak {tweak_name} and value {tweake_value}"
|
||||
)
|
||||
if isinstance(node, dict) and isinstance(node.get("id"), str):
|
||||
node_id = node["id"]
|
||||
if node_tweaks := tweaks.get(node_id):
|
||||
apply_tweaks(node, node_tweaks)
|
||||
else:
|
||||
logger.warning(
|
||||
"Each node should be a dictionary with an 'id' key of type str"
|
||||
)
|
||||
|
||||
return graph_data
|
||||
|
|
|
|||
|
|
@@ -1,6 +1,6 @@
|
|||
import { BellIcon, Home, Users2 } from "lucide-react";
|
||||
import { useContext } from "react";
|
||||
import { FaGithub } from "react-icons/fa";
|
||||
import { useContext, useEffect, useState } from "react";
|
||||
import { FaDiscord, FaGithub, FaTwitter } from "react-icons/fa";
|
||||
import { Button } from "../ui/button";
|
||||
import { TabsContext } from "../../contexts/tabsContext";
|
||||
import AlertDropdown from "../../alerts/alertDropDown";
|
||||
|
|
@@ -11,6 +11,7 @@ import { typesContext } from "../../contexts/typesContext";
|
|||
import MenuBar from "./components/menuBar";
|
||||
import { Link, useLocation, useParams } from "react-router-dom";
|
||||
import { USER_PROJECTS_HEADER } from "../../constants";
|
||||
import { getRepoStars } from "../../controllers/API";
|
||||
|
||||
export default function Header() {
|
||||
const { flows, addFlow, tabId } = useContext(TabsContext);
|
||||
|
|
@@ -22,6 +23,16 @@ export default function Header() {
|
|||
const { notificationCenter, setNotificationCenter, setErrorData } =
|
||||
useContext(alertContext);
|
||||
const location = useLocation();
|
||||
|
||||
const [stars, setStars] = useState(null);
|
||||
|
||||
useEffect(() => {
|
||||
async function fetchStars() {
|
||||
const starsCount = await getRepoStars("logspace-ai", "langflow");
|
||||
setStars(starsCount);
|
||||
}
|
||||
fetchStars();
|
||||
}, []);
|
||||
return (
|
||||
<div className="w-full h-12 flex justify-between items-center border-b bg-muted">
|
||||
<div className="flex gap-2 justify-start items-center w-96">
|
||||
|
|
@@ -57,22 +68,35 @@ export default function Header() {
|
|||
</Link>
|
||||
</div>
|
||||
<div className="flex justify-end px-2 w-96">
|
||||
<div className="ml-auto mr-2 flex gap-5">
|
||||
<Button
|
||||
asChild
|
||||
variant="outline"
|
||||
className="text-gray-600 dark:text-gray-300 "
|
||||
>
|
||||
<div className="ml-auto mr-2 flex gap-5 items-center">
|
||||
<a
|
||||
href="https://github.com/logspace-ai/langflow"
|
||||
target="_blank"
|
||||
rel="noreferrer"
|
||||
className="flex"
|
||||
className="inline-flex items-center justify-center text-sm font-medium transition-colors focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 disabled:opacity-50 disabled:pointer-events-none ring-offset-background text-gray-600 dark:text-gray-300 border border-input hover:bg-accent hover:text-accent-foreground h-9 px-3 pr-0 rounded-md"
|
||||
>
|
||||
<FaGithub className="h-5 w-5 mr-2" />
|
||||
Join The Community
|
||||
Star
|
||||
<div className="ml-2 flex text-sm bg-background rounded-md rounded-l-none border px-2 h-9 -mr-px items-center justify-center">
|
||||
{stars}
|
||||
</div>
|
||||
</a>
|
||||
<a
|
||||
href="https://twitter.com/logspace_ai"
|
||||
target="_blank"
|
||||
rel="noreferrer"
|
||||
className="text-muted-foreground"
|
||||
>
|
||||
<FaTwitter className="h-5 w-5" />
|
||||
</a>
|
||||
<a
|
||||
href="https://discord.gg/EqksyE2EX9"
|
||||
target="_blank"
|
||||
rel="noreferrer"
|
||||
className="text-muted-foreground"
|
||||
>
|
||||
<FaDiscord className="h-5 w-5" />
|
||||
</a>
|
||||
</Button>
|
||||
{/* <button
|
||||
className="text-gray-600 hover:text-gray-500 dark:text-gray-300 dark:hover:text-gray-200"
|
||||
onClick={() => {
|
||||
|
|
@@ -110,12 +134,6 @@ export default function Header() {
|
|||
)}
|
||||
<BellIcon className="h-5 w-5" aria-hidden="true" />
|
||||
</button>
|
||||
{/* <button>
|
||||
<img
|
||||
src="https://github.com/shadcn.png"
|
||||
className="rounded-full w-8"
|
||||
/>
|
||||
</button> */}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
|
|
|||
|
|
@@ -121,11 +121,12 @@ export const getCurlCode = (flow: FlowType): string => {
|
|||
*/
|
||||
export const getPythonCode = (flow: FlowType): string => {
|
||||
const flowName = flow.name;
|
||||
const tweaks = buildTweaks(flow);
|
||||
return `from langflow import load_flow_from_json
|
||||
|
||||
flow = load_flow_from_json("${flowName}.json")
|
||||
# Now you can use it like any chain
|
||||
flow("Hey, have you heard of LangFlow?")`;
|
||||
TWEAKS = ${JSON.stringify(tweaks, null, 2)}
|
||||
flow = load_flow_from_json("${flowName}.json", tweaks=TWEAKS)
|
||||
# Now you can use it like any chain
|
||||
flow("Hey, have you heard of LangFlow?")`;
|
||||
};
|
||||
|
||||
/**
|
||||
|
|
|
|||
|
|
@@ -18,6 +18,18 @@ export async function getAll(): Promise<AxiosResponse<APIObjectType>> {
|
|||
return await axios.get(`/api/v1/all`);
|
||||
}
|
||||
|
||||
const GITHUB_API_URL = "https://api.github.com";
|
||||
|
||||
export async function getRepoStars(owner, repo) {
|
||||
try {
|
||||
const response = await axios.get(`${GITHUB_API_URL}/repos/${owner}/${repo}`);
|
||||
return response.data.stargazers_count;
|
||||
} catch (error) {
|
||||
console.error("Error fetching repository data:", error);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Sends data to the API for prediction.
|
||||
*
|
||||
|
|
|
|||
|
|
@@ -14,6 +14,15 @@ def test_load_flow_from_json():
|
|||
assert isinstance(loaded, Chain)
|
||||
|
||||
|
||||
def test_load_flow_from_json_with_tweaks():
|
||||
"""Test loading a flow from a json file and applying tweaks"""
|
||||
tweaks = {"dndnode_82": {"model_name": "test model"}}
|
||||
loaded = load_flow_from_json(pytest.BASIC_EXAMPLE_PATH, tweaks=tweaks)
|
||||
assert loaded is not None
|
||||
assert isinstance(loaded, Chain)
|
||||
assert loaded.llm.model_name == "test model"
|
||||
|
||||
|
||||
def test_get_root_node():
|
||||
with open(pytest.BASIC_EXAMPLE_PATH, "r") as f:
|
||||
flow_graph = json.load(f)
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue