feat: add llm and chains

This commit is contained in:
Ibis Prevedello 2023-02-22 16:26:39 -03:00
commit 4620a29024
3 changed files with 21 additions and 7 deletions

View file

@@ -1,5 +1,5 @@
#! /bin/bash
poetry remove langchain
docker build -t ibiscp/expert:v0.0.12 . && docker push ibiscp/expert:v0.0.12
docker build -t ibiscp/expert:v0.0.13 . && docker push ibiscp/expert:v0.0.13
poetry add --editable ../langchain

View file

@@ -11,8 +11,8 @@ openai = "^0.26.5"
fastapi = "^0.91.0"
uvicorn = "^0.20.0"
beautifulsoup4 = "^4.11.2"
langchain = {path = "../langchain", develop = true}
google-search-results = "^2.4.1"
langchain = {path = "../langchain", develop = true}
[tool.poetry.group.dev.dependencies]

View file

@@ -3,6 +3,9 @@ import signature
import list_endpoints
import payload
from langchain.agents.loading import load_agent_executor_from_config
from langchain.chains.loading import load_chain_from_config
from langchain.llms.loading import load_llm_from_config
from langchain.prompts.loading import load_prompt_from_config
from typing import Any
@@ -81,12 +84,23 @@ def get_load(data: dict[str, Any]):
if extracted_json["_type"] in type_list["agents"]:
loaded = load_agent_executor_from_config(extracted_json)
return loaded.run(message)
return {"result": loaded.run(message)}
elif extracted_json["_type"] in type_list["chains"]:
loaded = load_chain_from_config(extracted_json)
elif extracted_json["_type"] in type_list["prompts"]:
loaded = load_prompt_from_config(extracted_json)
print(loaded.format(product=''))
return extracted_json
return {"result": loaded.run(message)}
elif extracted_json["_type"] in type_list["llms"]:
loaded = load_llm_from_config(extracted_json)
return {"result": loaded(message)}
else:
return {"result": "Error: Type should be either agent, chain or llm"}
# elif extracted_json["_type"] in type_list["prompts"]:
# loaded = load_prompt_from_config(extracted_json)
# print(loaded.format(product=''))
# return {'result': loaded.format(product=message)}
# if type in a["prompts"]: