From ffde79fd5fef99e07bc258f5f126f78f0ff716da Mon Sep 17 00:00:00 2001
From: Mendon Kissling <59585235+mendonk@users.noreply.github.com>
Date: Fri, 26 Apr 2024 10:05:30 -0400
Subject: [PATCH] qa-and-memory-content
---
docs/docs/guides/document-qa.mdx | 82 ++++++++++++++++++++++++
docs/docs/guides/memory-chatbot.mdx | 99 +++++++++++++++++++++++++++++
2 files changed, 181 insertions(+)
diff --git a/docs/docs/guides/document-qa.mdx b/docs/docs/guides/document-qa.mdx
index e69de29bb..105753b99 100644
--- a/docs/docs/guides/document-qa.mdx
+++ b/docs/docs/guides/document-qa.mdx
@@ -0,0 +1,82 @@
+import ThemedImage from "@theme/ThemedImage";
+import useBaseUrl from "@docusaurus/useBaseUrl";
+import ZoomableImage from "/src/theme/ZoomableImage.js";
+import ReactPlayer from "react-player";
+import Admonition from "@theme/Admonition";
+
+# Document QA
+
+Build a question-and-answer chatbot with a document loaded from your local machine.
+
+## Prerequisites
+
+1. Install Langflow.
+```bash
+python -m pip install langflow --pre
+```
+
+2. Start a local Langflow instance with the Langflow CLI:
+```bash
+langflow run
+```
+Or start Langflow with Python:
+```bash
+python -m langflow run
+```
+
+Result:
+```
+│ Welcome to ⛓ Langflow │
+│ │
+│ Access http://127.0.0.1:7860 │
+│ Collaborate, and contribute at our GitHub Repo 🚀 │
+```
+
+Alternatively, go to [HuggingFace Spaces](https://docs.langflow.org/getting-started/hugging-face-spaces) or [Lightning.ai Studio](https://lightning.ai/ogabrielluiz-8j6t8/studios/langflow) for a pre-built Langflow test environment.
+
+3. Create an [OpenAI API key](https://platform.openai.com).
+
+## Create the Document QA project
+
+1. From the Langflow dashboard, click **New Project**.
+2. Select **Document QA**.
+3. The **Document QA** flow is created.
+
+
+
+This flow creates a basic chatbot with the **Chat Input**, **Prompt**, **OpenAI**, and **Chat Output** components.
+This chatbot is augmented with the **Files** component, which loads a file from your local machine into the **Prompt** component as `{Document}`.
+The **Prompt** component is instructed to answer questions based on the contents of `{Document}`.
+Including a file with the prompt gives the **OpenAI** component context it may not otherwise have access to.
+
+4. To create an environment variable for the **OpenAI** component, in the **OpenAI API Key** field, click the **Globe** button, and then click **Add New Variable**.
+ 1. In the **Variable Name** field, enter `openai_api_key`.
+ 2. In the **Value** field, paste your OpenAI API Key (`sk-...`).
+ 3. Click **Save Variable**.
+
+5. To select a document to load, in the **Files** component, click within the **Path** field.
+ 1. Select a local file, and then click **Open**.
+ 2. The file name appears in the field.
+
+    The file must have one of the extension types listed [here](https://github.com/langflow-ai/langflow/blob/dev/src/backend/base/langflow/base/data/utils.py#L13).
+
+
+## Run the Document QA flow
+
+1. Click the **Run** button.
+The **Interaction Panel** opens, where you can converse with your bot.
+2. Type a message and press Enter.
+For this example, we loaded an error log `.txt` file and asked, "What went wrong?"
+The bot responded:
+```
+The issue occurred during the execution of migrations in the application. Specifically, an error was raised by the Alembic library, indicating that new upgrade operations were detected that had not been accounted for in the existing migration scripts. The operation in question involved modifying the nullable property of a column (apikey, created_at) in the database, with details about the existing type (DATETIME()), existing server default, and other properties.
+```
+
+This result indicates that the bot received the loaded document and understood the context surrounding the vague question. It also correctly identified the issue in the error log, and followed up with appropriate troubleshooting suggestions. Nice!
diff --git a/docs/docs/guides/memory-chatbot.mdx b/docs/docs/guides/memory-chatbot.mdx
index e69de29bb..227b530ea 100644
--- a/docs/docs/guides/memory-chatbot.mdx
+++ b/docs/docs/guides/memory-chatbot.mdx
@@ -0,0 +1,99 @@
+import ThemedImage from "@theme/ThemedImage";
+import useBaseUrl from "@docusaurus/useBaseUrl";
+import ZoomableImage from "/src/theme/ZoomableImage.js";
+import ReactPlayer from "react-player";
+
+# Memory chatbot
+
+This flow extends the [basic prompting flow](./basic-prompting.mdx) to include chat memory for unique SessionIDs.
+
+## Prerequisites
+
+1. Install Langflow.
+```bash
+python -m pip install langflow --pre
+```
+
+2. Start a local Langflow instance with the Langflow CLI:
+```bash
+langflow run
+```
+Or start Langflow with Python:
+```bash
+python -m langflow run
+```
+
+Result:
+```
+│ Welcome to ⛓ Langflow │
+│ │
+│ Access http://127.0.0.1:7860 │
+│ Collaborate, and contribute at our GitHub Repo 🚀 │
+```
+
+Alternatively, go to [HuggingFace Spaces](https://docs.langflow.org/getting-started/hugging-face-spaces) or [Lightning.ai Studio](https://lightning.ai/ogabrielluiz-8j6t8/studios/langflow) for a pre-built Langflow test environment.
+
+3. Create an [OpenAI API key](https://platform.openai.com).
+
+## Create the memory chatbot project
+
+1. From the Langflow dashboard, click **New Project**.
+2. Select **Memory Chatbot**.
+3. The **Memory Chatbot** flow is created.
+
+
+
+This flow creates a basic chatbot with the **Chat Input**, **Prompt**, and **OpenAI** components.
+This chatbot is augmented with the **Chat Memory** component, which stores messages submitted via **Chat Input** and prepends them to subsequent prompts to OpenAI via `{context}`.
+The **Chat Memory** component gives the **OpenAI** component a memory of previous questions.
+
+4. To create an environment variable for the **OpenAI** component, in the **OpenAI API Key** field, click the **Globe** button, and then click **Add New Variable**.
+ 1. In the **Variable Name** field, enter `openai_api_key`.
+ 2. In the **Value** field, paste your OpenAI API Key (`sk-...`).
+ 3. Click **Save Variable**.
+
+## Run the memory chatbot flow
+
+1. Click the **Run** button.
+The **Interaction Panel** opens, where you can converse with your bot.
+2. Type a message and press Enter.
+The bot will respond according to the template in the **Prompt** component.
+3. Type more questions. In the **Outputs** log, your queries are logged in order. Up to 5 queries are stored by default. Try asking `What is the first subject I asked you about?` to see at what point the LLM's memory runs out.
+
+## Modify the Session ID field to have multiple conversations
+
+`SessionID` is a unique identifier in LangChain for a conversation session between a chatbot and a client.
+A `SessionID` is created when a conversation is initiated, and then associated with all subsequent messages during that session.
+
+In the **Memory Chatbot** flow you created, the **Chat Memory** component references past interactions with **Chat Input** by **Session ID**.
+You can demonstrate this by modifying the **Session ID** value to switch between conversation histories.
+
+1. In the **Session ID** field of the **Chat Memory** and **Chat Input** components, change the **Session ID** value from `MySessionID` to `AnotherSessionID`.
+2. Click the **Run** button to run your flow.
+In the **Interaction Panel**, you will have a new conversation. (You may need to clear the cache with the **Eraser** button).
+3. Type a few questions to your bot.
+4. In the **Session ID** field of the **Chat Memory** and **Chat Input** components, change the **Session ID** value back to `MySessionID`.
+5. Run your flow.
+The **Outputs** log of the **Interaction Panel** displays the history from your initial chat with `MySessionID`.
+
+## Store Session ID as a Langflow variable
+
+To store **Session ID** as a Langflow variable, in the **Session ID** field, click the **Globe** button, and then click **Add New Variable**.
+
+1. In the **Variable Name** field, enter a name like `customer_chat_emea`.
+2. In the **Value** field, enter a value like `1B5EBD79-6E9C-4533-B2C8-7E4FF29E983B`.
+3. Click **Save Variable**.
+4. Apply this variable to **Chat Input**.
+