From 08b12b3e2212878a0c9016705f90b34c5f4913e3 Mon Sep 17 00:00:00 2001
From: Eugene Yurtsev
Date: Wed, 20 Dec 2023 16:24:27 -0500
Subject: [PATCH] Update README.md with chat widget information (#340)

Add information about chat widgets
---
README.md | 64 +++++++++++++++++++++++++++++++++++++++++++++++++++++++
1 file changed, 64 insertions(+)
diff --git a/README.md b/README.md
index 8b12fa12..558f9ed7 100644
--- a/README.md
+++ b/README.md
@@ -420,6 +420,19 @@ type Widget = {
};
```
+
+### Available Widgets
+
+Currently, only two widgets can be specified manually by the user:
+
+1. File Upload Widget
+2. Chat History Widget
+
+See below for more information about these widgets.
+
+All other widgets on the playground UI are created and managed automatically by the
+playground based on the config schema of the Runnable. When you create Configurable
+Runnables, the playground creates the appropriate widgets for you to control their behavior.
+
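+For example, here is a minimal sketch of a Configurable Runnable (the model, field id,
+and route path below are illustrative assumptions) for which the playground would render
+a temperature control automatically:
+
+```python
+from fastapi import FastAPI
+from langchain.chat_models import ChatOpenAI
+from langchain.schema.runnable import ConfigurableField
+
+from langserve import add_routes
+
+app = FastAPI()
+
+# Expose `temperature` as a configurable field; the playground reads the resulting
+# config schema and renders a control for it automatically.
+model = ChatOpenAI(temperature=0).configurable_fields(
+    temperature=ConfigurableField(
+        id="temperature",
+        name="LLM Temperature",
+        description="Sampling temperature for the model.",
+    )
+)
+
+add_routes(app, model, path="/configurable_model")
+```
+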
#### File Upload Widget
Allows creation of a file upload input in the UI playground for files
@@ -454,6 +467,57 @@ Example widget:
+#### Chat Widget
+
+See the [widget example](https://github.com/langchain-ai/langserve/tree/main/examples/widgets/server.py).
+
+To define a chat widget, pass `"type": "chat"` in the widget configuration.
+
+* `"input"` is the JSONPath to the field in the *Request* that contains the new input message.
+* `"output"` is the JSONPath to the field in the *Response* that contains the new output message(s).
+* Don't specify these fields if the entire input or output should be used as is (e.g., if the output is a list of chat messages).
+
+Here's a snippet:
+
+```python
+from typing import List, Tuple
+
+from fastapi import FastAPI
+from langchain.chat_models import ChatOpenAI
+from langchain.pydantic_v1 import Field
+from langchain.schema import AIMessage, BaseMessage, HumanMessage
+from langchain.schema.runnable import RunnableLambda, RunnableParallel
+
+from langserve import CustomUserType, add_routes
+
+app = FastAPI()
+
+
+# Inherit from CustomUserType so the server keeps the payload as a pydantic
+# model instead of decoding it into a dict.
+class ChatHistory(CustomUserType):
+ chat_history: List[Tuple[str, str]] = Field(
+ ...,
+ examples=[[("human input", "ai response")]],
+ extra={"widget": {"type": "chat", "input": "question", "output": "answer"}},
+ )
+ question: str
+
+
+def _format_to_messages(input: ChatHistory) -> List[BaseMessage]:
+ """Format the input to a list of messages."""
+ history = input.chat_history
+ user_input = input.question
+
+ messages = []
+
+ for human, ai in history:
+ messages.append(HumanMessage(content=human))
+ messages.append(AIMessage(content=ai))
+ messages.append(HumanMessage(content=user_input))
+ return messages
+
+
+model = ChatOpenAI()
+chat_model = RunnableParallel({"answer": (RunnableLambda(_format_to_messages) | model)})
+add_routes(
+ app,
+ chat_model.with_types(input_type=ChatHistory),
+ config_keys=["configurable"],
+ path="/chat",
+)
+```
+
+Example widget:
+
+
+
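+As a quick way to exercise the route outside the playground, the endpoint above can also
+be called from Python with `RemoteRunnable` (the host and port below are assumptions):
+
+```python
+from langserve import RemoteRunnable
+
+chat = RemoteRunnable("http://localhost:8000/chat")
+
+# The payload must match the `ChatHistory` schema declared on the server.
+response = chat.invoke(
+    {
+        "chat_history": [("Hello", "Hi, how can I help you?")],
+        "question": "What is LangServe?",
+    }
+)
+print(response["answer"])
+```
+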
### Enabling / Disabling Endpoints (LangServe >=0.0.33)