Skip to content

Commit

Permalink
Add support for older chat input schemas to chat playground (#526)
Browse files Browse the repository at this point in the history
  • Loading branch information
jacoblee93 authored Mar 12, 2024
1 parent 4bf7b0f commit 1428ac0
Show file tree
Hide file tree
Showing 10 changed files with 178 additions and 57 deletions.
29 changes: 29 additions & 0 deletions CONTRIBUTING.md
Original file line number Diff line number Diff line change
Expand Up @@ -49,3 +49,32 @@ To run linting for this project:
```sh
make lint
```

## Frontend Playground Development

Here are a few tips to keep in mind when developing the LangServe playgrounds:

### Setup

Switch directories to `langserve/playground` or `langserve/chat_playground`, then run `yarn` to install required
dependencies. `yarn dev` will start the playground at `http://localhost:5173/____LANGSERVE_BASE_URL/` in dev mode.

You can run one of the chains in the `examples/` repo using `poetry run python path/to/file.py`.

### Setting CORS

You may need to add the following to an example route when developing the playground in dev mode to handle CORS:

```python
from fastapi.middleware.cors import CORSMiddleware

# Set all CORS enabled origins
app.add_middleware(
CORSMiddleware,
allow_origins=["*"],
allow_credentials=True,
allow_methods=["*"],
allow_headers=["*"],
expose_headers=["*"],
)
```
10 changes: 8 additions & 2 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -334,8 +334,14 @@ runnable and share a link with the configuration:

LangServe also supports a chat-focused playground that you can opt into and use under `/my_runnable/playground/`.
Unlike the general playground, only certain types of runnables are supported - the runnable's input schema must
be a `dict` with a single key, and that key's value must be a list of chat messages. The runnable
can return either an `AIMessage` or a string.
be a `dict` with either:

- a single key, and that key's value must be a list of chat messages.
- two keys, one whose value is a list of messages, and the other representing the most recent message.

We recommend you use the first format.

The runnable must also return either an `AIMessage` or a string.

To enable it, you must set `playground_type="chat",` when adding your route. Here's an example:

Expand Down
56 changes: 56 additions & 0 deletions examples/chat_playground/legacy_input/server.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,56 @@
#!/usr/bin/env python
"""Example of a simple chatbot that just passes current conversation
state back and forth between server and client.

Demonstrates the "legacy" two-key chat playground input schema: one key
holding the list of prior conversation messages and a second string key
holding the most recent user input.
"""
from typing import List, Union

from fastapi import FastAPI
from langchain.chat_models import ChatAnthropic
from langchain_core.messages import AIMessage, HumanMessage, SystemMessage
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder

from langserve import add_routes
from langserve.pydantic_v1 import BaseModel, Field

app = FastAPI(
    title="LangChain Server",
    version="1.0",
    description="Spin up a simple api server using Langchain's Runnable interfaces",
)


# Declare a chain: prior conversation turns are injected through the
# "messages" placeholder and the newest user turn through the "{input}"
# template slot.
prompt = ChatPromptTemplate.from_messages(
    [
        ("system", "You are a helpful, professional assistant named Cob."),
        MessagesPlaceholder(variable_name="messages"),
        ("human", "{input}"),
    ]
)

chain = prompt | ChatAnthropic(model="claude-2")


class InputChat(BaseModel):
    """Input for the chat endpoint."""

    # Prior turns of the conversation (all messages before the newest one).
    messages: List[Union[HumanMessage, AIMessage, SystemMessage]] = Field(
        ...,
        description="The chat messages representing the current conversation.",
    )

    # The most recent user message as plain text.
    # NOTE(review): no Field description here, unlike `messages` — consider
    # adding one for a nicer generated schema.
    input: str


# `with_types` pins the request schema to InputChat so the playground can
# detect the two-key input shape; playground_type="chat" opts this route
# into the chat-focused playground UI.
add_routes(
    app,
    chain.with_types(input_type=InputChat),
    enable_feedback_endpoint=True,
    enable_public_trace_link_endpoint=True,
    playground_type="chat",
)

if __name__ == "__main__":
    import uvicorn

    # Serve locally; the playground should then be reachable under
    # http://localhost:8000/playground/ — confirm against add_routes defaults.
    uvicorn.run(app, host="localhost", port=8000)
12 changes: 0 additions & 12 deletions examples/chat_playground/server.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,6 @@
from typing import List, Union

from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from langchain.chat_models import ChatAnthropic
from langchain_core.messages import AIMessage, HumanMessage, SystemMessage
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
Expand All @@ -20,17 +19,6 @@
)


# Set all CORS enabled origins
app.add_middleware(
CORSMiddleware,
allow_origins=["*"],
allow_credentials=True,
allow_methods=["*"],
allow_headers=["*"],
expose_headers=["*"],
)


# Declare a chain
prompt = ChatPromptTemplate.from_messages(
[
Expand Down
1 change: 1 addition & 0 deletions langserve/chat_playground/dist/assets/index-434ff580.css

Large diffs are not rendered by default.

1 change: 0 additions & 1 deletion langserve/chat_playground/dist/assets/index-b47ed17e.css

This file was deleted.

Large diffs are not rendered by default.

4 changes: 2 additions & 2 deletions langserve/chat_playground/dist/index.html
Original file line number Diff line number Diff line change
Expand Up @@ -5,8 +5,8 @@
<link rel="icon" href="/____LANGSERVE_BASE_URL/favicon.ico" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>Chat Playground</title>
<script type="module" crossorigin src="/____LANGSERVE_BASE_URL/assets/index-c5f8c3dc.js"></script>
<link rel="stylesheet" href="/____LANGSERVE_BASE_URL/assets/index-b47ed17e.css">
<script type="module" crossorigin src="/____LANGSERVE_BASE_URL/assets/index-d9089d96.js"></script>
<link rel="stylesheet" href="/____LANGSERVE_BASE_URL/assets/index-434ff580.css">
</head>
<body>
<div id="root"></div>
Expand Down
47 changes: 35 additions & 12 deletions langserve/chat_playground/src/App.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -14,31 +14,54 @@ export function App() {
const outputDataSchema = outputSchema?.data?.schema;
const isLoading = inputProps === undefined || outputDataSchema === undefined;
const inputKeys = Object.keys(inputProps ?? {});
const isSupported = isLoading || (
const inputSchemaSupported = (
inputKeys.length === 1 &&
inputProps[inputKeys[0]].type === "array" &&
(
outputDataSchema.anyOf?.find((option) => option.properties?.type?.enum?.includes("ai")) ||
outputDataSchema.type === "string"
inputProps?.[inputKeys[0]].type === "array"
) || (
inputKeys.length === 2 && (
(
inputProps?.[inputKeys[0]].type === "array" ||
inputProps?.[inputKeys[1]].type === "string"
) || (
inputProps?.[inputKeys[0]].type === "string" ||
inputProps?.[inputKeys[1]].type === "array"
)
)
);
const outputSchemaSupported = (
outputDataSchema?.anyOf?.find((option) => option.properties?.type?.enum?.includes("ai")) ||
outputDataSchema?.type === "string"
);
const isSupported = isLoading || (inputSchemaSupported && outputSchemaSupported);
return (
<div className="flex items-center flex-col text-ls-black bg-background">
<AppCallbackContext.Provider value={context}>
{isSupported
? <ChatWindow
startStream={startStream}
stopStream={stopStream}
inputKey={inputKeys[0]}
messagesInputKey={inputProps?.[inputKeys[0]].type === "array" ? inputKeys[0] : inputKeys[1]}
inputKey={inputProps?.[inputKeys[0]].type === "string" ? inputKeys[0] : inputKeys[1]}
></ChatWindow>
: <div className="h-[100vh] w-[100vw] flex justify-center items-center text-xl">
<span className="text-center">
The chat playground is only supported for chains that take a single array of messages as input
<br/>
and return either an AIMessage or a string.
: <div className="h-[100vh] w-[100vw] flex justify-center items-center text-xl p-16">
<span>
The chat playground is only supported for chains that take one of the following as input:
<ul className="mt-8 list-disc ml-6">
<li>
a dict with a single key containing a list of messages
</li>
<li>
a dict with two keys: one a string input, one a list of messages
</li>
</ul>
<br />
and which return either an <code>AIMessage</code> or a string.
<br />
<br />
You can test this chain in the default LangServe playground instead.
<br />
<br />
You can test this chain in the default LangServe playground instead. Please set <code>playground_type="default"</code>.
To use the default playground, set <code>playground_type="default"</code> when adding the route in your backend.
</span>
</div>}
</AppCallbackContext.Provider>
Expand Down
27 changes: 23 additions & 4 deletions langserve/chat_playground/src/components/ChatWindow.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -33,9 +33,10 @@ export function isAIMessage(x: unknown): x is AIMessage {
export function ChatWindow(props: {
startStream: (input: unknown, config: unknown) => Promise<void>;
stopStream: (() => void) | undefined;
inputKey: string;
messagesInputKey: string;
inputKey?: string;
}) {
const { startStream, inputKey } = props;
const { startStream, messagesInputKey, inputKey } = props;

const [currentInputValue, setCurrentInputValue] = useState("");
const [isLoading, setIsLoading] = useState(false);
Expand All @@ -58,7 +59,18 @@ export function ChatWindow(props: {
setMessages(newMessages);
setCurrentInputValue("");
// TODO: Add config schema support
startStream({ [inputKey]: newMessages }, {});
if (inputKey === undefined) {
startStream({ [messagesInputKey]: newMessages }, {});
} else {
console.log({
[messagesInputKey]: newMessages.slice(0, -1),
[inputKey]: newMessages[newMessages.length - 1].content
})
startStream({
[messagesInputKey]: newMessages.slice(0, -1),
[inputKey]: newMessages[newMessages.length - 1].content
}, {});
}
};

const regenerateMessages = () => {
Expand All @@ -67,7 +79,14 @@ export function ChatWindow(props: {
}
setIsLoading(true);
// TODO: Add config schema support
startStream({ [inputKey]: messages }, {});
if (inputKey === undefined) {
startStream({ [messagesInputKey]: messages }, {});
} else {
startStream({
[messagesInputKey]: messages.slice(0, -1),
[inputKey]: messages[messages.length - 1]
}, {});
}
};

useStreamCallback("onStart", () => {
Expand Down

0 comments on commit 1428ac0

Please sign in to comment.