Chat Backend with session_id (#310)
Example of chat history persisted on the backend.

This example uses just a session ID.

Will add another example that mimics auth for user identification once
new changes to RunnableWithMessageHistory become available.
eyurtsev authored Dec 11, 2023
1 parent 5ec5821 commit 3c6cce8
Showing 2 changed files with 382 additions and 0 deletions.
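In short: the server wires a RunnableWithMessageHistory around the chat chain and keys the stored history on a session_id that the client sends in the configurable part of the config. As a quick orientation before the files, here is a minimal client-side sketch of that pattern, assuming the example server below is running on localhost:8000 (the full notebook that follows does the same in more detail):

import uuid

from langserve import RemoteRunnable

chat = RemoteRunnable("http://localhost:8000/")
session_id = str(uuid.uuid4())

# Every call that carries the same session_id shares the same backend chat history.
chat.invoke(
    {"human_input": "my name is eugene. i like cats. what is your name?"},
    {"configurable": {"session_id": session_id}},
)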
277 changes: 277 additions & 0 deletions examples/chat_with_persistence/client.ipynb
@@ -0,0 +1,277 @@
{
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Chat History\n",
"\n",
"An example of a client interacting with a chatbot where message history is persisted on the backend."
]
},
{
"cell_type": "code",
"execution_count": 1,
"metadata": {
"tags": []
},
"outputs": [],
"source": [
"import uuid\n",
"from langserve import RemoteRunnable\n",
"\n",
"chat = RemoteRunnable(\"http://localhost:8000/\")"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Let's create a prompt composed of a system message and a human message."
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {
"tags": []
},
"outputs": [],
"source": [
"session_id = str(uuid.uuid4())"
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {
"tags": []
},
"outputs": [
{
"data": {
"text/plain": [
"AIMessage(content=\" Hello Eugene! My name is Claude. It's nice to meet another cat lover.\")"
]
},
"execution_count": 3,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"chat.invoke({\"human_input\": \"my name is eugene. i like cats. what is your name?\"}, {'configurable': { 'session_id': session_id } })"
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {
"tags": []
},
"outputs": [
{
"data": {
"text/plain": [
"AIMessage(content=' You told me your name is Eugene.')"
]
},
"execution_count": 4,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"chat.invoke({\"human_input\": \"what was my name?\"}, {'configurable': { 'session_id': session_id } })"
]
},
{
"cell_type": "code",
"execution_count": 5,
"metadata": {
"tags": []
},
"outputs": [
{
"data": {
"text/plain": [
"AIMessage(content=' You said you like cats.')"
]
},
"execution_count": 5,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"chat.invoke({\"human_input\": \"What animal do i like?\"}, {'configurable': { 'session_id': session_id } })"
]
},
{
"cell_type": "code",
"execution_count": 7,
"metadata": {
"tags": []
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"\n",
" Sure\n",
",\n",
" I\n",
"'d\n",
" be\n",
" happy\n",
" to\n",
" count\n",
" to\n",
" 10\n",
":\n",
"\n",
"\n",
"1\n",
",\n",
" 2\n",
",\n",
" 3\n",
",\n",
" 4\n",
",\n",
" 5\n",
",\n",
" 6\n",
",\n",
" 7\n",
",\n",
" 8\n",
",\n",
" 9\n",
",\n",
" 10\n"
]
}
],
"source": [
"for chunk in chat.stream({'human_input': \"Can you count till 10?\"}, {'configurable': { 'session_id': session_id } }):\n",
" print()\n",
" print(chunk.content, end='', flush=True)"
]
},
{
"cell_type": "code",
"execution_count": 12,
"metadata": {
"tags": []
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"\u001b[1;39m[\n",
" \u001b[1;39m{\n",
" \u001b[0m\u001b[34;1m\"type\"\u001b[0m\u001b[1;39m: \u001b[0m\u001b[0;32m\"human\"\u001b[0m\u001b[1;39m,\n",
" \u001b[0m\u001b[34;1m\"data\"\u001b[0m\u001b[1;39m: \u001b[0m\u001b[1;39m{\n",
" \u001b[0m\u001b[34;1m\"content\"\u001b[0m\u001b[1;39m: \u001b[0m\u001b[0;32m\"my name is eugene. i like cats. what is your name?\"\u001b[0m\u001b[1;39m,\n",
" \u001b[0m\u001b[34;1m\"additional_kwargs\"\u001b[0m\u001b[1;39m: \u001b[0m\u001b[1;39m{}\u001b[0m\u001b[1;39m,\n",
" \u001b[0m\u001b[34;1m\"type\"\u001b[0m\u001b[1;39m: \u001b[0m\u001b[0;32m\"human\"\u001b[0m\u001b[1;39m,\n",
" \u001b[0m\u001b[34;1m\"example\"\u001b[0m\u001b[1;39m: \u001b[0m\u001b[0;39mfalse\u001b[0m\u001b[1;39m\n",
" \u001b[1;39m}\u001b[0m\u001b[1;39m\n",
" \u001b[1;39m}\u001b[0m\u001b[1;39m,\n",
" \u001b[1;39m{\n",
" \u001b[0m\u001b[34;1m\"type\"\u001b[0m\u001b[1;39m: \u001b[0m\u001b[0;32m\"ai\"\u001b[0m\u001b[1;39m,\n",
" \u001b[0m\u001b[34;1m\"data\"\u001b[0m\u001b[1;39m: \u001b[0m\u001b[1;39m{\n",
" \u001b[0m\u001b[34;1m\"content\"\u001b[0m\u001b[1;39m: \u001b[0m\u001b[0;32m\" Hello Eugene! My name is Claude. It's nice to meet another cat lover.\"\u001b[0m\u001b[1;39m,\n",
" \u001b[0m\u001b[34;1m\"additional_kwargs\"\u001b[0m\u001b[1;39m: \u001b[0m\u001b[1;39m{}\u001b[0m\u001b[1;39m,\n",
" \u001b[0m\u001b[34;1m\"type\"\u001b[0m\u001b[1;39m: \u001b[0m\u001b[0;32m\"ai\"\u001b[0m\u001b[1;39m,\n",
" \u001b[0m\u001b[34;1m\"example\"\u001b[0m\u001b[1;39m: \u001b[0m\u001b[0;39mfalse\u001b[0m\u001b[1;39m\n",
" \u001b[1;39m}\u001b[0m\u001b[1;39m\n",
" \u001b[1;39m}\u001b[0m\u001b[1;39m,\n",
" \u001b[1;39m{\n",
" \u001b[0m\u001b[34;1m\"type\"\u001b[0m\u001b[1;39m: \u001b[0m\u001b[0;32m\"human\"\u001b[0m\u001b[1;39m,\n",
" \u001b[0m\u001b[34;1m\"data\"\u001b[0m\u001b[1;39m: \u001b[0m\u001b[1;39m{\n",
" \u001b[0m\u001b[34;1m\"content\"\u001b[0m\u001b[1;39m: \u001b[0m\u001b[0;32m\"what was my name?\"\u001b[0m\u001b[1;39m,\n",
" \u001b[0m\u001b[34;1m\"additional_kwargs\"\u001b[0m\u001b[1;39m: \u001b[0m\u001b[1;39m{}\u001b[0m\u001b[1;39m,\n",
" \u001b[0m\u001b[34;1m\"type\"\u001b[0m\u001b[1;39m: \u001b[0m\u001b[0;32m\"human\"\u001b[0m\u001b[1;39m,\n",
" \u001b[0m\u001b[34;1m\"example\"\u001b[0m\u001b[1;39m: \u001b[0m\u001b[0;39mfalse\u001b[0m\u001b[1;39m\n",
" \u001b[1;39m}\u001b[0m\u001b[1;39m\n",
" \u001b[1;39m}\u001b[0m\u001b[1;39m,\n",
" \u001b[1;39m{\n",
" \u001b[0m\u001b[34;1m\"type\"\u001b[0m\u001b[1;39m: \u001b[0m\u001b[0;32m\"ai\"\u001b[0m\u001b[1;39m,\n",
" \u001b[0m\u001b[34;1m\"data\"\u001b[0m\u001b[1;39m: \u001b[0m\u001b[1;39m{\n",
" \u001b[0m\u001b[34;1m\"content\"\u001b[0m\u001b[1;39m: \u001b[0m\u001b[0;32m\" You told me your name is Eugene.\"\u001b[0m\u001b[1;39m,\n",
" \u001b[0m\u001b[34;1m\"additional_kwargs\"\u001b[0m\u001b[1;39m: \u001b[0m\u001b[1;39m{}\u001b[0m\u001b[1;39m,\n",
" \u001b[0m\u001b[34;1m\"type\"\u001b[0m\u001b[1;39m: \u001b[0m\u001b[0;32m\"ai\"\u001b[0m\u001b[1;39m,\n",
" \u001b[0m\u001b[34;1m\"example\"\u001b[0m\u001b[1;39m: \u001b[0m\u001b[0;39mfalse\u001b[0m\u001b[1;39m\n",
" \u001b[1;39m}\u001b[0m\u001b[1;39m\n",
" \u001b[1;39m}\u001b[0m\u001b[1;39m,\n",
" \u001b[1;39m{\n",
" \u001b[0m\u001b[34;1m\"type\"\u001b[0m\u001b[1;39m: \u001b[0m\u001b[0;32m\"human\"\u001b[0m\u001b[1;39m,\n",
" \u001b[0m\u001b[34;1m\"data\"\u001b[0m\u001b[1;39m: \u001b[0m\u001b[1;39m{\n",
" \u001b[0m\u001b[34;1m\"content\"\u001b[0m\u001b[1;39m: \u001b[0m\u001b[0;32m\"What animal do i like?\"\u001b[0m\u001b[1;39m,\n",
" \u001b[0m\u001b[34;1m\"additional_kwargs\"\u001b[0m\u001b[1;39m: \u001b[0m\u001b[1;39m{}\u001b[0m\u001b[1;39m,\n",
" \u001b[0m\u001b[34;1m\"type\"\u001b[0m\u001b[1;39m: \u001b[0m\u001b[0;32m\"human\"\u001b[0m\u001b[1;39m,\n",
" \u001b[0m\u001b[34;1m\"example\"\u001b[0m\u001b[1;39m: \u001b[0m\u001b[0;39mfalse\u001b[0m\u001b[1;39m\n",
" \u001b[1;39m}\u001b[0m\u001b[1;39m\n",
" \u001b[1;39m}\u001b[0m\u001b[1;39m,\n",
" \u001b[1;39m{\n",
" \u001b[0m\u001b[34;1m\"type\"\u001b[0m\u001b[1;39m: \u001b[0m\u001b[0;32m\"ai\"\u001b[0m\u001b[1;39m,\n",
" \u001b[0m\u001b[34;1m\"data\"\u001b[0m\u001b[1;39m: \u001b[0m\u001b[1;39m{\n",
" \u001b[0m\u001b[34;1m\"content\"\u001b[0m\u001b[1;39m: \u001b[0m\u001b[0;32m\" You said you like cats.\"\u001b[0m\u001b[1;39m,\n",
" \u001b[0m\u001b[34;1m\"additional_kwargs\"\u001b[0m\u001b[1;39m: \u001b[0m\u001b[1;39m{}\u001b[0m\u001b[1;39m,\n",
" \u001b[0m\u001b[34;1m\"type\"\u001b[0m\u001b[1;39m: \u001b[0m\u001b[0;32m\"ai\"\u001b[0m\u001b[1;39m,\n",
" \u001b[0m\u001b[34;1m\"example\"\u001b[0m\u001b[1;39m: \u001b[0m\u001b[0;39mfalse\u001b[0m\u001b[1;39m\n",
" \u001b[1;39m}\u001b[0m\u001b[1;39m\n",
" \u001b[1;39m}\u001b[0m\u001b[1;39m,\n",
" \u001b[1;39m{\n",
" \u001b[0m\u001b[34;1m\"type\"\u001b[0m\u001b[1;39m: \u001b[0m\u001b[0;32m\"human\"\u001b[0m\u001b[1;39m,\n",
" \u001b[0m\u001b[34;1m\"data\"\u001b[0m\u001b[1;39m: \u001b[0m\u001b[1;39m{\n",
" \u001b[0m\u001b[34;1m\"content\"\u001b[0m\u001b[1;39m: \u001b[0m\u001b[0;32m\"Can you count till 10?\"\u001b[0m\u001b[1;39m,\n",
" \u001b[0m\u001b[34;1m\"additional_kwargs\"\u001b[0m\u001b[1;39m: \u001b[0m\u001b[1;39m{}\u001b[0m\u001b[1;39m,\n",
" \u001b[0m\u001b[34;1m\"type\"\u001b[0m\u001b[1;39m: \u001b[0m\u001b[0;32m\"human\"\u001b[0m\u001b[1;39m,\n",
" \u001b[0m\u001b[34;1m\"example\"\u001b[0m\u001b[1;39m: \u001b[0m\u001b[0;39mfalse\u001b[0m\u001b[1;39m\n",
" \u001b[1;39m}\u001b[0m\u001b[1;39m\n",
" \u001b[1;39m}\u001b[0m\u001b[1;39m,\n",
" \u001b[1;39m{\n",
" \u001b[0m\u001b[34;1m\"type\"\u001b[0m\u001b[1;39m: \u001b[0m\u001b[0;32m\"AIMessageChunk\"\u001b[0m\u001b[1;39m,\n",
" \u001b[0m\u001b[34;1m\"data\"\u001b[0m\u001b[1;39m: \u001b[0m\u001b[1;39m{\n",
" \u001b[0m\u001b[34;1m\"content\"\u001b[0m\u001b[1;39m: \u001b[0m\u001b[0;32m\" Sure, I'd be happy to count to 10:\\n\\n1, 2, 3, 4, 5, 6, 7, 8, 9, 10\"\u001b[0m\u001b[1;39m,\n",
" \u001b[0m\u001b[34;1m\"additional_kwargs\"\u001b[0m\u001b[1;39m: \u001b[0m\u001b[1;39m{}\u001b[0m\u001b[1;39m,\n",
" \u001b[0m\u001b[34;1m\"type\"\u001b[0m\u001b[1;39m: \u001b[0m\u001b[0;32m\"AIMessageChunk\"\u001b[0m\u001b[1;39m,\n",
" \u001b[0m\u001b[34;1m\"example\"\u001b[0m\u001b[1;39m: \u001b[0m\u001b[0;39mfalse\u001b[0m\u001b[1;39m\n",
" \u001b[1;39m}\u001b[0m\u001b[1;39m\n",
" \u001b[1;39m}\u001b[0m\u001b[1;39m\n",
"\u001b[1;39m]\u001b[0m\n"
]
}
],
"source": [
"!cat chat_histories/c7a327f3-5578-4fb7-a8f2-3082d7cb58cc.json | jq ."
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.9.6"
}
},
"nbformat": 4,
"nbformat_minor": 4
}
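Because the histories are persisted server-side as plain JSON files, a session can also be reloaded with the same FileChatMessageHistory class the server uses, instead of cat-ing the file through jq. A hypothetical sketch, assuming it is run from the server's working directory and that session_id holds the ID used in the notebook above:

from langchain.memory import FileChatMessageHistory

history = FileChatMessageHistory(f"chat_histories/{session_id}.json")
for message in history.messages:
    # Each entry is a LangChain message with a type ("human", "ai", ...) and content.
    print(message.type, ":", message.content)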
105 changes: 105 additions & 0 deletions examples/chat_with_persistence/server.py
@@ -0,0 +1,105 @@
#!/usr/bin/env python
"""Example of a chat server with persistence handled on the backend.
For simplicity, we're using file storage here -- to avoid the need to set up
a database. This is obviously not a good idea for a production environment,
but will help us to demonstrate the RunnableWithMessageHistory interface.
We'll use cookies to identify the user and/or session. This will help illustrate how to
fetch configuration from the request.
"""
import re
from pathlib import Path
from typing import Callable, Union

from fastapi import FastAPI, HTTPException
from langchain.chat_models import ChatAnthropic
from langchain.memory import FileChatMessageHistory
from langchain_core.chat_history import BaseChatMessageHistory
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
from langchain_core.runnables.history import RunnableWithMessageHistory
from typing_extensions import TypedDict

from langserve import add_routes


def _is_valid_identifier(value: str) -> bool:
"""Check if the session ID is in a valid format."""
# Use a regular expression to match the allowed characters
valid_characters = re.compile(r"^[a-zA-Z0-9-_]+$")
return bool(valid_characters.match(value))


def create_session_factory(
base_dir: Union[str, Path],
) -> Callable[[str], BaseChatMessageHistory]:
"""Create a session ID factory that creates session IDs from a base dir.
Args:
base_dir: Base directory to use for storing the chat histories.
Returns:
A session ID factory that creates session IDs from a base path.
"""
base_dir_ = Path(base_dir) if isinstance(base_dir, str) else base_dir
if not base_dir_.exists():
base_dir_.mkdir(parents=True)

def get_chat_history(session_id: str) -> FileChatMessageHistory:
"""Get a chat history from a session ID."""
if not _is_valid_identifier(session_id):
raise HTTPException(
status_code=400,
detail=f"Session ID `{session_id}` is not in a valid format. "
"Session ID must only contain alphanumeric characters, "
"hyphens, and underscores.",
)
file_path = base_dir_ / f"{session_id}.json"
return FileChatMessageHistory(str(file_path))

return get_chat_history
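
# Illustrative usage of the factory above (a hypothetical sketch, values are made up):
# calling it yields a FileChatMessageHistory for a given session ID, backed by
# chat_histories/<session_id>.json, e.g.
#
#     get_history = create_session_factory("chat_histories")
#     history = get_history("demo-session")
#     history.add_user_message("hello")
#     print(history.messages)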


app = FastAPI(
title="LangChain Server",
version="1.0",
description="Spin up a simple api server using Langchain's Runnable interfaces",
)


# Declare a chain
prompt = ChatPromptTemplate.from_messages(
[
("system", "You're an assistant by the name of Bob."),
MessagesPlaceholder(variable_name="history"),
("human", "{human_input}"),
]
)

chain = prompt | ChatAnthropic(model="claude-2")


class InputChat(TypedDict):
"""Input for the chat endpoint."""

human_input: str
"""Human input"""


chain_with_history = RunnableWithMessageHistory(
chain,
create_session_factory("chat_histories"),
input_messages_key="human_input",
history_messages_key="history",
).with_types(input_type=InputChat)
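
# Illustrative sketch (hypothetical session ID): when invoked, the wrapped chain
# expects a session_id in the config, fills the "history" placeholder with that
# session's stored messages, and appends the new turn after the call, e.g.
#
#     chain_with_history.invoke(
#         {"human_input": "hi"},
#         {"configurable": {"session_id": "demo-session"}},
#     )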


add_routes(
app,
chain_with_history,
)

if __name__ == "__main__":
import uvicorn

uvicorn.run(app, host="localhost", port=8000)
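
For completeness, the routes exposed by add_routes can also be called over plain HTTP without the langserve client. A minimal sketch using the requests library, assuming the default LangServe request schema in which the JSON body carries "input" and "config"; it mirrors what the notebook does through RemoteRunnable:

import uuid

import requests

session_id = str(uuid.uuid4())
response = requests.post(
    "http://localhost:8000/invoke",
    json={
        "input": {"human_input": "hello, who are you?"},
        "config": {"configurable": {"session_id": session_id}},
    },
)
# The model's reply is nested in the response payload (typically under "output").
print(response.json())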
