Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Passthrough example #305

Merged
merged 4 commits into from
Dec 8, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
148 changes: 148 additions & 0 deletions examples/passthrough_dict/client.ipynb
Original file line number Diff line number Diff line change
@@ -0,0 +1,148 @@
{
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Passthrough information\n",
"\n",
"An example that shows how to pass through additional info with the request, and get it back with the response."
]
},
{
"cell_type": "code",
"execution_count": 8,
"metadata": {
"tags": []
},
"outputs": [],
"source": [
"from langchain.prompts.chat import ChatPromptTemplate"
]
},
{
"cell_type": "code",
"execution_count": 9,
"metadata": {
"tags": []
},
"outputs": [],
"source": [
"from langserve import RemoteRunnable\n",
"\n",
"chain = RemoteRunnable(\"http://localhost:8000/v1/\")"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Let's create a prompt composed of a system message and a human message."
]
},
{
"cell_type": "code",
"execution_count": 10,
"metadata": {
"tags": []
},
"outputs": [
{
"data": {
"text/plain": [
"{'output': AIMessage(content='`apple` translates to `mela` in Italian.'),\n",
" 'info': {'info': {'user_id': 42, 'user_info': {'address': 42}}}}"
]
},
"execution_count": 10,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"chain.invoke({'thing': 'apple', 'language': 'italian', 'info': {\"user_id\": 42, \"user_info\": {\"address\": 42}}})"
]
},
{
"cell_type": "code",
"execution_count": 11,
"metadata": {
"tags": []
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"{'info': {'info': {'user_id': 42, 'user_info': {'address': 42}}}}\n",
"{'output': AIMessageChunk(content='')}\n",
"{'output': AIMessageChunk(content='m')}\n",
"{'output': AIMessageChunk(content='ela')}\n",
"{'output': AIMessageChunk(content='')}\n"
]
}
],
"source": [
"for chunk in chain.stream({'thing': 'apple', 'language': 'italian', 'info': {\"user_id\": 42, \"user_info\": {\"address\": 42}}}):\n",
" print(chunk)"
]
},
{
"cell_type": "code",
"execution_count": 12,
"metadata": {
"tags": []
},
"outputs": [],
"source": [
"from langserve import RemoteRunnable\n",
"\n",
"chain = RemoteRunnable(\"http://localhost:8000/v2/\")"
]
},
{
"cell_type": "code",
"execution_count": 13,
"metadata": {
"tags": []
},
"outputs": [
{
"data": {
"text/plain": [
"{'output': AIMessage(content='`apple` translates to `mela` in Italian.'),\n",
" 'info': {'user_id': 42, 'user_info': {'address': 42}}}"
]
},
"execution_count": 13,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"chain.invoke({'thing': 'apple', 'language': 'italian', 'info': {\"user_id\": 42, \"user_info\": {\"address\": 42}}})"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.9.6"
}
},
"nbformat": 4,
"nbformat_minor": 4
}
86 changes: 86 additions & 0 deletions examples/passthrough_dict/server.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,86 @@
#!/usr/bin/env python
"""Example LangChain server exposes multiple runnables (LLMs in this case)."""

from typing import Any, Callable, Dict, List, Optional, TypedDict

from fastapi import FastAPI
from langchain.chat_models import ChatOpenAI
from langchain.prompts import ChatPromptTemplate
from langchain.schema.runnable import RunnableMap, RunnablePassthrough

from langserve import add_routes

# FastAPI application that hosts both passthrough chain variants (/v1 and /v2).
app = FastAPI(
    title="LangChain Server",
    version="1.0",
    description="Spin up a simple api server using Langchain's Runnable interfaces",
)


def _create_projection(
*, include_keys: Optional[List] = None, exclude_keys: Optional[List[str]] = None
) -> Callable[[dict], dict]:
"""Create a projection function."""

def _project_dict(
d: dict,
) -> dict:
"""Project dictionary."""
keys = d.keys()
if include_keys is not None:
keys = set(keys) & set(include_keys)
if exclude_keys is not None:
keys = set(keys) - set(exclude_keys)
return {k: d[k] for k in keys}

return _project_dict


# Prompt asking the model to translate `thing` into `language`.
prompt = ChatPromptTemplate.from_messages(
    [("human", "translate `{thing}` to {language}")]
)
model = ChatOpenAI()

# Core translation chain: prompt -> chat model.
underlying_chain = prompt | model

# Version 1: wrap the chain so the extra "info" key in the request is passed
# through to the response:
#   - "output": run the underlying chain on everything except "info"
#   - "info": echo back only the "info" key of the input
wrapped_chain = RunnableMap(
    {
        "output": _create_projection(exclude_keys=["info"]) | underlying_chain,
        "info": _create_projection(include_keys=["info"]),
    }
)


class Input(TypedDict):
    """Request schema for the passthrough chains."""

    # Word or phrase to translate.
    thing: str
    # Target language to translate into.
    language: str
    # Arbitrary caller metadata; passed through to the response untouched.
    info: Dict[str, Any]


class Output(TypedDict):
    """Response schema for the passthrough chains."""

    # Result of the underlying chain (typed by its own output schema).
    output: underlying_chain.output_schema
    # Caller metadata echoed back from the request.
    info: Dict[str, Any]


# Expose the RunnableMap-based variant at /v1.
add_routes(
    app, wrapped_chain.with_types(input_type=Input, output_type=Output), path="/v1"
)


# Version 2
# Uses RunnablePassthrough.assign: run the underlying chain and store its
# result under "output" while keeping the original input keys, then select
# the two keys the Output schema expects.
wrapped_chain_2 = RunnablePassthrough.assign(output=underlying_chain) | {
    "output": lambda x: x["output"],
    "info": lambda x: x["info"],
}

# Expose the RunnablePassthrough-based variant at /v2.
add_routes(
    app,
    wrapped_chain_2.with_types(input_type=Input, output_type=Output),
    path="/v2",
)

if __name__ == "__main__":
    import uvicorn

    # Serve the app locally; the companion client notebook connects to
    # http://localhost:8000/.
    uvicorn.run(app, host="localhost", port=8000)