From 4a8ec7275475e23bb93c11141c0f0fb43dd32ad2 Mon Sep 17 00:00:00 2001
From: Eugene Yurtsev
Date: Fri, 8 Dec 2023 16:03:31 -0500
Subject: [PATCH] Passthrough example (#305)

Including an example to show how to do pass through of arbitrary inputs to the response
---
 examples/passthrough_dict/client.ipynb | 148 +++++++++++++++++++++++++
 examples/passthrough_dict/server.py    |  86 ++++++++++++++
 2 files changed, 234 insertions(+)
 create mode 100644 examples/passthrough_dict/client.ipynb
 create mode 100755 examples/passthrough_dict/server.py

diff --git a/examples/passthrough_dict/client.ipynb b/examples/passthrough_dict/client.ipynb
new file mode 100644
index 00000000..0c6a300e
--- /dev/null
+++ b/examples/passthrough_dict/client.ipynb
@@ -0,0 +1,148 @@
+{
+ "cells": [
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Passthrough information\n",
+    "\n",
+    "An example that shows how to pass through additional info with the request, and get it back with the response."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 8,
+   "metadata": {
+    "tags": []
+   },
+   "outputs": [],
+   "source": [
+    "from langchain.prompts.chat import ChatPromptTemplate"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 9,
+   "metadata": {
+    "tags": []
+   },
+   "outputs": [],
+   "source": [
+    "from langserve import RemoteRunnable\n",
+    "\n",
+    "chain = RemoteRunnable(\"http://localhost:8000/v1/\")"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Let's invoke the chain, passing extra `info` that the server returns alongside the output."
+ ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": { + "tags": [] + }, + "outputs": [ + { + "data": { + "text/plain": [ + "{'output': AIMessage(content='`apple` translates to `mela` in Italian.'),\n", + " 'info': {'info': {'user_id': 42, 'user_info': {'address': 42}}}}" + ] + }, + "execution_count": 10, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "chain.invoke({'thing': 'apple', 'language': 'italian', 'info': {\"user_id\": 42, \"user_info\": {\"address\": 42}}})" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": { + "tags": [] + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "{'info': {'info': {'user_id': 42, 'user_info': {'address': 42}}}}\n", + "{'output': AIMessageChunk(content='')}\n", + "{'output': AIMessageChunk(content='m')}\n", + "{'output': AIMessageChunk(content='ela')}\n", + "{'output': AIMessageChunk(content='')}\n" + ] + } + ], + "source": [ + "for chunk in chain.stream({'thing': 'apple', 'language': 'italian', 'info': {\"user_id\": 42, \"user_info\": {\"address\": 42}}}):\n", + " print(chunk)" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "from langserve import RemoteRunnable\n", + "\n", + "chain = RemoteRunnable(\"http://localhost:8000/v2/\")" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": { + "tags": [] + }, + "outputs": [ + { + "data": { + "text/plain": [ + "{'output': AIMessage(content='`apple` translates to `mela` in Italian.'),\n", + " 'info': {'user_id': 42, 'user_info': {'address': 42}}}" + ] + }, + "execution_count": 13, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "chain.invoke({'thing': 'apple', 'language': 'italian', 'info': {\"user_id\": 42, \"user_info\": {\"address\": 42}}})" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": 
"python",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.9.6"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 4
+}
diff --git a/examples/passthrough_dict/server.py b/examples/passthrough_dict/server.py
new file mode 100755
index 00000000..541d9cef
--- /dev/null
+++ b/examples/passthrough_dict/server.py
@@ -0,0 +1,86 @@
+#!/usr/bin/env python
+"""Example server that passes arbitrary extra input through to the response."""
+
+from typing import Any, Callable, Dict, List, Optional, TypedDict
+
+from fastapi import FastAPI
+from langchain.chat_models import ChatOpenAI
+from langchain.prompts import ChatPromptTemplate
+from langchain.schema.runnable import RunnableMap, RunnablePassthrough
+
+from langserve import add_routes
+
+app = FastAPI(
+    title="LangChain Server",
+    version="1.0",
+    description="Spin up a simple API server using LangChain's Runnable interfaces",
+)
+
+
+def _create_projection(
+    *, include_keys: Optional[List] = None, exclude_keys: Optional[List[str]] = None
+) -> Callable[[dict], dict]:
+    """Create a projection function."""
+
+    def _project_dict(
+        d: dict,
+    ) -> dict:
+        """Project dictionary."""
+        keys = d.keys()
+        if include_keys is not None:
+            keys = set(keys) & set(include_keys)
+        if exclude_keys is not None:
+            keys = set(keys) - set(exclude_keys)
+        return {k: d[k] for k in keys}
+
+    return _project_dict
+
+
+prompt = ChatPromptTemplate.from_messages(
+    [("human", "translate `{thing}` to {language}")]
+)
+model = ChatOpenAI()
+
+underlying_chain = prompt | model
+
+wrapped_chain = RunnableMap(
+    {
+        "output": _create_projection(exclude_keys=["info"]) | underlying_chain,
+        "info": _create_projection(include_keys=["info"]),
+    }
+)
+
+
+class Input(TypedDict):
+    thing: str
+    language: str
+    info: Dict[str, Any]
+
+
+class Output(TypedDict):
+    output: underlying_chain.output_schema
+    info: Dict[str, Any]
+
+
+add_routes(
+    app, wrapped_chain.with_types(input_type=Input, output_type=Output), path="/v1"
+)
+
+
+# Version 2
+# Uses RunnablePassthrough.assign
+wrapped_chain_2 = RunnablePassthrough.assign(output=underlying_chain) | {
+    "output": lambda x: x["output"],
+    "info": lambda x: x["info"],
+}
+
+add_routes(
+    app,
+    wrapped_chain_2.with_types(input_type=Input, output_type=Output),
+    path="/v2",
+)
+
+if __name__ == "__main__":
+    import uvicorn
+
+    uvicorn.run(app, host="localhost", port=8000)