From f7de2e947c04cd3d4e8319cc117a8f9ecfa21239 Mon Sep 17 00:00:00 2001 From: William Black <125844868+smokestacklightnin@users.noreply.github.com> Date: Mon, 8 Apr 2024 00:13:47 -0700 Subject: [PATCH] Update Anthropic assistants (#380) --- docs/examples/gallery_streaming.py | 5 +- docs/tutorials/gallery_python_api.py | 5 +- ragna/assistants/__init__.py | 7 +-- ragna/assistants/_anthropic.py | 68 +++++++++++++++++++--------- 4 files changed, 57 insertions(+), 28 deletions(-) diff --git a/docs/examples/gallery_streaming.py b/docs/examples/gallery_streaming.py index b0b5b3f1..87c51f9e 100644 --- a/docs/examples/gallery_streaming.py +++ b/docs/examples/gallery_streaming.py @@ -27,8 +27,9 @@ # Of the assistants that Ragna has built in, the following ones support streaming: # # - [Anthropic](https://www.anthropic.com/) -# - [ragna.assistants.Claude][] -# - [ragna.assistants.ClaudeInstant][] +# - [ragna.assistants.ClaudeOpus][] +# - [ragna.assistants.ClaudeSonnet][] +# - [ragna.assistants.ClaudeHaiku][] # - [Cohere](https://cohere.com/) # - [ragna.assistants.Command][] # - [ragna.assistants.CommandLight][] diff --git a/docs/tutorials/gallery_python_api.py b/docs/tutorials/gallery_python_api.py index c703b7b3..05a59b3f 100644 --- a/docs/tutorials/gallery_python_api.py +++ b/docs/tutorials/gallery_python_api.py @@ -75,8 +75,9 @@ # Ragna has builtin support for the following assistants: # # - [Anthropic](https://www.anthropic.com/) -# - [ragna.assistants.Claude][] -# - [ragna.assistants.ClaudeInstant][] +# - [ragna.assistants.ClaudeOpus][] +# - [ragna.assistants.ClaudeSonnet][] +# - [ragna.assistants.ClaudeHaiku][] # - [Cohere](https://cohere.com/) # - [ragna.assistants.Command][] # - [ragna.assistants.CommandLight][] diff --git a/ragna/assistants/__init__.py b/ragna/assistants/__init__.py index 2fb79471..13ef3d2a 100644 --- a/ragna/assistants/__init__.py +++ b/ragna/assistants/__init__.py @@ -1,6 +1,7 @@ __all__ = [ - "Claude", - "ClaudeInstant", + "ClaudeHaiku", + 
"ClaudeOpus", + "ClaudeSonnet", "Command", "CommandLight", "GeminiPro", @@ -14,7 +15,7 @@ ] from ._ai21labs import Jurassic2Ultra -from ._anthropic import Claude, ClaudeInstant +from ._anthropic import ClaudeHaiku, ClaudeOpus, ClaudeSonnet from ._cohere import Command, CommandLight from ._demo import RagnaDemoAssistant from ._google import GeminiPro, GeminiUltra diff --git a/ragna/assistants/_anthropic.py b/ragna/assistants/_anthropic.py index f5f4c538..fa8922fe 100644 --- a/ragna/assistants/_anthropic.py +++ b/ragna/assistants/_anthropic.py @@ -18,26 +18,35 @@ def _extra_requirements(cls) -> list[Requirement]: def display_name(cls) -> str: return f"Anthropic/{cls._MODEL}" - def _instructize_prompt(self, prompt: str, sources: list[Source]) -> str: - # See https://docs.anthropic.com/claude/docs/introduction-to-prompt-design#human--assistant-formatting + def _instructize_system_prompt(self, sources: list[Source]) -> str: + # See https://docs.anthropic.com/claude/docs/system-prompts + # See https://docs.anthropic.com/claude/docs/long-context-window-tips#tips-for-document-qa instruction = ( - "\n\nHuman: " - "Use the following pieces of context to answer the question at the end. " - "If you don't know the answer, just say so. Don't try to make up an answer.\n" + f"I'm going to give you {len(sources)} document(s). " + f"Read the document(s) carefully because I'm going to ask you a question about them. " + f"If you can't answer the question with just the given document(s), just say so. 
" + "Don't try to make up an answer.\n\n" + ) + # See https://docs.anthropic.com/claude/docs/long-context-window-tips#structuring-long-documents + + return ( + instruction + + "<documents>" + + "\n".join(f"<document>{source.content}</document>" for source in sources) + + "</documents>" + ) - instruction += "\n\n".join(source.content for source in sources) - return f"{instruction}\n\nQuestion: {prompt}\n\nAssistant:" async def _call_api( self, prompt: str, sources: list[Source], *, max_new_tokens: int ) -> AsyncIterator[str]: import httpx_sse + # See https://docs.anthropic.com/claude/reference/messages_post # See https://docs.anthropic.com/claude/reference/streaming async with httpx_sse.aconnect_sse( self._client, "POST", - "https://api.anthropic.com/v1/complete", + "https://api.anthropic.com/v1/messages", headers={ "accept": "application/json", "anthropic-version": "2023-06-01", @@ -46,8 +55,9 @@ async def _call_api( }, json={ "model": self._MODEL, - "prompt": self._instructize_prompt(prompt, sources), - "max_tokens_to_sample": max_new_tokens, + "system": self._instructize_system_prompt(sources), + "messages": [{"role": "user", "content": prompt}], + "max_tokens": max_new_tokens, "temperature": 0.0, "stream": True, }, @@ -56,18 +66,34 @@ async def _call_api( async for sse in event_source.aiter_sse(): data = json.loads(sse.data) - if data["type"] != "completion": - continue - elif "error" in data: + # See https://docs.anthropic.com/claude/reference/messages-streaming#raw-http-stream-response + if "error" in data: raise RagnaException(data["error"].pop("message"), **data["error"]) - elif data["stop_reason"] is not None: + elif data["type"] == "message_stop": break + elif data["type"] != "content_block_delta": + continue + + yield cast(str, data["delta"].pop("text")) + + +class ClaudeOpus(AnthropicApiAssistant): + """[Claude 3 Opus](https://docs.anthropic.com/claude/docs/models-overview) + + !!! info "Required environment variables" + + - `ANTHROPIC_API_KEY` + + !!! 
info "Required packages" + + - `httpx_sse` + """ - yield cast(str, data["completion"]) + _MODEL = "claude-3-opus-20240229" -class ClaudeInstant(AnthropicApiAssistant): - """[Claude Instant](https://docs.anthropic.com/claude/reference/selecting-a-model) +class ClaudeSonnet(AnthropicApiAssistant): + """[Claude 3 Sonnet](https://docs.anthropic.com/claude/docs/models-overview) !!! info "Required environment variables" @@ -78,11 +104,11 @@ class ClaudeInstant(AnthropicApiAssistant): - `httpx_sse` """ - _MODEL = "claude-instant-1" + _MODEL = "claude-3-sonnet-20240229" -class Claude(AnthropicApiAssistant): - """[Claude](https://docs.anthropic.com/claude/reference/selecting-a-model) +class ClaudeHaiku(AnthropicApiAssistant): + """[Claude 3 Haiku](https://docs.anthropic.com/claude/docs/models-overview) !!! info "Required environment variables" @@ -93,4 +119,4 @@ class Claude(AnthropicApiAssistant): - `httpx_sse` """ - _MODEL = "claude-2" + _MODEL = "claude-3-haiku-20240307"