Update Anthropic assistants (#380)
smokestacklightnin authored Apr 8, 2024
1 parent a5e4795 commit f7de2e9
Showing 4 changed files with 57 additions and 28 deletions.
5 changes: 3 additions & 2 deletions docs/examples/gallery_streaming.py
@@ -27,8 +27,9 @@
 # Of the assistants that Ragna has built in, the following ones support streaming:
 #
 # - [Anthropic](https://www.anthropic.com/)
-#   - [ragna.assistants.Claude][]
-#   - [ragna.assistants.ClaudeInstant][]
+#   - [ragna.assistants.ClaudeOpus][]
+#   - [ragna.assistants.ClaudeSonnet][]
+#   - [ragna.assistants.ClaudeHaiku][]
 # - [Cohere](https://cohere.com/)
 #   - [ragna.assistants.Command][]
 #   - [ragna.assistants.CommandLight][]
5 changes: 3 additions & 2 deletions docs/tutorials/gallery_python_api.py
@@ -75,8 +75,9 @@
 # Ragna has builtin support for the following assistants:
 #
 # - [Anthropic](https://www.anthropic.com/)
-#   - [ragna.assistants.Claude][]
-#   - [ragna.assistants.ClaudeInstant][]
+#   - [ragna.assistants.ClaudeOpus][]
+#   - [ragna.assistants.ClaudeSonnet][]
+#   - [ragna.assistants.ClaudeHaiku][]
 # - [Cohere](https://cohere.com/)
 #   - [ragna.assistants.Command][]
 #   - [ragna.assistants.CommandLight][]
7 changes: 4 additions & 3 deletions ragna/assistants/__init__.py
@@ -1,6 +1,7 @@
 __all__ = [
-    "Claude",
-    "ClaudeInstant",
+    "ClaudeHaiku",
+    "ClaudeOpus",
+    "ClaudeSonnet",
     "Command",
     "CommandLight",
     "GeminiPro",
@@ -14,7 +15,7 @@
 ]
 
 from ._ai21labs import Jurassic2Ultra
-from ._anthropic import Claude, ClaudeInstant
+from ._anthropic import ClaudeHaiku, ClaudeOpus, ClaudeSonnet
 from ._cohere import Command, CommandLight
 from ._demo import RagnaDemoAssistant
 from ._google import GeminiPro, GeminiUltra
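With this change, the Claude 3 assistants are exported at the package level in place of `Claude` and `ClaudeInstant`. A minimal sketch of the new import surface (not part of the commit; it assumes this revision of ragna is installed and uses the `display_name` classmethod shown in the `_anthropic.py` diff below):

# Sketch: the Claude 3 assistants are now importable directly from ragna.assistants,
# and display_name() reports the vendor plus the pinned model identifier.
from ragna.assistants import ClaudeHaiku, ClaudeOpus, ClaudeSonnet

for assistant in (ClaudeOpus, ClaudeSonnet, ClaudeHaiku):
    print(assistant.display_name())
# Anthropic/claude-3-opus-20240229
# Anthropic/claude-3-sonnet-20240229
# Anthropic/claude-3-haiku-20240307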
68 changes: 47 additions & 21 deletions ragna/assistants/_anthropic.py
@@ -18,26 +18,35 @@ def _extra_requirements(cls) -> list[Requirement]:
     def display_name(cls) -> str:
         return f"Anthropic/{cls._MODEL}"
 
-    def _instructize_prompt(self, prompt: str, sources: list[Source]) -> str:
-        # See https://docs.anthropic.com/claude/docs/introduction-to-prompt-design#human--assistant-formatting
+    def _instructize_system_prompt(self, sources: list[Source]) -> str:
+        # See https://docs.anthropic.com/claude/docs/system-prompts
+        # See https://docs.anthropic.com/claude/docs/long-context-window-tips#tips-for-document-qa
         instruction = (
-            "\n\nHuman: "
-            "Use the following pieces of context to answer the question at the end. "
-            "If you don't know the answer, just say so. Don't try to make up an answer.\n"
+            f"I'm going to give you {len(sources)} document(s). "
+            f"Read the document(s) carefully because I'm going to ask you a question about them. "
+            f"If you can't answer the question with just the given document(s), just say so. "
+            "Don't try to make up an answer.\n\n"
         )
-        instruction += "\n\n".join(source.content for source in sources)
-        return f"{instruction}\n\nQuestion: {prompt}\n\nAssistant:"
+        # See https://docs.anthropic.com/claude/docs/long-context-window-tips#structuring-long-documents
+
+        return (
+            instruction
+            + "<documents>"
+            + "\n".join(f"<document>{source.content}</document>" for source in sources)
+            + "</documents>"
+        )
 
     async def _call_api(
         self, prompt: str, sources: list[Source], *, max_new_tokens: int
     ) -> AsyncIterator[str]:
         import httpx_sse
 
+        # See https://docs.anthropic.com/claude/reference/messages_post
         # See https://docs.anthropic.com/claude/reference/streaming
         async with httpx_sse.aconnect_sse(
             self._client,
             "POST",
-            "https://api.anthropic.com/v1/complete",
+            "https://api.anthropic.com/v1/messages",
             headers={
                 "accept": "application/json",
                 "anthropic-version": "2023-06-01",
@@ -46,8 +55,9 @@ async def _call_api(
             },
             json={
                 "model": self._MODEL,
-                "prompt": self._instructize_prompt(prompt, sources),
-                "max_tokens_to_sample": max_new_tokens,
+                "system": self._instructize_system_prompt(sources),
+                "messages": [{"role": "user", "content": prompt}],
+                "max_tokens": max_new_tokens,
                 "temperature": 0.0,
                 "stream": True,
             },
@@ -56,18 +66,34 @@ async def _call_api(
 
             async for sse in event_source.aiter_sse():
                 data = json.loads(sse.data)
-                if data["type"] != "completion":
-                    continue
-                elif "error" in data:
+                # See https://docs.anthropic.com/claude/reference/messages-streaming#raw-http-stream-response
+                if "error" in data:
                     raise RagnaException(data["error"].pop("message"), **data["error"])
-                elif data["stop_reason"] is not None:
+                elif data["type"] == "message_stop":
                     break
+                elif data["type"] != "content_block_delta":
+                    continue
 
-                yield cast(str, data["completion"])
+                yield cast(str, data["delta"].pop("text"))
 
 
-class ClaudeInstant(AnthropicApiAssistant):
-    """[Claude Instant](https://docs.anthropic.com/claude/reference/selecting-a-model)
+class ClaudeOpus(AnthropicApiAssistant):
+    """[Claude 3 Opus](https://docs.anthropic.com/claude/docs/models-overview)
+    !!! info "Required environment variables"
+    - `ANTHROPIC_API_KEY`
+    !!! info "Required packages"
+    - `httpx_sse`
+    """
+
+    _MODEL = "claude-3-opus-20240229"
+
+
+class ClaudeSonnet(AnthropicApiAssistant):
+    """[Claude 3 Sonnet](https://docs.anthropic.com/claude/docs/models-overview)
@@ -78,11 +104,11 @@ class ClaudeInstant(AnthropicApiAssistant):
     - `httpx_sse`
     """
 
-    _MODEL = "claude-instant-1"
+    _MODEL = "claude-3-sonnet-20240229"
 
 
-class Claude(AnthropicApiAssistant):
-    """[Claude](https://docs.anthropic.com/claude/reference/selecting-a-model)
+class ClaudeHaiku(AnthropicApiAssistant):
+    """[Claude 3 Haiku](https://docs.anthropic.com/claude/docs/models-overview)
@@ -93,4 +119,4 @@ class Claude(AnthropicApiAssistant):
     - `httpx_sse`
     """
 
-    _MODEL = "claude-2"
+    _MODEL = "claude-3-haiku-20240307"
