From 41cf48b8bce030700b8eb3b3f7f834607f97ba58 Mon Sep 17 00:00:00 2001
From: William Fu-Hinthorn <13333726+hinthornw@users.noreply.github.com>
Date: Wed, 8 May 2024 13:12:32 -0700
Subject: [PATCH 01/12] TODO: streaming doesn't work. Anthropic has a manager
 class because of course generators aren't powerful enough for them.

---
 python/langsmith/wrappers/__init__.py   |   3 +-
 python/langsmith/wrappers/_anthropic.py | 196 ++++++++++++++++++
 .../wrappers/test_anthropic.py           | 195 +++++++++++++++++
 3 files changed, 393 insertions(+), 1 deletion(-)
 create mode 100644 python/langsmith/wrappers/_anthropic.py
 create mode 100644 python/tests/integration_tests/wrappers/test_anthropic.py

diff --git a/python/langsmith/wrappers/__init__.py b/python/langsmith/wrappers/__init__.py
index 34f425953..48e9f78b3 100644
--- a/python/langsmith/wrappers/__init__.py
+++ b/python/langsmith/wrappers/__init__.py
@@ -1,5 +1,6 @@
 """This module provides convenient tracing wrappers for popular libraries."""
 
+from langsmith.wrappers._anthropic import wrap_anthropic
 from langsmith.wrappers._openai import wrap_openai
 
-__all__ = ["wrap_openai"]
+__all__ = ["wrap_anthropic", "wrap_openai"]
diff --git a/python/langsmith/wrappers/_anthropic.py b/python/langsmith/wrappers/_anthropic.py
new file mode 100644
index 000000000..b665add1c
--- /dev/null
+++ b/python/langsmith/wrappers/_anthropic.py
@@ -0,0 +1,196 @@
+from __future__ import annotations
+
+import functools
+import logging
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    Callable,
+    List,
+    Mapping,
+    Optional,
+    Type,
+    TypeVar,
+    Union,
+)
+
+from typing_extensions import TypedDict
+
+from langsmith import client as ls_client
+from langsmith import run_helpers
+
+if TYPE_CHECKING:
+    from anthropic import Anthropic, AsyncAnthropic
+    from anthropic.types import (
+        Completion,
+        Message,
+        MessageStreamEvent,
+    )
+
+C = TypeVar("C", bound=Union["Anthropic", "AsyncAnthropic", Any])
+logger = logging.getLogger(__name__)
+
+
+@functools.lru_cache
+def _get_not_given() -> Optional[Type]:
+    try:
+        from anthropic._types import NotGiven
+
+        return NotGiven
+    except ImportError:
+        return None
+
+
+def _strip_not_given(d: dict) -> dict:
+    try:
+        not_given = _get_not_given()
+        if not_given is None:
+            return d
+        return {k: v for k, v in d.items() if not isinstance(v, not_given)}
+    except Exception as e:
+        logger.error(f"Error stripping NotGiven: {e}")
+        return d
+
+
+def _accumulate_event(
+    *, event: MessageStreamEvent, current_snapshot: Message | None
+) -> Message:
+    try:
+        from anthropic.types import ContentBlock
+    except ImportError:
+        logger.debug("Error importing ContentBlock")
+        return current_snapshot
+
+    if current_snapshot is None:
+        if event.type == "message_start":
+            return event.message
+
+        raise RuntimeError(
+            f'Unexpected event order, got {event.type} before "message_start"'
+        )
+
+    if event.type == "content_block_start":
+        # TODO: check index <-- from anthropic SDK :)
+        current_snapshot.content.append(
+            ContentBlock.construct(**event.content_block.model_dump()),
+        )
+    elif event.type == "content_block_delta":
+        content = current_snapshot.content[event.index]
+        if content.type == "text" and event.delta.type == "text_delta":
+            content.text += event.delta.text
+    elif event.type == "message_delta":
+        current_snapshot.stop_reason = event.delta.stop_reason
+        current_snapshot.stop_sequence = event.delta.stop_sequence
+        current_snapshot.usage.output_tokens = event.usage.output_tokens
+
+    return current_snapshot
+
+
+def _reduce_chat(all_chunks: List) -> dict:
+    full_message = None
+    for chunk in all_chunks:
+        try:
+            full_message = _accumulate_event(event=chunk, current_snapshot=full_message)
+        except RuntimeError as e:
+            logger.debug(f"Error accumulating event in Anthropic Wrapper: {e}")
+            return {"output": all_chunks}
+    if full_message is None:
+        return {"output": all_chunks}
+    return full_message.model_dump()
+
+
+def _reduce_completions(all_chunks: List[Completion]) -> dict:
+    all_content = []
+    for chunk in all_chunks:
+        content = chunk.choices[0].text
+        if content is not None:
+            all_content.append(content)
+    content = "".join(all_content)
+    if all_chunks:
+        d = all_chunks[-1].model_dump()
+        d["choices"] = [{"text": content}]
+    else:
+        d = {"choices": [{"text": content}]}
+
+    return d
+
+
+def _get_wrapper(
+    original_create: Callable,
+    name: str,
+    reduce_fn: Optional[Callable] = None,
+    tracing_extra: Optional[TracingExtra] = None,
+    force_stream: bool = False,
+) -> Callable:
+    textra = tracing_extra or {}
+
+    @functools.wraps(original_create)
+    def create(*args, **kwargs):
+        stream = kwargs.get("stream")
+        decorator = run_helpers.traceable(
+            name=name,
+            run_type="llm",
+            reduce_fn=reduce_fn if force_stream or stream else None,
+            process_inputs=_strip_not_given,
+            **textra,
+        )
+
+        return decorator(original_create)(*args, **kwargs)
+
+    @functools.wraps(original_create)
+    async def acreate(*args, **kwargs):
+        stream = kwargs.get("stream")
+        decorator = run_helpers.traceable(
+            name=name,
+            run_type="llm",
+            reduce_fn=reduce_fn if force_stream or stream else None,
+            process_inputs=_strip_not_given,
+            **textra,
+        )
+        if stream:
+            # TODO: This slightly alters the output to be a generator instead of the
+            # stream object. We can probably fix this with a bit of simple changes
+            res = decorator(original_create)(*args, stream=stream, **kwargs)
+            return res
+        return await decorator(original_create)(*args, **kwargs)
+
+    return acreate if run_helpers.is_async(original_create) else create
+
+
+class TracingExtra(TypedDict, total=False):
+    metadata: Optional[Mapping[str, Any]]
+    tags: Optional[List[str]]
+    client: Optional[ls_client.Client]
+
+
+def wrap_anthropic(client: C, *, tracing_extra: Optional[TracingExtra] = None) -> C:
+    """Patch the Anthropic client to make it traceable.
+
+    Args:
+        client (Union[Anthropic, AsyncAnthropic]): The client to patch.
+        tracing_extra (Optional[TracingExtra], optional): Extra tracing information.
+            Defaults to None.
+
+    Returns:
+        Union[Anthropic, AsyncAnthropic]: The patched client.
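+
+    Example (an illustrative usage sketch; assumes the `anthropic` package
+    is installed and an API key is configured in the environment):
+
+        import anthropic
+        from langsmith.wrappers import wrap_anthropic
+
+        client = wrap_anthropic(anthropic.Anthropic())
+        message = client.messages.create(
+            model="claude-3-haiku-20240307",
+            max_tokens=3,
+            messages=[{"role": "user", "content": "Say 'foo'"}],
+        )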
+
+    """
+    client.messages.create = _get_wrapper(  # type: ignore[method-assign]
+        client.messages.create,
+        "ChatAnthropic",
+        tracing_extra=tracing_extra,
+    )
+    client.messages.stream = _get_wrapper(  # type: ignore[method-assign]
+        client.messages.stream,
+        "ChatAnthropic",
+        _reduce_chat,
+        force_stream=True,
+        tracing_extra=tracing_extra,
+    )
+    client.completions.create = _get_wrapper(  # type: ignore[method-assign]
+        client.completions.create,
+        "Anthropic",
+        _reduce_completions,
+        tracing_extra=tracing_extra,
+    )
+    return client
diff --git a/python/tests/integration_tests/wrappers/test_anthropic.py b/python/tests/integration_tests/wrappers/test_anthropic.py
new file mode 100644
index 000000000..1efc70bf4
--- /dev/null
+++ b/python/tests/integration_tests/wrappers/test_anthropic.py
@@ -0,0 +1,195 @@
+# mypy: disable-error-code="attr-defined, union-attr, arg-type, call-overload"
+import time
+from unittest import mock
+
+import pytest
+
+from langsmith.wrappers import wrap_anthropic
+
+model_name = "claude-3-haiku-20240307"
+
+
+@mock.patch("langsmith.client.requests.Session")
+@pytest.mark.parametrize("stream", [False, True])
+def test_chat_sync_api(mock_session: mock.MagicMock, stream: bool):
+    import anthropic  # noqa
+
+    original_client = anthropic.Anthropic()
+    patched_client = wrap_anthropic(anthropic.Anthropic())
+    messages = [{"role": "user", "content": "Say 'foo'"}]
+
+    if stream:
+        original = original_client.messages.stream(
+            messages=messages,  # noqa: [arg-type]
+            temperature=0,
+            model=model_name,
+            max_tokens=3,
+        )
+        patched = patched_client.messages.stream(
+            messages=messages,  # noqa: [arg-type]
+            temperature=0,
+            model=model_name,
+            max_tokens=3,
+        )
+        # We currently return a generator, so
+        # the types aren't the same.
+        patched_chunks = list(patched)
+        original_chunks = list(original)
+        assert len(original_chunks) == len(patched_chunks)
+        assert "".join([c.text for c in original.content]) == "".join(
+            c.text for c in patched.content
+        )
+    else:
+        original = original_client.messages.create(
+            messages=messages,  # noqa: [arg-type]
+            temperature=0,
+            model=model_name,
+            max_tokens=3,
+        )
+        patched = patched_client.messages.create(
+            messages=messages,  # noqa: [arg-type]
+            temperature=0,
+            model=model_name,
+            max_tokens=3,
+        )
+        assert type(original) == type(patched)
+        assert "".join([c.text for c in original.content]) == "".join(
+            c.text for c in patched.content
+        )
+    # Give the thread a chance.
+    time.sleep(0.01)
+    for call in mock_session.return_value.request.call_args_list:
+        assert call[0][0].upper() == "POST"
+
+
+@mock.patch("langsmith.client.requests.Session")
+@pytest.mark.parametrize("stream", [False, True])
+async def test_chat_async_api(mock_session: mock.MagicMock, stream: bool):
+    import anthropic  # noqa
+
+    original_client = anthropic.AsyncAnthropic()
+    patched_client = wrap_anthropic(anthropic.AsyncAnthropic())
+    messages = [{"role": "user", "content": "Say 'foo'"}]
+
+    if stream:
+        original = await original_client.messages.stream(
+            messages=messages,
+            temperature=0,
+            model=model_name,
+            max_tokens=3,
+        )
+        patched = await patched_client.messages.stream(
+            messages=messages,
+            temperature=0,
+            model=model_name,
+            max_tokens=3,
+        )
+        # We currently return a generator, so
+        # the types aren't the same.
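+        # (Illustrative note: each chunk here is a MessageStreamEvent; the
+        # wrapper's _reduce_chat reduces the accumulated events via
+        # _accumulate_event into a single Message dict for the traced output.)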
+        original_chunks = []
+        async for chunk in original:
+            original_chunks.append(chunk)
+        patched_chunks = []
+        async for chunk in patched:
+            patched_chunks.append(chunk)
+        assert len(original_chunks) == len(patched_chunks)
+        assert [o.choices == p.choices for o, p in zip(original_chunks, patched_chunks)]
+    else:
+        original = await original_client.messages.create(
+            messages=messages,
+            temperature=0,
+            model=model_name,
+            max_tokens=3,
+        )
+        patched = await patched_client.messages.create(
+            messages=messages,
+            temperature=0,
+            model=model_name,
+            max_tokens=3,
+        )
+        assert type(original) == type(patched)
+        assert original.choices == patched.choices
+    # Give the thread a chance.
+    time.sleep(0.1)
+    for call in mock_session.return_value.request.call_args_list:
+        assert call[0][0].upper() == "POST"
+
+
+@mock.patch("langsmith.client.requests.Session")
+def test_completions_sync_api(mock_session: mock.MagicMock, stream: bool):
+    import anthropic
+
+    original_client = anthropic.Anthropic()
+    patched_client = wrap_anthropic(anthropic.Anthropic())
+    prompt = ("Say 'Foo' then stop.",)
+    original = original_client.completions.create(
+        model="gpt-3.5-turbo-instruct",
+        prompt=prompt,
+        temperature=0,
+        stream=stream,
+        max_tokens_to_sample=3,
+    )
+    patched = patched_client.completions.create(
+        model="gpt-3.5-turbo-instruct",
+        prompt=prompt,
+        temperature=0,
+        stream=stream,
+        max_tokens_to_sample=3,
+    )
+    if stream:
+        # We currently return a generator, so
+        # the types aren't the same.
+        original_chunks = list(original)
+        patched_chunks = list(patched)
+        assert len(original_chunks) == len(patched_chunks)
+        assert [o.choices == p.choices for o, p in zip(original_chunks, patched_chunks)]
+    else:
+        assert type(original) == type(patched)
+        assert original.choices == patched.choices
+    # Give the thread a chance.
+    time.sleep(0.1)
+    for call in mock_session.return_value.request.call_args_list:
+        assert call[0][0].upper() == "POST"
+
+
+@mock.patch("langsmith.client.requests.Session")
+@pytest.mark.parametrize("stream", [False, True])
+async def test_completions_async_api(mock_session: mock.MagicMock, stream: bool):
+    import anthropic
+
+    original_client = anthropic.AsyncAnthropic()
+    patched_client = wrap_anthropic(anthropic.AsyncAnthropic())
+    prompt = ("Say 'Hi i'm ChatGPT' then stop.",)
+    original = await original_client.completions.create(
+        model="gpt-3.5-turbo-instruct",
+        prompt=prompt,
+        temperature=0,
+        stream=stream,
+        max_tokens_to_sample=3,
+    )
+    patched = await patched_client.completions.create(
+        model="gpt-3.5-turbo-instruct",
+        prompt=prompt,
+        temperature=0,
+        stream=stream,
+        max_tokens_to_sample=3,
+    )
+    if stream:
+        # We currently return a generator, so
+        # the types aren't the same.
+        original_chunks = []
+        async for chunk in original:
+            original_chunks.append(chunk)
+        patched_chunks = []
+        async for chunk in patched:
+            patched_chunks.append(chunk)
+        assert len(original_chunks) == len(patched_chunks)
+        assert [o.choices == p.choices for o, p in zip(original_chunks, patched_chunks)]
+    else:
+        assert type(original) == type(patched)
+        assert original.choices == patched.choices
+    # Give the thread a chance.
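+    # (Assumed rationale: the LangSmith client flushes runs from a background
+    # thread, so the test sleeps briefly before inspecting the mocked session.)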
+ time.sleep(0.1) + assert mock_session.return_value.request.call_count >= 1 + for call in mock_session.return_value.request.call_args_list: + assert call[0][0].upper() == "POST" From 26906ce2e3832fdcdb33c33a1c1af9e0011260e7 Mon Sep 17 00:00:00 2001 From: William Fu-Hinthorn <13333726+hinthornw@users.noreply.github.com> Date: Fri, 20 Sep 2024 15:44:57 -0700 Subject: [PATCH 02/12] [Python] Support stream responses --- python/langsmith/run_helpers.py | 432 ++++++++++++++---- python/langsmith/wrappers/_openai.py | 5 - .../integration_tests/wrappers/test_openai.py | 4 + python/tests/unit_tests/test_run_helpers.py | 125 ++++- 4 files changed, 473 insertions(+), 93 deletions(-) diff --git a/python/langsmith/run_helpers.py b/python/langsmith/run_helpers.py index fe933bda7..6da368a06 100644 --- a/python/langsmith/run_helpers.py +++ b/python/langsmith/run_helpers.py @@ -16,11 +16,13 @@ TYPE_CHECKING, Any, AsyncGenerator, + AsyncIterator, Awaitable, Callable, Dict, Generator, Generic, + Iterator, List, Mapping, Optional, @@ -90,13 +92,6 @@ def get_tracing_context( return {k: context.get(v) for k, v in _CONTEXT_KEYS.items()} -def _set_tracing_context(context: Dict[str, Any]): - """Set the tracing context.""" - for k, v in context.items(): - var = _CONTEXT_KEYS[k] - var.set(v) - - @contextlib.contextmanager def tracing_context( *, @@ -465,19 +460,9 @@ def manual_extra_function(x): invocation_params_fn=kwargs.pop("_invocation_params_fn", None), ) outputs_processor = kwargs.pop("process_outputs", None) - - def _on_run_end( - container: _TraceableContainer, - outputs: Optional[Any] = None, - error: Optional[BaseException] = None, - ) -> None: - """Handle the end of run.""" - try: - if outputs_processor is not None: - outputs = outputs_processor(outputs) - _container_end(container, outputs=outputs, error=error) - except BaseException as e: - LOGGER.warning(f"Unable to process trace outputs: {repr(e)}") + _on_run_end = functools.partial( + _handle_container_end, outputs_processor=outputs_processor + ) if kwargs: warnings.warn( @@ -570,34 +555,19 @@ async def async_generator_wrapper( **get_tracing_context(run_container["context"]) ): async_gen_result = await async_gen_result - try: - while True: - if accepts_context: - item = await asyncio.create_task( # type: ignore[call-arg, var-annotated] - aitertools.py_anext(async_gen_result), # type: ignore[arg-type] - context=run_container["context"], - ) - else: - # Python < 3.11 - with tracing_context( - **get_tracing_context(run_container["context"]) - ): - item = await aitertools.py_anext(async_gen_result) - if run_type == "llm": - if run_container["new_run"]: - run_container["new_run"].add_event( - { - "name": "new_token", - "time": datetime.datetime.now( - datetime.timezone.utc - ).isoformat(), - "kwargs": {"token": item}, - } - ) - results.append(item) - yield item - except StopAsyncIteration: - pass + + async for item in _process_async_iterator( + generator=async_gen_result, + run_container=run_container, + is_llm_run=( + run_container["new_run"].run_type == "llm" + if run_container["new_run"] + else False + ), + accepts_context=accepts_context, + results=results, + ): + yield item except BaseException as e: await asyncio.shield( aitertools.aio_to_thread(_on_run_end, run_container, error=e) @@ -663,45 +633,28 @@ def generator_wrapper( ) results: List[Any] = [] function_return: Any = None + try: if func_accepts_parent_run: kwargs["run_tree"] = run_container["new_run"] - # TODO: Nesting is ambiguous if a nested traceable function is only - # called 
mid-generation. Need to explicitly accept run_tree to get - # around this. if not func_accepts_config: kwargs.pop("config", None) generator_result = run_container["context"].run(func, *args, **kwargs) - try: - while True: - item = run_container["context"].run(next, generator_result) - if run_type == "llm": - if run_container["new_run"]: - run_container["new_run"].add_event( - { - "name": "new_token", - "time": datetime.datetime.now( - datetime.timezone.utc - ).isoformat(), - "kwargs": {"token": item}, - } - ) - results.append(item) - try: - yield item - except GeneratorExit: - break - except StopIteration as e: - function_return = e.value - if function_return is not None: - # In 99% of cases, people yield OR return; to keep - # backwards compatibility, we'll only return if there's - # return value is non-null. - results.append(function_return) + + function_return = yield from _process_iterator( + generator_result, + run_container, + is_llm_run=run_type == "llm", + results=results, + ) + + if function_return is not None: + results.append(function_return) except BaseException as e: _on_run_end(run_container, error=e) raise e + if results: if reduce_fn: try: @@ -716,17 +669,88 @@ def generator_wrapper( _on_run_end(run_container, outputs=function_result) return function_return + # "Stream" functions (used in methods like OpenAI/Anthropic's SDKs) + # are functions that return iterable responses and should not be + # considered complete until the streaming is completed + @functools.wraps(func) + def stream_wrapper( + *args: Any, langsmith_extra: Optional[LangSmithExtra] = None, **kwargs: Any + ) -> Any: + trace_container = _setup_run( + func, + container_input=container_input, + langsmith_extra=langsmith_extra, + args=args, + kwargs=kwargs, + ) + + try: + if func_accepts_parent_run: + kwargs["run_tree"] = trace_container["new_run"] + if not func_accepts_config: + kwargs.pop("config", None) + stream = trace_container["context"].run(func, *args, **kwargs) + except Exception as e: + _on_run_end(trace_container, error=e) + raise + + if hasattr(stream, "__iter__"): + return _TracedStream(stream, trace_container, reduce_fn) + elif hasattr(stream, "__aiter__"): + # sync function -> async iterable (unexpected) + return _TracedAsyncStream(stream, trace_container, reduce_fn) + + # If it's not iterable, end the trace immediately + _on_run_end(trace_container, outputs=stream) + return stream + + @functools.wraps(func) + async def async_stream_wrapper( + *args: Any, langsmith_extra: Optional[LangSmithExtra] = None, **kwargs: Any + ) -> Any: + trace_container = await aitertools.aio_to_thread( + _setup_run, + func, + container_input=container_input, + langsmith_extra=langsmith_extra, + args=args, + kwargs=kwargs, + ) + + try: + if func_accepts_parent_run: + kwargs["run_tree"] = trace_container["new_run"] + if not func_accepts_config: + kwargs.pop("config", None) + stream = await func(*args, **kwargs) + except Exception as e: + await aitertools.aio_to_thread(_on_run_end, trace_container, error=e) + raise + + if hasattr(stream, "__aiter__"): + return _TracedAsyncStream(stream, trace_container, reduce_fn) + elif hasattr(stream, "__iter__"): + # Async function -> sync iterable + return _TracedStream(stream, trace_container, reduce_fn) + + # If it's not iterable, end the trace immediately + await aitertools.aio_to_thread(_on_run_end, trace_container, outputs=stream) + return stream + if inspect.isasyncgenfunction(func): selected_wrapper: Callable = async_generator_wrapper + elif inspect.isgeneratorfunction(func): + 
selected_wrapper = generator_wrapper elif is_async(func): if reduce_fn: - selected_wrapper = async_generator_wrapper + selected_wrapper = async_stream_wrapper else: selected_wrapper = async_wrapper - elif reduce_fn or inspect.isgeneratorfunction(func): - selected_wrapper = generator_wrapper else: - selected_wrapper = wrapper + if reduce_fn: + selected_wrapper = stream_wrapper + else: + selected_wrapper = wrapper setattr(selected_wrapper, "__langsmith_traceable__", True) sig = inspect.signature(selected_wrapper) if not sig.parameters.get("config"): @@ -1154,7 +1178,6 @@ async def awrap_traceable(inputs: dict, config: RunnableConfig) -> Any: ## Private Methods and Objects - _VALID_RUN_TYPES = { "tool", "chain", @@ -1409,6 +1432,21 @@ def _setup_run( return response_container +def _handle_container_end( + container: _TraceableContainer, + outputs: Optional[Any] = None, + error: Optional[BaseException] = None, + outputs_processor: Optional[Callable[..., dict]] = None, +) -> None: + """Handle the end of run.""" + try: + if outputs_processor is not None: + outputs = outputs_processor(outputs) + _container_end(container, outputs=outputs, error=error) + except BaseException as e: + LOGGER.warning(f"Unable to process trace outputs: {repr(e)}") + + def _is_traceable_function(func: Callable) -> bool: return getattr(func, "__langsmith_traceable__", False) @@ -1441,3 +1479,233 @@ def _get_inputs_safe( except BaseException as e: LOGGER.debug(f"Failed to get inputs for {signature}: {e}") return {"args": args, "kwargs": kwargs} + + +def _set_tracing_context(context: Dict[str, Any]): + """Set the tracing context.""" + for k, v in context.items(): + var = _CONTEXT_KEYS[k] + var.set(v) + + +def _process_iterator( + generator: Iterator[T], + run_container: _TraceableContainer, + is_llm_run: bool, + # Results is mutated + results: List[Any], +) -> Generator[T, None, Any]: + try: + while True: + item = run_container["context"].run(next, generator) + if is_llm_run and run_container["new_run"]: + run_container["new_run"].add_event( + { + "name": "new_token", + "time": datetime.datetime.now( + datetime.timezone.utc + ).isoformat(), + "kwargs": {"token": item}, + } + ) + results.append(item) + yield item + except StopIteration as e: + return e.value + + +async def _process_async_iterator( + generator: AsyncIterator[T], + run_container: _TraceableContainer, + *, + is_llm_run: bool, + accepts_context: bool, + results: List[Any], +) -> AsyncGenerator[T, None]: + try: + while True: + if accepts_context: + item = await asyncio.create_task( # type: ignore[call-arg, var-annotated] + aitertools.py_anext(generator), # type: ignore[arg-type] + context=run_container["context"], + ) + else: + # Python < 3.11 + with tracing_context(**get_tracing_context(run_container["context"])): + item = await aitertools.py_anext(generator) + if is_llm_run and run_container["new_run"]: + run_container["new_run"].add_event( + { + "name": "new_token", + "time": datetime.datetime.now( + datetime.timezone.utc + ).isoformat(), + "kwargs": {"token": item}, + } + ) + results.append(item) + yield item + except StopAsyncIteration: + pass + + +T = TypeVar("T") + + +class _TracedStreamBase(Generic[T]): + """Base class for traced stream objects.""" + + def __init__( + self, + stream: Union[Iterator[T], AsyncIterator[T]], + trace_container: _TraceableContainer, + reduce_fn: Optional[Callable] = None, + ): + self.__ls_stream__ = stream + self.__ls_trace_container__ = trace_container + self.__ls_completed__ = False + self.__ls_reduce_fn__ = reduce_fn + 
self.__ls_accumulated_output__: list[T] = [] + self.__is_llm_run__ = ( + trace_container["new_run"].run_type == "llm" + if trace_container["new_run"] + else False + ) + + def __getattr__(self, name: str): + return getattr(self.__ls_stream__, name) + + def __dir__(self): + return list(set(dir(self.__class__) + dir(self.__ls_stream__))) + + def __repr__(self): + return f"Traceable({self.__ls_stream__!r})" + + def __str__(self): + return str(self.__ls_stream__) + + def __del__(self): + try: + if not self.__ls_completed__: + self._end_trace() + except BaseException: + pass + try: + self.__ls_stream__.__del__() + except BaseException: + pass + + def _end_trace(self, error: Optional[BaseException] = None): + if self.__ls_completed__: + return + try: + if self.__ls_reduce_fn__: + reduced_output = self.__ls_reduce_fn__(self.__ls_accumulated_output__) + else: + reduced_output = self.__ls_accumulated_output__ + _container_end( + self.__ls_trace_container__, outputs=reduced_output, error=error + ) + finally: + self.__ls_completed__ = True + + +class _TracedStream(_TracedStreamBase, Generic[T]): + """A wrapper for synchronous stream objects that handles tracing.""" + + def __init__( + self, + stream: Iterator[T], + trace_container: _TraceableContainer, + reduce_fn: Optional[Callable] = None, + ): + super().__init__( + stream=stream, trace_container=trace_container, reduce_fn=reduce_fn + ) + self.__ls_stream__ = stream + self.__ls__gen__ = _process_iterator( + self.__ls_stream__, + self.__ls_trace_container__, + is_llm_run=self.__is_llm_run__, + results=self.__ls_accumulated_output__, + ) + + def __next__(self) -> T: + try: + return next(self.__ls__gen__) + except StopIteration: + self._end_trace() + raise + + def __iter__(self) -> Iterator[T]: + try: + yield from self.__ls__gen__ + except BaseException as e: + self._end_trace(error=e) + raise + else: + self._end_trace() + + def __enter__(self): + return self.__ls_stream__.__enter__() + + def __exit__(self, exc_type, exc_val, exc_tb): + try: + return self.__ls_stream__.__exit__(exc_type, exc_val, exc_tb) + finally: + self._end_trace(error=exc_val if exc_type else None) + + +class _TracedAsyncStream(_TracedStreamBase, Generic[T]): + """A wrapper for asynchronous stream objects that handles tracing.""" + + def __init__( + self, + stream: AsyncIterator[T], + trace_container: _TraceableContainer, + reduce_fn: Optional[Callable] = None, + ): + super().__init__( + stream=stream, trace_container=trace_container, reduce_fn=reduce_fn + ) + self.__ls_stream__ = stream + self.__ls_gen = _process_async_iterator( + generator=self.__ls_stream__, + run_container=self.__ls_trace_container__, + is_llm_run=self.__is_llm_run__, + accepts_context=aitertools.asyncio_accepts_context(), + results=self.__ls_accumulated_output__, + ) + + async def _aend_trace(self, error: Optional[BaseException] = None): + ctx = copy_context() + await asyncio.shield( + aitertools.aio_to_thread(self._end_trace, error, __ctx=ctx) + ) + _set_tracing_context(get_tracing_context(ctx)) + + async def __anext__(self) -> T: + try: + return cast(T, await aitertools.py_anext(self.__ls_gen)) + except StopAsyncIteration: + await self._aend_trace() + raise + + async def __aiter__(self) -> AsyncIterator[T]: + try: + async for item in self.__ls_gen: + yield item + except BaseException: + await self._aend_trace() + raise + else: + await self._aend_trace() + + async def __aenter__(self): + return await self.__ls_stream__.__aenter__() + + async def __aexit__(self, exc_type, exc_val, exc_tb): + try: + return 
await self.__ls_stream__.__aexit__(exc_type, exc_val, exc_tb) + finally: + await self._aend_trace() diff --git a/python/langsmith/wrappers/_openai.py b/python/langsmith/wrappers/_openai.py index 663c3c3f1..014d364cd 100644 --- a/python/langsmith/wrappers/_openai.py +++ b/python/langsmith/wrappers/_openai.py @@ -193,11 +193,6 @@ async def acreate(*args, stream: bool = False, **kwargs): _invocation_params_fn=invocation_params_fn, **textra, ) - if stream: - # TODO: This slightly alters the output to be a generator instead of the - # stream object. We can probably fix this with a bit of simple changes - res = decorator(original_create)(*args, stream=stream, **kwargs) - return res return await decorator(original_create)(*args, stream=stream, **kwargs) return acreate if run_helpers.is_async(original_create) else create diff --git a/python/tests/integration_tests/wrappers/test_openai.py b/python/tests/integration_tests/wrappers/test_openai.py index d12e77da6..32dcd85c2 100644 --- a/python/tests/integration_tests/wrappers/test_openai.py +++ b/python/tests/integration_tests/wrappers/test_openai.py @@ -114,6 +114,8 @@ def test_completions_sync_api(mock_session: mock.MagicMock, stream: bool): patched_chunks = list(patched) assert len(original_chunks) == len(patched_chunks) assert [o.choices == p.choices for o, p in zip(original_chunks, patched_chunks)] + assert original.response + assert patched.response else: assert type(original) == type(patched) assert original.choices == patched.choices @@ -165,6 +167,8 @@ async def test_completions_async_api(mock_session: mock.MagicMock, stream: bool) patched_chunks.append(chunk) assert len(original_chunks) == len(patched_chunks) assert [o.choices == p.choices for o, p in zip(original_chunks, patched_chunks)] + assert original.response + assert patched.response else: assert type(original) == type(patched) assert original.choices == patched.choices diff --git a/python/tests/unit_tests/test_run_helpers.py b/python/tests/unit_tests/test_run_helpers.py index a0bff6bba..2f48dbff7 100644 --- a/python/tests/unit_tests/test_run_helpers.py +++ b/python/tests/unit_tests/test_run_helpers.py @@ -16,6 +16,7 @@ from langsmith import Client from langsmith import schemas as ls_schemas from langsmith import utils as ls_utils +from langsmith._internal import _aiter as aitertools from langsmith.run_helpers import ( _get_inputs, as_runnable, @@ -32,7 +33,7 @@ def _get_calls( mock_client: Any, minimum: Optional[int] = 0, verbs: Set[str] = {"POST"}, - attempts: int = 5, + attempts: int = 10, ) -> list: calls = [] for _ in range(attempts): @@ -200,29 +201,38 @@ def mock_client() -> Client: @pytest.mark.parametrize("use_next", [True, False]) -def test_traceable_iterator(use_next: bool, mock_client: Client) -> None: +@pytest.mark.parametrize("return_val", [None, "foo"]) +def test_traceable_iterator( + use_next: bool, return_val: Optional[str], mock_client: Client +) -> None: with tracing_context(enabled=True): @traceable(client=mock_client) - def my_iterator_fn(a, b, d, **kwargs): + def my_iterator_fn(a, b, d, **kwargs) -> Any: assert kwargs == {"e": 5} for i in range(a + b + d): yield i + return return_val expected = [0, 1, 2, 3, 4, 5] + if return_val is not None: + expected.append(return_val) genout = my_iterator_fn(1, 2, 3, e=5) if use_next: results = [] while True: try: results.append(next(genout)) - except StopIteration: + except StopIteration as e: + assert e.value == return_val + if e.value is not None: + results.append(e.value) break else: results = list(genout) + if return_val 
is not None:
+                results.append(return_val)
         assert results == expected
-        # Wait for batcher
-        # check the mock_calls
         mock_calls = _get_calls(mock_client, minimum=1)
         assert 1 <= len(mock_calls) <= 2
@@ -235,6 +245,109 @@ def my_iterator_fn(a, b, d, **kwargs):
     assert body["post"][0]["outputs"]["output"] == expected
 
 
+class MyStreamObject:
+    def __init__(self, some_values: list):
+        self.vals = some_values
+        self._iter = iter(self.vals)
+
+    def __next__(self):
+        return next(self._iter)
+
+    def __iter__(self):
+        yield from self.vals
+
+
+class MyAsyncStreamObject:
+    def __init__(self, some_values: list):
+        self.vals = some_values
+
+        async def iter():
+            for val in some_values:
+                yield val
+
+        self._iter = iter()
+
+    async def __anext__(self):
+        return await aitertools.py_anext(self._iter)
+
+    async def __aiter__(self):
+        async for val in self._iter:
+            yield val
+
+
+@pytest.mark.parametrize("use_next", [True, False])
+@pytest.mark.parametrize("response_type", ["sync", "async"])
+async def test_traceable_stream(
+    use_next: bool, response_type: str, mock_client: Client
+) -> None:
+    def reduce_fn(results: list):
+        return {"my_output": results}
+
+    @traceable(client=mock_client, reduce_fn=reduce_fn)
+    def my_stream_fn(a, b, d, **kwargs):
+        assert kwargs == {"e": 5}
+        vals = [0, 1, 2, 3, 4, 5]
+        if response_type == "sync":
+            return MyStreamObject(vals)
+        else:
+            return MyAsyncStreamObject(vals)
+
+    with tracing_context(enabled=True):
+        expected = [0, 1, 2, 3, 4, 5]
+        genout = my_stream_fn(1, 2, 3, e=5)
+        # assert getattr(genout, "vals") == expected
+        if use_next:
+            results = []
+            if response_type == "sync":
+                while True:
+                    try:
+                        results.append(next(genout))
+                    except StopIteration:
+                        break
+            else:
+                while True:
+                    try:
+                        results.append(await aitertools.py_anext(genout))
+                    except StopAsyncIteration:
+                        break
+
+        else:
+            if response_type == "sync":
+                results = list(genout)
+            else:
+                results = [r async for r in genout]
+        assert results == expected
+        # check the mock_calls
+        mock_calls = _get_calls(mock_client, minimum=1)
+        assert 1 <= len(mock_calls) <= 2
+
+        call = mock_calls[0]
+        assert call.args[0] == "POST"
+        assert call.args[1].startswith("https://api.smith.langchain.com")
+        call_data = [json.loads(mock_call.kwargs["data"]) for mock_call in mock_calls]
+        body = call_data[0]
+        assert body["post"]
+        assert body["post"][0]["name"] == "my_stream_fn"
+        if body["post"][0]["outputs"]:
+            assert body["post"][0]["outputs"] == {"my_output": expected}
+        else:
+            first_patch = next((d for d in call_data if d.get("patch")), None)
+            attempt = 0
+            while first_patch is None:
+                time.sleep(0.2)
+                if attempt > 2:
+                    assert False, "Could not get patch"
+                mock_calls = _get_calls(mock_client, minimum=1)
+                call_data = [
+                    json.loads(mock_call.kwargs["data"]) for mock_call in mock_calls
+                ]
+                first_patch = next((d for d in call_data if d.get("patch")), None)
+                attempt += 1
+
+            assert first_patch["patch"][0]["name"] == "my_stream_fn"
+            assert first_patch["patch"][0]["outputs"] == {"my_output": expected}
+
+
 @pytest.mark.parametrize("use_next", [True, False])
 async def test_traceable_async_iterator(use_next: bool, mock_client: Client) -> None:
     with tracing_context(enabled=True):

From bd828947b8d5cbde18b8ddd51067922d05497edf Mon Sep 17 00:00:00 2001
From: William Fu-Hinthorn <13333726+hinthornw@users.noreply.github.com>
Date: Fri, 20 Sep 2024 17:40:57 -0700
Subject: [PATCH 03/12] merge

---
 python/langsmith/wrappers/_anthropic.py  |   5 -
 .../wrappers/test_anthropic.py           | 110 ++++++++++--------
 2 files changed, 59 insertions(+), 56 deletions(-)

diff
--git a/python/langsmith/wrappers/_anthropic.py b/python/langsmith/wrappers/_anthropic.py index b665add1c..99d4b02dd 100644 --- a/python/langsmith/wrappers/_anthropic.py +++ b/python/langsmith/wrappers/_anthropic.py @@ -147,11 +147,6 @@ async def acreate(*args, **kwargs): process_inputs=_strip_not_given, **textra, ) - if stream: - # TODO: This slightly alters the output to be a generator instead of the - # stream object. We can probably fix this with a bit of simple changes - res = decorator(original_create)(*args, stream=stream, **kwargs) - return res return await decorator(original_create)(*args, **kwargs) return acreate if run_helpers.is_async(original_create) else create diff --git a/python/tests/integration_tests/wrappers/test_anthropic.py b/python/tests/integration_tests/wrappers/test_anthropic.py index 1efc70bf4..057400708 100644 --- a/python/tests/integration_tests/wrappers/test_anthropic.py +++ b/python/tests/integration_tests/wrappers/test_anthropic.py @@ -1,9 +1,11 @@ # mypy: disable-error-code="attr-defined, union-attr, arg-type, call-overload" +import json import time from unittest import mock import pytest +from langsmith import Client from langsmith.wrappers import wrap_anthropic model_name = "claude-3-haiku-20240307" @@ -13,53 +15,62 @@ @pytest.mark.parametrize("stream", [False, True]) def test_chat_sync_api(mock_session: mock.MagicMock, stream: bool): import anthropic # noqa + from tests.unit_tests.test_run_helpers import _get_calls + mock_client = Client(session=mock_session) original_client = anthropic.Anthropic() - patched_client = wrap_anthropic(anthropic.Anthropic()) + patched_client = wrap_anthropic( + anthropic.Anthropic(), tracing_extra={"client": mock_client} + ) messages = [{"role": "user", "content": "Say 'foo'"}] if stream: original = original_client.messages.stream( - messages=messages, # noqa: [arg-type] + messages=messages, temperature=0, model=model_name, max_tokens=3, ) patched = patched_client.messages.stream( - messages=messages, # noqa: [arg-type] + messages=messages, temperature=0, model=model_name, max_tokens=3, ) - # We currently return a generator, so - # the types aren't the same. - patched_chunks = list(patched) - original_chunks = list(original) + with original as om: + original_chunks = list(om.text_stream) + with patched as pm: + patched_chunks = list(pm.text_stream) assert len(original_chunks) == len(patched_chunks) - assert "".join([c.text for c in original.content]) == "".join( - c.text for c in patched.content - ) + assert "".join(original_chunks) == "".join(patched_chunks) else: original = original_client.messages.create( - messages=messages, # noqa: [arg-type] + messages=messages, temperature=0, model=model_name, max_tokens=3, ) patched = patched_client.messages.create( - messages=messages, # noqa: [arg-type] + messages=messages, temperature=0, model=model_name, max_tokens=3, ) assert type(original) == type(patched) assert "".join([c.text for c in original.content]) == "".join( - c.text for c in patched.content + [c.text for c in patched.content] ) - # Give the thread a chance. 
- time.sleep(0.01) - for call in mock_session.return_value.request.call_args_list: - assert call[0][0].upper() == "POST" + + calls = _get_calls(mock_client, minimum=1) + assert calls + datas = [json.loads(call.kwargs["data"]) for call in calls] + outputs = None + for data in datas: + if outputs := data["post"][0]["outputs"]: + break + if data.get("patch"): + outputs = data["patch"][0]["outputs"] + break @mock.patch("langsmith.client.requests.Session") @@ -84,16 +95,12 @@ async def test_chat_async_api(mock_session: mock.MagicMock, stream: bool): model=model_name, max_tokens=3, ) - # We currently return a generator, so - # the types aren't the same. - original_chunks = [] - async for chunk in original: - original_chunks.append(chunk) - patched_chunks = [] - async for chunk in patched: - patched_chunks.append(chunk) + original_chunks = [chunk async for chunk in original] + patched_chunks = [chunk async for chunk in patched] assert len(original_chunks) == len(patched_chunks) - assert [o.choices == p.choices for o, p in zip(original_chunks, patched_chunks)] + assert "".join([c.content[0].text for c in original_chunks]) == "".join( + [c.content[0].text for c in patched_chunks] + ) else: original = await original_client.messages.create( messages=messages, @@ -108,46 +115,51 @@ async def test_chat_async_api(mock_session: mock.MagicMock, stream: bool): max_tokens=3, ) assert type(original) == type(patched) - assert original.choices == patched.choices - # Give the thread a chance. + assert "".join([c.text for c in original.content]) == "".join( + [c.text for c in patched.content] + ) + time.sleep(0.1) + assert mock_session.return_value.request.call_count > 0 for call in mock_session.return_value.request.call_args_list: assert call[0][0].upper() == "POST" @mock.patch("langsmith.client.requests.Session") +@pytest.mark.parametrize("stream", [False, True]) def test_completions_sync_api(mock_session: mock.MagicMock, stream: bool): import anthropic original_client = anthropic.Anthropic() patched_client = wrap_anthropic(anthropic.Anthropic()) - prompt = ("Say 'Foo' then stop.",) + prompt = "Human: Say 'Hi i'm Claude' then stop.\n\nAssistant:" original = original_client.completions.create( - model="gpt-3.5-turbo-instruct", + model="claude-2.1", prompt=prompt, temperature=0, stream=stream, max_tokens_to_sample=3, ) patched = patched_client.completions.create( - model="gpt-3.5-turbo-instruct", + model="claude-2.1", prompt=prompt, temperature=0, stream=stream, max_tokens_to_sample=3, ) if stream: - # We currently return a generator, so - # the types aren't the same. original_chunks = list(original) patched_chunks = list(patched) assert len(original_chunks) == len(patched_chunks) - assert [o.choices == p.choices for o, p in zip(original_chunks, patched_chunks)] + assert "".join([c.completion for c in original_chunks]) == "".join( + [c.completion for c in patched_chunks] + ) else: assert type(original) == type(patched) - assert original.choices == patched.choices - # Give the thread a chance. 
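+        # (Note: Anthropic's legacy completions API returns generated text on
+        # `.completion` rather than OpenAI-style `.choices`; the assertions
+        # below assume that shape.)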
+ assert original.completion == patched.completion + time.sleep(0.1) + assert mock_session.return_value.request.call_count > 0 for call in mock_session.return_value.request.call_args_list: assert call[0][0].upper() == "POST" @@ -159,37 +171,33 @@ async def test_completions_async_api(mock_session: mock.MagicMock, stream: bool) original_client = anthropic.AsyncAnthropic() patched_client = wrap_anthropic(anthropic.AsyncAnthropic()) - prompt = ("Say 'Hi i'm ChatGPT' then stop.",) + prompt = "Human: Say 'Hi i'm Claude' then stop.\n\nAssistant:" original = await original_client.completions.create( - model="gpt-3.5-turbo-instruct", + model="claude-2.1", prompt=prompt, temperature=0, stream=stream, max_tokens_to_sample=3, ) patched = await patched_client.completions.create( - model="gpt-3.5-turbo-instruct", + model="claude-2.1", prompt=prompt, temperature=0, stream=stream, max_tokens_to_sample=3, ) if stream: - # We currently return a generator, so - # the types aren't the same. - original_chunks = [] - async for chunk in original: - original_chunks.append(chunk) - patched_chunks = [] - async for chunk in patched: - patched_chunks.append(chunk) + original_chunks = [chunk async for chunk in original] + patched_chunks = [chunk async for chunk in patched] assert len(original_chunks) == len(patched_chunks) - assert [o.choices == p.choices for o, p in zip(original_chunks, patched_chunks)] + assert "".join([c.completion for c in original_chunks]) == "".join( + [c.completion for c in patched_chunks] + ) else: assert type(original) == type(patched) - assert original.choices == patched.choices - # Give the thread a chance. + assert original.completion == patched.completion + time.sleep(0.1) - assert mock_session.return_value.request.call_count >= 1 + assert mock_session.return_value.request.call_count > 0 for call in mock_session.return_value.request.call_args_list: assert call[0][0].upper() == "POST" From 0c4d42a42c853530c3f6cdb3e09d924b18f9cd68 Mon Sep 17 00:00:00 2001 From: isaac hershenson Date: Fri, 24 Jan 2025 15:33:48 -0800 Subject: [PATCH 04/12] wip --- python/langsmith/wrappers/_anthropic.py | 2 +- .../wrappers/test_anthropic.py | 59 ++++++++++++------- 2 files changed, 38 insertions(+), 23 deletions(-) diff --git a/python/langsmith/wrappers/_anthropic.py b/python/langsmith/wrappers/_anthropic.py index 99d4b02dd..4dcb096b0 100644 --- a/python/langsmith/wrappers/_anthropic.py +++ b/python/langsmith/wrappers/_anthropic.py @@ -102,7 +102,7 @@ def _reduce_chat(all_chunks: List) -> dict: def _reduce_completions(all_chunks: List[Completion]) -> dict: all_content = [] for chunk in all_chunks: - content = chunk.choices[0].text + content = chunk.completion if content is not None: all_content.append(content) content = "".join(all_content) diff --git a/python/tests/integration_tests/wrappers/test_anthropic.py b/python/tests/integration_tests/wrappers/test_anthropic.py index 057400708..144f8dcf0 100644 --- a/python/tests/integration_tests/wrappers/test_anthropic.py +++ b/python/tests/integration_tests/wrappers/test_anthropic.py @@ -17,7 +17,7 @@ def test_chat_sync_api(mock_session: mock.MagicMock, stream: bool): import anthropic # noqa from tests.unit_tests.test_run_helpers import _get_calls - mock_client = Client(session=mock_session) + mock_client = Client(session=mock_session()) original_client = anthropic.Anthropic() patched_client = wrap_anthropic( anthropic.Anthropic(), tracing_extra={"client": mock_client} @@ -66,40 +66,48 @@ def test_chat_sync_api(mock_session: mock.MagicMock, stream: bool): datas = 
[json.loads(call.kwargs["data"]) for call in calls] outputs = None for data in datas: - if outputs := data["post"][0]["outputs"]: - break + if data.get("post"): + if outputs := data["post"][0]["outputs"]: + break if data.get("patch"): outputs = data["patch"][0]["outputs"] break + + assert outputs @mock.patch("langsmith.client.requests.Session") @pytest.mark.parametrize("stream", [False, True]) async def test_chat_async_api(mock_session: mock.MagicMock, stream: bool): import anthropic # noqa - + + client = Client(session=mock_session()) original_client = anthropic.AsyncAnthropic() - patched_client = wrap_anthropic(anthropic.AsyncAnthropic()) + patched_client = wrap_anthropic(anthropic.AsyncAnthropic(), tracing_extra={"client": client}) messages = [{"role": "user", "content": "Say 'foo'"}] if stream: - original = await original_client.messages.stream( + original_chunks, patched_chunks = [], [] + async with original_client.messages.stream( messages=messages, temperature=0, model=model_name, max_tokens=3, - ) - patched = await patched_client.messages.stream( + ) as stream: + async for chunk in stream: + original_chunks.append(chunk) + async with patched_client.messages.stream( messages=messages, temperature=0, model=model_name, max_tokens=3, - ) - original_chunks = [chunk async for chunk in original] - patched_chunks = [chunk async for chunk in patched] + ) as stream: + async for chunk in stream: + patched_chunks.append(chunk) assert len(original_chunks) == len(patched_chunks) - assert "".join([c.content[0].text for c in original_chunks]) == "".join( - [c.content[0].text for c in patched_chunks] + + assert "".join([c.message.content[0].text for c in original_chunks if hasattr(c, 'message') and len(c.message.content)]) == "".join( + [c.message.content[0].text for c in patched_chunks if hasattr(c, 'message') and len(c.message.content)] ) else: original = await original_client.messages.create( @@ -120,8 +128,10 @@ async def test_chat_async_api(mock_session: mock.MagicMock, stream: bool): ) time.sleep(0.1) - assert mock_session.return_value.request.call_count > 0 - for call in mock_session.return_value.request.call_args_list: + assert mock_session.return_value.request.call_count > 1 + # This is the info call + assert mock_session.return_value.request.call_args_list[0][0][0].upper() == "GET" + for call in mock_session.return_value.request.call_args_list[1:]: assert call[0][0].upper() == "POST" @@ -129,9 +139,9 @@ async def test_chat_async_api(mock_session: mock.MagicMock, stream: bool): @pytest.mark.parametrize("stream", [False, True]) def test_completions_sync_api(mock_session: mock.MagicMock, stream: bool): import anthropic - + client = Client(session=mock_session()) original_client = anthropic.Anthropic() - patched_client = wrap_anthropic(anthropic.Anthropic()) + patched_client = wrap_anthropic(anthropic.Anthropic(), tracing_extra={"client": client}) prompt = "Human: Say 'Hi i'm Claude' then stop.\n\nAssistant:" original = original_client.completions.create( model="claude-2.1", @@ -159,8 +169,10 @@ def test_completions_sync_api(mock_session: mock.MagicMock, stream: bool): assert original.completion == patched.completion time.sleep(0.1) - assert mock_session.return_value.request.call_count > 0 - for call in mock_session.return_value.request.call_args_list: + assert mock_session.return_value.request.call_count > 1 + # This is the info call + assert mock_session.return_value.request.call_args_list[0][0][0].upper() == "GET" + for call in mock_session.return_value.request.call_args_list[1:]: assert 
call[0][0].upper() == "POST" @@ -169,8 +181,9 @@ def test_completions_sync_api(mock_session: mock.MagicMock, stream: bool): async def test_completions_async_api(mock_session: mock.MagicMock, stream: bool): import anthropic + client = Client(session=mock_session()) original_client = anthropic.AsyncAnthropic() - patched_client = wrap_anthropic(anthropic.AsyncAnthropic()) + patched_client = wrap_anthropic(anthropic.AsyncAnthropic(), tracing_extra={"client": client}) prompt = "Human: Say 'Hi i'm Claude' then stop.\n\nAssistant:" original = await original_client.completions.create( model="claude-2.1", @@ -198,6 +211,8 @@ async def test_completions_async_api(mock_session: mock.MagicMock, stream: bool) assert original.completion == patched.completion time.sleep(0.1) - assert mock_session.return_value.request.call_count > 0 - for call in mock_session.return_value.request.call_args_list: + assert mock_session.return_value.request.call_count > 1 + # This is the info call + assert mock_session.return_value.request.call_args_list[0][0][0].upper() == "GET" + for call in mock_session.return_value.request.call_args_list[1:]: assert call[0][0].upper() == "POST" From 2b15984d9443cbb2f9da22e5222fe8c9964dbf54 Mon Sep 17 00:00:00 2001 From: isaac hershenson Date: Fri, 24 Jan 2025 16:06:34 -0800 Subject: [PATCH 05/12] working --- python/langsmith/run_helpers.py | 4 - python/langsmith/wrappers/_anthropic.py | 11 ++ python/poetry.lock | 129 +++++++++++++++++- python/pyproject.toml | 1 + .../wrappers/test_anthropic.py | 111 ++++++++++++++- 5 files changed, 242 insertions(+), 14 deletions(-) diff --git a/python/langsmith/run_helpers.py b/python/langsmith/run_helpers.py index f519e8ddb..07d73ce56 100644 --- a/python/langsmith/run_helpers.py +++ b/python/langsmith/run_helpers.py @@ -1473,7 +1473,6 @@ def _handle_container_end( LOGGER.warning(f"Unable to process trace outputs: {repr(e)}") - def _is_traceable_function(func: Any) -> bool: return getattr(func, "__langsmith_traceable__", False) @@ -1508,7 +1507,6 @@ def _get_inputs_safe( return {"args": args, "kwargs": kwargs} - def _is_attachment(param: inspect.Parameter) -> bool: return param.annotation == schemas.Attachment or ( get_origin(param.annotation) == Annotated @@ -1555,7 +1553,6 @@ def _get_inputs_and_attachments_safe( return {"args": args, "kwargs": kwargs}, {} - def _set_tracing_context(context: Dict[str, Any]): """Set the tracing context.""" for k, v in context.items(): @@ -1786,7 +1783,6 @@ async def __aexit__(self, exc_type, exc_val, exc_tb): await self._aend_trace() - def _get_function_result(results: list, reduce_fn: Callable) -> Any: if results: if reduce_fn is not None: diff --git a/python/langsmith/wrappers/_anthropic.py b/python/langsmith/wrappers/_anthropic.py index 4dcb096b0..937eabc18 100644 --- a/python/langsmith/wrappers/_anthropic.py +++ b/python/langsmith/wrappers/_anthropic.py @@ -188,4 +188,15 @@ def wrap_anthropic(client: C, *, tracing_extra: Optional[TracingExtra] = None) - _reduce_completions, tracing_extra=tracing_extra, ) + + if ( + hasattr(client, "beta") + and hasattr(client.beta, "messages") + and hasattr(client.beta.messages, "create") + ): + client.beta.messages.create = _get_wrapper( # type: ignore[method-assign] + client.beta.messages.create, # type: ignore + "Anthropic", + tracing_extra=tracing_extra, + ) return client diff --git a/python/poetry.lock b/python/poetry.lock index 4be08f1d0..a2b9a9b79 100644 --- a/python/poetry.lock +++ b/python/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.5 
and should not be changed by hand. +# This file is automatically @generated by Poetry 2.0.1 and should not be changed by hand. [[package]] name = "annotated-types" @@ -6,17 +6,44 @@ version = "0.7.0" description = "Reusable constraint types to use with typing.Annotated" optional = false python-versions = ">=3.8" +groups = ["main", "dev", "lint", "test"] files = [ {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, ] +[[package]] +name = "anthropic" +version = "0.45.0" +description = "The official Python library for the anthropic API" +optional = false +python-versions = ">=3.8" +groups = ["test"] +files = [ + {file = "anthropic-0.45.0-py3-none-any.whl", hash = "sha256:f36aff71d2c232945e64d1970be68a91b05a2ef5e3afa6c1ff195c3303a95ad3"}, + {file = "anthropic-0.45.0.tar.gz", hash = "sha256:4e8541dc355332090bfc51b84549c19b649a13a23dbd6bd68e1d012e08551025"}, +] + +[package.dependencies] +anyio = ">=3.5.0,<5" +distro = ">=1.7.0,<2" +httpx = ">=0.23.0,<1" +jiter = ">=0.4.0,<1" +pydantic = ">=1.9.0,<3" +sniffio = "*" +typing-extensions = ">=4.10,<5" + +[package.extras] +bedrock = ["boto3 (>=1.28.57)", "botocore (>=1.31.57)"] +vertex = ["google-auth (>=2,<3)"] + [[package]] name = "anyio" version = "4.8.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = false python-versions = ">=3.9" +groups = ["main", "dev", "lint", "test"] files = [ {file = "anyio-4.8.0-py3-none-any.whl", hash = "sha256:b5011f270ab5eb0abf13385f851315585cc37ef330dd88e27ec3d34d651fd47a"}, {file = "anyio-4.8.0.tar.gz", hash = "sha256:1d9fe889df5212298c0c0723fa20479d1b94883a2df44bd3897aa91083316f7a"}, @@ -39,6 +66,7 @@ version = "24.3.0" description = "Classes Without Boilerplate" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "attrs-24.3.0-py3-none-any.whl", hash = "sha256:ac96cd038792094f438ad1f6ff80837353805ac950cd2aa0e0625ef19850c308"}, {file = "attrs-24.3.0.tar.gz", hash = "sha256:8f5c07333d543103541ba7be0e2ce16eeee8130cb0b3f9238ab904ce1e85baff"}, @@ -58,6 +86,7 @@ version = "24.10.0" description = "The uncompromising code formatter." optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "black-24.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e6668650ea4b685440857138e5fe40cde4d652633b1bdffc62933d0db4ed9812"}, {file = "black-24.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1c536fcf674217e87b8cc3657b81809d3c085d7bf3ef262ead700da345bfa6ea"}, @@ -104,6 +133,7 @@ version = "2024.12.14" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" +groups = ["main", "dev", "lint", "test"] files = [ {file = "certifi-2024.12.14-py3-none-any.whl", hash = "sha256:1275f7a45be9464efc1173084eaa30f866fe2e47d389406136d332ed4967ec56"}, {file = "certifi-2024.12.14.tar.gz", hash = "sha256:b650d30f370c2b724812bee08008be0c4163b163ddaec3f2546c1caf65f191db"}, @@ -115,6 +145,8 @@ version = "1.17.1" description = "Foreign Function Interface for Python calling C code." 
optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "platform_python_implementation == \"PyPy\"" files = [ {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, @@ -194,6 +226,7 @@ version = "3.4.1" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "charset_normalizer-3.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de"}, {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176"}, @@ -295,6 +328,7 @@ version = "8.1.8" description = "Composable command line interface toolkit" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2"}, {file = "click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a"}, @@ -309,10 +343,12 @@ version = "0.4.6" description = "Cross-platform colored terminal text." optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +groups = ["main", "dev", "lint", "test"] files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] +markers = {main = "sys_platform == \"win32\"", dev = "sys_platform == \"win32\" or platform_system == \"Windows\"", lint = "platform_system == \"Windows\"", test = "sys_platform == \"win32\""} [[package]] name = "coverage" @@ -320,6 +356,7 @@ version = "7.6.10" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "coverage-7.6.10-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5c912978f7fbf47ef99cec50c4401340436d200d41d714c7a4766f377c5b7b78"}, {file = "coverage-7.6.10-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a01ec4af7dfeb96ff0078ad9a48810bb0cc8abcb0115180c6013a6b26237626c"}, @@ -397,6 +434,7 @@ version = "0.6.7" description = "Easily serialize dataclasses to and from JSON." 
optional = false python-versions = "<4.0,>=3.7" +groups = ["dev"] files = [ {file = "dataclasses_json-0.6.7-py3-none-any.whl", hash = "sha256:0dbf33f26c8d5305befd61b39d2b3414e8a407bedc2834dea9b8d642666fb40a"}, {file = "dataclasses_json-0.6.7.tar.gz", hash = "sha256:b6b3e528266ea45b9535223bc53ca645f5208833c29229e847b3f26a1cc55fc0"}, @@ -412,6 +450,7 @@ version = "1.9.0" description = "Distro - an OS platform information API" optional = false python-versions = ">=3.6" +groups = ["lint", "test"] files = [ {file = "distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2"}, {file = "distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed"}, @@ -423,6 +462,8 @@ version = "1.2.2" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" +groups = ["main", "dev", "lint", "test"] +markers = "python_version < \"3.11\"" files = [ {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, @@ -437,6 +478,7 @@ version = "2.1.1" description = "execnet: rapid multi-Python deployment" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "execnet-2.1.1-py3-none-any.whl", hash = "sha256:26dee51f1b80cebd6d0ca8e74dd8745419761d3bef34163928cbebbdc4749fdc"}, {file = "execnet-2.1.1.tar.gz", hash = "sha256:5189b52c6121c24feae288166ab41b32549c7e2348652736540b9e6e7d4e72e3"}, @@ -451,6 +493,7 @@ version = "0.115.6" description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "fastapi-0.115.6-py3-none-any.whl", hash = "sha256:e9240b29e36fa8f4bb7290316988e90c381e5092e0cbe84e7818cc3713bcf305"}, {file = "fastapi-0.115.6.tar.gz", hash = "sha256:9ec46f7addc14ea472958a96aae5b5de65f39721a46aaf5705c480d9a8b76654"}, @@ -471,6 +514,7 @@ version = "1.5.1" description = "Let your Python tests travel through time" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "freezegun-1.5.1-py3-none-any.whl", hash = "sha256:bf111d7138a8abe55ab48a71755673dbaa4ab87f4cff5634a4442dfec34c15f1"}, {file = "freezegun-1.5.1.tar.gz", hash = "sha256:b29dedfcda6d5e8e083ce71b2b542753ad48cfec44037b3fc79702e2980a89e9"}, @@ -485,6 +529,7 @@ version = "0.14.0" description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" optional = false python-versions = ">=3.7" +groups = ["main", "dev", "lint", "test"] files = [ {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, @@ -496,6 +541,7 @@ version = "1.0.7" description = "A minimal low-level HTTP client." optional = false python-versions = ">=3.8" +groups = ["main", "dev", "lint", "test"] files = [ {file = "httpcore-1.0.7-py3-none-any.whl", hash = "sha256:a3fff8f43dc260d5bd363d9f9cf1830fa3a458b332856f34282de498ed420edd"}, {file = "httpcore-1.0.7.tar.gz", hash = "sha256:8551cb62a169ec7162ac7be8d4817d561f60e08eaa485234898414bb5a8a0b4c"}, @@ -517,6 +563,7 @@ version = "0.27.2" description = "The next generation HTTP client." 
optional = false python-versions = ">=3.8" +groups = ["main", "dev", "lint", "test"] files = [ {file = "httpx-0.27.2-py3-none-any.whl", hash = "sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0"}, {file = "httpx-0.27.2.tar.gz", hash = "sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2"}, @@ -542,6 +589,7 @@ version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.6" +groups = ["main", "dev", "lint", "test"] files = [ {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, @@ -556,6 +604,7 @@ version = "2.0.0" description = "brain-dead simple config-ini parsing" optional = false python-versions = ">=3.7" +groups = ["main", "dev", "test"] files = [ {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, @@ -567,6 +616,7 @@ version = "0.8.2" description = "Fast iterable JSON parser." optional = false python-versions = ">=3.8" +groups = ["lint", "test"] files = [ {file = "jiter-0.8.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:ca8577f6a413abe29b079bc30f907894d7eb07a865c4df69475e868d73e71c7b"}, {file = "jiter-0.8.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b25bd626bde7fb51534190c7e3cb97cee89ee76b76d7585580e22f34f5e3f393"}, @@ -652,6 +702,8 @@ version = "0.1.0rc5" description = "" optional = true python-versions = ">=3.9" +groups = ["main"] +markers = "extra == \"langsmith-pyo3\"" files = [ {file = "langsmith_pyo3-0.1.0rc5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:670e5707f09e756e3b71f12a46f546826893158c2dde3250e212234e15ac8c70"}, {file = "langsmith_pyo3-0.1.0rc5-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:e0d2261c3f9ba07594c7450187604c4c4b1e10e3b3f4a4eb39587a495d6945e3"}, @@ -691,6 +743,8 @@ version = "3.0.0" description = "Python port of markdown-it. Markdown parsing, done right!" optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "extra == \"pytest\"" files = [ {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, @@ -715,6 +769,7 @@ version = "3.25.1" description = "A lightweight library for converting complex datatypes to and from native Python datatypes." 
optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "marshmallow-3.25.1-py3-none-any.whl", hash = "sha256:ec5d00d873ce473b7f2ffcb7104286a376c354cab0c2fa12f5573dab03e87210"}, {file = "marshmallow-3.25.1.tar.gz", hash = "sha256:f4debda3bb11153d81ac34b0d582bf23053055ee11e791b54b4b35493468040a"}, @@ -734,6 +789,8 @@ version = "0.1.2" description = "Markdown URL utilities" optional = true python-versions = ">=3.7" +groups = ["main"] +markers = "extra == \"pytest\"" files = [ {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, @@ -745,6 +802,7 @@ version = "6.1.0" description = "multidict implementation" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "multidict-6.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3380252550e372e8511d49481bd836264c009adb826b23fefcc5dd3c69692f60"}, {file = "multidict-6.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:99f826cbf970077383d7de805c0681799491cb939c25450b9b5b3ced03ca99f1"}, @@ -849,6 +907,7 @@ version = "1.2.1" description = "Parser for multipart/form-data" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "multipart-1.2.1-py3-none-any.whl", hash = "sha256:c03dc203bc2e67f6b46a599467ae0d87cf71d7530504b2c1ff4a9ea21d8b8c8c"}, {file = "multipart-1.2.1.tar.gz", hash = "sha256:829b909b67bc1ad1c6d4488fcdc6391c2847842b08323addf5200db88dbe9480"}, @@ -864,6 +923,7 @@ version = "1.14.1" description = "Optional static typing for Python" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "mypy-1.14.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:52686e37cf13d559f668aa398dd7ddf1f92c5d613e4f8cb262be2fb4fedb0fcb"}, {file = "mypy-1.14.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1fb545ca340537d4b45d3eecdb3def05e913299ca72c290326be19b3804b39c0"}, @@ -923,6 +983,7 @@ version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." 
optional = false python-versions = ">=3.5" +groups = ["dev"] files = [ {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, @@ -934,6 +995,7 @@ version = "2.0.2" description = "Fundamental package for array computing in Python" optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "numpy-2.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:51129a29dbe56f9ca83438b706e2e69a39892b5eda6cedcb6b0c9fdc9b0d3ece"}, {file = "numpy-2.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f15975dfec0cf2239224d80e32c3170b1d168335eaedee69da84fbe9f1f9cd04"}, @@ -988,6 +1050,7 @@ version = "1.59.9" description = "The official Python library for the openai API" optional = false python-versions = ">=3.8" +groups = ["lint"] files = [ {file = "openai-1.59.9-py3-none-any.whl", hash = "sha256:61a0608a1313c08ddf92fe793b6dbd1630675a1fe3866b2f96447ce30050c448"}, {file = "openai-1.59.9.tar.gz", hash = "sha256:ec1a20b0351b4c3e65c6292db71d8233515437c6065efd4fd50edeb55df5f5d2"}, @@ -1013,6 +1076,8 @@ version = "3.10.15" description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "platform_python_implementation != \"PyPy\"" files = [ {file = "orjson-3.10.15-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:552c883d03ad185f720d0c09583ebde257e41b9521b74ff40e08b7dec4559c04"}, {file = "orjson-3.10.15-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:616e3e8d438d02e4854f70bfdc03a6bcdb697358dbaa6bcd19cbe24d24ece1f8"}, @@ -1101,6 +1166,7 @@ version = "24.2" description = "Core utilities for Python packages" optional = false python-versions = ">=3.8" +groups = ["main", "dev", "test"] files = [ {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, @@ -1112,6 +1178,7 @@ version = "2.2.2.240807" description = "Type annotations for pandas" optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "pandas_stubs-2.2.2.240807-py3-none-any.whl", hash = "sha256:893919ad82be4275f0d07bb47a95d08bae580d3fdea308a7acfcb3f02e76186e"}, {file = "pandas_stubs-2.2.2.240807.tar.gz", hash = "sha256:64a559725a57a449f46225fbafc422520b7410bff9252b661a225b5559192a93"}, @@ -1127,6 +1194,7 @@ version = "0.12.1" description = "Utility library for gitignore style pattern matching of file paths." optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, @@ -1138,6 +1206,7 @@ version = "4.3.6" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"}, {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"}, @@ -1154,6 +1223,7 @@ version = "1.5.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" +groups = ["main", "dev", "test"] files = [ {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, @@ -1169,6 +1239,7 @@ version = "0.2.1" description = "Accelerated property cache" optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "propcache-0.2.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:6b3f39a85d671436ee3d12c017f8fdea38509e4f25b28eb25877293c98c243f6"}, {file = "propcache-0.2.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d51fbe4285d5db5d92a929e3e21536ea3dd43732c5b177c7ef03f918dff9f2"}, @@ -1260,6 +1331,7 @@ version = "5.9.8" description = "Cross-platform lib for process and system monitoring in Python." optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +groups = ["dev"] files = [ {file = "psutil-5.9.8-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:26bd09967ae00920df88e0352a91cff1a78f8d69b3ecabbfe733610c0af486c8"}, {file = "psutil-5.9.8-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:05806de88103b25903dff19bb6692bd2e714ccf9e668d050d144012055cbca73"}, @@ -1288,6 +1360,7 @@ version = "0.3.14" description = "Sampling profiler for Python programs" optional = false python-versions = "*" +groups = ["dev"] files = [ {file = "py_spy-0.3.14-py2.py3-none-macosx_10_7_x86_64.whl", hash = "sha256:5b342cc5feb8d160d57a7ff308de153f6be68dcf506ad02b4d67065f2bae7f45"}, {file = "py_spy-0.3.14-py2.py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:fe7efe6c91f723442259d428bf1f9ddb9c1679828866b353d539345ca40d9dd2"}, @@ -1304,6 +1377,8 @@ version = "2.22" description = "C parser in Python" optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "platform_python_implementation == \"PyPy\"" files = [ {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, @@ -1315,6 +1390,7 @@ version = "2.10.5" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" +groups = ["main", "dev", "lint", "test"] files = [ {file = "pydantic-2.10.5-py3-none-any.whl", hash = "sha256:4dd4e322dbe55472cb7ca7e73f4b63574eecccf2835ffa2af9021ce113c83c53"}, {file = "pydantic-2.10.5.tar.gz", hash = "sha256:278b38dbbaec562011d659ee05f63346951b3a248a6f3642e1bc68894ea2b4ff"}, @@ -1335,6 +1411,7 @@ version = "2.27.2" description = "Core functionality for Pydantic validation and serialization" optional = false python-versions = ">=3.8" +groups = ["main", "dev", "lint", "test"] files = [ {file = "pydantic_core-2.27.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2d367ca20b2f14095a8f4fa1210f5a7b78b8a20009ecced6b12818f455b1e9fa"}, {file = "pydantic_core-2.27.2-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:491a2b73db93fab69731eaee494f320faa4e093dbed776be1a829c2eb222c34c"}, @@ -1447,6 +1524,8 @@ version = "2.19.1" description = "Pygments is a syntax highlighting package written in Python." optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "extra == \"pytest\"" files = [ {file = "pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c"}, {file = "pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f"}, @@ -1461,6 +1540,7 @@ version = "2.8.1" description = "Python module to run and analyze benchmarks" optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "pyperf-2.8.1-py3-none-any.whl", hash = "sha256:12a974a800a96568575be51d229b88e6b14197d02440afd98e908d80a42a1a44"}, {file = "pyperf-2.8.1.tar.gz", hash = "sha256:ef103e21a4d04999315003026a2d659c48a7cfce5e1440f03d6e72591400713a"}, @@ -1478,6 +1558,7 @@ version = "7.4.4" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.7" +groups = ["main", "dev", "test"] files = [ {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, @@ -1500,6 +1581,7 @@ version = "0.21.2" description = "Pytest support for asyncio" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "pytest_asyncio-0.21.2-py3-none-any.whl", hash = "sha256:ab664c88bb7998f711d8039cacd4884da6430886ae8bbd4eded552ed2004f16b"}, {file = "pytest_asyncio-0.21.2.tar.gz", hash = "sha256:d67738fc232b94b326b9d060750beb16e0074210b98dd8b58a5239fa2a154f45"}, @@ -1518,6 +1600,7 @@ version = "4.1.0" description = "Pytest plugin for measuring coverage." 
optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"}, {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"}, @@ -1536,6 +1619,7 @@ version = "14.0" description = "pytest plugin to re-run tests to eliminate flaky failures" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "pytest-rerunfailures-14.0.tar.gz", hash = "sha256:4a400bcbcd3c7a4ad151ab8afac123d90eca3abe27f98725dc4d9702887d2e92"}, {file = "pytest_rerunfailures-14.0-py3-none-any.whl", hash = "sha256:4197bdd2eaeffdbf50b5ea6e7236f47ff0e44d1def8dae08e409f536d84e7b32"}, @@ -1551,6 +1635,7 @@ version = "0.7.0" description = "Pytest Plugin to disable socket calls during tests" optional = false python-versions = ">=3.8,<4.0" +groups = ["dev", "test"] files = [ {file = "pytest_socket-0.7.0-py3-none-any.whl", hash = "sha256:7e0f4642177d55d317bbd58fc68c6bd9048d6eadb2d46a89307fa9221336ce45"}, {file = "pytest_socket-0.7.0.tar.gz", hash = "sha256:71ab048cbbcb085c15a4423b73b619a8b35d6a307f46f78ea46be51b1b7e11b3"}, @@ -1565,6 +1650,7 @@ version = "0.11.0" description = "unittest subTest() support and subtests fixture" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "pytest-subtests-0.11.0.tar.gz", hash = "sha256:51865c88457545f51fb72011942f0a3c6901ee9e24cbfb6d1b9dc1348bafbe37"}, {file = "pytest_subtests-0.11.0-py3-none-any.whl", hash = "sha256:453389984952eec85ab0ce0c4f026337153df79587048271c7fd0f49119c07e4"}, @@ -1580,6 +1666,7 @@ version = "0.3.5" description = "Automatically rerun your tests on file modifications" optional = false python-versions = ">=3.7.0,<4.0.0" +groups = ["dev"] files = [ {file = "pytest_watcher-0.3.5-py3-none-any.whl", hash = "sha256:af00ca52c7be22dc34c0fd3d7ffef99057207a73b05dc5161fe3b2fe91f58130"}, {file = "pytest_watcher-0.3.5.tar.gz", hash = "sha256:8896152460ba2b1a8200c12117c6611008ec96c8b2d811f0a05ab8a82b043ff8"}, @@ -1595,6 +1682,7 @@ version = "3.6.1" description = "pytest xdist plugin for distributed testing, most importantly across multiple CPUs" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "pytest_xdist-3.6.1-py3-none-any.whl", hash = "sha256:9ed4adfb68a016610848639bb7e02c9352d5d9f03d04809919e2dafc3be4cca7"}, {file = "pytest_xdist-3.6.1.tar.gz", hash = "sha256:ead156a4db231eec769737f57668ef58a2084a34b2e55c4a8fa20d861107300d"}, @@ -1615,6 +1703,7 @@ version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["dev"] files = [ {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, @@ -1629,6 +1718,7 @@ version = "6.0.2" description = "YAML parser and emitter for Python" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, @@ -1691,6 +1781,7 @@ version = "2.32.3" description = "Python HTTP for Humans." 
optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, @@ -1712,6 +1803,7 @@ version = "1.0.0" description = "A utility belt for advanced users of python-requests" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +groups = ["main"] files = [ {file = "requests-toolbelt-1.0.0.tar.gz", hash = "sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6"}, {file = "requests_toolbelt-1.0.0-py2.py3-none-any.whl", hash = "sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06"}, @@ -1726,6 +1818,8 @@ version = "13.9.4" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" optional = true python-versions = ">=3.8.0" +groups = ["main"] +markers = "extra == \"pytest\"" files = [ {file = "rich-13.9.4-py3-none-any.whl", hash = "sha256:6049d5e6ec054bf2779ab3358186963bac2ea89175919d699e378b99738c2a90"}, {file = "rich-13.9.4.tar.gz", hash = "sha256:439594978a49a09530cff7ebc4b5c7103ef57baf48d5ea3184f21d9a2befa098"}, @@ -1745,6 +1839,7 @@ version = "0.6.9" description = "An extremely fast Python linter and code formatter, written in Rust." optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "ruff-0.6.9-py3-none-linux_armv6l.whl", hash = "sha256:064df58d84ccc0ac0fcd63bc3090b251d90e2a372558c0f057c3f75ed73e1ccd"}, {file = "ruff-0.6.9-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:140d4b5c9f5fc7a7b074908a78ab8d384dd7f6510402267bc76c37195c02a7ec"}, @@ -1772,6 +1867,7 @@ version = "1.17.0" description = "Python 2 and 3 compatibility utilities" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["dev"] files = [ {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, @@ -1783,6 +1879,7 @@ version = "1.3.1" description = "Sniff out which async library your code is running under" optional = false python-versions = ">=3.7" +groups = ["main", "dev", "lint", "test"] files = [ {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, @@ -1794,6 +1891,7 @@ version = "0.41.3" description = "The little ASGI library that shines." 
optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "starlette-0.41.3-py3-none-any.whl", hash = "sha256:44cedb2b7c77a9de33a8b74b2b90e9f50d11fcf25d8270ea525ad71a25374ff7"}, {file = "starlette-0.41.3.tar.gz", hash = "sha256:0e4ab3d16522a255be6b28260b938eae2482f98ce5cc934cb08dce8dc3ba5835"}, @@ -1812,6 +1910,7 @@ version = "2.2.1" description = "A lil' TOML parser" optional = false python-versions = ">=3.8" +groups = ["main", "dev", "test"] files = [ {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, @@ -1846,6 +1945,7 @@ files = [ {file = "tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc"}, {file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"}, ] +markers = {main = "python_version < \"3.11\"", dev = "python_full_version <= \"3.11.0a6\"", test = "python_version < \"3.11\""} [[package]] name = "tqdm" @@ -1853,6 +1953,7 @@ version = "4.67.1" description = "Fast, Extensible Progress Meter" optional = false python-versions = ">=3.7" +groups = ["lint"] files = [ {file = "tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2"}, {file = "tqdm-4.67.1.tar.gz", hash = "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2"}, @@ -1874,6 +1975,7 @@ version = "5.9.5.20240516" description = "Typing stubs for psutil" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "types-psutil-5.9.5.20240516.tar.gz", hash = "sha256:bb296f59fc56458891d0feb1994717e548a1bcf89936a2877df8792b822b4696"}, {file = "types_psutil-5.9.5.20240516-py3-none-any.whl", hash = "sha256:83146ded949a10167d9895e567b3b71e53ebc5e23fd8363eab62b3c76cce7b89"}, @@ -1885,6 +1987,7 @@ version = "2024.2.0.20241221" description = "Typing stubs for pytz" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "types_pytz-2024.2.0.20241221-py3-none-any.whl", hash = "sha256:8fc03195329c43637ed4f593663df721fef919b60a969066e22606edf0b53ad5"}, {file = "types_pytz-2024.2.0.20241221.tar.gz", hash = "sha256:06d7cde9613e9f7504766a0554a270c369434b50e00975b3a4a0f6eed0f2c1a9"}, @@ -1896,6 +1999,7 @@ version = "6.0.12.20241230" description = "Typing stubs for PyYAML" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "types_PyYAML-6.0.12.20241230-py3-none-any.whl", hash = "sha256:fa4d32565219b68e6dee5f67534c722e53c00d1cfc09c435ef04d7353e1e96e6"}, {file = "types_pyyaml-6.0.12.20241230.tar.gz", hash = "sha256:7f07622dbd34bb9c8b264fe860a17e0efcad00d50b5f27e93984909d9363498c"}, @@ -1907,6 +2011,8 @@ version = "2.31.0.6" description = "Typing stubs for requests" optional = false python-versions = ">=3.7" +groups = ["dev"] +markers = "platform_python_implementation == \"PyPy\" or python_version < \"3.10\"" files = [ {file = "types-requests-2.31.0.6.tar.gz", hash = "sha256:cd74ce3b53c461f1228a9b783929ac73a666658f223e28ed29753771477b3bd0"}, {file = "types_requests-2.31.0.6-py3-none-any.whl", hash = "sha256:a2db9cb228a81da8348b49ad6db3f5519452dd20a9c1e1a868c83c5fe88fd1a9"}, @@ -1921,6 +2027,8 @@ version = "2.32.0.20241016" description = "Typing stubs for requests" optional = false python-versions = ">=3.8" +groups = ["dev"] +markers = 
"platform_python_implementation != \"PyPy\" and python_version >= \"3.10\"" files = [ {file = "types-requests-2.32.0.20241016.tar.gz", hash = "sha256:0d9cad2f27515d0e3e3da7134a1b6f28fb97129d86b867f24d9c726452634d95"}, {file = "types_requests-2.32.0.20241016-py3-none-any.whl", hash = "sha256:4195d62d6d3e043a4eaaf08ff8a62184584d2e8684e9d2aa178c7915a7da3747"}, @@ -1935,6 +2043,7 @@ version = "4.67.0.20241221" description = "Typing stubs for tqdm" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "types_tqdm-4.67.0.20241221-py3-none-any.whl", hash = "sha256:a1f1c9cda5c2d8482d2c73957a5398bfdedda10f6bc7b3b4e812d5c910486d29"}, {file = "types_tqdm-4.67.0.20241221.tar.gz", hash = "sha256:e56046631056922385abe89aeb18af5611f471eadd7918a0ad7f34d84cd4c8cc"}, @@ -1949,6 +2058,8 @@ version = "1.26.25.14" description = "Typing stubs for urllib3" optional = false python-versions = "*" +groups = ["dev"] +markers = "platform_python_implementation == \"PyPy\" or python_version < \"3.10\"" files = [ {file = "types-urllib3-1.26.25.14.tar.gz", hash = "sha256:229b7f577c951b8c1b92c1bc2b2fdb0b49847bd2af6d1cc2a2e3dd340f3bda8f"}, {file = "types_urllib3-1.26.25.14-py3-none-any.whl", hash = "sha256:9683bbb7fb72e32bfe9d2be6e04875fbe1b3eeec3cbb4ea231435aa7fd6b4f0e"}, @@ -1960,6 +2071,7 @@ version = "4.12.2" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" +groups = ["main", "dev", "lint", "test"] files = [ {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, @@ -1971,6 +2083,7 @@ version = "0.9.0" description = "Runtime inspection utilities for typing module." optional = false python-versions = "*" +groups = ["dev"] files = [ {file = "typing_inspect-0.9.0-py3-none-any.whl", hash = "sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f"}, {file = "typing_inspect-0.9.0.tar.gz", hash = "sha256:b23fc42ff6f6ef6954e4852c1fb512cdd18dbea03134f91f856a95ccc9461f78"}, @@ -1986,6 +2099,8 @@ version = "1.26.20" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +groups = ["main", "dev"] +markers = "platform_python_implementation == \"PyPy\" or python_version < \"3.10\"" files = [ {file = "urllib3-1.26.20-py2.py3-none-any.whl", hash = "sha256:0ed14ccfbf1c30a9072c7ca157e4319b70d65f623e91e7b32fadb2853431016e"}, {file = "urllib3-1.26.20.tar.gz", hash = "sha256:40c2dc0c681e47eb8f90e7e27bf6ff7df2e677421fd46756da1161c39ca70d32"}, @@ -2002,6 +2117,8 @@ version = "2.3.0" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.9" +groups = ["main", "dev"] +markers = "platform_python_implementation != \"PyPy\" and python_version >= \"3.10\"" files = [ {file = "urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df"}, {file = "urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d"}, @@ -2019,6 +2136,7 @@ version = "0.29.0" description = "The lightning-fast ASGI server." 
optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "uvicorn-0.29.0-py3-none-any.whl", hash = "sha256:2c2aac7ff4f4365c206fd773a39bf4ebd1047c238f8b8268ad996829323473de"}, {file = "uvicorn-0.29.0.tar.gz", hash = "sha256:6a69214c0b6a087462412670b3ef21224fa48cae0e452b5883e8e8bdfdd11dd0"}, @@ -2038,6 +2156,7 @@ version = "6.0.2" description = "Automatically mock your HTTP interactions to simplify and speed up testing" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "vcrpy-6.0.2-py2.py3-none-any.whl", hash = "sha256:40370223861181bc76a5e5d4b743a95058bb1ad516c3c08570316ab592f56cad"}, {file = "vcrpy-6.0.2.tar.gz", hash = "sha256:88e13d9111846745898411dbc74a75ce85870af96dd320d75f1ee33158addc09"}, @@ -2061,6 +2180,7 @@ version = "6.0.0" description = "Filesystem events monitoring" optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "watchdog-6.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d1cdb490583ebd691c012b3d6dae011000fe42edb7a82ece80965b42abd61f26"}, {file = "watchdog-6.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bc64ab3bdb6a04d69d4023b29422170b74681784ffb9463ed4870cf2f3e66112"}, @@ -2103,6 +2223,7 @@ version = "1.17.2" description = "Module for decorators, wrappers and monkey patching." optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "wrapt-1.17.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3d57c572081fed831ad2d26fd430d565b76aa277ed1d30ff4d40670b1c0dd984"}, {file = "wrapt-1.17.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b5e251054542ae57ac7f3fba5d10bfff615b6c2fb09abeb37d2f1463f841ae22"}, @@ -2191,6 +2312,7 @@ version = "1.18.3" description = "Yet another URL library" optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "yarl-1.18.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7df647e8edd71f000a5208fe6ff8c382a1de8edfbccdbbfe649d263de07d8c34"}, {file = "yarl-1.18.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c69697d3adff5aa4f874b19c0e4ed65180ceed6318ec856ebc423aa5850d84f7"}, @@ -2287,6 +2409,7 @@ version = "0.23.0" description = "Zstandard bindings for Python" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "zstandard-0.23.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bf0a05b6059c0528477fba9054d09179beb63744355cab9f38059548fedd46a9"}, {file = "zstandard-0.23.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fc9ca1c9718cb3b06634c7c8dec57d24e9438b2aa9a0f02b8bb36bf478538880"}, @@ -2399,6 +2522,6 @@ pytest = ["pytest", "rich"] vcr = [] [metadata] -lock-version = "2.0" +lock-version = "2.1" python-versions = ">=3.9,<4.0" -content-hash = "dba91f69a082b69ba78b241563849629fc80ffc37934d00f7a365f5e5f2cfe56" +content-hash = "a88403e20aee1424566638e4bb9eceb66ef0615d2a52b12f6e2bcf0098a6c4de" diff --git a/python/pyproject.toml b/python/pyproject.toml index b34d1934b..0256e5169 100644 --- a/python/pyproject.toml +++ b/python/pyproject.toml @@ -74,6 +74,7 @@ openai = "^1.10" [tool.poetry.group.test.dependencies] pytest-socket = "^0.7.0" +anthropic = "^0.45.0" [tool.poetry.extras] vcr = ["vcrpy"] diff --git a/python/tests/integration_tests/wrappers/test_anthropic.py b/python/tests/integration_tests/wrappers/test_anthropic.py index 144f8dcf0..ea404c31e 100644 --- a/python/tests/integration_tests/wrappers/test_anthropic.py +++ b/python/tests/integration_tests/wrappers/test_anthropic.py @@ -72,7 +72,7 @@ def test_chat_sync_api(mock_session: mock.MagicMock, stream: 
bool): if data.get("patch"): outputs = data["patch"][0]["outputs"] break - + assert outputs @@ -80,10 +80,12 @@ def test_chat_sync_api(mock_session: mock.MagicMock, stream: bool): @pytest.mark.parametrize("stream", [False, True]) async def test_chat_async_api(mock_session: mock.MagicMock, stream: bool): import anthropic # noqa - + client = Client(session=mock_session()) original_client = anthropic.AsyncAnthropic() - patched_client = wrap_anthropic(anthropic.AsyncAnthropic(), tracing_extra={"client": client}) + patched_client = wrap_anthropic( + anthropic.AsyncAnthropic(), tracing_extra={"client": client} + ) messages = [{"role": "user", "content": "Say 'foo'"}] if stream: @@ -106,8 +108,18 @@ async def test_chat_async_api(mock_session: mock.MagicMock, stream: bool): patched_chunks.append(chunk) assert len(original_chunks) == len(patched_chunks) - assert "".join([c.message.content[0].text for c in original_chunks if hasattr(c, 'message') and len(c.message.content)]) == "".join( - [c.message.content[0].text for c in patched_chunks if hasattr(c, 'message') and len(c.message.content)] + assert "".join( + [ + c.message.content[0].text + for c in original_chunks + if hasattr(c, "message") and len(c.message.content) + ] + ) == "".join( + [ + c.message.content[0].text + for c in patched_chunks + if hasattr(c, "message") and len(c.message.content) + ] ) else: original = await original_client.messages.create( @@ -139,9 +151,12 @@ async def test_chat_async_api(mock_session: mock.MagicMock, stream: bool): @pytest.mark.parametrize("stream", [False, True]) def test_completions_sync_api(mock_session: mock.MagicMock, stream: bool): import anthropic + client = Client(session=mock_session()) original_client = anthropic.Anthropic() - patched_client = wrap_anthropic(anthropic.Anthropic(), tracing_extra={"client": client}) + patched_client = wrap_anthropic( + anthropic.Anthropic(), tracing_extra={"client": client} + ) prompt = "Human: Say 'Hi i'm Claude' then stop.\n\nAssistant:" original = original_client.completions.create( model="claude-2.1", @@ -183,7 +198,9 @@ async def test_completions_async_api(mock_session: mock.MagicMock, stream: bool) client = Client(session=mock_session()) original_client = anthropic.AsyncAnthropic() - patched_client = wrap_anthropic(anthropic.AsyncAnthropic(), tracing_extra={"client": client}) + patched_client = wrap_anthropic( + anthropic.AsyncAnthropic(), tracing_extra={"client": client} + ) prompt = "Human: Say 'Hi i'm Claude' then stop.\n\nAssistant:" original = await original_client.completions.create( model="claude-2.1", @@ -216,3 +233,83 @@ async def test_completions_async_api(mock_session: mock.MagicMock, stream: bool) assert mock_session.return_value.request.call_args_list[0][0][0].upper() == "GET" for call in mock_session.return_value.request.call_args_list[1:]: assert call[0][0].upper() == "POST" + + +@mock.patch("langsmith.client.requests.Session") +def test_beta_chat_sync_api(mock_session: mock.MagicMock): + import anthropic # noqa + from tests.unit_tests.test_run_helpers import _get_calls + + mock_client = Client(session=mock_session()) + original_client = anthropic.Anthropic() + patched_client = wrap_anthropic( + anthropic.Anthropic(), tracing_extra={"client": mock_client} + ) + messages = [{"role": "user", "content": "Say 'foo'"}] + + original = original_client.beta.messages.create( + messages=messages, + temperature=0, + model=model_name, + max_tokens=3, + ) + patched = patched_client.beta.messages.create( + messages=messages, + temperature=0, + model=model_name, + 
max_tokens=3, + ) + assert type(original) == type(patched) + assert "".join([c.text for c in original.content]) == "".join( + [c.text for c in patched.content] + ) + + calls = _get_calls(mock_client, minimum=1) + assert calls + datas = [json.loads(call.kwargs["data"]) for call in calls] + outputs = None + for data in datas: + if data.get("post"): + if outputs := data["post"][0]["outputs"]: + break + if data.get("patch"): + outputs = data["patch"][0]["outputs"] + break + + assert outputs + + +@mock.patch("langsmith.client.requests.Session") +async def test_beta_chat_async_api(mock_session: mock.MagicMock): + import anthropic # noqa + + client = Client(session=mock_session()) + original_client = anthropic.AsyncAnthropic() + patched_client = wrap_anthropic( + anthropic.AsyncAnthropic(), tracing_extra={"client": client} + ) + messages = [{"role": "user", "content": "Say 'foo'"}] + + original = await original_client.beta.messages.create( + messages=messages, + temperature=0, + model=model_name, + max_tokens=3, + ) + patched = await patched_client.beta.messages.create( + messages=messages, + temperature=0, + model=model_name, + max_tokens=3, + ) + assert type(original) == type(patched) + assert "".join([c.text for c in original.content]) == "".join( + [c.text for c in patched.content] + ) + + time.sleep(0.1) + assert mock_session.return_value.request.call_count > 1 + # This is the info call + assert mock_session.return_value.request.call_args_list[0][0][0].upper() == "GET" + for call in mock_session.return_value.request.call_args_list[1:]: + assert call[0][0].upper() == "POST" From 50bc0965bc8f85fc4aeb65efaa9587f82c145ab9 Mon Sep 17 00:00:00 2001 From: isaac hershenson Date: Mon, 27 Jan 2025 18:07:14 -0800 Subject: [PATCH 06/12] fmt --- .../integration_tests/wrappers/test_anthropic.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/python/tests/integration_tests/wrappers/test_anthropic.py b/python/tests/integration_tests/wrappers/test_anthropic.py index ea404c31e..4e51ff161 100644 --- a/python/tests/integration_tests/wrappers/test_anthropic.py +++ b/python/tests/integration_tests/wrappers/test_anthropic.py @@ -56,7 +56,7 @@ def test_chat_sync_api(mock_session: mock.MagicMock, stream: bool): model=model_name, max_tokens=3, ) - assert type(original) == type(patched) + assert isinstance(patched, type(original)) assert "".join([c.text for c in original.content]) == "".join( [c.text for c in patched.content] ) @@ -134,7 +134,7 @@ async def test_chat_async_api(mock_session: mock.MagicMock, stream: bool): model=model_name, max_tokens=3, ) - assert type(original) == type(patched) + assert isinstance(patched, type(original)) assert "".join([c.text for c in original.content]) == "".join( [c.text for c in patched.content] ) @@ -180,7 +180,7 @@ def test_completions_sync_api(mock_session: mock.MagicMock, stream: bool): [c.completion for c in patched_chunks] ) else: - assert type(original) == type(patched) + assert isinstance(patched, type(original)) assert original.completion == patched.completion time.sleep(0.1) @@ -224,7 +224,7 @@ async def test_completions_async_api(mock_session: mock.MagicMock, stream: bool) [c.completion for c in patched_chunks] ) else: - assert type(original) == type(patched) + assert isinstance(patched, type(original)) assert original.completion == patched.completion time.sleep(0.1) @@ -259,7 +259,7 @@ def test_beta_chat_sync_api(mock_session: mock.MagicMock): model=model_name, max_tokens=3, ) - assert type(original) == type(patched) + assert 
isinstance(patched, type(original)) assert "".join([c.text for c in original.content]) == "".join( [c.text for c in patched.content] ) @@ -302,7 +302,7 @@ async def test_beta_chat_async_api(mock_session: mock.MagicMock): model=model_name, max_tokens=3, ) - assert type(original) == type(patched) + assert isinstance(patched, type(original)) assert "".join([c.text for c in original.content]) == "".join( [c.text for c in patched.content] ) From d2fa88f6eb404f95f49c4e0b38267fbca03784ea Mon Sep 17 00:00:00 2001 From: isaac hershenson Date: Mon, 27 Jan 2025 18:18:33 -0800 Subject: [PATCH 07/12] fmt --- python/langsmith/wrappers/_anthropic.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/python/langsmith/wrappers/_anthropic.py b/python/langsmith/wrappers/_anthropic.py index 937eabc18..cbb688f42 100644 --- a/python/langsmith/wrappers/_anthropic.py +++ b/python/langsmith/wrappers/_anthropic.py @@ -54,7 +54,7 @@ def _strip_not_given(d: dict) -> dict: def _accumulate_event( *, event: MessageStreamEvent, current_snapshot: Message | None -) -> Message: +) -> Message | None: try: from anthropic.types import ContentBlock except ImportError: @@ -72,7 +72,7 @@ def _accumulate_event( if event.type == "content_block_start": # TODO: check index <-- from anthropic SDK :) current_snapshot.content.append( - ContentBlock.construct(**event.content_block.model_dump()), + ContentBlock.construct(**event.content_block.model_dump()), # type: ignore[attr-defined] ) elif event.type == "content_block_delta": content = current_snapshot.content[event.index] From 52f8829181d5ccef8bfa3911c29493c40be404b3 Mon Sep 17 00:00:00 2001 From: isaac hershenson Date: Wed, 29 Jan 2025 10:48:02 -0800 Subject: [PATCH 08/12] trying to fix tests --- .../python-integration-tests/action.yml | 1 + .../wrappers/test_anthropic.py | 46 +++++++++---------- 2 files changed, 24 insertions(+), 23 deletions(-) diff --git a/.github/actions/python-integration-tests/action.yml b/.github/actions/python-integration-tests/action.yml index 61e4f41b9..c884b448c 100644 --- a/.github/actions/python-integration-tests/action.yml +++ b/.github/actions/python-integration-tests/action.yml @@ -46,6 +46,7 @@ runs: LANGSMITH_ENDPOINT: https://beta.api.smith.langchain.com LANGSMITH_API_KEY: ${{ inputs.langchain-api-key-beta }} OPENAI_API_KEY: ${{ inputs.openai-api-key }} + ANTHROPIC_API_KEY: ${{ inputs.anthropic-api-key }} LANGSMITH_TEST_CACHE: tests/cassettes run: make integration_tests_fast shell: bash diff --git a/python/tests/integration_tests/wrappers/test_anthropic.py b/python/tests/integration_tests/wrappers/test_anthropic.py index 4e51ff161..d06149140 100644 --- a/python/tests/integration_tests/wrappers/test_anthropic.py +++ b/python/tests/integration_tests/wrappers/test_anthropic.py @@ -7,17 +7,17 @@ from langsmith import Client from langsmith.wrappers import wrap_anthropic +from tests.unit_tests.test_run_helpers import _get_calls model_name = "claude-3-haiku-20240307" -@mock.patch("langsmith.client.requests.Session") @pytest.mark.parametrize("stream", [False, True]) -def test_chat_sync_api(mock_session: mock.MagicMock, stream: bool): +def test_chat_sync_api(stream: bool): import anthropic # noqa - from tests.unit_tests.test_run_helpers import _get_calls - mock_client = Client(session=mock_session()) + mock_session = mock.MagicMock() + mock_client = Client(session=mock_session) original_client = anthropic.Anthropic() patched_client = wrap_anthropic( anthropic.Anthropic(), tracing_extra={"client": mock_client} @@ -76,15 +76,15 @@ def 
test_chat_sync_api(mock_session: mock.MagicMock, stream: bool): assert outputs -@mock.patch("langsmith.client.requests.Session") @pytest.mark.parametrize("stream", [False, True]) -async def test_chat_async_api(mock_session: mock.MagicMock, stream: bool): +async def test_chat_async_api(stream: bool): import anthropic # noqa - client = Client(session=mock_session()) + mock_session = mock.MagicMock() + mock_client = Client(session=mock_session) original_client = anthropic.AsyncAnthropic() patched_client = wrap_anthropic( - anthropic.AsyncAnthropic(), tracing_extra={"client": client} + anthropic.AsyncAnthropic(), tracing_extra={"client": mock_client} ) messages = [{"role": "user", "content": "Say 'foo'"}] @@ -147,15 +147,15 @@ async def test_chat_async_api(mock_session: mock.MagicMock, stream: bool): assert call[0][0].upper() == "POST" -@mock.patch("langsmith.client.requests.Session") @pytest.mark.parametrize("stream", [False, True]) -def test_completions_sync_api(mock_session: mock.MagicMock, stream: bool): +def test_completions_sync_api(stream: bool): import anthropic - client = Client(session=mock_session()) + mock_session = mock.MagicMock() + mock_client = Client(session=mock_session) original_client = anthropic.Anthropic() patched_client = wrap_anthropic( - anthropic.Anthropic(), tracing_extra={"client": client} + anthropic.Anthropic(), tracing_extra={"client": mock_client} ) prompt = "Human: Say 'Hi i'm Claude' then stop.\n\nAssistant:" original = original_client.completions.create( @@ -191,15 +191,15 @@ def test_completions_sync_api(mock_session: mock.MagicMock, stream: bool): assert call[0][0].upper() == "POST" -@mock.patch("langsmith.client.requests.Session") @pytest.mark.parametrize("stream", [False, True]) -async def test_completions_async_api(mock_session: mock.MagicMock, stream: bool): +async def test_completions_async_api(stream: bool): import anthropic - client = Client(session=mock_session()) + mock_session = mock.MagicMock() + mock_client = Client(session=mock_session) original_client = anthropic.AsyncAnthropic() patched_client = wrap_anthropic( - anthropic.AsyncAnthropic(), tracing_extra={"client": client} + anthropic.AsyncAnthropic(), tracing_extra={"client": mock_client} ) prompt = "Human: Say 'Hi i'm Claude' then stop.\n\nAssistant:" original = await original_client.completions.create( @@ -235,12 +235,12 @@ async def test_completions_async_api(mock_session: mock.MagicMock, stream: bool) assert call[0][0].upper() == "POST" -@mock.patch("langsmith.client.requests.Session") -def test_beta_chat_sync_api(mock_session: mock.MagicMock): +def test_beta_chat_sync_api(): import anthropic # noqa from tests.unit_tests.test_run_helpers import _get_calls - mock_client = Client(session=mock_session()) + mock_session = mock.MagicMock() + mock_client = Client(session=mock_session) original_client = anthropic.Anthropic() patched_client = wrap_anthropic( anthropic.Anthropic(), tracing_extra={"client": mock_client} @@ -279,14 +279,14 @@ def test_beta_chat_sync_api(mock_session: mock.MagicMock): assert outputs -@mock.patch("langsmith.client.requests.Session") -async def test_beta_chat_async_api(mock_session: mock.MagicMock): +async def test_beta_chat_async_api(): import anthropic # noqa - client = Client(session=mock_session()) + mock_session = mock.MagicMock() + mock_client = Client(session=mock_session) original_client = anthropic.AsyncAnthropic() patched_client = wrap_anthropic( - anthropic.AsyncAnthropic(), tracing_extra={"client": client} + anthropic.AsyncAnthropic(), 
tracing_extra={"client": mock_client} ) messages = [{"role": "user", "content": "Say 'foo'"}] From af534fc77584f708f3b5d79cf729e709a455d734 Mon Sep 17 00:00:00 2001 From: isaac hershenson Date: Wed, 29 Jan 2025 11:02:11 -0800 Subject: [PATCH 09/12] test fixes --- python/tests/integration_tests/wrappers/test_anthropic.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/python/tests/integration_tests/wrappers/test_anthropic.py b/python/tests/integration_tests/wrappers/test_anthropic.py index d06149140..ce83bef16 100644 --- a/python/tests/integration_tests/wrappers/test_anthropic.py +++ b/python/tests/integration_tests/wrappers/test_anthropic.py @@ -139,7 +139,7 @@ async def test_chat_async_api(stream: bool): [c.text for c in patched.content] ) - time.sleep(0.1) + time.sleep(1) assert mock_session.return_value.request.call_count > 1 # This is the info call assert mock_session.return_value.request.call_args_list[0][0][0].upper() == "GET" @@ -183,7 +183,7 @@ def test_completions_sync_api(stream: bool): assert isinstance(patched, type(original)) assert original.completion == patched.completion - time.sleep(0.1) + time.sleep(1) assert mock_session.return_value.request.call_count > 1 # This is the info call assert mock_session.return_value.request.call_args_list[0][0][0].upper() == "GET" @@ -227,7 +227,7 @@ async def test_completions_async_api(stream: bool): assert isinstance(patched, type(original)) assert original.completion == patched.completion - time.sleep(0.1) + time.sleep(1) assert mock_session.return_value.request.call_count > 1 # This is the info call assert mock_session.return_value.request.call_args_list[0][0][0].upper() == "GET" @@ -307,7 +307,7 @@ async def test_beta_chat_async_api(): [c.text for c in patched.content] ) - time.sleep(0.1) + time.sleep(1) assert mock_session.return_value.request.call_count > 1 # This is the info call assert mock_session.return_value.request.call_args_list[0][0][0].upper() == "GET" From 0ac755ebf1ab070628a5ceb1238e128d865fe9a8 Mon Sep 17 00:00:00 2001 From: isaac hershenson Date: Wed, 29 Jan 2025 12:50:30 -0800 Subject: [PATCH 10/12] debugging --- python/Makefile | 2 +- python/langsmith/run_helpers.py | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/python/Makefile b/python/Makefile index cd8983785..5593a1c9d 100644 --- a/python/Makefile +++ b/python/Makefile @@ -39,7 +39,7 @@ integration_tests: poetry run python -m pytest -x -v --durations=10 --cov=langsmith --cov-report=term-missing --cov-report=html --cov-config=.coveragerc tests/integration_tests integration_tests_fast: - poetry run python -m pytest -x -n auto --durations=10 -v --cov=langsmith --cov-report=term-missing --cov-report=html --cov-config=.coveragerc tests/integration_tests + poetry run python -m pytest -s -x -n auto --durations=10 -v --cov=langsmith --cov-report=term-missing --cov-report=html --cov-config=.coveragerc tests/integration_tests doctest: poetry run python -m pytest -n auto -x --durations=10 --doctest-modules langsmith diff --git a/python/langsmith/run_helpers.py b/python/langsmith/run_helpers.py index 07d73ce56..b8847a713 100644 --- a/python/langsmith/run_helpers.py +++ b/python/langsmith/run_helpers.py @@ -1326,6 +1326,7 @@ def _setup_run( kwargs: Any = None, ) -> _TraceableContainer: """Create a new run or create_child() if run is passed in kwargs.""" + print("SETTING UP RUN", utils.tracing_is_enabled()) extra_outer = container_input.get("extra_outer") or {} metadata = container_input.get("metadata") tags = 
container_input.get("tags") @@ -1441,6 +1442,7 @@ def _setup_run( ) if utils.tracing_is_enabled() is True: try: + print("POSTING RUN") new_run.post() except BaseException as e: LOGGER.error(f"Failed to post run {new_run.id}: {e}") From 03d0c4b692252e6ef26882ac7133eaa530fb003f Mon Sep 17 00:00:00 2001 From: isaac hershenson Date: Wed, 29 Jan 2025 13:07:42 -0800 Subject: [PATCH 11/12] test --- python/Makefile | 2 +- python/langsmith/run_helpers.py | 2 -- .../wrappers/test_anthropic.py | 25 ++++++++++--------- 3 files changed, 14 insertions(+), 15 deletions(-) diff --git a/python/Makefile b/python/Makefile index 5593a1c9d..cd8983785 100644 --- a/python/Makefile +++ b/python/Makefile @@ -39,7 +39,7 @@ integration_tests: poetry run python -m pytest -x -v --durations=10 --cov=langsmith --cov-report=term-missing --cov-report=html --cov-config=.coveragerc tests/integration_tests integration_tests_fast: - poetry run python -m pytest -s -x -n auto --durations=10 -v --cov=langsmith --cov-report=term-missing --cov-report=html --cov-config=.coveragerc tests/integration_tests + poetry run python -m pytest -x -n auto --durations=10 -v --cov=langsmith --cov-report=term-missing --cov-report=html --cov-config=.coveragerc tests/integration_tests doctest: poetry run python -m pytest -n auto -x --durations=10 --doctest-modules langsmith diff --git a/python/langsmith/run_helpers.py b/python/langsmith/run_helpers.py index b8847a713..07d73ce56 100644 --- a/python/langsmith/run_helpers.py +++ b/python/langsmith/run_helpers.py @@ -1326,7 +1326,6 @@ def _setup_run( kwargs: Any = None, ) -> _TraceableContainer: """Create a new run or create_child() if run is passed in kwargs.""" - print("SETTING UP RUN", utils.tracing_is_enabled()) extra_outer = container_input.get("extra_outer") or {} metadata = container_input.get("metadata") tags = container_input.get("tags") @@ -1442,7 +1441,6 @@ def _setup_run( ) if utils.tracing_is_enabled() is True: try: - print("POSTING RUN") new_run.post() except BaseException as e: LOGGER.error(f"Failed to post run {new_run.id}: {e}") diff --git a/python/tests/integration_tests/wrappers/test_anthropic.py b/python/tests/integration_tests/wrappers/test_anthropic.py index ce83bef16..334752ee3 100644 --- a/python/tests/integration_tests/wrappers/test_anthropic.py +++ b/python/tests/integration_tests/wrappers/test_anthropic.py @@ -7,6 +7,7 @@ from langsmith import Client from langsmith.wrappers import wrap_anthropic +import langsmith.schemas as ls_schemas from tests.unit_tests.test_run_helpers import _get_calls model_name = "claude-3-haiku-20240307" @@ -140,10 +141,10 @@ async def test_chat_async_api(stream: bool): ) time.sleep(1) - assert mock_session.return_value.request.call_count > 1 + assert mock_session.request.call_count > 1 # This is the info call - assert mock_session.return_value.request.call_args_list[0][0][0].upper() == "GET" - for call in mock_session.return_value.request.call_args_list[1:]: + assert mock_session.request.call_args_list[0][0][0].upper() == "GET" + for call in mock_session.request.call_args_list[1:]: assert call[0][0].upper() == "POST" @@ -184,10 +185,10 @@ def test_completions_sync_api(stream: bool): assert original.completion == patched.completion time.sleep(1) - assert mock_session.return_value.request.call_count > 1 + assert mock_session.request.call_count > 1 # This is the info call - assert mock_session.return_value.request.call_args_list[0][0][0].upper() == "GET" - for call in mock_session.return_value.request.call_args_list[1:]: + assert 
mock_session.request.call_args_list[0][0][0].upper() == "GET" + for call in mock_session.request.call_args_list[1:]: assert call[0][0].upper() == "POST" @@ -228,10 +229,10 @@ async def test_completions_async_api(stream: bool): assert original.completion == patched.completion time.sleep(1) - assert mock_session.return_value.request.call_count > 1 + assert mock_session.request.call_count > 1 # This is the info call - assert mock_session.return_value.request.call_args_list[0][0][0].upper() == "GET" - for call in mock_session.return_value.request.call_args_list[1:]: + assert mock_session.request.call_args_list[0][0][0].upper() == "GET" + for call in mock_session.request.call_args_list[1:]: assert call[0][0].upper() == "POST" @@ -308,8 +309,8 @@ async def test_beta_chat_async_api(): ) time.sleep(1) - assert mock_session.return_value.request.call_count > 1 + assert mock_session.request.call_count > 1 # This is the info call - assert mock_session.return_value.request.call_args_list[0][0][0].upper() == "GET" - for call in mock_session.return_value.request.call_args_list[1:]: + assert mock_session.request.call_args_list[0][0][0].upper() == "GET" + for call in mock_session.request.call_args_list[1:]: assert call[0][0].upper() == "POST" From 9414248d40502d030051f39f6ee717e19ada1a34 Mon Sep 17 00:00:00 2001 From: isaac hershenson Date: Wed, 29 Jan 2025 13:26:31 -0800 Subject: [PATCH 12/12] fmt --- python/tests/integration_tests/wrappers/test_anthropic.py | 1 - 1 file changed, 1 deletion(-) diff --git a/python/tests/integration_tests/wrappers/test_anthropic.py b/python/tests/integration_tests/wrappers/test_anthropic.py index 334752ee3..74e5864ba 100644 --- a/python/tests/integration_tests/wrappers/test_anthropic.py +++ b/python/tests/integration_tests/wrappers/test_anthropic.py @@ -7,7 +7,6 @@ from langsmith import Client from langsmith.wrappers import wrap_anthropic -import langsmith.schemas as ls_schemas from tests.unit_tests.test_run_helpers import _get_calls model_name = "claude-3-haiku-20240307"
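

Postscript (editorial sketches, not part of the patch series above):

The poetry.lock churn at the start of this series comes from regenerating the
lock with Poetry 2.x: lock-version moves from "2.0" to "2.1", which records
per-package `groups` and `markers` keys, and the content-hash changes because
pyproject.toml gains `anthropic = "^0.45.0"` in the test dependency group. No
other dependency changes are visible in the hunks shown.

For context, here is a minimal sketch of how the finished wrapper is exercised
end to end, mirroring the integration tests above. The model name, max_tokens,
and message come from the tests; an ANTHROPIC_API_KEY (and LangSmith
credentials) in the environment are assumed, and `tracing_extra` is shown as
the tests use it, pointing at an explicit Client.

    import anthropic
    from langsmith import Client
    from langsmith.wrappers import wrap_anthropic

    # wrap_anthropic returns the same client type, which is why the tests
    # assert isinstance(patched, type(original)).
    client = wrap_anthropic(
        anthropic.Anthropic(),
        tracing_extra={"client": Client()},
    )

    message = client.messages.create(
        model="claude-3-haiku-20240307",
        max_tokens=3,
        temperature=0,
        messages=[{"role": "user", "content": "Say 'foo'"}],
    )
    # Text content blocks carry a .text attribute, as the tests join on it.
    print("".join(block.text for block in message.content))

The test-fixture churn in patches 08 through 11 converges on one pattern worth
spelling out: instead of decorating with
@mock.patch("langsmith.client.requests.Session") (where the mock is the
Session *class* and calls land on mock_session.return_value.request), the
tests build a MagicMock instance and hand it to Client directly, so recorded
calls live on mock_session.request itself. A hedged sketch of that pattern,
lifted from the assertions the series settles on:

    from unittest import mock
    from langsmith import Client

    # The MagicMock instance *is* the session, so no .return_value hop:
    mock_session = mock.MagicMock()
    mock_client = Client(session=mock_session)

    # ... run wrapped-client code with tracing_extra={"client": mock_client} ...

    # The first request is the GET info call; everything after should be a
    # POST of trace data.
    assert mock_session.request.call_args_list[0][0][0].upper() == "GET"
    for call in mock_session.request.call_args_list[1:]:
        assert call[0][0].upper() == "POST"

The bumped time.sleep(1) calls in patch 09 give the background batch tracer
time to flush before these call-count assertions run; the print statements and
the pytest -s flag added in patch 10 were debugging aids and are reverted in
patch 11.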