diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 089feae7..563004f2 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "0.45.2" + ".": "0.46.0" } \ No newline at end of file diff --git a/.stats.yml b/.stats.yml index 64f8716d..b1ba6c6a 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,2 +1,2 @@ configured_endpoints: 21 -openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/anthropic-f5276eeef7512112e802c85530c51e0a971ee521eebe3a0db309621587b4973d.yml +openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/anthropic-bda1c6bb3a8f16d4b0a936aa3a7b1618f23d38570547e7ef047a9c95265e6613.yml diff --git a/CHANGELOG.md b/CHANGELOG.md index 13a5a364..8950e8a4 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,31 @@ # Changelog +## 0.46.0 (2025-02-08) + +Full Changelog: [v0.45.2...v0.46.0](https://github.com/anthropics/anthropic-sdk-python/compare/v0.45.2...v0.46.0) + +### Features + +* **client:** send `X-Stainless-Read-Timeout` header ([#858](https://github.com/anthropics/anthropic-sdk-python/issues/858)) ([0e75983](https://github.com/anthropics/anthropic-sdk-python/commit/0e759835ddfa7d72d0926cab0101601e7c1e8f22)) +* **jsonl:** add .close() method ([#862](https://github.com/anthropics/anthropic-sdk-python/issues/862)) ([137335c](https://github.com/anthropics/anthropic-sdk-python/commit/137335c649f3dc886875bb60bddeb0c8d8abb67b)) +* **pagination:** avoid fetching when has_more: false ([#860](https://github.com/anthropics/anthropic-sdk-python/issues/860)) ([0cdb81d](https://github.com/anthropics/anthropic-sdk-python/commit/0cdb81d106c48c851fff5c9532c675b414f474b4)) + + +### Bug Fixes + +* **internal:** add back custom header naming support ([#861](https://github.com/anthropics/anthropic-sdk-python/issues/861)) ([cf851ae](https://github.com/anthropics/anthropic-sdk-python/commit/cf851ae9ee57635250beec8bedb0134aa2d79a42)) +* **jsonl:** lower chunk 
size ([#863](https://github.com/anthropics/anthropic-sdk-python/issues/863)) ([38fb720](https://github.com/anthropics/anthropic-sdk-python/commit/38fb72043b436afc02839ad4e2a966a5ef0b0bc1)) + + +### Chores + +* **api:** update openapi spec url ([#852](https://github.com/anthropics/anthropic-sdk-python/issues/852)) ([461d821](https://github.com/anthropics/anthropic-sdk-python/commit/461d821965c61d98bf35a8b6fab5da55a2ddddef)) +* **internal:** bump ruff dependency ([#856](https://github.com/anthropics/anthropic-sdk-python/issues/856)) ([590c3fa](https://github.com/anthropics/anthropic-sdk-python/commit/590c3fa154e38f85c3cc6fcc518a6c68ee2bd234)) +* **internal:** change default timeout to an int ([#855](https://github.com/anthropics/anthropic-sdk-python/issues/855)) ([3152e1a](https://github.com/anthropics/anthropic-sdk-python/commit/3152e1a135a07430404f3209fbbcb924d9d2b9a2)) +* **internal:** fix type traversing dictionary params ([#859](https://github.com/anthropics/anthropic-sdk-python/issues/859)) ([c5b700d](https://github.com/anthropics/anthropic-sdk-python/commit/c5b700d9abea1fcebc43fe12ac514512fedff0db)) +* **internal:** reorder model constants ([#847](https://github.com/anthropics/anthropic-sdk-python/issues/847)) ([aadd531](https://github.com/anthropics/anthropic-sdk-python/commit/aadd5315868ce2eec17cc7d0e8ec4f0b6a4c3c87)) +* **internal:** update models used in tests ([aadd531](https://github.com/anthropics/anthropic-sdk-python/commit/aadd5315868ce2eec17cc7d0e8ec4f0b6a4c3c87)) + ## 0.45.2 (2025-01-27) Full Changelog: [v0.45.1...v0.45.2](https://github.com/anthropics/anthropic-sdk-python/compare/v0.45.1...v0.45.2) diff --git a/pyproject.toml b/pyproject.toml index 781b6728..6468ff31 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "anthropic" -version = "0.45.2" +version = "0.46.0" description = "The official Python library for the anthropic API" dynamic = ["readme"] license = "MIT" @@ -183,7 +183,7 @@ select = [ "T201", "T203", # 
misuse of typing.TYPE_CHECKING - "TCH004", + "TC004", # import rules "TID251", ] diff --git a/requirements-dev.lock b/requirements-dev.lock index 2aba34ee..ad494aa8 100644 --- a/requirements-dev.lock +++ b/requirements-dev.lock @@ -104,7 +104,7 @@ respx==0.22.0 rich==13.7.1 rsa==4.9 # via google-auth -ruff==0.6.9 +ruff==0.9.4 s3transfer==0.10.3 # via boto3 setuptools==68.2.2 diff --git a/scripts/utils/ruffen-docs.py b/scripts/utils/ruffen-docs.py index 37b3d94f..0cf2bd2f 100644 --- a/scripts/utils/ruffen-docs.py +++ b/scripts/utils/ruffen-docs.py @@ -47,7 +47,7 @@ def _md_match(match: Match[str]) -> str: with _collect_error(match): code = format_code_block(code) code = textwrap.indent(code, match["indent"]) - return f'{match["before"]}{code}{match["after"]}' + return f"{match['before']}{code}{match['after']}" def _pycon_match(match: Match[str]) -> str: code = "" @@ -97,7 +97,7 @@ def finish_fragment() -> None: def _md_pycon_match(match: Match[str]) -> str: code = _pycon_match(match) code = textwrap.indent(code, match["indent"]) - return f'{match["before"]}{code}{match["after"]}' + return f"{match['before']}{code}{match['after']}" src = MD_RE.sub(_md_match, src) src = MD_PYCON_RE.sub(_md_pycon_match, src) diff --git a/src/anthropic/_base_client.py b/src/anthropic/_base_client.py index 05efd205..94a261ad 100644 --- a/src/anthropic/_base_client.py +++ b/src/anthropic/_base_client.py @@ -419,10 +419,17 @@ def _build_headers(self, options: FinalRequestOptions, *, retries_taken: int = 0 if idempotency_header and options.method.lower() != "get" and idempotency_header not in headers: headers[idempotency_header] = options.idempotency_key or self._idempotency_key() - # Don't set the retry count header if it was already set or removed by the caller. We check + # Don't set these headers if they were already set or removed by the caller. We check # `custom_headers`, which can contain `Omit()`, instead of `headers` to account for the removal case. 
- if "x-stainless-retry-count" not in (header.lower() for header in custom_headers): + lower_custom_headers = [header.lower() for header in custom_headers] + if "x-stainless-retry-count" not in lower_custom_headers: headers["x-stainless-retry-count"] = str(retries_taken) + if "x-stainless-read-timeout" not in lower_custom_headers: + timeout = self.timeout if isinstance(options.timeout, NotGiven) else options.timeout + if isinstance(timeout, Timeout): + timeout = timeout.read + if timeout is not None: + headers["x-stainless-read-timeout"] = str(timeout) return headers diff --git a/src/anthropic/_constants.py b/src/anthropic/_constants.py index 951e6790..07c46781 100644 --- a/src/anthropic/_constants.py +++ b/src/anthropic/_constants.py @@ -6,7 +6,7 @@ OVERRIDE_CAST_TO_HEADER = "____stainless_override_cast_to" # default timeout is 10 minutes -DEFAULT_TIMEOUT = httpx.Timeout(timeout=600.0, connect=5.0) +DEFAULT_TIMEOUT = httpx.Timeout(timeout=600, connect=5.0) DEFAULT_MAX_RETRIES = 2 DEFAULT_CONNECTION_LIMITS = httpx.Limits(max_connections=1000, max_keepalive_connections=100) diff --git a/src/anthropic/_decoders/jsonl.py b/src/anthropic/_decoders/jsonl.py index e9d29a1c..ac5ac74f 100644 --- a/src/anthropic/_decoders/jsonl.py +++ b/src/anthropic/_decoders/jsonl.py @@ -17,11 +17,15 @@ class JSONLDecoder(Generic[_T]): into a given type. """ - http_response: httpx.Response | None + http_response: httpx.Response """The HTTP response this decoder was constructed from""" def __init__( - self, *, raw_iterator: Iterator[bytes], line_type: type[_T], http_response: httpx.Response | None + self, + *, + raw_iterator: Iterator[bytes], + line_type: type[_T], + http_response: httpx.Response, ) -> None: super().__init__() self.http_response = http_response @@ -29,6 +33,13 @@ def __init__( self._line_type = line_type self._iterator = self.__decode__() + def close(self) -> None: + """Close the response body stream. + + This is called automatically if you consume the entire stream. 
+ """ + self.http_response.close() + def __decode__(self) -> Iterator[_T]: buf = b"" for chunk in self._raw_iterator: @@ -63,10 +74,14 @@ class AsyncJSONLDecoder(Generic[_T]): into a given type. """ - http_response: httpx.Response | None + http_response: httpx.Response def __init__( - self, *, raw_iterator: AsyncIterator[bytes], line_type: type[_T], http_response: httpx.Response | None + self, + *, + raw_iterator: AsyncIterator[bytes], + line_type: type[_T], + http_response: httpx.Response, ) -> None: super().__init__() self.http_response = http_response @@ -74,6 +89,13 @@ def __init__( self._line_type = line_type self._iterator = self.__decode__() + async def close(self) -> None: + """Close the response body stream. + + This is called automatically if you consume the entire stream. + """ + await self.http_response.aclose() + async def __decode__(self) -> AsyncIterator[_T]: buf = b"" async for chunk in self._raw_iterator: diff --git a/src/anthropic/_legacy_response.py b/src/anthropic/_legacy_response.py index 0b812de6..5703932e 100644 --- a/src/anthropic/_legacy_response.py +++ b/src/anthropic/_legacy_response.py @@ -214,7 +214,7 @@ def _parse(self, *, to: type[_T] | None = None) -> R | _T: return cast( R, cast("type[JSONLDecoder[Any]]", cast_to)( - raw_iterator=self.http_response.iter_bytes(chunk_size=4096), + raw_iterator=self.http_response.iter_bytes(chunk_size=64), line_type=extract_type_arg(cast_to, 0), http_response=self.http_response, ), @@ -224,7 +224,7 @@ def _parse(self, *, to: type[_T] | None = None) -> R | _T: return cast( R, cast("type[AsyncJSONLDecoder[Any]]", cast_to)( - raw_iterator=self.http_response.aiter_bytes(chunk_size=4096), + raw_iterator=self.http_response.aiter_bytes(chunk_size=64), line_type=extract_type_arg(cast_to, 0), http_response=self.http_response, ), diff --git a/src/anthropic/_models.py b/src/anthropic/_models.py index 4470e480..dad8df9e 100644 --- a/src/anthropic/_models.py +++ b/src/anthropic/_models.py @@ -188,7 +188,7 @@ def 
to_json( @override def __str__(self) -> str: # mypy complains about an invalid self arg - return f'{self.__repr_name__()}({self.__repr_str__(", ")})' # type: ignore[misc] + return f"{self.__repr_name__()}({self.__repr_str__(', ')})" # type: ignore[misc] # Override the 'construct' method in a way that supports recursive parsing without validation. # Based on https://github.com/samuelcolvin/pydantic/issues/1168#issuecomment-817742836. @@ -442,10 +442,16 @@ def construct_type(*, value: object, type_: object) -> object: If the given value does not match the expected type then it is returned as-is. """ + + # store a reference to the original type we were given before we extract any inner + # types so that we can properly resolve forward references in `TypeAliasType` annotations + original_type = None + # we allow `object` as the input type because otherwise, passing things like # `Literal['value']` will be reported as a type error by type checkers type_ = cast("type[object]", type_) if is_type_alias_type(type_): + original_type = type_ # type: ignore[unreachable] type_ = type_.__value__ # type: ignore[unreachable] # unwrap `Annotated[T, ...]` -> `T` @@ -462,7 +468,7 @@ def construct_type(*, value: object, type_: object) -> object: if is_union(origin): try: - return validate_type(type_=cast("type[object]", type_), value=value) + return validate_type(type_=cast("type[object]", original_type or type_), value=value) except Exception: pass diff --git a/src/anthropic/_response.py b/src/anthropic/_response.py index 40f79958..64a3f158 100644 --- a/src/anthropic/_response.py +++ b/src/anthropic/_response.py @@ -144,7 +144,7 @@ def _parse(self, *, to: type[_T] | None = None) -> R | _T: return cast( R, cast("type[JSONLDecoder[Any]]", cast_to)( - raw_iterator=self.http_response.iter_bytes(chunk_size=4096), + raw_iterator=self.http_response.iter_bytes(chunk_size=64), line_type=extract_type_arg(cast_to, 0), http_response=self.http_response, ), @@ -154,7 +154,7 @@ def _parse(self, *, 
to: type[_T] | None = None) -> R | _T: return cast( R, cast("type[AsyncJSONLDecoder[Any]]", cast_to)( - raw_iterator=self.http_response.aiter_bytes(chunk_size=4096), + raw_iterator=self.http_response.aiter_bytes(chunk_size=64), line_type=extract_type_arg(cast_to, 0), http_response=self.http_response, ), diff --git a/src/anthropic/_utils/_transform.py b/src/anthropic/_utils/_transform.py index a6b62cad..18afd9d8 100644 --- a/src/anthropic/_utils/_transform.py +++ b/src/anthropic/_utils/_transform.py @@ -25,7 +25,7 @@ is_annotated_type, strip_annotated_type, ) -from .._compat import model_dump, is_typeddict +from .._compat import get_origin, model_dump, is_typeddict _T = TypeVar("_T") @@ -164,9 +164,14 @@ def _transform_recursive( inner_type = annotation stripped_type = strip_annotated_type(inner_type) + origin = get_origin(stripped_type) or stripped_type if is_typeddict(stripped_type) and is_mapping(data): return _transform_typeddict(data, stripped_type) + if origin == dict and is_mapping(data): + items_type = get_args(stripped_type)[1] + return {key: _transform_recursive(value, annotation=items_type) for key, value in data.items()} + if ( # List[T] (is_list_type(stripped_type) and is_list(data)) @@ -307,9 +312,14 @@ async def _async_transform_recursive( inner_type = annotation stripped_type = strip_annotated_type(inner_type) + origin = get_origin(stripped_type) or stripped_type if is_typeddict(stripped_type) and is_mapping(data): return await _async_transform_typeddict(data, stripped_type) + if origin == dict and is_mapping(data): + items_type = get_args(stripped_type)[1] + return {key: _transform_recursive(value, annotation=items_type) for key, value in data.items()} + if ( # List[T] (is_list_type(stripped_type) and is_list(data)) diff --git a/src/anthropic/_version.py b/src/anthropic/_version.py index 351b837e..aa6674b6 100644 --- a/src/anthropic/_version.py +++ b/src/anthropic/_version.py @@ -1,4 +1,4 @@ # File generated from our OpenAPI spec by Stainless. 
See CONTRIBUTING.md for details. __title__ = "anthropic" -__version__ = "0.45.2" # x-release-please-version +__version__ = "0.46.0" # x-release-please-version diff --git a/src/anthropic/pagination.py b/src/anthropic/pagination.py index b6ee5293..c4553fba 100644 --- a/src/anthropic/pagination.py +++ b/src/anthropic/pagination.py @@ -23,6 +23,14 @@ def _get_page_items(self) -> List[_T]: return [] return data + @override + def has_next_page(self) -> bool: + has_more = self.has_more + if has_more is not None and has_more is False: + return False + + return super().has_next_page() + @override def next_page_info(self) -> Optional[PageInfo]: if self._options.params.get("before_id"): @@ -52,6 +60,14 @@ def _get_page_items(self) -> List[_T]: return [] return data + @override + def has_next_page(self) -> bool: + has_more = self.has_more + if has_more is not None and has_more is False: + return False + + return super().has_next_page() + @override def next_page_info(self) -> Optional[PageInfo]: if self._options.params.get("before_id"): diff --git a/src/anthropic/types/model.py b/src/anthropic/types/model.py index 71157772..6fcafc5e 100644 --- a/src/anthropic/types/model.py +++ b/src/anthropic/types/model.py @@ -6,7 +6,6 @@ __all__ = ["Model"] Model: TypeAlias = Union[ - str, Literal[ "claude-3-5-haiku-latest", "claude-3-5-haiku-20241022", @@ -20,4 +19,5 @@ "claude-2.1", "claude-2.0", ], + str, ] diff --git a/src/anthropic/types/model_param.py b/src/anthropic/types/model_param.py index 5d4a1560..26982563 100644 --- a/src/anthropic/types/model_param.py +++ b/src/anthropic/types/model_param.py @@ -8,7 +8,6 @@ __all__ = ["ModelParam"] ModelParam: TypeAlias = Union[ - str, Literal[ "claude-3-5-haiku-latest", "claude-3-5-haiku-20241022", @@ -22,4 +21,5 @@ "claude-2.1", "claude-2.0", ], + str, ] diff --git a/tests/api_resources/beta/messages/test_batches.py b/tests/api_resources/beta/messages/test_batches.py index 3c06576f..a3cb838f 100644 --- 
a/tests/api_resources/beta/messages/test_batches.py +++ b/tests/api_resources/beta/messages/test_batches.py @@ -38,7 +38,7 @@ def test_method_create(self, client: Anthropic) -> None: "role": "user", } ], - "model": "claude-3-5-sonnet-20241022", + "model": "claude-3-5-haiku-latest", }, } ], @@ -59,7 +59,7 @@ def test_method_create_with_all_params(self, client: Anthropic) -> None: "role": "user", } ], - "model": "claude-3-5-sonnet-20241022", + "model": "claude-3-5-haiku-latest", "metadata": {"user_id": "13803d75-b4b5-4c3e-b2a2-6f21399b021b"}, "stop_sequences": ["string"], "stream": False, @@ -129,7 +129,7 @@ def test_raw_response_create(self, client: Anthropic) -> None: "role": "user", } ], - "model": "claude-3-5-sonnet-20241022", + "model": "claude-3-5-haiku-latest", }, } ], @@ -154,7 +154,7 @@ def test_streaming_response_create(self, client: Anthropic) -> None: "role": "user", } ], - "model": "claude-3-5-sonnet-20241022", + "model": "claude-3-5-haiku-latest", }, } ], @@ -397,7 +397,7 @@ async def test_method_create(self, async_client: AsyncAnthropic) -> None: "role": "user", } ], - "model": "claude-3-5-sonnet-20241022", + "model": "claude-3-5-haiku-latest", }, } ], @@ -418,7 +418,7 @@ async def test_method_create_with_all_params(self, async_client: AsyncAnthropic) "role": "user", } ], - "model": "claude-3-5-sonnet-20241022", + "model": "claude-3-5-haiku-latest", "metadata": {"user_id": "13803d75-b4b5-4c3e-b2a2-6f21399b021b"}, "stop_sequences": ["string"], "stream": False, @@ -488,7 +488,7 @@ async def test_raw_response_create(self, async_client: AsyncAnthropic) -> None: "role": "user", } ], - "model": "claude-3-5-sonnet-20241022", + "model": "claude-3-5-haiku-latest", }, } ], @@ -513,7 +513,7 @@ async def test_streaming_response_create(self, async_client: AsyncAnthropic) -> "role": "user", } ], - "model": "claude-3-5-sonnet-20241022", + "model": "claude-3-5-haiku-latest", }, } ], diff --git a/tests/api_resources/beta/test_messages.py 
b/tests/api_resources/beta/test_messages.py index 1d6fae2d..03773514 100644 --- a/tests/api_resources/beta/test_messages.py +++ b/tests/api_resources/beta/test_messages.py @@ -30,7 +30,7 @@ def test_method_create_overload_1(self, client: Anthropic) -> None: "role": "user", } ], - model="claude-3-5-sonnet-20241022", + model="claude-3-5-haiku-latest", ) assert_matches_type(BetaMessage, message, path=["response"]) @@ -44,7 +44,7 @@ def test_method_create_with_all_params_overload_1(self, client: Anthropic) -> No "role": "user", } ], - model="claude-3-5-sonnet-20241022", + model="claude-3-5-haiku-latest", metadata={"user_id": "13803d75-b4b5-4c3e-b2a2-6f21399b021b"}, stop_sequences=["string"], stream=False, @@ -107,7 +107,7 @@ def test_raw_response_create_overload_1(self, client: Anthropic) -> None: "role": "user", } ], - model="claude-3-5-sonnet-20241022", + model="claude-3-5-haiku-latest", ) assert response.is_closed is True @@ -125,7 +125,7 @@ def test_streaming_response_create_overload_1(self, client: Anthropic) -> None: "role": "user", } ], - model="claude-3-5-sonnet-20241022", + model="claude-3-5-haiku-latest", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -145,7 +145,7 @@ def test_method_create_overload_2(self, client: Anthropic) -> None: "role": "user", } ], - model="claude-3-5-sonnet-20241022", + model="claude-3-5-haiku-latest", stream=True, ) message_stream.response.close() @@ -160,7 +160,7 @@ def test_method_create_with_all_params_overload_2(self, client: Anthropic) -> No "role": "user", } ], - model="claude-3-5-sonnet-20241022", + model="claude-3-5-haiku-latest", stream=True, metadata={"user_id": "13803d75-b4b5-4c3e-b2a2-6f21399b021b"}, stop_sequences=["string"], @@ -223,7 +223,7 @@ def test_raw_response_create_overload_2(self, client: Anthropic) -> None: "role": "user", } ], - model="claude-3-5-sonnet-20241022", + model="claude-3-5-haiku-latest", stream=True, ) @@ -241,7 +241,7 @@ def 
test_streaming_response_create_overload_2(self, client: Anthropic) -> None: "role": "user", } ], - model="claude-3-5-sonnet-20241022", + model="claude-3-5-haiku-latest", stream=True, ) as response: assert not response.is_closed @@ -261,7 +261,7 @@ def test_method_count_tokens(self, client: Anthropic) -> None: "role": "user", } ], - model="string", + model="claude-3-5-haiku-latest", ) assert_matches_type(BetaMessageTokensCount, message, path=["response"]) @@ -274,7 +274,7 @@ def test_method_count_tokens_with_all_params(self, client: Anthropic) -> None: "role": "user", } ], - model="string", + model="claude-3-5-haiku-latest", system=[ { "text": "Today's date is 2024-06-01.", @@ -330,7 +330,7 @@ def test_raw_response_count_tokens(self, client: Anthropic) -> None: "role": "user", } ], - model="string", + model="claude-3-5-haiku-latest", ) assert response.is_closed is True @@ -347,7 +347,7 @@ def test_streaming_response_count_tokens(self, client: Anthropic) -> None: "role": "user", } ], - model="string", + model="claude-3-5-haiku-latest", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -371,7 +371,7 @@ async def test_method_create_overload_1(self, async_client: AsyncAnthropic) -> N "role": "user", } ], - model="claude-3-5-sonnet-20241022", + model="claude-3-5-haiku-latest", ) assert_matches_type(BetaMessage, message, path=["response"]) @@ -385,7 +385,7 @@ async def test_method_create_with_all_params_overload_1(self, async_client: Asyn "role": "user", } ], - model="claude-3-5-sonnet-20241022", + model="claude-3-5-haiku-latest", metadata={"user_id": "13803d75-b4b5-4c3e-b2a2-6f21399b021b"}, stop_sequences=["string"], stream=False, @@ -448,7 +448,7 @@ async def test_raw_response_create_overload_1(self, async_client: AsyncAnthropic "role": "user", } ], - model="claude-3-5-sonnet-20241022", + model="claude-3-5-haiku-latest", ) assert response.is_closed is True @@ -466,7 +466,7 @@ async def 
test_streaming_response_create_overload_1(self, async_client: AsyncAnt "role": "user", } ], - model="claude-3-5-sonnet-20241022", + model="claude-3-5-haiku-latest", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -486,7 +486,7 @@ async def test_method_create_overload_2(self, async_client: AsyncAnthropic) -> N "role": "user", } ], - model="claude-3-5-sonnet-20241022", + model="claude-3-5-haiku-latest", stream=True, ) await message_stream.response.aclose() @@ -501,7 +501,7 @@ async def test_method_create_with_all_params_overload_2(self, async_client: Asyn "role": "user", } ], - model="claude-3-5-sonnet-20241022", + model="claude-3-5-haiku-latest", stream=True, metadata={"user_id": "13803d75-b4b5-4c3e-b2a2-6f21399b021b"}, stop_sequences=["string"], @@ -564,7 +564,7 @@ async def test_raw_response_create_overload_2(self, async_client: AsyncAnthropic "role": "user", } ], - model="claude-3-5-sonnet-20241022", + model="claude-3-5-haiku-latest", stream=True, ) @@ -582,7 +582,7 @@ async def test_streaming_response_create_overload_2(self, async_client: AsyncAnt "role": "user", } ], - model="claude-3-5-sonnet-20241022", + model="claude-3-5-haiku-latest", stream=True, ) as response: assert not response.is_closed @@ -602,7 +602,7 @@ async def test_method_count_tokens(self, async_client: AsyncAnthropic) -> None: "role": "user", } ], - model="string", + model="claude-3-5-haiku-latest", ) assert_matches_type(BetaMessageTokensCount, message, path=["response"]) @@ -615,7 +615,7 @@ async def test_method_count_tokens_with_all_params(self, async_client: AsyncAnth "role": "user", } ], - model="string", + model="claude-3-5-haiku-latest", system=[ { "text": "Today's date is 2024-06-01.", @@ -671,7 +671,7 @@ async def test_raw_response_count_tokens(self, async_client: AsyncAnthropic) -> "role": "user", } ], - model="string", + model="claude-3-5-haiku-latest", ) assert response.is_closed is True @@ -688,7 +688,7 @@ 
async def test_streaming_response_count_tokens(self, async_client: AsyncAnthropi "role": "user", } ], - model="string", + model="claude-3-5-haiku-latest", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" diff --git a/tests/api_resources/messages/test_batches.py b/tests/api_resources/messages/test_batches.py index de84b3c3..e3a3f5e1 100644 --- a/tests/api_resources/messages/test_batches.py +++ b/tests/api_resources/messages/test_batches.py @@ -37,7 +37,7 @@ def test_method_create(self, client: Anthropic) -> None: "role": "user", } ], - "model": "claude-3-5-sonnet-20241022", + "model": "claude-3-5-haiku-latest", }, } ], @@ -58,7 +58,7 @@ def test_raw_response_create(self, client: Anthropic) -> None: "role": "user", } ], - "model": "claude-3-5-sonnet-20241022", + "model": "claude-3-5-haiku-latest", }, } ], @@ -83,7 +83,7 @@ def test_streaming_response_create(self, client: Anthropic) -> None: "role": "user", } ], - "model": "claude-3-5-sonnet-20241022", + "model": "claude-3-5-haiku-latest", }, } ], @@ -292,7 +292,7 @@ async def test_method_create(self, async_client: AsyncAnthropic) -> None: "role": "user", } ], - "model": "claude-3-5-sonnet-20241022", + "model": "claude-3-5-haiku-latest", }, } ], @@ -313,7 +313,7 @@ async def test_raw_response_create(self, async_client: AsyncAnthropic) -> None: "role": "user", } ], - "model": "claude-3-5-sonnet-20241022", + "model": "claude-3-5-haiku-latest", }, } ], @@ -338,7 +338,7 @@ async def test_streaming_response_create(self, async_client: AsyncAnthropic) -> "role": "user", } ], - "model": "claude-3-5-sonnet-20241022", + "model": "claude-3-5-haiku-latest", }, } ], diff --git a/tests/api_resources/test_completions.py b/tests/api_resources/test_completions.py index 24374a13..828900de 100644 --- a/tests/api_resources/test_completions.py +++ b/tests/api_resources/test_completions.py @@ -21,7 +21,7 @@ class TestCompletions: def test_method_create_overload_1(self, 
client: Anthropic) -> None: completion = client.completions.create( max_tokens_to_sample=256, - model="string", + model="claude-3-5-haiku-latest", prompt="\n\nHuman: Hello, world!\n\nAssistant:", ) assert_matches_type(Completion, completion, path=["response"]) @@ -30,7 +30,7 @@ def test_method_create_overload_1(self, client: Anthropic) -> None: def test_method_create_with_all_params_overload_1(self, client: Anthropic) -> None: completion = client.completions.create( max_tokens_to_sample=256, - model="string", + model="claude-3-5-haiku-latest", prompt="\n\nHuman: Hello, world!\n\nAssistant:", metadata={"user_id": "13803d75-b4b5-4c3e-b2a2-6f21399b021b"}, stop_sequences=["string"], @@ -45,7 +45,7 @@ def test_method_create_with_all_params_overload_1(self, client: Anthropic) -> No def test_raw_response_create_overload_1(self, client: Anthropic) -> None: response = client.completions.with_raw_response.create( max_tokens_to_sample=256, - model="string", + model="claude-3-5-haiku-latest", prompt="\n\nHuman: Hello, world!\n\nAssistant:", ) @@ -58,7 +58,7 @@ def test_raw_response_create_overload_1(self, client: Anthropic) -> None: def test_streaming_response_create_overload_1(self, client: Anthropic) -> None: with client.completions.with_streaming_response.create( max_tokens_to_sample=256, - model="string", + model="claude-3-5-haiku-latest", prompt="\n\nHuman: Hello, world!\n\nAssistant:", ) as response: assert not response.is_closed @@ -73,7 +73,7 @@ def test_streaming_response_create_overload_1(self, client: Anthropic) -> None: def test_method_create_overload_2(self, client: Anthropic) -> None: completion_stream = client.completions.create( max_tokens_to_sample=256, - model="string", + model="claude-3-5-haiku-latest", prompt="\n\nHuman: Hello, world!\n\nAssistant:", stream=True, ) @@ -83,7 +83,7 @@ def test_method_create_overload_2(self, client: Anthropic) -> None: def test_method_create_with_all_params_overload_2(self, client: Anthropic) -> None: completion_stream = 
client.completions.create( max_tokens_to_sample=256, - model="string", + model="claude-3-5-haiku-latest", prompt="\n\nHuman: Hello, world!\n\nAssistant:", stream=True, metadata={"user_id": "13803d75-b4b5-4c3e-b2a2-6f21399b021b"}, @@ -98,7 +98,7 @@ def test_method_create_with_all_params_overload_2(self, client: Anthropic) -> No def test_raw_response_create_overload_2(self, client: Anthropic) -> None: response = client.completions.with_raw_response.create( max_tokens_to_sample=256, - model="string", + model="claude-3-5-haiku-latest", prompt="\n\nHuman: Hello, world!\n\nAssistant:", stream=True, ) @@ -111,7 +111,7 @@ def test_raw_response_create_overload_2(self, client: Anthropic) -> None: def test_streaming_response_create_overload_2(self, client: Anthropic) -> None: with client.completions.with_streaming_response.create( max_tokens_to_sample=256, - model="string", + model="claude-3-5-haiku-latest", prompt="\n\nHuman: Hello, world!\n\nAssistant:", stream=True, ) as response: @@ -131,7 +131,7 @@ class TestAsyncCompletions: async def test_method_create_overload_1(self, async_client: AsyncAnthropic) -> None: completion = await async_client.completions.create( max_tokens_to_sample=256, - model="string", + model="claude-3-5-haiku-latest", prompt="\n\nHuman: Hello, world!\n\nAssistant:", ) assert_matches_type(Completion, completion, path=["response"]) @@ -140,7 +140,7 @@ async def test_method_create_overload_1(self, async_client: AsyncAnthropic) -> N async def test_method_create_with_all_params_overload_1(self, async_client: AsyncAnthropic) -> None: completion = await async_client.completions.create( max_tokens_to_sample=256, - model="string", + model="claude-3-5-haiku-latest", prompt="\n\nHuman: Hello, world!\n\nAssistant:", metadata={"user_id": "13803d75-b4b5-4c3e-b2a2-6f21399b021b"}, stop_sequences=["string"], @@ -155,7 +155,7 @@ async def test_method_create_with_all_params_overload_1(self, async_client: Asyn async def test_raw_response_create_overload_1(self, 
async_client: AsyncAnthropic) -> None: response = await async_client.completions.with_raw_response.create( max_tokens_to_sample=256, - model="string", + model="claude-3-5-haiku-latest", prompt="\n\nHuman: Hello, world!\n\nAssistant:", ) @@ -168,7 +168,7 @@ async def test_raw_response_create_overload_1(self, async_client: AsyncAnthropic async def test_streaming_response_create_overload_1(self, async_client: AsyncAnthropic) -> None: async with async_client.completions.with_streaming_response.create( max_tokens_to_sample=256, - model="string", + model="claude-3-5-haiku-latest", prompt="\n\nHuman: Hello, world!\n\nAssistant:", ) as response: assert not response.is_closed @@ -183,7 +183,7 @@ async def test_streaming_response_create_overload_1(self, async_client: AsyncAnt async def test_method_create_overload_2(self, async_client: AsyncAnthropic) -> None: completion_stream = await async_client.completions.create( max_tokens_to_sample=256, - model="string", + model="claude-3-5-haiku-latest", prompt="\n\nHuman: Hello, world!\n\nAssistant:", stream=True, ) @@ -193,7 +193,7 @@ async def test_method_create_overload_2(self, async_client: AsyncAnthropic) -> N async def test_method_create_with_all_params_overload_2(self, async_client: AsyncAnthropic) -> None: completion_stream = await async_client.completions.create( max_tokens_to_sample=256, - model="string", + model="claude-3-5-haiku-latest", prompt="\n\nHuman: Hello, world!\n\nAssistant:", stream=True, metadata={"user_id": "13803d75-b4b5-4c3e-b2a2-6f21399b021b"}, @@ -208,7 +208,7 @@ async def test_method_create_with_all_params_overload_2(self, async_client: Asyn async def test_raw_response_create_overload_2(self, async_client: AsyncAnthropic) -> None: response = await async_client.completions.with_raw_response.create( max_tokens_to_sample=256, - model="string", + model="claude-3-5-haiku-latest", prompt="\n\nHuman: Hello, world!\n\nAssistant:", stream=True, ) @@ -221,7 +221,7 @@ async def 
test_raw_response_create_overload_2(self, async_client: AsyncAnthropic async def test_streaming_response_create_overload_2(self, async_client: AsyncAnthropic) -> None: async with async_client.completions.with_streaming_response.create( max_tokens_to_sample=256, - model="string", + model="claude-3-5-haiku-latest", prompt="\n\nHuman: Hello, world!\n\nAssistant:", stream=True, ) as response: diff --git a/tests/api_resources/test_messages.py b/tests/api_resources/test_messages.py index 9fa312d5..374a98ff 100644 --- a/tests/api_resources/test_messages.py +++ b/tests/api_resources/test_messages.py @@ -31,7 +31,7 @@ def test_method_create_overload_1(self, client: Anthropic) -> None: "role": "user", } ], - model="claude-3-5-sonnet-20241022", + model="claude-3-5-haiku-latest", ) assert_matches_type(Message, message, path=["response"]) @@ -45,7 +45,7 @@ def test_method_create_with_all_params_overload_1(self, client: Anthropic) -> No "role": "user", } ], - model="claude-3-5-sonnet-20241022", + model="claude-3-5-haiku-latest", metadata={"user_id": "13803d75-b4b5-4c3e-b2a2-6f21399b021b"}, stop_sequences=["string"], stream=False, @@ -106,7 +106,7 @@ def test_raw_response_create_overload_1(self, client: Anthropic) -> None: "role": "user", } ], - model="claude-3-5-sonnet-20241022", + model="claude-3-5-haiku-latest", ) assert response.is_closed is True @@ -124,7 +124,7 @@ def test_streaming_response_create_overload_1(self, client: Anthropic) -> None: "role": "user", } ], - model="claude-3-5-sonnet-20241022", + model="claude-3-5-haiku-latest", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -144,7 +144,7 @@ def test_method_create_overload_2(self, client: Anthropic) -> None: "role": "user", } ], - model="claude-3-5-sonnet-20241022", + model="claude-3-5-haiku-latest", stream=True, ) message_stream.response.close() @@ -159,7 +159,7 @@ def test_method_create_with_all_params_overload_2(self, client: Anthropic) -> No 
"role": "user", } ], - model="claude-3-5-sonnet-20241022", + model="claude-3-5-haiku-latest", stream=True, metadata={"user_id": "13803d75-b4b5-4c3e-b2a2-6f21399b021b"}, stop_sequences=["string"], @@ -220,7 +220,7 @@ def test_raw_response_create_overload_2(self, client: Anthropic) -> None: "role": "user", } ], - model="claude-3-5-sonnet-20241022", + model="claude-3-5-haiku-latest", stream=True, ) @@ -238,7 +238,7 @@ def test_streaming_response_create_overload_2(self, client: Anthropic) -> None: "role": "user", } ], - model="claude-3-5-sonnet-20241022", + model="claude-3-5-haiku-latest", stream=True, ) as response: assert not response.is_closed @@ -268,7 +268,7 @@ def test_method_count_tokens(self, client: Anthropic) -> None: "role": "user", } ], - model="string", + model="claude-3-5-haiku-latest", ) assert_matches_type(MessageTokensCount, message, path=["response"]) @@ -281,7 +281,7 @@ def test_method_count_tokens_with_all_params(self, client: Anthropic) -> None: "role": "user", } ], - model="string", + model="claude-3-5-haiku-latest", system=[ { "text": "Today's date is 2024-06-01.", @@ -335,7 +335,7 @@ def test_raw_response_count_tokens(self, client: Anthropic) -> None: "role": "user", } ], - model="string", + model="claude-3-5-haiku-latest", ) assert response.is_closed is True @@ -352,7 +352,7 @@ def test_streaming_response_count_tokens(self, client: Anthropic) -> None: "role": "user", } ], - model="string", + model="claude-3-5-haiku-latest", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -376,7 +376,7 @@ async def test_method_create_overload_1(self, async_client: AsyncAnthropic) -> N "role": "user", } ], - model="claude-3-5-sonnet-20241022", + model="claude-3-5-haiku-latest", ) assert_matches_type(Message, message, path=["response"]) @@ -390,7 +390,7 @@ async def test_method_create_with_all_params_overload_1(self, async_client: Asyn "role": "user", } ], - 
model="claude-3-5-sonnet-20241022", + model="claude-3-5-haiku-latest", metadata={"user_id": "13803d75-b4b5-4c3e-b2a2-6f21399b021b"}, stop_sequences=["string"], stream=False, @@ -451,7 +451,7 @@ async def test_raw_response_create_overload_1(self, async_client: AsyncAnthropic "role": "user", } ], - model="claude-3-5-sonnet-20241022", + model="claude-3-5-haiku-latest", ) assert response.is_closed is True @@ -469,7 +469,7 @@ async def test_streaming_response_create_overload_1(self, async_client: AsyncAnt "role": "user", } ], - model="claude-3-5-sonnet-20241022", + model="claude-3-5-haiku-latest", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -489,7 +489,7 @@ async def test_method_create_overload_2(self, async_client: AsyncAnthropic) -> N "role": "user", } ], - model="claude-3-5-sonnet-20241022", + model="claude-3-5-haiku-latest", stream=True, ) await message_stream.response.aclose() @@ -504,7 +504,7 @@ async def test_method_create_with_all_params_overload_2(self, async_client: Asyn "role": "user", } ], - model="claude-3-5-sonnet-20241022", + model="claude-3-5-haiku-latest", stream=True, metadata={"user_id": "13803d75-b4b5-4c3e-b2a2-6f21399b021b"}, stop_sequences=["string"], @@ -565,7 +565,7 @@ async def test_raw_response_create_overload_2(self, async_client: AsyncAnthropic "role": "user", } ], - model="claude-3-5-sonnet-20241022", + model="claude-3-5-haiku-latest", stream=True, ) @@ -583,7 +583,7 @@ async def test_streaming_response_create_overload_2(self, async_client: AsyncAnt "role": "user", } ], - model="claude-3-5-sonnet-20241022", + model="claude-3-5-haiku-latest", stream=True, ) as response: assert not response.is_closed @@ -613,7 +613,7 @@ async def test_method_count_tokens(self, async_client: AsyncAnthropic) -> None: "role": "user", } ], - model="string", + model="claude-3-5-haiku-latest", ) assert_matches_type(MessageTokensCount, message, path=["response"]) @@ -626,7 +626,7 @@ async 
def test_method_count_tokens_with_all_params(self, async_client: AsyncAnth "role": "user", } ], - model="string", + model="claude-3-5-haiku-latest", system=[ { "text": "Today's date is 2024-06-01.", @@ -680,7 +680,7 @@ async def test_raw_response_count_tokens(self, async_client: AsyncAnthropic) -> "role": "user", } ], - model="string", + model="claude-3-5-haiku-latest", ) assert response.is_closed is True @@ -697,7 +697,7 @@ async def test_streaming_response_count_tokens(self, async_client: AsyncAnthropi "role": "user", } ], - model="string", + model="claude-3-5-haiku-latest", ) as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" diff --git a/tests/test_client.py b/tests/test_client.py index 5c1f0759..f60e2fb9 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -816,7 +816,7 @@ def retry_handler(_request: httpx.Request) -> httpx.Response: "role": "user", } ], - model="claude-3-5-sonnet-20241022", + model="claude-3-5-haiku-latest", ) assert response.retries_taken == failures_before_success @@ -849,7 +849,7 @@ def retry_handler(_request: httpx.Request) -> httpx.Response: "role": "user", } ], - model="claude-3-5-sonnet-20241022", + model="claude-3-5-haiku-latest", extra_headers={"x-stainless-retry-count": Omit()}, ) @@ -882,7 +882,7 @@ def retry_handler(_request: httpx.Request) -> httpx.Response: "role": "user", } ], - model="claude-3-5-sonnet-20241022", + model="claude-3-5-haiku-latest", extra_headers={"x-stainless-retry-count": "42"}, ) @@ -915,7 +915,7 @@ def retry_handler(_request: httpx.Request) -> httpx.Response: "role": "user", } ], - model="claude-3-5-sonnet-20241022", + model="claude-3-5-haiku-latest", ) as response: assert response.retries_taken == failures_before_success assert int(response.http_request.headers.get("x-stainless-retry-count")) == failures_before_success @@ -1697,7 +1697,7 @@ def retry_handler(_request: httpx.Request) -> httpx.Response: "role": "user", } ], - 
model="claude-3-5-sonnet-20241022", + model="claude-3-5-haiku-latest", ) assert response.retries_taken == failures_before_success @@ -1731,7 +1731,7 @@ def retry_handler(_request: httpx.Request) -> httpx.Response: "role": "user", } ], - model="claude-3-5-sonnet-20241022", + model="claude-3-5-haiku-latest", extra_headers={"x-stainless-retry-count": Omit()}, ) @@ -1765,7 +1765,7 @@ def retry_handler(_request: httpx.Request) -> httpx.Response: "role": "user", } ], - model="claude-3-5-sonnet-20241022", + model="claude-3-5-haiku-latest", extra_headers={"x-stainless-retry-count": "42"}, ) @@ -1799,7 +1799,7 @@ def retry_handler(_request: httpx.Request) -> httpx.Response: "role": "user", } ], - model="claude-3-5-sonnet-20241022", + model="claude-3-5-haiku-latest", ) as response: assert response.retries_taken == failures_before_success assert int(response.http_request.headers.get("x-stainless-retry-count")) == failures_before_success diff --git a/tests/test_transform.py b/tests/test_transform.py index 0b15cc64..05a2ae13 100644 --- a/tests/test_transform.py +++ b/tests/test_transform.py @@ -2,7 +2,7 @@ import io import pathlib -from typing import Any, List, Union, TypeVar, Iterable, Optional, cast +from typing import Any, Dict, List, Union, TypeVar, Iterable, Optional, cast from datetime import date, datetime from typing_extensions import Required, Annotated, TypedDict @@ -388,6 +388,15 @@ def my_iter() -> Iterable[Baz8]: } +@parametrize +@pytest.mark.asyncio +async def test_dictionary_items(use_async: bool) -> None: + class DictItems(TypedDict): + foo_baz: Annotated[str, PropertyInfo(alias="fooBaz")] + + assert await transform({"foo": {"foo_baz": "bar"}}, Dict[str, DictItems], use_async) == {"foo": {"fooBaz": "bar"}} + + class TypedDictIterableUnionStr(TypedDict): foo: Annotated[Union[str, Iterable[Baz8]], PropertyInfo(alias="FOO")]