diff --git a/python/aistore/common_requirements b/python/aistore/common_requirements index b4d684d4346..f976efe9908 100644 --- a/python/aistore/common_requirements +++ b/python/aistore/common_requirements @@ -10,4 +10,5 @@ pytest==7.4.4 PyYAML==6.0.2 requests==2.32.3 typing_extensions==4.12.2 -webdataset==0.2.100 \ No newline at end of file +webdataset==0.2.100 +xxhash==3.5.0 \ No newline at end of file diff --git a/python/aistore/sdk/bucket.py b/python/aistore/sdk/bucket.py index d3de3d48559..62b59a529d9 100644 --- a/python/aistore/sdk/bucket.py +++ b/python/aistore/sdk/bucket.py @@ -832,7 +832,7 @@ def object(self, obj_name: str, props: ObjectProps = None) -> Object: Returns: The object created. """ - details = BucketDetails(self.name, self.provider, self.qparam) + details = BucketDetails(self.name, self.provider, self.qparam, self.get_path()) return Object( client=self.client, bck_details=details, name=obj_name, props=props ) diff --git a/python/aistore/sdk/const.py b/python/aistore/sdk/const.py index 719b555eb36..54b007595db 100644 --- a/python/aistore/sdk/const.py +++ b/python/aistore/sdk/const.py @@ -1,5 +1,5 @@ # -# Copyright (c) 2021-2024, NVIDIA CORPORATION. All rights reserved. +# Copyright (c) 2021-2025, NVIDIA CORPORATION. All rights reserved. # # Standard Header Keys @@ -171,3 +171,6 @@ # Ref: https://www.rfc-editor.org/rfc/rfc7233#section-2.1 BYTE_RANGE_PREFIX_LENGTH = 6 + +# Custom seed (MLCG32) +XX_HASH_SEED = 1103515245 diff --git a/python/aistore/sdk/obj/object.py b/python/aistore/sdk/obj/object.py index 511e4a73b3a..b7c9e0d66bc 100644 --- a/python/aistore/sdk/obj/object.py +++ b/python/aistore/sdk/obj/object.py @@ -1,5 +1,5 @@ # -# Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved. +# Copyright (c) 2022-2025, NVIDIA CORPORATION. All rights reserved. # import warnings @@ -8,7 +8,7 @@ from io import BufferedWriter from pathlib import Path from typing import Dict - +import os from requests import Response from requests.structures import CaseInsensitiveDict @@ -55,6 +55,7 @@ class BucketDetails: name: str provider: Provider qparams: Dict[str, str] + path: str class Object: @@ -129,7 +130,7 @@ def head(self) -> CaseInsensitiveDict: self._props = ObjectProps(headers) return headers - # pylint: disable=too-many-arguments + # pylint: disable=too-many-arguments,too-many-locals def get_reader( self, archive_config: ArchiveConfig = None, @@ -139,6 +140,7 @@ def get_reader( writer: BufferedWriter = None, latest: bool = False, byte_range: str = None, + direct: bool = False, ) -> ObjectReader: """ Creates and returns an ObjectReader with access to object contents and optionally writes to a provided writer. @@ -153,6 +155,8 @@ def get_reader( latest (bool, optional): GET the latest object version from the associated remote bucket byte_range (str, optional): Byte range in RFC 7233 format for single-range requests (e.g., "bytes=0-499", "bytes=500-", "bytes=-500"). See: https://www.rfc-editor.org/rfc/rfc7233#section-2.1. 
+ direct (bool, optional): If True, the object content is read directly from the target node, + bypassing the proxy Returns: An ObjectReader which can be iterated over to stream chunks of object content or used to read all content @@ -197,13 +201,13 @@ def get_reader( int(byte_range_l) if byte_range_l else None, int(byte_range_r) if byte_range_r else None, ) - obj_client = ObjectClient( request_client=self._client, path=self._object_path, params=params, headers=headers, byte_range=byte_range_tuple, + uname=os.path.join(self._bck_details.path, self.name) if direct else None, ) obj_reader = ObjectReader( diff --git a/python/aistore/sdk/obj/object_client.py b/python/aistore/sdk/obj/object_client.py index 2c5dba0a56e..aa3dc70b861 100644 --- a/python/aistore/sdk/obj/object_client.py +++ b/python/aistore/sdk/obj/object_client.py @@ -9,6 +9,7 @@ from aistore.sdk.const import HTTP_METHOD_GET, HTTP_METHOD_HEAD, HEADER_RANGE from aistore.sdk.obj.object_attributes import ObjectAttributes from aistore.sdk.request_client import RequestClient +from aistore.sdk.errors import ErrObjNotFound class ObjectClient: @@ -21,6 +22,7 @@ class ObjectClient: params (Dict[str, str]): Query parameters for the request headers (Optional[Dict[str, str]]): HTTP request headers byte_range (Optional[Tuple[Optional[int], Optional[int]]): Tuple representing the byte range + uname (Optional[str]): Unique (namespaced) name of the object (used for determining the target node) """ # pylint: disable=too-many-arguments @@ -31,47 +33,96 @@ def __init__( params: Dict[str, str], headers: Optional[Dict[str, str]] = None, byte_range: Optional[Tuple[Optional[int], Optional[int]]] = (None, None), + uname: Optional[str] = None, ): self._request_client = request_client self._request_path = path self._request_params = params self._request_headers = headers self._byte_range = byte_range + self._uname = uname + if uname: + self._initialize_target_client() + + def _initialize_target_client(self, force: bool = False): + """ + Initialize a new RequestClient pointing to the target node for the object. + """ + smap = self._request_client.get_smap(force) + target_node = smap.get_target_for_object(self._uname) + new_client = self._request_client.clone( + base_url=target_node.public_net.direct_url + ) + self._request_client = new_client + + def _retry_with_new_smap(self, method: str, **kwargs): + """ + Retry the request with the latest `smap` if a 404 error is encountered. + + Args: + method (str): HTTP method (e.g., GET, HEAD). + **kwargs: Additional arguments to pass to the request. + + Returns: + requests.Response: The response object from the retried request. + """ + if self._uname: + # Force update the smap + self._initialize_target_client(force=True) + + # Retry the request + return self._request_client.request(method, **kwargs) def get(self, stream: bool, offset: Optional[int] = None) -> requests.Response: """ - Make a request to AIS to get the object content, applying an optional offset. + Fetch object content from AIS, applying an optional offset. Args: stream (bool): If True, stream the response content. - offset (int, optional): The offset in bytes to apply. If not provided, no offset - is applied. + offset (int, optional): Byte offset for reading the object. Defaults to None. Returns: - requests.Response: The response object from the request. + requests.Response: The response object containing the content. + + Raises: + ErrObjNotFound: If the object is not found and cannot be retried. 
+ requests.RequestException: For network-related errors. + Exception: For any unexpected failures. """ headers = self._request_headers.copy() if self._request_headers else {} if offset: l, r = self._byte_range if l is not None: - l = l + offset + l += offset elif r is not None: - r = r - offset + r -= offset else: l = offset headers[HEADER_RANGE] = f"bytes={l or ''}-{r or ''}" - resp = self._request_client.request( - HTTP_METHOD_GET, - path=self._request_path, - params=self._request_params, - stream=stream, - headers=headers, - ) - resp.raise_for_status() - return resp + try: + resp = self._request_client.request( + HTTP_METHOD_GET, + path=self._request_path, + params=self._request_params, + stream=stream, + headers=headers, + ) + resp.raise_for_status() + return resp + + except ErrObjNotFound as _: + if self._uname: + return self._retry_with_new_smap( + HTTP_METHOD_GET, + path=self._request_path, + params=self._request_params, + stream=stream, + headers=headers, + ) + raise def head(self) -> ObjectAttributes: """ diff --git a/python/aistore/sdk/request_client.py b/python/aistore/sdk/request_client.py index 4efb47127b5..91013b34149 100644 --- a/python/aistore/sdk/request_client.py +++ b/python/aistore/sdk/request_client.py @@ -15,10 +15,15 @@ HEADER_LOCATION, STATUS_REDIRECT_PERM, STATUS_REDIRECT_TMP, + URL_PATH_DAEMON, + WHAT_SMAP, + QPARAM_WHAT, + HTTP_METHOD_GET, ) from aistore.sdk.session_manager import SessionManager from aistore.sdk.utils import parse_ais_error, handle_errors, decode_response from aistore.version import __version__ as sdk_version +from aistore.sdk.types import Smap T = TypeVar("T") @@ -52,6 +57,8 @@ def __init__( self._token = token self._timeout = timeout self._error_handler = error_handler + # smap is used to calculate the target node for a given object + self._smap = None @property def base_url(self): @@ -101,6 +108,42 @@ def token(self, token: str): """ self._token = token + def get_smap(self, force_update: bool = False) -> "Smap": + """Return the smap.""" + if not self._smap or force_update: + self._smap = self.request_deserialize( + HTTP_METHOD_GET, + path=URL_PATH_DAEMON, + res_model=Smap, + params={QPARAM_WHAT: WHAT_SMAP}, + ) + return self._smap + + def clone(self, base_url: Optional[str] = None) -> "RequestClient": + """ + Create a copy of the current RequestClient instance with an optional new base URL. + + Args: + base_url (Optional[str]): New base URL for the cloned client. Defaults to the existing base URL. + + Returns: + RequestClient: A new instance with the same settings but an optional different base URL. + """ + + # Default to the existing base URL if none is provided + base_url = base_url or self._base_url + + # Ensure the base URL ends with "/v1" + base_url = base_url if base_url.endswith("/v1") else urljoin(base_url, "v1") + + return RequestClient( + endpoint=base_url, + session_manager=self._session_manager, + timeout=self._timeout, + token=self._token, + error_handler=self._error_handler, + ) + def request_deserialize( self, method: str, path: str, res_model: Type[T], **kwargs ) -> T: diff --git a/python/aistore/sdk/types.py b/python/aistore/sdk/types.py index a24f156c94a..2d189a51a7e 100644 --- a/python/aistore/sdk/types.py +++ b/python/aistore/sdk/types.py @@ -1,5 +1,5 @@ # -# Copyright (c) 2021-2024, NVIDIA CORPORATION. All rights reserved. +# Copyright (c) 2021-2025, NVIDIA CORPORATION. All rights reserved. 
# from __future__ import annotations @@ -24,6 +24,8 @@ AIS_LOCATION, AIS_MIRROR_COPIES, ) +from aistore.sdk.utils import get_digest, xoshiro256_hash +from aistore.sdk.errors import AISError # pylint: disable=too-few-public-methods,unused-variable,missing-function-docstring,too-many-lines @@ -58,6 +60,10 @@ class Snode(BaseModel): intra_control_net: NetInfo = None intra_data_net: NetInfo = None flags: int = 0 + id_digest: int = 0 + + def in_maint_or_decomm(self) -> bool: + return (self.flags & (1 << 2 | 1 << 3)) != 0 class Smap(BaseModel): @@ -72,6 +78,40 @@ class Smap(BaseModel): uuid: str = "" creation_time: str = "" + def get_target_for_object(self, uname: str) -> Snode: + """ + Determine the target node responsible for an object based on its bucket path and name. + + Args: + uname (str): Fully qualified (namespaced) object name (e.g., f"{bck.get_path()}{obj.name}"). + + Returns: + Snode: The assigned target node. + + Raises: + AISError: If no suitable target node is found. + """ + digest = get_digest(uname) + + selected_node, max_hash = None, -1 + + for tsi in self.tmap.values(): + if tsi.in_maint_or_decomm(): + continue # Skip nodes in maintenance or decommissioned mode + + # Compute hash using Xoshiro256 + cs = xoshiro256_hash(tsi.id_digest ^ digest) + + if cs > max_hash: + max_hash, selected_node = cs, tsi + + if selected_node is None: + raise AISError( + 500, f"No available targets in the map. Total nodes: {len(self.tmap)}" + ) + + return selected_node + class BucketEntry(msgspec.Struct): """ diff --git a/python/aistore/sdk/utils.py b/python/aistore/sdk/utils.py index b8e5324deae..8c8c4450d36 100644 --- a/python/aistore/sdk/utils.py +++ b/python/aistore/sdk/utils.py @@ -11,6 +11,7 @@ import braceexpand import humanize import requests +import xxhash from msgspec import msgpack from pydantic.v1 import BaseModel, parse_raw_as @@ -20,6 +21,7 @@ HEADER_CONTENT_TYPE, MSGPACK_CONTENT_TYPE, DEFAULT_LOG_FORMAT, + XX_HASH_SEED, ) from aistore.sdk.errors import ( AISError, @@ -34,6 +36,13 @@ from aistore.sdk.provider import Provider T = TypeVar("T") +MASK = 0xFFFFFFFFFFFFFFFF # 64-bit mask +# fmt: off +GOLDEN_RATIO = 0x9e3779b97f4a7c15 +CONST1 = 0xbf58476d1ce4e5b9 +CONST2 = 0x94d049bb133111eb +# fmt: on +ROTATION_BITS = 7 class HttpError(BaseModel): @@ -312,3 +321,31 @@ def get_logger(name: str, log_format: str = DEFAULT_LOG_FORMAT): logger.addHandler(handler) logger.propagate = False return logger + + +# Translated from: +# http://xoshiro.di.unimi.it/xoshiro256starstar.c +# Scrambled Linear Pseudorandom Number Generators +# David Blackman, Sebastiano Vigna +# https://arxiv.org/abs/1805.01407 +# http://www.pcg-random.org/posts/a-quick-look-at-xoshiro256.html +def xoshiro256_hash(seed: int) -> int: + """ + Xoshiro256-inspired hash function with 64-bit overflow behavior. + """ + z = (seed + GOLDEN_RATIO) & MASK + z = (z ^ (z >> 30)) * CONST1 & MASK + z = (z ^ (z >> 27)) * CONST2 & MASK + z = (z ^ (z >> 31)) + GOLDEN_RATIO & MASK + z = (z ^ (z >> 30)) * CONST1 & MASK + z = (z ^ (z >> 27)) * CONST2 & MASK + z = (z ^ (z >> 31)) * 5 & MASK + rotated = ((z << ROTATION_BITS) | (z >> (64 - ROTATION_BITS))) & MASK + return (rotated * 9) & MASK + + +def get_digest(name: str) -> int: + """ + Get the xxhash digest of a given string. 
+ """ + return xxhash.xxh64(seed=XX_HASH_SEED, input=name.encode("utf-8")).intdigest() diff --git a/python/pyproject.toml b/python/pyproject.toml index 6240bc3613b..90dcefa614e 100644 --- a/python/pyproject.toml +++ b/python/pyproject.toml @@ -47,6 +47,7 @@ dependencies = [ "humanize>=4.6.0", "braceexpand>=0.1.7", "msgspec>=0.15.1", + "xxhash>=3.5.0", ] keywords = [ diff --git a/python/tests/integration/sdk/test_object_ops.py b/python/tests/integration/sdk/test_object_ops.py index 822e86b54a4..1916316607b 100644 --- a/python/tests/integration/sdk/test_object_ops.py +++ b/python/tests/integration/sdk/test_object_ops.py @@ -1,5 +1,5 @@ # -# Copyright (c) 2018-2024, NVIDIA CORPORATION. All rights reserved. +# Copyright (c) 2018-2025, NVIDIA CORPORATION. All rights reserved. # import random import unittest @@ -15,6 +15,7 @@ HEADER_CONTENT_LENGTH, UTF_ENCODING, ) +from aistore import Client from aistore.sdk.list_object_flag import ListObjectFlag from aistore.sdk.archive_config import ArchiveMode, ArchiveConfig @@ -397,3 +398,41 @@ def test_get_object_from_url(self): fetched_obj = self.client.get_object_from_url(url) fetched_content = fetched_obj.get_reader().read_all() self.assertEqual(content, fetched_content) + + @unittest.skipIf( + len(Client(CLUSTER_ENDPOINT).cluster().get_info().tmap) < 2, + "Test requires more than one target", + ) + def test_get_object_direct(self): + """ + Test fetching objects directly from the target node. + """ + self.bucket = self._create_bucket() + total_objects = 20 + obj_names = self._create_objects(num_obj=total_objects) + + for obj_name in obj_names: + # Get object data directly from the target + obj_from_direct = ( + self.bucket.object(obj_name).get_reader(direct=True).read_all() + ) + self.assertIsNotNone( + obj_from_direct, f"Direct fetch failed for object: {obj_name}" + ) + + # Get object data via proxy + obj_from_non_direct = self.bucket.object(obj_name).get_reader().read_all() + self.assertIsNotNone( + obj_from_non_direct, f"Proxy fetch failed for object: {obj_name}" + ) + + # Verify direct and proxy data match + self.assertEqual( + obj_from_direct, + obj_from_non_direct, + f"Data mismatch for object: {obj_name}", + ) + + self.assertGreater( + len(obj_from_direct), 0, f"Object data is empty for object: {obj_name}" + ) diff --git a/python/tests/unit/sdk/obj/test_object.py b/python/tests/unit/sdk/obj/test_object.py index 07e0d91b5d8..d44bbac671f 100644 --- a/python/tests/unit/sdk/obj/test_object.py +++ b/python/tests/unit/sdk/obj/test_object.py @@ -60,7 +60,7 @@ def setUp(self) -> None: self.mock_client = Mock() self.bck_qparams = {"propkey": "propval"} self.bucket_details = BucketDetails( - BCK_NAME, AIS_BCK_PROVIDER, self.bck_qparams + BCK_NAME, "ais", self.bck_qparams, f"ais/@#/{BCK_NAME}/" ) self.mock_writer = Mock() self.expected_params = self.bck_qparams @@ -68,7 +68,7 @@ def setUp(self) -> None: def test_properties(self): self.assertEqual(BCK_NAME, self.object.bucket_name) - self.assertEqual(AIS_BCK_PROVIDER, self.object.bucket_provider) + self.assertEqual("ais", self.object.bucket_provider) self.assertEqual(self.bck_qparams, self.object.query_params) self.assertEqual(OBJ_NAME, self.object.name) self.assertIsNone(self.object.props) @@ -128,6 +128,11 @@ def test_get_archregex(self): archive_config = ArchiveConfig(regex=regex, mode=mode) self.get_exec_assert(archive_config=archive_config) + def test_get_direct(self): + self.get_exec_assert( + direct=True, expected_uname=f"{self.bucket_details.path}{OBJ_NAME}" + ) + 
@patch("aistore.sdk.obj.object.ObjectReader") @patch("aistore.sdk.obj.object.ObjectClient") def get_exec_assert(self, mock_obj_client, mock_obj_reader, **kwargs): @@ -138,6 +143,7 @@ def get_exec_assert(self, mock_obj_client, mock_obj_reader, **kwargs): expected_headers = kwargs.pop("expected_headers", {}) expected_byte_range_tuple = kwargs.pop("expected_byte_range_tuple", None) expected_chunk_size = kwargs.get("chunk_size", DEFAULT_CHUNK_SIZE) + expected_uname = kwargs.pop("expected_uname", None) res = self.object.get_reader(**kwargs) @@ -149,6 +155,7 @@ def get_exec_assert(self, mock_obj_client, mock_obj_reader, **kwargs): params=self.expected_params, headers=expected_headers, byte_range=expected_byte_range_tuple, + uname=expected_uname, ) mock_obj_reader.assert_called_with( diff --git a/python/tests/unit/sdk/test_types.py b/python/tests/unit/sdk/test_types.py new file mode 100644 index 00000000000..e9fab3a5651 --- /dev/null +++ b/python/tests/unit/sdk/test_types.py @@ -0,0 +1,104 @@ +from unittest import TestCase +from unittest.mock import patch, create_autospec +from aistore.sdk.types import Smap, Snode +from aistore.sdk.errors import AISError + + +class TestSmap(TestCase): + + def setUp(self): + """Set up reusable test variables to avoid redundant code.""" + self.mock_proxy = create_autospec(Snode, instance=True) + + def smap_with_nodes(self): + """Create a sample Smap with active nodes.""" + node1 = Snode(id_digest=1234, flags=0, daemon_id="node1", daemon_type="target") + node2 = Snode( + id_digest=3456, flags=0, daemon_id="node2", daemon_type="target" + ) # Expected selection + node3 = Snode(id_digest=5678, flags=0, daemon_id="node3", daemon_type="target") + + return Smap( + tmap={"node1": node1, "node2": node2, "node3": node3}, + pmap={}, + proxy_si=self.mock_proxy, + ) + + def smap_without_nodes(self): + """Create a sample Smap with no available target nodes.""" + return Smap( + tmap={}, + pmap={}, + proxy_si=self.mock_proxy, + ) + + @patch("aistore.sdk.types.xoshiro256_hash") + @patch("aistore.sdk.types.get_digest") + def test_get_target_for_object(self, mock_get_digest, mock_xoshiro256_hash): + """Test that `get_target_for_object` correctly selects the node with the highest hash value.""" + + mock_get_digest.return_value = 1234 + mock_xoshiro256_hash.side_effect = [100, 200, 50] # Simulated hash values + + smap = self.smap_with_nodes() + result = smap.get_target_for_object("test_object") + + # Verify method calls + mock_get_digest.assert_called_once_with("test_object") + with self.subTest(msg="Check xoshiro256_hash calls"): + mock_xoshiro256_hash.assert_any_call(1234 ^ 1234) + mock_xoshiro256_hash.assert_any_call(3456 ^ 1234) + mock_xoshiro256_hash.assert_any_call(5678 ^ 1234) + + # Ensure correct node is selected + self.assertEqual( + result, + smap.tmap["node2"], + msg="Expected node2 to be selected as it has the highest hash value.", + ) + + @patch("aistore.sdk.types.get_digest") + def test_get_target_for_object_no_nodes(self, mock_get_digest): + """Test that `get_target_for_object` raises an error when no target nodes are available.""" + + mock_get_digest.return_value = 1234 + smap = self.smap_without_nodes() + + with self.assertRaises(AISError) as context: + smap.get_target_for_object("test_object") + + self.assertEqual( + str(context.exception), + "STATUS:500, MESSAGE:No available targets in the map. 
Total nodes: 0", + msg="Expected an error when no nodes are available.", + ) + + @patch("aistore.sdk.types.get_digest") + def test_get_target_for_object_all_nodes_in_maint(self, mock_get_digest): + """Test that `get_target_for_object` raises an error when all nodes are in maintenance mode.""" + + mock_get_digest.return_value = 1234 + + # Mock nodes in maintenance mode + node1 = create_autospec(Snode, instance=True) + node1.configure_mock(id_digest=5678, flags=4) + node1.in_maint_or_decomm.return_value = True + + node2 = create_autospec(Snode, instance=True) + node2.configure_mock(id_digest=6789, flags=4) + node2.in_maint_or_decomm.return_value = True + + smap = Smap( + tmap={"node1": node1, "node2": node2}, + pmap={}, + proxy_si=self.mock_proxy, + ) + + with self.assertRaises(AISError) as context: + smap.get_target_for_object("test_object") + + self.assertEqual( + str(context.exception), + "STATUS:500, MESSAGE:No available targets in the map. Total nodes: 2", + msg="Expected an error when all nodes are in maintenance mode.", + ) diff --git a/python/tests/unit/sdk/test_utils.py b/python/tests/unit/sdk/test_utils.py index b30eb436fc0..ff03a415c15 100644 --- a/python/tests/unit/sdk/test_utils.py +++ b/python/tests/unit/sdk/test_utils.py @@ -10,7 +10,7 @@ from requests import Response from aistore.sdk.authn.utils import parse_authn_error -from aistore.sdk.const import MSGPACK_CONTENT_TYPE, HEADER_CONTENT_TYPE +from aistore.sdk.const import MSGPACK_CONTENT_TYPE, HEADER_CONTENT_TYPE, XX_HASH_SEED from aistore.sdk.errors import ( AISError, ErrRemoteBckNotFound, @@ -30,6 +30,8 @@ read_file_bytes, validate_directory, validate_file, + xoshiro256_hash, + get_digest, ) from aistore.sdk.authn.errors import ( AuthNError, @@ -208,3 +210,26 @@ def test_decode_response_msgpack(self): res = decode_response(dict, mock_response) self.assertEqual(unpacked_content, res) + + @cases( + (123456789, 5288836854215336256), + (0, 1905207664160064169), + (2**64 - 1, 10227601306713020730), + ) + def test_xoshiro256_hash(self, test_case): + seed, expected_result = test_case + result = xoshiro256_hash(seed) + self.assertIsInstance(result, int) + self.assertGreaterEqual(result, 0) + self.assertLess(result, 2**64) # Ensure 64-bit overflow behavior + self.assertEqual(expected_result, result) + + @patch("aistore.sdk.utils.xxhash.xxh64") + def test_get_digest(self, mock_xxhash): + mock_xxhash.return_value.intdigest.return_value = 987654321 + name = "test_object" + result = get_digest(name) + mock_xxhash.assert_called_once_with( + seed=XX_HASH_SEED, input=name.encode("utf-8") + ) + self.assertEqual(result, 987654321)