diff --git a/js/src/run_trees.ts b/js/src/run_trees.ts index dc92cbac1..7c5bf01a8 100644 --- a/js/src/run_trees.ts +++ b/js/src/run_trees.ts @@ -9,6 +9,7 @@ import { import { RuntimeEnvironment, getEnvironmentVariable, + getLangSmithEnvironmentVariable, getRuntimeEnvironment, } from "./utils/env.js"; import { Client } from "./client.js"; @@ -231,7 +232,7 @@ export class RunTree implements BaseRun { id: uuid.v4(), run_type: "chain", project_name: - getEnvironmentVariable("LANGCHAIN_PROJECT") ?? + getLangSmithEnvironmentVariable("PROJECT") ?? getEnvironmentVariable("LANGCHAIN_SESSION") ?? // TODO: Deprecate "default", child_runs: [], diff --git a/python/langsmith/_internal/_serde.py b/python/langsmith/_internal/_serde.py index 1bf8865c1..d3c7d7d10 100644 --- a/python/langsmith/_internal/_serde.py +++ b/python/langsmith/_internal/_serde.py @@ -146,7 +146,7 @@ def dumps_json(obj: Any) -> bytes: logger.debug(f"Orjson serialization failed: {repr(e)}. Falling back to json.") result = json.dumps( obj, - default=_simple_default, + default=_serialize_json, ensure_ascii=True, ).encode("utf-8") try: diff --git a/python/langsmith/schemas.py b/python/langsmith/schemas.py index acedaf177..4afb0a5e0 100644 --- a/python/langsmith/schemas.py +++ b/python/langsmith/schemas.py @@ -523,6 +523,10 @@ class FeedbackSourceBase(BaseModel): """The type of the feedback source.""" metadata: Optional[Dict[str, Any]] = Field(default_factory=dict) """Additional metadata for the feedback source.""" + user_id: Optional[Union[UUID, str]] = None + """The user ID associated with the feedback source.""" + user_name: Optional[str] = None + """The user name associated with the feedback source.""" class APIFeedbackSource(FeedbackSourceBase): diff --git a/python/langsmith/wrappers/_openai.py b/python/langsmith/wrappers/_openai.py index 3230616d4..258ff88ee 100644 --- a/python/langsmith/wrappers/_openai.py +++ b/python/langsmith/wrappers/_openai.py @@ -3,6 +3,7 @@ import functools import logging from 
collections import defaultdict +from collections.abc import Mapping from typing import ( TYPE_CHECKING, Any, @@ -10,7 +11,6 @@ DefaultDict, Dict, List, - Mapping, Optional, Type, TypeVar, @@ -82,23 +82,28 @@ def _reduce_choices(choices: List[Choice]) -> dict: "content": "", } for c in reversed_choices: - if c.delta.role: + if hasattr(c, "delta") and getattr(c.delta, "role", None): message["role"] = c.delta.role break tool_calls: DefaultDict[int, List[ChoiceDeltaToolCall]] = defaultdict(list) for c in choices: - if c.delta.content: - message["content"] += c.delta.content - if c.delta.function_call: - if not message.get("function_call"): - message["function_call"] = {"name": "", "arguments": ""} - if c.delta.function_call.name: - message["function_call"]["name"] += c.delta.function_call.name - if c.delta.function_call.arguments: - message["function_call"]["arguments"] += c.delta.function_call.arguments - if c.delta.tool_calls: - for tool_call in c.delta.tool_calls: - tool_calls[c.index].append(tool_call) + if hasattr(c, "delta"): + if getattr(c.delta, "content", None): + message["content"] += c.delta.content + if getattr(c.delta, "function_call", None): + if not message.get("function_call"): + message["function_call"] = {"name": "", "arguments": ""} + name_ = getattr(c.delta.function_call, "name", None) + if name_: + message["function_call"]["name"] += name_ + arguments_ = getattr(c.delta.function_call, "arguments", None) + if arguments_: + message["function_call"]["arguments"] += arguments_ + if getattr(c.delta, "tool_calls", None): + tool_calls_list = c.delta.tool_calls + if tool_calls_list is not None: + for tool_call in tool_calls_list: + tool_calls[c.index].append(tool_call) if tool_calls: message["tool_calls"] = [None for _ in tool_calls.keys()] for index, tool_call_chunks in tool_calls.items(): @@ -108,22 +113,28 @@ def _reduce_choices(choices: List[Choice]) -> dict: "index": index, "id": next((c.id for c in tool_call_chunks if c.id), None), "type": next((c.type for c in tool_call_chunks if 
c.type), None), } for chunk in tool_call_chunks: - if chunk.function: + if getattr(chunk, "function", None): if not message["tool_calls"][index].get("function"): message["tool_calls"][index]["function"] = { "name": "", "arguments": "", } - if chunk.function.name: + name_ = getattr(chunk.function, "name", None) + if name_: fn_ = message["tool_calls"][index]["function"] - fn_["name"] += chunk.function.name - if chunk.function.arguments: + fn_["name"] += name_ + arguments_ = getattr(chunk.function, "arguments", None) + if arguments_: fn_ = message["tool_calls"][index]["function"] - fn_["arguments"] += chunk.function.arguments + fn_["arguments"] += arguments_ return { - "index": choices[0].index, + "index": getattr(choices[0], "index", 0) if choices else 0, "finish_reason": next( - (c.finish_reason for c in reversed_choices if c.finish_reason), + ( + c.finish_reason + for c in reversed_choices + if getattr(c, "finish_reason", None) + ), None, ), "message": message, diff --git a/python/tests/integration_tests/test_client.py b/python/tests/integration_tests/test_client.py index 3bcd9d04c..02bd6bab0 100644 --- a/python/tests/integration_tests/test_client.py +++ b/python/tests/integration_tests/test_client.py @@ -19,6 +19,7 @@ from pydantic import BaseModel from requests_toolbelt import MultipartEncoder, MultipartEncoderMonitor +from langsmith._internal._serde import dumps_json from langsmith.client import ID_TYPE, Client from langsmith.evaluation import aevaluate, evaluate from langsmith.schemas import ( @@ -1155,6 +1156,37 @@ def test_surrogates(): ) +def test_fallback_json_serialization(): + class Document(BaseModel): + content: str + + raw_surrogates = [ + ("Hello\ud83d\ude00", "Hello😀"), + ("Python\ud83d\udc0d", "Python🐍"), + ("Surrogate\ud834\udd1e", "Surrogate𝄞"), + ("Example\ud83c\udf89", "Example🎉"), + ("String\ud83c\udfa7", "String🎧"), + ("With\ud83c\udf08", "With🌈"), + ("Surrogates\ud83d\ude0e", "Surrogates😎"), + ("Embedded\ud83d\udcbb", "Embedded💻"), + 
("In\ud83c\udf0e", "In🌎"), + ("The\ud83d\udcd6", "The📖"), + ("Text\ud83d\udcac", "Text💬"), + ("收花🙄·到", "收花🙄·到"), + ] + pydantic_surrogates = [ + (Document(content=item), expected) for item, expected in raw_surrogates + ] + + for item, expected in raw_surrogates: + output = dumps_json(item).decode("utf8") + assert f'"{expected}"' == output + + for item, expected in pydantic_surrogates: + output = dumps_json(item).decode("utf8") + assert f'{{"content":"{expected}"}}' == output + + def test_runs_stats(): langchain_client = Client() # We always have stuff in the "default" project... diff --git a/python/tests/integration_tests/wrappers/test_openai.py b/python/tests/integration_tests/wrappers/test_openai.py index 9dc6afb50..87e939ef5 100644 --- a/python/tests/integration_tests/wrappers/test_openai.py +++ b/python/tests/integration_tests/wrappers/test_openai.py @@ -381,13 +381,13 @@ def test_parse_sync_api(): original_client = openai.Client() patched_client = wrap_openai(openai.Client(), tracing_extra={"client": ls_client}) - messages = [{"role": "user", "content": "Say 'Foo' then stop."}] + messages = [{"role": "user", "content": "Say 'foo' then stop."}] original = original_client.beta.chat.completions.parse( - messages=messages, model="gpt-3.5-turbo" + messages=messages, model="gpt-3.5-turbo", temperature=0, seed=42, max_tokens=3 ) patched = patched_client.beta.chat.completions.parse( - messages=messages, model="gpt-3.5-turbo" + messages=messages, model="gpt-3.5-turbo", temperature=0, seed=42, max_tokens=3 ) assert type(original) is type(patched) @@ -413,13 +413,13 @@ async def test_parse_async_api(): openai.AsyncClient(), tracing_extra={"client": ls_client} ) - messages = [{"role": "user", "content": "Say 'Foo' then stop."}] + messages = [{"role": "user", "content": "Say 'foo' then stop."}] original = await original_client.beta.chat.completions.parse( - messages=messages, model="gpt-3.5-turbo" + messages=messages, model="gpt-3.5-turbo", temperature=0, seed=42, 
max_tokens=3 ) patched = await patched_client.beta.chat.completions.parse( - messages=messages, model="gpt-3.5-turbo" + messages=messages, model="gpt-3.5-turbo", temperature=0, seed=42, max_tokens=3 ) assert type(original) is type(patched)