From d3e76a7d2901a96328bb7b50ec3c1124e467f14d Mon Sep 17 00:00:00 2001 From: maang-h Date: Sun, 11 Aug 2024 22:37:27 +0800 Subject: [PATCH 1/2] Standardize OpenAI Docs --- .../openai/langchain_openai/llms/base.py | 118 ++++++++++++++++-- 1 file changed, 109 insertions(+), 9 deletions(-) diff --git a/libs/partners/openai/langchain_openai/llms/base.py b/libs/partners/openai/langchain_openai/llms/base.py index e1d0938f01871..2213fa6882e96 100644 --- a/libs/partners/openai/langchain_openai/llms/base.py +++ b/libs/partners/openai/langchain_openai/llms/base.py @@ -605,21 +605,121 @@ def max_tokens_for_prompt(self, prompt: str) -> int: class OpenAI(BaseOpenAI): - """OpenAI large language models. + """OpenAI completion model integration. + + Setup: + Install ``langchain-openai`` and set environment variable ``OPENAI_API_KEY``. + + .. code-block:: bash + + pip install -U langchain-openai + export OPENAI_API_KEY="your-api-key" + + Key init args — completion params: + model: str + Name of OpenAI model to use. + temperature: float + Sampling temperature. + max_tokens: Optional[int] + Max number of tokens to generate. + logprobs: Optional[bool] + Whether to return logprobs. + stream_options: Dict + Configure streaming outputs, like whether to return token usage when + streaming (``{"include_usage": True}``). + + Key init args — client params: + timeout: Union[float, Tuple[float, float], Any, None] + Timeout for requests. + max_retries: int + Max number of retries. + api_key: Optional[str] + OpenAI API key. If not passed in will be read from env var OPENAI_API_KEY. + base_url: Optional[str] + Base URL for API requests. Only specify if using a proxy or service + emulator. + organization: Optional[str] + OpenAI organization ID. If not passed in will be read from env + var OPENAI_ORG_ID. + + See full list of supported init args and their descriptions in the params section. + + Instantiate: + .. 
code-block:: python - To use, you should have the environment variable ``OPENAI_API_KEY`` - set with your API key, or pass it as a named parameter to the constructor. + from langchain_openai import OpenAI - Any parameters that are valid to be passed to the openai.create call can be passed - in, even if not explicitly saved on this class. + llm = OpenAI( + model="gpt-3.5-turbo-instruct", + temperature=0, + max_retries=2, + # api_key="...", + # base_url="...", + # organization="...", + # other params... + ) - Example: + Invoke: .. code-block:: python - from langchain_openai import OpenAI + messages = [ + ( + "system", + "You are a helpful translator. Translate the user sentence to French.", + ), + ("human", "I love programming."), + ] + llm.invoke(messages) - model = OpenAI(model_name="gpt-3.5-turbo-instruct") - """ + .. code-block:: python + + "\nFrench: J'aime programmer." + + Stream: + .. code-block:: python + + for chunk in llm.stream(messages): + print(chunk) + + .. code-block:: python + + French + : + J + 'a + ime + programmer + . + + .. code-block:: python + + stream = llm.stream(messages) + full = next(stream) + for chunk in stream: + full += chunk + full + + .. code-block:: python + + "\nFrench: J'aime programmer." + + Async: + .. code-block:: python + + await llm.ainvoke(messages) + + # stream: + # async for chunk in llm.astream(messages): + # print(chunk) + + # batch: + # await llm.abatch([messages]) + + .. code-block:: python + + "\nFrench: J'aime programmer." 
+ + """ # noqa: E501 @classmethod def get_lc_namespace(cls) -> List[str]: From eea23a434dca31f23dc146e95ab756696069d7bf Mon Sep 17 00:00:00 2001 From: Chester Curme Date: Sun, 11 Aug 2024 16:18:42 -0400 Subject: [PATCH 2/2] update --- .../openai/langchain_openai/llms/base.py | 48 +++++++------------ 1 file changed, 16 insertions(+), 32 deletions(-) diff --git a/libs/partners/openai/langchain_openai/llms/base.py b/libs/partners/openai/langchain_openai/llms/base.py index 2213fa6882e96..0584b52a751fa 100644 --- a/libs/partners/openai/langchain_openai/llms/base.py +++ b/libs/partners/openai/langchain_openai/llms/base.py @@ -662,62 +662,46 @@ class OpenAI(BaseOpenAI): Invoke: .. code-block:: python - messages = [ - ( - "system", - "You are a helpful translator. Translate the user sentence to French.", - ), - ("human", "I love programming."), - ] - llm.invoke(messages) + input_text = "The meaning of life is " + llm.invoke(input_text) - .. code-block:: python + .. code-block:: none - "\nFrench: J'aime programmer." + "a philosophical question that has been debated by thinkers and scholars for centuries." Stream: .. code-block:: python - for chunk in llm.stream(messages): - print(chunk) + for chunk in llm.stream(input_text): + print(chunk, end="|") - .. code-block:: python + .. code-block:: none - French - : - J - 'a - ime - programmer - . + a| philosophical| question| that| has| been| debated| by| thinkers| and| scholars| for| centuries|. .. code-block:: python - stream = llm.stream(messages) - full = next(stream) - for chunk in stream: - full += chunk - full + "".join(llm.stream(input_text)) - .. code-block:: python + .. code-block:: none - "\nFrench: J'aime programmer." + "a philosophical question that has been debated by thinkers and scholars for centuries." Async: .. 
code-block:: python - await llm.ainvoke(messages) + await llm.ainvoke(input_text) # stream: - # async for chunk in llm.astream(messages): + # async for chunk in llm.astream(input_text): # print(chunk) # batch: - # await llm.abatch([messages]) + # await llm.abatch([input_text]) - .. code-block:: python + .. code-block:: none - "\nFrench: J'aime programmer." + "a philosophical question that has been debated by thinkers and scholars for centuries." """ # noqa: E501