Commit f953bfa

Add _acall implementation & test
Wojciech-Rebisz committed Sep 6, 2024
1 parent a317199 commit f953bfa
Showing 2 changed files with 30 additions and 7 deletions.
libs/ibm/langchain_ibm/llms.py (27 changes: 20 additions & 7 deletions)
@@ -404,6 +404,20 @@ def _call(
         )
         return result.generations[0][0].text
 
+    async def _acall(
+        self,
+        prompt: str,
+        stop: Optional[List[str]] = None,
+        run_manager: Optional[AsyncCallbackManagerForLLMRun] = None,
+        **kwargs: Any,
+    ) -> str:
+        """Async version of the _call method."""
+
+        result = await self._agenerate(
+            prompts=[prompt], stop=stop, run_manager=run_manager, **kwargs
+        )
+        return result.generations[0][0].text
+
     def _generate(
         self,
         prompts: List[str],
@@ -459,16 +473,15 @@ async def _agenerate(
         run_manager: Optional[AsyncCallbackManagerForLLMRun] = None,
         **kwargs: Any,
     ) -> LLMResult:
-        """Run the LLM on the given prompts."""
+        """Async run the LLM on the given prompt and input."""
         params, kwargs = self._get_chat_params(stop=stop, **kwargs)
         params = self._validate_chat_params(params)
+        responses = [
+            await self.watsonx_model.agenerate(prompt=prompt, params=params, **kwargs)
+            for prompt in prompts
+        ]
 
-        agen = await self.watsonx_model._agenerate(
-            prompt=prompts, params=params, **kwargs
-        )
-        response = [p async for p in agen]
-
-        return self._create_llm_result(response)
+        return self._create_llm_result(responses)
 
     def _stream(
         self,
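For reference, a minimal usage sketch (not part of this commit) of how the new async path is reached through the public LangChain interface. The model id and project id below are placeholders, and real watsonx credentials are assumed to be available in the environment (for example a WATSONX_APIKEY variable).

import asyncio

from langchain_ibm import WatsonxLLM


async def main() -> None:
    # Placeholder model/project values; credentials are assumed to come from
    # the environment (e.g. WATSONX_APIKEY).
    llm = WatsonxLLM(
        model_id="<model-id>",
        url="https://us-south.ml.cloud.ibm.com",
        project_id="<project-id>",
    )
    # ainvoke() is the public async entry point and exercises the async
    # generation path filled in by this commit.
    text = await llm.ainvoke("What color is the grass?")
    # agenerate() accepts a batch of prompts and returns an LLMResult.
    result = await llm.agenerate(["What color is the sky?"])
    print(text)
    print(result.generations[0][0].text)


asyncio.run(main())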
libs/ibm/tests/integration_tests/test_llms.py (10 changes: 10 additions & 0 deletions)
@@ -422,6 +422,16 @@ async def test_watsonx_ainvoke() -> None:
     assert isinstance(response, str)
 
 
+async def test_watsonx_acall() -> None:
+    watsonxllm = WatsonxLLM(
+        model_id=MODEL_ID,
+        url="https://us-south.ml.cloud.ibm.com",  # type: ignore[arg-type]
+        project_id=WX_PROJECT_ID,
+    )
+    response = await watsonxllm._acall("what is the color of the grass?")
+    assert 'green' in response.lower()
+
+
 async def test_watsonx_agenerate() -> None:
     watsonxllm = WatsonxLLM(
         model_id=MODEL_ID,
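A quick sketch (also not part of the commit) of running only the new integration test from Python. It assumes live watsonx credentials are configured, since the test makes a real network call: the test module's MODEL_ID and WX_PROJECT_ID constants plus an API key (typically WATSONX_APIKEY) are expected to come from the environment, and an async-capable pytest setup (e.g. pytest-asyncio) is assumed.

import pytest

# Select just the new async test by name.
pytest.main([
    "libs/ibm/tests/integration_tests/test_llms.py",
    "-k", "test_watsonx_acall",
])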
