From 076dbb1a8f62e579859e67e96c14ac045d094cd2 Mon Sep 17 00:00:00 2001 From: Mateusz Szewczyk <139469471+MateuszOssGit@users.noreply.github.com> Date: Mon, 22 Jan 2024 16:22:03 +0100 Subject: [PATCH] docs: IBM watsonx.ai Use `invoke` instead of `__call__` (#16371) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - **Description:** Updating documentation of the IBM [watsonx.ai](https://www.ibm.com/products/watsonx-ai) LLM to use `invoke` instead of `__call__` - **Dependencies:** [ibm-watsonx-ai](https://pypi.org/project/ibm-watsonx-ai/), - **Tag maintainer:** Please make sure your PR is passing linting and testing before submitting. Run `make format`, `make lint` and `make test` to check this locally. ✅ The following warning message shows when I use the `run` and `__call__` methods: ``` LangChainDeprecationWarning: The function `__call__` was deprecated in LangChain 0.1.7 and will be removed in 0.2.0. Use invoke instead. warn_deprecated( ``` We need to update the documentation to use the `invoke` method. --- docs/docs/integrations/llms/watsonxllm.ipynb | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/docs/docs/integrations/llms/watsonxllm.ipynb b/docs/docs/integrations/llms/watsonxllm.ipynb index be5d0841cd..f0b142cf96 100644 --- a/docs/docs/integrations/llms/watsonxllm.ipynb +++ b/docs/docs/integrations/llms/watsonxllm.ipynb @@ -176,7 +176,7 @@ }, { "cell_type": "code", - "execution_count": 4, + "execution_count": 3, "id": "c7d80c05", "metadata": {}, "outputs": [], @@ -197,17 +197,18 @@ }, { "cell_type": "code", - "execution_count": 5, + "execution_count": 4, "id": "dc076c56", "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "'How many breeds of dog are there?'" + "{'topic': 'dog',\n", + " 'text': 'What is the name of the dog that is the most popular in the world?'}" ] }, - "execution_count": 5, + "execution_count": 4, "metadata": {}, "output_type": "execute_result" } ] }, { "cell_type": "code", "execution_count": 6, "id": "b1c6a2ec", "metadata": {}, "outputs": [], "source": [ 
"from langchain.chains import LLMChain\n", "\n", "llm_chain = LLMChain(prompt=prompt, llm=watsonx_llm)\n", - "llm_chain.run(\"dog\")" + "llm_chain.invoke(\"dog\")" ] }, { @@ -248,7 +249,7 @@ "source": [ "# Calling a single prompt\n", "\n", - "watsonx_llm(\"Who is man's best friend?\")" + "watsonx_llm.invoke(\"Who is man's best friend?\")" ] }, { @@ -327,7 +328,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.9.18" + "version": "3.10.13" } }, "nbformat": 4,