@@ -67,7 +67,7 @@
"outputs": [],
"source": [
"llm = OctoAIEndpoint(\n",
-" model=\"llama-2-13b-chat-fp16\",\n",
+" model_name=\"llama-2-13b-chat-fp16\",\n",
" max_tokens=200,\n",
" presence_penalty=0,\n",
" temperature=0.1,\n",
@@ -83,9 +83,9 @@
"question = \"Who was Leonardo da Vinci?\"\n",
"\n",
-"llm_chain = LLMChain(prompt=prompt, llm=llm)\n",
+"chain = prompt | llm\n",
-"print(llm_chain.run(question))"
+"print(chain.invoke(question))"
]
},
{
@@ -35,7 +35,7 @@ There are two ways to set up parameters for myscale index.
```python
from langchain_community.vectorstores import MyScale, MyScaleSettings
-config = MyScaleSetting(host="<your-backend-url>", port=8443, ...)
+config = MyScaleSettings(host="<your-backend-url>", port=8443, ...)
index = MyScale(embedding_function, config)
index.add_documents(...)
```