{
 "cells": [
  {
   "attachments": {},
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# OctoAI Compute Service\n",
    "\n",
    "This example goes over how to use LangChain to interact with `OctoAI` [LLM endpoints](https://octoai.cloud/templates).\n",
    "\n",
    "## Environment setup\n",
    "\n",
    "To run our example app, there are four simple steps to take:\n",
    "\n",
    "1. Clone the MPT-7B demo template to your OctoAI account by visiting <https://octoai.cloud/templates/mpt-7b-demo> and clicking \"Clone Template.\"\n",
    "   1. If you want to use a different LLM, you can also containerize the model and create a custom OctoAI endpoint yourself by following [Build a Container from Python](doc:create-custom-endpoints-from-python-code) and [Create a Custom Endpoint from a Container](doc:create-custom-endpoints-from-a-container).\n",
    "\n",
    "2. Paste your endpoint URL in the code cell below.\n",
    "\n",
    "3. Get an API token from [your OctoAI account page](https://octoai.cloud/settings).\n",
    "\n",
    "4. Paste your API token in the code cell below."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "metadata": {},
   "outputs": [],
   "source": [
    "import os\n",
    "\n",
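    "# Set your OctoAI API token and endpoint URL (see the setup steps above)\n",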
"os.environ[\"OCTOAI_API_TOKEN\"] = \"OCTOAI_API_TOKEN\"\n",
|
|
"os.environ[\"ENDPOINT_URL\"] = \"https://mpt-7b-demo-kk0powt97tmb.octoai.cloud/generate\""
|
|
]
|
|
},
|
|
{
|
|
"cell_type": "code",
|
|
"execution_count": 14,
|
|
"metadata": {},
|
|
"outputs": [],
|
|
"source": [
|
|
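    "# OctoAIEndpoint is LangChain's LLM wrapper for OctoAI Compute Service endpoints\n",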
"from langchain.llms.octoai_endpoint import OctoAIEndpoint\n",
|
|
"from langchain import PromptTemplate, LLMChain"
|
|
]
|
|
},
|
|
{
|
|
"cell_type": "code",
|
|
"execution_count": 15,
|
|
"metadata": {},
|
|
"outputs": [],
|
|
"source": [
|
|
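    "# An instruction/response prompt; {question} is filled in when the chain runs\n",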
"template = \"\"\"Below is an instruction that describes a task. Write a response that appropriately completes the request.\\n Instruction:\\n{question}\\n Response: \"\"\"\n",
|
|
"prompt = PromptTemplate(template=template, input_variables=[\"question\"])"
|
|
]
|
|
},
|
|
{
|
|
"cell_type": "code",
|
|
"execution_count": 30,
|
|
"metadata": {},
|
|
"outputs": [],
|
|
"source": [
|
|
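    "# model_kwargs are forwarded to the OctoAI endpoint as generation parameters\n",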
"llm = OctoAIEndpoint(\n",
|
|
" model_kwargs={\n",
|
|
" \"max_new_tokens\": 200,\n",
|
|
" \"temperature\": 0.75,\n",
|
|
" \"top_p\": 0.95,\n",
|
|
" \"repetition_penalty\": 1,\n",
|
|
" \"seed\": None,\n",
|
|
" \"stop\": [],\n",
|
|
" },\n",
|
|
")"
|
|
]
|
|
},
|
|
{
|
|
"cell_type": "code",
|
|
"execution_count": 31,
|
|
"metadata": {},
|
|
"outputs": [
|
|
{
|
|
"data": {
|
|
"text/plain": [
|
|
"'\\nLeonardo da Vinci was an Italian polymath and painter regarded by many as one of the greatest painters of all time. He is best known for his masterpieces including Mona Lisa, The Last Supper, and The Virgin of the Rocks. He was a draftsman, sculptor, architect, and one of the most important figures in the history of science. Da Vinci flew gliders, experimented with water turbines and windmills, and invented the catapult and a joystick-type human-powered aircraft control. He may have pioneered helicopters. As a scholar, he was interested in anatomy, geology, botany, engineering, mathematics, and astronomy.\\nOther painters and patrons claimed to be more talented, but Leonardo da Vinci was an incredibly productive artist, sculptor, engineer, anatomist, and scientist.'"
|
|
]
|
|
},
|
|
"execution_count": 31,
|
|
"metadata": {},
|
|
"output_type": "execute_result"
|
|
}
|
|
],
|
|
"source": [
|
|
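    "# Build the chain from the prompt and LLM, then run it; the generation is returned as a string\n",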
"question = \"Who was leonardo davinci?\"\n",
|
|
"\n",
|
|
"llm_chain = LLMChain(prompt=prompt, llm=llm)\n",
|
|
"\n",
|
|
"llm_chain.run(question)"
|
|
]
|
|
}
|
|
],
|
|
"metadata": {
|
|
"kernelspec": {
|
|
"display_name": "langchain",
|
|
"language": "python",
|
|
"name": "python3"
|
|
},
|
|
"language_info": {
|
|
"codemirror_mode": {
|
|
"name": "ipython",
|
|
"version": 3
|
|
},
|
|
"file_extension": ".py",
|
|
"mimetype": "text/x-python",
|
|
"name": "python",
|
|
"nbconvert_exporter": "python",
|
|
"pygments_lexer": "ipython3",
|
|
"version": "3.9.16"
|
|
},
|
|
"orig_nbformat": 4,
|
|
"vscode": {
|
|
"interpreter": {
|
|
"hash": "97697b63fdcee0a640856f91cb41326ad601964008c341809e43189d1cab1047"
|
|
}
|
|
}
|
|
},
|
|
"nbformat": 4,
|
|
"nbformat_minor": 2
|
|
}
|