Mirror of https://github.com/hwchase17/langchain (synced 2024-11-10 01:10:59 +00:00)
docs[patch]: Update code that checks API keys (#25444)
Check whether the API key is already present in the environment before prompting for it.

Update:

```python
import getpass
import os

os.environ["DATABRICKS_HOST"] = "https://your-workspace.cloud.databricks.com"
os.environ["DATABRICKS_TOKEN"] = getpass.getpass("Enter your Databricks access token: ")
```

To:

```python
import getpass
import os

os.environ["DATABRICKS_HOST"] = "https://your-workspace.cloud.databricks.com"
if "DATABRICKS_TOKEN" not in os.environ:
    os.environ["DATABRICKS_TOKEN"] = getpass.getpass(
        "Enter your Databricks access token: "
    )
```

grit migration:

```
engine marzano(0.1)
language python

`os.environ[$Q] = getpass.getpass("$X")` as $CHECK where {
    $CHECK <: ! within if_statement(),
    $CHECK => `if $Q not in os.environ:\n    $CHECK`
}
```
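The same check-before-prompt pattern generalizes to any provider credential. Below is a minimal sketch of a reusable helper, assuming only the standard library; the helper name `_set_env_if_missing` is illustrative and not part of this PR:

```python
import getpass
import os


def _set_env_if_missing(var: str, prompt: str) -> None:
    """Prompt for a secret only if it is not already set in the environment."""
    if var not in os.environ:
        os.environ[var] = getpass.getpass(prompt)


# Mirrors the Databricks snippet above, without re-prompting on notebook re-runs.
os.environ["DATABRICKS_HOST"] = "https://your-workspace.cloud.databricks.com"
_set_env_if_missing("DATABRICKS_TOKEN", "Enter your Databricks access token: ")
```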
parent 60b65528c5
commit b7c070d437
```diff
@@ -61,7 +61,10 @@
 "import os\n",
 "\n",
 "os.environ[\"DATABRICKS_HOST\"] = \"https://your-workspace.cloud.databricks.com\"\n",
-"os.environ[\"DATABRICKS_TOKEN\"] = getpass.getpass(\"Enter your Databricks access token: \")"
+"if \"DATABRICKS_TOKEN\" not in os.environ:\n",
+"    os.environ[\"DATABRICKS_TOKEN\"] = getpass.getpass(\n",
+"        \"Enter your Databricks access token: \"\n",
+"    )"
 ]
 },
 {
```
```diff
@@ -40,12 +40,7 @@
 "execution_count": 1,
 "metadata": {},
 "outputs": [],
-"source": [
-"import getpass\n",
-"import os\n",
-"\n",
-"os.environ[\"FRIENDLI_TOKEN\"] = getpass.getpass(\"Friendi Personal Access Token: \")"
-]
+"source": ["import getpass\nimport os\n\nif \"FRIENDLI_TOKEN\" not in os.environ:\n    os.environ[\"FRIENDLI_TOKEN\"] = getpass.getpass(\"Friendi Personal Access Token: \")"]
 },
 {
 "cell_type": "markdown",
@@ -59,11 +54,7 @@
 "execution_count": 2,
 "metadata": {},
 "outputs": [],
-"source": [
-"from langchain_community.llms.friendli import Friendli\n",
-"\n",
-"llm = Friendli(model=\"mixtral-8x7b-instruct-v0-1\", max_tokens=100, temperature=0)"
-]
+"source": ["from langchain_community.llms.friendli import Friendli\n\nllm = Friendli(model=\"mixtral-8x7b-instruct-v0-1\", max_tokens=100, temperature=0)"]
 },
 {
 "cell_type": "markdown",
@@ -97,9 +88,7 @@
 "output_type": "execute_result"
 }
 ],
-"source": [
-"llm.invoke(\"Tell me a joke.\")"
-]
+"source": ["llm.invoke(\"Tell me a joke.\")"]
 },
 {
 "cell_type": "code",
@@ -118,9 +107,7 @@
 "output_type": "execute_result"
 }
 ],
-"source": [
-"llm.batch([\"Tell me a joke.\", \"Tell me a joke.\"])"
-]
+"source": ["llm.batch([\"Tell me a joke.\", \"Tell me a joke.\"])"]
 },
 {
 "cell_type": "code",
@@ -138,9 +125,7 @@
 "output_type": "execute_result"
 }
 ],
-"source": [
-"llm.generate([\"Tell me a joke.\", \"Tell me a joke.\"])"
-]
+"source": ["llm.generate([\"Tell me a joke.\", \"Tell me a joke.\"])"]
 },
 {
 "cell_type": "code",
@@ -158,10 +143,7 @@
 ]
 }
 ],
-"source": [
-"for chunk in llm.stream(\"Tell me a joke.\"):\n",
-"    print(chunk, end=\"\", flush=True)"
-]
+"source": ["for chunk in llm.stream(\"Tell me a joke.\"):\n    print(chunk, end=\"\", flush=True)"]
 },
 {
 "cell_type": "markdown",
@@ -186,9 +168,7 @@
 "output_type": "execute_result"
 }
 ],
-"source": [
-"await llm.ainvoke(\"Tell me a joke.\")"
-]
+"source": ["await llm.ainvoke(\"Tell me a joke.\")"]
 },
 {
 "cell_type": "code",
@@ -207,9 +187,7 @@
 "output_type": "execute_result"
 }
 ],
-"source": [
-"await llm.abatch([\"Tell me a joke.\", \"Tell me a joke.\"])"
-]
+"source": ["await llm.abatch([\"Tell me a joke.\", \"Tell me a joke.\"])"]
 },
 {
 "cell_type": "code",
@@ -227,9 +205,7 @@
 "output_type": "execute_result"
 }
 ],
-"source": [
-"await llm.agenerate([\"Tell me a joke.\", \"Tell me a joke.\"])"
-]
+"source": ["await llm.agenerate([\"Tell me a joke.\", \"Tell me a joke.\"])"]
 },
 {
 "cell_type": "code",
@@ -247,10 +223,7 @@
 ]
 }
 ],
-"source": [
-"async for chunk in llm.astream(\"Tell me a joke.\"):\n",
-"    print(chunk, end=\"\", flush=True)"
-]
+"source": ["async for chunk in llm.astream(\"Tell me a joke.\"):\n    print(chunk, end=\"\", flush=True)"]
 }
 ],
 "metadata": {
```
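Taken together, the Friendli cells touched above amount to the following end-to-end sketch. It assumes `langchain-community` is installed and only reuses the calls that already appear in the notebook (`invoke`, `batch`, `stream`, and their async counterparts); running it requires a valid `FRIENDLI_TOKEN`:

```python
import asyncio
import getpass
import os

from langchain_community.llms.friendli import Friendli

# Prompt for the token only when it is not already configured.
if "FRIENDLI_TOKEN" not in os.environ:
    os.environ["FRIENDLI_TOKEN"] = getpass.getpass("Friendi Personal Access Token: ")

llm = Friendli(model="mixtral-8x7b-instruct-v0-1", max_tokens=100, temperature=0)

# Synchronous usage, as in the notebook cells.
print(llm.invoke("Tell me a joke."))
print(llm.batch(["Tell me a joke.", "Tell me a joke."]))
for chunk in llm.stream("Tell me a joke."):
    print(chunk, end="", flush=True)


async def main() -> None:
    # Async counterparts of the same calls.
    print(await llm.ainvoke("Tell me a joke."))
    print(await llm.abatch(["Tell me a joke.", "Tell me a joke."]))
    async for chunk in llm.astream("Tell me a joke."):
        print(chunk, end="", flush=True)


asyncio.run(main())
```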