Update Hugging Face Hub notebook (#7236)

Description: `flan-t5-xl` hangs, so the notebook now uses `flan-t5-xxl`. Tested all stabilityai LLMs; they all hang, so the StableLM section was removed from the tutorial. Temperature is set > 0 to prevent unintended determinism.
Issue: #3275 
Tag maintainer: @baskaryan
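
For reference, a minimal sketch of the call pattern the notebook lands on after this change. The `repo_id` and `model_kwargs` values come straight from the diff below; the prompt template, the example question, and the top-level `langchain` imports (circa langchain 0.0.x, mid-2023) are assumptions for illustration:

```python
# Minimal sketch of the updated notebook flow; requires HUGGINGFACEHUB_API_TOKEN
# to be set in the environment. Prompt text and question are illustrative.
from langchain import HuggingFaceHub, LLMChain, PromptTemplate

template = """Question: {question}

Answer: Let's think step by step."""
prompt = PromptTemplate(template=template, input_variables=["question"])

# flan-t5-xxl replaces flan-t5-xl (which hangs); temperature > 0 avoids
# the unintended determinism mentioned in the description.
repo_id = "google/flan-t5-xxl"
llm = HuggingFaceHub(repo_id=repo_id, model_kwargs={"temperature": 0.5, "max_length": 64})

llm_chain = LLMChain(prompt=prompt, llm=llm)
print(llm_chain.run("Who won the FIFA World Cup in the year 1994?"))
```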
Hashem Alsaket 2023-07-05 19:45:02 -05:00 committed by GitHub
parent 8afc8e6f5d
commit 6aa66fd2b0

@@ -82,9 +82,9 @@
 "source": [
 "from langchain import HuggingFaceHub\n",
 "\n",
-"repo_id = \"google/flan-t5-xl\" # See https://huggingface.co/models?pipeline_tag=text-generation&sort=downloads for some other options\n",
+"repo_id = \"google/flan-t5-xxl\" # See https://huggingface.co/models?pipeline_tag=text-generation&sort=downloads for some other options\n",
 "\n",
-"llm = HuggingFaceHub(repo_id=repo_id, model_kwargs={\"temperature\": 0, \"max_length\": 64})"
+"llm = HuggingFaceHub(repo_id=repo_id, model_kwargs={\"temperature\": 0.5, \"max_length\": 64})"
 ]
 },
 {
@@ -118,51 +118,6 @@
 "Below are some examples of models you can access through the Hugging Face Hub integration."
 ]
 },
-{
-"attachments": {},
-"cell_type": "markdown",
-"id": "4fa9337e-ccb5-4c52-9b7c-1653148bc256",
-"metadata": {},
-"source": [
-"### StableLM, by Stability AI\n",
-"\n",
-"See [Stability AI's](https://huggingface.co/stabilityai) organization page for a list of available models."
-]
-},
-{
-"cell_type": "code",
-"execution_count": null,
-"id": "36a1ce01-bd46-451f-8ee6-61c8f4bd665a",
-"metadata": {},
-"outputs": [],
-"source": [
-"repo_id = \"stabilityai/stablelm-tuned-alpha-3b\"\n",
-"# Others include stabilityai/stablelm-base-alpha-3b\n",
-"# as well as 7B parameter versions"
-]
-},
-{
-"cell_type": "code",
-"execution_count": null,
-"id": "b5654cea-60b0-4f40-ab34-06ba1eca810d",
-"metadata": {},
-"outputs": [],
-"source": [
-"llm = HuggingFaceHub(repo_id=repo_id, model_kwargs={\"temperature\": 0, \"max_length\": 64})"
-]
-},
-{
-"cell_type": "code",
-"execution_count": null,
-"id": "2f19d0dc-c987-433f-a8d6-b1214e8ee067",
-"metadata": {},
-"outputs": [],
-"source": [
-"# Reuse the prompt and question from above.\n",
-"llm_chain = LLMChain(prompt=prompt, llm=llm)\n",
-"print(llm_chain.run(question))"
-]
-},
 {
 "attachments": {},
 "cell_type": "markdown",
@@ -185,7 +140,7 @@
 "\n",
 "repo_id = \"databricks/dolly-v2-3b\"\n",
 "\n",
-"llm = HuggingFaceHub(repo_id=repo_id, model_kwargs={\"temperature\": 0, \"max_length\": 64})"
+"llm = HuggingFaceHub(repo_id=repo_id, model_kwargs={\"temperature\": 0.5, \"max_length\": 64})"
 ]
 },
 {
@@ -225,7 +180,7 @@
 "from langchain import HuggingFaceHub\n",
 "\n",
 "repo_id = \"Writer/camel-5b-hf\" # See https://huggingface.co/Writer for other options\n",
-"llm = HuggingFaceHub(repo_id=repo_id, model_kwargs={\"temperature\": 0, \"max_length\": 64})"
+"llm = HuggingFaceHub(repo_id=repo_id, model_kwargs={\"temperature\": 0.5, \"max_length\": 64})"
 ]
 },
 {