langchain/docs/extras/integrations/text_embedding/bge_huggingface.ipynb

{
"cells": [
{
"cell_type": "markdown",
"id": "719619d3",
"metadata": {},
"source": [
"# BGE Hugging Face Embeddings\n",
"\n",
"This notebook shows how to use BGE Embeddings through Hugging Face"
]
},
{
"cell_type": "code",
"execution_count": 8,
"id": "f7a54279",
"metadata": {
"scrolled": true
},
"outputs": [],
"source": [
"# !pip install sentence_transformers"
]
},
{
"cell_type": "code",
"execution_count": 5,
"id": "9e1d5b6b",
"metadata": {},
"outputs": [],
"source": [
"from langchain.embeddings import HuggingFaceBgeEmbeddings\n",
"\n",
"model_name = \"BAAI/bge-small-en\"\n",
"model_kwargs = {'device': 'cpu'}\n",
"encode_kwargs = {'normalize_embeddings': False}\n",
"hf = HuggingFaceBgeEmbeddings(\n",
" model_name=model_name,\n",
" model_kwargs=model_kwargs,\n",
" encode_kwargs=encode_kwargs\n",
")"
]
},
{
"cell_type": "code",
"execution_count": 7,
"id": "e59d1a89",
"metadata": {},
"outputs": [],
"source": [
"embedding = hf.embed_query(\"hi this is harrison\")"
]
},
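{
"cell_type": "markdown",
"id": "a1b2c3d4",
"metadata": {},
"source": [
"`HuggingFaceBgeEmbeddings` also implements `embed_documents` for embedding a batch of texts at once, returning one vector per input string. The cell below is a minimal illustrative sketch; the sample strings are made up."
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "b4c5d6e7",
"metadata": {},
"outputs": [],
"source": [
"# Embed a small batch of documents (illustrative strings)\n",
"docs = [\n",
"    \"BGE is a family of embedding models released by BAAI.\",\n",
"    \"LangChain exposes them through HuggingFaceBgeEmbeddings.\",\n",
"]\n",
"doc_embeddings = hf.embed_documents(docs)\n",
"len(doc_embeddings), len(doc_embeddings[0])"
]
},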
{
"cell_type": "code",
"execution_count": null,
"id": "e596315f",
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.10.1"
}
},
"nbformat": 4,
"nbformat_minor": 5
}