import logging
from urllib.parse import urlparse

from langchain_community.chat_models.mlflow import ChatMlflow
logger = logging.getLogger(__name__)


class ChatDatabricks(ChatMlflow):
    """`Databricks` chat models API.

    To use, you should have the ``mlflow`` python package installed.
    For more information, see https://mlflow.org/docs/latest/llms/deployments.

    Example:
        .. code-block:: python

            from langchain_community.chat_models import ChatDatabricks

            chat_model = ChatDatabricks(
                target_uri="databricks",
                endpoint="databricks-llama-2-70b-chat",
                temperature=0.1,
            )

            # single input invocation
            print(chat_model.invoke("What is MLflow?").content)

            # single input invocation with streaming response
            for chunk in chat_model.stream("What is MLflow?"):
                print(chunk.content, end="|")
    """

    target_uri: str = "databricks"
    """The target URI to use. Defaults to ``databricks``."""

    @property
    def _llm_type(self) -> str:
        """Return type of chat model."""
        return "databricks-chat"

    @property
    def _mlflow_extras(self) -> str:
        """Return the ``mlflow`` package extras required; none for Databricks."""
        return ""

    def _validate_uri(self) -> None:
        if self.target_uri == "databricks":
            return

        if urlparse(self.target_uri).scheme != "databricks":
            raise ValueError(
                "Invalid target URI. The target URI must be a valid databricks URI."
            )
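

# A rough sketch of how ``_validate_uri`` treats different target URIs.
# The values below are illustrative only; actually constructing the model
# also requires the ``mlflow`` package and reachable Databricks credentials.
#
#   ChatDatabricks(target_uri="databricks")             # accepted: literal default
#   ChatDatabricks(target_uri="databricks://my-scope")  # accepted: scheme is "databricks"
#   ChatDatabricks(target_uri="http://localhost:5000")  # rejected: raises ValueError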