From 0cf890eed4419453a05ed74b4af803f48d05f48c Mon Sep 17 00:00:00 2001
From: Harrison Chase
Date: Thu, 27 Apr 2023 09:02:39 -0700
Subject: [PATCH] bump version to 151 (#3658)

---
 langchain/utilities/powerbi.py | 9 ++++++---
 pyproject.toml                 | 8 ++++----
 2 files changed, 10 insertions(+), 7 deletions(-)

diff --git a/langchain/utilities/powerbi.py b/langchain/utilities/powerbi.py
index 89247f37..e1570319 100644
--- a/langchain/utilities/powerbi.py
+++ b/langchain/utilities/powerbi.py
@@ -4,13 +4,11 @@ from __future__ import annotations
 
 import logging
 import os
-from typing import Any, Dict, Iterable, List, Optional, Union
+from typing import TYPE_CHECKING, Any, Dict, Iterable, List, Optional, Union
 
 import aiohttp
 import requests
 from aiohttp import ServerTimeoutError
-from azure.core.credentials import TokenCredential
-from azure.core.exceptions import ClientAuthenticationError
 from pydantic import BaseModel, Field, root_validator
 from requests.exceptions import Timeout
 
@@ -20,6 +18,9 @@ _LOGGER = logging.getLogger(__name__)
 
 BASE_URL = os.getenv("POWERBI_BASE_URL", "https://api.powerbi.com/v1.0/myorg/datasets/")
 
+if TYPE_CHECKING:
+    from azure.core.credentials import TokenCredential
+
 
 class PowerBIDataset(BaseModel):
     """Create PowerBI engine from dataset ID and credential or token.
@@ -62,6 +63,8 @@ class PowerBIDataset(BaseModel):
     @property
     def headers(self) -> Dict[str, str]:
         """Get the token."""
+        from azure.core.exceptions import ClientAuthenticationError
+
         token = None
         if self.token:
             token = self.token
diff --git a/pyproject.toml b/pyproject.toml
index 0dc6e005..52122aee 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "langchain"
-version = "0.0.150"
+version = "0.0.151"
 description = "Building applications with LLMs through composability"
 authors = []
 license = "MIT"
@@ -17,7 +17,7 @@ SQLAlchemy = ">1.3,<3"
 requests = "^2"
 PyYAML = ">=5.4.1"
 numpy = "^1"
-azure-core = {version = "^1.26.4"}
+azure-core = {version = "^1.26.4", optional=true}
 tqdm = {version = ">=4.48.0", optional = true}
 openapi-schema-pydantic = "^1.2"
 faiss-cpu = {version = "^1", optional = true}
@@ -149,8 +149,8 @@ qdrant = ["qdrant-client"]
 openai = ["openai"]
 cohere = ["cohere"]
 embeddings = ["sentence-transformers"]
-azure = ["azure-identity", "azure-cosmos", "openai"]
-all = ["anthropic", "cohere", "openai", "nlpcloud", "huggingface_hub", "jina", "manifest-ml", "elasticsearch", "opensearch-py", "google-search-results", "faiss-cpu", "sentence-transformers", "transformers", "spacy", "nltk", "wikipedia", "beautifulsoup4", "tiktoken", "torch", "jinja2", "pinecone-client", "pinecone-text", "weaviate-client", "redis", "google-api-python-client", "wolframalpha", "qdrant-client", "tensorflow-text", "pypdf", "networkx", "nomic", "aleph-alpha-client", "deeplake", "pgvector", "psycopg2-binary", "boto3", "pyowm", "pytesseract", "html2text", "atlassian-python-api", "gptcache", "duckduckgo-search", "arxiv", "azure-identity", "clickhouse-connect", "azure-cosmos"]
+azure = ["azure-identity", "azure-cosmos", "openai", "azure-core"]
+all = ["anthropic", "cohere", "openai", "nlpcloud", "huggingface_hub", "jina", "manifest-ml", "elasticsearch", "opensearch-py", "google-search-results", "faiss-cpu", "sentence-transformers", "transformers", "spacy", "nltk", "wikipedia", "beautifulsoup4", "tiktoken", "torch", "jinja2", "pinecone-client", "pinecone-text", "weaviate-client", "redis", "google-api-python-client", "wolframalpha", "qdrant-client", "tensorflow-text", "pypdf", "networkx", "nomic", "aleph-alpha-client", "deeplake", "pgvector", "psycopg2-binary", "boto3", "pyowm", "pytesseract", "html2text", "atlassian-python-api", "gptcache", "duckduckgo-search", "arxiv", "azure-identity", "clickhouse-connect", "azure-cosmos", "lancedb", "lark"]
 
 [tool.ruff]
 select = [