Mirror of https://github.com/hwchase17/langchain (synced 2024-11-10 01:10:59 +00:00)
Commit f92006de3c
0.2rc migrations

- [x] Move memory
- [x] Move remaining retrievers
- [x] graph_qa chains
- [x] Some dependency from evaluation code, potentially on math utils
- [x] Move the openapi chain from `langchain.chains.api.openapi` to `langchain_community.chains.openapi`
- [x] Migrate `langchain.chains.ernie_functions` to `langchain_community.chains.ernie_functions`
- [x] Migrate `langchain/chains/llm_requests.py` to `langchain_community.chains.llm_requests`
- [x] Move `langchain_community.cross_encoders.base:BaseCrossEncoder` -> `langchain_community.retrievers.document_compressors.cross_encoder:BaseCrossEncoder` (the namespace is not ideal, but it needs to be moved to `langchain` to avoid circular deps)
- [x] Unit tests, langchain: add `pytest.mark.community` to some unit tests that will stay in langchain
- [x] Unit tests, community: move unit tests that depend on community to community
- [x] Move integration tests that depend on community to community
- [x] mypy checks

Other todo

- [x] Make deprecation warnings not noisy (need to use `warn_deprecated` and check that things are implemented properly); see the import-shim sketch after this list
- [x] Update deprecation messages with a timeline for code removal (we likely won't actually remove things until the 0.4 release), which will give people more time to transition their code
- [ ] Add information to the deprecation warning showing users how to migrate their code base using langchain-cli
- [ ] Remove any unnecessary requirements in langchain (e.g., is SQLAlchemy required?)

---------

Co-authored-by: Erick Friis <erick@langchain.dev>
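Moves like the ones above typically keep the old import path alive behind a deprecation shim. The snippet below is a minimal sketch of that pattern using a PEP 562 module-level `__getattr__`; the `LLMRequestsChain` symbol and the two paths come from the checklist above, but the shim body is illustrative, not the repo's actual `_api` machinery.

```python
# Illustrative sketch only -- not the repo's actual shim. Shows the generic
# pattern for keeping an old import path working after a module moves:
# a module-level __getattr__ (PEP 562) re-exports the symbol from its new
# home and emits a single, targeted DeprecationWarning.
# Imagine this living at the old path, e.g. langchain/chains/llm_requests.py.
import warnings
from typing import Any


def __getattr__(name: str) -> Any:
    if name == "LLMRequestsChain":  # the symbol that moved to langchain_community
        warnings.warn(
            "Importing LLMRequestsChain from langchain.chains.llm_requests is "
            "deprecated; import it from langchain_community.chains.llm_requests "
            "instead.",
            DeprecationWarning,
            stacklevel=2,
        )
        from langchain_community.chains.llm_requests import LLMRequestsChain

        return LLMRequestsChain
    raise AttributeError(f"module {__name__!r} has no attribute {name!r}")
```

Because the warning fires only when the moved symbol is actually accessed, old code keeps working while users see exactly one actionable message per import site, which is what the "not noisy" item above is after.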
48 lines · 1.6 KiB · Python
```python
import pytest

from langchain_core.utils import get_from_env

from langchain_community.agent_toolkits import PowerBIToolkit, create_pbi_agent
from langchain_community.chat_models import ChatOpenAI
from langchain_community.utilities.powerbi import PowerBIDataset


def azure_installed() -> bool:
    """Return True if the optional Azure packages the test needs are importable."""
    try:
        from azure.core.credentials import TokenCredential  # noqa: F401
        from azure.identity import DefaultAzureCredential  # noqa: F401

        return True
    except Exception as e:
        print(f"azure not installed, skipping test {e}")  # noqa: T201
        return False


@pytest.mark.skipif(not azure_installed(), reason="requires azure package")
def test_daxquery() -> None:
    from azure.identity import DefaultAzureCredential

    # Connection details come from the environment rather than being hardcoded.
    DATASET_ID = get_from_env("", "POWERBI_DATASET_ID")
    TABLE_NAME = get_from_env("", "POWERBI_TABLE_NAME")
    NUM_ROWS = get_from_env("", "POWERBI_NUMROWS")

    # A cheaper model drives the agent loop; a stronger model backs the toolkit.
    fast_llm = ChatOpenAI(
        temperature=0.5, max_tokens=1000, model_name="gpt-3.5-turbo", verbose=True
    )  # type: ignore[call-arg]
    smart_llm = ChatOpenAI(
        temperature=0, max_tokens=100, model_name="gpt-4", verbose=True
    )  # type: ignore[call-arg]

    toolkit = PowerBIToolkit(
        powerbi=PowerBIDataset(
            dataset_id=DATASET_ID,
            table_names=[TABLE_NAME],
            credential=DefaultAzureCredential(),
        ),
        llm=smart_llm,
    )

    agent_executor = create_pbi_agent(llm=fast_llm, toolkit=toolkit, verbose=True)

    # The agent should answer a row-count question with the expected value.
    output = agent_executor.run(f"How many rows are in the table, {TABLE_NAME}")
    assert NUM_ROWS in output
```
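As a usage note: the test above is gated on both the Azure SDK and live Power BI and OpenAI access. A hypothetical local invocation might look like the sketch below; the placeholder values are illustrative, and the environment variable names are the ones the test reads via `get_from_env`.

```python
# Hypothetical local run of the test above; replace the placeholders with a
# real dataset ID, table name, and expected row count before running.
import os

import pytest

os.environ["POWERBI_DATASET_ID"] = "<dataset-guid>"
os.environ["POWERBI_TABLE_NAME"] = "<table-name>"
os.environ["POWERBI_NUMROWS"] = "<expected-row-count>"
# ChatOpenAI also expects OPENAI_API_KEY to be set, and DefaultAzureCredential
# needs a working Azure login (environment variables, managed identity, or CLI).

pytest.main(["-k", "test_daxquery", "-s"])
```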