From 7bc4c124777f420aaa68edf17b403fcff431a289 Mon Sep 17 00:00:00 2001
From: Harrison Chase
Date: Sun, 3 Dec 2023 11:06:57 -0800
Subject: [PATCH] delete stray test (#14200)

was added to an old path

also im not sure this is even really a test file? which is why i didnt move it
---
 tests/integration_tests/llms/test_tongyi.py | 97 ---------------------
 1 file changed, 97 deletions(-)
 delete mode 100644 tests/integration_tests/llms/test_tongyi.py

diff --git a/tests/integration_tests/llms/test_tongyi.py b/tests/integration_tests/llms/test_tongyi.py
deleted file mode 100644
index e5858cc02a..0000000000
--- a/tests/integration_tests/llms/test_tongyi.py
+++ /dev/null
@@ -1,97 +0,0 @@
-import sys
-from typing import Any, Dict, List, Union
-from queue import Queue
-
-from langchain.callbacks.base import BaseCallbackHandler
-from langchain.schema import AgentAction, AgentFinish, LLMResult
-from langchain.llms.tongyi import Tongyi
-import os
-
-os.environ['QIANFAN_AK']=''
-os.environ['QIANFAN_SK']=''
-
-
-STOP_ITEM = "###finish###"
-
-
-class StreamingHandler(BaseCallbackHandler):
-    """Callback handler for streaming. Only works with LLMs that support streaming."""
-
-    def __init__(self, q: Queue) -> None:
-        super().__init__()
-        self.q = q
-
-    def on_llm_start(
-        self, serialized: Dict[str, Any], prompts: List[str], **kwargs: Any
-    ) -> None:
-        """Run when LLM starts running."""
-        # print("on_llm_start", serialized, prompts)
-        with self.q.mutex:
-            self.q.queue.clear()
-
-    def on_llm_new_token(self, token: str, **kwargs: Any) -> None:
-        """Run on new LLM token. Only available when streaming is enabled."""
-        # print('======on_llm_new_token=====')
-        sys.stdout.write(token)
-        sys.stdout.flush()
-        self.q.put(token)
-
-    def on_llm_end(self, response: LLMResult, **kwargs: Any) -> None:
-        """Run when LLM ends running."""
-        self.q.put(STOP_ITEM)
-
-    def on_llm_error(
-        self, error: Union[Exception, KeyboardInterrupt], **kwargs: Any
-    ) -> None:
-        """Run when LLM errors."""
-
-    def on_chain_start(
-        self, serialized: Dict[str, Any], inputs: Dict[str, Any], **kwargs: Any
-    ) -> None:
-        """Run when chain starts running."""
-        # print('on_chain_start')
-
-    def on_chain_end(self, outputs: Dict[str, Any], **kwargs: Any) -> None:
-        """Run when chain ends running."""
-
-    def on_chain_error(
-        self, error: Union[Exception, KeyboardInterrupt], **kwargs: Any
-    ) -> None:
-        """Run when chain errors."""
-
-    def on_tool_start(
-        self, serialized: Dict[str, Any], input_str: str, **kwargs: Any
-    ) -> None:
-        """Run when tool starts running."""
-
-    def on_agent_action(self, action: AgentAction, **kwargs: Any) -> Any:
-        """Run on agent action."""
-        pass
-
-    def on_tool_end(self, output: str, **kwargs: Any) -> None:
-        """Run when tool ends running."""
-
-    def on_tool_error(
-        self, error: Union[Exception, KeyboardInterrupt], **kwargs: Any
-    ) -> None:
-        """Run when tool errors."""
-
-    def on_text(self, text: str, **kwargs: Any) -> None:
-        """Run on arbitrary text."""
-        # print('on_text',text)
-
-    def on_agent_finish(self, finish: AgentFinish, **kwargs: Any) -> None:
-        """Run on agent end."""
-
-
-streaming_callback_fn = StreamingHandler(q=Queue())
-
-llm = Tongyi(streaming=True,
-             callbacks=[streaming_callback_fn],
-             temperature=0.1,
-             model_name = 'qwen-plus')
-
-llm('write a Poem about Spring')
-
-
-