Mirror of https://github.com/hwchase17/langchain (synced 2024-11-10 01:10:59 +00:00)
core[patch]: Add UP (upgrade) ruff rules (#25358)
This commit is contained in:
parent
294f7fcb38
commit
ee98da4f4e
@@ -137,7 +137,7 @@ class Blob(BaseMedia):
     def as_string(self) -> str:
         """Read data as a string."""
         if self.data is None and self.path:
-            with open(str(self.path), "r", encoding=self.encoding) as f:
+            with open(str(self.path), encoding=self.encoding) as f:
                 return f.read()
         elif isinstance(self.data, bytes):
             return self.data.decode(self.encoding)
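This is ruff's UP015 (redundant-open-modes): `"r"` is already the default mode of the built-in `open()`, so the explicit argument can be dropped. A minimal sketch with a hypothetical throwaway file:

```python
from pathlib import Path

path = Path("example.txt")  # hypothetical file for illustration
path.write_text("hello", encoding="utf-8")

# Before (flagged by UP015): with open(path, "r", encoding="utf-8") as f:
# After: "r" is open()'s default mode, so it can be omitted.
with open(path, encoding="utf-8") as f:
    assert f.read() == "hello"
```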
@@ -39,7 +39,7 @@ class OutputParserException(ValueError, LangChainException):
         llm_output: Optional[str] = None,
         send_to_llm: bool = False,
     ):
-        super(OutputParserException, self).__init__(error)
+        super().__init__(error)
         if send_to_llm:
             if observation is None or llm_output is None:
                 raise ValueError(
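The `super(OutputParserException, self)` → `super()` change is ruff's UP008 (super-call-with-parameters): in Python 3, zero-argument `super()` resolves the class and instance from the enclosing scope. A minimal sketch:

```python
class Base:
    def __init__(self, msg: str) -> None:
        self.msg = msg


class Child(Base):
    def __init__(self, msg: str) -> None:
        # Before: super(Child, self).__init__(msg)
        # After: the zero-argument form is equivalent in Python 3.
        super().__init__(msg)


assert Child("hi").msg == "hi"
```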
@@ -590,7 +590,7 @@ class GraphVectorStore(VectorStore):
                 "'mmr' or 'traversal'."
             )

-    def as_retriever(self, **kwargs: Any) -> "GraphVectorStoreRetriever":
+    def as_retriever(self, **kwargs: Any) -> GraphVectorStoreRetriever:
         """Return GraphVectorStoreRetriever initialized from this GraphVectorStore.

         Args:
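Dropping the quotes from the `"GraphVectorStoreRetriever"` return annotation is ruff's UP037 (quoted-annotation). This is safe when annotations are never evaluated at runtime, e.g. when the module enables PEP 563 postponed evaluation, as the touched modules presumably do. A sketch under that assumption:

```python
from __future__ import annotations  # PEP 563: annotations stay unevaluated strings


class Node:
    # Before: def clone(self) -> "Node":
    # After: the forward reference works unquoted because the annotation
    # is not evaluated at import time.
    def clone(self) -> Node:
        return Node()
```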
@@ -120,7 +120,7 @@ def create_base_retry_decorator(
     max_seconds = 10
     # Wait 2^x * 1 second between each retry starting with
     # 4 seconds, then up to 10 seconds, then 10 seconds afterwards
-    retry_instance: "retry_base" = retry_if_exception_type(error_types[0])
+    retry_instance: retry_base = retry_if_exception_type(error_types[0])
     for error in error_types[1:]:
         retry_instance = retry_instance | retry_if_exception_type(error)
     return retry(
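Same UP037 unquoting here, and the line also shows tenacity's predicate composition: retry conditions combine with `|`. A hedged sketch of the surrounding pattern, with hypothetical error types standing in for `error_types`:

```python
from tenacity import (
    retry,
    retry_base,
    retry_if_exception_type,
    stop_after_attempt,
    wait_exponential,
)

error_types = [TimeoutError, ConnectionError]  # hypothetical retryable errors

# Fold the list of exception types into one combined retry predicate.
retry_instance: retry_base = retry_if_exception_type(error_types[0])
for error in error_types[1:]:
    retry_instance = retry_instance | retry_if_exception_type(error)


@retry(
    reraise=True,
    stop=stop_after_attempt(3),
    # Wait 2^x * 1 second between retries, clamped to the 4-10 second window.
    wait=wait_exponential(multiplier=1, min=4, max=10),
    retry=retry_instance,
)
def flaky_call() -> str:
    return "ok"


assert flaky_call() == "ok"
```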
@@ -576,7 +576,7 @@ class _StringImageMessagePromptTemplate(BaseMessagePromptTemplate):
         Returns:
             A new instance of this class.
         """
-        with open(str(template_file), "r") as f:
+        with open(str(template_file)) as f:
             template = f.read()
         return cls.from_template(template, input_variables=input_variables, **kwargs)

@@ -173,7 +173,7 @@ def _load_prompt_from_file(
         with open(file_path, encoding=encoding) as f:
             config = json.load(f)
     elif file_path.suffix.endswith((".yaml", ".yml")):
-        with open(file_path, mode="r", encoding=encoding) as f:
+        with open(file_path, encoding=encoding) as f:
             config = yaml.safe_load(f)
     else:
         raise ValueError(f"Got unsupported file type {file_path.suffix}")
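UP015 again (`mode="r"` is also the default). For context, the enclosing helper dispatches on the file suffix; a condensed sketch of that pattern, with the `.json` condition assumed from the surrounding hunk:

```python
import json
from pathlib import Path

import yaml  # PyYAML


def load_config(file_path: Path, encoding: str = "utf-8") -> dict:
    # Sketch of the dispatch in _load_prompt_from_file; the real helper
    # lives in langchain_core and builds a prompt from the loaded config.
    if file_path.suffix == ".json":  # assumed condition
        with open(file_path, encoding=encoding) as f:
            return json.load(f)
    elif file_path.suffix.endswith((".yaml", ".yml")):
        with open(file_path, encoding=encoding) as f:
            return yaml.safe_load(f)
    raise ValueError(f"Got unsupported file type {file_path.suffix}")
```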
@@ -231,7 +231,7 @@ class PromptTemplate(StringPromptTemplate):
         Returns:
             The prompt loaded from the file.
         """
-        with open(str(template_file), "r", encoding=encoding) as f:
+        with open(str(template_file), encoding=encoding) as f:
             template = f.read()
         if input_variables:
             warnings.warn(
@@ -3208,8 +3208,7 @@ class RunnableSequence(RunnableSerializable[Input, Output]):
             else:
                 final_pipeline = step.transform(final_pipeline, config)

-        for output in final_pipeline:
-            yield output
+        yield from final_pipeline

     async def _atransform(
         self,
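The for/yield collapse is ruff's UP028 (yield-in-for-loop). `yield from` delegates to the inner iterable and, unlike the manual loop, also forwards `.send()` and `.throw()` when delegating to a sub-generator. A minimal sketch:

```python
from typing import Iterator, List


def manual(items: List[int]) -> Iterator[int]:
    for item in items:  # the pattern UP028 flags
        yield item


def delegated(items: List[int]) -> Iterator[int]:
    yield from items  # equivalent, and forwards send()/throw() to sub-generators


assert list(manual([1, 2])) == list(delegated([1, 2])) == [1, 2]
```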
@@ -4577,13 +4576,12 @@ class RunnableLambda(Runnable[Input, Output]):
         **kwargs: Optional[Any],
     ) -> Iterator[Output]:
         if hasattr(self, "func"):
-            for output in self._transform_stream_with_config(
+            yield from self._transform_stream_with_config(
                 input,
                 self._transform,
                 self._config(config, self.func),
                 **kwargs,
-            ):
-                yield output
+            )
         else:
             raise TypeError(
                 "Cannot stream a coroutine function synchronously."
@@ -216,7 +216,7 @@ class RunnablePassthrough(RunnableSerializable[Other, Other]):
                 Union[Runnable[Dict[str, Any], Any], Callable[[Dict[str, Any]], Any]],
             ],
         ],
-    ) -> "RunnableAssign":
+    ) -> RunnableAssign:
         """Merge the Dict input with the output produced by the mapping argument.

         Args:
@@ -147,7 +147,7 @@ class RunnableRetry(RunnableBindingBase[Input, Output]):
         retry_state: RetryCallState,
     ) -> RunnableConfig:
         attempt = retry_state.attempt_number
-        tag = "retry:attempt:{}".format(attempt) if attempt > 1 else None
+        tag = f"retry:attempt:{attempt}" if attempt > 1 else None
         return patch_config(config, callbacks=run_manager.get_child(tag))

     def _patch_config_list(
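`.format()` → f-string is ruff's UP032 (f-string): when every replacement field is a simple expression, the call can be inlined. A sketch:

```python
attempt = 2

# Before (UP032): "retry:attempt:{}".format(attempt) if attempt > 1 else None
# After: the f-string reads inline and avoids a method call.
tag = f"retry:attempt:{attempt}" if attempt > 1 else None
assert tag == "retry:attempt:2"
```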
@@ -110,7 +110,7 @@ class Tool(BaseTool):
         self, name: str, func: Optional[Callable], description: str, **kwargs: Any
     ) -> None:
         """Initialize tool."""
-        super(Tool, self).__init__(  # type: ignore[call-arg]
+        super().__init__(  # type: ignore[call-arg]
             name=name, func=func, description=description, **kwargs
         )

@@ -153,7 +153,7 @@ def parse_tag(template: str, l_del: str, r_del: str) -> Tuple[Tuple[str, str], str]:
     try:
         tag, template = template.split(r_del, 1)
     except ValueError:
-        raise ChevronError("unclosed tag " "at line {0}".format(_CURRENT_LINE))
+        raise ChevronError("unclosed tag " f"at line {_CURRENT_LINE}")

     # Find the type meaning of the first character
     tag_type = tag_types.get(tag[0], "variable")
@@ -174,7 +174,7 @@ def parse_tag(template: str, l_del: str, r_del: str) -> Tuple[Tuple[str, str], str]:
     # Otherwise we should complain
     else:
         raise ChevronError(
-            "unclosed set delimiter tag\n" "at line {0}".format(_CURRENT_LINE)
+            "unclosed set delimiter tag\n" f"at line {_CURRENT_LINE}"
         )

     # If we might be a no html escape tag
@@ -281,16 +281,16 @@ def tokenize(
             last_section = open_sections.pop()
         except IndexError:
             raise ChevronError(
-                'Trying to close tag "{0}"\n'
+                f'Trying to close tag "{tag_key}"\n'
                 "Looks like it was not opened.\n"
-                "line {1}".format(tag_key, _CURRENT_LINE + 1)
+                f"line {_CURRENT_LINE + 1}"
             )
         if tag_key != last_section:
             # Otherwise we need to complain
             raise ChevronError(
-                'Trying to close tag "{0}"\n'
-                'last open tag is "{1}"\n'
-                "line {2}".format(tag_key, last_section, _CURRENT_LINE + 1)
+                f'Trying to close tag "{tag_key}"\n'
+                f'last open tag is "{last_section}"\n'
+                f"line {_CURRENT_LINE + 1}"
             )

         # Do the second check to see if we're a standalone
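Note how the fix keeps the implicit concatenation of adjacent literals and only prefixes the pieces that contain placeholders with `f`; segments like `"Looks like it was not opened.\n"` stay plain strings. A sketch of the resulting style, with hypothetical values standing in for the parser state:

```python
tag_key = "section"   # hypothetical tag name
_CURRENT_LINE = 41    # hypothetical parser position

message = (
    f'Trying to close tag "{tag_key}"\n'
    "Looks like it was not opened.\n"  # no placeholder, so no f-prefix
    f"line {_CURRENT_LINE + 1}"
)
assert 'close tag "section"' in message and message.endswith("line 42")
```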
@@ -320,8 +320,8 @@ def tokenize(
             # Then we need to complain
             raise ChevronError(
                 "Unexpected EOF\n"
-                'the tag "{0}" was never closed\n'
-                "was opened at line {1}".format(open_sections[-1], _LAST_TAG_LINE)
+                f'the tag "{open_sections[-1]}" was never closed\n'
+                f"was opened at line {_LAST_TAG_LINE}"
             )

@@ -403,10 +403,10 @@ def _get_key(
     # We couldn't find the key in any of the scopes

     if warn:
-        logger.warn("Could not find key '%s'" % (key))
+        logger.warn(f"Could not find key '{key}'")

     if keep:
-        return "%s %s %s" % (def_ldel, key, def_rdel)
+        return f"{def_ldel} {key} {def_rdel}"

     return ""

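The `%`-formatting rewrites are ruff's UP031 (printf-string-formatting). (`logger.warn`, a deprecated alias of `logger.warning`, is left alone since the UP rules only touch the formatting.) A sketch:

```python
def_ldel, key, def_rdel = "{{", "name", "}}"  # mustache-style delimiters

# Before (UP031): "%s %s %s" % (def_ldel, key, def_rdel)
# After:
kept_tag = f"{def_ldel} {key} {def_rdel}"
assert kept_tag == "{{ name }}"
```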
@@ -565,9 +565,9 @@ def render(
         if tag_type == "literal":
             text += tag_key
         elif tag_type == "no escape":
-            text += "%s& %s %s" % (def_ldel, tag_key, def_rdel)
+            text += f"{def_ldel}& {tag_key} {def_rdel}"
         else:
-            text += "%s%s %s%s" % (
+            text += "{}{} {}{}".format(
                 def_ldel,
                 {
                     "comment": "!",
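Here the fix lands on `str.format` rather than an f-string, presumably because the replacement values span several lines (one is a dict literal keyed by the tag type) and the autofix will not inline such expressions. A sketch of the shape, with the dict lookup condensed:

```python
def_ldel, def_rdel = "{{", "}}"
tag_type, tag_key = "comment", "note"

# Multi-line arguments keep .format(); an f-string would have to inline
# the whole dict lookup into the literal.
text = "{}{} {}{}".format(
    def_ldel,
    {
        "comment": "!",
        "section": "#",
    }.get(tag_type, ""),  # lookup condensed for the sketch
    tag_key,
    def_rdel,
)
assert text == "{{! note}}"
```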
@@ -380,7 +380,7 @@ class InMemoryVectorStore(VectorStore):
         embedding: Embeddings,
         metadatas: Optional[List[dict]] = None,
         **kwargs: Any,
-    ) -> "InMemoryVectorStore":
+    ) -> InMemoryVectorStore:
         store = cls(
             embedding=embedding,
         )
@@ -394,7 +394,7 @@ class InMemoryVectorStore(VectorStore):
         embedding: Embeddings,
         metadatas: Optional[List[dict]] = None,
         **kwargs: Any,
-    ) -> "InMemoryVectorStore":
+    ) -> InMemoryVectorStore:
         store = cls(
             embedding=embedding,
         )
@@ -404,7 +404,7 @@ class InMemoryVectorStore(VectorStore):
     @classmethod
     def load(
         cls, path: str, embedding: Embeddings, **kwargs: Any
-    ) -> "InMemoryVectorStore":
+    ) -> InMemoryVectorStore:
         """Load a vector store from a file.

         Args:
@@ -41,7 +41,11 @@ python = ">=3.12.4"
 [tool.poetry.extras]

 [tool.ruff.lint]
-select = [ "E", "F", "I", "T201",]
+select = [ "E", "F", "I", "T201", "UP",]
+ignore = [
+    "UP006", # Incompatible with pydantic v1
+    "UP007", # Incompatible with pydantic v1
+]

 [tool.coverage.run]
 omit = [ "tests/*",]
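The two ignores keep pyupgrade away from type annotations: UP006 would rewrite `typing.List[int]` as `list[int]`, and UP007 would rewrite `Optional[X]`/`Union[X, Y]` as `X | None`/`X | Y`. Both break pydantic v1 models, which introspect annotations at runtime. An illustrative sketch of what the ignored rules would change (the `after` form needs Python 3.10+ at runtime):

```python
from typing import List, Optional


def before(xs: Optional[List[int]]) -> int:
    # Style kept in the codebase: pydantic v1 can evaluate these annotations.
    return sum(xs or [])


def after(xs: list[int] | None) -> int:
    # What UP006/UP007 would produce; fine for plain functions on modern
    # Python, but not for annotations pydantic v1 must resolve.
    return sum(xs or [])
```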
@@ -493,8 +493,7 @@ EXPECTED_STREAMED_JSON_DIFF = [

 def test_partial_text_json_output_parser() -> None:
     def input_iter(_: Any) -> Iterator[str]:
-        for token in STREAMED_TOKENS:
-            yield token
+        yield from STREAMED_TOKENS

     chain = input_iter | SimpleJsonOutputParser()

@@ -503,8 +502,7 @@ def test_partial_text_json_output_parser() -> None:

 def test_partial_text_json_output_parser_diff() -> None:
     def input_iter(_: Any) -> Iterator[str]:
-        for token in STREAMED_TOKENS:
-            yield token
+        yield from STREAMED_TOKENS

     chain = input_iter | SimpleJsonOutputParser(diff=True)

@@ -576,8 +574,7 @@ def test_partial_text_json_output_parser_with_json_code_block() -> None:
     """Test json parser works correctly when the response contains a json code-block."""

     def input_iter(_: Any) -> Iterator[str]:
-        for token in TOKENS_WITH_JSON_CODE_BLOCK:
-            yield token
+        yield from TOKENS_WITH_JSON_CODE_BLOCK

     chain = input_iter | SimpleJsonOutputParser()

@@ -368,8 +368,7 @@ def _get_iter(use_tool_calls: bool = False) -> Any:
         list_to_iter = STREAMED_MESSAGES

     def input_iter(_: Any) -> Iterator[BaseMessage]:
-        for msg in list_to_iter:
-            yield msg
+        yield from list_to_iter

     return input_iter

@@ -825,7 +825,7 @@ def test_get_output_messages_with_value_error() -> None:
         with_history.bound.invoke([HumanMessage(content="hello")], config)
     excepted = (
         "Expected str, BaseMessage, List[BaseMessage], or Tuple[BaseMessage]."
-        + (" Got {}.".format(illegal_bool_message))
+        + (f" Got {illegal_bool_message}.")
     )
     assert excepted in str(excinfo.value)

@@ -837,6 +837,6 @@ def test_get_output_messages_with_value_error() -> None:
         with_history.bound.invoke([HumanMessage(content="hello")], config)
     excepted = (
         "Expected str, BaseMessage, List[BaseMessage], or Tuple[BaseMessage]."
-        + (" Got {}.".format(illegal_int_message))
+        + (f" Got {illegal_int_message}.")
     )
     assert excepted in str(excinfo.value)
@@ -5127,8 +5127,7 @@ async def test_runnable_gen_transform() -> None:
     """Test that a generator can be used as a runnable."""

     def gen_indexes(length_iter: Iterator[int]) -> Iterator[int]:
-        for i in range(next(length_iter)):
-            yield i
+        yield from range(next(length_iter))

     async def agen_indexes(length_iter: AsyncIterator[int]) -> AsyncIterator[int]:
         async for length in length_iter:
@@ -1765,7 +1765,7 @@ def test__get_all_basemodel_annotations_v2(use_v1_namespace: bool) -> None:
     class ModelB(ModelA[str]):
         b: Annotated[ModelA[Dict[str, Any]], "foo"]

-    class Mixin(object):
+    class Mixin:
         def foo(self) -> str:
             return "foo"

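`class Mixin(object)` → `class Mixin` is ruff's UP004 (useless-object-inheritance): every Python 3 class is new-style, so inheriting from `object` explicitly is a no-op. A sketch:

```python
class OldStyle(object):  # what UP004 flags
    pass


class NewStyle:  # identical behavior in Python 3
    pass


assert OldStyle.__mro__[-1] is object and NewStyle.__mro__[-1] is object
```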
@@ -1822,7 +1822,7 @@ def test__get_all_basemodel_annotations_v1() -> None:
     class ModelB(ModelA[str]):
         b: Annotated[ModelA[Dict[str, Any]], "foo"]

-    class Mixin(object):
+    class Mixin:
         def foo(self) -> str:
             return "foo"

|
Loading…
Reference in New Issue
Block a user