core: Add ruff rules for PIE (#26939)

All auto-fixes.
Christophe Bornet authored on 2024-09-27 18:08:35 +02:00; committed by GitHub
parent 836c2a4ae0
commit f4e738bb40
21 changed files with 16 additions and 52 deletions
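The diffs below are all instances of four auto-fixable rules from the PIE (flake8-pie) category: PIE790 (a pass placeholder after a docstring is redundant), PIE800 (an inline **{...} spread inside a dict literal can be flattened), PIE808 (range() starts at 0 by default), and PIE810 (or-chained startswith checks can take a tuple). A minimal illustrative sketch of each before/after shape, using made-up names rather than code from this diff:

# Illustrative only; the names (noop, kwargs, params, block) are invented.

# PIE790: the docstring is a sufficient body, so the trailing pass goes away.
def noop() -> None:
    """Do nothing."""  # before the fix, a bare `pass` followed this line

# PIE800: flatten the nested spread; the resulting dict is identical.
kwargs = {"temperature": 0.5}
params = {**kwargs, "stream": True}  # before: {**kwargs, **{"stream": True}}

# PIE808: drop the redundant explicit start of 0.
for i in range(2):  # before: range(0, 2)
    print(i)

# PIE810: a single startswith call with a tuple replaces or-chained calls.
block = "Returns: the parsed value"
if block.startswith(("Returns:", "Example:")):  # before: two or-ed startswith calls
    print("descriptor block")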

View File

@@ -112,7 +112,6 @@ class StreamingStdOutCallbackHandler(BaseCallbackHandler):
action (AgentAction): The agent action.
**kwargs (Any): Additional keyword arguments.
"""
- pass
def on_tool_end(self, output: Any, **kwargs: Any) -> None:
"""Run when tool ends running.

View File

@@ -357,7 +357,7 @@ class BaseChatModel(BaseLanguageModel[BaseMessage], ABC):
stop: Optional[list[str]] = None,
**kwargs: Any,
) -> Iterator[BaseMessageChunk]:
- if not self._should_stream(async_api=False, **{**kwargs, **{"stream": True}}):
+ if not self._should_stream(async_api=False, **{**kwargs, "stream": True}):
# model doesn't implement streaming, so use default implementation
yield cast(
BaseMessageChunk, self.invoke(input, config=config, stop=stop, **kwargs)
@@ -427,7 +427,7 @@ class BaseChatModel(BaseLanguageModel[BaseMessage], ABC):
stop: Optional[list[str]] = None,
**kwargs: Any,
) -> AsyncIterator[BaseMessageChunk]:
- if not self._should_stream(async_api=True, **{**kwargs, **{"stream": True}}):
+ if not self._should_stream(async_api=True, **{**kwargs, "stream": True}):
# No async or sync stream is implemented, so fall back to ainvoke
yield cast(
BaseMessageChunk,
@@ -550,7 +550,7 @@ class BaseChatModel(BaseLanguageModel[BaseMessage], ABC):
def _get_llm_string(self, stop: Optional[list[str]] = None, **kwargs: Any) -> str:
if self.is_lc_serializable():
- params = {**kwargs, **{"stop": stop}}
+ params = {**kwargs, "stop": stop}
param_string = str(sorted(params.items()))
# This code is not super efficient as it goes back and forth between
# json and dict.
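Worth noting for the PIE800 fixes above (a gloss, not part of the diff): in a dict literal, later entries override earlier ones, so flattening the spread preserves the override semantics exactly. A quick check with invented values:

kwargs = {"stop": ["old"], "temperature": 0.5}
# Both forms produce {"stop": ["new"], "temperature": 0.5}; the explicit key wins.
assert {**kwargs, **{"stop": ["new"]}} == {**kwargs, "stop": ["new"]}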

View File

@@ -1007,11 +1007,9 @@ class ChatPromptTemplate(BaseChatPromptTemplate):
input_vars.update(_message.input_variables)
kwargs = {
- **{
- "input_variables": sorted(input_vars),
- "optional_variables": sorted(optional_variables),
- "partial_variables": partial_vars,
- },
+ "input_variables": sorted(input_vars),
+ "optional_variables": sorted(optional_variables),
+ "partial_variables": partial_vars,
**kwargs,
}
cast(type[ChatPromptTemplate], super()).__init__(messages=_messages, **kwargs)

View File

@@ -36,8 +36,6 @@ else:
class EmptyDict(TypedDict, total=False):
"""Empty dict type."""
- pass
class RunnableConfig(TypedDict, total=False):
"""Configuration for a Runnable."""

View File

@@ -457,8 +457,6 @@ RunnableConfigurableFields.model_rebuild()
class StrEnum(str, enum.Enum):
"""String enum."""
- pass
_enums_for_spec: WeakValueDictionary[
Union[

View File

@@ -619,7 +619,8 @@ class RunnableWithFallbacks(RunnableSerializable[Input, Output]):
return self.__class__(
**{
**self.model_dump(),
**{"runnable": new_runnable, "fallbacks": new_fallbacks},
"runnable": new_runnable,
"fallbacks": new_fallbacks,
}
)

View File

@@ -320,8 +320,6 @@ class ToolException(Exception): # noqa: N818
to the agent as observation, and printed in red on the console.
"""
- pass
class BaseTool(RunnableSerializable[Union[str, dict, ToolCall], Any]):
"""Interface LangChain tools must implement."""

View File

@@ -850,7 +850,6 @@ class AsyncBaseTracer(_TracerCore, AsyncCallbackHandler, ABC):
async def _on_run_create(self, run: Run) -> None:
"""Process a run upon creation."""
- pass
async def _on_run_update(self, run: Run) -> None:
"""Process a run upon update."""

View File

@@ -829,7 +829,6 @@ async def _astream_events_implementation_v1(
inputs = log_entry["inputs"]
if inputs is not None:
data["input"] = inputs
- pass
if event_type == "end":
inputs = log_entry["inputs"]

View File

@@ -561,7 +561,7 @@ def _parse_google_docstring(
if block.startswith("Args:"):
args_block = block
break
elif block.startswith("Returns:") or block.startswith("Example:"):
elif block.startswith(("Returns:", "Example:")):
# Don't break in case Args come after
past_descriptors = True
elif not past_descriptors:

View File

@@ -32,8 +32,6 @@ _LAST_TAG_LINE = None
class ChevronError(SyntaxError):
"""Custom exception for Chevron errors."""
- pass
#
# Helper functions

View File

@@ -268,8 +268,6 @@ def convert_to_secret_str(value: Union[SecretStr, str]) -> SecretStr:
class _NoDefaultType:
"""Type to indicate no default value is provided."""
- pass
_NoDefault = _NoDefaultType()

View File

@@ -44,7 +44,7 @@ python = ">=3.12.4"
[tool.poetry.extras]
[tool.ruff.lint]
select = [ "B", "C4", "E", "F", "I", "N", "T201", "UP",]
select = [ "B", "C4", "E", "F", "I", "N", "PIE", "T201", "UP",]
ignore = [ "UP007",]
[tool.coverage.run]
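With PIE added to the select list, running ruff check --fix over the package applies fixes like those shown above automatically, consistent with the "All auto-fixes" note in the commit message.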

View File

@@ -4,4 +4,3 @@ import pytest
@pytest.mark.compile
def test_placeholder() -> None:
"""Used for compiling integration tests without running any real tests."""
- pass

View File

@@ -68,7 +68,6 @@ async def beta_async_function() -> str:
class ClassWithBetaMethods:
def __init__(self) -> None:
"""original doc"""
- pass
@beta()
def beta_method(self) -> str:
@@ -244,7 +243,6 @@ def test_whole_class_beta() -> None:
class BetaClass:
def __init__(self) -> None:
"""original doc"""
- pass
@beta()
def beta_method(self) -> str:

View File

@@ -88,7 +88,6 @@ async def deprecated_async_function() -> str:
class ClassWithDeprecatedMethods:
def __init__(self) -> None:
"""original doc"""
- pass
@deprecated(since="2.0.0", removal="3.0.0")
def deprecated_method(self) -> str:
@@ -268,7 +267,6 @@ def test_whole_class_deprecation() -> None:
class DeprecatedClass:
def __init__(self) -> None:
"""original doc"""
- pass
@deprecated(since="2.0.0", removal="3.0.0")
def deprecated_method(self) -> str:
@@ -311,7 +309,6 @@ def test_whole_class_inherited_deprecation() -> None:
class DeprecatedClass:
def __init__(self) -> None:
"""original doc"""
- pass
@deprecated(since="2.0.0", removal="3.0.0")
def deprecated_method(self) -> str:
@@ -324,7 +321,6 @@ def test_whole_class_inherited_deprecation() -> None:
def __init__(self) -> None:
"""original doc"""
- pass
@deprecated(since="2.2.0", removal="3.2.0")
def deprecated_method(self) -> str:

View File

@@ -107,7 +107,7 @@ async def test_stream_error_callback() -> None:
else:
assert llm_result.generations[0][0].text == message[:i]
- for i in range(0, 2):
+ for i in range(2):
llm = FakeListChatModel(
responses=[message],
error_on_chunk_number=i,

View File

@@ -105,7 +105,7 @@ async def test_stream_error_callback() -> None:
else:
assert llm_result.generations[0][0].text == message[:i]
- for i in range(0, 2):
+ for i in range(2):
llm = FakeStreamingListLLM(
responses=[message],
error_on_chunk_number=i,

View File

@@ -429,16 +429,12 @@ def test_message_chunk_to_message() -> None:
expected = AIMessage(
content="I am",
tool_calls=[
- create_tool_call(**{"name": "tool1", "args": {"a": 1}, "id": "1"}), # type: ignore[arg-type]
- create_tool_call(**{"name": "tool2", "args": {}, "id": "2"}), # type: ignore[arg-type]
+ create_tool_call(name="tool1", args={"a": 1}, id="1"), # type: ignore[arg-type]
+ create_tool_call(name="tool2", args={}, id="2"), # type: ignore[arg-type]
],
invalid_tool_calls=[
- create_invalid_tool_call(
- **{"name": "tool3", "args": None, "id": "3", "error": None}
- ),
- create_invalid_tool_call(
- **{"name": "tool4", "args": "abc", "id": "4", "error": None}
- ),
+ create_invalid_tool_call(name="tool3", args=None, id="3", error=None),
+ create_invalid_tool_call(name="tool4", args="abc", id="4", error=None),
],
)
assert message_chunk_to_message(chunk) == expected

View File

@@ -357,7 +357,6 @@ def test_structured_tool_types_parsed_pydantic_mixed() -> None:
some_base_model: SomeBaseModel, another_base_model: AnotherBaseModel
) -> None:
"""Return the arguments directly."""
- pass
def test_base_tool_inheritance_base_schema() -> None:

View File

@@ -54,7 +54,6 @@ def annotated_function() -> Callable:
arg2: ExtensionsAnnotated[Literal["bar", "baz"], "one of 'bar', 'baz'"],
) -> None:
"""dummy function"""
- pass
return dummy_function
@@ -68,7 +67,6 @@ def function() -> Callable:
arg1: foo
arg2: one of 'bar', 'baz'
"""
- pass
return dummy_function
@@ -220,7 +218,6 @@ class Dummy:
arg1: foo
arg2: one of 'bar', 'baz'
"""
- pass
class DummyWithClassMethod:
@@ -232,7 +229,6 @@ class DummyWithClassMethod:
arg1: foo
arg2: one of 'bar', 'baz'
"""
- pass
def test_convert_to_openai_function(
@@ -334,7 +330,6 @@ def test_convert_to_openai_function_nested_v2() -> None:
def my_function(arg1: NestedV2) -> None:
"""dummy function"""
- pass
convert_to_openai_function(my_function)
@@ -348,7 +343,6 @@ def test_convert_to_openai_function_nested() -> None:
def my_function(arg1: Nested) -> None:
"""dummy function"""
- pass
expected = {
"name": "my_function",
@@ -386,7 +380,6 @@ def test_convert_to_openai_function_nested_strict() -> None:
def my_function(arg1: Nested) -> None:
"""dummy function"""
- pass
expected = {
"name": "my_function",
@@ -429,7 +422,6 @@ def test_function_optional_param() -> None:
c: Optional[list[Optional[str]]],
) -> None:
"""A test function"""
- pass
func = convert_to_openai_function(func5)
req = func["parameters"]["required"]
@@ -439,7 +431,6 @@ def test_function_no_params() -> None:
def test_function_no_params() -> None:
def nullary_function() -> None:
"""nullary function"""
- pass
func = convert_to_openai_function(nullary_function)
req = func["parameters"].get("required")
@@ -781,7 +772,6 @@ def test_convert_union_type_py_39() -> None:
@tool
def magic_function(input: int | float) -> str:
"""Compute a magic function."""
- pass
result = convert_to_openai_function(magic_function)
assert result["parameters"]["properties"]["input"] == {