from abc import abstractmethod
from typing import Any, List, Optional

from langchain.callbacks.manager import Callbacks
from langchain.chains.llm import LLMChain
from pydantic import BaseModel

from langchain_experimental.plan_and_execute.schema import Plan, PlanOutputParser


class BasePlanner(BaseModel):
    """Base planner."""

    @abstractmethod
    def plan(self, inputs: dict, callbacks: Callbacks = None, **kwargs: Any) -> Plan:
        """Given input, decide what to do."""

    @abstractmethod
    async def aplan(
        self, inputs: dict, callbacks: Callbacks = None, **kwargs: Any
    ) -> Plan:
        """Given input, asynchronously decide what to do."""


class LLMPlanner(BasePlanner):
    """LLM planner."""

    llm_chain: LLMChain
    """The LLM chain to use."""
    output_parser: PlanOutputParser
    """The output parser to use."""
    stop: Optional[List] = None
    """The stop list to use."""

    def plan(self, inputs: dict, callbacks: Callbacks = None, **kwargs: Any) -> Plan:
        """Given input, decide what to do."""
        llm_response = self.llm_chain.run(**inputs, stop=self.stop, callbacks=callbacks)
        return self.output_parser.parse(llm_response)

    async def aplan(
        self, inputs: dict, callbacks: Callbacks = None, **kwargs: Any
    ) -> Plan:
        """Given input, asynchronously decide what to do."""
        llm_response = await self.llm_chain.arun(
            **inputs, stop=self.stop, callbacks=callbacks
        )
        return self.output_parser.parse(llm_response)
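

# Illustrative usage sketch (not part of the library API): a minimal way to wire
# up LLMPlanner with an LLMChain and a concrete PlanOutputParser. It assumes the
# Step/Plan types from langchain_experimental.plan_and_execute.schema and
# langchain's FakeListLLM / PromptTemplate behave as in the 0.0.x releases. The
# parser class, prompt text, and inputs below are hypothetical examples chosen
# for the sketch, not names defined by this module.
if __name__ == "__main__":
    from langchain.llms.fake import FakeListLLM
    from langchain.prompts import PromptTemplate

    from langchain_experimental.plan_and_execute.schema import Step

    class NumberedListPlanOutputParser(PlanOutputParser):
        """Parse an LLM response written as a numbered list into a Plan."""

        def parse(self, text: str) -> Plan:
            steps = [
                Step(value=line.split(".", 1)[1].strip())
                for line in text.splitlines()
                if line.strip() and line.lstrip()[0].isdigit()
            ]
            return Plan(steps=steps)

    # A fake LLM with a canned numbered-list response keeps the sketch runnable
    # without network access or API keys.
    fake_llm = FakeListLLM(responses=["1. Search for the data\n2. Summarize it"])
    prompt = PromptTemplate.from_template(
        "Devise a step-by-step plan for: {objective}"
    )
    planner = LLMPlanner(
        llm_chain=LLMChain(llm=fake_llm, prompt=prompt),
        output_parser=NumberedListPlanOutputParser(),
    )
    plan = planner.plan({"objective": "write a report"})
    print([step.value for step in plan.steps])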