consistently use getLogger(__name__), no root logger (#2989)

re
https://github.com/hwchase17/langchain/issues/439#issuecomment-1510442791

I think it's not polite for a library to use the root logger

both of these forms are also used:
```
logger = logging.getLogger(__name__)
logger = logging.getLogger(__file__)
```
I am not sure if there is any reason behind one vs the other? (...I am
guessing maybe just contributed by different people)
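
for illustration, this is roughly the practical difference (a sketch run from an arbitrary script, so the printed names are just placeholders — inside e.g. `langchain/llms/openai.py` the `__name__` form would give `langchain.llms.openai`):
```python
import logging

# __name__ is the dotted module name, so the logger slots into the
# "langchain." hierarchy; __file__ is a filesystem path, so it doesn't.
print(logging.getLogger(__name__).name)   # e.g. "__main__" here,
                                          # "langchain.llms.openai" in that module
print(logging.getLogger(__file__).name)   # e.g. "/path/to/this_script.py"
```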

it seems to me it'd be better to consistently use
`logging.getLogger(__name__)`

this makes it easier for consumers of the library to set up log
handlers, e.g. for everything with `langchain.` prefix
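
as a rough sketch of what that could look like on the consumer side (the handler and format here are just an example, not something langchain prescribes):
```python
import logging

# attach a handler/level once to the top-level "langchain" logger;
# every logging.getLogger(__name__) inside langchain.* propagates to it
handler = logging.StreamHandler()
handler.setFormatter(
    logging.Formatter("%(asctime)s %(name)s %(levelname)s: %(message)s")
)

langchain_logger = logging.getLogger("langchain")
langchain_logger.addHandler(handler)
langchain_logger.setLevel(logging.DEBUG)
```
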
Branch: fix_agent_callbacks
Authored by Paul Garner 1 year ago, committed by GitHub
parent 32db2a2c2f
commit 69698be3e6

@@ -30,7 +30,7 @@ from langchain.schema import (
 from langchain.tools.base import BaseTool
 from langchain.utilities.asyncio import asyncio_timeout
-logger = logging.getLogger()
+logger = logging.getLogger(__name__)
 class BaseSingleActionAgent(BaseModel):

@@ -9,7 +9,7 @@ from pydantic import root_validator
 from langchain.chat_models.openai import ChatOpenAI
 from langchain.utils import get_from_dict_or_env
-logger = logging.getLogger(__file__)
+logger = logging.getLogger(__name__)
 class AzureChatOpenAI(ChatOpenAI):

@@ -26,7 +26,7 @@ from langchain.schema import (
 )
 from langchain.utils import get_from_dict_or_env
-logger = logging.getLogger(__file__)
+logger = logging.getLogger(__name__)
 def _create_retry_decorator(llm: ChatOpenAI) -> Callable[[Any], Any]:

@@ -7,7 +7,7 @@ import requests
 from langchain.docstore.document import Document
 from langchain.document_loaders.base import BaseLoader
-logger = logging.getLogger(__file__)
+logger = logging.getLogger(__name__)
 class DiffbotLoader(BaseLoader):

@@ -12,7 +12,7 @@ from langchain.document_loaders.unstructured import UnstructuredFileLoader
 FILE_LOADER_TYPE = Union[
     Type[UnstructuredFileLoader], Type[TextLoader], Type[BSHTMLLoader]
 ]
-logger = logging.getLogger(__file__)
+logger = logging.getLogger(__name__)
 def _is_visible(p: Path) -> bool:

@@ -6,7 +6,7 @@ from typing import Dict, List, Union
 from langchain.docstore.document import Document
 from langchain.document_loaders.base import BaseLoader
-logger = logging.getLogger(__file__)
+logger = logging.getLogger(__name__)
 class BSHTMLLoader(BaseLoader):

@@ -5,7 +5,7 @@ from typing import Any, List
 from langchain.docstore.document import Document
 from langchain.document_loaders.base import BaseLoader
-logger = logging.getLogger(__file__)
+logger = logging.getLogger(__name__)
 class UnstructuredURLLoader(BaseLoader):

@@ -6,7 +6,7 @@ from typing import List, Optional
 from langchain.docstore.document import Document
 from langchain.document_loaders.base import BaseLoader
-logger = logging.getLogger(__file__)
+logger = logging.getLogger(__name__)
 class PlaywrightURLLoader(BaseLoader):

@@ -9,7 +9,7 @@ if TYPE_CHECKING:
 from langchain.docstore.document import Document
 from langchain.document_loaders.base import BaseLoader
-logger = logging.getLogger(__file__)
+logger = logging.getLogger(__name__)
 class SeleniumURLLoader(BaseLoader):

@@ -9,7 +9,7 @@ import requests
 from langchain.docstore.document import Document
 from langchain.document_loaders.base import BaseLoader
-logger = logging.getLogger(__file__)
+logger = logging.getLogger(__name__)
 default_header_template = {
     "User-Agent": "",

@@ -12,7 +12,7 @@ DEFAULT_MODEL_ID = "gpt2"
 DEFAULT_TASK = "text-generation"
 VALID_TASKS = ("text2text-generation", "text-generation")
-logger = logging.getLogger()
+logger = logging.getLogger(__name__)
 class HuggingFacePipeline(LLM):

@@ -9,7 +9,7 @@ from pydantic import Extra
 from langchain.llms.base import LLM
 from langchain.llms.utils import enforce_stop_tokens
-logger = logging.getLogger()
+logger = logging.getLogger(__name__)
 def _generate_text(

@@ -12,7 +12,7 @@ DEFAULT_MODEL_ID = "gpt2"
 DEFAULT_TASK = "text-generation"
 VALID_TASKS = ("text2text-generation", "text-generation")
-logger = logging.getLogger()
+logger = logging.getLogger(__name__)
 def _generate_text(

@@ -14,7 +14,7 @@ from langchain.prompts.prompt import PromptTemplate
 from langchain.utilities.loading import try_load_from_hub
 URL_BASE = "https://raw.githubusercontent.com/hwchase17/langchain-hub/master/prompts/"
-logger = logging.getLogger(__file__)
+logger = logging.getLogger(__name__)
 def load_prompt_from_config(config: dict) -> BasePromptTemplate:

@@ -18,7 +18,7 @@ from typing import (
 from langchain.docstore.document import Document
-logger = logging.getLogger()
+logger = logging.getLogger(__name__)
 class TextSplitter(ABC):

@@ -11,7 +11,7 @@ from langchain.docstore.document import Document
 from langchain.embeddings.base import Embeddings
 from langchain.vectorstores.base import VectorStore
-logger = logging.getLogger()
+logger = logging.getLogger(__name__)
 class AtlasDB(VectorStore):

@@ -16,7 +16,7 @@ if TYPE_CHECKING:
     import chromadb
     import chromadb.config
-logger = logging.getLogger()
+logger = logging.getLogger(__name__)
 def _results_to_docs(results: Any) -> List[Document]:

@@ -13,7 +13,7 @@ from langchain.embeddings.base import Embeddings
 from langchain.vectorstores.base import VectorStore
 from langchain.vectorstores.utils import maximal_marginal_relevance
-logger = logging.getLogger()
+logger = logging.getLogger(__name__)
 distance_metric_map = {
     "l2": lambda a, b: np.linalg.norm(a - b, axis=1, ord=2),

@@ -16,7 +16,7 @@ from langchain.schema import BaseRetriever
 from langchain.utils import get_from_dict_or_env
 from langchain.vectorstores.base import VectorStore
-logger = logging.getLogger()
+logger = logging.getLogger(__name__)
 # required modules
