consistently use getLogger(__name__), no root logger (#2989)

Re: https://github.com/hwchase17/langchain/issues/439#issuecomment-1510442791

I think it's not polite for a library to use the root logger

both of these forms are also used:
```
logger = logging.getLogger(__name__)
logger = logging.getLogger(__file__)
```
I am not sure whether there is any reason behind using one form vs. the
other (I am guessing they were simply contributed by different people).

It seems to me it would be better to consistently use
`logging.getLogger(__name__)`.

This makes it easier for consumers of the library to set up log
handlers, e.g. for everything under the `langchain.` logger prefix.
This commit is contained in:
Paul Garner 2023-04-16 20:49:35 +01:00 committed by GitHub
parent 32db2a2c2f
commit 69698be3e6
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
19 changed files with 19 additions and 19 deletions

View File

@ -30,7 +30,7 @@ from langchain.schema import (
from langchain.tools.base import BaseTool
from langchain.utilities.asyncio import asyncio_timeout
logger = logging.getLogger()
logger = logging.getLogger(__name__)
class BaseSingleActionAgent(BaseModel):

View File

@ -9,7 +9,7 @@ from pydantic import root_validator
from langchain.chat_models.openai import ChatOpenAI
from langchain.utils import get_from_dict_or_env
logger = logging.getLogger(__file__)
logger = logging.getLogger(__name__)
class AzureChatOpenAI(ChatOpenAI):

View File

@ -26,7 +26,7 @@ from langchain.schema import (
)
from langchain.utils import get_from_dict_or_env
logger = logging.getLogger(__file__)
logger = logging.getLogger(__name__)
def _create_retry_decorator(llm: ChatOpenAI) -> Callable[[Any], Any]:

View File

@ -7,7 +7,7 @@ import requests
from langchain.docstore.document import Document
from langchain.document_loaders.base import BaseLoader
logger = logging.getLogger(__file__)
logger = logging.getLogger(__name__)
class DiffbotLoader(BaseLoader):

View File

@ -12,7 +12,7 @@ from langchain.document_loaders.unstructured import UnstructuredFileLoader
FILE_LOADER_TYPE = Union[
Type[UnstructuredFileLoader], Type[TextLoader], Type[BSHTMLLoader]
]
logger = logging.getLogger(__file__)
logger = logging.getLogger(__name__)
def _is_visible(p: Path) -> bool:

View File

@ -6,7 +6,7 @@ from typing import Dict, List, Union
from langchain.docstore.document import Document
from langchain.document_loaders.base import BaseLoader
logger = logging.getLogger(__file__)
logger = logging.getLogger(__name__)
class BSHTMLLoader(BaseLoader):

View File

@ -5,7 +5,7 @@ from typing import Any, List
from langchain.docstore.document import Document
from langchain.document_loaders.base import BaseLoader
logger = logging.getLogger(__file__)
logger = logging.getLogger(__name__)
class UnstructuredURLLoader(BaseLoader):

View File

@ -6,7 +6,7 @@ from typing import List, Optional
from langchain.docstore.document import Document
from langchain.document_loaders.base import BaseLoader
logger = logging.getLogger(__file__)
logger = logging.getLogger(__name__)
class PlaywrightURLLoader(BaseLoader):

View File

@ -9,7 +9,7 @@ if TYPE_CHECKING:
from langchain.docstore.document import Document
from langchain.document_loaders.base import BaseLoader
logger = logging.getLogger(__file__)
logger = logging.getLogger(__name__)
class SeleniumURLLoader(BaseLoader):

View File

@ -9,7 +9,7 @@ import requests
from langchain.docstore.document import Document
from langchain.document_loaders.base import BaseLoader
logger = logging.getLogger(__file__)
logger = logging.getLogger(__name__)
default_header_template = {
"User-Agent": "",

View File

@ -12,7 +12,7 @@ DEFAULT_MODEL_ID = "gpt2"
DEFAULT_TASK = "text-generation"
VALID_TASKS = ("text2text-generation", "text-generation")
logger = logging.getLogger()
logger = logging.getLogger(__name__)
class HuggingFacePipeline(LLM):

View File

@ -9,7 +9,7 @@ from pydantic import Extra
from langchain.llms.base import LLM
from langchain.llms.utils import enforce_stop_tokens
logger = logging.getLogger()
logger = logging.getLogger(__name__)
def _generate_text(

View File

@ -12,7 +12,7 @@ DEFAULT_MODEL_ID = "gpt2"
DEFAULT_TASK = "text-generation"
VALID_TASKS = ("text2text-generation", "text-generation")
logger = logging.getLogger()
logger = logging.getLogger(__name__)
def _generate_text(

View File

@ -14,7 +14,7 @@ from langchain.prompts.prompt import PromptTemplate
from langchain.utilities.loading import try_load_from_hub
URL_BASE = "https://raw.githubusercontent.com/hwchase17/langchain-hub/master/prompts/"
logger = logging.getLogger(__file__)
logger = logging.getLogger(__name__)
def load_prompt_from_config(config: dict) -> BasePromptTemplate:

View File

@ -18,7 +18,7 @@ from typing import (
from langchain.docstore.document import Document
logger = logging.getLogger()
logger = logging.getLogger(__name__)
class TextSplitter(ABC):

View File

@ -11,7 +11,7 @@ from langchain.docstore.document import Document
from langchain.embeddings.base import Embeddings
from langchain.vectorstores.base import VectorStore
logger = logging.getLogger()
logger = logging.getLogger(__name__)
class AtlasDB(VectorStore):

View File

@ -16,7 +16,7 @@ if TYPE_CHECKING:
import chromadb
import chromadb.config
logger = logging.getLogger()
logger = logging.getLogger(__name__)
def _results_to_docs(results: Any) -> List[Document]:

View File

@ -13,7 +13,7 @@ from langchain.embeddings.base import Embeddings
from langchain.vectorstores.base import VectorStore
from langchain.vectorstores.utils import maximal_marginal_relevance
logger = logging.getLogger()
logger = logging.getLogger(__name__)
distance_metric_map = {
"l2": lambda a, b: np.linalg.norm(a - b, axis=1, ord=2),

View File

@ -16,7 +16,7 @@ from langchain.schema import BaseRetriever
from langchain.utils import get_from_dict_or_env
from langchain.vectorstores.base import VectorStore
logger = logging.getLogger()
logger = logging.getLogger(__name__)
# required modules