From 19a9fa16a9df812f04f5b7dd9359b0f7b2a0435b Mon Sep 17 00:00:00 2001
From: Shobith Alva
Date: Sun, 11 Dec 2022 07:09:06 -0800
Subject: [PATCH 01/29] Add `clear()` method for `Memory` (#305)

a simple helper to clear the buffer in `Conversation*Memory` classes
---
 langchain/chains/base.py                     |  4 ++++
 langchain/chains/conversation/memory.py      | 12 +++++++++++
 tests/unit_tests/chains/test_conversation.py | 21 ++++++++++++++++++++
 3 files changed, 37 insertions(+)

diff --git a/langchain/chains/base.py b/langchain/chains/base.py
index 44f0bdbf..e041cdea 100644
--- a/langchain/chains/base.py
+++ b/langchain/chains/base.py
@@ -29,6 +29,10 @@ class Memory(BaseModel, ABC):
     def save_context(self, inputs: Dict[str, Any], outputs: Dict[str, str]) -> None:
         """Save the context of this model run to memory."""
 
+    @abstractmethod
+    def clear(self) -> None:
+        """Clear memory contents."""
+
 
 def _get_verbosity() -> bool:
     return langchain.verbose
diff --git a/langchain/chains/conversation/memory.py b/langchain/chains/conversation/memory.py
index 4ab6cbbd..ebdbb7ef 100644
--- a/langchain/chains/conversation/memory.py
+++ b/langchain/chains/conversation/memory.py
@@ -46,6 +46,10 @@ class ConversationBufferMemory(Memory, BaseModel):
         ai = "AI: " + outputs[list(outputs.keys())[0]]
         self.buffer += "\n" + "\n".join([human, ai])
 
+    def clear(self) -> None:
+        """Clear memory contents."""
+        self.buffer = ""
+
 
 class ConversationalBufferWindowMemory(Memory, BaseModel):
     """Buffer for storing conversation memory."""
@@ -75,6 +79,10 @@ class ConversationalBufferWindowMemory(Memory, BaseModel):
         ai = "AI: " + outputs[list(outputs.keys())[0]]
         self.buffer.append("\n".join([human, ai]))
 
+    def clear(self) -> None:
+        """Clear memory contents."""
+        self.buffer = []
+
 
 class ConversationSummaryMemory(Memory, BaseModel):
     """Conversation summarizer to memory."""
@@ -118,3 +126,7 @@ class ConversationSummaryMemory(Memory, BaseModel):
         new_lines = "\n".join([human, ai])
         chain = LLMChain(llm=self.llm, prompt=self.prompt)
         self.buffer = chain.predict(summary=self.buffer, new_lines=new_lines)
+
+    def clear(self) -> None:
+        """Clear memory contents."""
+        self.buffer = ""
diff --git a/tests/unit_tests/chains/test_conversation.py b/tests/unit_tests/chains/test_conversation.py
index 82653986..fd7eb55f 100644
--- a/tests/unit_tests/chains/test_conversation.py
+++ b/tests/unit_tests/chains/test_conversation.py
@@ -4,6 +4,7 @@ import pytest
 from langchain.chains.base import Memory
 from langchain.chains.conversation.base import ConversationChain
 from langchain.chains.conversation.memory import (
+    ConversationalBufferWindowMemory,
     ConversationBufferMemory,
     ConversationSummaryMemory,
 )
@@ -66,3 +67,23 @@ def test_conversation_memory(memory: Memory) -> None:
     bad_outputs = {"foo": "bar", "foo1": "bar"}
     with pytest.raises(ValueError):
         memory.save_context(good_inputs, bad_outputs)
+
+
+@pytest.mark.parametrize(
+    "memory",
+    [
+        ConversationBufferMemory(memory_key="baz"),
+        ConversationSummaryMemory(llm=FakeLLM(), memory_key="baz"),
+        ConversationalBufferWindowMemory(memory_key="baz"),
+    ],
+)
+def test_clearing_conversation_memory(memory: Memory) -> None:
+    """Test clearing the conversation memory."""
+    # This is a good input because the input is not the same as baz.
+    good_inputs = {"foo": "bar", "baz": "foo"}
+    # This is a good output because there is one variable.
+ good_outputs = {"bar": "foo"} + memory.save_context(good_inputs, good_outputs) + + memory.clear() + assert memory.load_memory_variables({}) == {"baz": ""} From a7c8e37e772a52c829960ff425e442b7b943cf1a Mon Sep 17 00:00:00 2001 From: Harrison Chase Date: Sun, 11 Dec 2022 07:43:40 -0800 Subject: [PATCH 02/29] Harrison/token counts (#311) Co-authored-by: thepok --- langchain/llms/openai.py | 77 ++++++++++++++++++++++++++++++++++++++++ 1 file changed, 77 insertions(+) diff --git a/langchain/llms/openai.py b/langchain/llms/openai.py index 22070c1a..4cebef1d 100644 --- a/langchain/llms/openai.py +++ b/langchain/llms/openai.py @@ -116,9 +116,86 @@ class OpenAI(LLM, BaseModel): response = openai("Tell me a joke.") """ params = self._default_params + + if params["max_tokens"] == -1: + params["max_tokens"] = self.max_tokens_for_prompt(prompt) + if stop is not None: if "stop" in params: raise ValueError("`stop` found in both the input and default params.") params["stop"] = stop response = self.client.create(model=self.model_name, prompt=prompt, **params) return response["choices"][0]["text"] + + def modelname_to_contextsize(self, modelname: str) -> int: + """Calculate the maximum number of tokens possible to generate for a model. + + text-davinci-003: 4,000 tokens + text-curie-001: 2,048 tokens + text-babbage-001: 2,048 tokens + text-ada-001: 2,048 tokens + code-davinci-002: 8,000 tokens + code-cushman-001: 2,048 tokens + + Args: + modelname: The modelname we want to know the context size for. + + Returns: + The maximum context size + + Example: + .. code-block:: python + + max_tokens = openai.modelname_to_contextsize("text-davinci-003") + """ + if modelname == "text-davinci-003": + return 4000 + elif modelname == "text-curie-001": + return 2048 + elif modelname == "text-babbage-001": + return 2048 + elif modelname == "text-ada-001": + return 2048 + elif modelname == "code-davinci-002": + return 8000 + elif modelname == "code-cushman-001": + return 2048 + else: + return 4000 + + def max_tokens_for_prompt(self, prompt: str) -> int: + """Calculate the maximum number of tokens possible to generate for a prompt. + + Args: + prompt: The prompt to pass into the model. + + Returns: + The maximum number of tokens to generate for a prompt. + + Example: + .. code-block:: python + + max_tokens = openai.max_token_for_prompt("Tell me a joke.") + """ + # TODO: this method may not be exact. + # TODO: this method may differ based on model (eg codex). + try: + from transformers import GPT2TokenizerFast + except ImportError: + raise ValueError( + "Could not import transformers python package. " + "This is needed in order to calculate max_tokens_for_prompt. " + "Please it install it with `pip install transformers`." 
+ ) + # create a GPT-3 tokenizer instance + tokenizer = GPT2TokenizerFast.from_pretrained("gpt2") + + # tokenize the text using the GPT-3 tokenizer + tokenized_text = tokenizer.tokenize(prompt) + + # calculate the number of tokens in the tokenized text + num_tokens = len(tokenized_text) + + # get max context size for model by name + max_size = self.modelname_to_contextsize(self.model_name) + return max_size - num_tokens From 948e999eff8f8ba8bce47bba940b9352509ca2b4 Mon Sep 17 00:00:00 2001 From: Harrison Chase Date: Sun, 11 Dec 2022 11:07:30 -0800 Subject: [PATCH 03/29] bump version to 0035 (#312) --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 455ae90c..6a610538 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "langchain" -version = "0.0.34" +version = "0.0.35" description = "Building applications with LLMs through composability" authors = [] license = "MIT" From 292f1cfa96a204cfda89f02bb994cd854e3f6d9f Mon Sep 17 00:00:00 2001 From: Harrison Chase Date: Mon, 12 Dec 2022 06:07:40 -0800 Subject: [PATCH 04/29] Harrison/add contributing docs (#315) --- CONTRIBUTING.md | 145 +++++++++++++++++++++++++++++++++++++++++++++ README.md | 153 ++---------------------------------------------- 2 files changed, 150 insertions(+), 148 deletions(-) create mode 100644 CONTRIBUTING.md diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 00000000..98b4d8aa --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,145 @@ +# Contributing to LangChain + +Hi there! Thank you for even being interested in contributing to LangChain. +As an open source project in a rapidly developing field, we are extremely open +to contributions, whether it be in the form of a new feature, improved infra, or better documentation. + +To contribute to this project, please follow a ["fork and pull request"](https://docs.github.com/en/get-started/quickstart/contributing-to-projects) workflow. +Please do not try to push directly to this repo unless you are maintainer. + +## 🗺️Contributing Guidelines + +### 🚩GitHub Issues + +Our [issues](https://github.com/hwchase17/langchain/issues) page is kept up to date +with bugs, improvements, and feature requests. There is a taxonomy of labels to help +with sorting and discovery of issues of interest. These include: + +- prompts: related to prompt tooling/infra. +- llms: related to LLM wrappers/tooling/infra. +- chains +- utilities: related to different types of utilities to integrate with (Python, SQL, etc). +- agents +- memory +- applications: related to example applications to build + +If you start working on an issue, please assign it to yourself. + +If you are adding an issue, please try to keep it focused on a single modular bug/improvement/feature. +If two issues are related, or blocking, please link them rather than keep them as one single one. + +We will try to keep these issues as up to date as possible, though +with the rapid rate of develop in this field some may get out of date. +If you notice this happening, please just let us know. + +### 🙋Getting Help + +Although we try to have a developer setup to make it as easy as possible for others to contribute (see below) +it is possible that some pain point may arise around environment setup, linting, documentation, or other. +Should that occur, please contact a maintainer! Not only do we want to help get you unblocked, +but we also want to make sure that the process is smooth for future contributors. 
+
+In a similar vein, we do enforce certain linting, formatting, and documentation standards in the codebase.
+If you are finding these difficult (or even just annoying) to work with,
+feel free to contact a maintainer for help - we do not want these to get in the way of getting
+good code into the codebase.
+
+### 🏭Release process
+
+As of now, LangChain has an ad hoc release process: releases are cut with high frequency by
+a developer and published to [PyPI](https://pypi.org/project/langchain/).
+
+LangChain follows the [semver](https://semver.org/) versioning standard. However, as pre-1.0 software,
+even patch releases may contain [non-backwards-compatible changes](https://semver.org/#spec-item-4).
+
+If your contribution has made its way into a release, we will want to give you credit on Twitter (only if you want though)!
+If you have a Twitter account you would like us to mention, please let us know in the PR or in another manner.
+
+
+## 🤖Developer Setup
+
+### 🚀Quick Start
+
+This project uses [Poetry](https://python-poetry.org/) as a dependency manager. Check out Poetry's own [documentation on how to install it](https://python-poetry.org/docs/#installation) on your system before proceeding.
+
+To install requirements:
+
+```bash
+poetry install -E all
+```
+
+This will install all requirements for running the package, examples, linting, formatting, and tests. Note the `-E all` flag will install all optional dependencies necessary for integration testing.
+
+Now, you should be able to run the common tasks in the following section.
+
+### ✅Common Tasks
+
+#### Code Formatting
+
+Formatting for this project is a combination of [Black](https://black.readthedocs.io/en/stable/) and [isort](https://pycqa.github.io/isort/).
+
+To run formatting for this project:
+
+```bash
+make format
+```
+
+#### Linting
+
+Linting for this project is a combination of [Black](https://black.readthedocs.io/en/stable/), [isort](https://pycqa.github.io/isort/), [flake8](https://flake8.pycqa.org/en/latest/), and [mypy](http://mypy-lang.org/).
+
+To run linting for this project:
+
+```bash
+make lint
+```
+
+We recognize linting can be annoying - if you do not want to do it, please contact a project maintainer and they can help you with it. We do not want this to be a blocker for good code getting contributed.
+
+#### Testing
+
+Unit tests cover modular logic that does not require calls to outside apis.
+
+To run unit tests:
+
+```bash
+make tests
+```
+
+If you add new logic, please add a unit test.
+
+Integration tests cover logic that requires making calls to outside APIs (often integration with other services).
+
+To run integration tests:
+
+```bash
+make integration_tests
+```
+
+If you add support for a new external API, please add a new integration test.
+
+#### Adding a Jupyter Notebook
+
+If you are adding a Jupyter notebook example, you'll want to install the optional `dev` dependencies.
+
+To install dev dependencies:
+
+```bash
+poetry install --with dev
+```
+
+Launch a notebook:
+
+```bash
+poetry run jupyter notebook
+```
+
+When you run `poetry install`, the `langchain` package is installed as editable in the virtualenv, so your new logic can be imported into the notebook.
+
+#### Contribute Documentation
+
+Docs are largely autogenerated by [sphinx](https://www.sphinx-doc.org/en/master/) from the code.
+
+For that reason, we ask that you add good documentation to all classes and methods.
+ +Similar to linting, we recognize documentation can be annoying - if you do not want to do it, please contact a project maintainer and they can help you with it. We do not want this to be a blocker for good code getting contributed. diff --git a/README.md b/README.md index 4fc020f9..65a0a094 100644 --- a/README.md +++ b/README.md @@ -35,154 +35,11 @@ These are, in increasing order of complexity: 3. Agents 4. Memory -Let's go through these categories and for each one identify key concepts (to clarify terminology) as well as the problems in this area LangChain helps solve. +For more information on these concepts, please see our [full documentation](https://langchain.readthedocs.io/en/latest/?). -### LLMs and Prompts -Calling out to an LLM once is pretty easy, with most of them being behind well documented APIs. -However, there are still some challenges going from that to an application running in production that LangChain attempts to address. +## 🤖 Contributing -**Key Concepts** -- LLM: A large language model, in particular a text-to-text model. -- Prompt: The input to a language model. Typically this is not simply a hardcoded string but rather a combination of a template, some examples, and user input. -- Prompt Template: An object responsible for constructing the final prompt to pass to a LLM. -- Examples: Datapoints that can be included in the prompt in order to give the model more context what to do. -- Few Shot Prompt Template: A subclass of the PromptTemplate class that uses examples. -- Example Selector: A class responsible to selecting examples to use dynamically (depending on user input) in a few shot prompt. +As an open source project in a rapidly developing field, we are extremely open +to contributions, whether it be in the form of a new feature, improved infra, or better documentation. -**Problems Solved** -- Switching costs: by exposing a standard interface for all the top LLM providers, LangChain makes it easy to switch from one provider to another, whether it be for production use cases or just for testing stuff out. -- Prompt management: managing your prompts is easy when you only have one simple one, but can get tricky when you have a bunch or when they start to get more complex. LangChain provides a standard way for storing, constructing, and referencing prompts. -- Prompt optimization: despite the underlying models getting better and better, there is still currently a need for carefully constructing prompts. - -### Chains -Using an LLM in isolation is fine for some simple applications, but many more complex ones require chaining LLMs - either with eachother or with other experts. -LangChain provides several parts to help with that. - -**Key Concepts** -- Tools: APIs designed for assisting with a particular use case (search, databases, Python REPL, etc). Prompt templates, LLMs, and chains can also be considered tools. -- Chains: A combination of multiple tools in a deterministic manner. - -**Problems Solved** -- Standard interface for working with Chains -- Easy way to construct chains of LLMs -- Lots of integrations with other tools that you may want to use in conjunction with LLMs -- End-to-end chains for common workflows (database question/answer, recursive summarization, etc) - -### Agents -Some applications will require not just a predetermined chain of calls to LLMs/other tools, but potentially an unknown chain that depends on the user input. -In these types of chains, there is a “agent” which has access to a suite of tools. 
-Depending on the user input, the agent can then decide which, if any, of these tools to call. - -**Key Concepts** -- Tools: same as above. -- Agent: An LLM-powered class responsible for determining which tools to use and in what order. - - -**Problems Solved** -- Standard agent interfaces -- A selection of powerful agents to choose from -- Common chains that can be used as tools - -### Memory -By default, Chains and Agents are stateless, meaning that they treat each incoming query independently. -In some applications (chatbots being a GREAT example) it is highly important to remember previous interactions, -both at a short term but also at a long term level. The concept of "Memory" exists to do exactly that. - -**Key Concepts** -- Memory: A class that can be added to an Agent or Chain to (1) pull in memory variables before calling that chain/agent, and (2) create new memories after the chain/agent finishes. -- Memory Variables: Variables returned from a Memory class, to be passed into the chain/agent along with the user input. - -**Problems Solved** -- Standard memory interfaces -- A collection of common memory implementations to choose from -- Common chains/agents that use memory (e.g. chatbots) - -## 🤖 Developer Guide - -To begin developing on this project, first clone the repo locally. - -### Quick Start - -This project uses [Poetry](https://python-poetry.org/) as a dependency manager. Check out Poetry's own [documentation on how to install it](https://python-poetry.org/docs/#installation) on your system before proceeding. - -To install requirements: - -```bash -poetry install -E all -``` - -This will install all requirements for running the package, examples, linting, formatting, and tests. Note the `-E all` flag will install all optional dependencies necessary for integration testing. - -Now, you should be able to run the common tasks in the following section. - -### Common Tasks - -#### Code Formatting - -Formatting for this project is a combination of [Black](https://black.readthedocs.io/en/stable/) and [isort](https://pycqa.github.io/isort/). - -To run formatting for this project: - -```bash -make format -``` - -#### Linting - -Linting for this project is a combination of [Black](https://black.readthedocs.io/en/stable/), [isort](https://pycqa.github.io/isort/), [flake8](https://flake8.pycqa.org/en/latest/), and [mypy](http://mypy-lang.org/). - -To run linting for this project: - -```bash -make lint -``` - -We recognize linting can be annoying - if you do not want to do it, please contact a project maintainer and they can help you with it. We do not want this to be a blocker for good code getting contributed. - -#### Testing - -Unit tests cover modular logic that does not require calls to outside apis. - -To run unit tests: - -```bash -make tests -``` - -If you add new logic, please add a unit test. - -Integration tests cover logic that requires making calls to outside APIs (often integration with other services). - -To run integration tests: - -```bash -make integration_tests -``` - -If you add support for a new external API, please add a new integration test. - -#### Adding a Jupyter Notebook - -If you are adding a Jupyter notebook example, you'll want to install the optional `dev` dependencies. 
- -To install dev dependencies: - -```bash -poetry install --with dev -``` - -Launch a notebook: - -```bash -poetry run jupyter notebook -``` - -When you run `poetry install`, the `langchain` package is installed as editable in the virtualenv, so your new logic can be imported into the notebook. - -#### Contribute Documentation - -Docs are largely autogenerated by [sphinx](https://www.sphinx-doc.org/en/master/) from the code. - -For that reason, we ask that you add good documentation to all classes and methods. - -Similar to linting, we recognize documentation can be annoying - if you do not want to do it, please contact a project maintainer and they can help you with it. We do not want this to be a blocker for good code getting contributed. +For detailed information on how to contribute, see [here](CONTRIBUTING.md). From d946be2f3d6455dd7d39725b35bd85358d2e1961 Mon Sep 17 00:00:00 2001 From: Christian Clauss Date: Mon, 12 Dec 2022 15:09:08 +0100 Subject: [PATCH 05/29] Add Python 3.11 to the testing (#323) --- .github/workflows/lint.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 9d62e252..98cd7c38 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -17,6 +17,7 @@ jobs: - "3.8" - "3.9" - "3.10" + - "3.11" steps: - uses: actions/checkout@v3 - name: Install poetry From 2fbb152386cf3a6785156bdedc593c675ce39476 Mon Sep 17 00:00:00 2001 From: Christian Clauss Date: Mon, 12 Dec 2022 16:19:52 +0100 Subject: [PATCH 06/29] Add Python 3.11 to the testing (#324) --- .github/workflows/test.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index e3cb4943..ebda0363 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -17,6 +17,7 @@ jobs: - "3.8" - "3.9" - "3.10" + - "3.11" steps: - uses: actions/checkout@v3 - name: Install poetry From 137356dbec745bb357e441cd69b24412078bb2b6 Mon Sep 17 00:00:00 2001 From: thepok Date: Tue, 13 Dec 2022 14:15:51 +0100 Subject: [PATCH 07/29] -1 max token description for openai (#330) --- langchain/llms/openai.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/langchain/llms/openai.py b/langchain/llms/openai.py index 4cebef1d..2ea83ee7 100644 --- a/langchain/llms/openai.py +++ b/langchain/llms/openai.py @@ -29,7 +29,9 @@ class OpenAI(LLM, BaseModel): temperature: float = 0.7 """What sampling temperature to use.""" max_tokens: int = 256 - """The maximum number of tokens to generate in the completion.""" + """The maximum number of tokens to generate in the completion. 
+ -1 returns as many tokens as possible given the prompt and + the models maximal context size.""" top_p: float = 1 """Total probability mass of tokens to consider at each step.""" frequency_penalty: float = 0 From 8fdcdf4c2fe8be72ec0756077e6f047a263a5f3f Mon Sep 17 00:00:00 2001 From: Ankush Gola <9536492+agola11@users.noreply.github.com> Date: Tue, 13 Dec 2022 08:20:22 -0500 Subject: [PATCH 08/29] add .idea files to gitignore, add zsh note to installation docs (#329) --- .gitignore | 1 + docs/installation.md | 6 ++++++ 2 files changed, 7 insertions(+) diff --git a/.gitignore b/.gitignore index 17d753d6..3252ee1f 100644 --- a/.gitignore +++ b/.gitignore @@ -1,4 +1,5 @@ .vscode/ +.idea/ # Byte-compiled / optimized / DLL files __pycache__/ *.py[cod] diff --git a/docs/installation.md b/docs/installation.md index e1ed80f8..2e7fb797 100644 --- a/docs/installation.md +++ b/docs/installation.md @@ -21,4 +21,10 @@ To install all modules needed for all integrations, run: ``` pip install langchain[all] +``` + +Note that if you are using `zsh`, you'll need to quote square brackets when passing them as an argument to a command, for example: + +``` +pip install 'langchain[all]' ``` \ No newline at end of file From 8861770bd06b9541f9e6c53e72064a322cb2aafa Mon Sep 17 00:00:00 2001 From: Harrison Chase Date: Tue, 13 Dec 2022 05:22:42 -0800 Subject: [PATCH 09/29] expose get_num_tokens method (#327) --- langchain/llms/base.py | 21 +++++++++++++++++++++ langchain/llms/openai.py | 19 +------------------ 2 files changed, 22 insertions(+), 18 deletions(-) diff --git a/langchain/llms/base.py b/langchain/llms/base.py index df81242f..60728841 100644 --- a/langchain/llms/base.py +++ b/langchain/llms/base.py @@ -6,6 +6,27 @@ from typing import Any, List, Mapping, Optional class LLM(ABC): """LLM wrapper should take in a prompt and return a string.""" + def get_num_tokens(self, text: str) -> int: + """Get the number of tokens present in the text.""" + # TODO: this method may not be exact. + # TODO: this method may differ based on model (eg codex). + try: + from transformers import GPT2TokenizerFast + except ImportError: + raise ValueError( + "Could not import transformers python package. " + "This is needed in order to calculate max_tokens_for_prompt. " + "Please it install it with `pip install transformers`." + ) + # create a GPT-3 tokenizer instance + tokenizer = GPT2TokenizerFast.from_pretrained("gpt2") + + # tokenize the text using the GPT-3 tokenizer + tokenized_text = tokenizer.tokenize(text) + + # calculate the number of tokens in the tokenized text + return len(tokenized_text) + @abstractmethod def __call__(self, prompt: str, stop: Optional[List[str]] = None) -> str: """Run the LLM on the given prompt and input.""" diff --git a/langchain/llms/openai.py b/langchain/llms/openai.py index 2ea83ee7..9de56f0c 100644 --- a/langchain/llms/openai.py +++ b/langchain/llms/openai.py @@ -179,24 +179,7 @@ class OpenAI(LLM, BaseModel): max_tokens = openai.max_token_for_prompt("Tell me a joke.") """ - # TODO: this method may not be exact. - # TODO: this method may differ based on model (eg codex). - try: - from transformers import GPT2TokenizerFast - except ImportError: - raise ValueError( - "Could not import transformers python package. " - "This is needed in order to calculate max_tokens_for_prompt. " - "Please it install it with `pip install transformers`." 
- ) - # create a GPT-3 tokenizer instance - tokenizer = GPT2TokenizerFast.from_pretrained("gpt2") - - # tokenize the text using the GPT-3 tokenizer - tokenized_text = tokenizer.tokenize(prompt) - - # calculate the number of tokens in the tokenized text - num_tokens = len(tokenized_text) + num_tokens = self.get_num_tokens(prompt) # get max context size for model by name max_size = self.modelname_to_contextsize(self.model_name) From 482611f4261c5372471f1b06a93e8c6a73467ff0 Mon Sep 17 00:00:00 2001 From: Hunter Gerlach Date: Tue, 13 Dec 2022 08:48:53 -0500 Subject: [PATCH 10/29] unit test / code coverage improvements (#322) This PR has two contributions: 1. Add test for when stop token is found in middle of text 2. Add code coverage tooling and instructions - Add pytest-cov via poetry - Add necessary config files - Add new make instruction for `coverage` - Update README with coverage guidance - Update minor README formatting/spelling Co-authored-by: Hunter Gerlach --- .coveragerc | 2 + .github/workflows/lint.yml | 42 +- .github/workflows/test.yml | 36 +- CONTRIBUTING.md | 29 +- Makefile | 6 + README.md | 9 +- poetry.lock | 4799 ++++++++++++++------------- pyproject.toml | 1 + tests/unit_tests/llms/test_utils.py | 3 + 9 files changed, 2602 insertions(+), 2325 deletions(-) create mode 100644 .coveragerc diff --git a/.coveragerc b/.coveragerc new file mode 100644 index 00000000..c712d259 --- /dev/null +++ b/.coveragerc @@ -0,0 +1,2 @@ +[run] +omit = tests/* diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 98cd7c38..12ec7320 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -6,31 +6,31 @@ on: pull_request: env: - POETRY_VERSION: "1.2.0" + POETRY_VERSION: "1.3.1" jobs: build: runs-on: ubuntu-latest strategy: matrix: - python-version: - - "3.8" - - "3.9" - - "3.10" - - "3.11" + python-version: + - "3.8" + - "3.9" + - "3.10" + - "3.11" steps: - - uses: actions/checkout@v3 - - name: Install poetry - run: | - pipx install poetry==$POETRY_VERSION - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v4 - with: - python-version: ${{ matrix.python-version }} - cache: poetry - - name: Install dependencies - run: | - poetry install - - name: Analysing the code with our lint - run: | - make lint + - uses: actions/checkout@v3 + - name: Install poetry + run: | + pipx install poetry==$POETRY_VERSION + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + cache: poetry + - name: Install dependencies + run: | + poetry install + - name: Analysing the code with our lint + run: | + make lint diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index ebda0363..f33e67ec 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -6,7 +6,7 @@ on: pull_request: env: - POETRY_VERSION: "1.2.0" + POETRY_VERSION: "1.3.1" jobs: build: @@ -14,21 +14,21 @@ jobs: strategy: matrix: python-version: - - "3.8" - - "3.9" - - "3.10" - - "3.11" + - "3.8" + - "3.9" + - "3.10" + - "3.11" steps: - - uses: actions/checkout@v3 - - name: Install poetry - run: pipx install poetry==$POETRY_VERSION - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v4 - with: - python-version: ${{ matrix.python-version }} - cache: 'poetry' - - name: Install dependencies - run: poetry install - - name: Run unit tests - run: | - make tests + - uses: actions/checkout@v3 + - name: Install poetry + run: pipx install poetry==$POETRY_VERSION 
+ - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + cache: "poetry" + - name: Install dependencies + run: poetry install + - name: Run unit tests + run: | + make tests diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 98b4d8aa..ee66483f 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -18,7 +18,7 @@ with sorting and discovery of issues of interest. These include: - prompts: related to prompt tooling/infra. - llms: related to LLM wrappers/tooling/infra. - chains -- utilities: related to different types of utilities to integrate with (Python, SQL, etc). +- utilities: related to different types of utilities to integrate with (Python, SQL, etc.). - agents - memory - applications: related to example applications to build @@ -26,7 +26,7 @@ with sorting and discovery of issues of interest. These include: If you start working on an issue, please assign it to yourself. If you are adding an issue, please try to keep it focused on a single modular bug/improvement/feature. -If two issues are related, or blocking, please link them rather than keep them as one single one. +If the two issues are related, or blocking, please link them rather than keep them as one single one. We will try to keep these issues as up to date as possible, though with the rapid rate of develop in this field some may get out of date. @@ -55,12 +55,11 @@ even patch releases may contain [non-backwards-compatible changes](https://semve If your contribution has made its way into a release, we will want to give you credit on Twitter (only if you want though)! If you have a Twitter account you would like us to mention, please let us know in the PR or in another manner. - ## 🤖Developer Setup ### 🚀Quick Start -This project uses [Poetry](https://python-poetry.org/) as a dependency manager. Check out Poetry's own [documentation on how to install it](https://python-poetry.org/docs/#installation) on your system before proceeding. +This project uses [Poetry](https://python-poetry.org/) as a dependency manager. Check out Poetry's [documentation on how to install it](https://python-poetry.org/docs/#installation) on your system before proceeding. To install requirements: @@ -68,7 +67,7 @@ To install requirements: poetry install -E all ``` -This will install all requirements for running the package, examples, linting, formatting, and tests. Note the `-E all` flag will install all optional dependencies necessary for integration testing. +This will install all requirements for running the package, examples, linting, formatting, tests, and coverage. Note the `-E all` flag will install all optional dependencies necessary for integration testing. Now, you should be able to run the common tasks in the following section. @@ -76,7 +75,7 @@ Now, you should be able to run the common tasks in the following section. #### Code Formatting -Formatting for this project is a combination of [Black](https://black.readthedocs.io/en/stable/) and [isort](https://pycqa.github.io/isort/). +Formatting for this project is done via a combination of [Black](https://black.readthedocs.io/en/stable/) and [isort](https://pycqa.github.io/isort/). To run formatting for this project: @@ -86,7 +85,7 @@ make format #### Linting -Linting for this project is a combination of [Black](https://black.readthedocs.io/en/stable/), [isort](https://pycqa.github.io/isort/), [flake8](https://flake8.pycqa.org/en/latest/), and [mypy](http://mypy-lang.org/). 
+Linting for this project is done via a combination of [Black](https://black.readthedocs.io/en/stable/), [isort](https://pycqa.github.io/isort/), [flake8](https://flake8.pycqa.org/en/latest/), and [mypy](http://mypy-lang.org/). To run linting for this project: @@ -94,11 +93,21 @@ To run linting for this project: make lint ``` -We recognize linting can be annoying - if you do not want to do it, please contact a project maintainer and they can help you with it. We do not want this to be a blocker for good code getting contributed. +We recognize linting can be annoying - if you do not want to do it, please contact a project maintainer, and they can help you with it. We do not want this to be a blocker for good code getting contributed. + +#### Coverage + +Code coverage (i.e. the amount of code that is covered by unit tests) helps identify areas of the code that are potentially more or less brittle. + +To get a report of current coverage, run the following: + +```bash +make coverage +``` #### Testing -Unit tests cover modular logic that does not require calls to outside apis. +Unit tests cover modular logic that does not require calls to outside APIs. To run unit tests: @@ -142,4 +151,4 @@ Docs are largely autogenerated by [sphinx](https://www.sphinx-doc.org/en/master/ For that reason, we ask that you add good documentation to all classes and methods. -Similar to linting, we recognize documentation can be annoying - if you do not want to do it, please contact a project maintainer and they can help you with it. We do not want this to be a blocker for good code getting contributed. +Similar to linting, we recognize documentation can be annoying. If you do not want to do it, please contact a project maintainer, and they can help you with it. We do not want this to be a blocker for good code getting contributed. diff --git a/Makefile b/Makefile index bd3f5cb2..979f6380 100644 --- a/Makefile +++ b/Makefile @@ -1,5 +1,11 @@ .PHONY: format lint tests integration_tests +coverage: + poetry run pytest --cov \ + --cov-config=.coveragerc \ + --cov-report xml \ + --cov-report term-missing:skip-covered + format: poetry run black . poetry run isort . diff --git a/README.md b/README.md index 65a0a094..ed5c8545 100644 --- a/README.md +++ b/README.md @@ -13,23 +13,24 @@ Large language models (LLMs) are emerging as a transformative technology, enabling developers to build applications that they previously could not. But using these LLMs in isolation is often not enough to -create a truly powerful app - the real power comes when you are able to -combine them with other sources of computation or knowledge. +create a truly powerful app - the real power comes when you can combine them with other sources of computation or knowledge. This library is aimed at assisting in the development of those types of applications. ## 📖 Documentation Please see [here](https://langchain.readthedocs.io/en/latest/?) for full documentation on: -- Getting started (installation, setting up environment, simple examples) + +- Getting started (installation, setting up the environment, simple examples) - How-To examples (demos, integrations, helper functions) - Reference (full API docs) -- Resources (high level explanation of core concepts) + Resources (high-level explanation of core concepts) ## 🚀 What can this help with? There are four main areas that LangChain is designed to help with. These are, in increasing order of complexity: + 1. LLM and Prompts 2. Chains 3. 
Agents diff --git a/poetry.lock b/poetry.lock index a6a4d326..8d23f0fb 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,3 +1,5 @@ +# This file is automatically @generated by Poetry and should not be changed by hand. + [[package]] name = "anyio" version = "3.6.2" @@ -5,6 +7,10 @@ description = "High level compatibility layer for multiple asynchronous event lo category = "dev" optional = false python-versions = ">=3.6.2" +files = [ + {file = "anyio-3.6.2-py3-none-any.whl", hash = "sha256:fbbe32bd270d2a2ef3ed1c5d45041250284e31fc0a4df4a5a6071842051a51e3"}, + {file = "anyio-3.6.2.tar.gz", hash = "sha256:25ea0d673ae30af41a0c442f81cf3b38c7e79fdc7b60335a4c14e05eb0947421"}, +] [package.dependencies] idna = ">=2.8" @@ -22,6 +28,10 @@ description = "Disable App Nap on macOS >= 10.9" category = "dev" optional = false python-versions = "*" +files = [ + {file = "appnope-0.1.3-py2.py3-none-any.whl", hash = "sha256:265a455292d0bd8a72453494fa24df5a11eb18373a60c7c0430889f22548605e"}, + {file = "appnope-0.1.3.tar.gz", hash = "sha256:02bd91c4de869fbb1e1c50aafc4098827a7a54ab2f39d9dcba6c9547ed920e24"}, +] [[package]] name = "argon2-cffi" @@ -30,6 +40,10 @@ description = "The secure Argon2 password hashing algorithm." category = "dev" optional = false python-versions = ">=3.6" +files = [ + {file = "argon2-cffi-21.3.0.tar.gz", hash = "sha256:d384164d944190a7dd7ef22c6aa3ff197da12962bd04b17f64d4e93d934dba5b"}, + {file = "argon2_cffi-21.3.0-py3-none-any.whl", hash = "sha256:8c976986f2c5c0e5000919e6de187906cfd81fb1c72bf9d88c01177e77da7f80"}, +] [package.dependencies] argon2-cffi-bindings = "*" @@ -46,2030 +60,7 @@ description = "Low-level CFFI bindings for Argon2" category = "dev" optional = false python-versions = ">=3.6" - -[package.dependencies] -cffi = ">=1.0.1" - -[package.extras] -dev = ["cogapp", "pre-commit", "pytest", "wheel"] -tests = ["pytest"] - -[[package]] -name = "asttokens" -version = "2.2.1" -description = "Annotate AST trees with source code positions" -category = "dev" -optional = false -python-versions = "*" - -[package.dependencies] -six = "*" - -[package.extras] -test = ["astroid", "pytest"] - -[[package]] -name = "async-timeout" -version = "4.0.2" -description = "Timeout context manager for asyncio programs" -category = "main" -optional = true -python-versions = ">=3.6" - -[[package]] -name = "attrs" -version = "22.1.0" -description = "Classes Without Boilerplate" -category = "dev" -optional = false -python-versions = ">=3.5" - -[package.extras] -dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy (>=0.900,!=0.940)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "sphinx", "sphinx-notfound-page", "zope.interface"] -docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"] -tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "zope.interface"] -tests-no-zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins"] - -[[package]] -name = "backcall" -version = "0.2.0" -description = "Specifications for callback functions passed in to an API" -category = "dev" -optional = false -python-versions = "*" - -[[package]] -name = "beautifulsoup4" -version = "4.11.1" -description = "Screen-scraping library" -category = "main" -optional = false -python-versions = ">=3.6.0" - -[package.dependencies] -soupsieve = ">1.2" - -[package.extras] -html5lib = ["html5lib"] -lxml = ["lxml"] 
- -[[package]] -name = "black" -version = "22.10.0" -description = "The uncompromising code formatter." -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -click = ">=8.0.0" -mypy-extensions = ">=0.4.3" -pathspec = ">=0.9.0" -platformdirs = ">=2" -tomli = {version = ">=1.1.0", markers = "python_full_version < \"3.11.0a7\""} -typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""} - -[package.extras] -colorama = ["colorama (>=0.4.3)"] -d = ["aiohttp (>=3.7.4)"] -jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] -uvloop = ["uvloop (>=0.15.2)"] - -[[package]] -name = "bleach" -version = "5.0.1" -description = "An easy safelist-based HTML-sanitizing tool." -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -six = ">=1.9.0" -webencodings = "*" - -[package.extras] -css = ["tinycss2 (>=1.1.0,<1.2)"] -dev = ["Sphinx (==4.3.2)", "black (==22.3.0)", "build (==0.8.0)", "flake8 (==4.0.1)", "hashin (==0.17.0)", "mypy (==0.961)", "pip-tools (==6.6.2)", "pytest (==7.1.2)", "tox (==3.25.0)", "twine (==4.0.1)", "wheel (==0.37.1)"] - -[[package]] -name = "blis" -version = "0.7.9" -description = "The Blis BLAS-like linear algebra library, as a self-contained C-extension." -category = "main" -optional = true -python-versions = "*" - -[package.dependencies] -numpy = ">=1.15.0" - -[[package]] -name = "catalogue" -version = "2.0.8" -description = "Super lightweight function registries for your library" -category = "main" -optional = true -python-versions = ">=3.6" - -[[package]] -name = "certifi" -version = "2022.9.24" -description = "Python package for providing Mozilla's CA Bundle." -category = "main" -optional = false -python-versions = ">=3.6" - -[[package]] -name = "cffi" -version = "1.15.1" -description = "Foreign Function Interface for Python calling C code." -category = "dev" -optional = false -python-versions = "*" - -[package.dependencies] -pycparser = "*" - -[[package]] -name = "charset-normalizer" -version = "2.1.1" -description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." -category = "main" -optional = false -python-versions = ">=3.6.0" - -[package.extras] -unicode-backport = ["unicodedata2"] - -[[package]] -name = "click" -version = "8.1.3" -description = "Composable command line interface toolkit" -category = "main" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -colorama = {version = "*", markers = "platform_system == \"Windows\""} - -[[package]] -name = "colorama" -version = "0.4.6" -description = "Cross-platform colored terminal text." 
-category = "main" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" - -[[package]] -name = "confection" -version = "0.0.3" -description = "The sweetest config system for Python" -category = "main" -optional = true -python-versions = ">=3.6" - -[package.dependencies] -pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<1.11.0" -srsly = ">=2.4.0,<3.0.0" - -[[package]] -name = "cymem" -version = "2.0.7" -description = "Manage calls to calloc/free through Cython" -category = "main" -optional = true -python-versions = "*" - -[[package]] -name = "debugpy" -version = "1.6.4" -description = "An implementation of the Debug Adapter Protocol for Python" -category = "dev" -optional = false -python-versions = ">=3.7" - -[[package]] -name = "decorator" -version = "5.1.1" -description = "Decorators for Humans" -category = "dev" -optional = false -python-versions = ">=3.5" - -[[package]] -name = "defusedxml" -version = "0.7.1" -description = "XML bomb protection for Python stdlib modules" -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" - -[[package]] -name = "dill" -version = "0.3.6" -description = "serialize all of python" -category = "main" -optional = true -python-versions = ">=3.7" - -[package.extras] -graph = ["objgraph (>=1.7.2)"] - -[[package]] -name = "elastic-transport" -version = "8.4.0" -description = "Transport classes and utilities shared among Python Elastic client libraries" -category = "main" -optional = true -python-versions = ">=3.6" - -[package.dependencies] -certifi = "*" -urllib3 = ">=1.26.2,<2" - -[package.extras] -develop = ["aiohttp", "mock", "pytest", "pytest-asyncio", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests", "trustme"] - -[[package]] -name = "elasticsearch" -version = "8.5.2" -description = "Python client for Elasticsearch" -category = "main" -optional = true -python-versions = ">=3.6, <4" - -[package.dependencies] -elastic-transport = ">=8,<9" - -[package.extras] -async = ["aiohttp (>=3,<4)"] -requests = ["requests (>=2.4.0,<3.0.0)"] - -[[package]] -name = "entrypoints" -version = "0.4" -description = "Discover and load entry points from installed packages." -category = "dev" -optional = false -python-versions = ">=3.6" - -[[package]] -name = "exceptiongroup" -version = "1.0.4" -description = "Backport of PEP 654 (exception groups)" -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.extras] -test = ["pytest (>=6)"] - -[[package]] -name = "executing" -version = "1.2.0" -description = "Get the currently executing AST node of a frame, and other information" -category = "dev" -optional = false -python-versions = "*" - -[package.extras] -tests = ["asttokens", "littleutils", "pytest", "rich"] - -[[package]] -name = "faiss-cpu" -version = "1.7.3" -description = "A library for efficient similarity search and clustering of dense vectors." -category = "main" -optional = true -python-versions = "*" - -[[package]] -name = "fastjsonschema" -version = "2.16.2" -description = "Fastest Python implementation of JSON schema" -category = "dev" -optional = false -python-versions = "*" - -[package.extras] -devel = ["colorama", "json-spec", "jsonschema", "pylint", "pytest", "pytest-benchmark", "pytest-cache", "validictory"] - -[[package]] -name = "filelock" -version = "3.8.2" -description = "A platform independent file lock." 
-category = "main" -optional = true -python-versions = ">=3.7" - -[package.extras] -docs = ["furo (>=2022.9.29)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.5)"] -testing = ["covdefaults (>=2.2.2)", "coverage (>=6.5)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-timeout (>=2.1)"] - -[[package]] -name = "flake8" -version = "6.0.0" -description = "the modular source code checker: pep8 pyflakes and co" -category = "dev" -optional = false -python-versions = ">=3.8.1" - -[package.dependencies] -mccabe = ">=0.7.0,<0.8.0" -pycodestyle = ">=2.10.0,<2.11.0" -pyflakes = ">=3.0.0,<3.1.0" - -[[package]] -name = "flake8-docstrings" -version = "1.6.0" -description = "Extension for flake8 which uses pydocstyle to check docstrings" -category = "dev" -optional = false -python-versions = "*" - -[package.dependencies] -flake8 = ">=3" -pydocstyle = ">=2.1" - -[[package]] -name = "greenlet" -version = "2.0.1" -description = "Lightweight in-process concurrent programming" -category = "main" -optional = false -python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*" - -[package.extras] -docs = ["Sphinx", "docutils (<0.18)"] -test = ["faulthandler", "objgraph", "psutil"] - -[[package]] -name = "huggingface-hub" -version = "0.11.1" -description = "Client library to download and publish models, datasets and other repos on the huggingface.co hub" -category = "main" -optional = true -python-versions = ">=3.7.0" - -[package.dependencies] -filelock = "*" -packaging = ">=20.9" -pyyaml = ">=5.1" -requests = "*" -tqdm = "*" -typing-extensions = ">=3.7.4.3" - -[package.extras] -all = ["InquirerPy (==0.3.4)", "Jinja2", "black (==22.3)", "flake8 (>=3.8.3)", "flake8-bugbear", "isort (>=5.5.4)", "jedi", "mypy (==0.982)", "pytest", "pytest-cov", "pytest-env", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3"] -cli = ["InquirerPy (==0.3.4)"] -dev = ["InquirerPy (==0.3.4)", "Jinja2", "black (==22.3)", "flake8 (>=3.8.3)", "flake8-bugbear", "isort (>=5.5.4)", "jedi", "mypy (==0.982)", "pytest", "pytest-cov", "pytest-env", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3"] -fastai = ["fastai (>=2.4)", "fastcore (>=1.3.27)", "toml"] -quality = ["black (==22.3)", "flake8 (>=3.8.3)", "flake8-bugbear", "isort (>=5.5.4)", "mypy (==0.982)"] -tensorflow = ["graphviz", "pydot", "tensorflow"] -testing = ["InquirerPy (==0.3.4)", "Jinja2", "isort (>=5.5.4)", "jedi", "pytest", "pytest-cov", "pytest-env", "soundfile"] -torch = ["torch"] -typing = ["types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3"] - -[[package]] -name = "idna" -version = "3.4" -description = "Internationalized Domain Names in Applications (IDNA)" -category = "main" -optional = false -python-versions = ">=3.5" - -[[package]] -name = "importlib-metadata" -version = "5.1.0" -description = "Read metadata from Python packages" -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -zipp = ">=0.5" - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)"] -perf = ["ipython"] -testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)"] - -[[package]] -name = "importlib-resources" 
-version = "5.10.0" -description = "Read resources from Python packages" -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)"] -testing = ["flake8 (<5)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] - -[[package]] -name = "iniconfig" -version = "1.1.1" -description = "iniconfig: brain-dead simple config-ini parsing" -category = "dev" -optional = false -python-versions = "*" - -[[package]] -name = "ipykernel" -version = "6.17.1" -description = "IPython Kernel for Jupyter" -category = "dev" -optional = false -python-versions = ">=3.8" - -[package.dependencies] -appnope = {version = "*", markers = "platform_system == \"Darwin\""} -debugpy = ">=1.0" -ipython = ">=7.23.1" -jupyter-client = ">=6.1.12" -matplotlib-inline = ">=0.1" -nest-asyncio = "*" -packaging = "*" -psutil = "*" -pyzmq = ">=17" -tornado = ">=6.1" -traitlets = ">=5.1.0" - -[package.extras] -docs = ["myst-parser", "pydata-sphinx-theme", "sphinx", "sphinxcontrib-github-alt"] -test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-cov", "pytest-timeout"] - -[[package]] -name = "ipython" -version = "8.7.0" -description = "IPython: Productive Interactive Computing" -category = "dev" -optional = false -python-versions = ">=3.8" - -[package.dependencies] -appnope = {version = "*", markers = "sys_platform == \"darwin\""} -backcall = "*" -colorama = {version = "*", markers = "sys_platform == \"win32\""} -decorator = "*" -jedi = ">=0.16" -matplotlib-inline = "*" -pexpect = {version = ">4.3", markers = "sys_platform != \"win32\""} -pickleshare = "*" -prompt-toolkit = ">=3.0.11,<3.1.0" -pygments = ">=2.4.0" -stack-data = "*" -traitlets = ">=5" - -[package.extras] -all = ["black", "curio", "docrepr", "ipykernel", "ipyparallel", "ipywidgets", "matplotlib", "matplotlib (!=3.2.0)", "nbconvert", "nbformat", "notebook", "numpy (>=1.20)", "pandas", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio", "qtconsole", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "trio", "typing-extensions"] -black = ["black"] -doc = ["docrepr", "ipykernel", "matplotlib", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "typing-extensions"] -kernel = ["ipykernel"] -nbconvert = ["nbconvert"] -nbformat = ["nbformat"] -notebook = ["ipywidgets", "notebook"] -parallel = ["ipyparallel"] -qtconsole = ["qtconsole"] -test = ["pytest (<7.1)", "pytest-asyncio", "testpath"] -test-extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.20)", "pandas", "pytest (<7.1)", "pytest-asyncio", "testpath", "trio"] - -[[package]] -name = "ipython-genutils" -version = "0.2.0" -description = "Vestigial utilities from IPython" -category = "dev" -optional = false -python-versions = "*" - -[[package]] -name = "ipywidgets" -version = "8.0.2" -description = "Jupyter interactive widgets" -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -ipykernel = ">=4.5.1" -ipython = ">=6.1.0" -jupyterlab-widgets = ">=3.0,<4.0" -traitlets = ">=4.3.1" -widgetsnbextension = ">=4.0,<5.0" - -[package.extras] -test = ["jsonschema", "pytest (>=3.6.0)", "pytest-cov", "pytz"] - -[[package]] -name = 
"isort" -version = "5.10.1" -description = "A Python utility / library to sort Python imports." -category = "dev" -optional = false -python-versions = ">=3.6.1,<4.0" - -[package.extras] -colors = ["colorama (>=0.4.3,<0.5.0)"] -pipfile-deprecated-finder = ["pipreqs", "requirementslib"] -plugins = ["setuptools"] -requirements-deprecated-finder = ["pip-api", "pipreqs"] - -[[package]] -name = "jedi" -version = "0.18.2" -description = "An autocompletion tool for Python that can be used for text editors." -category = "dev" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -parso = ">=0.8.0,<0.9.0" - -[package.extras] -docs = ["Jinja2 (==2.11.3)", "MarkupSafe (==1.1.1)", "Pygments (==2.8.1)", "alabaster (==0.7.12)", "babel (==2.9.1)", "chardet (==4.0.0)", "commonmark (==0.8.1)", "docutils (==0.17.1)", "future (==0.18.2)", "idna (==2.10)", "imagesize (==1.2.0)", "mock (==1.0.1)", "packaging (==20.9)", "pyparsing (==2.4.7)", "pytz (==2021.1)", "readthedocs-sphinx-ext (==2.1.4)", "recommonmark (==0.5.0)", "requests (==2.25.1)", "six (==1.15.0)", "snowballstemmer (==2.1.0)", "sphinx (==1.8.5)", "sphinx-rtd-theme (==0.4.3)", "sphinxcontrib-serializinghtml (==1.1.4)", "sphinxcontrib-websupport (==1.2.4)", "urllib3 (==1.26.4)"] -qa = ["flake8 (==3.8.3)", "mypy (==0.782)"] -testing = ["Django (<3.1)", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] - -[[package]] -name = "jinja2" -version = "3.1.2" -description = "A very fast and expressive template engine." -category = "main" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -MarkupSafe = ">=2.0" - -[package.extras] -i18n = ["Babel (>=2.7)"] - -[[package]] -name = "joblib" -version = "1.2.0" -description = "Lightweight pipelining with Python functions" -category = "main" -optional = true -python-versions = ">=3.7" - -[[package]] -name = "jsonschema" -version = "4.17.3" -description = "An implementation of JSON Schema validation for Python" -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -attrs = ">=17.4.0" -importlib-resources = {version = ">=1.4.0", markers = "python_version < \"3.9\""} -pkgutil-resolve-name = {version = ">=1.3.10", markers = "python_version < \"3.9\""} -pyrsistent = ">=0.14.0,<0.17.0 || >0.17.0,<0.17.1 || >0.17.1,<0.17.2 || >0.17.2" - -[package.extras] -format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] -format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"] - -[[package]] -name = "jupyter" -version = "1.0.0" -description = "Jupyter metapackage. Install all the Jupyter components in one go." 
-category = "dev" -optional = false -python-versions = "*" - -[package.dependencies] -ipykernel = "*" -ipywidgets = "*" -jupyter-console = "*" -nbconvert = "*" -notebook = "*" -qtconsole = "*" - -[[package]] -name = "jupyter-client" -version = "7.4.8" -description = "Jupyter protocol implementation and client libraries" -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -entrypoints = "*" -jupyter-core = ">=4.9.2" -nest-asyncio = ">=1.5.4" -python-dateutil = ">=2.8.2" -pyzmq = ">=23.0" -tornado = ">=6.2" -traitlets = "*" - -[package.extras] -doc = ["ipykernel", "myst-parser", "sphinx (>=1.3.6)", "sphinx-rtd-theme", "sphinxcontrib-github-alt"] -test = ["codecov", "coverage", "ipykernel (>=6.12)", "ipython", "mypy", "pre-commit", "pytest", "pytest-asyncio (>=0.18)", "pytest-cov", "pytest-timeout"] - -[[package]] -name = "jupyter-console" -version = "6.4.4" -description = "Jupyter terminal console" -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -ipykernel = "*" -ipython = "*" -jupyter-client = ">=7.0.0" -prompt-toolkit = ">=2.0.0,<3.0.0 || >3.0.0,<3.0.1 || >3.0.1,<3.1.0" -pygments = "*" - -[package.extras] -test = ["pexpect"] - -[[package]] -name = "jupyter-core" -version = "5.1.0" -description = "Jupyter core package. A base package on which Jupyter projects rely." -category = "dev" -optional = false -python-versions = ">=3.8" - -[package.dependencies] -platformdirs = ">=2.5" -pywin32 = {version = ">=1.0", markers = "sys_platform == \"win32\" and platform_python_implementation != \"PyPy\""} -traitlets = ">=5.3" - -[package.extras] -docs = ["myst-parser", "sphinxcontrib-github-alt", "traitlets"] -test = ["ipykernel", "pre-commit", "pytest", "pytest-cov", "pytest-timeout"] - -[[package]] -name = "jupyter-server" -version = "1.23.3" -description = "The backend—i.e. core services, APIs, and REST endpoints—to Jupyter web applications." -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -anyio = ">=3.1.0,<4" -argon2-cffi = "*" -jinja2 = "*" -jupyter-client = ">=6.1.12" -jupyter-core = ">=4.7.0" -nbconvert = ">=6.4.4" -nbformat = ">=5.2.0" -packaging = "*" -prometheus-client = "*" -pywinpty = {version = "*", markers = "os_name == \"nt\""} -pyzmq = ">=17" -Send2Trash = "*" -terminado = ">=0.8.3" -tornado = ">=6.1.0" -traitlets = ">=5.1" -websocket-client = "*" - -[package.extras] -test = ["coverage", "ipykernel", "pre-commit", "pytest (>=7.0)", "pytest-console-scripts", "pytest-cov", "pytest-mock", "pytest-timeout", "pytest-tornasync", "requests"] - -[[package]] -name = "jupyterlab-pygments" -version = "0.2.2" -description = "Pygments theme using JupyterLab CSS variables" -category = "dev" -optional = false -python-versions = ">=3.7" - -[[package]] -name = "jupyterlab-widgets" -version = "3.0.3" -description = "Jupyter interactive widgets for JupyterLab" -category = "dev" -optional = false -python-versions = ">=3.7" - -[[package]] -name = "langcodes" -version = "3.3.0" -description = "Tools for labeling human languages with IETF language tags" -category = "main" -optional = true -python-versions = ">=3.6" - -[package.extras] -data = ["language-data (>=1.1,<2.0)"] - -[[package]] -name = "manifest-ml" -version = "0.0.1" -description = "Manifest for Prompt Programming Foundation Models." 
-category = "main" -optional = true -python-versions = ">=3.8.0" - -[package.dependencies] -dill = ">=0.3.5" -redis = ">=4.3.1" -requests = ">=2.27.1" -sqlitedict = ">=2.0.0" -tqdm = ">=4.64.0" - -[package.extras] -all = ["Flask (>=2.1.2)", "accelerate (>=0.10.0)", "autopep8 (>=1.6.0)", "black (>=22.3.0)", "docformatter (>=1.4)", "flake8 (>=4.0.0)", "flake8-docstrings (>=1.6.0)", "isort (>=5.9.3)", "mypy (>=0.950)", "nbsphinx (>=0.8.0)", "pep8-naming (>=0.12.1)", "pre-commit (>=2.14.0)", "pytest (>=7.0.0)", "pytest-cov (>=3.0.0)", "python-dotenv (>=0.20.0)", "recommonmark (>=0.7.1)", "sphinx-autobuild", "sphinx-rtd-theme (>=0.5.1)", "torch (>=1.8.0)", "transformers (>=4.20.0)", "twine", "types-PyYAML (>=6.0.7)", "types-protobuf (>=3.19.21)", "types-python-dateutil (>=2.8.16)", "types-redis (>=4.2.6)", "types-requests (>=2.27.29)", "types-setuptools (>=57.4.17)"] -api = ["Flask (>=2.1.2)", "accelerate (>=0.10.0)", "torch (>=1.8.0)", "transformers (>=4.20.0)"] -dev = ["autopep8 (>=1.6.0)", "black (>=22.3.0)", "docformatter (>=1.4)", "flake8 (>=4.0.0)", "flake8-docstrings (>=1.6.0)", "isort (>=5.9.3)", "mypy (>=0.950)", "nbsphinx (>=0.8.0)", "pep8-naming (>=0.12.1)", "pre-commit (>=2.14.0)", "pytest (>=7.0.0)", "pytest-cov (>=3.0.0)", "python-dotenv (>=0.20.0)", "recommonmark (>=0.7.1)", "sphinx-autobuild", "sphinx-rtd-theme (>=0.5.1)", "twine", "types-PyYAML (>=6.0.7)", "types-protobuf (>=3.19.21)", "types-python-dateutil (>=2.8.16)", "types-redis (>=4.2.6)", "types-requests (>=2.27.29)", "types-setuptools (>=57.4.17)"] - -[[package]] -name = "markupsafe" -version = "2.1.1" -description = "Safely add untrusted strings to HTML/XML markup." -category = "main" -optional = false -python-versions = ">=3.7" - -[[package]] -name = "matplotlib-inline" -version = "0.1.6" -description = "Inline Matplotlib backend for Jupyter" -category = "dev" -optional = false -python-versions = ">=3.5" - -[package.dependencies] -traitlets = "*" - -[[package]] -name = "mccabe" -version = "0.7.0" -description = "McCabe checker, plugin for flake8" -category = "dev" -optional = false -python-versions = ">=3.6" - -[[package]] -name = "mistune" -version = "2.0.4" -description = "A sane Markdown parser with useful plugins and renderers" -category = "dev" -optional = false -python-versions = "*" - -[[package]] -name = "murmurhash" -version = "1.0.9" -description = "Cython bindings for MurmurHash" -category = "main" -optional = true -python-versions = ">=3.6" - -[[package]] -name = "mypy" -version = "0.991" -description = "Optional static typing for Python" -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -mypy-extensions = ">=0.4.3" -tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typing-extensions = ">=3.10" - -[package.extras] -dmypy = ["psutil (>=4.0)"] -install-types = ["pip"] -python2 = ["typed-ast (>=1.4.0,<2)"] -reports = ["lxml"] - -[[package]] -name = "mypy-extensions" -version = "0.4.3" -description = "Experimental type system extensions for programs checked with the mypy typechecker." 
-category = "dev" -optional = false -python-versions = "*" - -[[package]] -name = "nbclassic" -version = "0.4.8" -description = "A web-based notebook environment for interactive computing" -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -argon2-cffi = "*" -ipykernel = "*" -ipython-genutils = "*" -jinja2 = "*" -jupyter-client = ">=6.1.1" -jupyter-core = ">=4.6.1" -jupyter-server = ">=1.8" -nbconvert = ">=5" -nbformat = "*" -nest-asyncio = ">=1.5" -notebook-shim = ">=0.1.0" -prometheus-client = "*" -pyzmq = ">=17" -Send2Trash = ">=1.8.0" -terminado = ">=0.8.3" -tornado = ">=6.1" -traitlets = ">=4.2.1" - -[package.extras] -docs = ["myst-parser", "nbsphinx", "sphinx", "sphinx-rtd-theme", "sphinxcontrib-github-alt"] -json-logging = ["json-logging"] -test = ["coverage", "nbval", "pytest", "pytest-cov", "pytest-playwright", "pytest-tornasync", "requests", "requests-unixsocket", "testpath"] - -[[package]] -name = "nbclient" -version = "0.7.2" -description = "A client library for executing notebooks. Formerly nbconvert's ExecutePreprocessor." -category = "dev" -optional = false -python-versions = ">=3.7.0" - -[package.dependencies] -jupyter-client = ">=6.1.12" -jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" -nbformat = ">=5.1" -traitlets = ">=5.3" - -[package.extras] -dev = ["pre-commit"] -docs = ["autodoc-traits", "mock", "moto", "myst-parser", "nbclient[test]", "sphinx (>=1.7)", "sphinx-book-theme"] -test = ["ipykernel", "ipython", "ipywidgets", "nbconvert (>=7.0.0)", "pytest (>=7.0)", "pytest-asyncio", "pytest-cov (>=4.0)", "testpath", "xmltodict"] - -[[package]] -name = "nbconvert" -version = "7.2.6" -description = "Converting Jupyter Notebooks" -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -beautifulsoup4 = "*" -bleach = "*" -defusedxml = "*" -importlib-metadata = {version = ">=3.6", markers = "python_version < \"3.10\""} -jinja2 = ">=3.0" -jupyter-core = ">=4.7" -jupyterlab-pygments = "*" -markupsafe = ">=2.0" -mistune = ">=2.0.3,<3" -nbclient = ">=0.5.0" -nbformat = ">=5.1" -packaging = "*" -pandocfilters = ">=1.4.1" -pygments = ">=2.4.1" -tinycss2 = "*" -traitlets = ">=5.0" - -[package.extras] -all = ["nbconvert[docs,qtpdf,serve,test,webpdf]"] -docs = ["ipykernel", "ipython", "myst-parser", "nbsphinx (>=0.2.12)", "pydata-sphinx-theme", "sphinx (==5.0.2)"] -qtpdf = ["nbconvert[qtpng]"] -qtpng = ["pyqtwebengine (>=5.15)"] -serve = ["tornado (>=6.1)"] -test = ["ipykernel", "ipywidgets (>=7)", "pre-commit", "pyppeteer (>=1,<1.1)", "pytest", "pytest-dependency"] -webpdf = ["pyppeteer (>=1,<1.1)"] - -[[package]] -name = "nbformat" -version = "5.7.0" -description = "The Jupyter Notebook format" -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -fastjsonschema = "*" -jsonschema = ">=2.6" -jupyter-core = "*" -traitlets = ">=5.1" - -[package.extras] -test = ["check-manifest", "pep440", "pre-commit", "pytest", "testpath"] - -[[package]] -name = "nest-asyncio" -version = "1.5.6" -description = "Patch asyncio to allow nested event loops" -category = "dev" -optional = false -python-versions = ">=3.5" - -[[package]] -name = "nltk" -version = "3.7" -description = "Natural Language Toolkit" -category = "main" -optional = true -python-versions = ">=3.7" - -[package.dependencies] -click = "*" -joblib = "*" -regex = ">=2021.8.3" -tqdm = "*" - -[package.extras] -all = ["matplotlib", "numpy", "pyparsing", "python-crfsuite", "requests", "scikit-learn", "scipy", "twython"] -corenlp = ["requests"] 
-machine-learning = ["numpy", "python-crfsuite", "scikit-learn", "scipy"] -plot = ["matplotlib"] -tgrep = ["pyparsing"] -twitter = ["twython"] - -[[package]] -name = "notebook" -version = "6.5.2" -description = "A web-based notebook environment for interactive computing" -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -argon2-cffi = "*" -ipykernel = "*" -ipython-genutils = "*" -jinja2 = "*" -jupyter-client = ">=5.3.4" -jupyter-core = ">=4.6.1" -nbclassic = ">=0.4.7" -nbconvert = ">=5" -nbformat = "*" -nest-asyncio = ">=1.5" -prometheus-client = "*" -pyzmq = ">=17" -Send2Trash = ">=1.8.0" -terminado = ">=0.8.3" -tornado = ">=6.1" -traitlets = ">=4.2.1" - -[package.extras] -docs = ["myst-parser", "nbsphinx", "sphinx", "sphinx-rtd-theme", "sphinxcontrib-github-alt"] -json-logging = ["json-logging"] -test = ["coverage", "nbval", "pytest", "pytest-cov", "requests", "requests-unixsocket", "selenium (==4.1.5)", "testpath"] - -[[package]] -name = "notebook-shim" -version = "0.2.2" -description = "A shim layer for notebook traits and config" -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -jupyter-server = ">=1.8,<3" - -[package.extras] -test = ["pytest", "pytest-console-scripts", "pytest-tornasync"] - -[[package]] -name = "numpy" -version = "1.23.5" -description = "NumPy is the fundamental package for array computing with Python." -category = "main" -optional = false -python-versions = ">=3.8" - -[[package]] -name = "packaging" -version = "21.3" -description = "Core utilities for Python packages" -category = "main" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" - -[[package]] -name = "pandocfilters" -version = "1.5.0" -description = "Utilities for writing pandoc filters in python" -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" - -[[package]] -name = "parso" -version = "0.8.3" -description = "A Python Parser" -category = "dev" -optional = false -python-versions = ">=3.6" - -[package.extras] -qa = ["flake8 (==3.8.3)", "mypy (==0.782)"] -testing = ["docopt", "pytest (<6.0.0)"] - -[[package]] -name = "pathspec" -version = "0.10.2" -description = "Utility library for gitignore style pattern matching of file paths." -category = "dev" -optional = false -python-versions = ">=3.7" - -[[package]] -name = "pathy" -version = "0.10.0" -description = "pathlib.Path subclasses for local and cloud bucket storage" -category = "main" -optional = true -python-versions = ">= 3.6" - -[package.dependencies] -smart-open = ">=5.2.1,<6.0.0" -typer = ">=0.3.0,<1.0.0" - -[package.extras] -all = ["azure-storage-blob", "boto3", "google-cloud-storage (>=1.26.0,<2.0.0)", "mock", "pytest", "pytest-coverage", "typer-cli"] -azure = ["azure-storage-blob"] -gcs = ["google-cloud-storage (>=1.26.0,<2.0.0)"] -s3 = ["boto3"] -test = ["mock", "pytest", "pytest-coverage", "typer-cli"] - -[[package]] -name = "pexpect" -version = "4.8.0" -description = "Pexpect allows easy control of interactive console applications." -category = "dev" -optional = false -python-versions = "*" - -[package.dependencies] -ptyprocess = ">=0.5" - -[[package]] -name = "pickleshare" -version = "0.7.5" -description = "Tiny 'shelve'-like database with concurrency support" -category = "dev" -optional = false -python-versions = "*" - -[[package]] -name = "pkgutil-resolve-name" -version = "1.3.10" -description = "Resolve a name to an object." 
-category = "dev" -optional = false -python-versions = ">=3.6" - -[[package]] -name = "platformdirs" -version = "2.5.4" -description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.extras] -docs = ["furo (>=2022.9.29)", "proselint (>=0.13)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.4)"] -test = ["appdirs (==1.4.4)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"] - -[[package]] -name = "playwright" -version = "1.28.0" -description = "A high-level API to automate web browsers" -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -greenlet = "2.0.1" -pyee = "9.0.4" -typing-extensions = {version = "*", markers = "python_version <= \"3.8\""} - -[[package]] -name = "pluggy" -version = "1.0.0" -description = "plugin and hook calling mechanisms for python" -category = "dev" -optional = false -python-versions = ">=3.6" - -[package.extras] -dev = ["pre-commit", "tox"] -testing = ["pytest", "pytest-benchmark"] - -[[package]] -name = "preshed" -version = "3.0.8" -description = "Cython hash table that trusts the keys are pre-hashed" -category = "main" -optional = true -python-versions = ">=3.6" - -[package.dependencies] -cymem = ">=2.0.2,<2.1.0" -murmurhash = ">=0.28.0,<1.1.0" - -[[package]] -name = "prometheus-client" -version = "0.15.0" -description = "Python client for the Prometheus monitoring system." -category = "dev" -optional = false -python-versions = ">=3.6" - -[package.extras] -twisted = ["twisted"] - -[[package]] -name = "prompt-toolkit" -version = "3.0.33" -description = "Library for building powerful interactive command lines in Python" -category = "dev" -optional = false -python-versions = ">=3.6.2" - -[package.dependencies] -wcwidth = "*" - -[[package]] -name = "psutil" -version = "5.9.4" -description = "Cross-platform lib for process and system monitoring in Python." 
-category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" - -[package.extras] -test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] - -[[package]] -name = "ptyprocess" -version = "0.7.0" -description = "Run a subprocess in a pseudo terminal" -category = "dev" -optional = false -python-versions = "*" - -[[package]] -name = "pure-eval" -version = "0.2.2" -description = "Safely evaluate AST nodes without side effects" -category = "dev" -optional = false -python-versions = "*" - -[package.extras] -tests = ["pytest"] - -[[package]] -name = "py" -version = "1.11.0" -description = "library with cross-python path, ini-parsing, io, code, log facilities" -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" - -[[package]] -name = "pycodestyle" -version = "2.10.0" -description = "Python style guide checker" -category = "dev" -optional = false -python-versions = ">=3.6" - -[[package]] -name = "pycparser" -version = "2.21" -description = "C parser in Python" -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" - -[[package]] -name = "pydantic" -version = "1.10.2" -description = "Data validation and settings management using python type hints" -category = "main" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -typing-extensions = ">=4.1.0" - -[package.extras] -dotenv = ["python-dotenv (>=0.10.4)"] -email = ["email-validator (>=1.0.3)"] - -[[package]] -name = "pydocstyle" -version = "6.1.1" -description = "Python docstring style checker" -category = "dev" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -snowballstemmer = "*" - -[package.extras] -toml = ["toml"] - -[[package]] -name = "pyee" -version = "9.0.4" -description = "A port of node.js's EventEmitter to python." -category = "dev" -optional = false -python-versions = "*" - -[package.dependencies] -typing-extensions = "*" - -[[package]] -name = "pyflakes" -version = "3.0.1" -description = "passive checker of Python programs" -category = "dev" -optional = false -python-versions = ">=3.6" - -[[package]] -name = "pygments" -version = "2.13.0" -description = "Pygments is a syntax highlighting package written in Python." 
-category = "dev" -optional = false -python-versions = ">=3.6" - -[package.extras] -plugins = ["importlib-metadata"] - -[[package]] -name = "pyparsing" -version = "3.0.9" -description = "pyparsing module - Classes and methods to define and execute parsing grammars" -category = "main" -optional = false -python-versions = ">=3.6.8" - -[package.extras] -diagrams = ["jinja2", "railroad-diagrams"] - -[[package]] -name = "pyrsistent" -version = "0.19.2" -description = "Persistent/Functional/Immutable data structures" -category = "dev" -optional = false -python-versions = ">=3.7" - -[[package]] -name = "pytest" -version = "7.2.0" -description = "pytest: simple powerful testing with Python" -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -attrs = ">=19.2.0" -colorama = {version = "*", markers = "sys_platform == \"win32\""} -exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} -iniconfig = "*" -packaging = "*" -pluggy = ">=0.12,<2.0" -tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} - -[package.extras] -testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"] - -[[package]] -name = "pytest-dotenv" -version = "0.5.2" -description = "A py.test plugin that parses environment files before running tests" -category = "dev" -optional = false -python-versions = "*" - -[package.dependencies] -pytest = ">=5.0.0" -python-dotenv = ">=0.9.1" - -[[package]] -name = "python-dateutil" -version = "2.8.2" -description = "Extensions to the standard Python datetime module" -category = "dev" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" - -[package.dependencies] -six = ">=1.5" - -[[package]] -name = "python-dotenv" -version = "0.21.0" -description = "Read key-value pairs from a .env file and set them as environment variables" -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.extras] -cli = ["click (>=5.0)"] - -[[package]] -name = "pywin32" -version = "305" -description = "Python for Window Extensions" -category = "dev" -optional = false -python-versions = "*" - -[[package]] -name = "pywinpty" -version = "2.0.9" -description = "Pseudo terminal support for Windows from Python." -category = "dev" -optional = false -python-versions = ">=3.7" - -[[package]] -name = "pyyaml" -version = "6.0" -description = "YAML parser and emitter for Python" -category = "main" -optional = false -python-versions = ">=3.6" - -[[package]] -name = "pyzmq" -version = "24.0.1" -description = "Python bindings for 0MQ" -category = "dev" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -cffi = {version = "*", markers = "implementation_name == \"pypy\""} -py = {version = "*", markers = "implementation_name == \"pypy\""} - -[[package]] -name = "qtconsole" -version = "5.4.0" -description = "Jupyter Qt console" -category = "dev" -optional = false -python-versions = ">= 3.7" - -[package.dependencies] -ipykernel = ">=4.1" -ipython-genutils = "*" -jupyter-client = ">=4.1" -jupyter-core = "*" -pygments = "*" -pyzmq = ">=17.1" -qtpy = ">=2.0.1" -traitlets = "<5.2.1 || >5.2.1,<5.2.2 || >5.2.2" - -[package.extras] -doc = ["Sphinx (>=1.3)"] -test = ["flaky", "pytest", "pytest-qt"] - -[[package]] -name = "qtpy" -version = "2.3.0" -description = "Provides an abstraction layer on top of the various Qt bindings (PyQt5/6 and PySide2/6)." 
-category = "dev" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -packaging = "*" - -[package.extras] -test = ["pytest (>=6,!=7.0.0,!=7.0.1)", "pytest-cov (>=3.0.0)", "pytest-qt"] - -[[package]] -name = "redis" -version = "4.4.0" -description = "Python client for Redis database and key-value store" -category = "main" -optional = true -python-versions = ">=3.7" - -[package.dependencies] -async-timeout = ">=4.0.2" - -[package.extras] -hiredis = ["hiredis (>=1.0.0)"] -ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==20.0.1)", "requests (>=2.26.0)"] - -[[package]] -name = "regex" -version = "2022.10.31" -description = "Alternative regular expression module, to replace re." -category = "main" -optional = true -python-versions = ">=3.6" - -[[package]] -name = "requests" -version = "2.28.1" -description = "Python HTTP for Humans." -category = "main" -optional = false -python-versions = ">=3.7, <4" - -[package.dependencies] -certifi = ">=2017.4.17" -charset-normalizer = ">=2,<3" -idna = ">=2.5,<4" -urllib3 = ">=1.21.1,<1.27" - -[package.extras] -socks = ["PySocks (>=1.5.6,!=1.5.7)"] -use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] - -[[package]] -name = "send2trash" -version = "1.8.0" -description = "Send file to trash natively under Mac OS X, Windows and Linux." -category = "dev" -optional = false -python-versions = "*" - -[package.extras] -nativelib = ["pyobjc-framework-Cocoa", "pywin32"] -objc = ["pyobjc-framework-Cocoa"] -win32 = ["pywin32"] - -[[package]] -name = "setuptools" -version = "65.6.3" -description = "Easily download, build, install, upgrade, and uninstall Python packages" -category = "main" -optional = true -python-versions = ">=3.7" - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] - -[[package]] -name = "six" -version = "1.16.0" -description = "Python 2 and 3 compatibility utilities" -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" - -[[package]] -name = "smart-open" -version = "5.2.1" -description = "Utils for streaming large files (S3, HDFS, GCS, Azure Blob Storage, gzip, bz2...)" -category = "main" -optional = true -python-versions = ">=3.6,<4.0" - -[package.extras] -all = ["azure-common", "azure-core", "azure-storage-blob", "boto3", "google-cloud-storage", "requests"] -azure = ["azure-common", "azure-core", "azure-storage-blob"] -gcs = ["google-cloud-storage"] -http = ["requests"] -s3 = ["boto3"] -test = ["azure-common", "azure-core", "azure-storage-blob", "boto3", "google-cloud-storage", "moto[server] (==1.3.14)", "parameterizedtestcase", "paramiko", "pathlib2", "pytest", "pytest-rerunfailures", "requests", 
"responses"] -webhdfs = ["requests"] - -[[package]] -name = "sniffio" -version = "1.3.0" -description = "Sniff out which async library your code is running under" -category = "dev" -optional = false -python-versions = ">=3.7" - -[[package]] -name = "snowballstemmer" -version = "2.2.0" -description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." -category = "dev" -optional = false -python-versions = "*" - -[[package]] -name = "soupsieve" -version = "2.3.2.post1" -description = "A modern CSS selector implementation for Beautiful Soup." -category = "main" -optional = false -python-versions = ">=3.6" - -[[package]] -name = "spacy" -version = "3.4.3" -description = "Industrial-strength Natural Language Processing (NLP) in Python" -category = "main" -optional = true -python-versions = ">=3.6" - -[package.dependencies] -catalogue = ">=2.0.6,<2.1.0" -cymem = ">=2.0.2,<2.1.0" -jinja2 = "*" -langcodes = ">=3.2.0,<4.0.0" -murmurhash = ">=0.28.0,<1.1.0" -numpy = ">=1.15.0" -packaging = ">=20.0" -pathy = ">=0.3.5" -preshed = ">=3.0.2,<3.1.0" -pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<1.11.0" -requests = ">=2.13.0,<3.0.0" -setuptools = "*" -spacy-legacy = ">=3.0.10,<3.1.0" -spacy-loggers = ">=1.0.0,<2.0.0" -srsly = ">=2.4.3,<3.0.0" -thinc = ">=8.1.0,<8.2.0" -tqdm = ">=4.38.0,<5.0.0" -typer = ">=0.3.0,<0.8.0" -wasabi = ">=0.9.1,<1.1.0" - -[package.extras] -apple = ["thinc-apple-ops (>=0.1.0.dev0,<1.0.0)"] -cuda = ["cupy (>=5.0.0b4,<12.0.0)"] -cuda-autodetect = ["cupy-wheel (>=11.0.0,<12.0.0)"] -cuda100 = ["cupy-cuda100 (>=5.0.0b4,<12.0.0)"] -cuda101 = ["cupy-cuda101 (>=5.0.0b4,<12.0.0)"] -cuda102 = ["cupy-cuda102 (>=5.0.0b4,<12.0.0)"] -cuda110 = ["cupy-cuda110 (>=5.0.0b4,<12.0.0)"] -cuda111 = ["cupy-cuda111 (>=5.0.0b4,<12.0.0)"] -cuda112 = ["cupy-cuda112 (>=5.0.0b4,<12.0.0)"] -cuda113 = ["cupy-cuda113 (>=5.0.0b4,<12.0.0)"] -cuda114 = ["cupy-cuda114 (>=5.0.0b4,<12.0.0)"] -cuda115 = ["cupy-cuda115 (>=5.0.0b4,<12.0.0)"] -cuda116 = ["cupy-cuda116 (>=5.0.0b4,<12.0.0)"] -cuda117 = ["cupy-cuda117 (>=5.0.0b4,<12.0.0)"] -cuda11x = ["cupy-cuda11x (>=11.0.0,<12.0.0)"] -cuda80 = ["cupy-cuda80 (>=5.0.0b4,<12.0.0)"] -cuda90 = ["cupy-cuda90 (>=5.0.0b4,<12.0.0)"] -cuda91 = ["cupy-cuda91 (>=5.0.0b4,<12.0.0)"] -cuda92 = ["cupy-cuda92 (>=5.0.0b4,<12.0.0)"] -ja = ["sudachidict-core (>=20211220)", "sudachipy (>=0.5.2,!=0.6.1)"] -ko = ["natto-py (>=0.9.0)"] -lookups = ["spacy-lookups-data (>=1.0.3,<1.1.0)"] -ray = ["spacy-ray (>=0.1.0,<1.0.0)"] -th = ["pythainlp (>=2.0)"] -transformers = ["spacy-transformers (>=1.1.2,<1.2.0)"] - -[[package]] -name = "spacy-legacy" -version = "3.0.10" -description = "Legacy registered functions for spaCy backwards compatibility" -category = "main" -optional = true -python-versions = ">=3.6" - -[[package]] -name = "spacy-loggers" -version = "1.0.3" -description = "Logging utilities for SpaCy" -category = "main" -optional = true -python-versions = ">=3.6" - -[package.dependencies] -wasabi = ">=0.8.1,<1.1.0" - -[[package]] -name = "sqlalchemy" -version = "1.4.44" -description = "Database Abstraction Library" -category = "main" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" - -[package.dependencies] -greenlet = {version = "!=0.4.17", markers = "python_version >= \"3\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == 
\"WIN32\")"} - -[package.extras] -aiomysql = ["aiomysql", "greenlet (!=0.4.17)"] -aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] -asyncio = ["greenlet (!=0.4.17)"] -asyncmy = ["asyncmy (>=0.2.3,!=0.2.4)", "greenlet (!=0.4.17)"] -mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2)"] -mssql = ["pyodbc"] -mssql-pymssql = ["pymssql"] -mssql-pyodbc = ["pyodbc"] -mypy = ["mypy (>=0.910)", "sqlalchemy2-stubs"] -mysql = ["mysqlclient (>=1.4.0)", "mysqlclient (>=1.4.0,<2)"] -mysql-connector = ["mysql-connector-python"] -oracle = ["cx_oracle (>=7)", "cx_oracle (>=7,<8)"] -postgresql = ["psycopg2 (>=2.7)"] -postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] -postgresql-pg8000 = ["pg8000 (>=1.16.6,!=1.29.0)"] -postgresql-psycopg2binary = ["psycopg2-binary"] -postgresql-psycopg2cffi = ["psycopg2cffi"] -pymysql = ["pymysql", "pymysql (<1)"] -sqlcipher = ["sqlcipher3_binary"] - -[[package]] -name = "sqlitedict" -version = "2.1.0" -description = "Persistent dict in Python, backed up by sqlite3 and pickle, multithread-safe." -category = "main" -optional = true -python-versions = "*" - -[[package]] -name = "srsly" -version = "2.4.5" -description = "Modern high-performance serialization utilities for Python" -category = "main" -optional = true -python-versions = ">=3.6" - -[package.dependencies] -catalogue = ">=2.0.3,<2.1.0" - -[[package]] -name = "stack-data" -version = "0.6.2" -description = "Extract data from python stack frames and tracebacks for informative displays" -category = "dev" -optional = false -python-versions = "*" - -[package.dependencies] -asttokens = ">=2.1.0" -executing = ">=1.2.0" -pure-eval = "*" - -[package.extras] -tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"] - -[[package]] -name = "terminado" -version = "0.17.1" -description = "Tornado websocket backend for the Xterm.js Javascript terminal emulator library." 
-category = "dev" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -ptyprocess = {version = "*", markers = "os_name != \"nt\""} -pywinpty = {version = ">=1.1.0", markers = "os_name == \"nt\""} -tornado = ">=6.1.0" - -[package.extras] -docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] -test = ["pre-commit", "pytest (>=7.0)", "pytest-timeout"] - -[[package]] -name = "thinc" -version = "8.1.5" -description = "A refreshing functional take on deep learning, compatible with your favorite libraries" -category = "main" -optional = true -python-versions = ">=3.6" - -[package.dependencies] -blis = ">=0.7.8,<0.8.0" -catalogue = ">=2.0.4,<2.1.0" -confection = ">=0.0.1,<1.0.0" -cymem = ">=2.0.2,<2.1.0" -murmurhash = ">=1.0.2,<1.1.0" -numpy = ">=1.15.0" -preshed = ">=3.0.2,<3.1.0" -pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<1.11.0" -setuptools = "*" -srsly = ">=2.4.0,<3.0.0" -wasabi = ">=0.8.1,<1.1.0" - -[package.extras] -cuda = ["cupy (>=5.0.0b4)"] -cuda-autodetect = ["cupy-wheel (>=11.0.0)"] -cuda100 = ["cupy-cuda100 (>=5.0.0b4)"] -cuda101 = ["cupy-cuda101 (>=5.0.0b4)"] -cuda102 = ["cupy-cuda102 (>=5.0.0b4)"] -cuda110 = ["cupy-cuda110 (>=5.0.0b4)"] -cuda111 = ["cupy-cuda111 (>=5.0.0b4)"] -cuda112 = ["cupy-cuda112 (>=5.0.0b4)"] -cuda113 = ["cupy-cuda113 (>=5.0.0b4)"] -cuda114 = ["cupy-cuda114 (>=5.0.0b4)"] -cuda115 = ["cupy-cuda115 (>=5.0.0b4)"] -cuda116 = ["cupy-cuda116 (>=5.0.0b4)"] -cuda117 = ["cupy-cuda117 (>=5.0.0b4)"] -cuda11x = ["cupy-cuda11x (>=11.0.0)"] -cuda80 = ["cupy-cuda80 (>=5.0.0b4)"] -cuda90 = ["cupy-cuda90 (>=5.0.0b4)"] -cuda91 = ["cupy-cuda91 (>=5.0.0b4)"] -cuda92 = ["cupy-cuda92 (>=5.0.0b4)"] -datasets = ["ml-datasets (>=0.2.0,<0.3.0)"] -mxnet = ["mxnet (>=1.5.1,<1.6.0)"] -tensorflow = ["tensorflow (>=2.0.0,<2.6.0)"] -torch = ["torch (>=1.6.0)"] - -[[package]] -name = "tinycss2" -version = "1.2.1" -description = "A tiny CSS parser" -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -webencodings = ">=0.4" - -[package.extras] -doc = ["sphinx", "sphinx_rtd_theme"] -test = ["flake8", "isort", "pytest"] - -[[package]] -name = "tokenizers" -version = "0.13.2" -description = "Fast and Customizable Tokenizers" -category = "main" -optional = true -python-versions = "*" - -[package.extras] -dev = ["black (==22.3)", "datasets", "numpy", "pytest", "requests"] -docs = ["setuptools-rust", "sphinx", "sphinx-rtd-theme"] -testing = ["black (==22.3)", "datasets", "numpy", "pytest", "requests"] - -[[package]] -name = "tomli" -version = "2.0.1" -description = "A lil' TOML parser" -category = "dev" -optional = false -python-versions = ">=3.7" - -[[package]] -name = "tornado" -version = "6.2" -description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." 
-category = "dev" -optional = false -python-versions = ">= 3.7" - -[[package]] -name = "tqdm" -version = "4.64.1" -description = "Fast, Extensible Progress Meter" -category = "main" -optional = true -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" - -[package.dependencies] -colorama = {version = "*", markers = "platform_system == \"Windows\""} - -[package.extras] -dev = ["py-make (>=0.1.0)", "twine", "wheel"] -notebook = ["ipywidgets (>=6)"] -slack = ["slack-sdk"] -telegram = ["requests"] - -[[package]] -name = "traitlets" -version = "5.6.0" -description = "Traitlets Python configuration system" -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.extras] -docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] -test = ["pre-commit", "pytest"] - -[[package]] -name = "transformers" -version = "4.25.1" -description = "State-of-the-art Machine Learning for JAX, PyTorch and TensorFlow" -category = "main" -optional = true -python-versions = ">=3.7.0" - -[package.dependencies] -filelock = "*" -huggingface-hub = ">=0.10.0,<1.0" -numpy = ">=1.17" -packaging = ">=20.0" -pyyaml = ">=5.1" -regex = "!=2019.12.17" -requests = "*" -tokenizers = ">=0.11.1,<0.11.3 || >0.11.3,<0.14" -tqdm = ">=4.27" - -[package.extras] -accelerate = ["accelerate (>=0.10.0)"] -all = ["Pillow", "accelerate (>=0.10.0)", "codecarbon (==1.2.0)", "flax (>=0.4.1)", "jax (>=0.2.8,!=0.3.2,<=0.3.6)", "jaxlib (>=0.1.65,<=0.3.6)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "onnxconverter-common", "optax (>=0.0.8)", "optuna", "phonemizer", "protobuf (<=3.20.2)", "pyctcdecode (>=0.4.0)", "ray[tune]", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "tensorflow (>=2.4,<2.11)", "tensorflow-text", "tf2onnx", "timm", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.7,!=1.12.0)", "torchaudio"] -audio = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"] -codecarbon = ["codecarbon (==1.2.0)"] -deepspeed = ["accelerate (>=0.10.0)", "deepspeed (>=0.6.5)"] -deepspeed-testing = ["GitPython (<3.1.19)", "accelerate (>=0.10.0)", "beautifulsoup4", "black (==22.3)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "deepspeed (>=0.6.5)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "hf-doc-builder (>=0.3.0)", "nltk", "optuna", "parameterized", "protobuf (<=3.20.2)", "psutil", "pytest", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "safetensors (>=0.2.1)", "timeout-decorator"] -dev = ["GitPython (<3.1.19)", "Pillow", "accelerate (>=0.10.0)", "beautifulsoup4", "black (==22.3)", "codecarbon (==1.2.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "flake8 (>=3.8.3)", "flax (>=0.4.1)", "fugashi (>=1.0)", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "ipadic (>=1.0.0,<2.0)", "isort (>=5.5.4)", "jax (>=0.2.8,!=0.3.2,<=0.3.6)", "jaxlib (>=0.1.65,<=0.3.6)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "nltk", "onnxconverter-common", "optax (>=0.0.8)", "optuna", "parameterized", "phonemizer", "protobuf (<=3.20.2)", "psutil", "pyctcdecode (>=0.4.0)", "pyknp (>=0.6.1)", "pytest", "pytest-timeout", "pytest-xdist", "ray[tune]", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "safetensors (>=0.2.1)", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "tensorflow (>=2.4,<2.11)", "tensorflow-text", "tf2onnx", "timeout-decorator", "timm", "tokenizers 
(>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.7,!=1.12.0)", "torchaudio", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)"] -dev-tensorflow = ["GitPython (<3.1.19)", "Pillow", "beautifulsoup4", "black (==22.3)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "flake8 (>=3.8.3)", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "isort (>=5.5.4)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "nltk", "onnxconverter-common", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "parameterized", "phonemizer", "protobuf (<=3.20.2)", "psutil", "pyctcdecode (>=0.4.0)", "pytest", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "safetensors (>=0.2.1)", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorflow (>=2.4,<2.11)", "tensorflow-text", "tf2onnx", "timeout-decorator", "tokenizers (>=0.11.1,!=0.11.3,<0.14)"] -dev-torch = ["GitPython (<3.1.19)", "Pillow", "beautifulsoup4", "black (==22.3)", "codecarbon (==1.2.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "flake8 (>=3.8.3)", "fugashi (>=1.0)", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "ipadic (>=1.0.0,<2.0)", "isort (>=5.5.4)", "kenlm", "librosa", "nltk", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "optuna", "parameterized", "phonemizer", "protobuf (<=3.20.2)", "psutil", "pyctcdecode (>=0.4.0)", "pyknp (>=0.6.1)", "pytest", "pytest-timeout", "pytest-xdist", "ray[tune]", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "safetensors (>=0.2.1)", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "timeout-decorator", "timm", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.7,!=1.12.0)", "torchaudio", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)"] -docs = ["Pillow", "accelerate (>=0.10.0)", "codecarbon (==1.2.0)", "flax (>=0.4.1)", "hf-doc-builder", "jax (>=0.2.8,!=0.3.2,<=0.3.6)", "jaxlib (>=0.1.65,<=0.3.6)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "onnxconverter-common", "optax (>=0.0.8)", "optuna", "phonemizer", "protobuf (<=3.20.2)", "pyctcdecode (>=0.4.0)", "ray[tune]", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "tensorflow (>=2.4,<2.11)", "tensorflow-text", "tf2onnx", "timm", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.7,!=1.12.0)", "torchaudio"] -docs-specific = ["hf-doc-builder"] -fairscale = ["fairscale (>0.3)"] -flax = ["flax (>=0.4.1)", "jax (>=0.2.8,!=0.3.2,<=0.3.6)", "jaxlib (>=0.1.65,<=0.3.6)", "optax (>=0.0.8)"] -flax-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"] -ftfy = ["ftfy"] -integrations = ["optuna", "ray[tune]", "sigopt"] -ja = ["fugashi (>=1.0)", "ipadic (>=1.0.0,<2.0)", "pyknp (>=0.6.1)", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)"] -modelcreation = ["cookiecutter (==1.7.3)"] -natten = ["natten (>=0.14.4)"] -onnx = ["onnxconverter-common", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "tf2onnx"] -onnxruntime = ["onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)"] -optuna = ["optuna"] -quality = ["GitPython (<3.1.19)", "black (==22.3)", "datasets (!=2.5.0)", "flake8 (>=3.8.3)", "hf-doc-builder (>=0.3.0)", "isort (>=5.5.4)"] -ray = ["ray[tune]"] -retrieval = ["datasets (!=2.5.0)", "faiss-cpu"] -sagemaker = ["sagemaker (>=2.31.0)"] -sentencepiece = ["protobuf (<=3.20.2)", "sentencepiece 
(>=0.1.91,!=0.1.92)"] -serving = ["fastapi", "pydantic", "starlette", "uvicorn"] -sigopt = ["sigopt"] -sklearn = ["scikit-learn"] -speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)", "torchaudio"] -testing = ["GitPython (<3.1.19)", "beautifulsoup4", "black (==22.3)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "hf-doc-builder (>=0.3.0)", "nltk", "parameterized", "protobuf (<=3.20.2)", "psutil", "pytest", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "safetensors (>=0.2.1)", "timeout-decorator"] -tf = ["keras-nlp (>=0.3.1)", "onnxconverter-common", "tensorflow (>=2.4,<2.11)", "tensorflow-text", "tf2onnx"] -tf-cpu = ["keras-nlp (>=0.3.1)", "onnxconverter-common", "tensorflow-cpu (>=2.4,<2.11)", "tensorflow-text", "tf2onnx"] -tf-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"] -timm = ["timm"] -tokenizers = ["tokenizers (>=0.11.1,!=0.11.3,<0.14)"] -torch = ["torch (>=1.7,!=1.12.0)"] -torch-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)", "torchaudio"] -torchhub = ["filelock", "huggingface-hub (>=0.10.0,<1.0)", "importlib-metadata", "numpy (>=1.17)", "packaging (>=20.0)", "protobuf (<=3.20.2)", "regex (!=2019.12.17)", "requests", "sentencepiece (>=0.1.91,!=0.1.92)", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.7,!=1.12.0)", "tqdm (>=4.27)"] -vision = ["Pillow"] - -[[package]] -name = "typer" -version = "0.7.0" -description = "Typer, build great CLIs. Easy to code. Based on Python type hints." -category = "main" -optional = true -python-versions = ">=3.6" - -[package.dependencies] -click = ">=7.1.1,<9.0.0" - -[package.extras] -all = ["colorama (>=0.4.3,<0.5.0)", "rich (>=10.11.0,<13.0.0)", "shellingham (>=1.3.0,<2.0.0)"] -dev = ["autoflake (>=1.3.1,<2.0.0)", "flake8 (>=3.8.3,<4.0.0)", "pre-commit (>=2.17.0,<3.0.0)"] -doc = ["cairosvg (>=2.5.2,<3.0.0)", "mdx-include (>=1.4.1,<2.0.0)", "mkdocs (>=1.1.2,<2.0.0)", "mkdocs-material (>=8.1.4,<9.0.0)", "pillow (>=9.3.0,<10.0.0)"] -test = ["black (>=22.3.0,<23.0.0)", "coverage (>=6.2,<7.0)", "isort (>=5.0.6,<6.0.0)", "mypy (==0.910)", "pytest (>=4.4.0,<8.0.0)", "pytest-cov (>=2.10.0,<5.0.0)", "pytest-sugar (>=0.9.4,<0.10.0)", "pytest-xdist (>=1.32.0,<4.0.0)", "rich (>=10.11.0,<13.0.0)", "shellingham (>=1.3.0,<2.0.0)"] - -[[package]] -name = "types-pyyaml" -version = "6.0.12.2" -description = "Typing stubs for PyYAML" -category = "dev" -optional = false -python-versions = "*" - -[[package]] -name = "types-requests" -version = "2.28.11.5" -description = "Typing stubs for requests" -category = "dev" -optional = false -python-versions = "*" - -[package.dependencies] -types-urllib3 = "<1.27" - -[[package]] -name = "types-toml" -version = "0.10.8.1" -description = "Typing stubs for toml" -category = "dev" -optional = false -python-versions = "*" - -[[package]] -name = "types-urllib3" -version = "1.26.25.4" -description = "Typing stubs for urllib3" -category = "dev" -optional = false -python-versions = "*" - -[[package]] -name = "typing-extensions" -version = "4.4.0" -description = "Backported and Experimental Type Hints for Python 3.7+" -category = "main" -optional = false -python-versions = ">=3.7" - -[[package]] -name = "urllib3" -version = "1.26.13" -description = "HTTP library with thread-safe connection pooling, file post, and more." 
-category = "main" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" - -[package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] -secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] -socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] - -[[package]] -name = "wasabi" -version = "0.10.1" -description = "A lightweight console printing and formatting toolkit" -category = "main" -optional = true -python-versions = "*" - -[[package]] -name = "wcwidth" -version = "0.2.5" -description = "Measures the displayed width of unicode strings in a terminal" -category = "dev" -optional = false -python-versions = "*" - -[[package]] -name = "webencodings" -version = "0.5.1" -description = "Character encoding aliases for legacy web content" -category = "dev" -optional = false -python-versions = "*" - -[[package]] -name = "websocket-client" -version = "1.4.2" -description = "WebSocket client for Python with low level API options" -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.extras] -docs = ["Sphinx (>=3.4)", "sphinx-rtd-theme (>=0.5)"] -optional = ["python-socks", "wsaccel"] -test = ["websockets"] - -[[package]] -name = "widgetsnbextension" -version = "4.0.3" -description = "Jupyter interactive widgets for Jupyter Notebook" -category = "dev" -optional = false -python-versions = ">=3.7" - -[[package]] -name = "wikipedia" -version = "1.4.0" -description = "Wikipedia API for Python" -category = "main" -optional = true -python-versions = "*" - -[package.dependencies] -beautifulsoup4 = "*" -requests = ">=2.0.0,<3.0.0" - -[[package]] -name = "zipp" -version = "3.11.0" -description = "Backport of pathlib-compatible object wrapper for zip files" -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)"] -testing = ["flake8 (<5)", "func-timeout", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] - -[extras] -all = ["manifest-ml", "elasticsearch", "faiss-cpu", "transformers", "spacy", "nltk", "wikipedia", "beautifulsoup4"] -llms = ["manifest-ml"] - -[metadata] -lock-version = "1.1" -python-versions = ">=3.8.1,<4.0" -content-hash = "7f44c3b23d4fa30e192ec0f0a9218bcd646bb48dd64a813ee1bb7d61cbe3a5b2" - -[metadata.files] -anyio = [ - {file = "anyio-3.6.2-py3-none-any.whl", hash = "sha256:fbbe32bd270d2a2ef3ed1c5d45041250284e31fc0a4df4a5a6071842051a51e3"}, - {file = "anyio-3.6.2.tar.gz", hash = "sha256:25ea0d673ae30af41a0c442f81cf3b38c7e79fdc7b60335a4c14e05eb0947421"}, -] -appnope = [ - {file = "appnope-0.1.3-py2.py3-none-any.whl", hash = "sha256:265a455292d0bd8a72453494fa24df5a11eb18373a60c7c0430889f22548605e"}, - {file = "appnope-0.1.3.tar.gz", hash = "sha256:02bd91c4de869fbb1e1c50aafc4098827a7a54ab2f39d9dcba6c9547ed920e24"}, -] -argon2-cffi = [ - {file = "argon2-cffi-21.3.0.tar.gz", hash = "sha256:d384164d944190a7dd7ef22c6aa3ff197da12962bd04b17f64d4e93d934dba5b"}, - {file = "argon2_cffi-21.3.0-py3-none-any.whl", hash = "sha256:8c976986f2c5c0e5000919e6de187906cfd81fb1c72bf9d88c01177e77da7f80"}, -] -argon2-cffi-bindings = [ +files = [ {file = "argon2-cffi-bindings-21.2.0.tar.gz", hash = "sha256:bb89ceffa6c791807d1305ceb77dbfacc5aa499891d2c55661c6459651fc39e3"}, 
{file = "argon2_cffi_bindings-21.2.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ccb949252cb2ab3a08c02024acb77cfb179492d5701c7cbdbfd776124d4d2367"}, {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9524464572e12979364b7d600abf96181d3541da11e23ddf565a32e70bd4dc0d"}, @@ -2092,54 +83,172 @@ argon2-cffi-bindings = [ {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ed2937d286e2ad0cc79a7087d3c272832865f779430e0cc2b4f3718d3159b0cb"}, {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5e00316dabdaea0b2dd82d141cc66889ced0cdcbfa599e8b471cf22c620c329a"}, ] -asttokens = [ + +[package.dependencies] +cffi = ">=1.0.1" + +[package.extras] +dev = ["cogapp", "pre-commit", "pytest", "wheel"] +tests = ["pytest"] + +[[package]] +name = "arrow" +version = "1.2.3" +description = "Better dates & times for Python" +category = "dev" +optional = false +python-versions = ">=3.6" +files = [ + {file = "arrow-1.2.3-py3-none-any.whl", hash = "sha256:5a49ab92e3b7b71d96cd6bfcc4df14efefc9dfa96ea19045815914a6ab6b1fe2"}, + {file = "arrow-1.2.3.tar.gz", hash = "sha256:3934b30ca1b9f292376d9db15b19446088d12ec58629bc3f0da28fd55fb633a1"}, +] + +[package.dependencies] +python-dateutil = ">=2.7.0" + +[[package]] +name = "asttokens" +version = "2.2.1" +description = "Annotate AST trees with source code positions" +category = "dev" +optional = false +python-versions = "*" +files = [ {file = "asttokens-2.2.1-py2.py3-none-any.whl", hash = "sha256:6b0ac9e93fb0335014d382b8fa9b3afa7df546984258005da0b9e7095b3deb1c"}, {file = "asttokens-2.2.1.tar.gz", hash = "sha256:4622110b2a6f30b77e1473affaa97e711bc2f07d3f10848420ff1898edbe94f3"}, ] -async-timeout = [ + +[package.dependencies] +six = "*" + +[package.extras] +test = ["astroid", "pytest"] + +[[package]] +name = "async-timeout" +version = "4.0.2" +description = "Timeout context manager for asyncio programs" +category = "main" +optional = true +python-versions = ">=3.6" +files = [ {file = "async-timeout-4.0.2.tar.gz", hash = "sha256:2163e1640ddb52b7a8c80d0a67a08587e5d245cc9c553a74a847056bc2976b15"}, {file = "async_timeout-4.0.2-py3-none-any.whl", hash = "sha256:8ca1e4fcf50d07413d66d1a5e416e42cfdf5851c981d679a09851a6853383b3c"}, ] -attrs = [ + +[[package]] +name = "attrs" +version = "22.1.0" +description = "Classes Without Boilerplate" +category = "dev" +optional = false +python-versions = ">=3.5" +files = [ {file = "attrs-22.1.0-py2.py3-none-any.whl", hash = "sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c"}, {file = "attrs-22.1.0.tar.gz", hash = "sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6"}, ] -backcall = [ + +[package.extras] +dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy (>=0.900,!=0.940)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "sphinx", "sphinx-notfound-page", "zope.interface"] +docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"] +tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "zope.interface"] +tests-no-zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins"] + +[[package]] +name = "backcall" +version = "0.2.0" +description = "Specifications for callback functions passed in to 
an API" +category = "dev" +optional = false +python-versions = "*" +files = [ {file = "backcall-0.2.0-py2.py3-none-any.whl", hash = "sha256:fbbce6a29f263178a1f7915c1940bde0ec2b2a967566fe1c65c1dfb7422bd255"}, {file = "backcall-0.2.0.tar.gz", hash = "sha256:5cbdbf27be5e7cfadb448baf0aa95508f91f2bbc6c6437cd9cd06e2a4c215e1e"}, ] -beautifulsoup4 = [ + +[[package]] +name = "beautifulsoup4" +version = "4.11.1" +description = "Screen-scraping library" +category = "main" +optional = false +python-versions = ">=3.6.0" +files = [ {file = "beautifulsoup4-4.11.1-py3-none-any.whl", hash = "sha256:58d5c3d29f5a36ffeb94f02f0d786cd53014cf9b3b3951d42e0080d8a9498d30"}, {file = "beautifulsoup4-4.11.1.tar.gz", hash = "sha256:ad9aa55b65ef2808eb405f46cf74df7fcb7044d5cbc26487f96eb2ef2e436693"}, ] -black = [ - {file = "black-22.10.0-1fixedarch-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:5cc42ca67989e9c3cf859e84c2bf014f6633db63d1cbdf8fdb666dcd9e77e3fa"}, - {file = "black-22.10.0-1fixedarch-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:5d8f74030e67087b219b032aa33a919fae8806d49c867846bfacde57f43972ef"}, - {file = "black-22.10.0-1fixedarch-cp37-cp37m-macosx_10_16_x86_64.whl", hash = "sha256:197df8509263b0b8614e1df1756b1dd41be6738eed2ba9e9769f3880c2b9d7b6"}, - {file = "black-22.10.0-1fixedarch-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:2644b5d63633702bc2c5f3754b1b475378fbbfb481f62319388235d0cd104c2d"}, - {file = "black-22.10.0-1fixedarch-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:e41a86c6c650bcecc6633ee3180d80a025db041a8e2398dcc059b3afa8382cd4"}, - {file = "black-22.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2039230db3c6c639bd84efe3292ec7b06e9214a2992cd9beb293d639c6402edb"}, - {file = "black-22.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14ff67aec0a47c424bc99b71005202045dc09270da44a27848d534600ac64fc7"}, - {file = "black-22.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:819dc789f4498ecc91438a7de64427c73b45035e2e3680c92e18795a839ebb66"}, - {file = "black-22.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5b9b29da4f564ba8787c119f37d174f2b69cdfdf9015b7d8c5c16121ddc054ae"}, - {file = "black-22.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8b49776299fece66bffaafe357d929ca9451450f5466e997a7285ab0fe28e3b"}, - {file = "black-22.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:21199526696b8f09c3997e2b4db8d0b108d801a348414264d2eb8eb2532e540d"}, - {file = "black-22.10.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e464456d24e23d11fced2bc8c47ef66d471f845c7b7a42f3bd77bf3d1789650"}, - {file = "black-22.10.0-cp37-cp37m-win_amd64.whl", hash = "sha256:9311e99228ae10023300ecac05be5a296f60d2fd10fff31cf5c1fa4ca4b1988d"}, - {file = "black-22.10.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:fba8a281e570adafb79f7755ac8721b6cf1bbf691186a287e990c7929c7692ff"}, - {file = "black-22.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:915ace4ff03fdfff953962fa672d44be269deb2eaf88499a0f8805221bc68c87"}, - {file = "black-22.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:444ebfb4e441254e87bad00c661fe32df9969b2bf224373a448d8aca2132b395"}, - {file = "black-22.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:974308c58d057a651d182208a484ce80a26dac0caef2895836a92dd6ebd725e0"}, - {file = "black-22.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72ef3925f30e12a184889aac03d77d031056860ccae8a1e519f6cbb742736383"}, - {file = 
"black-22.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:432247333090c8c5366e69627ccb363bc58514ae3e63f7fc75c54b1ea80fa7de"}, - {file = "black-22.10.0-py3-none-any.whl", hash = "sha256:c957b2b4ea88587b46cf49d1dc17681c1e672864fd7af32fc1e9664d572b3458"}, - {file = "black-22.10.0.tar.gz", hash = "sha256:f513588da599943e0cde4e32cc9879e825d58720d6557062d1098c5ad80080e1"}, + +[package.dependencies] +soupsieve = ">1.2" + +[package.extras] +html5lib = ["html5lib"] +lxml = ["lxml"] + +[[package]] +name = "black" +version = "22.12.0" +description = "The uncompromising code formatter." +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "black-22.12.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9eedd20838bd5d75b80c9f5487dbcb06836a43833a37846cf1d8c1cc01cef59d"}, + {file = "black-22.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:159a46a4947f73387b4d83e87ea006dbb2337eab6c879620a3ba52699b1f4351"}, + {file = "black-22.12.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d30b212bffeb1e252b31dd269dfae69dd17e06d92b87ad26e23890f3efea366f"}, + {file = "black-22.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:7412e75863aa5c5411886804678b7d083c7c28421210180d67dfd8cf1221e1f4"}, + {file = "black-22.12.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c116eed0efb9ff870ded8b62fe9f28dd61ef6e9ddd28d83d7d264a38417dcee2"}, + {file = "black-22.12.0-cp37-cp37m-win_amd64.whl", hash = "sha256:1f58cbe16dfe8c12b7434e50ff889fa479072096d79f0a7f25e4ab8e94cd8350"}, + {file = "black-22.12.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77d86c9f3db9b1bf6761244bc0b3572a546f5fe37917a044e02f3166d5aafa7d"}, + {file = "black-22.12.0-cp38-cp38-win_amd64.whl", hash = "sha256:82d9fe8fee3401e02e79767016b4907820a7dc28d70d137eb397b92ef3cc5bfc"}, + {file = "black-22.12.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:101c69b23df9b44247bd88e1d7e90154336ac4992502d4197bdac35dd7ee3320"}, + {file = "black-22.12.0-cp39-cp39-win_amd64.whl", hash = "sha256:559c7a1ba9a006226f09e4916060982fd27334ae1998e7a38b3f33a37f7a2148"}, + {file = "black-22.12.0-py3-none-any.whl", hash = "sha256:436cc9167dd28040ad90d3b404aec22cedf24a6e4d7de221bec2730ec0c97bcf"}, + {file = "black-22.12.0.tar.gz", hash = "sha256:229351e5a18ca30f447bf724d007f890f97e13af070bb6ad4c0a441cd7596a2f"}, ] -bleach = [ + +[package.dependencies] +click = ">=8.0.0" +mypy-extensions = ">=0.4.3" +pathspec = ">=0.9.0" +platformdirs = ">=2" +tomli = {version = ">=1.1.0", markers = "python_full_version < \"3.11.0a7\""} +typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""} + +[package.extras] +colorama = ["colorama (>=0.4.3)"] +d = ["aiohttp (>=3.7.4)"] +jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] +uvloop = ["uvloop (>=0.15.2)"] + +[[package]] +name = "bleach" +version = "5.0.1" +description = "An easy safelist-based HTML-sanitizing tool." 
+category = "dev" +optional = false +python-versions = ">=3.7" +files = [ {file = "bleach-5.0.1-py3-none-any.whl", hash = "sha256:085f7f33c15bd408dd9b17a4ad77c577db66d76203e5984b1bd59baeee948b2a"}, {file = "bleach-5.0.1.tar.gz", hash = "sha256:0d03255c47eb9bd2f26aa9bb7f2107732e7e8fe195ca2f64709fcf3b0a4a085c"}, ] -blis = [ + +[package.dependencies] +six = ">=1.9.0" +webencodings = "*" + +[package.extras] +css = ["tinycss2 (>=1.1.0,<1.2)"] +dev = ["Sphinx (==4.3.2)", "black (==22.3.0)", "build (==0.8.0)", "flake8 (==4.0.1)", "hashin (==0.17.0)", "mypy (==0.961)", "pip-tools (==6.6.2)", "pytest (==7.1.2)", "tox (==3.25.0)", "twine (==4.0.1)", "wheel (==0.37.1)"] + +[[package]] +name = "blis" +version = "0.7.9" +description = "The Blis BLAS-like linear algebra library, as a self-contained C-extension." +category = "main" +optional = true +python-versions = "*" +files = [ {file = "blis-0.7.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b3ea73707a7938304c08363a0b990600e579bfb52dece7c674eafac4bf2df9f7"}, {file = "blis-0.7.9-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e85993364cae82707bfe7e637bee64ec96e232af31301e5c81a351778cb394b9"}, {file = "blis-0.7.9-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d205a7e69523e2bacdd67ea906b82b84034067e0de83b33bd83eb96b9e844ae3"}, @@ -2169,15 +278,42 @@ blis = [ {file = "blis-0.7.9-cp39-cp39-win_amd64.whl", hash = "sha256:d81c3f627d33545fc25c9dcb5fee66c476d89288a27d63ac16ea63453401ffd5"}, {file = "blis-0.7.9.tar.gz", hash = "sha256:29ef4c25007785a90ffc2f0ab3d3bd3b75cd2d7856a9a482b7d0dac8d511a09d"}, ] -catalogue = [ + +[package.dependencies] +numpy = ">=1.15.0" + +[[package]] +name = "catalogue" +version = "2.0.8" +description = "Super lightweight function registries for your library" +category = "main" +optional = true +python-versions = ">=3.6" +files = [ {file = "catalogue-2.0.8-py3-none-any.whl", hash = "sha256:2d786e229d8d202b4f8a2a059858e45a2331201d831e39746732daa704b99f69"}, {file = "catalogue-2.0.8.tar.gz", hash = "sha256:b325c77659208bfb6af1b0d93b1a1aa4112e1bb29a4c5ced816758a722f0e388"}, ] -certifi = [ - {file = "certifi-2022.9.24-py3-none-any.whl", hash = "sha256:90c1a32f1d68f940488354e36370f6cca89f0f106db09518524c88d6ed83f382"}, - {file = "certifi-2022.9.24.tar.gz", hash = "sha256:0d9c601124e5a6ba9712dbc60d9c53c21e34f5f641fe83002317394311bdce14"}, + +[[package]] +name = "certifi" +version = "2022.12.7" +description = "Python package for providing Mozilla's CA Bundle." +category = "main" +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2022.12.7-py3-none-any.whl", hash = "sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18"}, + {file = "certifi-2022.12.7.tar.gz", hash = "sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3"}, ] -cffi = [ + +[[package]] +name = "cffi" +version = "1.15.1" +description = "Foreign Function Interface for Python calling C code." 
+category = "dev" +optional = false +python-versions = "*" +files = [ {file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"}, {file = "cffi-1.15.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2"}, {file = "cffi-1.15.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914"}, @@ -2243,23 +379,160 @@ cffi = [ {file = "cffi-1.15.1-cp39-cp39-win_amd64.whl", hash = "sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c"}, {file = "cffi-1.15.1.tar.gz", hash = "sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9"}, ] -charset-normalizer = [ + +[package.dependencies] +pycparser = "*" + +[[package]] +name = "charset-normalizer" +version = "2.1.1" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +category = "main" +optional = false +python-versions = ">=3.6.0" +files = [ {file = "charset-normalizer-2.1.1.tar.gz", hash = "sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845"}, {file = "charset_normalizer-2.1.1-py3-none-any.whl", hash = "sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f"}, ] -click = [ + +[package.extras] +unicode-backport = ["unicodedata2"] + +[[package]] +name = "click" +version = "8.1.3" +description = "Composable command line interface toolkit" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"}, {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"}, ] -colorama = [ + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +category = "main" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] -confection = [ + +[[package]] +name = "comm" +version = "0.1.2" +description = "Jupyter Python Comm implementation, for usage in ipykernel, xeus-python etc." 
+category = "dev" +optional = false +python-versions = ">=3.6" +files = [ + {file = "comm-0.1.2-py3-none-any.whl", hash = "sha256:9f3abf3515112fa7c55a42a6a5ab358735c9dccc8b5910a9d8e3ef5998130666"}, + {file = "comm-0.1.2.tar.gz", hash = "sha256:3e2f5826578e683999b93716285b3b1f344f157bf75fa9ce0a797564e742f062"}, +] + +[package.dependencies] +traitlets = ">=5.3" + +[package.extras] +test = ["pytest"] + +[[package]] +name = "confection" +version = "0.0.3" +description = "The sweetest config system for Python" +category = "main" +optional = true +python-versions = ">=3.6" +files = [ {file = "confection-0.0.3-py3-none-any.whl", hash = "sha256:51af839c1240430421da2b248541ebc95f9d0ee385bcafa768b8acdbd2b0111d"}, {file = "confection-0.0.3.tar.gz", hash = "sha256:4fec47190057c43c9acbecb8b1b87a9bf31c469caa0d6888a5b9384432fdba5a"}, ] -cymem = [ + +[package.dependencies] +pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<1.11.0" +srsly = ">=2.4.0,<3.0.0" + +[[package]] +name = "coverage" +version = "6.5.0" +description = "Code coverage measurement for Python" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "coverage-6.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ef8674b0ee8cc11e2d574e3e2998aea5df5ab242e012286824ea3c6970580e53"}, + {file = "coverage-6.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:784f53ebc9f3fd0e2a3f6a78b2be1bd1f5575d7863e10c6e12504f240fd06660"}, + {file = "coverage-6.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4a5be1748d538a710f87542f22c2cad22f80545a847ad91ce45e77417293eb4"}, + {file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:83516205e254a0cb77d2d7bb3632ee019d93d9f4005de31dca0a8c3667d5bc04"}, + {file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af4fffaffc4067232253715065e30c5a7ec6faac36f8fc8d6f64263b15f74db0"}, + {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:97117225cdd992a9c2a5515db1f66b59db634f59d0679ca1fa3fe8da32749cae"}, + {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a1170fa54185845505fbfa672f1c1ab175446c887cce8212c44149581cf2d466"}, + {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:11b990d520ea75e7ee8dcab5bc908072aaada194a794db9f6d7d5cfd19661e5a"}, + {file = "coverage-6.5.0-cp310-cp310-win32.whl", hash = "sha256:5dbec3b9095749390c09ab7c89d314727f18800060d8d24e87f01fb9cfb40b32"}, + {file = "coverage-6.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:59f53f1dc5b656cafb1badd0feb428c1e7bc19b867479ff72f7a9dd9b479f10e"}, + {file = "coverage-6.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4a5375e28c5191ac38cca59b38edd33ef4cc914732c916f2929029b4bfb50795"}, + {file = "coverage-6.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4ed2820d919351f4167e52425e096af41bfabacb1857186c1ea32ff9983ed75"}, + {file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:33a7da4376d5977fbf0a8ed91c4dffaaa8dbf0ddbf4c8eea500a2486d8bc4d7b"}, + {file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8fb6cf131ac4070c9c5a3e21de0f7dc5a0fbe8bc77c9456ced896c12fcdad91"}, + {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:a6b7d95969b8845250586f269e81e5dfdd8ff828ddeb8567a4a2eaa7313460c4"}, + {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1ef221513e6f68b69ee9e159506d583d31aa3567e0ae84eaad9d6ec1107dddaa"}, + {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cca4435eebea7962a52bdb216dec27215d0df64cf27fc1dd538415f5d2b9da6b"}, + {file = "coverage-6.5.0-cp311-cp311-win32.whl", hash = "sha256:98e8a10b7a314f454d9eff4216a9a94d143a7ee65018dd12442e898ee2310578"}, + {file = "coverage-6.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:bc8ef5e043a2af066fa8cbfc6e708d58017024dc4345a1f9757b329a249f041b"}, + {file = "coverage-6.5.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4433b90fae13f86fafff0b326453dd42fc9a639a0d9e4eec4d366436d1a41b6d"}, + {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4f05d88d9a80ad3cac6244d36dd89a3c00abc16371769f1340101d3cb899fc3"}, + {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:94e2565443291bd778421856bc975d351738963071e9b8839ca1fc08b42d4bef"}, + {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:027018943386e7b942fa832372ebc120155fd970837489896099f5cfa2890f79"}, + {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:255758a1e3b61db372ec2736c8e2a1fdfaf563977eedbdf131de003ca5779b7d"}, + {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:851cf4ff24062c6aec510a454b2584f6e998cada52d4cb58c5e233d07172e50c"}, + {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:12adf310e4aafddc58afdb04d686795f33f4d7a6fa67a7a9d4ce7d6ae24d949f"}, + {file = "coverage-6.5.0-cp37-cp37m-win32.whl", hash = "sha256:b5604380f3415ba69de87a289a2b56687faa4fe04dbee0754bfcae433489316b"}, + {file = "coverage-6.5.0-cp37-cp37m-win_amd64.whl", hash = "sha256:4a8dbc1f0fbb2ae3de73eb0bdbb914180c7abfbf258e90b311dcd4f585d44bd2"}, + {file = "coverage-6.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d900bb429fdfd7f511f868cedd03a6bbb142f3f9118c09b99ef8dc9bf9643c3c"}, + {file = "coverage-6.5.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2198ea6fc548de52adc826f62cb18554caedfb1d26548c1b7c88d8f7faa8f6ba"}, + {file = "coverage-6.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c4459b3de97b75e3bd6b7d4b7f0db13f17f504f3d13e2a7c623786289dd670e"}, + {file = "coverage-6.5.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:20c8ac5386253717e5ccc827caad43ed66fea0efe255727b1053a8154d952398"}, + {file = "coverage-6.5.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b07130585d54fe8dff3d97b93b0e20290de974dc8177c320aeaf23459219c0b"}, + {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:dbdb91cd8c048c2b09eb17713b0c12a54fbd587d79adcebad543bc0cd9a3410b"}, + {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:de3001a203182842a4630e7b8d1a2c7c07ec1b45d3084a83d5d227a3806f530f"}, + {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e07f4a4a9b41583d6eabec04f8b68076ab3cd44c20bd29332c6572dda36f372e"}, + {file = "coverage-6.5.0-cp38-cp38-win32.whl", hash = "sha256:6d4817234349a80dbf03640cec6109cd90cba068330703fa65ddf56b60223a6d"}, + {file = "coverage-6.5.0-cp38-cp38-win_amd64.whl", 
hash = "sha256:7ccf362abd726b0410bf8911c31fbf97f09f8f1061f8c1cf03dfc4b6372848f6"}, + {file = "coverage-6.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:633713d70ad6bfc49b34ead4060531658dc6dfc9b3eb7d8a716d5873377ab745"}, + {file = "coverage-6.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:95203854f974e07af96358c0b261f1048d8e1083f2de9b1c565e1be4a3a48cfc"}, + {file = "coverage-6.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9023e237f4c02ff739581ef35969c3739445fb059b060ca51771e69101efffe"}, + {file = "coverage-6.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:265de0fa6778d07de30bcf4d9dc471c3dc4314a23a3c6603d356a3c9abc2dfcf"}, + {file = "coverage-6.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f830ed581b45b82451a40faabb89c84e1a998124ee4212d440e9c6cf70083e5"}, + {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7b6be138d61e458e18d8e6ddcddd36dd96215edfe5f1168de0b1b32635839b62"}, + {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:42eafe6778551cf006a7c43153af1211c3aaab658d4d66fa5fcc021613d02518"}, + {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:723e8130d4ecc8f56e9a611e73b31219595baa3bb252d539206f7bbbab6ffc1f"}, + {file = "coverage-6.5.0-cp39-cp39-win32.whl", hash = "sha256:d9ecf0829c6a62b9b573c7bb6d4dcd6ba8b6f80be9ba4fc7ed50bf4ac9aecd72"}, + {file = "coverage-6.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:fc2af30ed0d5ae0b1abdb4ebdce598eafd5b35397d4d75deb341a614d333d987"}, + {file = "coverage-6.5.0-pp36.pp37.pp38-none-any.whl", hash = "sha256:1431986dac3923c5945271f169f59c45b8802a114c8f548d611f2015133df77a"}, + {file = "coverage-6.5.0.tar.gz", hash = "sha256:f642e90754ee3e06b0e7e51bce3379590e76b7f76b708e1a71ff043f87025c84"}, +] + +[package.dependencies] +tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} + +[package.extras] +toml = ["tomli"] + +[[package]] +name = "cymem" +version = "2.0.7" +description = "Manage calls to calloc/free through Cython" +category = "main" +optional = true +python-versions = "*" +files = [ {file = "cymem-2.0.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4981fc9182cc1fe54bfedf5f73bfec3ce0c27582d9be71e130c46e35958beef0"}, {file = "cymem-2.0.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:42aedfd2e77aa0518a24a2a60a2147308903abc8b13c84504af58539c39e52a3"}, {file = "cymem-2.0.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c183257dc5ab237b664f64156c743e788f562417c74ea58c5a3939fe2d48d6f6"}, @@ -2289,7 +562,15 @@ cymem = [ {file = "cymem-2.0.7-cp39-cp39-win_amd64.whl", hash = "sha256:59a09cf0e71b1b88bfa0de544b801585d81d06ea123c1725e7c5da05b7ca0d20"}, {file = "cymem-2.0.7.tar.gz", hash = "sha256:e6034badb5dd4e10344211c81f16505a55553a7164adc314c75bd80cf07e57a8"}, ] -debugpy = [ + +[[package]] +name = "debugpy" +version = "1.6.4" +description = "An implementation of the Debug Adapter Protocol for Python" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ {file = "debugpy-1.6.4-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:6ae238943482c78867ac707c09122688efb700372b617ffd364261e5e41f7a2f"}, {file = "debugpy-1.6.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a39e7da178e1f22f4bc04b57f085e785ed1bcf424aaf318835a1a7129eefe35"}, {file = 
"debugpy-1.6.4-cp310-cp310-win32.whl", hash = "sha256:143f79d0798a9acea21cd1d111badb789f19d414aec95fa6389cfea9485ddfb1"}, @@ -2309,39 +590,134 @@ debugpy = [ {file = "debugpy-1.6.4-py2.py3-none-any.whl", hash = "sha256:e886a1296cd20a10172e94788009ce74b759e54229ebd64a43fa5c2b4e62cd76"}, {file = "debugpy-1.6.4.zip", hash = "sha256:d5ab9bd3f4e7faf3765fd52c7c43c074104ab1e109621dc73219099ed1a5399d"}, ] -decorator = [ + +[[package]] +name = "decorator" +version = "5.1.1" +description = "Decorators for Humans" +category = "dev" +optional = false +python-versions = ">=3.5" +files = [ {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, ] -defusedxml = [ + +[[package]] +name = "defusedxml" +version = "0.7.1" +description = "XML bomb protection for Python stdlib modules" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ {file = "defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61"}, {file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"}, ] -dill = [ + +[[package]] +name = "dill" +version = "0.3.6" +description = "serialize all of python" +category = "main" +optional = true +python-versions = ">=3.7" +files = [ {file = "dill-0.3.6-py3-none-any.whl", hash = "sha256:a07ffd2351b8c678dfc4a856a3005f8067aea51d6ba6c700796a4d9e280f39f0"}, {file = "dill-0.3.6.tar.gz", hash = "sha256:e5db55f3687856d8fbdab002ed78544e1c4559a130302693d839dfe8f93f2373"}, ] -elastic-transport = [ + +[package.extras] +graph = ["objgraph (>=1.7.2)"] + +[[package]] +name = "elastic-transport" +version = "8.4.0" +description = "Transport classes and utilities shared among Python Elastic client libraries" +category = "main" +optional = true +python-versions = ">=3.6" +files = [ {file = "elastic-transport-8.4.0.tar.gz", hash = "sha256:b9ad708ceb7fcdbc6b30a96f886609a109f042c0b9d9f2e44403b3133ba7ff10"}, {file = "elastic_transport-8.4.0-py3-none-any.whl", hash = "sha256:19db271ab79c9f70f8c43f8f5b5111408781a6176b54ab2e54d713b6d9ceb815"}, ] -elasticsearch = [ - {file = "elasticsearch-8.5.2-py3-none-any.whl", hash = "sha256:100ead24d2a20d40227bde9c586c9e32b820d15d3dfb4f12204d6b11841f029e"}, - {file = "elasticsearch-8.5.2.tar.gz", hash = "sha256:7fd57b89b1dfc3c976a71af58376d300a4e40dfcdd22ea2573ce6a830c9ad7c2"}, + +[package.dependencies] +certifi = "*" +urllib3 = ">=1.26.2,<2" + +[package.extras] +develop = ["aiohttp", "mock", "pytest", "pytest-asyncio", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests", "trustme"] + +[[package]] +name = "elasticsearch" +version = "8.5.3" +description = "Python client for Elasticsearch" +category = "main" +optional = true +python-versions = ">=3.6, <4" +files = [ + {file = "elasticsearch-8.5.3-py3-none-any.whl", hash = "sha256:f09adbea8caa633ff79e8fe115fb1d2b635426fe1a23e7e8e3bd7cce5ac3eb70"}, + {file = "elasticsearch-8.5.3.tar.gz", hash = "sha256:4b71ad05b36243c3b13f1c89b3ede4357011eece68917e293c43d4177d565838"}, ] -entrypoints = [ + +[package.dependencies] +elastic-transport = ">=8,<9" + +[package.extras] +async = ["aiohttp (>=3,<4)"] +requests = ["requests (>=2.4.0,<3.0.0)"] + +[[package]] +name = "entrypoints" +version = "0.4" +description = "Discover and load entry points from installed packages." 
+category = "dev" +optional = false +python-versions = ">=3.6" +files = [ {file = "entrypoints-0.4-py3-none-any.whl", hash = "sha256:f174b5ff827504fd3cd97cc3f8649f3693f51538c7e4bdf3ef002c8429d42f9f"}, {file = "entrypoints-0.4.tar.gz", hash = "sha256:b706eddaa9218a19ebcd67b56818f05bb27589b1ca9e8d797b74affad4ccacd4"}, ] -exceptiongroup = [ + +[[package]] +name = "exceptiongroup" +version = "1.0.4" +description = "Backport of PEP 654 (exception groups)" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ {file = "exceptiongroup-1.0.4-py3-none-any.whl", hash = "sha256:542adf9dea4055530d6e1279602fa5cb11dab2395fa650b8674eaec35fc4a828"}, {file = "exceptiongroup-1.0.4.tar.gz", hash = "sha256:bd14967b79cd9bdb54d97323216f8fdf533e278df937aa2a90089e7d6e06e5ec"}, ] -executing = [ + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "executing" +version = "1.2.0" +description = "Get the currently executing AST node of a frame, and other information" +category = "dev" +optional = false +python-versions = "*" +files = [ {file = "executing-1.2.0-py2.py3-none-any.whl", hash = "sha256:0314a69e37426e3608aada02473b4161d4caf5a4b244d1d0c48072b8fee7bacc"}, {file = "executing-1.2.0.tar.gz", hash = "sha256:19da64c18d2d851112f09c287f8d3dbbdf725ab0e569077efb6cdcbd3497c107"}, ] -faiss-cpu = [ + +[package.extras] +tests = ["asttokens", "littleutils", "pytest", "rich"] + +[[package]] +name = "faiss-cpu" +version = "1.7.3" +description = "A library for efficient similarity search and clustering of dense vectors." +category = "main" +optional = true +python-versions = "*" +files = [ {file = "faiss-cpu-1.7.3.tar.gz", hash = "sha256:cb71fe3f2934732d157d9d8cfb6ed2dd4020a0065571c84842ff6a3f0beab310"}, {file = "faiss_cpu-1.7.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:343f025e0846239d987d0c719772387ad685b74e5ef62b2e5616cabef9062729"}, {file = "faiss_cpu-1.7.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8b7b1cf693d7c24b5a633ff024717bd715fec501af4854357da0805b4899bcec"}, @@ -2368,23 +744,91 @@ faiss-cpu = [ {file = "faiss_cpu-1.7.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a14d832b5361ce9af21977eb1dcdebe23b9edcc12aad40316df7ca1bd86bc6b5"}, {file = "faiss_cpu-1.7.3-cp39-cp39-win_amd64.whl", hash = "sha256:52df8895c5e59d1c9eda368a63790381a6f7fceddb22bed08f9c90a706d8a148"}, ] -fastjsonschema = [ + +[[package]] +name = "fastjsonschema" +version = "2.16.2" +description = "Fastest Python implementation of JSON schema" +category = "dev" +optional = false +python-versions = "*" +files = [ {file = "fastjsonschema-2.16.2-py3-none-any.whl", hash = "sha256:21f918e8d9a1a4ba9c22e09574ba72267a6762d47822db9add95f6454e51cc1c"}, {file = "fastjsonschema-2.16.2.tar.gz", hash = "sha256:01e366f25d9047816fe3d288cbfc3e10541daf0af2044763f3d0ade42476da18"}, ] -filelock = [ + +[package.extras] +devel = ["colorama", "json-spec", "jsonschema", "pylint", "pytest", "pytest-benchmark", "pytest-cache", "validictory"] + +[[package]] +name = "filelock" +version = "3.8.2" +description = "A platform independent file lock." 
+category = "main" +optional = true +python-versions = ">=3.7" +files = [ {file = "filelock-3.8.2-py3-none-any.whl", hash = "sha256:8df285554452285f79c035efb0c861eb33a4bcfa5b7a137016e32e6a90f9792c"}, {file = "filelock-3.8.2.tar.gz", hash = "sha256:7565f628ea56bfcd8e54e42bdc55da899c85c1abfe1b5bcfd147e9188cebb3b2"}, ] -flake8 = [ + +[package.extras] +docs = ["furo (>=2022.9.29)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.5)"] +testing = ["covdefaults (>=2.2.2)", "coverage (>=6.5)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-timeout (>=2.1)"] + +[[package]] +name = "flake8" +version = "6.0.0" +description = "the modular source code checker: pep8 pyflakes and co" +category = "dev" +optional = false +python-versions = ">=3.8.1" +files = [ {file = "flake8-6.0.0-py2.py3-none-any.whl", hash = "sha256:3833794e27ff64ea4e9cf5d410082a8b97ff1a06c16aa3d2027339cd0f1195c7"}, {file = "flake8-6.0.0.tar.gz", hash = "sha256:c61007e76655af75e6785a931f452915b371dc48f56efd765247c8fe68f2b181"}, ] -flake8-docstrings = [ + +[package.dependencies] +mccabe = ">=0.7.0,<0.8.0" +pycodestyle = ">=2.10.0,<2.11.0" +pyflakes = ">=3.0.0,<3.1.0" + +[[package]] +name = "flake8-docstrings" +version = "1.6.0" +description = "Extension for flake8 which uses pydocstyle to check docstrings" +category = "dev" +optional = false +python-versions = "*" +files = [ {file = "flake8-docstrings-1.6.0.tar.gz", hash = "sha256:9fe7c6a306064af8e62a055c2f61e9eb1da55f84bb39caef2b84ce53708ac34b"}, {file = "flake8_docstrings-1.6.0-py2.py3-none-any.whl", hash = "sha256:99cac583d6c7e32dd28bbfbef120a7c0d1b6dde4adb5a9fd441c4227a6534bde"}, ] -greenlet = [ + +[package.dependencies] +flake8 = ">=3" +pydocstyle = ">=2.1" + +[[package]] +name = "fqdn" +version = "1.5.1" +description = "Validates fully-qualified domain names against RFC 1123, so that they are acceptable to modern bowsers" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0, !=3.1, !=3.2, !=3.3, !=3.4, <4" +files = [ + {file = "fqdn-1.5.1-py3-none-any.whl", hash = "sha256:3a179af3761e4df6eb2e026ff9e1a3033d3587bf980a0b1b2e1e5d08d7358014"}, + {file = "fqdn-1.5.1.tar.gz", hash = "sha256:105ed3677e767fb5ca086a0c1f4bb66ebc3c100be518f0e0d755d9eae164d89f"}, +] + +[[package]] +name = "greenlet" +version = "2.0.1" +description = "Lightweight in-process concurrent programming" +category = "main" +optional = false +python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*" +files = [ {file = "greenlet-2.0.1-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:9ed358312e63bf683b9ef22c8e442ef6c5c02973f0c2a939ec1d7b50c974015c"}, {file = "greenlet-2.0.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:4f09b0010e55bec3239278f642a8a506b91034f03a4fb28289a7d448a67f1515"}, {file = "greenlet-2.0.1-cp27-cp27m-win32.whl", hash = "sha256:1407fe45246632d0ffb7a3f4a520ba4e6051fc2cbd61ba1f806900c27f47706a"}, @@ -2446,100 +890,576 @@ greenlet = [ {file = "greenlet-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:b23d2a46d53210b498e5b701a1913697671988f4bf8e10f935433f6e7c332fb6"}, {file = "greenlet-2.0.1.tar.gz", hash = "sha256:42e602564460da0e8ee67cb6d7236363ee5e131aa15943b6670e44e5c2ed0f67"}, ] -huggingface-hub = [ + +[package.extras] +docs = ["Sphinx", "docutils (<0.18)"] +test = ["faulthandler", "objgraph", "psutil"] + +[[package]] +name = "huggingface-hub" +version = "0.11.1" +description = "Client library to download and publish models, datasets and other repos on the huggingface.co hub" +category = "main" +optional = true +python-versions = ">=3.7.0" +files = [ {file = 
"huggingface_hub-0.11.1-py3-none-any.whl", hash = "sha256:11eed7aab4fa4d1fb532f2aea3379ef4998d9f6bc24a330834dfedd3dac7f441"}, {file = "huggingface_hub-0.11.1.tar.gz", hash = "sha256:8b9ebf9bbb1782f6f0419ec490973a6487c6c4ed84293a8a325d34c4f898f53f"}, ] -idna = [ + +[package.dependencies] +filelock = "*" +packaging = ">=20.9" +pyyaml = ">=5.1" +requests = "*" +tqdm = "*" +typing-extensions = ">=3.7.4.3" + +[package.extras] +all = ["InquirerPy (==0.3.4)", "Jinja2", "black (==22.3)", "flake8 (>=3.8.3)", "flake8-bugbear", "isort (>=5.5.4)", "jedi", "mypy (==0.982)", "pytest", "pytest-cov", "pytest-env", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3"] +cli = ["InquirerPy (==0.3.4)"] +dev = ["InquirerPy (==0.3.4)", "Jinja2", "black (==22.3)", "flake8 (>=3.8.3)", "flake8-bugbear", "isort (>=5.5.4)", "jedi", "mypy (==0.982)", "pytest", "pytest-cov", "pytest-env", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3"] +fastai = ["fastai (>=2.4)", "fastcore (>=1.3.27)", "toml"] +quality = ["black (==22.3)", "flake8 (>=3.8.3)", "flake8-bugbear", "isort (>=5.5.4)", "mypy (==0.982)"] +tensorflow = ["graphviz", "pydot", "tensorflow"] +testing = ["InquirerPy (==0.3.4)", "Jinja2", "isort (>=5.5.4)", "jedi", "pytest", "pytest-cov", "pytest-env", "soundfile"] +torch = ["torch"] +typing = ["types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3"] + +[[package]] +name = "idna" +version = "3.4" +description = "Internationalized Domain Names in Applications (IDNA)" +category = "main" +optional = false +python-versions = ">=3.5" +files = [ {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, ] -importlib-metadata = [ + +[[package]] +name = "importlib-metadata" +version = "5.1.0" +description = "Read metadata from Python packages" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ {file = "importlib_metadata-5.1.0-py3-none-any.whl", hash = "sha256:d84d17e21670ec07990e1044a99efe8d615d860fd176fc29ef5c306068fda313"}, {file = "importlib_metadata-5.1.0.tar.gz", hash = "sha256:d5059f9f1e8e41f80e9c56c2ee58811450c31984dfa625329ffd7c0dad88a73b"}, ] -importlib-resources = [ - {file = "importlib_resources-5.10.0-py3-none-any.whl", hash = "sha256:ee17ec648f85480d523596ce49eae8ead87d5631ae1551f913c0100b5edd3437"}, - {file = "importlib_resources-5.10.0.tar.gz", hash = "sha256:c01b1b94210d9849f286b86bb51bcea7cd56dde0600d8db721d7b81330711668"}, + +[package.dependencies] +zipp = ">=0.5" + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)"] +perf = ["ipython"] +testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)"] + +[[package]] +name = "importlib-resources" +version = "5.10.1" +description = "Read resources from Python packages" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "importlib_resources-5.10.1-py3-none-any.whl", hash = "sha256:c09b067d82e72c66f4f8eb12332f5efbebc9b007c0b6c40818108c9870adc363"}, + {file = "importlib_resources-5.10.1.tar.gz", 
hash = "sha256:32bb095bda29741f6ef0e5278c42df98d135391bee5f932841efc0041f748dc3"}, ] -iniconfig = [ + +[package.dependencies] +zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)"] +testing = ["flake8 (<5)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] + +[[package]] +name = "iniconfig" +version = "1.1.1" +description = "iniconfig: brain-dead simple config-ini parsing" +category = "dev" +optional = false +python-versions = "*" +files = [ {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, ] -ipykernel = [ - {file = "ipykernel-6.17.1-py3-none-any.whl", hash = "sha256:3a9a1b2ad6dbbd5879855aabb4557f08e63fa2208bffed897f03070e2bb436f6"}, - {file = "ipykernel-6.17.1.tar.gz", hash = "sha256:e178c1788399f93a459c241fe07c3b810771c607b1fb064a99d2c5d40c90c5d4"}, + +[[package]] +name = "ipykernel" +version = "6.19.2" +description = "IPython Kernel for Jupyter" +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "ipykernel-6.19.2-py3-none-any.whl", hash = "sha256:1374a55c57ca7a7286c3d8b15799cd76e1a2381b6b1fea99c494b955988926b6"}, + {file = "ipykernel-6.19.2.tar.gz", hash = "sha256:1ab68d3d3654196266baa93990055413e167263ffbe4cfe834f871bcd3d3506d"}, ] -ipython = [ + +[package.dependencies] +appnope = {version = "*", markers = "platform_system == \"Darwin\""} +comm = ">=0.1.1" +debugpy = ">=1.0" +ipython = ">=7.23.1" +jupyter-client = ">=6.1.12" +matplotlib-inline = ">=0.1" +nest-asyncio = "*" +packaging = "*" +psutil = "*" +pyzmq = ">=17" +tornado = ">=6.1" +traitlets = ">=5.4.0" + +[package.extras] +cov = ["coverage[toml]", "curio", "matplotlib", "pytest-cov", "trio"] +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx", "sphinxcontrib-github-alt"] +lint = ["black (>=22.6.0)", "mdformat (>0.7)", "ruff (>=0.0.156)"] +test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio", "pytest-cov", "pytest-timeout"] +typing = ["mypy (>=0.990)"] + +[[package]] +name = "ipython" +version = "8.7.0" +description = "IPython: Productive Interactive Computing" +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ {file = "ipython-8.7.0-py3-none-any.whl", hash = "sha256:352042ddcb019f7c04e48171b4dd78e4c4bb67bf97030d170e154aac42b656d9"}, {file = "ipython-8.7.0.tar.gz", hash = "sha256:882899fe78d5417a0aa07f995db298fa28b58faeba2112d2e3a4c95fe14bb738"}, ] -ipython-genutils = [ + +[package.dependencies] +appnope = {version = "*", markers = "sys_platform == \"darwin\""} +backcall = "*" +colorama = {version = "*", markers = "sys_platform == \"win32\""} +decorator = "*" +jedi = ">=0.16" +matplotlib-inline = "*" +pexpect = {version = ">4.3", markers = "sys_platform != \"win32\""} +pickleshare = "*" +prompt-toolkit = ">=3.0.11,<3.1.0" +pygments = ">=2.4.0" +stack-data = "*" +traitlets = ">=5" + +[package.extras] +all = ["black", "curio", "docrepr", "ipykernel", "ipyparallel", "ipywidgets", "matplotlib", "matplotlib (!=3.2.0)", "nbconvert", "nbformat", "notebook", "numpy (>=1.20)", "pandas", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio", "qtconsole", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", 
"stack-data", "testpath", "trio", "typing-extensions"] +black = ["black"] +doc = ["docrepr", "ipykernel", "matplotlib", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "typing-extensions"] +kernel = ["ipykernel"] +nbconvert = ["nbconvert"] +nbformat = ["nbformat"] +notebook = ["ipywidgets", "notebook"] +parallel = ["ipyparallel"] +qtconsole = ["qtconsole"] +test = ["pytest (<7.1)", "pytest-asyncio", "testpath"] +test-extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.20)", "pandas", "pytest (<7.1)", "pytest-asyncio", "testpath", "trio"] + +[[package]] +name = "ipython-genutils" +version = "0.2.0" +description = "Vestigial utilities from IPython" +category = "dev" +optional = false +python-versions = "*" +files = [ {file = "ipython_genutils-0.2.0-py2.py3-none-any.whl", hash = "sha256:72dd37233799e619666c9f639a9da83c34013a73e8bbc79a7a6348d93c61fab8"}, {file = "ipython_genutils-0.2.0.tar.gz", hash = "sha256:eb2e116e75ecef9d4d228fdc66af54269afa26ab4463042e33785b887c628ba8"}, ] -ipywidgets = [ - {file = "ipywidgets-8.0.2-py3-none-any.whl", hash = "sha256:1dc3dd4ee19ded045ea7c86eb273033d238d8e43f9e7872c52d092683f263891"}, - {file = "ipywidgets-8.0.2.tar.gz", hash = "sha256:08cb75c6e0a96836147cbfdc55580ae04d13e05d26ffbc377b4e1c68baa28b1f"}, + +[[package]] +name = "ipywidgets" +version = "8.0.3" +description = "Jupyter interactive widgets" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "ipywidgets-8.0.3-py3-none-any.whl", hash = "sha256:db7dd35fb1217636cbdbe0ba0bd2216d91a7695cb28b5c1dca17e62cd51378de"}, + {file = "ipywidgets-8.0.3.tar.gz", hash = "sha256:2ec50df8538a1d4ddd5d454830d010922ad1015e81ac23efb27c0908bbc1eece"}, ] -isort = [ - {file = "isort-5.10.1-py3-none-any.whl", hash = "sha256:6f62d78e2f89b4500b080fe3a81690850cd254227f27f75c3a0c491a1f351ba7"}, - {file = "isort-5.10.1.tar.gz", hash = "sha256:e8443a5e7a020e9d7f97f1d7d9cd17c88bcb3bc7e218bf9cf5095fe550be2951"}, + +[package.dependencies] +ipykernel = ">=4.5.1" +ipython = ">=6.1.0" +jupyterlab-widgets = ">=3.0,<4.0" +traitlets = ">=4.3.1" +widgetsnbextension = ">=4.0,<5.0" + +[package.extras] +test = ["jsonschema", "pytest (>=3.6.0)", "pytest-cov", "pytz"] + +[[package]] +name = "isoduration" +version = "20.11.0" +description = "Operations with ISO 8601 durations" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "isoduration-20.11.0-py3-none-any.whl", hash = "sha256:b2904c2a4228c3d44f409c8ae8e2370eb21a26f7ac2ec5446df141dde3452042"}, + {file = "isoduration-20.11.0.tar.gz", hash = "sha256:ac2f9015137935279eac671f94f89eb00584f940f5dc49462a0c4ee692ba1bd9"}, ] -jedi = [ + +[package.dependencies] +arrow = ">=0.15.0" + +[[package]] +name = "isort" +version = "5.11.1" +description = "A Python utility / library to sort Python imports." 
+category = "dev" +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "isort-5.11.1-py3-none-any.whl", hash = "sha256:bf02c95f1fe615ebbe13a619cfed1619ddfe8941274c9e3de3143adca406cb02"}, + {file = "isort-5.11.1.tar.gz", hash = "sha256:7c5bd998504826b6f1e6f2f98b533976b066baba29b8bae83fdeefd0b89c6b70"}, +] + +[package.extras] +colors = ["colorama (>=0.4.3,<0.5.0)"] +pipfile-deprecated-finder = ["pipreqs", "requirementslib"] +plugins = ["setuptools"] +requirements-deprecated-finder = ["pip-api", "pipreqs"] + +[[package]] +name = "jedi" +version = "0.18.2" +description = "An autocompletion tool for Python that can be used for text editors." +category = "dev" +optional = false +python-versions = ">=3.6" +files = [ {file = "jedi-0.18.2-py2.py3-none-any.whl", hash = "sha256:203c1fd9d969ab8f2119ec0a3342e0b49910045abe6af0a3ae83a5764d54639e"}, {file = "jedi-0.18.2.tar.gz", hash = "sha256:bae794c30d07f6d910d32a7048af09b5a39ed740918da923c6b780790ebac612"}, ] -jinja2 = [ + +[package.dependencies] +parso = ">=0.8.0,<0.9.0" + +[package.extras] +docs = ["Jinja2 (==2.11.3)", "MarkupSafe (==1.1.1)", "Pygments (==2.8.1)", "alabaster (==0.7.12)", "babel (==2.9.1)", "chardet (==4.0.0)", "commonmark (==0.8.1)", "docutils (==0.17.1)", "future (==0.18.2)", "idna (==2.10)", "imagesize (==1.2.0)", "mock (==1.0.1)", "packaging (==20.9)", "pyparsing (==2.4.7)", "pytz (==2021.1)", "readthedocs-sphinx-ext (==2.1.4)", "recommonmark (==0.5.0)", "requests (==2.25.1)", "six (==1.15.0)", "snowballstemmer (==2.1.0)", "sphinx (==1.8.5)", "sphinx-rtd-theme (==0.4.3)", "sphinxcontrib-serializinghtml (==1.1.4)", "sphinxcontrib-websupport (==1.2.4)", "urllib3 (==1.26.4)"] +qa = ["flake8 (==3.8.3)", "mypy (==0.782)"] +testing = ["Django (<3.1)", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] + +[[package]] +name = "jinja2" +version = "3.1.2" +description = "A very fast and expressive template engine." 
+category = "main" +optional = false +python-versions = ">=3.7" +files = [ {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, ] -joblib = [ + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "joblib" +version = "1.2.0" +description = "Lightweight pipelining with Python functions" +category = "main" +optional = true +python-versions = ">=3.7" +files = [ {file = "joblib-1.2.0-py3-none-any.whl", hash = "sha256:091138ed78f800342968c523bdde947e7a305b8594b910a0fea2ab83c3c6d385"}, {file = "joblib-1.2.0.tar.gz", hash = "sha256:e1cee4a79e4af22881164f218d4311f60074197fb707e082e803b61f6d137018"}, ] -jsonschema = [ + +[[package]] +name = "jsonpointer" +version = "2.3" +description = "Identify specific nodes in a JSON document (RFC 6901)" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "jsonpointer-2.3-py2.py3-none-any.whl", hash = "sha256:51801e558539b4e9cd268638c078c6c5746c9ac96bc38152d443400e4f3793e9"}, + {file = "jsonpointer-2.3.tar.gz", hash = "sha256:97cba51526c829282218feb99dab1b1e6bdf8efd1c43dc9d57be093c0d69c99a"}, +] + +[[package]] +name = "jsonschema" +version = "4.17.3" +description = "An implementation of JSON Schema validation for Python" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ {file = "jsonschema-4.17.3-py3-none-any.whl", hash = "sha256:a870ad254da1a8ca84b6a2905cac29d265f805acc57af304784962a2aa6508f6"}, {file = "jsonschema-4.17.3.tar.gz", hash = "sha256:0f864437ab8b6076ba6707453ef8f98a6a0d512a80e93f8abdb676f737ecb60d"}, ] -jupyter = [ + +[package.dependencies] +attrs = ">=17.4.0" +fqdn = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} +idna = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} +importlib-resources = {version = ">=1.4.0", markers = "python_version < \"3.9\""} +isoduration = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} +jsonpointer = {version = ">1.13", optional = true, markers = "extra == \"format-nongpl\""} +pkgutil-resolve-name = {version = ">=1.3.10", markers = "python_version < \"3.9\""} +pyrsistent = ">=0.14.0,<0.17.0 || >0.17.0,<0.17.1 || >0.17.1,<0.17.2 || >0.17.2" +rfc3339-validator = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} +rfc3986-validator = {version = ">0.1.0", optional = true, markers = "extra == \"format-nongpl\""} +uri-template = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} +webcolors = {version = ">=1.11", optional = true, markers = "extra == \"format-nongpl\""} + +[package.extras] +format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] +format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"] + +[[package]] +name = "jupyter" +version = "1.0.0" +description = "Jupyter metapackage. Install all the Jupyter components in one go." 
+category = "dev" +optional = false +python-versions = "*" +files = [ {file = "jupyter-1.0.0-py2.py3-none-any.whl", hash = "sha256:5b290f93b98ffbc21c0c7e749f054b3267782166d72fa5e3ed1ed4eaf34a2b78"}, {file = "jupyter-1.0.0.tar.gz", hash = "sha256:d9dc4b3318f310e34c82951ea5d6683f67bed7def4b259fafbfe4f1beb1d8e5f"}, {file = "jupyter-1.0.0.zip", hash = "sha256:3e1f86076bbb7c8c207829390305a2b1fe836d471ed54be66a3b8c41e7f46cc7"}, ] -jupyter-client = [ + +[package.dependencies] +ipykernel = "*" +ipywidgets = "*" +jupyter-console = "*" +nbconvert = "*" +notebook = "*" +qtconsole = "*" + +[[package]] +name = "jupyter-client" +version = "7.4.8" +description = "Jupyter protocol implementation and client libraries" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ {file = "jupyter_client-7.4.8-py3-none-any.whl", hash = "sha256:d4a67ae86ee014bcb96bd8190714f6af921f2b0f52f4208b086aa5acfd9f8d65"}, {file = "jupyter_client-7.4.8.tar.gz", hash = "sha256:109a3c33b62a9cf65aa8325850a0999a795fac155d9de4f7555aef5f310ee35a"}, ] -jupyter-console = [ + +[package.dependencies] +entrypoints = "*" +jupyter-core = ">=4.9.2" +nest-asyncio = ">=1.5.4" +python-dateutil = ">=2.8.2" +pyzmq = ">=23.0" +tornado = ">=6.2" +traitlets = "*" + +[package.extras] +doc = ["ipykernel", "myst-parser", "sphinx (>=1.3.6)", "sphinx-rtd-theme", "sphinxcontrib-github-alt"] +test = ["codecov", "coverage", "ipykernel (>=6.12)", "ipython", "mypy", "pre-commit", "pytest", "pytest-asyncio (>=0.18)", "pytest-cov", "pytest-timeout"] + +[[package]] +name = "jupyter-console" +version = "6.4.4" +description = "Jupyter terminal console" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ {file = "jupyter_console-6.4.4-py3-none-any.whl", hash = "sha256:756df7f4f60c986e7bc0172e4493d3830a7e6e75c08750bbe59c0a5403ad6dee"}, {file = "jupyter_console-6.4.4.tar.gz", hash = "sha256:172f5335e31d600df61613a97b7f0352f2c8250bbd1092ef2d658f77249f89fb"}, ] -jupyter-core = [ + +[package.dependencies] +ipykernel = "*" +ipython = "*" +jupyter-client = ">=7.0.0" +prompt-toolkit = ">=2.0.0,<3.0.0 || >3.0.0,<3.0.1 || >3.0.1,<3.1.0" +pygments = "*" + +[package.extras] +test = ["pexpect"] + +[[package]] +name = "jupyter-core" +version = "5.1.0" +description = "Jupyter core package. A base package on which Jupyter projects rely." 
+category = "dev" +optional = false +python-versions = ">=3.8" +files = [ {file = "jupyter_core-5.1.0-py3-none-any.whl", hash = "sha256:f5740d99606958544396914b08e67b668f45e7eff99ab47a7f4bcead419c02f4"}, {file = "jupyter_core-5.1.0.tar.gz", hash = "sha256:a5ae7c09c55c0b26f692ec69323ba2b62e8d7295354d20f6cd57b749de4a05bf"}, ] -jupyter-server = [ - {file = "jupyter_server-1.23.3-py3-none-any.whl", hash = "sha256:438496cac509709cc85e60172e5538ca45b4c8a0862bb97cd73e49f2ace419cb"}, - {file = "jupyter_server-1.23.3.tar.gz", hash = "sha256:f7f7a2f9d36f4150ad125afef0e20b1c76c8ff83eb5e39fb02d3b9df0f9b79ab"}, + +[package.dependencies] +platformdirs = ">=2.5" +pywin32 = {version = ">=1.0", markers = "sys_platform == \"win32\" and platform_python_implementation != \"PyPy\""} +traitlets = ">=5.3" + +[package.extras] +docs = ["myst-parser", "sphinxcontrib-github-alt", "traitlets"] +test = ["ipykernel", "pre-commit", "pytest", "pytest-cov", "pytest-timeout"] + +[[package]] +name = "jupyter-events" +version = "0.5.0" +description = "Jupyter Event System library" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "jupyter_events-0.5.0-py3-none-any.whl", hash = "sha256:6f7b67bf42b8a370c992187194ed02847dfa02307a7aebe9913e2d3979b9b6b8"}, + {file = "jupyter_events-0.5.0.tar.gz", hash = "sha256:e27ffdd6138699d47d42cb65ae6d79334ff7c0d923694381c991ce56a140f2cd"}, ] -jupyterlab-pygments = [ + +[package.dependencies] +jsonschema = {version = ">=4.3.0", extras = ["format-nongpl"]} +python-json-logger = "*" +pyyaml = "*" +traitlets = "*" + +[package.extras] +cli = ["click", "rich"] +test = ["click", "coverage", "pre-commit", "pytest (>=6.1.0)", "pytest-asyncio (>=0.19.0)", "pytest-console-scripts", "pytest-cov", "rich"] + +[[package]] +name = "jupyter-server" +version = "2.0.1" +description = "The backend—i.e. core services, APIs, and REST endpoints—to Jupyter web applications." +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jupyter_server-2.0.1-py3-none-any.whl", hash = "sha256:3bc09974a5290249de6924a614933e6f4f3d6d11f3061423a9f4e0271064a8b3"}, + {file = "jupyter_server-2.0.1.tar.gz", hash = "sha256:6e71268380ad7e4f2d9dda2f3e51a4fd4d1997b5390d5acdb74c7a195cfe4c00"}, +] + +[package.dependencies] +anyio = ">=3.1.0,<4" +argon2-cffi = "*" +jinja2 = "*" +jupyter-client = ">=7.4.4" +jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" +jupyter-events = ">=0.4.0" +jupyter-server-terminals = "*" +nbconvert = ">=6.4.4" +nbformat = ">=5.3.0" +packaging = "*" +prometheus-client = "*" +pywinpty = {version = "*", markers = "os_name == \"nt\""} +pyzmq = ">=24" +send2trash = "*" +terminado = ">=0.8.3" +tornado = ">=6.2.0" +traitlets = ">=5.6.0" +websocket-client = "*" + +[package.extras] +docs = ["docutils (<0.20)", "ipykernel", "jinja2", "jupyter-client", "jupyter-server", "mistune (<1.0.0)", "myst-parser", "nbformat", "prometheus-client", "pydata-sphinx-theme", "send2trash", "sphinxcontrib-github-alt", "sphinxcontrib-openapi", "sphinxemoji", "tornado"] +lint = ["black (>=22.6.0)", "mdformat (>0.7)", "ruff (>=0.0.156)"] +test = ["ipykernel", "pre-commit", "pytest (>=7.0)", "pytest-console-scripts", "pytest-jupyter[server] (>=0.4)", "pytest-timeout", "requests"] +typing = ["mypy (>=0.990)"] + +[[package]] +name = "jupyter-server-terminals" +version = "0.4.2" +description = "A Jupyter Server Extension Providing Terminals." 
+category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jupyter_server_terminals-0.4.2-py3-none-any.whl", hash = "sha256:c0eaacee6cac21b597c23c38dd523dc4e9b947f97af5101e0396c08f28db3e37"}, + {file = "jupyter_server_terminals-0.4.2.tar.gz", hash = "sha256:0e68cba38eb0f9f2d93f1160e0a7f84b943d0d0c4d2f77eeaabbb4a2919c47c6"}, +] + +[package.dependencies] +pywinpty = {version = ">=2.0.3", markers = "os_name == \"nt\""} +terminado = ">=0.8.3" + +[package.extras] +docs = ["jinja2", "jupyter-server", "mistune (<2.0)", "myst-parser", "nbformat", "packaging", "pydata-sphinx-theme", "sphinxcontrib-github-alt", "sphinxcontrib-openapi", "sphinxemoji", "tornado"] +test = ["coverage", "jupyter-server (>=2.0.0rc8)", "pytest (>=7.0)", "pytest-cov", "pytest-jupyter[server] (>=0.5.3)", "pytest-timeout"] + +[[package]] +name = "jupyterlab-pygments" +version = "0.2.2" +description = "Pygments theme using JupyterLab CSS variables" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ {file = "jupyterlab_pygments-0.2.2-py2.py3-none-any.whl", hash = "sha256:2405800db07c9f770863bcf8049a529c3dd4d3e28536638bd7c1c01d2748309f"}, {file = "jupyterlab_pygments-0.2.2.tar.gz", hash = "sha256:7405d7fde60819d905a9fa8ce89e4cd830e318cdad22a0030f7a901da705585d"}, ] -jupyterlab-widgets = [ - {file = "jupyterlab_widgets-3.0.3-py3-none-any.whl", hash = "sha256:6aa1bc0045470d54d76b9c0b7609a8f8f0087573bae25700a370c11f82cb38c8"}, - {file = "jupyterlab_widgets-3.0.3.tar.gz", hash = "sha256:c767181399b4ca8b647befe2d913b1260f51bf9d8ef9b7a14632d4c1a7b536bd"}, + +[[package]] +name = "jupyterlab-widgets" +version = "3.0.4" +description = "Jupyter interactive widgets for JupyterLab" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "jupyterlab_widgets-3.0.4-py3-none-any.whl", hash = "sha256:4c9275daa6d20fc96c3aea45756ece7110850d035b0b93a6a40e918016b927da"}, + {file = "jupyterlab_widgets-3.0.4.tar.gz", hash = "sha256:9a568e022b8bb53ab23291f6ddb52f8002b789c2c5763378cbc882be1d619be8"}, ] -langcodes = [ + +[[package]] +name = "langcodes" +version = "3.3.0" +description = "Tools for labeling human languages with IETF language tags" +category = "main" +optional = true +python-versions = ">=3.6" +files = [ {file = "langcodes-3.3.0-py3-none-any.whl", hash = "sha256:4d89fc9acb6e9c8fdef70bcdf376113a3db09b67285d9e1d534de6d8818e7e69"}, {file = "langcodes-3.3.0.tar.gz", hash = "sha256:794d07d5a28781231ac335a1561b8442f8648ca07cd518310aeb45d6f0807ef6"}, ] -manifest-ml = [ + +[package.extras] +data = ["language-data (>=1.1,<2.0)"] + +[[package]] +name = "manifest-ml" +version = "0.0.1" +description = "Manifest for Prompt Programming Foundation Models." 
+category = "main" +optional = true +python-versions = ">=3.8.0" +files = [ {file = "manifest-ml-0.0.1.tar.gz", hash = "sha256:f828faf7de41fad5318254beec08acdf5142196e0e22203a4047412c2d3127a0"}, {file = "manifest_ml-0.0.1-py2.py3-none-any.whl", hash = "sha256:fc4e62e706fd767fd8851d91051fdb71bc79b2df9c66f5879736c46d8163a316"}, ] -markupsafe = [ + +[package.dependencies] +dill = ">=0.3.5" +redis = ">=4.3.1" +requests = ">=2.27.1" +sqlitedict = ">=2.0.0" +tqdm = ">=4.64.0" + +[package.extras] +all = ["Flask (>=2.1.2)", "accelerate (>=0.10.0)", "autopep8 (>=1.6.0)", "black (>=22.3.0)", "docformatter (>=1.4)", "flake8 (>=4.0.0)", "flake8-docstrings (>=1.6.0)", "isort (>=5.9.3)", "mypy (>=0.950)", "nbsphinx (>=0.8.0)", "pep8-naming (>=0.12.1)", "pre-commit (>=2.14.0)", "pytest (>=7.0.0)", "pytest-cov (>=3.0.0)", "python-dotenv (>=0.20.0)", "recommonmark (>=0.7.1)", "sphinx-autobuild", "sphinx-rtd-theme (>=0.5.1)", "torch (>=1.8.0)", "transformers (>=4.20.0)", "twine", "types-PyYAML (>=6.0.7)", "types-protobuf (>=3.19.21)", "types-python-dateutil (>=2.8.16)", "types-redis (>=4.2.6)", "types-requests (>=2.27.29)", "types-setuptools (>=57.4.17)"] +api = ["Flask (>=2.1.2)", "accelerate (>=0.10.0)", "torch (>=1.8.0)", "transformers (>=4.20.0)"] +dev = ["autopep8 (>=1.6.0)", "black (>=22.3.0)", "docformatter (>=1.4)", "flake8 (>=4.0.0)", "flake8-docstrings (>=1.6.0)", "isort (>=5.9.3)", "mypy (>=0.950)", "nbsphinx (>=0.8.0)", "pep8-naming (>=0.12.1)", "pre-commit (>=2.14.0)", "pytest (>=7.0.0)", "pytest-cov (>=3.0.0)", "python-dotenv (>=0.20.0)", "recommonmark (>=0.7.1)", "sphinx-autobuild", "sphinx-rtd-theme (>=0.5.1)", "twine", "types-PyYAML (>=6.0.7)", "types-protobuf (>=3.19.21)", "types-python-dateutil (>=2.8.16)", "types-redis (>=4.2.6)", "types-requests (>=2.27.29)", "types-setuptools (>=57.4.17)"] + +[[package]] +name = "markupsafe" +version = "2.1.1" +description = "Safely add untrusted strings to HTML/XML markup." 
+category = "main" +optional = false +python-versions = ">=3.7" +files = [ {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812"}, {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a"}, {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e"}, @@ -2581,19 +1501,54 @@ markupsafe = [ {file = "MarkupSafe-2.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247"}, {file = "MarkupSafe-2.1.1.tar.gz", hash = "sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b"}, ] -matplotlib-inline = [ + +[[package]] +name = "matplotlib-inline" +version = "0.1.6" +description = "Inline Matplotlib backend for Jupyter" +category = "dev" +optional = false +python-versions = ">=3.5" +files = [ {file = "matplotlib-inline-0.1.6.tar.gz", hash = "sha256:f887e5f10ba98e8d2b150ddcf4702c1e5f8b3a20005eb0f74bfdbd360ee6f304"}, {file = "matplotlib_inline-0.1.6-py3-none-any.whl", hash = "sha256:f1f41aab5328aa5aaea9b16d083b128102f8712542f819fe7e6a420ff581b311"}, ] -mccabe = [ + +[package.dependencies] +traitlets = "*" + +[[package]] +name = "mccabe" +version = "0.7.0" +description = "McCabe checker, plugin for flake8" +category = "dev" +optional = false +python-versions = ">=3.6" +files = [ {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, ] -mistune = [ + +[[package]] +name = "mistune" +version = "2.0.4" +description = "A sane Markdown parser with useful plugins and renderers" +category = "dev" +optional = false +python-versions = "*" +files = [ {file = "mistune-2.0.4-py2.py3-none-any.whl", hash = "sha256:182cc5ee6f8ed1b807de6b7bb50155df7b66495412836b9a74c8fbdfc75fe36d"}, {file = "mistune-2.0.4.tar.gz", hash = "sha256:9ee0a66053e2267aba772c71e06891fa8f1af6d4b01d5e84e267b4570d4d9808"}, ] -murmurhash = [ + +[[package]] +name = "murmurhash" +version = "1.0.9" +description = "Cython bindings for MurmurHash" +category = "main" +optional = true +python-versions = ">=3.6" +files = [ {file = "murmurhash-1.0.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:697ed01454d92681c7ae26eb1adcdc654b54062bcc59db38ed03cad71b23d449"}, {file = "murmurhash-1.0.9-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5ef31b5c11be2c064dbbdd0e22ab3effa9ceb5b11ae735295c717c120087dd94"}, {file = "murmurhash-1.0.9-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7a2bd203377a31bbb2d83fe3f968756d6c9bbfa36c64c6ebfc3c6494fc680bc"}, @@ -2623,7 +1578,15 @@ murmurhash = [ {file = "murmurhash-1.0.9-cp39-cp39-win_amd64.whl", hash = "sha256:379bf6b414bd27dd36772dd1570565a7d69918e980457370838bd514df0d91e9"}, {file = "murmurhash-1.0.9.tar.gz", hash = "sha256:fe7a38cb0d3d87c14ec9dddc4932ffe2dbc77d75469ab80fd5014689b0e07b58"}, ] -mypy = [ + +[[package]] +name = "mypy" +version = "0.991" +description = "Optional static typing for Python" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ {file = "mypy-0.991-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7d17e0a9707d0772f4a7b878f04b4fd11f6f5bcb9b3813975a9b13c9332153ab"}, {file = 
"mypy-0.991-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0714258640194d75677e86c786e80ccf294972cc76885d3ebbb560f11db0003d"}, {file = "mypy-0.991-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0c8f3be99e8a8bd403caa8c03be619544bc2c77a7093685dcf308c6b109426c6"}, @@ -2655,43 +1618,248 @@ mypy = [ {file = "mypy-0.991-py3-none-any.whl", hash = "sha256:de32edc9b0a7e67c2775e574cb061a537660e51210fbf6006b0b36ea695ae9bb"}, {file = "mypy-0.991.tar.gz", hash = "sha256:3c0165ba8f354a6d9881809ef29f1a9318a236a6d81c690094c5df32107bde06"}, ] -mypy-extensions = [ + +[package.dependencies] +mypy-extensions = ">=0.4.3" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing-extensions = ">=3.10" + +[package.extras] +dmypy = ["psutil (>=4.0)"] +install-types = ["pip"] +python2 = ["typed-ast (>=1.4.0,<2)"] +reports = ["lxml"] + +[[package]] +name = "mypy-extensions" +version = "0.4.3" +description = "Experimental type system extensions for programs checked with the mypy typechecker." +category = "dev" +optional = false +python-versions = "*" +files = [ {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, ] -nbclassic = [ + +[[package]] +name = "nbclassic" +version = "0.4.8" +description = "A web-based notebook environment for interactive computing" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ {file = "nbclassic-0.4.8-py3-none-any.whl", hash = "sha256:cbf05df5842b420d5cece0143462380ea9d308ff57c2dc0eb4d6e035b18fbfb3"}, {file = "nbclassic-0.4.8.tar.gz", hash = "sha256:c74d8a500f8e058d46b576a41e5bc640711e1032cf7541dde5f73ea49497e283"}, ] -nbclient = [ + +[package.dependencies] +argon2-cffi = "*" +ipykernel = "*" +ipython-genutils = "*" +jinja2 = "*" +jupyter-client = ">=6.1.1" +jupyter-core = ">=4.6.1" +jupyter-server = ">=1.8" +nbconvert = ">=5" +nbformat = "*" +nest-asyncio = ">=1.5" +notebook-shim = ">=0.1.0" +prometheus-client = "*" +pyzmq = ">=17" +Send2Trash = ">=1.8.0" +terminado = ">=0.8.3" +tornado = ">=6.1" +traitlets = ">=4.2.1" + +[package.extras] +docs = ["myst-parser", "nbsphinx", "sphinx", "sphinx-rtd-theme", "sphinxcontrib-github-alt"] +json-logging = ["json-logging"] +test = ["coverage", "nbval", "pytest", "pytest-cov", "pytest-playwright", "pytest-tornasync", "requests", "requests-unixsocket", "testpath"] + +[[package]] +name = "nbclient" +version = "0.7.2" +description = "A client library for executing notebooks. Formerly nbconvert's ExecutePreprocessor." 
+category = "dev" +optional = false +python-versions = ">=3.7.0" +files = [ {file = "nbclient-0.7.2-py3-none-any.whl", hash = "sha256:d97ac6257de2794f5397609df754fcbca1a603e94e924eb9b99787c031ae2e7c"}, {file = "nbclient-0.7.2.tar.gz", hash = "sha256:884a3f4a8c4fc24bb9302f263e0af47d97f0d01fe11ba714171b320c8ac09547"}, ] -nbconvert = [ + +[package.dependencies] +jupyter-client = ">=6.1.12" +jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" +nbformat = ">=5.1" +traitlets = ">=5.3" + +[package.extras] +dev = ["pre-commit"] +docs = ["autodoc-traits", "mock", "moto", "myst-parser", "nbclient[test]", "sphinx (>=1.7)", "sphinx-book-theme"] +test = ["ipykernel", "ipython", "ipywidgets", "nbconvert (>=7.0.0)", "pytest (>=7.0)", "pytest-asyncio", "pytest-cov (>=4.0)", "testpath", "xmltodict"] + +[[package]] +name = "nbconvert" +version = "7.2.6" +description = "Converting Jupyter Notebooks" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ {file = "nbconvert-7.2.6-py3-none-any.whl", hash = "sha256:f933e82fe48b9a421e4252249f6c0a9a9940dc555642b4729f3f1f526bb16779"}, {file = "nbconvert-7.2.6.tar.gz", hash = "sha256:c9c0e4b26326f7658ebf4cda0acc591b9727c4e3ee3ede962f70c11833b71b40"}, ] -nbformat = [ + +[package.dependencies] +beautifulsoup4 = "*" +bleach = "*" +defusedxml = "*" +importlib-metadata = {version = ">=3.6", markers = "python_version < \"3.10\""} +jinja2 = ">=3.0" +jupyter-core = ">=4.7" +jupyterlab-pygments = "*" +markupsafe = ">=2.0" +mistune = ">=2.0.3,<3" +nbclient = ">=0.5.0" +nbformat = ">=5.1" +packaging = "*" +pandocfilters = ">=1.4.1" +pygments = ">=2.4.1" +tinycss2 = "*" +traitlets = ">=5.0" + +[package.extras] +all = ["nbconvert[docs,qtpdf,serve,test,webpdf]"] +docs = ["ipykernel", "ipython", "myst-parser", "nbsphinx (>=0.2.12)", "pydata-sphinx-theme", "sphinx (==5.0.2)"] +qtpdf = ["nbconvert[qtpng]"] +qtpng = ["pyqtwebengine (>=5.15)"] +serve = ["tornado (>=6.1)"] +test = ["ipykernel", "ipywidgets (>=7)", "pre-commit", "pyppeteer (>=1,<1.1)", "pytest", "pytest-dependency"] +webpdf = ["pyppeteer (>=1,<1.1)"] + +[[package]] +name = "nbformat" +version = "5.7.0" +description = "The Jupyter Notebook format" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ {file = "nbformat-5.7.0-py3-none-any.whl", hash = "sha256:1b05ec2c552c2f1adc745f4eddce1eac8ca9ffd59bb9fd859e827eaa031319f9"}, {file = "nbformat-5.7.0.tar.gz", hash = "sha256:1d4760c15c1a04269ef5caf375be8b98dd2f696e5eb9e603ec2bf091f9b0d3f3"}, ] -nest-asyncio = [ + +[package.dependencies] +fastjsonschema = "*" +jsonschema = ">=2.6" +jupyter-core = "*" +traitlets = ">=5.1" + +[package.extras] +test = ["check-manifest", "pep440", "pre-commit", "pytest", "testpath"] + +[[package]] +name = "nest-asyncio" +version = "1.5.6" +description = "Patch asyncio to allow nested event loops" +category = "dev" +optional = false +python-versions = ">=3.5" +files = [ {file = "nest_asyncio-1.5.6-py3-none-any.whl", hash = "sha256:b9a953fb40dceaa587d109609098db21900182b16440652454a146cffb06e8b8"}, {file = "nest_asyncio-1.5.6.tar.gz", hash = "sha256:d267cc1ff794403f7df692964d1d2a3fa9418ffea2a3f6859a439ff482fef290"}, ] -nltk = [ - {file = "nltk-3.7-py3-none-any.whl", hash = "sha256:ba3de02490308b248f9b94c8bc1ac0683e9aa2ec49ee78536d8667afb5e3eec8"}, - {file = "nltk-3.7.zip", hash = "sha256:d6507d6460cec76d70afea4242a226a7542f85c669177b9c7f562b7cf1b05502"}, + +[[package]] +name = "nltk" +version = "3.8" +description = "Natural Language Toolkit" +category = "main" +optional = true +python-versions = ">=3.7" +files = [ + 
{file = "nltk-3.8-py3-none-any.whl", hash = "sha256:3306502f487aa9fb0566e23443fa287a85a8d8d0821e2ef1655b4e3f0ea4aeee"}, + {file = "nltk-3.8.zip", hash = "sha256:74b30826a37d78d53427105bbd037dd880251be269fca64ee530838a46ed55fc"}, ] -notebook = [ + +[package.dependencies] +click = "*" +joblib = "*" +regex = ">=2021.8.3" +tqdm = "*" + +[package.extras] +all = ["matplotlib", "numpy", "pyparsing", "python-crfsuite", "requests", "scikit-learn", "scipy", "twython"] +corenlp = ["requests"] +machine-learning = ["numpy", "python-crfsuite", "scikit-learn", "scipy"] +plot = ["matplotlib"] +tgrep = ["pyparsing"] +twitter = ["twython"] + +[[package]] +name = "notebook" +version = "6.5.2" +description = "A web-based notebook environment for interactive computing" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ {file = "notebook-6.5.2-py3-none-any.whl", hash = "sha256:e04f9018ceb86e4fa841e92ea8fb214f8d23c1cedfde530cc96f92446924f0e4"}, {file = "notebook-6.5.2.tar.gz", hash = "sha256:c1897e5317e225fc78b45549a6ab4b668e4c996fd03a04e938fe5e7af2bfffd0"}, ] -notebook-shim = [ + +[package.dependencies] +argon2-cffi = "*" +ipykernel = "*" +ipython-genutils = "*" +jinja2 = "*" +jupyter-client = ">=5.3.4" +jupyter-core = ">=4.6.1" +nbclassic = ">=0.4.7" +nbconvert = ">=5" +nbformat = "*" +nest-asyncio = ">=1.5" +prometheus-client = "*" +pyzmq = ">=17" +Send2Trash = ">=1.8.0" +terminado = ">=0.8.3" +tornado = ">=6.1" +traitlets = ">=4.2.1" + +[package.extras] +docs = ["myst-parser", "nbsphinx", "sphinx", "sphinx-rtd-theme", "sphinxcontrib-github-alt"] +json-logging = ["json-logging"] +test = ["coverage", "nbval", "pytest", "pytest-cov", "requests", "requests-unixsocket", "selenium (==4.1.5)", "testpath"] + +[[package]] +name = "notebook-shim" +version = "0.2.2" +description = "A shim layer for notebook traits and config" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ {file = "notebook_shim-0.2.2-py3-none-any.whl", hash = "sha256:9c6c30f74c4fbea6fce55c1be58e7fd0409b1c681b075dcedceb005db5026949"}, {file = "notebook_shim-0.2.2.tar.gz", hash = "sha256:090e0baf9a5582ff59b607af523ca2db68ff216da0c69956b62cab2ef4fc9c3f"}, ] -numpy = [ + +[package.dependencies] +jupyter-server = ">=1.8,<3" + +[package.extras] +test = ["pytest", "pytest-console-scripts", "pytest-tornasync"] + +[[package]] +name = "numpy" +version = "1.23.5" +description = "NumPy is the fundamental package for array computing with Python." 
+category = "main" +optional = false +python-versions = ">=3.8" +files = [ {file = "numpy-1.23.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9c88793f78fca17da0145455f0d7826bcb9f37da4764af27ac945488116efe63"}, {file = "numpy-1.23.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e9f4c4e51567b616be64e05d517c79a8a22f3606499941d97bb76f2ca59f982d"}, {file = "numpy-1.23.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7903ba8ab592b82014713c491f6c5d3a1cde5b4a3bf116404e08f5b52f6daf43"}, @@ -2721,43 +1889,145 @@ numpy = [ {file = "numpy-1.23.5-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:01dd17cbb340bf0fc23981e52e1d18a9d4050792e8fb8363cecbf066a84b827d"}, {file = "numpy-1.23.5.tar.gz", hash = "sha256:1b1766d6f397c18153d40015ddfc79ddb715cabadc04d2d228d4e5a8bc4ded1a"}, ] -packaging = [ - {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, - {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, + +[[package]] +name = "packaging" +version = "22.0" +description = "Core utilities for Python packages" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-22.0-py3-none-any.whl", hash = "sha256:957e2148ba0e1a3b282772e791ef1d8083648bc131c8ab0c1feba110ce1146c3"}, + {file = "packaging-22.0.tar.gz", hash = "sha256:2198ec20bd4c017b8f9717e00f0c8714076fc2fd93816750ab48e2c41de2cfd3"}, ] -pandocfilters = [ + +[[package]] +name = "pandocfilters" +version = "1.5.0" +description = "Utilities for writing pandoc filters in python" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ {file = "pandocfilters-1.5.0-py2.py3-none-any.whl", hash = "sha256:33aae3f25fd1a026079f5d27bdd52496f0e0803b3469282162bafdcbdf6ef14f"}, {file = "pandocfilters-1.5.0.tar.gz", hash = "sha256:0b679503337d233b4339a817bfc8c50064e2eff681314376a47cb582305a7a38"}, ] -parso = [ + +[[package]] +name = "parso" +version = "0.8.3" +description = "A Python Parser" +category = "dev" +optional = false +python-versions = ">=3.6" +files = [ {file = "parso-0.8.3-py2.py3-none-any.whl", hash = "sha256:c001d4636cd3aecdaf33cbb40aebb59b094be2a74c556778ef5576c175e19e75"}, {file = "parso-0.8.3.tar.gz", hash = "sha256:8c07be290bb59f03588915921e29e8a50002acaf2cdc5fa0e0114f91709fafa0"}, ] -pathspec = [ - {file = "pathspec-0.10.2-py3-none-any.whl", hash = "sha256:88c2606f2c1e818b978540f73ecc908e13999c6c3a383daf3705652ae79807a5"}, - {file = "pathspec-0.10.2.tar.gz", hash = "sha256:8f6bf73e5758fd365ef5d58ce09ac7c27d2833a8d7da51712eac6e27e35141b0"}, + +[package.extras] +qa = ["flake8 (==3.8.3)", "mypy (==0.782)"] +testing = ["docopt", "pytest (<6.0.0)"] + +[[package]] +name = "pathspec" +version = "0.10.3" +description = "Utility library for gitignore style pattern matching of file paths." 
+category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pathspec-0.10.3-py3-none-any.whl", hash = "sha256:3c95343af8b756205e2aba76e843ba9520a24dd84f68c22b9f93251507509dd6"}, + {file = "pathspec-0.10.3.tar.gz", hash = "sha256:56200de4077d9d0791465aa9095a01d421861e405b5096955051deefd697d6f6"}, ] -pathy = [ - {file = "pathy-0.10.0-py3-none-any.whl", hash = "sha256:205d6da31c47334227d364ad8c13b848eb3254701553eb179f3faaec3abd0204"}, - {file = "pathy-0.10.0.tar.gz", hash = "sha256:939822c326913cd0ab48f5928c8c40afcc59c5b093eac328348dd16700ab49e9"}, + +[[package]] +name = "pathy" +version = "0.10.1" +description = "pathlib.Path subclasses for local and cloud bucket storage" +category = "main" +optional = true +python-versions = ">= 3.6" +files = [ + {file = "pathy-0.10.1-py3-none-any.whl", hash = "sha256:a7613ee2d99a0a3300e1d836322e2d947c85449fde59f52906f995dbff67dad4"}, + {file = "pathy-0.10.1.tar.gz", hash = "sha256:4cd6e71b4cd5ff875cfbb949ad9fa5519d8d1dbe69d5fc1d1b23aa3cb049618b"}, ] -pexpect = [ + +[package.dependencies] +smart-open = ">=5.2.1,<7.0.0" +typer = ">=0.3.0,<1.0.0" + +[package.extras] +all = ["azure-storage-blob", "boto3", "google-cloud-storage (>=1.26.0,<2.0.0)", "mock", "pytest", "pytest-coverage", "typer-cli"] +azure = ["azure-storage-blob"] +gcs = ["google-cloud-storage (>=1.26.0,<2.0.0)"] +s3 = ["boto3"] +test = ["mock", "pytest", "pytest-coverage", "typer-cli"] + +[[package]] +name = "pexpect" +version = "4.8.0" +description = "Pexpect allows easy control of interactive console applications." +category = "dev" +optional = false +python-versions = "*" +files = [ {file = "pexpect-4.8.0-py2.py3-none-any.whl", hash = "sha256:0b48a55dcb3c05f3329815901ea4fc1537514d6ba867a152b581d69ae3710937"}, {file = "pexpect-4.8.0.tar.gz", hash = "sha256:fc65a43959d153d0114afe13997d439c22823a27cefceb5ff35c2178c6784c0c"}, ] -pickleshare = [ + +[package.dependencies] +ptyprocess = ">=0.5" + +[[package]] +name = "pickleshare" +version = "0.7.5" +description = "Tiny 'shelve'-like database with concurrency support" +category = "dev" +optional = false +python-versions = "*" +files = [ {file = "pickleshare-0.7.5-py2.py3-none-any.whl", hash = "sha256:9649af414d74d4df115d5d718f82acb59c9d418196b7b4290ed47a12ce62df56"}, {file = "pickleshare-0.7.5.tar.gz", hash = "sha256:87683d47965c1da65cdacaf31c8441d12b8044cdec9aca500cd78fc2c683afca"}, ] -pkgutil-resolve-name = [ + +[[package]] +name = "pkgutil-resolve-name" +version = "1.3.10" +description = "Resolve a name to an object." +category = "dev" +optional = false +python-versions = ">=3.6" +files = [ {file = "pkgutil_resolve_name-1.3.10-py3-none-any.whl", hash = "sha256:ca27cc078d25c5ad71a9de0a7a330146c4e014c2462d9af19c6b828280649c5e"}, {file = "pkgutil_resolve_name-1.3.10.tar.gz", hash = "sha256:357d6c9e6a755653cfd78893817c0853af365dd51ec97f3d358a819373bbd174"}, ] -platformdirs = [ - {file = "platformdirs-2.5.4-py3-none-any.whl", hash = "sha256:af0276409f9a02373d540bf8480021a048711d572745aef4b7842dad245eba10"}, - {file = "platformdirs-2.5.4.tar.gz", hash = "sha256:1006647646d80f16130f052404c6b901e80ee4ed6bef6792e1f238a8969106f7"}, + +[[package]] +name = "platformdirs" +version = "2.6.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
+category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "platformdirs-2.6.0-py3-none-any.whl", hash = "sha256:1a89a12377800c81983db6be069ec068eee989748799b946cce2a6e80dcc54ca"}, + {file = "platformdirs-2.6.0.tar.gz", hash = "sha256:b46ffafa316e6b83b47489d240ce17173f123a9b9c83282141c3daf26ad9ac2e"}, ] -playwright = [ + +[package.extras] +docs = ["furo (>=2022.9.29)", "proselint (>=0.13)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.4)"] +test = ["appdirs (==1.4.4)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"] + +[[package]] +name = "playwright" +version = "1.28.0" +description = "A high-level API to automate web browsers" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ {file = "playwright-1.28.0-py3-none-macosx_10_13_x86_64.whl", hash = "sha256:2e101b17e4d5252ef96c9dc8b2ac17f2980dde0420728c1c96a77eeaf6f9b11f"}, {file = "playwright-1.28.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:265f47aaa42c7986316100f5f468f8654e9a1609c2a2578743e25d058bddc1e6"}, {file = "playwright-1.28.0-py3-none-macosx_11_0_universal2.whl", hash = "sha256:a21ddd7b6f6afd434a73471f7cd39673286f0ca88b62b756d90264eb7b5a7daf"}, @@ -2766,11 +2036,36 @@ playwright = [ {file = "playwright-1.28.0-py3-none-win32.whl", hash = "sha256:8557d92718ce45814aff017fa1774ab92089e40b6c16a8073d5a7c4d583d4aed"}, {file = "playwright-1.28.0-py3-none-win_amd64.whl", hash = "sha256:794b9da616c03354a12e48ddf060a9e776ab59b90662b0131ff74ec1b25739f4"}, ] -pluggy = [ + +[package.dependencies] +greenlet = "2.0.1" +pyee = "9.0.4" +typing-extensions = {version = "*", markers = "python_version <= \"3.8\""} + +[[package]] +name = "pluggy" +version = "1.0.0" +description = "plugin and hook calling mechanisms for python" +category = "dev" +optional = false +python-versions = ">=3.6" +files = [ {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, ] -preshed = [ + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "preshed" +version = "3.0.8" +description = "Cython hash table that trusts the keys are pre-hashed" +category = "main" +optional = true +python-versions = ">=3.6" +files = [ {file = "preshed-3.0.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ea4b6df8ef7af38e864235256793bc3056e9699d991afcf6256fa298858582fc"}, {file = "preshed-3.0.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e945fc814bdc29564a2ce137c237b3a9848aa1e76a1160369b6e0d328151fdd"}, {file = "preshed-3.0.8-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9a4833530fe53001c351974e0c8bb660211b8d0358e592af185fec1ae12b2d0"}, @@ -2800,15 +2095,49 @@ preshed = [ {file = "preshed-3.0.8-cp39-cp39-win_amd64.whl", hash = "sha256:06793022a56782ef51d74f1399925a2ba958e50c5cfbc6fa5b25c4945e158a07"}, {file = "preshed-3.0.8.tar.gz", hash = "sha256:6c74c70078809bfddda17be96483c41d06d717934b07cab7921011d81758b357"}, ] -prometheus-client = [ + +[package.dependencies] +cymem = ">=2.0.2,<2.1.0" +murmurhash = ">=0.28.0,<1.1.0" + +[[package]] +name = "prometheus-client" +version = "0.15.0" +description = "Python client for the Prometheus monitoring system." 
+category = "dev" +optional = false +python-versions = ">=3.6" +files = [ {file = "prometheus_client-0.15.0-py3-none-any.whl", hash = "sha256:db7c05cbd13a0f79975592d112320f2605a325969b270a94b71dcabc47b931d2"}, {file = "prometheus_client-0.15.0.tar.gz", hash = "sha256:be26aa452490cfcf6da953f9436e95a9f2b4d578ca80094b4458930e5f584ab1"}, ] -prompt-toolkit = [ - {file = "prompt_toolkit-3.0.33-py3-none-any.whl", hash = "sha256:ced598b222f6f4029c0800cefaa6a17373fb580cd093223003475ce32805c35b"}, - {file = "prompt_toolkit-3.0.33.tar.gz", hash = "sha256:535c29c31216c77302877d5120aef6c94ff573748a5b5ca5b1b1f76f5e700c73"}, + +[package.extras] +twisted = ["twisted"] + +[[package]] +name = "prompt-toolkit" +version = "3.0.36" +description = "Library for building powerful interactive command lines in Python" +category = "dev" +optional = false +python-versions = ">=3.6.2" +files = [ + {file = "prompt_toolkit-3.0.36-py3-none-any.whl", hash = "sha256:aa64ad242a462c5ff0363a7b9cfe696c20d55d9fc60c11fd8e632d064804d305"}, + {file = "prompt_toolkit-3.0.36.tar.gz", hash = "sha256:3e163f254bef5a03b146397d7c1963bd3e2812f0964bb9a24e6ec761fd28db63"}, ] -psutil = [ + +[package.dependencies] +wcwidth = "*" + +[[package]] +name = "psutil" +version = "5.9.4" +description = "Cross-platform lib for process and system monitoring in Python." +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ {file = "psutil-5.9.4-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:c1ca331af862803a42677c120aff8a814a804e09832f166f226bfd22b56feee8"}, {file = "psutil-5.9.4-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:68908971daf802203f3d37e78d3f8831b6d1014864d7a85937941bb35f09aefe"}, {file = "psutil-5.9.4-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:3ff89f9b835100a825b14c2808a106b6fdcc4b15483141482a12c725e7f78549"}, @@ -2824,27 +2153,81 @@ psutil = [ {file = "psutil-5.9.4-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:6001c809253a29599bc0dfd5179d9f8a5779f9dffea1da0f13c53ee568115e1e"}, {file = "psutil-5.9.4.tar.gz", hash = "sha256:3d7f9739eb435d4b1338944abe23f49584bde5395f27487d2ee25ad9a8774a62"}, ] -ptyprocess = [ + +[package.extras] +test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] + +[[package]] +name = "ptyprocess" +version = "0.7.0" +description = "Run a subprocess in a pseudo terminal" +category = "dev" +optional = false +python-versions = "*" +files = [ {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, ] -pure-eval = [ + +[[package]] +name = "pure-eval" +version = "0.2.2" +description = "Safely evaluate AST nodes without side effects" +category = "dev" +optional = false +python-versions = "*" +files = [ {file = "pure_eval-0.2.2-py3-none-any.whl", hash = "sha256:01eaab343580944bc56080ebe0a674b39ec44a945e6d09ba7db3cb8cec289350"}, {file = "pure_eval-0.2.2.tar.gz", hash = "sha256:2b45320af6dfaa1750f543d714b6d1c520a1688dec6fd24d339063ce0aaa9ac3"}, ] -py = [ + +[package.extras] +tests = ["pytest"] + +[[package]] +name = "py" +version = "1.11.0" +description = "library with cross-python path, ini-parsing, io, code, log facilities" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, 
{file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, ] -pycodestyle = [ + +[[package]] +name = "pycodestyle" +version = "2.10.0" +description = "Python style guide checker" +category = "dev" +optional = false +python-versions = ">=3.6" +files = [ {file = "pycodestyle-2.10.0-py2.py3-none-any.whl", hash = "sha256:8a4eaf0d0495c7395bdab3589ac2db602797d76207242c17d470186815706610"}, {file = "pycodestyle-2.10.0.tar.gz", hash = "sha256:347187bdb476329d98f695c213d7295a846d1152ff4fe9bacb8a9590b8ee7053"}, ] -pycparser = [ + +[[package]] +name = "pycparser" +version = "2.21" +description = "C parser in Python" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, ] -pydantic = [ + +[[package]] +name = "pydantic" +version = "1.10.2" +description = "Data validation and settings management using python type hints" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ {file = "pydantic-1.10.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bb6ad4489af1bac6955d38ebcb95079a836af31e4c4f74aba1ca05bb9f6027bd"}, {file = "pydantic-1.10.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a1f5a63a6dfe19d719b1b6e6106561869d2efaca6167f84f5ab9347887d78b98"}, {file = "pydantic-1.10.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:352aedb1d71b8b0736c6d56ad2bd34c6982720644b0624462059ab29bd6e5912"}, @@ -2882,27 +2265,82 @@ pydantic = [ {file = "pydantic-1.10.2-py3-none-any.whl", hash = "sha256:1b6ee725bd6e83ec78b1aa32c5b1fa67a3a65badddde3976bca5fe4568f27709"}, {file = "pydantic-1.10.2.tar.gz", hash = "sha256:91b8e218852ef6007c2b98cd861601c6a09f1aa32bbbb74fab5b1c33d4a1e410"}, ] -pydocstyle = [ + +[package.dependencies] +typing-extensions = ">=4.1.0" + +[package.extras] +dotenv = ["python-dotenv (>=0.10.4)"] +email = ["email-validator (>=1.0.3)"] + +[[package]] +name = "pydocstyle" +version = "6.1.1" +description = "Python docstring style checker" +category = "dev" +optional = false +python-versions = ">=3.6" +files = [ {file = "pydocstyle-6.1.1-py3-none-any.whl", hash = "sha256:6987826d6775056839940041beef5c08cc7e3d71d63149b48e36727f70144dc4"}, {file = "pydocstyle-6.1.1.tar.gz", hash = "sha256:1d41b7c459ba0ee6c345f2eb9ae827cab14a7533a88c5c6f7e94923f72df92dc"}, ] -pyee = [ + +[package.dependencies] +snowballstemmer = "*" + +[package.extras] +toml = ["toml"] + +[[package]] +name = "pyee" +version = "9.0.4" +description = "A port of node.js's EventEmitter to python." 
+category = "dev" +optional = false +python-versions = "*" +files = [ {file = "pyee-9.0.4-py2.py3-none-any.whl", hash = "sha256:9f066570130c554e9cc12de5a9d86f57c7ee47fece163bbdaa3e9c933cfbdfa5"}, {file = "pyee-9.0.4.tar.gz", hash = "sha256:2770c4928abc721f46b705e6a72b0c59480c4a69c9a83ca0b00bb994f1ea4b32"}, ] -pyflakes = [ + +[package.dependencies] +typing-extensions = "*" + +[[package]] +name = "pyflakes" +version = "3.0.1" +description = "passive checker of Python programs" +category = "dev" +optional = false +python-versions = ">=3.6" +files = [ {file = "pyflakes-3.0.1-py2.py3-none-any.whl", hash = "sha256:ec55bf7fe21fff7f1ad2f7da62363d749e2a470500eab1b555334b67aa1ef8cf"}, {file = "pyflakes-3.0.1.tar.gz", hash = "sha256:ec8b276a6b60bd80defed25add7e439881c19e64850afd9b346283d4165fd0fd"}, ] -pygments = [ + +[[package]] +name = "pygments" +version = "2.13.0" +description = "Pygments is a syntax highlighting package written in Python." +category = "dev" +optional = false +python-versions = ">=3.6" +files = [ {file = "Pygments-2.13.0-py3-none-any.whl", hash = "sha256:f643f331ab57ba3c9d89212ee4a2dabc6e94f117cf4eefde99a0574720d14c42"}, {file = "Pygments-2.13.0.tar.gz", hash = "sha256:56a8508ae95f98e2b9bdf93a6be5ae3f7d8af858b43e02c5a2ff083726be40c1"}, ] -pyparsing = [ - {file = "pyparsing-3.0.9-py3-none-any.whl", hash = "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"}, - {file = "pyparsing-3.0.9.tar.gz", hash = "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"}, -] -pyrsistent = [ + +[package.extras] +plugins = ["importlib-metadata"] + +[[package]] +name = "pyrsistent" +version = "0.19.2" +description = "Persistent/Functional/Immutable data structures" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ {file = "pyrsistent-0.19.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d6982b5a0237e1b7d876b60265564648a69b14017f3b5f908c5be2de3f9abb7a"}, {file = "pyrsistent-0.19.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:187d5730b0507d9285a96fca9716310d572e5464cadd19f22b63a6976254d77a"}, {file = "pyrsistent-0.19.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:055ab45d5911d7cae397dc418808d8802fb95262751872c841c170b0dbf51eed"}, @@ -2926,23 +2364,116 @@ pyrsistent = [ {file = "pyrsistent-0.19.2-py3-none-any.whl", hash = "sha256:ea6b79a02a28550c98b6ca9c35b9f492beaa54d7c5c9e9949555893c8a9234d0"}, {file = "pyrsistent-0.19.2.tar.gz", hash = "sha256:bfa0351be89c9fcbcb8c9879b826f4353be10f58f8a677efab0c017bf7137ec2"}, ] -pytest = [ + +[[package]] +name = "pytest" +version = "7.2.0" +description = "pytest: simple powerful testing with Python" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ {file = "pytest-7.2.0-py3-none-any.whl", hash = "sha256:892f933d339f068883b6fd5a459f03d85bfcb355e4981e146d2c7616c21fef71"}, {file = "pytest-7.2.0.tar.gz", hash = "sha256:c4014eb40e10f11f355ad4e3c2fb2c6c6d1919c73f3b5a433de4708202cade59"}, ] -pytest-dotenv = [ + +[package.dependencies] +attrs = ">=19.2.0" +colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} + +[package.extras] +testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"] + +[[package]] +name = 
"pytest-cov" +version = "4.0.0" +description = "Pytest plugin for measuring coverage." +category = "dev" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pytest-cov-4.0.0.tar.gz", hash = "sha256:996b79efde6433cdbd0088872dbc5fb3ed7fe1578b68cdbba634f14bb8dd0470"}, + {file = "pytest_cov-4.0.0-py3-none-any.whl", hash = "sha256:2feb1b751d66a8bd934e5edfa2e961d11309dc37b73b0eabe73b5945fee20f6b"}, +] + +[package.dependencies] +coverage = {version = ">=5.2.1", extras = ["toml"]} +pytest = ">=4.6" + +[package.extras] +testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] + +[[package]] +name = "pytest-dotenv" +version = "0.5.2" +description = "A py.test plugin that parses environment files before running tests" +category = "dev" +optional = false +python-versions = "*" +files = [ {file = "pytest-dotenv-0.5.2.tar.gz", hash = "sha256:2dc6c3ac6d8764c71c6d2804e902d0ff810fa19692e95fe138aefc9b1aa73732"}, {file = "pytest_dotenv-0.5.2-py3-none-any.whl", hash = "sha256:40a2cece120a213898afaa5407673f6bd924b1fa7eafce6bda0e8abffe2f710f"}, ] -python-dateutil = [ + +[package.dependencies] +pytest = ">=5.0.0" +python-dotenv = ">=0.9.1" + +[[package]] +name = "python-dateutil" +version = "2.8.2" +description = "Extensions to the standard Python datetime module" +category = "dev" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, ] -python-dotenv = [ + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "python-dotenv" +version = "0.21.0" +description = "Read key-value pairs from a .env file and set them as environment variables" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ {file = "python-dotenv-0.21.0.tar.gz", hash = "sha256:b77d08274639e3d34145dfa6c7008e66df0f04b7be7a75fd0d5292c191d79045"}, {file = "python_dotenv-0.21.0-py3-none-any.whl", hash = "sha256:1684eb44636dd462b66c3ee016599815514527ad99965de77f43e0944634a7e5"}, ] -pywin32 = [ + +[package.extras] +cli = ["click (>=5.0)"] + +[[package]] +name = "python-json-logger" +version = "2.0.4" +description = "A python library adding a json log formatter" +category = "dev" +optional = false +python-versions = ">=3.5" +files = [ + {file = "python-json-logger-2.0.4.tar.gz", hash = "sha256:764d762175f99fcc4630bd4853b09632acb60a6224acb27ce08cd70f0b1b81bd"}, + {file = "python_json_logger-2.0.4-py3-none-any.whl", hash = "sha256:3b03487b14eb9e4f77e4fc2a023358b5394b82fd89cecf5586259baed57d8c6f"}, +] + +[[package]] +name = "pywin32" +version = "305" +description = "Python for Window Extensions" +category = "dev" +optional = false +python-versions = "*" +files = [ {file = "pywin32-305-cp310-cp310-win32.whl", hash = "sha256:421f6cd86e84bbb696d54563c48014b12a23ef95a14e0bdba526be756d89f116"}, {file = "pywin32-305-cp310-cp310-win_amd64.whl", hash = "sha256:73e819c6bed89f44ff1d690498c0a811948f73777e5f97c494c152b850fad478"}, {file = "pywin32-305-cp310-cp310-win_arm64.whl", hash = "sha256:742eb905ce2187133a29365b428e6c3b9001d79accdc30aa8969afba1d8470f4"}, @@ -2958,7 +2489,15 @@ pywin32 = [ {file = "pywin32-305-cp39-cp39-win32.whl", hash = "sha256:9d968c677ac4d5cbdaa62fd3014ab241718e619d8e36ef8e11fb930515a1e918"}, {file = "pywin32-305-cp39-cp39-win_amd64.whl", hash = 
"sha256:50768c6b7c3f0b38b7fb14dd4104da93ebced5f1a50dc0e834594bff6fbe1271"}, ] -pywinpty = [ + +[[package]] +name = "pywinpty" +version = "2.0.9" +description = "Pseudo terminal support for Windows from Python." +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ {file = "pywinpty-2.0.9-cp310-none-win_amd64.whl", hash = "sha256:30a7b371446a694a6ce5ef906d70ac04e569de5308c42a2bdc9c3bc9275ec51f"}, {file = "pywinpty-2.0.9-cp311-none-win_amd64.whl", hash = "sha256:d78ef6f4bd7a6c6f94dc1a39ba8fb028540cc39f5cb593e756506db17843125f"}, {file = "pywinpty-2.0.9-cp37-none-win_amd64.whl", hash = "sha256:5ed36aa087e35a3a183f833631b3e4c1ae92fe2faabfce0fa91b77ed3f0f1382"}, @@ -2966,7 +2505,15 @@ pywinpty = [ {file = "pywinpty-2.0.9-cp39-none-win_amd64.whl", hash = "sha256:ba75ec55f46c9e17db961d26485b033deb20758b1731e8e208e1e8a387fcf70c"}, {file = "pywinpty-2.0.9.tar.gz", hash = "sha256:01b6400dd79212f50a2f01af1c65b781290ff39610853db99bf03962eb9a615f"}, ] -pyyaml = [ + +[[package]] +name = "pyyaml" +version = "6.0" +description = "YAML parser and emitter for Python" +category = "main" +optional = false +python-versions = ">=3.6" +files = [ {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"}, {file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"}, {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc"}, @@ -3008,7 +2555,15 @@ pyyaml = [ {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"}, {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"}, ] -pyzmq = [ + +[[package]] +name = "pyzmq" +version = "24.0.1" +description = "Python bindings for 0MQ" +category = "dev" +optional = false +python-versions = ">=3.6" +files = [ {file = "pyzmq-24.0.1-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:28b119ba97129d3001673a697b7cce47fe6de1f7255d104c2f01108a5179a066"}, {file = "pyzmq-24.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bcbebd369493d68162cddb74a9c1fcebd139dfbb7ddb23d8f8e43e6c87bac3a6"}, {file = "pyzmq-24.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ae61446166983c663cee42c852ed63899e43e484abf080089f771df4b9d272ef"}, @@ -3084,19 +2639,82 @@ pyzmq = [ {file = "pyzmq-24.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:687700f8371643916a1d2c61f3fdaa630407dd205c38afff936545d7b7466066"}, {file = "pyzmq-24.0.1.tar.gz", hash = "sha256:216f5d7dbb67166759e59b0479bca82b8acf9bed6015b526b8eb10143fb08e77"}, ] -qtconsole = [ + +[package.dependencies] +cffi = {version = "*", markers = "implementation_name == \"pypy\""} +py = {version = "*", markers = "implementation_name == \"pypy\""} + +[[package]] +name = "qtconsole" +version = "5.4.0" +description = "Jupyter Qt console" +category = "dev" +optional = false +python-versions = ">= 3.7" +files = [ {file = "qtconsole-5.4.0-py3-none-any.whl", hash = "sha256:be13560c19bdb3b54ed9741a915aa701a68d424519e8341ac479a91209e694b2"}, {file = "qtconsole-5.4.0.tar.gz", hash = "sha256:57748ea2fd26320a0b77adba20131cfbb13818c7c96d83fafcb110ff55f58b35"}, ] -qtpy = [ + +[package.dependencies] +ipykernel = ">=4.1" +ipython-genutils = "*" +jupyter-client = ">=4.1" +jupyter-core = "*" +pygments = "*" +pyzmq = ">=17.1" 
+qtpy = ">=2.0.1" +traitlets = "<5.2.1 || >5.2.1,<5.2.2 || >5.2.2" + +[package.extras] +doc = ["Sphinx (>=1.3)"] +test = ["flaky", "pytest", "pytest-qt"] + +[[package]] +name = "qtpy" +version = "2.3.0" +description = "Provides an abstraction layer on top of the various Qt bindings (PyQt5/6 and PySide2/6)." +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ {file = "QtPy-2.3.0-py3-none-any.whl", hash = "sha256:8d6d544fc20facd27360ea189592e6135c614785f0dec0b4f083289de6beb408"}, {file = "QtPy-2.3.0.tar.gz", hash = "sha256:0603c9c83ccc035a4717a12908bf6bc6cb22509827ea2ec0e94c2da7c9ed57c5"}, ] -redis = [ + +[package.dependencies] +packaging = "*" + +[package.extras] +test = ["pytest (>=6,!=7.0.0,!=7.0.1)", "pytest-cov (>=3.0.0)", "pytest-qt"] + +[[package]] +name = "redis" +version = "4.4.0" +description = "Python client for Redis database and key-value store" +category = "main" +optional = true +python-versions = ">=3.7" +files = [ {file = "redis-4.4.0-py3-none-any.whl", hash = "sha256:cae3ee5d1f57d8caf534cd8764edf3163c77e073bdd74b6f54a87ffafdc5e7d9"}, {file = "redis-4.4.0.tar.gz", hash = "sha256:7b8c87d19c45d3f1271b124858d2a5c13160c4e74d4835e28273400fa34d5228"}, ] -regex = [ + +[package.dependencies] +async-timeout = ">=4.0.2" + +[package.extras] +hiredis = ["hiredis (>=1.0.0)"] +ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==20.0.1)", "requests (>=2.26.0)"] + +[[package]] +name = "regex" +version = "2022.10.31" +description = "Alternative regular expression module, to replace re." +category = "main" +optional = true +python-versions = ">=3.6" +files = [ {file = "regex-2022.10.31-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a8ff454ef0bb061e37df03557afda9d785c905dab15584860f982e88be73015f"}, {file = "regex-2022.10.31-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1eba476b1b242620c266edf6325b443a2e22b633217a9835a52d8da2b5c051f9"}, {file = "regex-2022.10.31-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0e5af9a9effb88535a472e19169e09ce750c3d442fb222254a276d77808620b"}, @@ -3186,39 +2804,168 @@ regex = [ {file = "regex-2022.10.31-cp39-cp39-win_amd64.whl", hash = "sha256:957403a978e10fb3ca42572a23e6f7badff39aa1ce2f4ade68ee452dc6807692"}, {file = "regex-2022.10.31.tar.gz", hash = "sha256:a3a98921da9a1bf8457aeee6a551948a83601689e5ecdd736894ea9bbec77e83"}, ] -requests = [ + +[[package]] +name = "requests" +version = "2.28.1" +description = "Python HTTP for Humans." 
+category = "main" +optional = false +python-versions = ">=3.7, <4" +files = [ {file = "requests-2.28.1-py3-none-any.whl", hash = "sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349"}, {file = "requests-2.28.1.tar.gz", hash = "sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983"}, ] -send2trash = [ + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<3" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<1.27" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "rfc3339-validator" +version = "0.1.4" +description = "A pure python RFC3339 validator" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "rfc3339_validator-0.1.4-py2.py3-none-any.whl", hash = "sha256:24f6ec1eda14ef823da9e36ec7113124b39c04d50a4d3d3a3c2859577e7791fa"}, + {file = "rfc3339_validator-0.1.4.tar.gz", hash = "sha256:138a2abdf93304ad60530167e51d2dfb9549521a836871b88d7f4695d0022f6b"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "rfc3986-validator" +version = "0.1.1" +description = "Pure python rfc3986 validator" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "rfc3986_validator-0.1.1-py2.py3-none-any.whl", hash = "sha256:2f235c432ef459970b4306369336b9d5dbdda31b510ca1e327636e01f528bfa9"}, + {file = "rfc3986_validator-0.1.1.tar.gz", hash = "sha256:3d44bde7921b3b9ec3ae4e3adca370438eccebc676456449b145d533b240d055"}, +] + +[[package]] +name = "send2trash" +version = "1.8.0" +description = "Send file to trash natively under Mac OS X, Windows and Linux." +category = "dev" +optional = false +python-versions = "*" +files = [ {file = "Send2Trash-1.8.0-py3-none-any.whl", hash = "sha256:f20eaadfdb517eaca5ce077640cb261c7d2698385a6a0f072a4a5447fd49fa08"}, {file = "Send2Trash-1.8.0.tar.gz", hash = "sha256:d2c24762fd3759860a0aff155e45871447ea58d2be6bdd39b5c8f966a0c99c2d"}, ] -setuptools = [ + +[package.extras] +nativelib = ["pyobjc-framework-Cocoa", "pywin32"] +objc = ["pyobjc-framework-Cocoa"] +win32 = ["pywin32"] + +[[package]] +name = "setuptools" +version = "65.6.3" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +category = "main" +optional = true +python-versions = ">=3.7" +files = [ {file = "setuptools-65.6.3-py3-none-any.whl", hash = "sha256:57f6f22bde4e042978bcd50176fdb381d7c21a9efa4041202288d3737a0c6a54"}, {file = "setuptools-65.6.3.tar.gz", hash = "sha256:a7620757bf984b58deaf32fc8a4577a9bbc0850cf92c20e1ce41c38c19e5fb75"}, ] -six = [ + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path 
(>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] -smart-open = [ - {file = "smart_open-5.2.1-py3-none-any.whl", hash = "sha256:71d14489da58b60ce12fc3ecb823facc59a8b23cd1b58edb97175640350d3a62"}, - {file = "smart_open-5.2.1.tar.gz", hash = "sha256:75abf758717a92a8f53aa96953f0c245c8cedf8e1e4184903db3659b419d4c17"}, + +[[package]] +name = "smart-open" +version = "6.3.0" +description = "Utils for streaming large files (S3, HDFS, GCS, Azure Blob Storage, gzip, bz2...)" +category = "main" +optional = true +python-versions = ">=3.6,<4.0" +files = [ + {file = "smart_open-6.3.0-py3-none-any.whl", hash = "sha256:b4c9ae193ad6d3e7add50944b86afa0d150bd821ab8ec21edb26d9a06b66f6a8"}, + {file = "smart_open-6.3.0.tar.gz", hash = "sha256:d5238825fe9a9340645fac3d75b287c08fbb99fb2b422477de781c9f5f09e019"}, ] -sniffio = [ + +[package.extras] +all = ["azure-common", "azure-core", "azure-storage-blob", "boto3", "google-cloud-storage (>=2.6.0)", "paramiko", "requests"] +azure = ["azure-common", "azure-core", "azure-storage-blob"] +gcs = ["google-cloud-storage (>=2.6.0)"] +http = ["requests"] +s3 = ["boto3"] +ssh = ["paramiko"] +test = ["azure-common", "azure-core", "azure-storage-blob", "boto3", "google-cloud-storage (>=2.6.0)", "moto[server]", "paramiko", "pytest", "pytest-rerunfailures", "requests", "responses"] +webhdfs = ["requests"] + +[[package]] +name = "sniffio" +version = "1.3.0" +description = "Sniff out which async library your code is running under" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ {file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"}, {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"}, ] -snowballstemmer = [ + +[[package]] +name = "snowballstemmer" +version = "2.2.0" +description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." +category = "dev" +optional = false +python-versions = "*" +files = [ {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, ] -soupsieve = [ + +[[package]] +name = "soupsieve" +version = "2.3.2.post1" +description = "A modern CSS selector implementation for Beautiful Soup." 
+category = "main" +optional = false +python-versions = ">=3.6" +files = [ {file = "soupsieve-2.3.2.post1-py3-none-any.whl", hash = "sha256:3b2503d3c7084a42b1ebd08116e5f81aadfaea95863628c80a3b774a11b7c759"}, {file = "soupsieve-2.3.2.post1.tar.gz", hash = "sha256:fc53893b3da2c33de295667a0e19f078c14bf86544af307354de5fcf12a3f30d"}, ] -spacy = [ + +[[package]] +name = "spacy" +version = "3.4.3" +description = "Industrial-strength Natural Language Processing (NLP) in Python" +category = "main" +optional = true +python-versions = ">=3.6" +files = [ {file = "spacy-3.4.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e546b314f619502ae03e5eb9a0cfd09ca7a9db265bcdd8a3af83cfb0f1432e55"}, {file = "spacy-3.4.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ded11aa8966236aab145b4d2d024b3eb61ac50078362d77d9ed7d8c240ef0f4a"}, {file = "spacy-3.4.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:462e141f514d78cff85685b5b12eb8cadac0bad2f7820149cbe18d03ccb2e59c"}, @@ -3248,61 +2995,173 @@ spacy = [ {file = "spacy-3.4.3-cp39-cp39-win_amd64.whl", hash = "sha256:676ab9ab2cf94ba48caa306f185a166e85bd35b388ec24512c8ba7dfcbc7517e"}, {file = "spacy-3.4.3.tar.gz", hash = "sha256:22698cf5175e2b697e82699fcccee3092b42137a57d352df208d71657fd693bb"}, ] -spacy-legacy = [ + +[package.dependencies] +catalogue = ">=2.0.6,<2.1.0" +cymem = ">=2.0.2,<2.1.0" +jinja2 = "*" +langcodes = ">=3.2.0,<4.0.0" +murmurhash = ">=0.28.0,<1.1.0" +numpy = ">=1.15.0" +packaging = ">=20.0" +pathy = ">=0.3.5" +preshed = ">=3.0.2,<3.1.0" +pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<1.11.0" +requests = ">=2.13.0,<3.0.0" +setuptools = "*" +spacy-legacy = ">=3.0.10,<3.1.0" +spacy-loggers = ">=1.0.0,<2.0.0" +srsly = ">=2.4.3,<3.0.0" +thinc = ">=8.1.0,<8.2.0" +tqdm = ">=4.38.0,<5.0.0" +typer = ">=0.3.0,<0.8.0" +wasabi = ">=0.9.1,<1.1.0" + +[package.extras] +apple = ["thinc-apple-ops (>=0.1.0.dev0,<1.0.0)"] +cuda = ["cupy (>=5.0.0b4,<12.0.0)"] +cuda-autodetect = ["cupy-wheel (>=11.0.0,<12.0.0)"] +cuda100 = ["cupy-cuda100 (>=5.0.0b4,<12.0.0)"] +cuda101 = ["cupy-cuda101 (>=5.0.0b4,<12.0.0)"] +cuda102 = ["cupy-cuda102 (>=5.0.0b4,<12.0.0)"] +cuda110 = ["cupy-cuda110 (>=5.0.0b4,<12.0.0)"] +cuda111 = ["cupy-cuda111 (>=5.0.0b4,<12.0.0)"] +cuda112 = ["cupy-cuda112 (>=5.0.0b4,<12.0.0)"] +cuda113 = ["cupy-cuda113 (>=5.0.0b4,<12.0.0)"] +cuda114 = ["cupy-cuda114 (>=5.0.0b4,<12.0.0)"] +cuda115 = ["cupy-cuda115 (>=5.0.0b4,<12.0.0)"] +cuda116 = ["cupy-cuda116 (>=5.0.0b4,<12.0.0)"] +cuda117 = ["cupy-cuda117 (>=5.0.0b4,<12.0.0)"] +cuda11x = ["cupy-cuda11x (>=11.0.0,<12.0.0)"] +cuda80 = ["cupy-cuda80 (>=5.0.0b4,<12.0.0)"] +cuda90 = ["cupy-cuda90 (>=5.0.0b4,<12.0.0)"] +cuda91 = ["cupy-cuda91 (>=5.0.0b4,<12.0.0)"] +cuda92 = ["cupy-cuda92 (>=5.0.0b4,<12.0.0)"] +ja = ["sudachidict-core (>=20211220)", "sudachipy (>=0.5.2,!=0.6.1)"] +ko = ["natto-py (>=0.9.0)"] +lookups = ["spacy-lookups-data (>=1.0.3,<1.1.0)"] +ray = ["spacy-ray (>=0.1.0,<1.0.0)"] +th = ["pythainlp (>=2.0)"] +transformers = ["spacy-transformers (>=1.1.2,<1.2.0)"] + +[[package]] +name = "spacy-legacy" +version = "3.0.10" +description = "Legacy registered functions for spaCy backwards compatibility" +category = "main" +optional = true +python-versions = ">=3.6" +files = [ {file = "spacy-legacy-3.0.10.tar.gz", hash = "sha256:16104595d8ab1b7267f817a449ad1f986eb1f2a2edf1050748f08739a479679a"}, {file = "spacy_legacy-3.0.10-py2.py3-none-any.whl", hash = "sha256:8526a54d178dee9b7f218d43e5c21362c59056c5da23380b319b56043e9211f3"}, ] -spacy-loggers = [ - {file = 
"spacy-loggers-1.0.3.tar.gz", hash = "sha256:00f6fd554db9fd1fde6501b23e1f0e72f6eef14bb1e7fc15456d11d1d2de92ca"}, - {file = "spacy_loggers-1.0.3-py3-none-any.whl", hash = "sha256:f74386b390a023f9615dcb499b7b4ad63338236a8187f0ec4dfe265a9f665ee8"}, + +[[package]] +name = "spacy-loggers" +version = "1.0.4" +description = "Logging utilities for SpaCy" +category = "main" +optional = true +python-versions = ">=3.6" +files = [ + {file = "spacy-loggers-1.0.4.tar.gz", hash = "sha256:e6f983bf71230091d5bb7b11bf64bd54415eca839108d5f83d9155d0ba93bf28"}, + {file = "spacy_loggers-1.0.4-py3-none-any.whl", hash = "sha256:e050bf2e63208b2f096b777e494971c962ad7c1dc997641c8f95c622550044ae"}, ] -sqlalchemy = [ - {file = "SQLAlchemy-1.4.44-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:da60b98b0f6f0df9fbf8b72d67d13b73aa8091923a48af79a951d4088530a239"}, - {file = "SQLAlchemy-1.4.44-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:95f4f8d62589755b507218f2e3189475a4c1f5cc9db2aec772071a7dc6cd5726"}, - {file = "SQLAlchemy-1.4.44-cp27-cp27m-win32.whl", hash = "sha256:afd1ac99179d1864a68c06b31263a08ea25a49df94e272712eb2824ef151e294"}, - {file = "SQLAlchemy-1.4.44-cp27-cp27m-win_amd64.whl", hash = "sha256:f8e5443295b218b08bef8eb85d31b214d184b3690d99a33b7bd8e5591e2b0aa1"}, - {file = "SQLAlchemy-1.4.44-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:53f90a2374f60e703c94118d21533765412da8225ba98659de7dd7998641ab17"}, - {file = "SQLAlchemy-1.4.44-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:65a0ad931944fcb0be12a8e0ac322dbd3ecf17c53f088bc10b6da8f0caac287b"}, - {file = "SQLAlchemy-1.4.44-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:595b185041a4dc5c685283ea98c2f67bbfa47bb28e4a4f5b27ebf40684e7a9f8"}, - {file = "SQLAlchemy-1.4.44-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:80ead36fb1d676cc019586ffdc21c7e906ce4bf243fe4021e4973dae332b6038"}, - {file = "SQLAlchemy-1.4.44-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68e0cd5d32a32c4395168d42f2fefbb03b817ead3a8f3704b8bd5697c0b26c24"}, - {file = "SQLAlchemy-1.4.44-cp310-cp310-win32.whl", hash = "sha256:ae1ed1ebc407d2f66c6f0ec44ef7d56e3f455859df5494680e2cf89dad8e3ae0"}, - {file = "SQLAlchemy-1.4.44-cp310-cp310-win_amd64.whl", hash = "sha256:6f0ea4d7348feb5e5d0bf317aace92e28398fa9a6e38b7be9ec1f31aad4a8039"}, - {file = "SQLAlchemy-1.4.44-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f5e8ed9cde48b76318ab989deeddc48f833d2a6a7b7c393c49b704f67dedf01d"}, - {file = "SQLAlchemy-1.4.44-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c857676d810ca196be73c98eb839125d6fa849bfa3589be06201a6517f9961c"}, - {file = "SQLAlchemy-1.4.44-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c56e6899fa6e767e4be5d106941804a4201c5cb9620a409c0b80448ec70b656"}, - {file = "SQLAlchemy-1.4.44-cp311-cp311-win32.whl", hash = "sha256:c46322354c58d4dc039a2c982d28284330f8919f31206894281f4b595b9d8dbe"}, - {file = "SQLAlchemy-1.4.44-cp311-cp311-win_amd64.whl", hash = "sha256:7313e4acebb9ae88dbde14a8a177467a7625b7449306c03a3f9f309b30e163d0"}, - {file = "SQLAlchemy-1.4.44-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:17aee7bfcef7bf0dea92f10e5dfdd67418dcf6fe0759f520e168b605855c003e"}, - {file = "SQLAlchemy-1.4.44-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:9470633395e5f24d6741b4c8a6e905bce405a28cf417bba4ccbaadf3dab0111d"}, - {file = "SQLAlchemy-1.4.44-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:393f51a09778e8984d735b59a810731394308b4038acdb1635397c2865dae2b6"}, - {file = "SQLAlchemy-1.4.44-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c7e3b9e01fdbe1ce3a165cc7e1ff52b24813ee79c6df6dee0d1e13888a97817e"}, - {file = "SQLAlchemy-1.4.44-cp36-cp36m-win32.whl", hash = "sha256:6a06c2506c41926d2769f7968759995f2505e31c5b5a0821e43ca5a3ddb0e8ae"}, - {file = "SQLAlchemy-1.4.44-cp36-cp36m-win_amd64.whl", hash = "sha256:3ca21b35b714ce36f4b8d1ee8d15f149db8eb43a472cf71600bf18dae32286e7"}, - {file = "SQLAlchemy-1.4.44-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:3cbdbed8cdcae0f83640a9c44fa02b45a6c61e149c58d45a63c9581aba62850f"}, - {file = "SQLAlchemy-1.4.44-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a22208c1982f1fe2ae82e5e4c3d4a6f2445a7a0d65fb7983a3d7cbbe3983f5a4"}, - {file = "SQLAlchemy-1.4.44-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d3b9ac11f36ab9a726097fba7c7f6384f0129aedb017f1d4d1d4fce9052a1320"}, - {file = "SQLAlchemy-1.4.44-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d654870a66027af3a26df1372cf7f002e161c6768ebe4c9c6fdc0da331cb5173"}, - {file = "SQLAlchemy-1.4.44-cp37-cp37m-win32.whl", hash = "sha256:0be9b479c5806cece01f1581726573a8d6515f8404e082c375b922c45cfc2a7b"}, - {file = "SQLAlchemy-1.4.44-cp37-cp37m-win_amd64.whl", hash = "sha256:3eba07f740488c3a125f17c092a81eeae24a6c7ec32ac9dbc52bf7afaf0c4f16"}, - {file = "SQLAlchemy-1.4.44-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:ad5f966623905ee33694680dda1b735544c99c7638f216045d21546d3d8c6f5b"}, - {file = "SQLAlchemy-1.4.44-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f68eab46649504eb95be36ca529aea16cd199f080726c28cbdbcbf23d20b2a2"}, - {file = "SQLAlchemy-1.4.44-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:21f3df74a0ab39e1255e94613556e33c1dc3b454059fe0b365ec3bbb9ed82e4a"}, - {file = "SQLAlchemy-1.4.44-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8080bc51a775627865e0f1dbfc0040ff4ace685f187f6036837e1727ba2ed10"}, - {file = "SQLAlchemy-1.4.44-cp38-cp38-win32.whl", hash = "sha256:b6a337a2643a41476fb6262059b8740f4b9a2ec29bf00ffb18c18c080f6e0aed"}, - {file = "SQLAlchemy-1.4.44-cp38-cp38-win_amd64.whl", hash = "sha256:b737fbeb2f78926d1f59964feb287bbbd050e7904766f87c8ce5cfb86e6d840c"}, - {file = "SQLAlchemy-1.4.44-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:c9aa372b295a36771cffc226b6517df3011a7d146ac22d19fa6a75f1cdf9d7e6"}, - {file = "SQLAlchemy-1.4.44-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:237067ba0ef45a518b64606e1807f7229969ad568288b110ed5f0ca714a3ed3a"}, - {file = "SQLAlchemy-1.4.44-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6d7e1b28342b45f19e3dea7873a9479e4a57e15095a575afca902e517fb89652"}, - {file = "SQLAlchemy-1.4.44-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94c0093678001f5d79f2dcbf3104c54d6c89e41ab50d619494c503a4d3f1aef2"}, - {file = 
"SQLAlchemy-1.4.44-cp39-cp39-win32.whl", hash = "sha256:7cf7c7adbf4417e3f46fc5a2dbf8395a5a69698217337086888f79700a12e93a"}, - {file = "SQLAlchemy-1.4.44-cp39-cp39-win_amd64.whl", hash = "sha256:d3b6d4588994da73567bb00af9d7224a16c8027865a8aab53ae9be83f9b7cbd1"}, - {file = "SQLAlchemy-1.4.44.tar.gz", hash = "sha256:2dda5f96719ae89b3ec0f1b79698d86eb9aecb1d54e990abb3fdd92c04b46a90"}, + +[[package]] +name = "sqlalchemy" +version = "1.4.45" +description = "Database Abstraction Library" +category = "main" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +files = [ + {file = "SQLAlchemy-1.4.45-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:f1d3fb02a4d0b07d1351a4a52f159e5e7b3045c903468b7e9349ebf0020ffdb9"}, + {file = "SQLAlchemy-1.4.45-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:9b7025d46aba946272f6b6b357a22f3787473ef27451f342df1a2a6de23743e3"}, + {file = "SQLAlchemy-1.4.45-cp27-cp27m-win32.whl", hash = "sha256:26b8424b32eeefa4faad21decd7bdd4aade58640b39407bf43e7d0a7c1bc0453"}, + {file = "SQLAlchemy-1.4.45-cp27-cp27m-win_amd64.whl", hash = "sha256:13578d1cda69bc5e76c59fec9180d6db7ceb71c1360a4d7861c37d87ea6ca0b1"}, + {file = "SQLAlchemy-1.4.45-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6cd53b4c756a6f9c6518a3dc9c05a38840f9ae442c91fe1abde50d73651b6922"}, + {file = "SQLAlchemy-1.4.45-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:ca152ffc7f0aa069c95fba46165030267ec5e4bb0107aba45e5e9e86fe4d9363"}, + {file = "SQLAlchemy-1.4.45-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:06055476d38ed7915eeed22b78580556d446d175c3574a01b9eb04d91f3a8b2e"}, + {file = "SQLAlchemy-1.4.45-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:081e2a2d75466353c738ca2ee71c0cfb08229b4f9909b5fa085f75c48d021471"}, + {file = "SQLAlchemy-1.4.45-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96821d806c0c90c68ce3f2ce6dd529c10e5d7587961f31dd5c30e3bfddc4545d"}, + {file = "SQLAlchemy-1.4.45-cp310-cp310-win32.whl", hash = "sha256:c8051bff4ce48cbc98f11e95ac46bfd1e36272401070c010248a3230d099663f"}, + {file = "SQLAlchemy-1.4.45-cp310-cp310-win_amd64.whl", hash = "sha256:16ad798fc121cad5ea019eb2297127b08c54e1aa95fe17b3fea9fdbc5c34fe62"}, + {file = "SQLAlchemy-1.4.45-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:099efef0de9fbda4c2d7cb129e4e7f812007901942259d4e6c6e19bd69de1088"}, + {file = "SQLAlchemy-1.4.45-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29a29d02c9e6f6b105580c5ed7afb722b97bc2e2fdb85e1d45d7ddd8440cfbca"}, + {file = "SQLAlchemy-1.4.45-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc10423b59d6d032d6dff0bb42aa06dc6a8824eb6029d70c7d1b6981a2e7f4d8"}, + {file = "SQLAlchemy-1.4.45-cp311-cp311-win32.whl", hash = "sha256:1a92685db3b0682776a5abcb5f9e9addb3d7d9a6d841a452a17ec2d8d457bea7"}, + {file = "SQLAlchemy-1.4.45-cp311-cp311-win_amd64.whl", hash = "sha256:db3ccbce4a861bf4338b254f95916fc68dd8b7aa50eea838ecdaf3a52810e9c0"}, + {file = "SQLAlchemy-1.4.45-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:a62ae2ea3b940ce9c9cbd675489c2047921ce0a79f971d3082978be91bd58117"}, + {file = "SQLAlchemy-1.4.45-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a87f8595390764db333a1705591d0934973d132af607f4fa8b792b366eacbb3c"}, + {file = 
"SQLAlchemy-1.4.45-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9a21c1fb71c69c8ec65430160cd3eee44bbcea15b5a4e556f29d03f246f425ec"}, + {file = "SQLAlchemy-1.4.45-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f7944b04e6fcf8d733964dd9ee36b6a587251a1a4049af3a9b846f6e64eb349a"}, + {file = "SQLAlchemy-1.4.45-cp36-cp36m-win32.whl", hash = "sha256:a3bcd5e2049ceb97e8c273e6a84ff4abcfa1dc47b6d8bbd36e07cce7176610d3"}, + {file = "SQLAlchemy-1.4.45-cp36-cp36m-win_amd64.whl", hash = "sha256:5953e225be47d80410ae519f865b5c341f541d8e383fb6d11f67fb71a45bf890"}, + {file = "SQLAlchemy-1.4.45-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:6a91b7883cb7855a27bc0637166eed622fdf1bb94a4d1630165e5dd88c7e64d3"}, + {file = "SQLAlchemy-1.4.45-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d458fd0566bc9e10b8be857f089e96b5ca1b1ef033226f24512f9ffdf485a8c0"}, + {file = "SQLAlchemy-1.4.45-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:88f4ad3b081c0dbb738886f8d425a5d983328670ee83b38192687d78fc82bd1e"}, + {file = "SQLAlchemy-1.4.45-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd95a3e6ab46da2c5b0703e797a772f3fab44d085b3919a4f27339aa3b1f51d3"}, + {file = "SQLAlchemy-1.4.45-cp37-cp37m-win32.whl", hash = "sha256:715f5859daa3bee6ecbad64501637fa4640ca6734e8cda6135e3898d5f8ccadd"}, + {file = "SQLAlchemy-1.4.45-cp37-cp37m-win_amd64.whl", hash = "sha256:2d1539fbc82d2206380a86d6d7d0453764fdca5d042d78161bbfb8dd047c80ec"}, + {file = "SQLAlchemy-1.4.45-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:01aa76f324c9bbc0dcb2bc3d9e2a9d7ede4808afa1c38d40d5e2007e3163b206"}, + {file = "SQLAlchemy-1.4.45-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:416fe7d228937bd37990b5a429fd00ad0e49eabcea3455af7beed7955f192edd"}, + {file = "SQLAlchemy-1.4.45-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:7e32ce2584564d9e068bb7e0ccd1810cbb0a824c0687f8016fe67e97c345a637"}, + {file = "SQLAlchemy-1.4.45-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:561605cfc26273825ed2fb8484428faf36e853c13e4c90c61c58988aeccb34ed"}, + {file = "SQLAlchemy-1.4.45-cp38-cp38-win32.whl", hash = "sha256:55ddb5585129c5d964a537c9e32a8a68a8c6293b747f3fa164e1c034e1657a98"}, + {file = "SQLAlchemy-1.4.45-cp38-cp38-win_amd64.whl", hash = "sha256:445914dcadc0b623bd9851260ee54915ecf4e3041a62d57709b18a0eed19f33b"}, + {file = "SQLAlchemy-1.4.45-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:2db887dbf05bcc3151de1c4b506b14764c6240a42e844b4269132a7584de1e5f"}, + {file = "SQLAlchemy-1.4.45-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52b90c9487e4449ad954624d01dea34c90cd8c104bce46b322c83654f37a23c5"}, + {file = "SQLAlchemy-1.4.45-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f61e54b8c2b389de1a8ad52394729c478c67712dbdcdadb52c2575e41dae94a5"}, + {file = "SQLAlchemy-1.4.45-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e91a5e45a2ea083fe344b3503405978dff14d60ef3aa836432c9ca8cd47806b6"}, + {file = "SQLAlchemy-1.4.45-cp39-cp39-win32.whl", hash = 
"sha256:0e068b8414d60dd35d43c693555fc3d2e1d822cef07960bb8ca3f1ee6c4ff762"}, + {file = "SQLAlchemy-1.4.45-cp39-cp39-win_amd64.whl", hash = "sha256:2d6f178ff2923730da271c8aa317f70cf0df11a4d1812f1d7a704b1cf29c5fe3"}, + {file = "SQLAlchemy-1.4.45.tar.gz", hash = "sha256:fd69850860093a3f69fefe0ab56d041edfdfe18510b53d9a2eaecba2f15fa795"}, ] -sqlitedict = [ + +[package.dependencies] +greenlet = {version = "!=0.4.17", markers = "python_version >= \"3\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"} + +[package.extras] +aiomysql = ["aiomysql", "greenlet (!=0.4.17)"] +aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing-extensions (!=3.10.0.1)"] +asyncio = ["greenlet (!=0.4.17)"] +asyncmy = ["asyncmy (>=0.2.3,!=0.2.4)", "greenlet (!=0.4.17)"] +mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2)"] +mssql = ["pyodbc"] +mssql-pymssql = ["pymssql"] +mssql-pyodbc = ["pyodbc"] +mypy = ["mypy (>=0.910)", "sqlalchemy2-stubs"] +mysql = ["mysqlclient (>=1.4.0)", "mysqlclient (>=1.4.0,<2)"] +mysql-connector = ["mysql-connector-python"] +oracle = ["cx-oracle (>=7)", "cx-oracle (>=7,<8)"] +postgresql = ["psycopg2 (>=2.7)"] +postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] +postgresql-pg8000 = ["pg8000 (>=1.16.6,!=1.29.0)"] +postgresql-psycopg2binary = ["psycopg2-binary"] +postgresql-psycopg2cffi = ["psycopg2cffi"] +pymysql = ["pymysql", "pymysql (<1)"] +sqlcipher = ["sqlcipher3-binary"] + +[[package]] +name = "sqlitedict" +version = "2.1.0" +description = "Persistent dict in Python, backed up by sqlite3 and pickle, multithread-safe." +category = "main" +optional = true +python-versions = "*" +files = [ {file = "sqlitedict-2.1.0.tar.gz", hash = "sha256:03d9cfb96d602996f1d4c2db2856f1224b96a9c431bdd16e78032a72940f9e8c"}, ] -srsly = [ + +[[package]] +name = "srsly" +version = "2.4.5" +description = "Modern high-performance serialization utilities for Python" +category = "main" +optional = true +python-versions = ">=3.6" +files = [ {file = "srsly-2.4.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8fed31ef8acbb5fead2152824ef39e12d749fcd254968689ba5991dd257b63b4"}, {file = "srsly-2.4.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:04d0b4cd91e098cdac12d2c28e256b1181ba98bcd00e460b8e42dee3e8542804"}, {file = "srsly-2.4.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6d83bea1f774b54d9313a374a95f11a776d37bcedcda93c526bf7f1cb5f26428"}, @@ -3332,15 +3191,59 @@ srsly = [ {file = "srsly-2.4.5-cp39-cp39-win_amd64.whl", hash = "sha256:2d3b0d32be2267fb489da172d71399ac59f763189b47dbe68eedb0817afaa6dc"}, {file = "srsly-2.4.5.tar.gz", hash = "sha256:c842258967baa527cea9367986e42b8143a1a890e7d4a18d25a36edc3c7a33c7"}, ] -stack-data = [ + +[package.dependencies] +catalogue = ">=2.0.3,<2.1.0" + +[[package]] +name = "stack-data" +version = "0.6.2" +description = "Extract data from python stack frames and tracebacks for informative displays" +category = "dev" +optional = false +python-versions = "*" +files = [ {file = "stack_data-0.6.2-py3-none-any.whl", hash = "sha256:cbb2a53eb64e5785878201a97ed7c7b94883f48b87bfb0bbe8b623c74679e4a8"}, {file = "stack_data-0.6.2.tar.gz", hash = "sha256:32d2dd0376772d01b6cb9fc996f3c8b57a357089dec328ed4b6553d037eaf815"}, ] -terminado = [ + +[package.dependencies] +asttokens = ">=2.1.0" +executing = ">=1.2.0" +pure-eval = "*" + +[package.extras] +tests = ["cython", 
"littleutils", "pygments", "pytest", "typeguard"] + +[[package]] +name = "terminado" +version = "0.17.1" +description = "Tornado websocket backend for the Xterm.js Javascript terminal emulator library." +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ {file = "terminado-0.17.1-py3-none-any.whl", hash = "sha256:8650d44334eba354dd591129ca3124a6ba42c3d5b70df5051b6921d506fdaeae"}, {file = "terminado-0.17.1.tar.gz", hash = "sha256:6ccbbcd3a4f8a25a5ec04991f39a0b8db52dfcd487ea0e578d977e6752380333"}, ] -thinc = [ + +[package.dependencies] +ptyprocess = {version = "*", markers = "os_name != \"nt\""} +pywinpty = {version = ">=1.1.0", markers = "os_name == \"nt\""} +tornado = ">=6.1.0" + +[package.extras] +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] +test = ["pre-commit", "pytest (>=7.0)", "pytest-timeout"] + +[[package]] +name = "thinc" +version = "8.1.5" +description = "A refreshing functional take on deep learning, compatible with your favorite libraries" +category = "main" +optional = true +python-versions = ">=3.6" +files = [ {file = "thinc-8.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5dc6629e4770a13dec34eda3c4d89302f1b5c91ac4663cd53f876a4e761fcc00"}, {file = "thinc-8.1.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8af5639de41a08d358fac073ac116faefe75289d9bed5c1fbf6c7a54724529ea"}, {file = "thinc-8.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d66eeacc29769bf4238a0666f05e38d75dce60ab609eea5089975e6d8b82721"}, @@ -3370,11 +3273,71 @@ thinc = [ {file = "thinc-8.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:16be051c6f71d967fe87c3bda3a760699539cf75fee6b32527ea38feb3002e56"}, {file = "thinc-8.1.5.tar.gz", hash = "sha256:4d3e4de33d2d0eae7c1455c60c680e453b0204c29e3d2d548d7a9e7fe08ccfbd"}, ] -tinycss2 = [ + +[package.dependencies] +blis = ">=0.7.8,<0.8.0" +catalogue = ">=2.0.4,<2.1.0" +confection = ">=0.0.1,<1.0.0" +cymem = ">=2.0.2,<2.1.0" +murmurhash = ">=1.0.2,<1.1.0" +numpy = ">=1.15.0" +preshed = ">=3.0.2,<3.1.0" +pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<1.11.0" +setuptools = "*" +srsly = ">=2.4.0,<3.0.0" +wasabi = ">=0.8.1,<1.1.0" + +[package.extras] +cuda = ["cupy (>=5.0.0b4)"] +cuda-autodetect = ["cupy-wheel (>=11.0.0)"] +cuda100 = ["cupy-cuda100 (>=5.0.0b4)"] +cuda101 = ["cupy-cuda101 (>=5.0.0b4)"] +cuda102 = ["cupy-cuda102 (>=5.0.0b4)"] +cuda110 = ["cupy-cuda110 (>=5.0.0b4)"] +cuda111 = ["cupy-cuda111 (>=5.0.0b4)"] +cuda112 = ["cupy-cuda112 (>=5.0.0b4)"] +cuda113 = ["cupy-cuda113 (>=5.0.0b4)"] +cuda114 = ["cupy-cuda114 (>=5.0.0b4)"] +cuda115 = ["cupy-cuda115 (>=5.0.0b4)"] +cuda116 = ["cupy-cuda116 (>=5.0.0b4)"] +cuda117 = ["cupy-cuda117 (>=5.0.0b4)"] +cuda11x = ["cupy-cuda11x (>=11.0.0)"] +cuda80 = ["cupy-cuda80 (>=5.0.0b4)"] +cuda90 = ["cupy-cuda90 (>=5.0.0b4)"] +cuda91 = ["cupy-cuda91 (>=5.0.0b4)"] +cuda92 = ["cupy-cuda92 (>=5.0.0b4)"] +datasets = ["ml-datasets (>=0.2.0,<0.3.0)"] +mxnet = ["mxnet (>=1.5.1,<1.6.0)"] +tensorflow = ["tensorflow (>=2.0.0,<2.6.0)"] +torch = ["torch (>=1.6.0)"] + +[[package]] +name = "tinycss2" +version = "1.2.1" +description = "A tiny CSS parser" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ {file = "tinycss2-1.2.1-py3-none-any.whl", hash = "sha256:2b80a96d41e7c3914b8cda8bc7f705a4d9c49275616e886103dd839dfc847847"}, {file = "tinycss2-1.2.1.tar.gz", hash = "sha256:8cff3a8f066c2ec677c06dbc7b45619804a6938478d9d73c284b29d14ecb0627"}, ] -tokenizers = [ + +[package.dependencies] +webencodings = ">=0.4" + +[package.extras] +doc = ["sphinx", 
"sphinx_rtd_theme"] +test = ["flake8", "isort", "pytest"] + +[[package]] +name = "tokenizers" +version = "0.13.2" +description = "Fast and Customizable Tokenizers" +category = "main" +optional = true +python-versions = "*" +files = [ {file = "tokenizers-0.13.2-cp310-cp310-macosx_10_11_x86_64.whl", hash = "sha256:a6f36b1b499233bb4443b5e57e20630c5e02fba61109632f5e00dab970440157"}, {file = "tokenizers-0.13.2-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:bc6983282ee74d638a4b6d149e5dadd8bc7ff1d0d6de663d69f099e0c6bddbeb"}, {file = "tokenizers-0.13.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:16756e6ab264b162f99c0c0a8d3d521328f428b33374c5ee161c0ebec42bf3c0"}, @@ -3412,11 +3375,32 @@ tokenizers = [ {file = "tokenizers-0.13.2-cp39-cp39-win_amd64.whl", hash = "sha256:486d637b413fddada845a10a45c74825d73d3725da42ccd8796ccd7a1c07a024"}, {file = "tokenizers-0.13.2.tar.gz", hash = "sha256:f9525375582fd1912ac3caa2f727d36c86ff8c0c6de45ae1aaff90f87f33b907"}, ] -tomli = [ + +[package.extras] +dev = ["black (==22.3)", "datasets", "numpy", "pytest", "requests"] +docs = ["setuptools-rust", "sphinx", "sphinx-rtd-theme"] +testing = ["black (==22.3)", "datasets", "numpy", "pytest", "requests"] + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, ] -tornado = [ + +[[package]] +name = "tornado" +version = "6.2" +description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." 
+category = "dev" +optional = false +python-versions = ">= 3.7" +files = [ {file = "tornado-6.2-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:20f638fd8cc85f3cbae3c732326e96addff0a15e22d80f049e00121651e82e72"}, {file = "tornado-6.2-cp37-abi3-macosx_10_9_x86_64.whl", hash = "sha256:87dcafae3e884462f90c90ecc200defe5e580a7fbbb4365eda7c7c1eb809ebc9"}, {file = "tornado-6.2-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba09ef14ca9893954244fd872798b4ccb2367c165946ce2dd7376aebdde8e3ac"}, @@ -3429,70 +3413,341 @@ tornado = [ {file = "tornado-6.2-cp37-abi3-win_amd64.whl", hash = "sha256:e5f923aa6a47e133d1cf87d60700889d7eae68988704e20c75fb2d65677a8e4b"}, {file = "tornado-6.2.tar.gz", hash = "sha256:9b630419bde84ec666bfd7ea0a4cb2a8a651c2d5cccdbdd1972a0c859dfc3c13"}, ] -tqdm = [ + +[[package]] +name = "tqdm" +version = "4.64.1" +description = "Fast, Extensible Progress Meter" +category = "main" +optional = true +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" +files = [ {file = "tqdm-4.64.1-py2.py3-none-any.whl", hash = "sha256:6fee160d6ffcd1b1c68c65f14c829c22832bc401726335ce92c52d395944a6a1"}, {file = "tqdm-4.64.1.tar.gz", hash = "sha256:5f4f682a004951c1b450bc753c710e9280c5746ce6ffedee253ddbcbf54cf1e4"}, ] -traitlets = [ - {file = "traitlets-5.6.0-py3-none-any.whl", hash = "sha256:1410755385d778aed847d68deb99b3ba30fbbf489e17a1e8cbb753060d5cce73"}, - {file = "traitlets-5.6.0.tar.gz", hash = "sha256:10b6ed1c9cedee83e795db70a8b9c2db157bb3778ec4587a349ecb7ef3b1033b"}, + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[package.extras] +dev = ["py-make (>=0.1.0)", "twine", "wheel"] +notebook = ["ipywidgets (>=6)"] +slack = ["slack-sdk"] +telegram = ["requests"] + +[[package]] +name = "traitlets" +version = "5.7.1" +description = "Traitlets Python configuration system" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "traitlets-5.7.1-py3-none-any.whl", hash = "sha256:57ba2ba951632eeab9388fa45f342a5402060a5cc9f0bb942f760fafb6641581"}, + {file = "traitlets-5.7.1.tar.gz", hash = "sha256:fde8f62c05204ead43c2c1b9389cfc85befa7f54acb5da28529d671175bb4108"}, ] -transformers = [ + +[package.extras] +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] +lint = ["black (>=22.6.0)", "mdformat (>0.7)", "ruff (>=0.0.156)"] +test = ["pre-commit", "pytest"] +typing = ["mypy (>=0.990)"] + +[[package]] +name = "transformers" +version = "4.25.1" +description = "State-of-the-art Machine Learning for JAX, PyTorch and TensorFlow" +category = "main" +optional = true +python-versions = ">=3.7.0" +files = [ {file = "transformers-4.25.1-py3-none-any.whl", hash = "sha256:60f1be15e17e4a54373c787c713ec149dabcc63464131ac45611618fe7c2016e"}, {file = "transformers-4.25.1.tar.gz", hash = "sha256:6dad398b792d45dc04e9ee7e9e06bf758ab19dca2efc119065e661bb0f8f843b"}, ] -typer = [ + +[package.dependencies] +filelock = "*" +huggingface-hub = ">=0.10.0,<1.0" +numpy = ">=1.17" +packaging = ">=20.0" +pyyaml = ">=5.1" +regex = "!=2019.12.17" +requests = "*" +tokenizers = ">=0.11.1,<0.11.3 || >0.11.3,<0.14" +tqdm = ">=4.27" + +[package.extras] +accelerate = ["accelerate (>=0.10.0)"] +all = ["Pillow", "accelerate (>=0.10.0)", "codecarbon (==1.2.0)", "flax (>=0.4.1)", "jax (>=0.2.8,!=0.3.2,<=0.3.6)", "jaxlib (>=0.1.65,<=0.3.6)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "onnxconverter-common", "optax (>=0.0.8)", "optuna", "phonemizer", "protobuf (<=3.20.2)", "pyctcdecode (>=0.4.0)", "ray[tune]", "sentencepiece 
(>=0.1.91,!=0.1.92)", "sigopt", "tensorflow (>=2.4,<2.11)", "tensorflow-text", "tf2onnx", "timm", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.7,!=1.12.0)", "torchaudio"] +audio = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"] +codecarbon = ["codecarbon (==1.2.0)"] +deepspeed = ["accelerate (>=0.10.0)", "deepspeed (>=0.6.5)"] +deepspeed-testing = ["GitPython (<3.1.19)", "accelerate (>=0.10.0)", "beautifulsoup4", "black (==22.3)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "deepspeed (>=0.6.5)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "hf-doc-builder (>=0.3.0)", "nltk", "optuna", "parameterized", "protobuf (<=3.20.2)", "psutil", "pytest", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "safetensors (>=0.2.1)", "timeout-decorator"] +dev = ["GitPython (<3.1.19)", "Pillow", "accelerate (>=0.10.0)", "beautifulsoup4", "black (==22.3)", "codecarbon (==1.2.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "flake8 (>=3.8.3)", "flax (>=0.4.1)", "fugashi (>=1.0)", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "ipadic (>=1.0.0,<2.0)", "isort (>=5.5.4)", "jax (>=0.2.8,!=0.3.2,<=0.3.6)", "jaxlib (>=0.1.65,<=0.3.6)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "nltk", "onnxconverter-common", "optax (>=0.0.8)", "optuna", "parameterized", "phonemizer", "protobuf (<=3.20.2)", "psutil", "pyctcdecode (>=0.4.0)", "pyknp (>=0.6.1)", "pytest", "pytest-timeout", "pytest-xdist", "ray[tune]", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "safetensors (>=0.2.1)", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "tensorflow (>=2.4,<2.11)", "tensorflow-text", "tf2onnx", "timeout-decorator", "timm", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.7,!=1.12.0)", "torchaudio", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)"] +dev-tensorflow = ["GitPython (<3.1.19)", "Pillow", "beautifulsoup4", "black (==22.3)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "flake8 (>=3.8.3)", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "isort (>=5.5.4)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "nltk", "onnxconverter-common", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "parameterized", "phonemizer", "protobuf (<=3.20.2)", "psutil", "pyctcdecode (>=0.4.0)", "pytest", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "safetensors (>=0.2.1)", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorflow (>=2.4,<2.11)", "tensorflow-text", "tf2onnx", "timeout-decorator", "tokenizers (>=0.11.1,!=0.11.3,<0.14)"] +dev-torch = ["GitPython (<3.1.19)", "Pillow", "beautifulsoup4", "black (==22.3)", "codecarbon (==1.2.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "flake8 (>=3.8.3)", "fugashi (>=1.0)", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "ipadic (>=1.0.0,<2.0)", "isort (>=5.5.4)", "kenlm", "librosa", "nltk", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "optuna", "parameterized", "phonemizer", "protobuf (<=3.20.2)", "psutil", "pyctcdecode (>=0.4.0)", "pyknp (>=0.6.1)", "pytest", "pytest-timeout", "pytest-xdist", "ray[tune]", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "sacrebleu (>=1.4.12,<2.0.0)", 
"sacremoses", "safetensors (>=0.2.1)", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "timeout-decorator", "timm", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.7,!=1.12.0)", "torchaudio", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)"] +docs = ["Pillow", "accelerate (>=0.10.0)", "codecarbon (==1.2.0)", "flax (>=0.4.1)", "hf-doc-builder", "jax (>=0.2.8,!=0.3.2,<=0.3.6)", "jaxlib (>=0.1.65,<=0.3.6)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "onnxconverter-common", "optax (>=0.0.8)", "optuna", "phonemizer", "protobuf (<=3.20.2)", "pyctcdecode (>=0.4.0)", "ray[tune]", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "tensorflow (>=2.4,<2.11)", "tensorflow-text", "tf2onnx", "timm", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.7,!=1.12.0)", "torchaudio"] +docs-specific = ["hf-doc-builder"] +fairscale = ["fairscale (>0.3)"] +flax = ["flax (>=0.4.1)", "jax (>=0.2.8,!=0.3.2,<=0.3.6)", "jaxlib (>=0.1.65,<=0.3.6)", "optax (>=0.0.8)"] +flax-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"] +ftfy = ["ftfy"] +integrations = ["optuna", "ray[tune]", "sigopt"] +ja = ["fugashi (>=1.0)", "ipadic (>=1.0.0,<2.0)", "pyknp (>=0.6.1)", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)"] +modelcreation = ["cookiecutter (==1.7.3)"] +natten = ["natten (>=0.14.4)"] +onnx = ["onnxconverter-common", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "tf2onnx"] +onnxruntime = ["onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)"] +optuna = ["optuna"] +quality = ["GitPython (<3.1.19)", "black (==22.3)", "datasets (!=2.5.0)", "flake8 (>=3.8.3)", "hf-doc-builder (>=0.3.0)", "isort (>=5.5.4)"] +ray = ["ray[tune]"] +retrieval = ["datasets (!=2.5.0)", "faiss-cpu"] +sagemaker = ["sagemaker (>=2.31.0)"] +sentencepiece = ["protobuf (<=3.20.2)", "sentencepiece (>=0.1.91,!=0.1.92)"] +serving = ["fastapi", "pydantic", "starlette", "uvicorn"] +sigopt = ["sigopt"] +sklearn = ["scikit-learn"] +speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)", "torchaudio"] +testing = ["GitPython (<3.1.19)", "beautifulsoup4", "black (==22.3)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "hf-doc-builder (>=0.3.0)", "nltk", "parameterized", "protobuf (<=3.20.2)", "psutil", "pytest", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "safetensors (>=0.2.1)", "timeout-decorator"] +tf = ["keras-nlp (>=0.3.1)", "onnxconverter-common", "tensorflow (>=2.4,<2.11)", "tensorflow-text", "tf2onnx"] +tf-cpu = ["keras-nlp (>=0.3.1)", "onnxconverter-common", "tensorflow-cpu (>=2.4,<2.11)", "tensorflow-text", "tf2onnx"] +tf-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"] +timm = ["timm"] +tokenizers = ["tokenizers (>=0.11.1,!=0.11.3,<0.14)"] +torch = ["torch (>=1.7,!=1.12.0)"] +torch-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)", "torchaudio"] +torchhub = ["filelock", "huggingface-hub (>=0.10.0,<1.0)", "importlib-metadata", "numpy (>=1.17)", "packaging (>=20.0)", "protobuf (<=3.20.2)", "regex (!=2019.12.17)", "requests", "sentencepiece (>=0.1.91,!=0.1.92)", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.7,!=1.12.0)", "tqdm (>=4.27)"] +vision = ["Pillow"] + +[[package]] +name = "typer" +version = "0.7.0" +description = "Typer, build great CLIs. Easy to code. Based on Python type hints." 
+category = "main" +optional = true +python-versions = ">=3.6" +files = [ {file = "typer-0.7.0-py3-none-any.whl", hash = "sha256:b5e704f4e48ec263de1c0b3a2387cd405a13767d2f907f44c1a08cbad96f606d"}, {file = "typer-0.7.0.tar.gz", hash = "sha256:ff797846578a9f2a201b53442aedeb543319466870fbe1c701eab66dd7681165"}, ] -types-pyyaml = [ + +[package.dependencies] +click = ">=7.1.1,<9.0.0" + +[package.extras] +all = ["colorama (>=0.4.3,<0.5.0)", "rich (>=10.11.0,<13.0.0)", "shellingham (>=1.3.0,<2.0.0)"] +dev = ["autoflake (>=1.3.1,<2.0.0)", "flake8 (>=3.8.3,<4.0.0)", "pre-commit (>=2.17.0,<3.0.0)"] +doc = ["cairosvg (>=2.5.2,<3.0.0)", "mdx-include (>=1.4.1,<2.0.0)", "mkdocs (>=1.1.2,<2.0.0)", "mkdocs-material (>=8.1.4,<9.0.0)", "pillow (>=9.3.0,<10.0.0)"] +test = ["black (>=22.3.0,<23.0.0)", "coverage (>=6.2,<7.0)", "isort (>=5.0.6,<6.0.0)", "mypy (==0.910)", "pytest (>=4.4.0,<8.0.0)", "pytest-cov (>=2.10.0,<5.0.0)", "pytest-sugar (>=0.9.4,<0.10.0)", "pytest-xdist (>=1.32.0,<4.0.0)", "rich (>=10.11.0,<13.0.0)", "shellingham (>=1.3.0,<2.0.0)"] + +[[package]] +name = "types-pyyaml" +version = "6.0.12.2" +description = "Typing stubs for PyYAML" +category = "dev" +optional = false +python-versions = "*" +files = [ {file = "types-PyYAML-6.0.12.2.tar.gz", hash = "sha256:6840819871c92deebe6a2067fb800c11b8a063632eb4e3e755914e7ab3604e83"}, {file = "types_PyYAML-6.0.12.2-py3-none-any.whl", hash = "sha256:1e94e80aafee07a7e798addb2a320e32956a373f376655128ae20637adb2655b"}, ] -types-requests = [ + +[[package]] +name = "types-requests" +version = "2.28.11.5" +description = "Typing stubs for requests" +category = "dev" +optional = false +python-versions = "*" +files = [ {file = "types-requests-2.28.11.5.tar.gz", hash = "sha256:a7df37cc6fb6187a84097da951f8e21d335448aa2501a6b0a39cbd1d7ca9ee2a"}, {file = "types_requests-2.28.11.5-py3-none-any.whl", hash = "sha256:091d4a5a33c1b4f20d8b1b952aa8fa27a6e767c44c3cf65e56580df0b05fd8a9"}, ] -types-toml = [ + +[package.dependencies] +types-urllib3 = "<1.27" + +[[package]] +name = "types-toml" +version = "0.10.8.1" +description = "Typing stubs for toml" +category = "dev" +optional = false +python-versions = "*" +files = [ {file = "types-toml-0.10.8.1.tar.gz", hash = "sha256:171bdb3163d79a520560f24ba916a9fc9bff81659c5448a9fea89240923722be"}, {file = "types_toml-0.10.8.1-py3-none-any.whl", hash = "sha256:b7b5c4977f96ab7b5ac06d8a6590d17c0bf252a96efc03b109c2711fb3e0eafd"}, ] -types-urllib3 = [ + +[[package]] +name = "types-urllib3" +version = "1.26.25.4" +description = "Typing stubs for urllib3" +category = "dev" +optional = false +python-versions = "*" +files = [ {file = "types-urllib3-1.26.25.4.tar.gz", hash = "sha256:eec5556428eec862b1ac578fb69aab3877995a99ffec9e5a12cf7fbd0cc9daee"}, {file = "types_urllib3-1.26.25.4-py3-none-any.whl", hash = "sha256:ed6b9e8a8be488796f72306889a06a3fc3cb1aa99af02ab8afb50144d7317e49"}, ] -typing-extensions = [ + +[[package]] +name = "typing-extensions" +version = "4.4.0" +description = "Backported and Experimental Type Hints for Python 3.7+" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ {file = "typing_extensions-4.4.0-py3-none-any.whl", hash = "sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e"}, {file = "typing_extensions-4.4.0.tar.gz", hash = "sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa"}, ] -urllib3 = [ + +[[package]] +name = "uri-template" +version = "1.2.0" +description = "RFC 6570 URI Template Processor" +category = "dev" +optional = false +python-versions = 
">=3.6" +files = [ + {file = "uri_template-1.2.0-py3-none-any.whl", hash = "sha256:f1699c77b73b925cf4937eae31ab282a86dc885c333f2e942513f08f691fc7db"}, + {file = "uri_template-1.2.0.tar.gz", hash = "sha256:934e4d09d108b70eb8a24410af8615294d09d279ce0e7cbcdaef1bd21f932b06"}, +] + +[package.extras] +dev = ["flake8 (<4.0.0)", "flake8-annotations", "flake8-bugbear", "flake8-commas", "flake8-comprehensions", "flake8-continuation", "flake8-datetimez", "flake8-docstrings", "flake8-import-order", "flake8-literal", "flake8-noqa", "flake8-requirements", "flake8-type-annotations", "flake8-use-fstring", "mypy", "pep8-naming"] + +[[package]] +name = "urllib3" +version = "1.26.13" +description = "HTTP library with thread-safe connection pooling, file post, and more." +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +files = [ {file = "urllib3-1.26.13-py2.py3-none-any.whl", hash = "sha256:47cc05d99aaa09c9e72ed5809b60e7ba354e64b59c9c173ac3018642d8bb41fc"}, {file = "urllib3-1.26.13.tar.gz", hash = "sha256:c083dd0dce68dbfbe1129d5271cb90f9447dea7d52097c6e0126120c521ddea8"}, ] -wasabi = [ + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] +secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] +socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] + +[[package]] +name = "wasabi" +version = "0.10.1" +description = "A lightweight console printing and formatting toolkit" +category = "main" +optional = true +python-versions = "*" +files = [ {file = "wasabi-0.10.1-py3-none-any.whl", hash = "sha256:fe862cc24034fbc9f04717cd312ab884f71f51a8ecabebc3449b751c2a649d83"}, {file = "wasabi-0.10.1.tar.gz", hash = "sha256:c8e372781be19272942382b14d99314d175518d7822057cb7a97010c4259d249"}, ] -wcwidth = [ + +[[package]] +name = "wcwidth" +version = "0.2.5" +description = "Measures the displayed width of unicode strings in a terminal" +category = "dev" +optional = false +python-versions = "*" +files = [ {file = "wcwidth-0.2.5-py2.py3-none-any.whl", hash = "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784"}, {file = "wcwidth-0.2.5.tar.gz", hash = "sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83"}, ] -webencodings = [ + +[[package]] +name = "webcolors" +version = "1.12" +description = "A library for working with color names and color values formats defined by HTML and CSS." 
+category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "webcolors-1.12-py3-none-any.whl", hash = "sha256:d98743d81d498a2d3eaf165196e65481f0d2ea85281463d856b1e51b09f62dce"}, + {file = "webcolors-1.12.tar.gz", hash = "sha256:16d043d3a08fd6a1b1b7e3e9e62640d09790dce80d2bdd4792a175b35fe794a9"}, +] + +[[package]] +name = "webencodings" +version = "0.5.1" +description = "Character encoding aliases for legacy web content" +category = "dev" +optional = false +python-versions = "*" +files = [ {file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"}, {file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"}, ] -websocket-client = [ + +[[package]] +name = "websocket-client" +version = "1.4.2" +description = "WebSocket client for Python with low level API options" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ {file = "websocket-client-1.4.2.tar.gz", hash = "sha256:d6e8f90ca8e2dd4e8027c4561adeb9456b54044312dba655e7cae652ceb9ae59"}, {file = "websocket_client-1.4.2-py3-none-any.whl", hash = "sha256:d6b06432f184438d99ac1f456eaf22fe1ade524c3dd16e661142dc54e9cba574"}, ] -widgetsnbextension = [ - {file = "widgetsnbextension-4.0.3-py3-none-any.whl", hash = "sha256:7f3b0de8fda692d31ef03743b598620e31c2668b835edbd3962d080ccecf31eb"}, - {file = "widgetsnbextension-4.0.3.tar.gz", hash = "sha256:34824864c062b0b3030ad78210db5ae6a3960dfb61d5b27562d6631774de0286"}, + +[package.extras] +docs = ["Sphinx (>=3.4)", "sphinx-rtd-theme (>=0.5)"] +optional = ["python-socks", "wsaccel"] +test = ["websockets"] + +[[package]] +name = "widgetsnbextension" +version = "4.0.4" +description = "Jupyter interactive widgets for Jupyter Notebook" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "widgetsnbextension-4.0.4-py3-none-any.whl", hash = "sha256:fa0e840719ec95dd2ec85c3a48913f1a0c29d323eacbcdb0b29bfed0cc6da678"}, + {file = "widgetsnbextension-4.0.4.tar.gz", hash = "sha256:44c69f18237af0f610557d6c1c7ef76853f5856a0e604c0a517f2320566bb775"}, ] -wikipedia = [ + +[[package]] +name = "wikipedia" +version = "1.4.0" +description = "Wikipedia API for Python" +category = "main" +optional = true +python-versions = "*" +files = [ {file = "wikipedia-1.4.0.tar.gz", hash = "sha256:db0fad1829fdd441b1852306e9856398204dc0786d2996dd2e0c8bb8e26133b2"}, ] -zipp = [ + +[package.dependencies] +beautifulsoup4 = "*" +requests = ">=2.0.0,<3.0.0" + +[[package]] +name = "zipp" +version = "3.11.0" +description = "Backport of pathlib-compatible object wrapper for zip files" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ {file = "zipp-3.11.0-py3-none-any.whl", hash = "sha256:83a28fcb75844b5c0cdaf5aa4003c2d728c77e05f5aeabe8e95e56727005fbaa"}, {file = "zipp-3.11.0.tar.gz", hash = "sha256:a7a22e05929290a67401440b39690ae6563279bced5f314609d9d03798f56766"}, ] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)"] +testing = ["flake8 (<5)", "func-timeout", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] + +[extras] +all = ["manifest-ml", "elasticsearch", "faiss-cpu", "transformers", "spacy", "nltk", "wikipedia", "beautifulsoup4"] +llms = ["manifest-ml"] + +[metadata] +lock-version = 
"2.0" +python-versions = ">=3.8.1,<4.0" +content-hash = "88dd08a95bacc5be4635b713d49733c7eb4cb3e47f5d67bf4da53d373d264fa2" diff --git a/pyproject.toml b/pyproject.toml index 6a610538..33af4832 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -25,6 +25,7 @@ beautifulsoup4 = {version = "^4", optional = true} [tool.poetry.group.test.dependencies] pytest = "^7.2.0" +pytest-cov = "^4.0.0" pytest-dotenv = "^0.5.2" [tool.poetry.group.lint.dependencies] diff --git a/tests/unit_tests/llms/test_utils.py b/tests/unit_tests/llms/test_utils.py index 5685f650..77cff607 100644 --- a/tests/unit_tests/llms/test_utils.py +++ b/tests/unit_tests/llms/test_utils.py @@ -10,6 +10,9 @@ def test_enforce_stop_tokens() -> None: text = "foo bar baz" output = enforce_stop_tokens(text, ["moo", "baz", "bar"]) assert output == "foo " + text = "foo bar baz" + output = enforce_stop_tokens(text, ["moo", "bar"]) + assert output == "foo " def test_enforce_stop_tokens_none() -> None: From 595cc1ae1ac786e7a9ae940e4ffc1a65871a948f Mon Sep 17 00:00:00 2001 From: Harrison Chase Date: Tue, 13 Dec 2022 05:50:03 -0800 Subject: [PATCH 11/29] RFC: more complete return (#313) Co-authored-by: Andrew Williamson Co-authored-by: awilliamson10 --- langchain/llms/base.py | 30 ++++++++++++++++++++- langchain/llms/openai.py | 56 +++++++++++++++++++++++++++++++--------- 2 files changed, 73 insertions(+), 13 deletions(-) diff --git a/langchain/llms/base.py b/langchain/llms/base.py index 60728841..367aa503 100644 --- a/langchain/llms/base.py +++ b/langchain/llms/base.py @@ -1,11 +1,39 @@ """Base interface for large language models to expose.""" from abc import ABC, abstractmethod -from typing import Any, List, Mapping, Optional +from typing import Any, List, Mapping, NamedTuple, Optional + + +class Generation(NamedTuple): + """Output of a single generation.""" + + text: str + """Generated text output.""" + # TODO: add log probs + + +class LLMResult(NamedTuple): + """Class that contains all relevant information for an LLM Result.""" + + generations: List[List[Generation]] + """List of the things generated. This is List[List[]] because + each input could have multiple generations.""" + llm_output: Optional[dict] = None + """For arbitrary LLM provider specific output.""" class LLM(ABC): """LLM wrapper should take in a prompt and return a string.""" + def generate( + self, prompts: List[str], stop: Optional[List[str]] = None + ) -> LLMResult: + """Run the LLM on the given prompt and input.""" + generations = [] + for prompt in prompts: + text = self(prompt, stop=stop) + generations.append([Generation(text=text)]) + return LLMResult(generations=generations) + def get_num_tokens(self, text: str) -> int: """Get the number of tokens present in the text.""" # TODO: this method may not be exact. diff --git a/langchain/llms/openai.py b/langchain/llms/openai.py index 9de56f0c..fd88d873 100644 --- a/langchain/llms/openai.py +++ b/langchain/llms/openai.py @@ -3,7 +3,7 @@ from typing import Any, Dict, List, Mapping, Optional from pydantic import BaseModel, Extra, Field, root_validator -from langchain.llms.base import LLM +from langchain.llms.base import LLM, Generation, LLMResult from langchain.utils import get_from_dict_or_env @@ -97,6 +97,48 @@ class OpenAI(LLM, BaseModel): } return {**normal_params, **self.model_kwargs} + def generate( + self, prompts: List[str], stop: Optional[List[str]] = None + ) -> LLMResult: + """Call out to OpenAI's endpoint with k unique prompts. + + Args: + prompts: The prompts to pass into the model. 
+ stop: Optional list of stop words to use when generating. + + Returns: + The full LLM output. + + Example: + .. code-block:: python + + response = openai.generate(["Tell me a joke."]) + """ + params = self._default_params + if stop is not None: + if "stop" in params: + raise ValueError("`stop` found in both the input and default params.") + params["stop"] = stop + + if params["max_tokens"] == -1: + if len(prompts) != 1: + raise ValueError( + "max_tokens set to -1 not supported for multiple inputs." + ) + params["max_tokens"] = self.max_tokens_for_prompt(prompts[0]) + + response = self.client.create(model=self.model_name, prompt=prompts, **params) + generations = [] + for i, prompt in enumerate(prompts): + choices = response["choices"][i * self.n : (i + 1) * self.n] + generations.append([Generation(text=choice["text"]) for choice in choices]) + # Get the token usage from the response. + # Includes prompt, completion, and total tokens used. + token_usage = response["usage"] + return LLMResult( + generations=generations, llm_output={"token_usage": token_usage} + ) + @property def _identifying_params(self) -> Mapping[str, Any]: """Get the identifying parameters.""" @@ -117,17 +159,7 @@ class OpenAI(LLM, BaseModel): response = openai("Tell me a joke.") """ - params = self._default_params - - if params["max_tokens"] == -1: - params["max_tokens"] = self.max_tokens_for_prompt(prompt) - - if stop is not None: - if "stop" in params: - raise ValueError("`stop` found in both the input and default params.") - params["stop"] = stop - response = self.client.create(model=self.model_name, prompt=prompt, **params) - return response["choices"][0]["text"] + return self.generate([prompt], stop=stop).generations[0][0].text def modelname_to_contextsize(self, modelname: str) -> int: """Calculate the maximum number of tokens possible to generate for a model. 
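For reference, a minimal sketch of how the batched ``generate`` interface introduced in this patch is intended to be used. It is illustrative only: it assumes an OpenAI API key is configured in the environment, and the prompts and parameter values are placeholders.

.. code-block:: python

    from langchain.llms import OpenAI

    # n / best_of control how many completions come back per prompt
    llm = OpenAI(n=2, best_of=2)

    # generate() takes a batch of prompts and returns an LLMResult
    # instead of a bare string
    result = llm.generate(["Tell me a joke", "Tell me a poem"])

    # generations is a list of lists: one inner list of Generation
    # objects (length n) per input prompt
    print(result.generations[0][0].text)

    # llm_output carries provider-specific extras; for OpenAI this
    # includes the token usage reported by the API
    print(result.llm_output["token_usage"])
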
From 9bb7195085a843db3f53e4fd9a51c79f3698a19d Mon Sep 17 00:00:00 2001 From: Harrison Chase Date: Tue, 13 Dec 2022 06:46:01 -0800 Subject: [PATCH 12/29] Harrison/llm saving (#331) Co-authored-by: Akash Samant <70665700+asamant21@users.noreply.github.com> --- langchain/llms/__init__.py | 14 ++++- langchain/llms/ai21.py | 7 ++- langchain/llms/base.py | 57 ++++++++++++++++++- langchain/llms/cohere.py | 5 ++ langchain/llms/huggingface_hub.py | 10 +++- langchain/llms/loading.py | 42 ++++++++++++++ langchain/llms/manifest.py | 5 ++ langchain/llms/nlpcloud.py | 5 ++ langchain/llms/openai.py | 7 ++- tests/integration_tests/llms/test_ai21.py | 11 ++++ tests/integration_tests/llms/test_cohere.py | 12 ++++ .../llms/test_huggingface_hub.py | 12 ++++ tests/integration_tests/llms/test_nlpcloud.py | 12 ++++ tests/integration_tests/llms/test_openai.py | 11 ++++ tests/integration_tests/llms/utils.py | 16 ++++++ tests/unit_tests/agents/test_agent.py | 17 ++++-- tests/unit_tests/agents/test_react.py | 23 +++++--- tests/unit_tests/chains/test_natbot.py | 9 ++- tests/unit_tests/llms/fake_llm.py | 17 ++++-- tests/unit_tests/llms/test_loading.py | 15 +++++ 20 files changed, 279 insertions(+), 28 deletions(-) create mode 100644 langchain/llms/loading.py create mode 100644 tests/integration_tests/llms/utils.py create mode 100644 tests/unit_tests/llms/test_loading.py diff --git a/langchain/llms/__init__.py b/langchain/llms/__init__.py index fab84d41..01e3c702 100644 --- a/langchain/llms/__init__.py +++ b/langchain/llms/__init__.py @@ -1,7 +1,19 @@ """Wrappers on top of large language models APIs.""" +from typing import Dict, Type + +from langchain.llms.ai21 import AI21 +from langchain.llms.base import LLM from langchain.llms.cohere import Cohere from langchain.llms.huggingface_hub import HuggingFaceHub from langchain.llms.nlpcloud import NLPCloud from langchain.llms.openai import OpenAI -__all__ = ["Cohere", "NLPCloud", "OpenAI", "HuggingFaceHub"] +__all__ = ["Cohere", "NLPCloud", "OpenAI", "HuggingFaceHub", "AI21"] + +type_to_cls_dict: Dict[str, Type[LLM]] = { + "ai21": AI21, + "cohere": Cohere, + "huggingface_hub": HuggingFaceHub, + "nlpcloud": NLPCloud, + "openai": OpenAI, +} diff --git a/langchain/llms/ai21.py b/langchain/llms/ai21.py index a870d9e4..3678473d 100644 --- a/langchain/llms/ai21.py +++ b/langchain/llms/ai21.py @@ -19,7 +19,7 @@ class AI21PenaltyData(BaseModel): applyToEmojis: bool = True -class AI21(BaseModel, LLM): +class AI21(LLM, BaseModel): """Wrapper around AI21 large language models. To use, you should have the environment variable ``AI21_API_KEY`` @@ -96,6 +96,11 @@ class AI21(BaseModel, LLM): """Get the identifying parameters.""" return {**{"model": self.model}, **self._default_params} + @property + def _llm_type(self) -> str: + """Return type of llm.""" + return "ai21" + def __call__(self, prompt: str, stop: Optional[List[str]] = None) -> str: """Call out to AI21's complete endpoint. 
diff --git a/langchain/llms/base.py b/langchain/llms/base.py index 367aa503..0eb8b73b 100644 --- a/langchain/llms/base.py +++ b/langchain/llms/base.py @@ -1,6 +1,11 @@ """Base interface for large language models to expose.""" +import json from abc import ABC, abstractmethod -from typing import Any, List, Mapping, NamedTuple, Optional +from pathlib import Path +from typing import Any, Dict, List, Mapping, NamedTuple, Optional, Union + +import yaml +from pydantic import BaseModel, Extra class Generation(NamedTuple): @@ -21,9 +26,14 @@ class LLMResult(NamedTuple): """For arbitrary LLM provider specific output.""" -class LLM(ABC): +class LLM(BaseModel, ABC): """LLM wrapper should take in a prompt and return a string.""" + class Config: + """Configuration for this pydantic object.""" + + extra = Extra.forbid + def generate( self, prompts: List[str], stop: Optional[List[str]] = None ) -> LLMResult: @@ -68,3 +78,46 @@ class LLM(ABC): """Get a string representation of the object for printing.""" cls_name = f"\033[1m{self.__class__.__name__}\033[0m" return f"{cls_name}\nParams: {self._identifying_params}" + + @property + @abstractmethod + def _llm_type(self) -> str: + """Return type of llm.""" + + def _llm_dict(self) -> Dict: + """Return a dictionary of the prompt.""" + starter_dict = dict(self._identifying_params) + starter_dict["_type"] = self._llm_type + return starter_dict + + def save(self, file_path: Union[Path, str]) -> None: + """Save the LLM. + + Args: + file_path: Path to file to save the LLM to. + + Example: + .. code-block:: python + + llm.save(file_path="path/llm.yaml") + """ + # Convert file to Path object. + if isinstance(file_path, str): + save_path = Path(file_path) + else: + save_path = file_path + + directory_path = save_path.parent + directory_path.mkdir(parents=True, exist_ok=True) + + # Fetch dictionary to save + prompt_dict = self._llm_dict() + + if save_path.suffix == ".json": + with open(file_path, "w") as f: + json.dump(prompt_dict, f, indent=4) + elif save_path.suffix == ".yaml": + with open(file_path, "w") as f: + yaml.dump(prompt_dict, f, default_flow_style=False) + else: + raise ValueError(f"{save_path} must be json or yaml") diff --git a/langchain/llms/cohere.py b/langchain/llms/cohere.py index e051ba47..3853cfb9 100644 --- a/langchain/llms/cohere.py +++ b/langchain/llms/cohere.py @@ -85,6 +85,11 @@ class Cohere(LLM, BaseModel): """Get the identifying parameters.""" return {**{"model": self.model}, **self._default_params} + @property + def _llm_type(self) -> str: + """Return type of llm.""" + return "cohere" + def __call__(self, prompt: str, stop: Optional[List[str]] = None) -> str: """Call out to Cohere's generate endpoint. diff --git a/langchain/llms/huggingface_hub.py b/langchain/llms/huggingface_hub.py index 5cded677..74f0545a 100644 --- a/langchain/llms/huggingface_hub.py +++ b/langchain/llms/huggingface_hub.py @@ -74,7 +74,15 @@ class HuggingFaceHub(LLM, BaseModel): def _identifying_params(self) -> Mapping[str, Any]: """Get the identifying parameters.""" _model_kwargs = self.model_kwargs or {} - return {**{"repo_id": self.repo_id}, **_model_kwargs} + return { + **{"repo_id": self.repo_id, "task": self.task}, + **{"model_kwargs": _model_kwargs}, + } + + @property + def _llm_type(self) -> str: + """Return type of llm.""" + return "huggingface_hub" def __call__(self, prompt: str, stop: Optional[List[str]] = None) -> str: """Call out to HuggingFace Hub's inference endpoint. 
diff --git a/langchain/llms/loading.py b/langchain/llms/loading.py new file mode 100644 index 00000000..d5881ba4 --- /dev/null +++ b/langchain/llms/loading.py @@ -0,0 +1,42 @@ +"""Base interface for loading large language models apis.""" +import json +from pathlib import Path +from typing import Union + +import yaml + +from langchain.llms import type_to_cls_dict +from langchain.llms.base import LLM + + +def load_llm_from_config(config: dict) -> LLM: + """Load LLM from Config Dict.""" + if "_type" not in config: + raise ValueError("Must specify an LLM Type in config") + config_type = config.pop("_type") + + if config_type not in type_to_cls_dict: + raise ValueError(f"Loading {config_type} LLM not supported") + + llm_cls = type_to_cls_dict[config_type] + return llm_cls(**config) + + +def load_llm(file: Union[str, Path]) -> LLM: + """Load LLM from file.""" + # Convert file to Path object. + if isinstance(file, str): + file_path = Path(file) + else: + file_path = file + # Load from either json or yaml. + if file_path.suffix == ".json": + with open(file_path) as f: + config = json.load(f) + elif file_path.suffix == ".yaml": + with open(file_path, "r") as f: + config = yaml.safe_load(f) + else: + raise ValueError("File type must be json or yaml") + # Load the LLM from the config now. + return load_llm_from_config(config) diff --git a/langchain/llms/manifest.py b/langchain/llms/manifest.py index 0e1fb720..665f0c5b 100644 --- a/langchain/llms/manifest.py +++ b/langchain/llms/manifest.py @@ -37,6 +37,11 @@ class ManifestWrapper(LLM, BaseModel): kwargs = self.llm_kwargs or {} return {**self.client.client.get_model_params(), **kwargs} + @property + def _llm_type(self) -> str: + """Return type of llm.""" + return "manifest" + def __call__(self, prompt: str, stop: Optional[List[str]] = None) -> str: """Call out to LLM through Manifest.""" if stop is not None and len(stop) != 1: diff --git a/langchain/llms/nlpcloud.py b/langchain/llms/nlpcloud.py index d9e4c54e..ae3dc07b 100644 --- a/langchain/llms/nlpcloud.py +++ b/langchain/llms/nlpcloud.py @@ -106,6 +106,11 @@ class NLPCloud(LLM, BaseModel): """Get the identifying parameters.""" return {**{"model_name": self.model_name}, **self._default_params} + @property + def _llm_type(self) -> str: + """Return type of llm.""" + return "nlpcloud" + def __call__(self, prompt: str, stop: Optional[List[str]] = None) -> str: """Call out to NLPCloud's create endpoint. diff --git a/langchain/llms/openai.py b/langchain/llms/openai.py index fd88d873..44484068 100644 --- a/langchain/llms/openai.py +++ b/langchain/llms/openai.py @@ -142,7 +142,12 @@ class OpenAI(LLM, BaseModel): @property def _identifying_params(self) -> Mapping[str, Any]: """Get the identifying parameters.""" - return {**{"model": self.model_name}, **self._default_params} + return {**{"model_name": self.model_name}, **self._default_params} + + @property + def _llm_type(self) -> str: + """Return type of llm.""" + return "openai" def __call__(self, prompt: str, stop: Optional[List[str]] = None) -> str: """Call out to OpenAI's create endpoint. 
diff --git a/tests/integration_tests/llms/test_ai21.py b/tests/integration_tests/llms/test_ai21.py index 3737ee5c..16a8716d 100644 --- a/tests/integration_tests/llms/test_ai21.py +++ b/tests/integration_tests/llms/test_ai21.py @@ -1,6 +1,9 @@ """Test AI21 API wrapper.""" +from pathlib import Path + from langchain.llms.ai21 import AI21 +from langchain.llms.loading import load_llm def test_ai21_call() -> None: @@ -8,3 +11,11 @@ def test_ai21_call() -> None: llm = AI21(maxTokens=10) output = llm("Say foo:") assert isinstance(output, str) + + +def test_saving_loading_llm(tmp_path: Path) -> None: + """Test saving/loading an AI21 LLM.""" + llm = AI21(maxTokens=10) + llm.save(file_path=tmp_path / "ai21.yaml") + loaded_llm = load_llm(tmp_path / "ai21.yaml") + assert llm == loaded_llm diff --git a/tests/integration_tests/llms/test_cohere.py b/tests/integration_tests/llms/test_cohere.py index f1a8a6c3..4c260982 100644 --- a/tests/integration_tests/llms/test_cohere.py +++ b/tests/integration_tests/llms/test_cohere.py @@ -1,6 +1,10 @@ """Test Cohere API wrapper.""" +from pathlib import Path + from langchain.llms.cohere import Cohere +from langchain.llms.loading import load_llm +from tests.integration_tests.llms.utils import assert_llm_equality def test_cohere_call() -> None: @@ -8,3 +12,11 @@ def test_cohere_call() -> None: llm = Cohere(max_tokens=10) output = llm("Say foo:") assert isinstance(output, str) + + +def test_saving_loading_llm(tmp_path: Path) -> None: + """Test saving/loading an Cohere LLM.""" + llm = Cohere(max_tokens=10) + llm.save(file_path=tmp_path / "cohere.yaml") + loaded_llm = load_llm(tmp_path / "cohere.yaml") + assert_llm_equality(llm, loaded_llm) diff --git a/tests/integration_tests/llms/test_huggingface_hub.py b/tests/integration_tests/llms/test_huggingface_hub.py index aa181a87..df0b4416 100644 --- a/tests/integration_tests/llms/test_huggingface_hub.py +++ b/tests/integration_tests/llms/test_huggingface_hub.py @@ -1,8 +1,12 @@ """Test HuggingFace API wrapper.""" +from pathlib import Path + import pytest from langchain.llms.huggingface_hub import HuggingFaceHub +from langchain.llms.loading import load_llm +from tests.integration_tests.llms.utils import assert_llm_equality def test_huggingface_text_generation() -> None: @@ -24,3 +28,11 @@ def test_huggingface_call_error() -> None: llm = HuggingFaceHub(model_kwargs={"max_new_tokens": -1}) with pytest.raises(ValueError): llm("Say foo:") + + +def test_saving_loading_llm(tmp_path: Path) -> None: + """Test saving/loading an HuggingFaceHub LLM.""" + llm = HuggingFaceHub(repo_id="gpt2", model_kwargs={"max_new_tokens": 10}) + llm.save(file_path=tmp_path / "hf.yaml") + loaded_llm = load_llm(tmp_path / "hf.yaml") + assert_llm_equality(llm, loaded_llm) diff --git a/tests/integration_tests/llms/test_nlpcloud.py b/tests/integration_tests/llms/test_nlpcloud.py index 9e4664e4..4c5ccca0 100644 --- a/tests/integration_tests/llms/test_nlpcloud.py +++ b/tests/integration_tests/llms/test_nlpcloud.py @@ -1,6 +1,10 @@ """Test NLPCloud API wrapper.""" +from pathlib import Path + +from langchain.llms.loading import load_llm from langchain.llms.nlpcloud import NLPCloud +from tests.integration_tests.llms.utils import assert_llm_equality def test_nlpcloud_call() -> None: @@ -8,3 +12,11 @@ def test_nlpcloud_call() -> None: llm = NLPCloud(max_length=10) output = llm("Say foo:") assert isinstance(output, str) + + +def test_saving_loading_llm(tmp_path: Path) -> None: + """Test saving/loading an NLPCloud LLM.""" + llm = NLPCloud(max_length=10) + 
llm.save(file_path=tmp_path / "nlpcloud.yaml") + loaded_llm = load_llm(tmp_path / "nlpcloud.yaml") + assert_llm_equality(llm, loaded_llm) diff --git a/tests/integration_tests/llms/test_openai.py b/tests/integration_tests/llms/test_openai.py index 4050e42e..721b5b43 100644 --- a/tests/integration_tests/llms/test_openai.py +++ b/tests/integration_tests/llms/test_openai.py @@ -1,7 +1,10 @@ """Test OpenAI API wrapper.""" +from pathlib import Path + import pytest +from langchain.llms.loading import load_llm from langchain.llms.openai import OpenAI @@ -44,3 +47,11 @@ def test_openai_stop_error() -> None: llm = OpenAI(stop="3", temperature=0) with pytest.raises(ValueError): llm("write an ordered list of five items", stop=["\n"]) + + +def test_saving_loading_llm(tmp_path: Path) -> None: + """Test saving/loading an OpenAPI LLM.""" + llm = OpenAI(max_tokens=10) + llm.save(file_path=tmp_path / "openai.yaml") + loaded_llm = load_llm(tmp_path / "openai.yaml") + assert loaded_llm == llm diff --git a/tests/integration_tests/llms/utils.py b/tests/integration_tests/llms/utils.py new file mode 100644 index 00000000..6047e979 --- /dev/null +++ b/tests/integration_tests/llms/utils.py @@ -0,0 +1,16 @@ +"""Utils for LLM Tests.""" + +from langchain.llms.base import LLM + + +def assert_llm_equality(llm: LLM, loaded_llm: LLM) -> None: + """Assert LLM Equality for tests.""" + # Check that they are the same type. + assert type(llm) == type(loaded_llm) + # Client field can be session based, so hash is different despite + # all other values being the same, so just assess all other fields + for field in llm.__fields__.keys(): + if field != "client": + val = getattr(llm, field) + new_val = getattr(loaded_llm, field) + assert new_val == val diff --git a/tests/unit_tests/agents/test_agent.py b/tests/unit_tests/agents/test_agent.py index 85853e70..d407f4b4 100644 --- a/tests/unit_tests/agents/test_agent.py +++ b/tests/unit_tests/agents/test_agent.py @@ -2,17 +2,17 @@ from typing import Any, List, Mapping, Optional +from pydantic import BaseModel + from langchain.agents import Tool, initialize_agent from langchain.llms.base import LLM -class FakeListLLM(LLM): +class FakeListLLM(LLM, BaseModel): """Fake LLM for testing that outputs elements of a list.""" - def __init__(self, responses: List[str]): - """Initialize with list of responses.""" - self.responses = responses - self.i = -1 + responses: List[str] + i: int = -1 def __call__(self, prompt: str, stop: Optional[List[str]] = None) -> str: """Increment counter, and then return response in that index.""" @@ -25,6 +25,11 @@ class FakeListLLM(LLM): def _identifying_params(self) -> Mapping[str, Any]: return {} + @property + def _llm_type(self) -> str: + """Return type of llm.""" + return "fake_list" + def test_agent_bad_action() -> None: """Test react chain when bad action given.""" @@ -33,7 +38,7 @@ def test_agent_bad_action() -> None: f"I'm turning evil\nAction: {bad_action_name}\nAction Input: misalignment", "Oh well\nAction: Final Answer\nAction Input: curses foiled again", ] - fake_llm = FakeListLLM(responses) + fake_llm = FakeListLLM(responses=responses) tools = [ Tool("Search", lambda x: x, "Useful for searching"), Tool("Lookup", lambda x: x, "Useful for looking up things in a table"), diff --git a/tests/unit_tests/agents/test_react.py b/tests/unit_tests/agents/test_react.py index 2d6c4cca..917fa745 100644 --- a/tests/unit_tests/agents/test_react.py +++ b/tests/unit_tests/agents/test_react.py @@ -2,6 +2,8 @@ from typing import Any, List, Mapping, Optional, Union +from 
pydantic import BaseModel + from langchain.agents.react.base import ReActChain, ReActDocstoreAgent from langchain.agents.tools import Tool from langchain.docstore.base import Docstore @@ -20,13 +22,16 @@ Made in 2022.""" _FAKE_PROMPT = PromptTemplate(input_variables=["input"], template="{input}") -class FakeListLLM(LLM): +class FakeListLLM(LLM, BaseModel): """Fake LLM for testing that outputs elements of a list.""" - def __init__(self, responses: List[str]): - """Initialize with list of responses.""" - self.responses = responses - self.i = -1 + responses: List[str] + i: int = -1 + + @property + def _llm_type(self) -> str: + """Return type of llm.""" + return "fake_list" def __call__(self, prompt: str, stop: Optional[List[str]] = None) -> str: """Increment counter, and then return response in that index.""" @@ -50,7 +55,7 @@ class FakeDocstore(Docstore): def test_predict_until_observation_normal() -> None: """Test predict_until_observation when observation is made normally.""" outputs = ["foo\nAction 1: Search[foo]"] - fake_llm = FakeListLLM(outputs) + fake_llm = FakeListLLM(responses=outputs) tools = [ Tool("Search", lambda x: x), Tool("Lookup", lambda x: x), @@ -65,7 +70,7 @@ def test_predict_until_observation_normal() -> None: def test_predict_until_observation_repeat() -> None: """Test when no action is generated initially.""" outputs = ["foo", " Search[foo]"] - fake_llm = FakeListLLM(outputs) + fake_llm = FakeListLLM(responses=outputs) tools = [ Tool("Search", lambda x: x), Tool("Lookup", lambda x: x), @@ -84,7 +89,7 @@ def test_react_chain() -> None: "I should probably lookup\nAction 2: Lookup[made]", "Ah okay now I know the answer\nAction 3: Finish[2022]", ] - fake_llm = FakeListLLM(responses) + fake_llm = FakeListLLM(responses=responses) react_chain = ReActChain(llm=fake_llm, docstore=FakeDocstore()) output = react_chain.run("when was langchain made") assert output == "2022" @@ -97,7 +102,7 @@ def test_react_chain_bad_action() -> None: f"I'm turning evil\nAction 1: {bad_action_name}[langchain]", "Oh well\nAction 2: Finish[curses foiled again]", ] - fake_llm = FakeListLLM(responses) + fake_llm = FakeListLLM(responses=responses) react_chain = ReActChain(llm=fake_llm, docstore=FakeDocstore()) output = react_chain.run("when was langchain made") assert output == "curses foiled again" diff --git a/tests/unit_tests/chains/test_natbot.py b/tests/unit_tests/chains/test_natbot.py index bf4223b2..a9817f71 100644 --- a/tests/unit_tests/chains/test_natbot.py +++ b/tests/unit_tests/chains/test_natbot.py @@ -2,11 +2,13 @@ from typing import Any, List, Mapping, Optional +from pydantic import BaseModel + from langchain.chains.natbot.base import NatBotChain from langchain.llms.base import LLM -class FakeLLM(LLM): +class FakeLLM(LLM, BaseModel): """Fake LLM wrapper for testing purposes.""" def __call__(self, prompt: str, stop: Optional[List[str]] = None) -> str: @@ -16,6 +18,11 @@ class FakeLLM(LLM): else: return "bar" + @property + def _llm_type(self) -> str: + """Return type of llm.""" + return "fake" + @property def _identifying_params(self) -> Mapping[str, Any]: return {} diff --git a/tests/unit_tests/llms/fake_llm.py b/tests/unit_tests/llms/fake_llm.py index 65fd8c79..a3896b6f 100644 --- a/tests/unit_tests/llms/fake_llm.py +++ b/tests/unit_tests/llms/fake_llm.py @@ -1,20 +1,25 @@ """Fake LLM wrapper for testing purposes.""" from typing import Any, List, Mapping, Optional +from pydantic import BaseModel + from langchain.llms.base import LLM -class FakeLLM(LLM): +class FakeLLM(LLM, BaseModel): 
"""Fake LLM wrapper for testing purposes.""" - def __init__(self, queries: Optional[Mapping] = None): - """Initialize with optional lookup of queries.""" - self._queries = queries + queries: Optional[Mapping] = None + + @property + def _llm_type(self) -> str: + """Return type of llm.""" + return "fake" def __call__(self, prompt: str, stop: Optional[List[str]] = None) -> str: """First try to lookup in queries, else return 'foo' or 'bar'.""" - if self._queries is not None: - return self._queries[prompt] + if self.queries is not None: + return self.queries[prompt] if stop is None: return "foo" else: diff --git a/tests/unit_tests/llms/test_loading.py b/tests/unit_tests/llms/test_loading.py new file mode 100644 index 00000000..e478a0b0 --- /dev/null +++ b/tests/unit_tests/llms/test_loading.py @@ -0,0 +1,15 @@ +"""Test LLM saving and loading functions.""" +from pathlib import Path +from unittest.mock import patch + +from langchain.llms.loading import load_llm +from tests.unit_tests.llms.fake_llm import FakeLLM + + +@patch("langchain.llms.loading.type_to_cls_dict", {"fake": FakeLLM}) +def test_saving_loading_round_trip(tmp_path: Path) -> None: + """Test saving/loading a Fake LLM.""" + fake_llm = FakeLLM() + fake_llm.save(file_path=tmp_path / "fake_llm.yaml") + loaded_llm = load_llm(tmp_path / "fake_llm.yaml") + assert loaded_llm == fake_llm From 996b5a3dfb6a95910ad42a5e5cf0f59bc4d07a45 Mon Sep 17 00:00:00 2001 From: Harrison Chase Date: Tue, 13 Dec 2022 07:50:46 -0800 Subject: [PATCH 13/29] Harrison/llm final stuff (#332) --- README.md | 21 +- docs/examples/chains/summarize.ipynb | 4 +- docs/examples/prompts.rst | 31 ++- ...examples.ipynb => few_shot_examples.ipynb} | 0 docs/examples/prompts/llm.json | 11 + docs/examples/prompts/llm.yaml | 9 + docs/examples/prompts/llm_functionality.ipynb | 222 ++++++++++++++++++ docs/examples/prompts/llm_serialization.ipynb | 166 +++++++++++++ langchain/chains/llm.py | 38 ++- 9 files changed, 481 insertions(+), 21 deletions(-) rename docs/examples/prompts/{few shot examples.ipynb => few_shot_examples.ipynb} (100%) create mode 100644 docs/examples/prompts/llm.json create mode 100644 docs/examples/prompts/llm.yaml create mode 100644 docs/examples/prompts/llm_functionality.ipynb create mode 100644 docs/examples/prompts/llm_serialization.ipynb diff --git a/README.md b/README.md index ed5c8545..e802a629 100644 --- a/README.md +++ b/README.md @@ -31,14 +31,25 @@ Please see [here](https://langchain.readthedocs.io/en/latest/?) for full documen There are four main areas that LangChain is designed to help with. These are, in increasing order of complexity: -1. LLM and Prompts -2. Chains -3. Agents -4. Memory +**📃 LLMs and Prompts:** + +This includes prompt management, prompt optimization, generic interface for all LLMs, and common utilities for working with LLMs. + +**🔗 Chains:** + +Chains go beyond just a single LLM call, and are sequences of calls (whether to an LLM or a different utility). LangChain provides a standard interface for chains, lots of integrations with other tools, and end-to-end chains for common applications. + +**🤖 Agents:** + +Agents involve an LLM making decisions about which Actions to take, taking that Action, seeing an Observation, and repeating that until done. LangChain provides a standard interface for agents, a selection of agents to choose from, and examples of end to end agents. + +**🧠 Memory:** + +Memory is the concept of persisting state between calls of a chain/agent. 
LangChain provides a standard interface for memory, a collection of memory implementations, and examples of chains/agents that use memory. For more information on these concepts, please see our [full documentation](https://langchain.readthedocs.io/en/latest/?). -## 🤖 Contributing +## 💁 Contributing As an open source project in a rapidly developing field, we are extremely open to contributions, whether it be in the form of a new feature, improved infra, or better documentation. diff --git a/docs/examples/chains/summarize.ipynb b/docs/examples/chains/summarize.ipynb index cd2fd716..ee6b6572 100644 --- a/docs/examples/chains/summarize.ipynb +++ b/docs/examples/chains/summarize.ipynb @@ -131,7 +131,7 @@ }, { "cell_type": "code", - "execution_count": 8, + "execution_count": 6, "id": "ef28e1d4", "metadata": {}, "outputs": [], @@ -148,7 +148,7 @@ { "data": { "text/plain": [ - "' In response to Russian aggression in Ukraine, the US and its allies have imposed economic sanctions, cut off access to technology, seized assets of Russian oligarchs, and closed American airspace to Russian flights. The US is also providing military, economic, and humanitarian assistance to Ukraine, mobilizing ground forces, air squadrons, and ship deployments, and releasing 30 million barrels of oil from its Strategic Petroleum Reserve. President Biden has also passed the American Rescue Plan, Bipartisan Infrastructure Law, and Bipartisan Innovation Act to provide economic relief and create jobs.'" + "\" In response to Vladimir Putin's aggression in Ukraine, the US and its allies have taken action to hold him accountable, including economic sanctions, cutting off access to technology, and seizing the assets of Russian oligarchs. They are also providing military, economic, and humanitarian assistance to the Ukrainians, and releasing 60 million barrels of oil from reserves around the world. President Biden has passed several laws to provide economic relief to Americans and create jobs, and is making sure taxpayer dollars support American jobs and businesses.\"" ] }, "execution_count": 9, diff --git a/docs/examples/prompts.rst b/docs/examples/prompts.rst index 6fe34382..8148b1d2 100644 --- a/docs/examples/prompts.rst +++ b/docs/examples/prompts.rst @@ -1,10 +1,35 @@ -Prompts -======= +LLMs & Prompts +============== + +The examples here all highlight how to work with LLMs and prompts. + +**LLMs** + +`LLM Functionality `_: A walkthrough of all the functionality the standard LLM interface exposes. + +`LLM Serialization `_: A walkthrough of how to serialize LLMs to and from disk. + +`Custom LLM `_: How to create and use a custom LLM class, in case you have an LLM not from one of the standard providers (including one that you host yourself). + + +**Prompts** + +`Prompt Management `_: A walkthrough of all the functionality LangChain supports for working with prompts. + +`Prompt Serialization `_: A walkthrough of how to serialize prompts to and from disk. + +`Few Shot Examples `_: How to include examples in the prompt. + +`Generate Examples `_: How to use existing examples to generate more examples. + +`Custom Example Selector `_: How to create and use a custom ExampleSelector (the class responsible for choosing which examples to use in a prompt). + +`Custom Prompt Template `_: How to create and use a custom PromptTemplate, the logic that decides how input variables get formatted into a prompt. -The examples here all highlight how to work with prompts. .. 
toctree:: :maxdepth: 1 :glob: + :hidden: prompts/* diff --git a/docs/examples/prompts/few shot examples.ipynb b/docs/examples/prompts/few_shot_examples.ipynb similarity index 100% rename from docs/examples/prompts/few shot examples.ipynb rename to docs/examples/prompts/few_shot_examples.ipynb diff --git a/docs/examples/prompts/llm.json b/docs/examples/prompts/llm.json new file mode 100644 index 00000000..df2b7e1e --- /dev/null +++ b/docs/examples/prompts/llm.json @@ -0,0 +1,11 @@ +{ + "model_name": "text-davinci-003", + "temperature": 0.7, + "max_tokens": 256, + "top_p": 1.0, + "frequency_penalty": 0.0, + "presence_penalty": 0.0, + "n": 1, + "best_of": 1, + "_type": "openai" +} \ No newline at end of file diff --git a/docs/examples/prompts/llm.yaml b/docs/examples/prompts/llm.yaml new file mode 100644 index 00000000..ee384ffa --- /dev/null +++ b/docs/examples/prompts/llm.yaml @@ -0,0 +1,9 @@ +_type: openai +best_of: 1 +frequency_penalty: 0.0 +max_tokens: 256 +model_name: text-davinci-003 +n: 1 +presence_penalty: 0.0 +temperature: 0.7 +top_p: 1.0 diff --git a/docs/examples/prompts/llm_functionality.ipynb b/docs/examples/prompts/llm_functionality.ipynb new file mode 100644 index 00000000..27387010 --- /dev/null +++ b/docs/examples/prompts/llm_functionality.ipynb @@ -0,0 +1,222 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "060f7bc9", + "metadata": {}, + "source": [ + "# LLM Functionality\n", + "\n", + "This notebook goes over all the different features of the LLM class in LangChain.\n", + "\n", + "We will work with an OpenAI LLM wrapper, although these functionalities should exist for all LLM types." + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "id": "5bddaa9a", + "metadata": {}, + "outputs": [], + "source": [ + "from langchain.llms import OpenAI" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "f6bed875", + "metadata": {}, + "outputs": [], + "source": [ + "llm = OpenAI(model_name=\"text-ada-001\", n=2, best_of=2)" + ] + }, + { + "cell_type": "markdown", + "id": "edb2f14e", + "metadata": {}, + "source": [ + "**Generate Text:** The most basic functionality an LLM has is just the ability to call it, passing in a string and getting back a string." + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "c29ba285", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'\\n\\nWhy did the chicken cross the road?\\n\\nTo get to the other side.'" + ] + }, + "execution_count": 3, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "llm(\"Tell me a joke\")" + ] + }, + { + "cell_type": "markdown", + "id": "1f4a350a", + "metadata": {}, + "source": [ + "**Generate:** More broadly, you can call it with a list of inputs, getting back a more complete response than just the text. 
This complete response includes things like multiple top responses, as well as LLM provider specific information" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "a586c9ca", + "metadata": {}, + "outputs": [], + "source": [ + "llm_result = llm.generate([\"Tell me a joke\", \"Tell me a poem\"])" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "22470289", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "[Generation(text='\\n\\nWhy did the chicken cross the road?\\n\\nTo get to the other side.'),\n", + " Generation(text='\\n\\nWhy did the chicken cross the road?\\n\\nTo get to the other side!')]" + ] + }, + "execution_count": 5, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# Results of the first input\n", + "llm_result.generations[0]" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "id": "a1e72553", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "[Generation(text='\\n\\nIn the eyes of the moon\\n\\nI have seen a face\\n\\nThat I will never forget.\\n\\nThe light that I see\\n\\nIs like a fire in my heart.\\n\\nEvery letter I write\\n\\nWill be the last word\\n\\nOf my love for this person.\\n\\nThe darkness that I feel\\n\\nIs like a weight on my heart.'),\n", + " Generation(text=\"\\n\\nA rose by the side of the road\\n\\nIs all I need to find my way\\n\\nTo the place I've been searching for\\n\\nAnd my heart is singing with joy\\n\\nWhen I look at this rose\\n\\nIt reminds me of the love I've found\\n\\nAnd I know that wherever I go\\n\\nI'll always find my rose by the side of the road.\")]" + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# Results of the second input\n", + "llm_result.generations[1]" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "id": "90c52536", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'token_usage': JSON: {\n", + " \"completion_tokens\": 199,\n", + " \"prompt_tokens\": 8,\n", + " \"total_tokens\": 207\n", + " }}" + ] + }, + "execution_count": 7, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# Provider specific info\n", + "llm_result.llm_output" + ] + }, + { + "cell_type": "markdown", + "id": "92f6e7a5", + "metadata": {}, + "source": [ + "**Number of Tokens:** You can also estimate how many tokens a piece of text will be in that model. This is useful because models have a context length (and cost more for more tokens), which means you need to be aware of how long the text you are passing in is.\n", + "\n", + "Notice that by default the tokens are estimated using a HuggingFace tokenizer." + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "id": "acfd9200", + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "None of PyTorch, TensorFlow >= 2.0, or Flax have been found. 
Models won't be available and only tokenizers, configuration and file/data utilities can be used.\n" + ] + }, + { + "data": { + "text/plain": [ + "3" + ] + }, + "execution_count": 8, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "llm.get_num_tokens(\"what a joke\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "68ff3688", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.8" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/docs/examples/prompts/llm_serialization.ipynb b/docs/examples/prompts/llm_serialization.ipynb new file mode 100644 index 00000000..99556bfa --- /dev/null +++ b/docs/examples/prompts/llm_serialization.ipynb @@ -0,0 +1,166 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "73f9bf40", + "metadata": {}, + "source": [ + "# LLM Serialization\n", + "\n", + "This notebook walks how to write and read an LLM Configuration to and from disk. This is useful if you want to save the configuration for a given LLM (eg the provider, the temperature, etc)." + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "id": "9c9fb6ff", + "metadata": {}, + "outputs": [], + "source": [ + "from langchain.llms import OpenAI\n", + "from langchain.llms.loading import load_llm" + ] + }, + { + "cell_type": "markdown", + "id": "88ce018b", + "metadata": {}, + "source": [ + "### Loading\n", + "First, lets go over loading a LLM from disk. LLMs can be saved on disk in two formats: json or yaml. No matter the extension, they are loaded in the same way." + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "f12b28f3", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "{\r\n", + " \"model_name\": \"text-davinci-003\",\r\n", + " \"temperature\": 0.7,\r\n", + " \"max_tokens\": 256,\r\n", + " \"top_p\": 1,\r\n", + " \"frequency_penalty\": 0,\r\n", + " \"presence_penalty\": 0,\r\n", + " \"n\": 1,\r\n", + " \"best_of\": 1,\r\n", + " \"_type\": \"openai\"\r\n", + "}" + ] + } + ], + "source": [ + "!cat llm.json" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "9ab709fc", + "metadata": {}, + "outputs": [], + "source": [ + "llm = load_llm(\"llm.json\")" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "095b1d56", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "_type: openai\r\n", + "best_of: 1\r\n", + "frequency_penalty: 0\r\n", + "max_tokens: 256\r\n", + "model_name: text-davinci-003\r\n", + "n: 1\r\n", + "presence_penalty: 0\r\n", + "temperature: 0.7\r\n", + "top_p: 1\r\n" + ] + } + ], + "source": [ + "!cat llm.yaml" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "8cafaafe", + "metadata": {}, + "outputs": [], + "source": [ + "llm = load_llm(\"llm.yaml\")" + ] + }, + { + "cell_type": "markdown", + "id": "ab3e4223", + "metadata": {}, + "source": [ + "### Saving\n", + "If you want to go from a LLM in memory to a serialized version of it, you can do so easily by calling the `.save` method. Again, this supports both json and yaml." 
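What ties the two serialization formats together is the `_type` key recorded in the config (visible in the `llm.json` and `llm.yaml` files above); as far as this patch shows, that field is what lets `load_llm` decide which LLM wrapper class to rebuild, regardless of whether the file is JSON or YAML. A minimal round-trip sketch, assuming an OpenAI API key is available in the environment (the file name is illustrative):

    from langchain.llms import OpenAI
    from langchain.llms.loading import load_llm

    # Build an LLM with the parameters we care about, then persist them.
    llm = OpenAI(model_name="text-davinci-003", temperature=0.7)
    llm.save("my_llm.json")          # the file extension selects JSON vs YAML output

    # Later (or in another process), reconstruct the same configuration.
    same_llm = load_llm("my_llm.json")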
+ ] + }, + { + "cell_type": "code", + "execution_count": 6, + "id": "b38f685d", + "metadata": {}, + "outputs": [], + "source": [ + "llm.save(\"llm.json\")" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "id": "b7365503", + "metadata": {}, + "outputs": [], + "source": [ + "llm.save(\"llm.yaml\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "0e494851", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.8" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/langchain/chains/llm.py b/langchain/chains/llm.py index a1575ed4..31800774 100644 --- a/langchain/chains/llm.py +++ b/langchain/chains/llm.py @@ -51,18 +51,34 @@ class LLMChain(Chain, BaseModel): """ return [self.output_key] + def apply(self, input_list: List[Dict[str, Any]]) -> List[Dict[str, str]]: + """Utilize the LLM generate method for speed gains.""" + stop = None + if "stop" in input_list[0]: + stop = input_list[0]["stop"] + prompts = [] + for inputs in input_list: + selected_inputs = {k: inputs[k] for k in self.prompt.input_variables} + prompt = self.prompt.format(**selected_inputs) + if self.verbose: + langchain.logger.log_llm_inputs(selected_inputs, prompt) + if "stop" in inputs and inputs["stop"] != stop: + raise ValueError( + "If `stop` is present in any inputs, should be present in all." + ) + prompts.append(prompt) + response = self.llm.generate(prompts, stop=stop) + outputs = [] + for generation in response.generations: + # Get the text of the top generated string. + response_str = generation[0].text + if self.verbose: + langchain.logger.log_llm_response(response_str) + outputs.append({self.output_key: response_str}) + return outputs + def _call(self, inputs: Dict[str, Any]) -> Dict[str, str]: - selected_inputs = {k: inputs[k] for k in self.prompt.input_variables} - prompt = self.prompt.format(**selected_inputs) - if self.verbose: - langchain.logger.log_llm_inputs(selected_inputs, prompt) - kwargs = {} - if "stop" in inputs: - kwargs["stop"] = inputs["stop"] - response = self.llm(prompt, **kwargs) - if self.verbose: - langchain.logger.log_llm_response(response) - return {self.output_key: response} + return self.apply([inputs])[0] def predict(self, **kwargs: Any) -> str: """Format prompt with kwargs and pass to LLM. 
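The new `LLMChain.apply` above formats every input dict, sends all of the prompts to the LLM in a single `generate` call, and returns one output dict per input, which is where the speed gain comes from. A usage sketch (the `PromptTemplate`, prompt text, and inputs are illustrative and not part of this patch):

    from langchain.chains import LLMChain
    from langchain.llms import OpenAI
    from langchain.prompts import PromptTemplate

    prompt = PromptTemplate(
        input_variables=["product"],
        template="What is a good name for a company that makes {product}?",
    )
    chain = LLMChain(llm=OpenAI(temperature=0.9), prompt=prompt)

    # One dict per row; all prompts go out in a single llm.generate call,
    # and each result comes back as a dict keyed by the chain's output key.
    results = chain.apply(
        [{"product": "colorful socks"}, {"product": "solar panels"}]
    )

Since `_call` now just delegates to `apply([inputs])[0]`, single-input behaviour is unchanged.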
From 3c6796b72e96f776525a6dd3d473d68873576794 Mon Sep 17 00:00:00 2001 From: Harrison Chase Date: Tue, 13 Dec 2022 08:17:41 -0800 Subject: [PATCH 14/29] bump version to 0036 (#333) --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 33af4832..92f1ea6d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "langchain" -version = "0.0.35" +version = "0.0.36" description = "Building applications with LLMs through composability" authors = [] license = "MIT" From e26b6f9c89dd1020950edac71993ad3fc317f72f Mon Sep 17 00:00:00 2001 From: Harrison Chase Date: Wed, 14 Dec 2022 08:25:37 -0800 Subject: [PATCH 15/29] fix batching (#339) --- docs/examples/prompts/llm_functionality.ipynb | 143 ++++++++++-------- langchain/llms/openai.py | 33 +++- 2 files changed, 106 insertions(+), 70 deletions(-) diff --git a/docs/examples/prompts/llm_functionality.ipynb b/docs/examples/prompts/llm_functionality.ipynb index 27387010..05eec44e 100644 --- a/docs/examples/prompts/llm_functionality.ipynb +++ b/docs/examples/prompts/llm_functionality.ipynb @@ -2,7 +2,7 @@ "cells": [ { "cell_type": "markdown", - "id": "060f7bc9", + "id": "20ac6b98", "metadata": {}, "source": [ "# LLM Functionality\n", @@ -15,7 +15,7 @@ { "cell_type": "code", "execution_count": 1, - "id": "5bddaa9a", + "id": "df924055", "metadata": {}, "outputs": [], "source": [ @@ -25,7 +25,7 @@ { "cell_type": "code", "execution_count": 2, - "id": "f6bed875", + "id": "182b484c", "metadata": {}, "outputs": [], "source": [ @@ -34,7 +34,7 @@ }, { "cell_type": "markdown", - "id": "edb2f14e", + "id": "9695ccfc", "metadata": {}, "source": [ "**Generate Text:** The most basic functionality an LLM has is just the ability to call it, passing in a string and getting back a string." @@ -43,7 +43,7 @@ { "cell_type": "code", "execution_count": 3, - "id": "c29ba285", + "id": "9d12ac26", "metadata": {}, "outputs": [ { @@ -63,7 +63,7 @@ }, { "cell_type": "markdown", - "id": "1f4a350a", + "id": "e7d4d42d", "metadata": {}, "source": [ "**Generate:** More broadly, you can call it with a list of inputs, getting back a more complete response than just the text. 
This complete response includes things like multiple top responses, as well as LLM provider specific information" @@ -72,73 +72,23 @@ { "cell_type": "code", "execution_count": 4, - "id": "a586c9ca", + "id": "f4dc241a", "metadata": {}, "outputs": [], "source": [ - "llm_result = llm.generate([\"Tell me a joke\", \"Tell me a poem\"])" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "id": "22470289", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "[Generation(text='\\n\\nWhy did the chicken cross the road?\\n\\nTo get to the other side.'),\n", - " Generation(text='\\n\\nWhy did the chicken cross the road?\\n\\nTo get to the other side!')]" - ] - }, - "execution_count": 5, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "# Results of the first input\n", - "llm_result.generations[0]" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "id": "a1e72553", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "[Generation(text='\\n\\nIn the eyes of the moon\\n\\nI have seen a face\\n\\nThat I will never forget.\\n\\nThe light that I see\\n\\nIs like a fire in my heart.\\n\\nEvery letter I write\\n\\nWill be the last word\\n\\nOf my love for this person.\\n\\nThe darkness that I feel\\n\\nIs like a weight on my heart.'),\n", - " Generation(text=\"\\n\\nA rose by the side of the road\\n\\nIs all I need to find my way\\n\\nTo the place I've been searching for\\n\\nAnd my heart is singing with joy\\n\\nWhen I look at this rose\\n\\nIt reminds me of the love I've found\\n\\nAnd I know that wherever I go\\n\\nI'll always find my rose by the side of the road.\")]" - ] - }, - "execution_count": 6, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "# Results of the second input\n", - "llm_result.generations[1]" + "llm_result = llm.generate([\"Tell me a joke\", \"Tell me a poem\"]*15)" ] }, { "cell_type": "code", "execution_count": 7, - "id": "90c52536", + "id": "740392f6", "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "{'token_usage': JSON: {\n", - " \"completion_tokens\": 199,\n", - " \"prompt_tokens\": 8,\n", - " \"total_tokens\": 207\n", - " }}" + "30" ] }, "execution_count": 7, @@ -146,6 +96,73 @@ "output_type": "execute_result" } ], + "source": [ + "len(llm_result.generations)" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "id": "ab6cdcf1", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "[Generation(text='\\n\\nWhy did the chicken cross the road?\\n\\nTo get to the other side!'),\n", + " Generation(text='\\n\\nWhy did the chicken cross the road?\\n\\nTo get to the other side!')]" + ] + }, + "execution_count": 10, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "llm_result.generations[0]" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "id": "4946a778", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "[Generation(text='\\n\\nHow can I make you my\\nx- Multiplayer gamemode\\n\\nHow can I make you my\\nx- Multiplayer gamemode\\n\\nHow can I make you my\\nx- Multiplayer gamemode\\n\\nHow can I make you my\\nx- Multiplayer gamemode\\n\\nHow can I make you my\\nx- Multiplayer gamemode\\n\\nHow can I make you my\\nx- Multiplayer gamemode\\n\\nHow can I make you my\\nx- Multiplayer gamemode\\n\\nHow can I make you my\\nx- Multiplayer gamemode\\n\\nHow can I make you my\\nx- Multiplayer gamemode\\n\\nHow can I make you my\\nx- Multiplayer gamemode\\n\\nHow can I make you my\\nx- 
Multiplayer gamemode\\n\\nHow can I make you my\\nx- Multiplayer gamemode'),\n", + " Generation(text=\"\\n\\nWhen I was younger\\nI thought that love\\nI was something like a fairytale\\nI would find my prince\\nAnd we would be together\\nForever\\nI was naïve\\nAnd I was wrong\\nLove is not a fairytale\\nIt's something else entirely\\nSomething that should be cherished\\nAnd loved\\nAnd never taken for granted\\nLove is something that you have to work for\\nIt doesn't come easy\\nYou have to sacrifice\\nYour time, your effort\\nAnd sometimes you have to give up \\nYou have to do what's best for yourself\\nAnd sometimes that means giving love up\")]" + ] + }, + "execution_count": 9, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "llm_result.generations[-1]" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "id": "242e4527", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'token_usage': {'completion_tokens': 3721,\n", + " 'prompt_tokens': 120,\n", + " 'total_tokens': 3841}}" + ] + }, + "execution_count": 8, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "# Provider specific info\n", "llm_result.llm_output" @@ -153,7 +170,7 @@ }, { "cell_type": "markdown", - "id": "92f6e7a5", + "id": "bde8e04f", "metadata": {}, "source": [ "**Number of Tokens:** You can also estimate how many tokens a piece of text will be in that model. This is useful because models have a context length (and cost more for more tokens), which means you need to be aware of how long the text you are passing in is.\n", @@ -164,7 +181,7 @@ { "cell_type": "code", "execution_count": 8, - "id": "acfd9200", + "id": "b623c774", "metadata": {}, "outputs": [ { @@ -192,7 +209,7 @@ { "cell_type": "code", "execution_count": null, - "id": "68ff3688", + "id": "4196efd9", "metadata": {}, "outputs": [], "source": [] diff --git a/langchain/llms/openai.py b/langchain/llms/openai.py index 44484068..2e9485ca 100644 --- a/langchain/llms/openai.py +++ b/langchain/llms/openai.py @@ -45,6 +45,8 @@ class OpenAI(LLM, BaseModel): model_kwargs: Dict[str, Any] = Field(default_factory=dict) """Holds any model parameters valid for `create` call not explicitly specified.""" openai_api_key: Optional[str] = None + batch_size: int = 20 + """Batch size to use when passing multiple documents to generate.""" class Config: """Configuration for this pydantic object.""" @@ -114,6 +116,7 @@ class OpenAI(LLM, BaseModel): response = openai.generate(["Tell me a joke."]) """ + # TODO: write a unit test for this params = self._default_params if stop is not None: if "stop" in params: @@ -126,15 +129,31 @@ class OpenAI(LLM, BaseModel): "max_tokens set to -1 not supported for multiple inputs." ) params["max_tokens"] = self.max_tokens_for_prompt(prompts[0]) - - response = self.client.create(model=self.model_name, prompt=prompts, **params) - generations = [] - for i, prompt in enumerate(prompts): - choices = response["choices"][i * self.n : (i + 1) * self.n] - generations.append([Generation(text=choice["text"]) for choice in choices]) + sub_prompts = [ + prompts[i : i + self.batch_size] + for i in range(0, len(prompts), self.batch_size) + ] + choices = [] + token_usage = {} # Get the token usage from the response. # Includes prompt, completion, and total tokens used. 
- token_usage = response["usage"] + _keys = ["completion_tokens", "prompt_tokens", "total_tokens"] + for _prompts in sub_prompts: + response = self.client.create( + model=self.model_name, prompt=_prompts, **params + ) + choices.extend(response["choices"]) + for _key in _keys: + if _key not in token_usage: + token_usage[_key] = response["usage"][_key] + else: + token_usage[_key] += response["usage"][_key] + generations = [] + for i, prompt in enumerate(prompts): + sub_choices = choices[i * self.n : (i + 1) * self.n] + generations.append( + [Generation(text=choice["text"]) for choice in sub_choices] + ) return LLMResult( generations=generations, llm_output={"token_usage": token_usage} ) From 8c167627edeee187ffedcad533f130a9e87fb498 Mon Sep 17 00:00:00 2001 From: Harrison Chase Date: Wed, 14 Dec 2022 10:38:31 -0800 Subject: [PATCH 16/29] bump version (#340) --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 92f1ea6d..7a2292ba 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "langchain" -version = "0.0.36" +version = "0.0.37" description = "Building applications with LLMs through composability" authors = [] license = "MIT" From 5161ae7e088de24d950dc3e08ba8e740b5f2c28e Mon Sep 17 00:00:00 2001 From: Harrison Chase Date: Wed, 14 Dec 2022 22:31:34 -0800 Subject: [PATCH 17/29] add new example (#345) --- docs/explanation/cool_demos.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/docs/explanation/cool_demos.md b/docs/explanation/cool_demos.md index 27381633..21af928d 100644 --- a/docs/explanation/cool_demos.md +++ b/docs/explanation/cool_demos.md @@ -6,6 +6,9 @@ If you see any other demos that you think we should highlight, be sure to let us ## Open Source +### [YouTube Transcription Question Answering with Sources](https://colab.research.google.com/drive/1sKSTjt9cPstl_WMZ86JsgEqFG-aSAwkn?usp=sharing) +An end-to-end example of doing question answering on YouTube transcripts, returning the timestamps as sources to legitimize the answer. + ### [ThoughtSource](https://github.com/OpenBioLink/ThoughtSource) A central, open resource and community around data and tools related to chain-of-thought reasoning in large language models. From 8cf62ce06e0a2d9dd0e3d4ac48366379bf19ac62 Mon Sep 17 00:00:00 2001 From: Harrison Chase Date: Thu, 15 Dec 2022 07:52:51 -0800 Subject: [PATCH 18/29] Harrison/single input (#347) allow passing of single input into chain Co-authored-by: thepok --- langchain/chains/base.py | 16 +++++++++++++--- tests/unit_tests/chains/test_base.py | 19 +++++++++++++++++-- 2 files changed, 30 insertions(+), 5 deletions(-) diff --git a/langchain/chains/base.py b/langchain/chains/base.py index e041cdea..848664e5 100644 --- a/langchain/chains/base.py +++ b/langchain/chains/base.py @@ -1,6 +1,6 @@ """Base interface that all chains should implement.""" from abc import ABC, abstractmethod -from typing import Any, Dict, List, Optional +from typing import Any, Dict, List, Optional, Union from pydantic import BaseModel, Extra, Field @@ -74,18 +74,28 @@ class Chain(BaseModel, ABC): """Run the logic of this chain and return the output.""" def __call__( - self, inputs: Dict[str, Any], return_only_outputs: bool = False + self, inputs: Union[Dict[str, Any], Any], return_only_outputs: bool = False ) -> Dict[str, str]: """Run the logic of this chain and add to output if desired. Args: - inputs: Dictionary of inputs. 
+ inputs: Dictionary of inputs, or single input if chain expects + only one param. return_only_outputs: boolean for whether to return only outputs in the response. If True, only new keys generated by this chain will be returned. If False, both input keys and new keys generated by this chain will be returned. Defaults to False. """ + if not isinstance(inputs, dict): + if len(self.input_keys) != 1: + raise ValueError( + f"A single string input was passed in, but this chain expects " + f"multiple inputs ({self.input_keys}). When a chain expects " + f"multiple inputs, please call it by passing in a dictionary, " + "eg `chain({'foo': 1, 'bar': 2})`" + ) + inputs = {self.input_keys[0]: inputs} if self.memory is not None: external_context = self.memory.load_memory_variables(inputs) inputs = dict(inputs, **external_context) diff --git a/tests/unit_tests/chains/test_base.py b/tests/unit_tests/chains/test_base.py index 2ab19c31..8fcfa918 100644 --- a/tests/unit_tests/chains/test_base.py +++ b/tests/unit_tests/chains/test_base.py @@ -11,11 +11,12 @@ class FakeChain(Chain, BaseModel): """Fake chain class for testing purposes.""" be_correct: bool = True + the_input_keys: List[str] = ["foo"] @property def input_keys(self) -> List[str]: - """Input key of foo.""" - return ["foo"] + """Input keys.""" + return self.the_input_keys @property def output_keys(self) -> List[str]: @@ -48,3 +49,17 @@ def test_correct_call() -> None: chain = FakeChain() output = chain({"foo": "bar"}) assert output == {"foo": "bar", "bar": "baz"} + + +def test_single_input_correct() -> None: + """Test passing single input works.""" + chain = FakeChain() + output = chain("bar") + assert output == {"foo": "bar", "bar": "baz"} + + +def test_single_input_error() -> None: + """Test passing single input errors as expected.""" + chain = FakeChain(the_input_keys=["foo", "bar"]) + with pytest.raises(ValueError): + chain("bar") From 78b31e596611d2a55dc719c640532290b42ca535 Mon Sep 17 00:00:00 2001 From: Harrison Chase Date: Thu, 15 Dec 2022 07:53:32 -0800 Subject: [PATCH 19/29] Harrison/cache (#343) --- docs/examples/prompts/custom_llm.ipynb | 23 +- docs/examples/prompts/llm_functionality.ipynb | 198 ++++++++++++++++-- langchain/__init__.py | 4 + langchain/cache.py | 108 ++++++++++ langchain/llms/ai21.py | 2 +- langchain/llms/base.py | 60 +++++- langchain/llms/cohere.py | 2 +- langchain/llms/huggingface_hub.py | 2 +- langchain/llms/manifest.py | 2 +- langchain/llms/nlpcloud.py | 2 +- langchain/llms/openai.py | 7 +- langchain/schema.py | 8 + tests/unit_tests/agents/test_agent.py | 2 +- tests/unit_tests/agents/test_react.py | 2 +- tests/unit_tests/chains/test_natbot.py | 2 +- tests/unit_tests/llms/fake_llm.py | 2 +- 16 files changed, 380 insertions(+), 46 deletions(-) create mode 100644 langchain/cache.py diff --git a/docs/examples/prompts/custom_llm.ipynb b/docs/examples/prompts/custom_llm.ipynb index fd9e6a7a..bb3938aa 100644 --- a/docs/examples/prompts/custom_llm.ipynb +++ b/docs/examples/prompts/custom_llm.ipynb @@ -11,7 +11,7 @@ "\n", "There is only one required thing that a custom LLM needs to implement:\n", "\n", - "1. A `__call__` method that takes in a string, some optional stop words, and returns a string\n", + "1. 
A `_call` method that takes in a string, some optional stop words, and returns a string\n", "\n", "There is a second optional thing it can implement:\n", "\n", @@ -33,17 +33,20 @@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": 7, "id": "d5ceff02", "metadata": {}, "outputs": [], "source": [ "class CustomLLM(LLM):\n", " \n", - " def __init__(self, n: int):\n", - " self.n = n\n", + " n: int\n", + " \n", + " @property\n", + " def _llm_type(self) -> str:\n", + " return \"custom\"\n", " \n", - " def __call__(self, prompt: str, stop: Optional[List[str]] = None) -> str:\n", + " def _call(self, prompt: str, stop: Optional[List[str]] = None) -> str:\n", " if stop is not None:\n", " raise ValueError(\"stop kwargs are not permitted.\")\n", " return prompt[:self.n]\n", @@ -64,7 +67,7 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": 8, "id": "10e5ece6", "metadata": {}, "outputs": [], @@ -74,7 +77,7 @@ }, { "cell_type": "code", - "execution_count": 4, + "execution_count": 9, "id": "8cd49199", "metadata": {}, "outputs": [ @@ -84,7 +87,7 @@ "'This is a '" ] }, - "execution_count": 4, + "execution_count": 9, "metadata": {}, "output_type": "execute_result" } @@ -103,7 +106,7 @@ }, { "cell_type": "code", - "execution_count": 5, + "execution_count": 10, "id": "9c33fa19", "metadata": {}, "outputs": [ @@ -145,7 +148,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.6" + "version": "3.10.8" } }, "nbformat": 4, diff --git a/docs/examples/prompts/llm_functionality.ipynb b/docs/examples/prompts/llm_functionality.ipynb index 05eec44e..a4b00eef 100644 --- a/docs/examples/prompts/llm_functionality.ipynb +++ b/docs/examples/prompts/llm_functionality.ipynb @@ -81,7 +81,7 @@ }, { "cell_type": "code", - "execution_count": 7, + "execution_count": 5, "id": "740392f6", "metadata": {}, "outputs": [ @@ -91,7 +91,7 @@ "30" ] }, - "execution_count": 7, + "execution_count": 5, "metadata": {}, "output_type": "execute_result" } @@ -102,18 +102,18 @@ }, { "cell_type": "code", - "execution_count": 10, + "execution_count": 6, "id": "ab6cdcf1", "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "[Generation(text='\\n\\nWhy did the chicken cross the road?\\n\\nTo get to the other side!'),\n", - " Generation(text='\\n\\nWhy did the chicken cross the road?\\n\\nTo get to the other side!')]" + "[Generation(text='\\n\\nWhy did the chicken cross the road?\\n\\nTo get to the other side.'),\n", + " Generation(text='\\n\\nWhy did the chicken cross the road?\\n\\nTo get to the other side.')]" ] }, - "execution_count": 10, + "execution_count": 6, "metadata": {}, "output_type": "execute_result" } @@ -124,18 +124,18 @@ }, { "cell_type": "code", - "execution_count": 9, + "execution_count": 7, "id": "4946a778", "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "[Generation(text='\\n\\nHow can I make you my\\nx- Multiplayer gamemode\\n\\nHow can I make you my\\nx- Multiplayer gamemode\\n\\nHow can I make you my\\nx- Multiplayer gamemode\\n\\nHow can I make you my\\nx- Multiplayer gamemode\\n\\nHow can I make you my\\nx- Multiplayer gamemode\\n\\nHow can I make you my\\nx- Multiplayer gamemode\\n\\nHow can I make you my\\nx- Multiplayer gamemode\\n\\nHow can I make you my\\nx- Multiplayer gamemode\\n\\nHow can I make you my\\nx- Multiplayer gamemode\\n\\nHow can I make you my\\nx- Multiplayer gamemode\\n\\nHow can I make you my\\nx- Multiplayer gamemode\\n\\nHow can I make you my\\nx- Multiplayer gamemode'),\n", - " 
Generation(text=\"\\n\\nWhen I was younger\\nI thought that love\\nI was something like a fairytale\\nI would find my prince\\nAnd we would be together\\nForever\\nI was naïve\\nAnd I was wrong\\nLove is not a fairytale\\nIt's something else entirely\\nSomething that should be cherished\\nAnd loved\\nAnd never taken for granted\\nLove is something that you have to work for\\nIt doesn't come easy\\nYou have to sacrifice\\nYour time, your effort\\nAnd sometimes you have to give up \\nYou have to do what's best for yourself\\nAnd sometimes that means giving love up\")]" + "[Generation(text=\"\\n\\nA rose by the side of the road\\n\\nIs all I need to find my way\\n\\nTo the place I've been searching for\\n\\nAnd my heart is singing with joy\\n\\nWhen I look at this rose\\n\\nIt reminds me of the love I've found\\n\\nAnd I know that wherever I go\\n\\nI'll always find my rose by the side of the road.\"),\n", + " Generation(text=\"\\n\\nA rose by the side of the road\\n\\nIs all I need to find my way\\n\\nTo the place I've been searching for\\n\\nAnd my heart is singing with joy\\n\\nWhen I look at this rose\\n\\nIt tells me that true love is nigh\\n\\nAnd I know that this is the day\\n\\nWhen I look at this rose\\n\\nI am sure of what I am doing\\n\\nWhen I look at this rose\\n\\nI am confident in my love for you\\n\\nAnd I know that I am in love with you\\n\\nSo let it be, the rose by the side of the road\\n\\nAnd let it be what you do, what you are\\n\\nAnd you do it well, for this is what we want\\n\\nAnd we want to be with you\\n\\nAnd we want to be with you\\n\\nAnd we want to be with you\\n\\nWhen we find our way home\")]" ] }, - "execution_count": 9, + "execution_count": 7, "metadata": {}, "output_type": "execute_result" } @@ -153,9 +153,9 @@ { "data": { "text/plain": [ - "{'token_usage': {'completion_tokens': 3721,\n", + "{'token_usage': {'completion_tokens': 4108,\n", " 'prompt_tokens': 120,\n", - " 'total_tokens': 3841}}" + " 'total_tokens': 4228}}" ] }, "execution_count": 8, @@ -180,7 +180,7 @@ }, { "cell_type": "code", - "execution_count": 8, + "execution_count": 9, "id": "b623c774", "metadata": {}, "outputs": [ @@ -197,7 +197,7 @@ "3" ] }, - "execution_count": 8, + "execution_count": 9, "metadata": {}, "output_type": "execute_result" } @@ -206,10 +206,178 @@ "llm.get_num_tokens(\"what a joke\")" ] }, + { + "cell_type": "markdown", + "id": "ee6fcf8d", + "metadata": {}, + "source": [ + "### Caching\n", + "With LangChain, you can also enable caching of LLM calls. Note that currently this only applies for individual LLM calls." 
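One detail worth keeping in mind for the caching cells below: the cache key is the prompt plus the serialized LLM parameters (including `stop`), so two wrappers configured differently do not share entries. A small sketch of that behaviour, assuming an OpenAI API key is configured:

    import langchain
    from langchain.cache import InMemoryCache
    from langchain.llms import OpenAI

    langchain.llm_cache = InMemoryCache()

    llm_a = OpenAI(model_name="text-davinci-002", temperature=0)
    llm_b = OpenAI(model_name="text-davinci-002", temperature=0.9)

    llm_a("Tell me a joke")   # miss: calls the API and stores the result
    llm_a("Tell me a joke")   # hit: same prompt, same parameters
    llm_b("Tell me a joke")   # miss: the key includes the LLM's parameters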
+ ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "2626ca48", + "metadata": {}, + "outputs": [], + "source": [ + "import langchain\n", + "from langchain.cache import InMemoryCache\n", + "langchain.llm_cache = InMemoryCache()" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "97762272", + "metadata": {}, + "outputs": [], + "source": [ + "# To make the caching really obvious, lets use a slower model.\n", + "llm = OpenAI(model_name=\"text-davinci-002\", n=2, best_of=2)" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "e80c65e4", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "CPU times: user 31.2 ms, sys: 11.8 ms, total: 43.1 ms\n", + "Wall time: 1.75 s\n" + ] + }, + { + "data": { + "text/plain": [ + "'\\n\\nWhy did the chicken cross the road?\\n\\nTo get to the other side!'" + ] + }, + "execution_count": 5, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "%%time\n", + "# The first time, it is not yet in cache, so it should take longer\n", + "llm(\"Tell me a joke\")" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "id": "678408ec", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "CPU times: user 51 µs, sys: 1 µs, total: 52 µs\n", + "Wall time: 67.2 µs\n" + ] + }, + { + "data": { + "text/plain": [ + "'\\n\\nWhy did the chicken cross the road?\\n\\nTo get to the other side!'" + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "%%time\n", + "# The second time it is, so it goes faster\n", + "llm(\"Tell me a joke\")" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "id": "3f0ac8d2", + "metadata": {}, + "outputs": [], + "source": [ + "# We can do the same thing with a SQLite cache\n", + "from langchain.cache import SQLiteCache\n", + "langchain.llm_cache = SQLiteCache(database_path=\".langchain.db\")" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "id": "0e1dcce3", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "CPU times: user 26.6 ms, sys: 11.2 ms, total: 37.7 ms\n", + "Wall time: 1.89 s\n" + ] + }, + { + "data": { + "text/plain": [ + "'\\n\\nWhy did the chicken cross the road?\\n\\nTo get to the other side.'" + ] + }, + "execution_count": 8, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "%%time\n", + "# The first time, it is not yet in cache, so it should take longer\n", + "llm(\"Tell me a joke\")" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "id": "efadd750", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "CPU times: user 2.69 ms, sys: 1.57 ms, total: 4.27 ms\n", + "Wall time: 2.73 ms\n" + ] + }, + { + "data": { + "text/plain": [ + "'\\n\\nWhy did the chicken cross the road?\\n\\nTo get to the other side.'" + ] + }, + "execution_count": 9, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "%%time\n", + "# The second time it is, so it goes faster\n", + "llm(\"Tell me a joke\")" + ] + }, { "cell_type": "code", "execution_count": null, - "id": "4196efd9", + "id": "6053408b", "metadata": {}, "outputs": [], "source": [] diff --git a/langchain/__init__.py b/langchain/__init__.py index 31ec09eb..a32ffa95 100644 --- a/langchain/__init__.py +++ b/langchain/__init__.py @@ -1,6 +1,9 @@ """Main entrypoint into package.""" +from typing import Optional 
+ from langchain.agents import MRKLChain, ReActChain, SelfAskWithSearchChain +from langchain.cache import BaseCache from langchain.chains import ( ConversationChain, LLMBashChain, @@ -28,6 +31,7 @@ from langchain.vectorstores import FAISS, ElasticVectorSearch logger: BaseLogger = StdOutLogger() verbose: bool = False +llm_cache: Optional[BaseCache] = None __all__ = [ "LLMChain", diff --git a/langchain/cache.py b/langchain/cache.py new file mode 100644 index 00000000..86a9dc6b --- /dev/null +++ b/langchain/cache.py @@ -0,0 +1,108 @@ +"""Beta Feature: base interface for cache.""" +from abc import ABC, abstractmethod +from typing import Dict, List, Optional, Tuple, Union + +from sqlalchemy import Column, Integer, String, create_engine, select +from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.orm import Session + +from langchain.schema import Generation + +RETURN_VAL_TYPE = Union[List[Generation], str] + + +class BaseCache(ABC): + """Base interface for cache.""" + + @abstractmethod + def lookup(self, prompt: str, llm_string: str) -> Optional[RETURN_VAL_TYPE]: + """Look up based on prompt and llm_string.""" + + @abstractmethod + def update(self, prompt: str, llm_string: str, return_val: RETURN_VAL_TYPE) -> None: + """Update cache based on prompt and llm_string.""" + + +class InMemoryCache(BaseCache): + """Cache that stores things in memory.""" + + def __init__(self) -> None: + """Initialize with empty cache.""" + self._cache: Dict[Tuple[str, str], RETURN_VAL_TYPE] = {} + + def lookup(self, prompt: str, llm_string: str) -> Optional[RETURN_VAL_TYPE]: + """Look up based on prompt and llm_string.""" + return self._cache.get((prompt, llm_string), None) + + def update(self, prompt: str, llm_string: str, return_val: RETURN_VAL_TYPE) -> None: + """Update cache based on prompt and llm_string.""" + self._cache[(prompt, llm_string)] = return_val + + +Base = declarative_base() + + +class LLMCache(Base): # type: ignore + """SQLite table for simple LLM cache (string only).""" + + __tablename__ = "llm_cache" + prompt = Column(String, primary_key=True) + llm = Column(String, primary_key=True) + response = Column(String) + + +class FullLLMCache(Base): # type: ignore + """SQLite table for full LLM Cache (all generations).""" + + __tablename__ = "full_llm_cache" + prompt = Column(String, primary_key=True) + llm = Column(String, primary_key=True) + idx = Column(Integer, primary_key=True) + response = Column(String) + + +class SQLiteCache(BaseCache): + """Cache that uses SQLite as a backend.""" + + def __init__(self, database_path: str = ".langchain.db"): + """Initialize by creating the engine and all tables.""" + self.engine = create_engine(f"sqlite:///{database_path}") + Base.metadata.create_all(self.engine) + + def lookup(self, prompt: str, llm_string: str) -> Optional[RETURN_VAL_TYPE]: + """Look up based on prompt and llm_string.""" + stmt = ( + select(FullLLMCache.response) + .where(FullLLMCache.prompt == prompt) + .where(FullLLMCache.llm == llm_string) + .order_by(FullLLMCache.idx) + ) + with Session(self.engine) as session: + generations = [] + for row in session.execute(stmt): + generations.append(Generation(text=row[0])) + if len(generations) > 0: + return generations + stmt = ( + select(LLMCache.response) + .where(LLMCache.prompt == prompt) + .where(LLMCache.llm == llm_string) + ) + with Session(self.engine) as session: + for row in session.execute(stmt): + return row[0] + return None + + def update(self, prompt: str, llm_string: str, return_val: RETURN_VAL_TYPE) -> None: + """Look 
up based on prompt and llm_string.""" + if isinstance(return_val, str): + item = LLMCache(prompt=prompt, llm=llm_string, response=return_val) + with Session(self.engine) as session, session.begin(): + session.add(item) + else: + for i, generation in enumerate(return_val): + item = FullLLMCache( + prompt=prompt, llm=llm_string, response=generation.text, idx=i + ) + with Session(self.engine) as session, session.begin(): + session.add(item) diff --git a/langchain/llms/ai21.py b/langchain/llms/ai21.py index 3678473d..77a9300d 100644 --- a/langchain/llms/ai21.py +++ b/langchain/llms/ai21.py @@ -101,7 +101,7 @@ class AI21(LLM, BaseModel): """Return type of llm.""" return "ai21" - def __call__(self, prompt: str, stop: Optional[List[str]] = None) -> str: + def _call(self, prompt: str, stop: Optional[List[str]] = None) -> str: """Call out to AI21's complete endpoint. Args: diff --git a/langchain/llms/base.py b/langchain/llms/base.py index 0eb8b73b..3b030838 100644 --- a/langchain/llms/base.py +++ b/langchain/llms/base.py @@ -7,13 +7,8 @@ from typing import Any, Dict, List, Mapping, NamedTuple, Optional, Union import yaml from pydantic import BaseModel, Extra - -class Generation(NamedTuple): - """Output of a single generation.""" - - text: str - """Generated text output.""" - # TODO: add log probs +import langchain +from langchain.schema import Generation class LLMResult(NamedTuple): @@ -34,16 +29,44 @@ class LLM(BaseModel, ABC): extra = Extra.forbid - def generate( + def _generate( self, prompts: List[str], stop: Optional[List[str]] = None ) -> LLMResult: """Run the LLM on the given prompt and input.""" + # TODO: add caching here. generations = [] for prompt in prompts: text = self(prompt, stop=stop) generations.append([Generation(text=text)]) return LLMResult(generations=generations) + def generate( + self, prompts: List[str], stop: Optional[List[str]] = None + ) -> LLMResult: + """Run the LLM on the given prompt and input.""" + if langchain.llm_cache is None: + return self._generate(prompts, stop=stop) + params = self._llm_dict() + params["stop"] = stop + llm_string = str(sorted([(k, v) for k, v in params.items()])) + missing_prompts = [] + missing_prompt_idxs = [] + existing_prompts = {} + for i, prompt in enumerate(prompts): + cache_val = langchain.llm_cache.lookup(prompt, llm_string) + if isinstance(cache_val, list): + existing_prompts[i] = cache_val + else: + missing_prompts.append(prompt) + missing_prompt_idxs.append(i) + new_results = self._generate(missing_prompts, stop=stop) + for i, result in enumerate(new_results.generations): + existing_prompts[i] = result + prompt = prompts[i] + langchain.llm_cache.update(prompt, llm_string, result) + generations = [existing_prompts[i] for i in range(len(prompts))] + return LLMResult(generations=generations, llm_output=new_results.llm_output) + def get_num_tokens(self, text: str) -> int: """Get the number of tokens present in the text.""" # TODO: this method may not be exact. 
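The caching paths in `generate` (above) and `__call__` (next hunk) only interact with the cache through `lookup` and `update`, so any object implementing those two `BaseCache` methods can be assigned to `langchain.llm_cache`. A sketch of a custom backend (functionally the same as `InMemoryCache`, shown only to make the interface concrete):

    import langchain
    from langchain.cache import BaseCache

    class DictCache(BaseCache):
        """Toy cache backend keyed on (prompt, llm_string)."""

        def __init__(self) -> None:
            self._store = {}

        def lookup(self, prompt, llm_string):
            # Returning None signals a miss, so the LLM call proceeds as normal.
            return self._store.get((prompt, llm_string))

        def update(self, prompt, llm_string, return_val):
            self._store[(prompt, llm_string)] = return_val

    langchain.llm_cache = DictCache()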
@@ -66,9 +89,28 @@ class LLM(BaseModel, ABC): return len(tokenized_text) @abstractmethod - def __call__(self, prompt: str, stop: Optional[List[str]] = None) -> str: + def _call(self, prompt: str, stop: Optional[List[str]] = None) -> str: """Run the LLM on the given prompt and input.""" + def __call__(self, prompt: str, stop: Optional[List[str]] = None) -> str: + """Check Cache and run the LLM on the given prompt and input.""" + if langchain.llm_cache is None: + return self._call(prompt, stop=stop) + params = self._llm_dict() + params["stop"] = stop + llm_string = str(sorted([(k, v) for k, v in params.items()])) + if langchain.cache is not None: + cache_val = langchain.llm_cache.lookup(prompt, llm_string) + if cache_val is not None: + if isinstance(cache_val, str): + return cache_val + else: + return cache_val[0].text + return_val = self._call(prompt, stop=stop) + if langchain.cache is not None: + langchain.llm_cache.update(prompt, llm_string, return_val) + return return_val + @property def _identifying_params(self) -> Mapping[str, Any]: """Get the identifying parameters.""" diff --git a/langchain/llms/cohere.py b/langchain/llms/cohere.py index 3853cfb9..d9a3f51a 100644 --- a/langchain/llms/cohere.py +++ b/langchain/llms/cohere.py @@ -90,7 +90,7 @@ class Cohere(LLM, BaseModel): """Return type of llm.""" return "cohere" - def __call__(self, prompt: str, stop: Optional[List[str]] = None) -> str: + def _call(self, prompt: str, stop: Optional[List[str]] = None) -> str: """Call out to Cohere's generate endpoint. Args: diff --git a/langchain/llms/huggingface_hub.py b/langchain/llms/huggingface_hub.py index 74f0545a..ef53275d 100644 --- a/langchain/llms/huggingface_hub.py +++ b/langchain/llms/huggingface_hub.py @@ -84,7 +84,7 @@ class HuggingFaceHub(LLM, BaseModel): """Return type of llm.""" return "huggingface_hub" - def __call__(self, prompt: str, stop: Optional[List[str]] = None) -> str: + def _call(self, prompt: str, stop: Optional[List[str]] = None) -> str: """Call out to HuggingFace Hub's inference endpoint. Args: diff --git a/langchain/llms/manifest.py b/langchain/llms/manifest.py index 665f0c5b..b9a4ce14 100644 --- a/langchain/llms/manifest.py +++ b/langchain/llms/manifest.py @@ -42,7 +42,7 @@ class ManifestWrapper(LLM, BaseModel): """Return type of llm.""" return "manifest" - def __call__(self, prompt: str, stop: Optional[List[str]] = None) -> str: + def _call(self, prompt: str, stop: Optional[List[str]] = None) -> str: """Call out to LLM through Manifest.""" if stop is not None and len(stop) != 1: raise NotImplementedError( diff --git a/langchain/llms/nlpcloud.py b/langchain/llms/nlpcloud.py index ae3dc07b..94f0df7d 100644 --- a/langchain/llms/nlpcloud.py +++ b/langchain/llms/nlpcloud.py @@ -111,7 +111,7 @@ class NLPCloud(LLM, BaseModel): """Return type of llm.""" return "nlpcloud" - def __call__(self, prompt: str, stop: Optional[List[str]] = None) -> str: + def _call(self, prompt: str, stop: Optional[List[str]] = None) -> str: """Call out to NLPCloud's create endpoint. 
Args: diff --git a/langchain/llms/openai.py b/langchain/llms/openai.py index 2e9485ca..ac137c7a 100644 --- a/langchain/llms/openai.py +++ b/langchain/llms/openai.py @@ -3,7 +3,8 @@ from typing import Any, Dict, List, Mapping, Optional from pydantic import BaseModel, Extra, Field, root_validator -from langchain.llms.base import LLM, Generation, LLMResult +from langchain.llms.base import LLM, LLMResult +from langchain.schema import Generation from langchain.utils import get_from_dict_or_env @@ -99,7 +100,7 @@ class OpenAI(LLM, BaseModel): } return {**normal_params, **self.model_kwargs} - def generate( + def _generate( self, prompts: List[str], stop: Optional[List[str]] = None ) -> LLMResult: """Call out to OpenAI's endpoint with k unique prompts. @@ -168,7 +169,7 @@ class OpenAI(LLM, BaseModel): """Return type of llm.""" return "openai" - def __call__(self, prompt: str, stop: Optional[List[str]] = None) -> str: + def _call(self, prompt: str, stop: Optional[List[str]] = None) -> str: """Call out to OpenAI's create endpoint. Args: diff --git a/langchain/schema.py b/langchain/schema.py index 67a64c01..4e255e3e 100644 --- a/langchain/schema.py +++ b/langchain/schema.py @@ -9,3 +9,11 @@ class AgentAction(NamedTuple): tool: str tool_input: str log: str + + +class Generation(NamedTuple): + """Output of a single generation.""" + + text: str + """Generated text output.""" + # TODO: add log probs diff --git a/tests/unit_tests/agents/test_agent.py b/tests/unit_tests/agents/test_agent.py index d407f4b4..48d0e61f 100644 --- a/tests/unit_tests/agents/test_agent.py +++ b/tests/unit_tests/agents/test_agent.py @@ -14,7 +14,7 @@ class FakeListLLM(LLM, BaseModel): responses: List[str] i: int = -1 - def __call__(self, prompt: str, stop: Optional[List[str]] = None) -> str: + def _call(self, prompt: str, stop: Optional[List[str]] = None) -> str: """Increment counter, and then return response in that index.""" self.i += 1 print(self.i) diff --git a/tests/unit_tests/agents/test_react.py b/tests/unit_tests/agents/test_react.py index 917fa745..f3dc8da5 100644 --- a/tests/unit_tests/agents/test_react.py +++ b/tests/unit_tests/agents/test_react.py @@ -33,7 +33,7 @@ class FakeListLLM(LLM, BaseModel): """Return type of llm.""" return "fake_list" - def __call__(self, prompt: str, stop: Optional[List[str]] = None) -> str: + def _call(self, prompt: str, stop: Optional[List[str]] = None) -> str: """Increment counter, and then return response in that index.""" self.i += 1 return self.responses[self.i] diff --git a/tests/unit_tests/chains/test_natbot.py b/tests/unit_tests/chains/test_natbot.py index a9817f71..0beaa409 100644 --- a/tests/unit_tests/chains/test_natbot.py +++ b/tests/unit_tests/chains/test_natbot.py @@ -11,7 +11,7 @@ from langchain.llms.base import LLM class FakeLLM(LLM, BaseModel): """Fake LLM wrapper for testing purposes.""" - def __call__(self, prompt: str, stop: Optional[List[str]] = None) -> str: + def _call(self, prompt: str, stop: Optional[List[str]] = None) -> str: """Return `foo` if longer than 10000 words, else `bar`.""" if len(prompt) > 10000: return "foo" diff --git a/tests/unit_tests/llms/fake_llm.py b/tests/unit_tests/llms/fake_llm.py index a3896b6f..dd8b3462 100644 --- a/tests/unit_tests/llms/fake_llm.py +++ b/tests/unit_tests/llms/fake_llm.py @@ -16,7 +16,7 @@ class FakeLLM(LLM, BaseModel): """Return type of llm.""" return "fake" - def __call__(self, prompt: str, stop: Optional[List[str]] = None) -> str: + def _call(self, prompt: str, stop: Optional[List[str]] = None) -> str: """First try to 
lookup in queries, else return 'foo' or 'bar'.""" if self.queries is not None: return self.queries[prompt] From 428508bd75c3bae68108e26ccb3d70e12e13be03 Mon Sep 17 00:00:00 2001 From: Harrison Chase Date: Thu, 15 Dec 2022 08:27:20 -0800 Subject: [PATCH 20/29] bump version to 0.0.38 (#349) --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 7a2292ba..2c59151f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "langchain" -version = "0.0.37" +version = "0.0.38" description = "Building applications with LLMs through composability" authors = [] license = "MIT" From ed143b598f48f834b6a1829c80753f5bbf43d5a7 Mon Sep 17 00:00:00 2001 From: Harrison Chase Date: Thu, 15 Dec 2022 17:01:39 -0800 Subject: [PATCH 21/29] improve openai embeddings (#351) add more formal support for explicitly specifying each model, but in a backwards compatible way --- langchain/embeddings/openai.py | 31 ++++++++++++++++++++++++------- 1 file changed, 24 insertions(+), 7 deletions(-) diff --git a/langchain/embeddings/openai.py b/langchain/embeddings/openai.py index 864e7758..205d028c 100644 --- a/langchain/embeddings/openai.py +++ b/langchain/embeddings/openai.py @@ -22,9 +22,8 @@ class OpenAIEmbeddings(BaseModel, Embeddings): """ client: Any #: :meta private: - model_name: str = "babbage" - """Model name to use.""" - + document_model_name: str = "text-embedding-ada-002" + query_model_name: str = "text-embedding-ada-002" openai_api_key: Optional[str] = None class Config: @@ -32,6 +31,26 @@ class OpenAIEmbeddings(BaseModel, Embeddings): extra = Extra.forbid + # TODO: deprecate this + @root_validator(pre=True) + def get_model_names(cls, values: Dict) -> Dict: + """Get model names from just old model name.""" + if "model_name" in values: + if "document_model_name" in values: + raise ValueError( + "Both `model_name` and `document_model_name` were provided, " + "but only one should be." + ) + if "query_model_name" in values: + raise ValueError( + "Both `model_name` and `query_model_name` were provided, " + "but only one should be." + ) + model_name = values.pop("model_name") + values["document_model_name"] = f"text-search-{model_name}-doc-001" + values["query_model_name"] = f"text-search-{model_name}-query-001" + return values + @root_validator() def validate_environment(cls, values: Dict) -> Dict: """Validate that api key and python package exists in environment.""" @@ -66,7 +85,7 @@ class OpenAIEmbeddings(BaseModel, Embeddings): List of embeddings, one for each text. """ responses = [ - self._embedding_func(text, engine=f"text-search-{self.model_name}-doc-001") + self._embedding_func(text, engine=self.document_model_name) for text in texts ] return responses @@ -80,7 +99,5 @@ class OpenAIEmbeddings(BaseModel, Embeddings): Returns: Embeddings for the text. 
""" - embedding = self._embedding_func( - text, engine=f"text-search-{self.model_name}-query-001" - ) + embedding = self._embedding_func(text, engine=self.query_model_name) return embedding From c1b50b7b13545b1549c051182f547cfc2f8ed0be Mon Sep 17 00:00:00 2001 From: Harrison Chase Date: Thu, 15 Dec 2022 17:49:14 -0800 Subject: [PATCH 22/29] Harrison/map reduce merge (#344) Co-authored-by: John Nay --- docs/examples/chains/qa_with_sources.ipynb | 10 +- langchain/chains/combine_documents/base.py | 9 +- .../chains/combine_documents/map_reduce.py | 73 ++++++++++- langchain/chains/combine_documents/stuff.py | 16 ++- .../chains/test_combine_documents.py | 118 ++++++++++++++++++ 5 files changed, 217 insertions(+), 9 deletions(-) create mode 100644 tests/unit_tests/chains/test_combine_documents.py diff --git a/docs/examples/chains/qa_with_sources.ipynb b/docs/examples/chains/qa_with_sources.ipynb index 1135e86c..29454eae 100644 --- a/docs/examples/chains/qa_with_sources.ipynb +++ b/docs/examples/chains/qa_with_sources.ipynb @@ -159,6 +159,14 @@ "id": "e417926a", "metadata": {}, "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "None of PyTorch, TensorFlow >= 2.0, or Flax have been found. Models won't be available and only tokenizers, configuration and file/data utilities can be used.\n", + "Token indices sequence length is longer than the specified maximum sequence length for this model (1546 > 1024). Running this sequence through the model will result in indexing errors\n" + ] + }, { "data": { "text/plain": [ @@ -204,7 +212,7 @@ { "data": { "text/plain": [ - "{'output_text': \"\\n\\nThe president did not mention Justice Breyer in his speech to the European Parliament. He discussed the situation in Ukraine, the NATO Alliance, and the United States' response to Putin's attack on Ukraine. He spoke about the extensive preparation and coalition building that was done in advance of the attack, and the unified response from the European Union, Canada, Japan, Korea, Australia, New Zealand, and many other countries. He also discussed the economic sanctions that have been imposed on Russia, and the effects they have had on Putin's war fund. Source: 1, 2\"}" + "{'output_text': \"\\n\\nThe president did not mention Justice Breyer in his speech to the European Parliament, which focused on building a coalition of freedom-loving nations to confront Putin, unifying European allies, countering Russia's lies with truth, and enforcing powerful economic sanctions. Source: 2\"}" ] }, "execution_count": 12, diff --git a/langchain/chains/combine_documents/base.py b/langchain/chains/combine_documents/base.py index 26cbcb89..7d5574ca 100644 --- a/langchain/chains/combine_documents/base.py +++ b/langchain/chains/combine_documents/base.py @@ -1,7 +1,7 @@ """Base interface for chains combining documents.""" from abc import ABC, abstractmethod -from typing import Any, Dict, List +from typing import Any, Dict, List, Optional from pydantic import BaseModel @@ -31,6 +31,13 @@ class BaseCombineDocumentsChain(Chain, BaseModel, ABC): """ return [self.output_key] + def prompt_length(self, docs: List[Document], **kwargs: Any) -> Optional[int]: + """Return the prompt length given the documents passed in. + + Returns None if the method does not depend on the prompt length. 
+ """ + return None + @abstractmethod def combine_docs(self, docs: List[Document], **kwargs: Any) -> str: """Combine documents into a single string.""" diff --git a/langchain/chains/combine_documents/map_reduce.py b/langchain/chains/combine_documents/map_reduce.py index b182e9e5..ff74bc4f 100644 --- a/langchain/chains/combine_documents/map_reduce.py +++ b/langchain/chains/combine_documents/map_reduce.py @@ -2,7 +2,7 @@ from __future__ import annotations -from typing import Any, Dict, List +from typing import Any, Callable, Dict, List from pydantic import BaseModel, Extra, root_validator @@ -11,6 +11,47 @@ from langchain.chains.llm import LLMChain from langchain.docstore.document import Document +def _split_list_of_docs( + docs: List[Document], length_func: Callable, token_max: int, **kwargs: Any +) -> List[List[Document]]: + new_result_doc_list = [] + _sub_result_docs = [] + for doc in docs: + _sub_result_docs.append(doc) + _num_tokens = length_func(_sub_result_docs, **kwargs) + if _num_tokens > token_max: + if len(_sub_result_docs) == 1: + raise ValueError( + "A single document was longer than the context length," + " we cannot handle this." + ) + if len(_sub_result_docs) == 2: + raise ValueError( + "A single document was so long it could not be combined " + "with another document, we cannot handle this." + ) + new_result_doc_list.append(_sub_result_docs[:-1]) + _sub_result_docs = _sub_result_docs[-1:] + new_result_doc_list.append(_sub_result_docs) + return new_result_doc_list + + +def _collapse_docs( + docs: List[Document], + combine_document_func: Callable, + **kwargs: Any, +) -> Document: + result = combine_document_func(docs, **kwargs) + combined_metadata = {k: str(v) for k, v in docs[0].metadata.items()} + for doc in docs[1:]: + for k, v in doc.metadata.items(): + if k in combined_metadata: + combined_metadata[k] += f", {v}" + else: + combined_metadata[k] = str(v) + return Document(page_content=result, metadata=combined_metadata) + + class MapReduceDocumentsChain(BaseCombineDocumentsChain, BaseModel): """Combining documents by mapping a chain over them, then combining results.""" @@ -49,14 +90,38 @@ class MapReduceDocumentsChain(BaseCombineDocumentsChain, BaseModel): ) return values - def combine_docs(self, docs: List[Document], **kwargs: Any) -> str: - """Combine by mapping first chain over all, then stuffing into final chain.""" + def combine_docs( + self, docs: List[Document], token_max: int = 3000, **kwargs: Any + ) -> str: + """Combine documents in a map reduce manner. + + Combine by mapping first chain over all documents, then reducing the results. + This reducing can be done recursively if needed (if there are many documents). + """ results = self.llm_chain.apply( + # FYI - this is parallelized and so it is fast. 
[{**{self.document_variable_name: d.page_content}, **kwargs} for d in docs] ) question_result_key = self.llm_chain.output_key result_docs = [ Document(page_content=r[question_result_key], metadata=docs[i].metadata) + # This uses metadata from the docs, and the textual results from `results` for i, r in enumerate(results) ] - return self.combine_document_chain.combine_docs(result_docs, **kwargs) + length_func = self.combine_document_chain.prompt_length + num_tokens = length_func(result_docs, **kwargs) + while num_tokens is not None and num_tokens > token_max: + new_result_doc_list = _split_list_of_docs( + result_docs, length_func, token_max, **kwargs + ) + result_docs = [] + for docs in new_result_doc_list: + new_doc = _collapse_docs( + docs, self.combine_document_chain.combine_docs, **kwargs + ) + result_docs.append(new_doc) + num_tokens = self.combine_document_chain.prompt_length( + result_docs, **kwargs + ) + output = self.combine_document_chain.combine_docs(result_docs, **kwargs) + return output diff --git a/langchain/chains/combine_documents/stuff.py b/langchain/chains/combine_documents/stuff.py index 7b56c211..796de39e 100644 --- a/langchain/chains/combine_documents/stuff.py +++ b/langchain/chains/combine_documents/stuff.py @@ -1,6 +1,6 @@ """Chain that combines documents by stuffing into context.""" -from typing import Any, Dict, List +from typing import Any, Dict, List, Optional from pydantic import BaseModel, Extra, Field, root_validator @@ -55,8 +55,7 @@ class StuffDocumentsChain(BaseCombineDocumentsChain, BaseModel): ) return values - def combine_docs(self, docs: List[Document], **kwargs: Any) -> str: - """Stuff all documents into one prompt and pass to LLM.""" + def _get_inputs(self, docs: List[Document], **kwargs: Any) -> dict: # Get relevant information from each document. doc_dicts = [] for doc in docs: @@ -71,5 +70,16 @@ class StuffDocumentsChain(BaseCombineDocumentsChain, BaseModel): # Join the documents together to put them in the prompt. inputs = kwargs.copy() inputs[self.document_variable_name] = "\n\n".join(doc_strings) + return inputs + + def prompt_length(self, docs: List[Document], **kwargs: Any) -> Optional[int]: + """Get the prompt length by formatting the prompt.""" + inputs = self._get_inputs(docs, **kwargs) + prompt = self.llm_chain.prompt.format(**inputs) + return self.llm_chain.llm.get_num_tokens(prompt) + + def combine_docs(self, docs: List[Document], **kwargs: Any) -> str: + """Stuff all documents into one prompt and pass to LLM.""" + inputs = self._get_inputs(docs, **kwargs) # Call predict on the LLM. 
return self.llm_chain.predict(**inputs) diff --git a/tests/unit_tests/chains/test_combine_documents.py b/tests/unit_tests/chains/test_combine_documents.py new file mode 100644 index 00000000..81468f97 --- /dev/null +++ b/tests/unit_tests/chains/test_combine_documents.py @@ -0,0 +1,118 @@ +"""Test functionality related to combining documents.""" + +from typing import List + +import pytest + +from langchain.chains.combine_documents.map_reduce import ( + _collapse_docs, + _split_list_of_docs, +) +from langchain.docstore.document import Document + + +def _fake_docs_len_func(docs: List[Document]) -> int: + return len(_fake_combine_docs_func(docs)) + + +def _fake_combine_docs_func(docs: List[Document]) -> str: + return "".join([d.page_content for d in docs]) + + +def test__split_list_long_single_doc() -> None: + """Test splitting of a long single doc.""" + docs = [Document(page_content="foo" * 100)] + with pytest.raises(ValueError): + _split_list_of_docs(docs, _fake_docs_len_func, 100) + + +def test__split_list_long_pair_doc() -> None: + """Test splitting of a list with two medium docs.""" + docs = [Document(page_content="foo" * 30)] * 2 + with pytest.raises(ValueError): + _split_list_of_docs(docs, _fake_docs_len_func, 100) + + +def test__split_list_single_doc() -> None: + """Test splitting works with just a single doc.""" + docs = [Document(page_content="foo")] + doc_list = _split_list_of_docs(docs, _fake_docs_len_func, 100) + assert doc_list == [docs] + + +def test__split_list_double_doc() -> None: + """Test splitting works with just two docs.""" + docs = [Document(page_content="foo"), Document(page_content="bar")] + doc_list = _split_list_of_docs(docs, _fake_docs_len_func, 100) + assert doc_list == [docs] + + +def test__split_list_works_correctly() -> None: + """Test splitting works correctly.""" + docs = [ + Document(page_content="foo"), + Document(page_content="bar"), + Document(page_content="baz"), + Document(page_content="foo" * 2), + Document(page_content="bar"), + Document(page_content="baz"), + ] + doc_list = _split_list_of_docs(docs, _fake_docs_len_func, 10) + expected_result = [ + # Test a group of three. + [ + Document(page_content="foo"), + Document(page_content="bar"), + Document(page_content="baz"), + ], + # Test a group of two, where one is bigger. + [Document(page_content="foo" * 2), Document(page_content="bar")], + # Test no errors on last + [Document(page_content="baz")], + ] + assert doc_list == expected_result + + +def test__collapse_docs_no_metadata() -> None: + """Test collapse documents functionality when no metadata.""" + docs = [ + Document(page_content="foo"), + Document(page_content="bar"), + Document(page_content="baz"), + ] + output = _collapse_docs(docs, _fake_combine_docs_func) + expected_output = Document(page_content="foobarbaz") + assert output == expected_output + + +def test__collapse_docs_one_doc() -> None: + """Test collapse documents functionality when only one document present.""" + # Test with no metadata. + docs = [Document(page_content="foo")] + output = _collapse_docs(docs, _fake_combine_docs_func) + assert output == docs[0] + + # Test with metadata. 
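+    # Collapsing a single doc should return an equal doc, metadata included.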
+ docs = [Document(page_content="foo", metadata={"source": "a"})] + output = _collapse_docs(docs, _fake_combine_docs_func) + assert output == docs[0] + + +def test__collapse_docs_metadata() -> None: + """Test collapse documents functionality when metadata exists.""" + metadata1 = {"source": "a", "foo": 2, "bar": "1", "extra1": "foo"} + metadata2 = {"source": "b", "foo": "3", "bar": 2, "extra2": "bar"} + docs = [ + Document(page_content="foo", metadata=metadata1), + Document(page_content="bar", metadata=metadata2), + ] + output = _collapse_docs(docs, _fake_combine_docs_func) + expected_metadata = { + "source": "a, b", + "foo": "2, 3", + "bar": "1, 2", + "extra1": "foo", + "extra2": "bar", + } + expected_output = Document(page_content="foobar", metadata=expected_metadata) + assert output == expected_output From 2dd895d98cd6229c16d6e500abfbceb6e6987abe Mon Sep 17 00:00:00 2001 From: Harrison Chase Date: Thu, 15 Dec 2022 22:35:42 -0800 Subject: [PATCH 23/29] add openai tokenizer (#355) --- docs/examples/prompts/llm_functionality.ipynb | 17 +- langchain/llms/base.py | 2 +- langchain/llms/openai.py | 19 + langchain/text_splitter.py | 23 +- poetry.lock | 4736 +++++++++-------- pyproject.toml | 3 +- 6 files changed, 2504 insertions(+), 2296 deletions(-) diff --git a/docs/examples/prompts/llm_functionality.ipynb b/docs/examples/prompts/llm_functionality.ipynb index a4b00eef..251e22a6 100644 --- a/docs/examples/prompts/llm_functionality.ipynb +++ b/docs/examples/prompts/llm_functionality.ipynb @@ -49,7 +49,7 @@ { "data": { "text/plain": [ - "'\\n\\nWhy did the chicken cross the road?\\n\\nTo get to the other side.'" + "'\\n\\nWhy did the chicken cross the road?\\n\\nTo get to the other side!'" ] }, "execution_count": 3, @@ -110,7 +110,7 @@ "data": { "text/plain": [ "[Generation(text='\\n\\nWhy did the chicken cross the road?\\n\\nTo get to the other side.'),\n", - " Generation(text='\\n\\nWhy did the chicken cross the road?\\n\\nTo get to the other side.')]" + " Generation(text='\\n\\nWhy did the chicken cross the road?\\n\\nTo get to the other side!')]" ] }, "execution_count": 6, @@ -132,7 +132,7 @@ "data": { "text/plain": [ "[Generation(text=\"\\n\\nA rose by the side of the road\\n\\nIs all I need to find my way\\n\\nTo the place I've been searching for\\n\\nAnd my heart is singing with joy\\n\\nWhen I look at this rose\\n\\nIt reminds me of the love I've found\\n\\nAnd I know that wherever I go\\n\\nI'll always find my rose by the side of the road.\"),\n", - " Generation(text=\"\\n\\nA rose by the side of the road\\n\\nIs all I need to find my way\\n\\nTo the place I've been searching for\\n\\nAnd my heart is singing with joy\\n\\nWhen I look at this rose\\n\\nIt tells me that true love is nigh\\n\\nAnd I know that this is the day\\n\\nWhen I look at this rose\\n\\nI am sure of what I am doing\\n\\nWhen I look at this rose\\n\\nI am confident in my love for you\\n\\nAnd I know that I am in love with you\\n\\nSo let it be, the rose by the side of the road\\n\\nAnd let it be what you do, what you are\\n\\nAnd you do it well, for this is what we want\\n\\nAnd we want to be with you\\n\\nAnd we want to be with you\\n\\nAnd we want to be with you\\n\\nWhen we find our way home\")]" + " Generation(text=\"\\n\\nWhen I was younger\\nI thought that love\\nI was something like a fairytale\\nI would find my prince and they would be my people\\nI was naïve\\nI thought that\\n\\nLove was a something that happened\\nWhen I was younger\\nI was it for my fairytale prince\\nNow I realize\\nThat love is something 
that waits\\nFor when my prince comes\\nAnd when I am ready to be his wife\\nI'll tell you a poem\\n\\nWhen I was younger\\nI thought that love\\nI was something like a fairytale\\nI would find my prince and they would be my people\\nI was naïve\\nI thought that\\n\\nLove was a something that happened\\nAnd I would be happy\\nWhen my prince came\\nAnd I was ready to be his wife\")]" ] }, "execution_count": 7, @@ -153,9 +153,9 @@ { "data": { "text/plain": [ - "{'token_usage': {'completion_tokens': 4108,\n", + "{'token_usage': {'completion_tokens': 3722,\n", " 'prompt_tokens': 120,\n", - " 'total_tokens': 4228}}" + " 'total_tokens': 3842}}" ] }, "execution_count": 8, @@ -184,13 +184,6 @@ "id": "b623c774", "metadata": {}, "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "None of PyTorch, TensorFlow >= 2.0, or Flax have been found. Models won't be available and only tokenizers, configuration and file/data utilities can be used.\n" - ] - }, { "data": { "text/plain": [ diff --git a/langchain/llms/base.py b/langchain/llms/base.py index 3b030838..4488475d 100644 --- a/langchain/llms/base.py +++ b/langchain/llms/base.py @@ -76,7 +76,7 @@ class LLM(BaseModel, ABC): except ImportError: raise ValueError( "Could not import transformers python package. " - "This is needed in order to calculate max_tokens_for_prompt. " + "This is needed in order to calculate get_num_tokens. " "Please it install it with `pip install transformers`." ) # create a GPT-3 tokenizer instance diff --git a/langchain/llms/openai.py b/langchain/llms/openai.py index ac137c7a..c639db63 100644 --- a/langchain/llms/openai.py +++ b/langchain/llms/openai.py @@ -186,6 +186,25 @@ class OpenAI(LLM, BaseModel): """ return self.generate([prompt], stop=stop).generations[0][0].text + def get_num_tokens(self, text: str) -> int: + """Calculate num tokens with tiktoken package.""" + try: + import tiktoken + except ImportError: + raise ValueError( + "Could not import tiktoken python package. " + "This is needed in order to calculate get_num_tokens. " + "Please it install it with `pip install tiktoken`." + ) + # create a GPT-3 encoder instance + enc = tiktoken.get_encoding("gpt2") + + # encode the text using the GPT-3 encoder + tokenized_text = enc.encode(text) + + # calculate the number of tokens in the encoded text + return len(tokenized_text) + def modelname_to_contextsize(self, modelname: str) -> int: """Calculate the maximum number of tokens possible to generate for a model. diff --git a/langchain/text_splitter.py b/langchain/text_splitter.py index c2da1745..daf720e1 100644 --- a/langchain/text_splitter.py +++ b/langchain/text_splitter.py @@ -49,7 +49,7 @@ class TextSplitter(ABC): @classmethod def from_huggingface_tokenizer(cls, tokenizer: Any, **kwargs: Any) -> TextSplitter: - """Text splitter than uses HuggingFace tokenizer to count length.""" + """Text splitter that uses HuggingFace tokenizer to count length.""" try: from transformers import PreTrainedTokenizerBase @@ -68,6 +68,27 @@ class TextSplitter(ABC): ) return cls(length_function=_huggingface_tokenizer_length, **kwargs) + @classmethod + def from_tiktoken_encoder( + cls, encoding_name: str = "gpt2", **kwargs: Any + ) -> TextSplitter: + """Text splitter that uses tiktoken encoder to count length.""" + try: + import tiktoken + except ImportError: + raise ValueError( + "Could not import tiktoken python package. " + "This is needed in order to calculate max_tokens_for_prompt. " + "Please it install it with `pip install tiktoken`." 
+ ) + # create a GPT-3 encoder instance + enc = tiktoken.get_encoding(encoding_name) + + def _tiktoken_encoder(text: str) -> int: + return len(enc.encode(text)) + + return cls(length_function=_tiktoken_encoder, **kwargs) + class CharacterTextSplitter(TextSplitter): """Implementation of splitting text that looks at characters.""" diff --git a/poetry.lock b/poetry.lock index 8d23f0fb..71d31f41 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,5 +1,3 @@ -# This file is automatically @generated by Poetry and should not be changed by hand. - [[package]] name = "anyio" version = "3.6.2" @@ -7,10 +5,6 @@ description = "High level compatibility layer for multiple asynchronous event lo category = "dev" optional = false python-versions = ">=3.6.2" -files = [ - {file = "anyio-3.6.2-py3-none-any.whl", hash = "sha256:fbbe32bd270d2a2ef3ed1c5d45041250284e31fc0a4df4a5a6071842051a51e3"}, - {file = "anyio-3.6.2.tar.gz", hash = "sha256:25ea0d673ae30af41a0c442f81cf3b38c7e79fdc7b60335a4c14e05eb0947421"}, -] [package.dependencies] idna = ">=2.8" @@ -28,10 +22,6 @@ description = "Disable App Nap on macOS >= 10.9" category = "dev" optional = false python-versions = "*" -files = [ - {file = "appnope-0.1.3-py2.py3-none-any.whl", hash = "sha256:265a455292d0bd8a72453494fa24df5a11eb18373a60c7c0430889f22548605e"}, - {file = "appnope-0.1.3.tar.gz", hash = "sha256:02bd91c4de869fbb1e1c50aafc4098827a7a54ab2f39d9dcba6c9547ed920e24"}, -] [[package]] name = "argon2-cffi" @@ -40,10 +30,6 @@ description = "The secure Argon2 password hashing algorithm." category = "dev" optional = false python-versions = ">=3.6" -files = [ - {file = "argon2-cffi-21.3.0.tar.gz", hash = "sha256:d384164d944190a7dd7ef22c6aa3ff197da12962bd04b17f64d4e93d934dba5b"}, - {file = "argon2_cffi-21.3.0-py3-none-any.whl", hash = "sha256:8c976986f2c5c0e5000919e6de187906cfd81fb1c72bf9d88c01177e77da7f80"}, -] [package.dependencies] argon2-cffi-bindings = "*" @@ -60,29 +46,6 @@ description = "Low-level CFFI bindings for Argon2" category = "dev" optional = false python-versions = ">=3.6" -files = [ - {file = "argon2-cffi-bindings-21.2.0.tar.gz", hash = "sha256:bb89ceffa6c791807d1305ceb77dbfacc5aa499891d2c55661c6459651fc39e3"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ccb949252cb2ab3a08c02024acb77cfb179492d5701c7cbdbfd776124d4d2367"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9524464572e12979364b7d600abf96181d3541da11e23ddf565a32e70bd4dc0d"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b746dba803a79238e925d9046a63aa26bf86ab2a2fe74ce6b009a1c3f5c8f2ae"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:58ed19212051f49a523abb1dbe954337dc82d947fb6e5a0da60f7c8471a8476c"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:bd46088725ef7f58b5a1ef7ca06647ebaf0eb4baff7d1d0d177c6cc8744abd86"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_i686.whl", hash = "sha256:8cd69c07dd875537a824deec19f978e0f2078fdda07fd5c42ac29668dda5f40f"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:f1152ac548bd5b8bcecfb0b0371f082037e47128653df2e8ba6e914d384f3c3e"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-win32.whl", hash = "sha256:603ca0aba86b1349b147cab91ae970c63118a0f30444d4bc80355937c950c082"}, - {file = 
"argon2_cffi_bindings-21.2.0-cp36-abi3-win_amd64.whl", hash = "sha256:b2ef1c30440dbbcba7a5dc3e319408b59676e2e039e2ae11a8775ecf482b192f"}, - {file = "argon2_cffi_bindings-21.2.0-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:e415e3f62c8d124ee16018e491a009937f8cf7ebf5eb430ffc5de21b900dad93"}, - {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3e385d1c39c520c08b53d63300c3ecc28622f076f4c2b0e6d7e796e9f6502194"}, - {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c3e3cc67fdb7d82c4718f19b4e7a87123caf8a93fde7e23cf66ac0337d3cb3f"}, - {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a22ad9800121b71099d0fb0a65323810a15f2e292f2ba450810a7316e128ee5"}, - {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f9f8b450ed0547e3d473fdc8612083fd08dd2120d6ac8f73828df9b7d45bb351"}, - {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:93f9bf70084f97245ba10ee36575f0c3f1e7d7724d67d8e5b08e61787c320ed7"}, - {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3b9ef65804859d335dc6b31582cad2c5166f0c3e7975f324d9ffaa34ee7e6583"}, - {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4966ef5848d820776f5f562a7d45fdd70c2f330c961d0d745b784034bd9f48d"}, - {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20ef543a89dee4db46a1a6e206cd015360e5a75822f76df533845c3cbaf72670"}, - {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ed2937d286e2ad0cc79a7087d3c272832865f779430e0cc2b4f3718d3159b0cb"}, - {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5e00316dabdaea0b2dd82d141cc66889ced0cdcbfa599e8b471cf22c620c329a"}, -] [package.dependencies] cffi = ">=1.0.1" @@ -98,10 +61,6 @@ description = "Better dates & times for Python" category = "dev" optional = false python-versions = ">=3.6" -files = [ - {file = "arrow-1.2.3-py3-none-any.whl", hash = "sha256:5a49ab92e3b7b71d96cd6bfcc4df14efefc9dfa96ea19045815914a6ab6b1fe2"}, - {file = "arrow-1.2.3.tar.gz", hash = "sha256:3934b30ca1b9f292376d9db15b19446088d12ec58629bc3f0da28fd55fb633a1"}, -] [package.dependencies] python-dateutil = ">=2.7.0" @@ -113,10 +72,6 @@ description = "Annotate AST trees with source code positions" category = "dev" optional = false python-versions = "*" -files = [ - {file = "asttokens-2.2.1-py2.py3-none-any.whl", hash = "sha256:6b0ac9e93fb0335014d382b8fa9b3afa7df546984258005da0b9e7095b3deb1c"}, - {file = "asttokens-2.2.1.tar.gz", hash = "sha256:4622110b2a6f30b77e1473affaa97e711bc2f07d3f10848420ff1898edbe94f3"}, -] [package.dependencies] six = "*" @@ -131,10 +86,6 @@ description = "Timeout context manager for asyncio programs" category = "main" optional = true python-versions = ">=3.6" -files = [ - {file = "async-timeout-4.0.2.tar.gz", hash = "sha256:2163e1640ddb52b7a8c80d0a67a08587e5d245cc9c553a74a847056bc2976b15"}, - {file = "async_timeout-4.0.2-py3-none-any.whl", hash = "sha256:8ca1e4fcf50d07413d66d1a5e416e42cfdf5851c981d679a09851a6853383b3c"}, -] [[package]] name = "attrs" @@ -143,10 +94,6 @@ description = "Classes Without Boilerplate" category 
= "dev" optional = false python-versions = ">=3.5" -files = [ - {file = "attrs-22.1.0-py2.py3-none-any.whl", hash = "sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c"}, - {file = "attrs-22.1.0.tar.gz", hash = "sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6"}, -] [package.extras] dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy (>=0.900,!=0.940)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "sphinx", "sphinx-notfound-page", "zope.interface"] @@ -161,10 +108,6 @@ description = "Specifications for callback functions passed in to an API" category = "dev" optional = false python-versions = "*" -files = [ - {file = "backcall-0.2.0-py2.py3-none-any.whl", hash = "sha256:fbbce6a29f263178a1f7915c1940bde0ec2b2a967566fe1c65c1dfb7422bd255"}, - {file = "backcall-0.2.0.tar.gz", hash = "sha256:5cbdbf27be5e7cfadb448baf0aa95508f91f2bbc6c6437cd9cd06e2a4c215e1e"}, -] [[package]] name = "beautifulsoup4" @@ -173,10 +116,6 @@ description = "Screen-scraping library" category = "main" optional = false python-versions = ">=3.6.0" -files = [ - {file = "beautifulsoup4-4.11.1-py3-none-any.whl", hash = "sha256:58d5c3d29f5a36ffeb94f02f0d786cd53014cf9b3b3951d42e0080d8a9498d30"}, - {file = "beautifulsoup4-4.11.1.tar.gz", hash = "sha256:ad9aa55b65ef2808eb405f46cf74df7fcb7044d5cbc26487f96eb2ef2e436693"}, -] [package.dependencies] soupsieve = ">1.2" @@ -192,20 +131,6 @@ description = "The uncompromising code formatter." category = "dev" optional = false python-versions = ">=3.7" -files = [ - {file = "black-22.12.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9eedd20838bd5d75b80c9f5487dbcb06836a43833a37846cf1d8c1cc01cef59d"}, - {file = "black-22.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:159a46a4947f73387b4d83e87ea006dbb2337eab6c879620a3ba52699b1f4351"}, - {file = "black-22.12.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d30b212bffeb1e252b31dd269dfae69dd17e06d92b87ad26e23890f3efea366f"}, - {file = "black-22.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:7412e75863aa5c5411886804678b7d083c7c28421210180d67dfd8cf1221e1f4"}, - {file = "black-22.12.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c116eed0efb9ff870ded8b62fe9f28dd61ef6e9ddd28d83d7d264a38417dcee2"}, - {file = "black-22.12.0-cp37-cp37m-win_amd64.whl", hash = "sha256:1f58cbe16dfe8c12b7434e50ff889fa479072096d79f0a7f25e4ab8e94cd8350"}, - {file = "black-22.12.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77d86c9f3db9b1bf6761244bc0b3572a546f5fe37917a044e02f3166d5aafa7d"}, - {file = "black-22.12.0-cp38-cp38-win_amd64.whl", hash = "sha256:82d9fe8fee3401e02e79767016b4907820a7dc28d70d137eb397b92ef3cc5bfc"}, - {file = "black-22.12.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:101c69b23df9b44247bd88e1d7e90154336ac4992502d4197bdac35dd7ee3320"}, - {file = "black-22.12.0-cp39-cp39-win_amd64.whl", hash = "sha256:559c7a1ba9a006226f09e4916060982fd27334ae1998e7a38b3f33a37f7a2148"}, - {file = "black-22.12.0-py3-none-any.whl", hash = "sha256:436cc9167dd28040ad90d3b404aec22cedf24a6e4d7de221bec2730ec0c97bcf"}, - {file = "black-22.12.0.tar.gz", hash = "sha256:229351e5a18ca30f447bf724d007f890f97e13af070bb6ad4c0a441cd7596a2f"}, -] [package.dependencies] click = ">=8.0.0" @@ -228,10 +153,6 @@ description = "An easy safelist-based HTML-sanitizing tool." 
category = "dev" optional = false python-versions = ">=3.7" -files = [ - {file = "bleach-5.0.1-py3-none-any.whl", hash = "sha256:085f7f33c15bd408dd9b17a4ad77c577db66d76203e5984b1bd59baeee948b2a"}, - {file = "bleach-5.0.1.tar.gz", hash = "sha256:0d03255c47eb9bd2f26aa9bb7f2107732e7e8fe195ca2f64709fcf3b0a4a085c"}, -] [package.dependencies] six = ">=1.9.0" @@ -248,7 +169,2184 @@ description = "The Blis BLAS-like linear algebra library, as a self-contained C- category = "main" optional = true python-versions = "*" -files = [ + +[package.dependencies] +numpy = ">=1.15.0" + +[[package]] +name = "blobfile" +version = "2.0.0" +description = "Read GCS, ABS and local paths with the same interface, clone of tensorflow.io.gfile" +category = "main" +optional = true +python-versions = ">=3.7.0" + +[package.dependencies] +filelock = ">=3.0,<4.0" +lxml = ">=4.9,<5.0" +pycryptodomex = ">=3.8,<4.0" +urllib3 = ">=1.25,<2.0" + +[[package]] +name = "catalogue" +version = "2.0.8" +description = "Super lightweight function registries for your library" +category = "main" +optional = true +python-versions = ">=3.6" + +[[package]] +name = "certifi" +version = "2022.12.7" +description = "Python package for providing Mozilla's CA Bundle." +category = "main" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "cffi" +version = "1.15.1" +description = "Foreign Function Interface for Python calling C code." +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +pycparser = "*" + +[[package]] +name = "charset-normalizer" +version = "2.1.1" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +category = "main" +optional = false +python-versions = ">=3.6.0" + +[package.extras] +unicode-backport = ["unicodedata2"] + +[[package]] +name = "click" +version = "8.1.3" +description = "Composable command line interface toolkit" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +category = "main" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" + +[[package]] +name = "comm" +version = "0.1.2" +description = "Jupyter Python Comm implementation, for usage in ipykernel, xeus-python etc." 
+category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +traitlets = ">=5.3" + +[package.extras] +test = ["pytest"] + +[[package]] +name = "confection" +version = "0.0.3" +description = "The sweetest config system for Python" +category = "main" +optional = true +python-versions = ">=3.6" + +[package.dependencies] +pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<1.11.0" +srsly = ">=2.4.0,<3.0.0" + +[[package]] +name = "coverage" +version = "6.5.0" +description = "Code coverage measurement for Python" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} + +[package.extras] +toml = ["tomli"] + +[[package]] +name = "cymem" +version = "2.0.7" +description = "Manage calls to calloc/free through Cython" +category = "main" +optional = true +python-versions = "*" + +[[package]] +name = "debugpy" +version = "1.6.4" +description = "An implementation of the Debug Adapter Protocol for Python" +category = "dev" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "decorator" +version = "5.1.1" +description = "Decorators for Humans" +category = "dev" +optional = false +python-versions = ">=3.5" + +[[package]] +name = "defusedxml" +version = "0.7.1" +description = "XML bomb protection for Python stdlib modules" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[[package]] +name = "dill" +version = "0.3.6" +description = "serialize all of python" +category = "main" +optional = true +python-versions = ">=3.7" + +[package.extras] +graph = ["objgraph (>=1.7.2)"] + +[[package]] +name = "elastic-transport" +version = "8.4.0" +description = "Transport classes and utilities shared among Python Elastic client libraries" +category = "main" +optional = true +python-versions = ">=3.6" + +[package.dependencies] +certifi = "*" +urllib3 = ">=1.26.2,<2" + +[package.extras] +develop = ["aiohttp", "mock", "pytest", "pytest-asyncio", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests", "trustme"] + +[[package]] +name = "elasticsearch" +version = "8.5.3" +description = "Python client for Elasticsearch" +category = "main" +optional = true +python-versions = ">=3.6, <4" + +[package.dependencies] +elastic-transport = ">=8,<9" + +[package.extras] +async = ["aiohttp (>=3,<4)"] +requests = ["requests (>=2.4.0,<3.0.0)"] + +[[package]] +name = "entrypoints" +version = "0.4" +description = "Discover and load entry points from installed packages." +category = "dev" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "exceptiongroup" +version = "1.0.4" +description = "Backport of PEP 654 (exception groups)" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "executing" +version = "1.2.0" +description = "Get the currently executing AST node of a frame, and other information" +category = "dev" +optional = false +python-versions = "*" + +[package.extras] +tests = ["asttokens", "littleutils", "pytest", "rich"] + +[[package]] +name = "faiss-cpu" +version = "1.7.3" +description = "A library for efficient similarity search and clustering of dense vectors." 
+category = "main" +optional = true +python-versions = "*" + +[[package]] +name = "fastjsonschema" +version = "2.16.2" +description = "Fastest Python implementation of JSON schema" +category = "dev" +optional = false +python-versions = "*" + +[package.extras] +devel = ["colorama", "json-spec", "jsonschema", "pylint", "pytest", "pytest-benchmark", "pytest-cache", "validictory"] + +[[package]] +name = "filelock" +version = "3.8.2" +description = "A platform independent file lock." +category = "main" +optional = true +python-versions = ">=3.7" + +[package.extras] +docs = ["furo (>=2022.9.29)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.5)"] +testing = ["covdefaults (>=2.2.2)", "coverage (>=6.5)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-timeout (>=2.1)"] + +[[package]] +name = "flake8" +version = "6.0.0" +description = "the modular source code checker: pep8 pyflakes and co" +category = "dev" +optional = false +python-versions = ">=3.8.1" + +[package.dependencies] +mccabe = ">=0.7.0,<0.8.0" +pycodestyle = ">=2.10.0,<2.11.0" +pyflakes = ">=3.0.0,<3.1.0" + +[[package]] +name = "flake8-docstrings" +version = "1.6.0" +description = "Extension for flake8 which uses pydocstyle to check docstrings" +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +flake8 = ">=3" +pydocstyle = ">=2.1" + +[[package]] +name = "fqdn" +version = "1.5.1" +description = "Validates fully-qualified domain names against RFC 1123, so that they are acceptable to modern bowsers" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0, !=3.1, !=3.2, !=3.3, !=3.4, <4" + +[[package]] +name = "greenlet" +version = "2.0.1" +description = "Lightweight in-process concurrent programming" +category = "main" +optional = false +python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*" + +[package.extras] +docs = ["Sphinx", "docutils (<0.18)"] +test = ["faulthandler", "objgraph", "psutil"] + +[[package]] +name = "huggingface-hub" +version = "0.11.1" +description = "Client library to download and publish models, datasets and other repos on the huggingface.co hub" +category = "main" +optional = true +python-versions = ">=3.7.0" + +[package.dependencies] +filelock = "*" +packaging = ">=20.9" +pyyaml = ">=5.1" +requests = "*" +tqdm = "*" +typing-extensions = ">=3.7.4.3" + +[package.extras] +all = ["InquirerPy (==0.3.4)", "Jinja2", "black (==22.3)", "flake8 (>=3.8.3)", "flake8-bugbear", "isort (>=5.5.4)", "jedi", "mypy (==0.982)", "pytest", "pytest-cov", "pytest-env", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3"] +cli = ["InquirerPy (==0.3.4)"] +dev = ["InquirerPy (==0.3.4)", "Jinja2", "black (==22.3)", "flake8 (>=3.8.3)", "flake8-bugbear", "isort (>=5.5.4)", "jedi", "mypy (==0.982)", "pytest", "pytest-cov", "pytest-env", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3"] +fastai = ["fastai (>=2.4)", "fastcore (>=1.3.27)", "toml"] +quality = ["black (==22.3)", "flake8 (>=3.8.3)", "flake8-bugbear", "isort (>=5.5.4)", "mypy (==0.982)"] +tensorflow = ["graphviz", "pydot", "tensorflow"] +testing = ["InquirerPy (==0.3.4)", "Jinja2", "isort (>=5.5.4)", "jedi", "pytest", "pytest-cov", "pytest-env", "soundfile"] +torch = ["torch"] +typing = ["types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3"] + +[[package]] +name = "idna" +version = "3.4" +description = "Internationalized Domain Names in Applications (IDNA)" 
+category = "main" +optional = false +python-versions = ">=3.5" + +[[package]] +name = "importlib-metadata" +version = "5.1.0" +description = "Read metadata from Python packages" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +zipp = ">=0.5" + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)"] +perf = ["ipython"] +testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)"] + +[[package]] +name = "importlib-resources" +version = "5.10.1" +description = "Read resources from Python packages" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)"] +testing = ["flake8 (<5)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] + +[[package]] +name = "iniconfig" +version = "1.1.1" +description = "iniconfig: brain-dead simple config-ini parsing" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "ipykernel" +version = "6.19.2" +description = "IPython Kernel for Jupyter" +category = "dev" +optional = false +python-versions = ">=3.8" + +[package.dependencies] +appnope = {version = "*", markers = "platform_system == \"Darwin\""} +comm = ">=0.1.1" +debugpy = ">=1.0" +ipython = ">=7.23.1" +jupyter-client = ">=6.1.12" +matplotlib-inline = ">=0.1" +nest-asyncio = "*" +packaging = "*" +psutil = "*" +pyzmq = ">=17" +tornado = ">=6.1" +traitlets = ">=5.4.0" + +[package.extras] +cov = ["coverage[toml]", "curio", "matplotlib", "pytest-cov", "trio"] +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx", "sphinxcontrib-github-alt"] +lint = ["black (>=22.6.0)", "mdformat (>0.7)", "ruff (>=0.0.156)"] +test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio", "pytest-cov", "pytest-timeout"] +typing = ["mypy (>=0.990)"] + +[[package]] +name = "ipython" +version = "8.7.0" +description = "IPython: Productive Interactive Computing" +category = "dev" +optional = false +python-versions = ">=3.8" + +[package.dependencies] +appnope = {version = "*", markers = "sys_platform == \"darwin\""} +backcall = "*" +colorama = {version = "*", markers = "sys_platform == \"win32\""} +decorator = "*" +jedi = ">=0.16" +matplotlib-inline = "*" +pexpect = {version = ">4.3", markers = "sys_platform != \"win32\""} +pickleshare = "*" +prompt-toolkit = ">=3.0.11,<3.1.0" +pygments = ">=2.4.0" +stack-data = "*" +traitlets = ">=5" + +[package.extras] +all = ["black", "curio", "docrepr", "ipykernel", "ipyparallel", "ipywidgets", "matplotlib", "matplotlib (!=3.2.0)", "nbconvert", "nbformat", "notebook", "numpy (>=1.20)", "pandas", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio", "qtconsole", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "trio", "typing-extensions"] +black = ["black"] +doc = ["docrepr", "ipykernel", "matplotlib", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "typing-extensions"] +kernel = 
["ipykernel"] +nbconvert = ["nbconvert"] +nbformat = ["nbformat"] +notebook = ["ipywidgets", "notebook"] +parallel = ["ipyparallel"] +qtconsole = ["qtconsole"] +test = ["pytest (<7.1)", "pytest-asyncio", "testpath"] +test-extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.20)", "pandas", "pytest (<7.1)", "pytest-asyncio", "testpath", "trio"] + +[[package]] +name = "ipython-genutils" +version = "0.2.0" +description = "Vestigial utilities from IPython" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "ipywidgets" +version = "8.0.3" +description = "Jupyter interactive widgets" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +ipykernel = ">=4.5.1" +ipython = ">=6.1.0" +jupyterlab-widgets = ">=3.0,<4.0" +traitlets = ">=4.3.1" +widgetsnbextension = ">=4.0,<5.0" + +[package.extras] +test = ["jsonschema", "pytest (>=3.6.0)", "pytest-cov", "pytz"] + +[[package]] +name = "isoduration" +version = "20.11.0" +description = "Operations with ISO 8601 durations" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +arrow = ">=0.15.0" + +[[package]] +name = "isort" +version = "5.11.1" +description = "A Python utility / library to sort Python imports." +category = "dev" +optional = false +python-versions = ">=3.7.0" + +[package.extras] +colors = ["colorama (>=0.4.3,<0.5.0)"] +pipfile-deprecated-finder = ["pipreqs", "requirementslib"] +plugins = ["setuptools"] +requirements-deprecated-finder = ["pip-api", "pipreqs"] + +[[package]] +name = "jedi" +version = "0.18.2" +description = "An autocompletion tool for Python that can be used for text editors." +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +parso = ">=0.8.0,<0.9.0" + +[package.extras] +docs = ["Jinja2 (==2.11.3)", "MarkupSafe (==1.1.1)", "Pygments (==2.8.1)", "alabaster (==0.7.12)", "babel (==2.9.1)", "chardet (==4.0.0)", "commonmark (==0.8.1)", "docutils (==0.17.1)", "future (==0.18.2)", "idna (==2.10)", "imagesize (==1.2.0)", "mock (==1.0.1)", "packaging (==20.9)", "pyparsing (==2.4.7)", "pytz (==2021.1)", "readthedocs-sphinx-ext (==2.1.4)", "recommonmark (==0.5.0)", "requests (==2.25.1)", "six (==1.15.0)", "snowballstemmer (==2.1.0)", "sphinx (==1.8.5)", "sphinx-rtd-theme (==0.4.3)", "sphinxcontrib-serializinghtml (==1.1.4)", "sphinxcontrib-websupport (==1.2.4)", "urllib3 (==1.26.4)"] +qa = ["flake8 (==3.8.3)", "mypy (==0.782)"] +testing = ["Django (<3.1)", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] + +[[package]] +name = "jinja2" +version = "3.1.2" +description = "A very fast and expressive template engine." 
+category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "joblib" +version = "1.2.0" +description = "Lightweight pipelining with Python functions" +category = "main" +optional = true +python-versions = ">=3.7" + +[[package]] +name = "jsonpointer" +version = "2.3" +description = "Identify specific nodes in a JSON document (RFC 6901)" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[[package]] +name = "jsonschema" +version = "4.17.3" +description = "An implementation of JSON Schema validation for Python" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +attrs = ">=17.4.0" +fqdn = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} +idna = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} +importlib-resources = {version = ">=1.4.0", markers = "python_version < \"3.9\""} +isoduration = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} +jsonpointer = {version = ">1.13", optional = true, markers = "extra == \"format-nongpl\""} +pkgutil-resolve-name = {version = ">=1.3.10", markers = "python_version < \"3.9\""} +pyrsistent = ">=0.14.0,<0.17.0 || >0.17.0,<0.17.1 || >0.17.1,<0.17.2 || >0.17.2" +rfc3339-validator = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} +rfc3986-validator = {version = ">0.1.0", optional = true, markers = "extra == \"format-nongpl\""} +uri-template = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} +webcolors = {version = ">=1.11", optional = true, markers = "extra == \"format-nongpl\""} + +[package.extras] +format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] +format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"] + +[[package]] +name = "jupyter" +version = "1.0.0" +description = "Jupyter metapackage. Install all the Jupyter components in one go." +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +ipykernel = "*" +ipywidgets = "*" +jupyter-console = "*" +nbconvert = "*" +notebook = "*" +qtconsole = "*" + +[[package]] +name = "jupyter-client" +version = "7.4.8" +description = "Jupyter protocol implementation and client libraries" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +entrypoints = "*" +jupyter-core = ">=4.9.2" +nest-asyncio = ">=1.5.4" +python-dateutil = ">=2.8.2" +pyzmq = ">=23.0" +tornado = ">=6.2" +traitlets = "*" + +[package.extras] +doc = ["ipykernel", "myst-parser", "sphinx (>=1.3.6)", "sphinx-rtd-theme", "sphinxcontrib-github-alt"] +test = ["codecov", "coverage", "ipykernel (>=6.12)", "ipython", "mypy", "pre-commit", "pytest", "pytest-asyncio (>=0.18)", "pytest-cov", "pytest-timeout"] + +[[package]] +name = "jupyter-console" +version = "6.4.4" +description = "Jupyter terminal console" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +ipykernel = "*" +ipython = "*" +jupyter-client = ">=7.0.0" +prompt-toolkit = ">=2.0.0,<3.0.0 || >3.0.0,<3.0.1 || >3.0.1,<3.1.0" +pygments = "*" + +[package.extras] +test = ["pexpect"] + +[[package]] +name = "jupyter-core" +version = "5.1.0" +description = "Jupyter core package. 
A base package on which Jupyter projects rely." +category = "dev" +optional = false +python-versions = ">=3.8" + +[package.dependencies] +platformdirs = ">=2.5" +pywin32 = {version = ">=1.0", markers = "sys_platform == \"win32\" and platform_python_implementation != \"PyPy\""} +traitlets = ">=5.3" + +[package.extras] +docs = ["myst-parser", "sphinxcontrib-github-alt", "traitlets"] +test = ["ipykernel", "pre-commit", "pytest", "pytest-cov", "pytest-timeout"] + +[[package]] +name = "jupyter-events" +version = "0.5.0" +description = "Jupyter Event System library" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +jsonschema = {version = ">=4.3.0", extras = ["format-nongpl"]} +python-json-logger = "*" +pyyaml = "*" +traitlets = "*" + +[package.extras] +cli = ["click", "rich"] +test = ["click", "coverage", "pre-commit", "pytest (>=6.1.0)", "pytest-asyncio (>=0.19.0)", "pytest-console-scripts", "pytest-cov", "rich"] + +[[package]] +name = "jupyter-server" +version = "2.0.1" +description = "The backend—i.e. core services, APIs, and REST endpoints—to Jupyter web applications." +category = "dev" +optional = false +python-versions = ">=3.8" + +[package.dependencies] +anyio = ">=3.1.0,<4" +argon2-cffi = "*" +jinja2 = "*" +jupyter-client = ">=7.4.4" +jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" +jupyter-events = ">=0.4.0" +jupyter-server-terminals = "*" +nbconvert = ">=6.4.4" +nbformat = ">=5.3.0" +packaging = "*" +prometheus-client = "*" +pywinpty = {version = "*", markers = "os_name == \"nt\""} +pyzmq = ">=24" +send2trash = "*" +terminado = ">=0.8.3" +tornado = ">=6.2.0" +traitlets = ">=5.6.0" +websocket-client = "*" + +[package.extras] +docs = ["docutils (<0.20)", "ipykernel", "jinja2", "jupyter-client", "jupyter-server", "mistune (<1.0.0)", "myst-parser", "nbformat", "prometheus-client", "pydata-sphinx-theme", "send2trash", "sphinxcontrib-github-alt", "sphinxcontrib-openapi", "sphinxemoji", "tornado"] +lint = ["black (>=22.6.0)", "mdformat (>0.7)", "ruff (>=0.0.156)"] +test = ["ipykernel", "pre-commit", "pytest (>=7.0)", "pytest-console-scripts", "pytest-jupyter[server] (>=0.4)", "pytest-timeout", "requests"] +typing = ["mypy (>=0.990)"] + +[[package]] +name = "jupyter-server-terminals" +version = "0.4.2" +description = "A Jupyter Server Extension Providing Terminals." 
+category = "dev" +optional = false +python-versions = ">=3.8" + +[package.dependencies] +pywinpty = {version = ">=2.0.3", markers = "os_name == \"nt\""} +terminado = ">=0.8.3" + +[package.extras] +docs = ["jinja2", "jupyter-server", "mistune (<2.0)", "myst-parser", "nbformat", "packaging", "pydata-sphinx-theme", "sphinxcontrib-github-alt", "sphinxcontrib-openapi", "sphinxemoji", "tornado"] +test = ["coverage", "jupyter-server (>=2.0.0rc8)", "pytest (>=7.0)", "pytest-cov", "pytest-jupyter[server] (>=0.5.3)", "pytest-timeout"] + +[[package]] +name = "jupyterlab-pygments" +version = "0.2.2" +description = "Pygments theme using JupyterLab CSS variables" +category = "dev" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "jupyterlab-widgets" +version = "3.0.4" +description = "Jupyter interactive widgets for JupyterLab" +category = "dev" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "langcodes" +version = "3.3.0" +description = "Tools for labeling human languages with IETF language tags" +category = "main" +optional = true +python-versions = ">=3.6" + +[package.extras] +data = ["language-data (>=1.1,<2.0)"] + +[[package]] +name = "lxml" +version = "4.9.2" +description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." +category = "main" +optional = true +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, != 3.4.*" + +[package.extras] +cssselect = ["cssselect (>=0.7)"] +html5 = ["html5lib"] +htmlsoup = ["BeautifulSoup4"] +source = ["Cython (>=0.29.7)"] + +[[package]] +name = "manifest-ml" +version = "0.0.1" +description = "Manifest for Prompt Programming Foundation Models." +category = "main" +optional = true +python-versions = ">=3.8.0" + +[package.dependencies] +dill = ">=0.3.5" +redis = ">=4.3.1" +requests = ">=2.27.1" +sqlitedict = ">=2.0.0" +tqdm = ">=4.64.0" + +[package.extras] +all = ["Flask (>=2.1.2)", "accelerate (>=0.10.0)", "autopep8 (>=1.6.0)", "black (>=22.3.0)", "docformatter (>=1.4)", "flake8 (>=4.0.0)", "flake8-docstrings (>=1.6.0)", "isort (>=5.9.3)", "mypy (>=0.950)", "nbsphinx (>=0.8.0)", "pep8-naming (>=0.12.1)", "pre-commit (>=2.14.0)", "pytest (>=7.0.0)", "pytest-cov (>=3.0.0)", "python-dotenv (>=0.20.0)", "recommonmark (>=0.7.1)", "sphinx-autobuild", "sphinx-rtd-theme (>=0.5.1)", "torch (>=1.8.0)", "transformers (>=4.20.0)", "twine", "types-PyYAML (>=6.0.7)", "types-protobuf (>=3.19.21)", "types-python-dateutil (>=2.8.16)", "types-redis (>=4.2.6)", "types-requests (>=2.27.29)", "types-setuptools (>=57.4.17)"] +api = ["Flask (>=2.1.2)", "accelerate (>=0.10.0)", "torch (>=1.8.0)", "transformers (>=4.20.0)"] +dev = ["autopep8 (>=1.6.0)", "black (>=22.3.0)", "docformatter (>=1.4)", "flake8 (>=4.0.0)", "flake8-docstrings (>=1.6.0)", "isort (>=5.9.3)", "mypy (>=0.950)", "nbsphinx (>=0.8.0)", "pep8-naming (>=0.12.1)", "pre-commit (>=2.14.0)", "pytest (>=7.0.0)", "pytest-cov (>=3.0.0)", "python-dotenv (>=0.20.0)", "recommonmark (>=0.7.1)", "sphinx-autobuild", "sphinx-rtd-theme (>=0.5.1)", "twine", "types-PyYAML (>=6.0.7)", "types-protobuf (>=3.19.21)", "types-python-dateutil (>=2.8.16)", "types-redis (>=4.2.6)", "types-requests (>=2.27.29)", "types-setuptools (>=57.4.17)"] + +[[package]] +name = "markupsafe" +version = "2.1.1" +description = "Safely add untrusted strings to HTML/XML markup." 
+category = "main" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "matplotlib-inline" +version = "0.1.6" +description = "Inline Matplotlib backend for Jupyter" +category = "dev" +optional = false +python-versions = ">=3.5" + +[package.dependencies] +traitlets = "*" + +[[package]] +name = "mccabe" +version = "0.7.0" +description = "McCabe checker, plugin for flake8" +category = "dev" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "mistune" +version = "2.0.4" +description = "A sane Markdown parser with useful plugins and renderers" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "murmurhash" +version = "1.0.9" +description = "Cython bindings for MurmurHash" +category = "main" +optional = true +python-versions = ">=3.6" + +[[package]] +name = "mypy" +version = "0.991" +description = "Optional static typing for Python" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +mypy-extensions = ">=0.4.3" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing-extensions = ">=3.10" + +[package.extras] +dmypy = ["psutil (>=4.0)"] +install-types = ["pip"] +python2 = ["typed-ast (>=1.4.0,<2)"] +reports = ["lxml"] + +[[package]] +name = "mypy-extensions" +version = "0.4.3" +description = "Experimental type system extensions for programs checked with the mypy typechecker." +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "nbclassic" +version = "0.4.8" +description = "A web-based notebook environment for interactive computing" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +argon2-cffi = "*" +ipykernel = "*" +ipython-genutils = "*" +jinja2 = "*" +jupyter-client = ">=6.1.1" +jupyter-core = ">=4.6.1" +jupyter-server = ">=1.8" +nbconvert = ">=5" +nbformat = "*" +nest-asyncio = ">=1.5" +notebook-shim = ">=0.1.0" +prometheus-client = "*" +pyzmq = ">=17" +Send2Trash = ">=1.8.0" +terminado = ">=0.8.3" +tornado = ">=6.1" +traitlets = ">=4.2.1" + +[package.extras] +docs = ["myst-parser", "nbsphinx", "sphinx", "sphinx-rtd-theme", "sphinxcontrib-github-alt"] +json-logging = ["json-logging"] +test = ["coverage", "nbval", "pytest", "pytest-cov", "pytest-playwright", "pytest-tornasync", "requests", "requests-unixsocket", "testpath"] + +[[package]] +name = "nbclient" +version = "0.7.2" +description = "A client library for executing notebooks. Formerly nbconvert's ExecutePreprocessor." 
+category = "dev" +optional = false +python-versions = ">=3.7.0" + +[package.dependencies] +jupyter-client = ">=6.1.12" +jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" +nbformat = ">=5.1" +traitlets = ">=5.3" + +[package.extras] +dev = ["pre-commit"] +docs = ["autodoc-traits", "mock", "moto", "myst-parser", "nbclient[test]", "sphinx (>=1.7)", "sphinx-book-theme"] +test = ["ipykernel", "ipython", "ipywidgets", "nbconvert (>=7.0.0)", "pytest (>=7.0)", "pytest-asyncio", "pytest-cov (>=4.0)", "testpath", "xmltodict"] + +[[package]] +name = "nbconvert" +version = "7.2.6" +description = "Converting Jupyter Notebooks" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +beautifulsoup4 = "*" +bleach = "*" +defusedxml = "*" +importlib-metadata = {version = ">=3.6", markers = "python_version < \"3.10\""} +jinja2 = ">=3.0" +jupyter-core = ">=4.7" +jupyterlab-pygments = "*" +markupsafe = ">=2.0" +mistune = ">=2.0.3,<3" +nbclient = ">=0.5.0" +nbformat = ">=5.1" +packaging = "*" +pandocfilters = ">=1.4.1" +pygments = ">=2.4.1" +tinycss2 = "*" +traitlets = ">=5.0" + +[package.extras] +all = ["nbconvert[docs,qtpdf,serve,test,webpdf]"] +docs = ["ipykernel", "ipython", "myst-parser", "nbsphinx (>=0.2.12)", "pydata-sphinx-theme", "sphinx (==5.0.2)"] +qtpdf = ["nbconvert[qtpng]"] +qtpng = ["pyqtwebengine (>=5.15)"] +serve = ["tornado (>=6.1)"] +test = ["ipykernel", "ipywidgets (>=7)", "pre-commit", "pyppeteer (>=1,<1.1)", "pytest", "pytest-dependency"] +webpdf = ["pyppeteer (>=1,<1.1)"] + +[[package]] +name = "nbformat" +version = "5.7.0" +description = "The Jupyter Notebook format" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +fastjsonschema = "*" +jsonschema = ">=2.6" +jupyter-core = "*" +traitlets = ">=5.1" + +[package.extras] +test = ["check-manifest", "pep440", "pre-commit", "pytest", "testpath"] + +[[package]] +name = "nest-asyncio" +version = "1.5.6" +description = "Patch asyncio to allow nested event loops" +category = "dev" +optional = false +python-versions = ">=3.5" + +[[package]] +name = "nltk" +version = "3.8" +description = "Natural Language Toolkit" +category = "main" +optional = true +python-versions = ">=3.7" + +[package.dependencies] +click = "*" +joblib = "*" +regex = ">=2021.8.3" +tqdm = "*" + +[package.extras] +all = ["matplotlib", "numpy", "pyparsing", "python-crfsuite", "requests", "scikit-learn", "scipy", "twython"] +corenlp = ["requests"] +machine-learning = ["numpy", "python-crfsuite", "scikit-learn", "scipy"] +plot = ["matplotlib"] +tgrep = ["pyparsing"] +twitter = ["twython"] + +[[package]] +name = "notebook" +version = "6.5.2" +description = "A web-based notebook environment for interactive computing" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +argon2-cffi = "*" +ipykernel = "*" +ipython-genutils = "*" +jinja2 = "*" +jupyter-client = ">=5.3.4" +jupyter-core = ">=4.6.1" +nbclassic = ">=0.4.7" +nbconvert = ">=5" +nbformat = "*" +nest-asyncio = ">=1.5" +prometheus-client = "*" +pyzmq = ">=17" +Send2Trash = ">=1.8.0" +terminado = ">=0.8.3" +tornado = ">=6.1" +traitlets = ">=4.2.1" + +[package.extras] +docs = ["myst-parser", "nbsphinx", "sphinx", "sphinx-rtd-theme", "sphinxcontrib-github-alt"] +json-logging = ["json-logging"] +test = ["coverage", "nbval", "pytest", "pytest-cov", "requests", "requests-unixsocket", "selenium (==4.1.5)", "testpath"] + +[[package]] +name = "notebook-shim" +version = "0.2.2" +description = "A shim layer for notebook traits and config" 
+category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +jupyter-server = ">=1.8,<3" + +[package.extras] +test = ["pytest", "pytest-console-scripts", "pytest-tornasync"] + +[[package]] +name = "numpy" +version = "1.23.5" +description = "NumPy is the fundamental package for array computing with Python." +category = "main" +optional = false +python-versions = ">=3.8" + +[[package]] +name = "packaging" +version = "22.0" +description = "Core utilities for Python packages" +category = "main" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "pandocfilters" +version = "1.5.0" +description = "Utilities for writing pandoc filters in python" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[[package]] +name = "parso" +version = "0.8.3" +description = "A Python Parser" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.extras] +qa = ["flake8 (==3.8.3)", "mypy (==0.782)"] +testing = ["docopt", "pytest (<6.0.0)"] + +[[package]] +name = "pathspec" +version = "0.10.3" +description = "Utility library for gitignore style pattern matching of file paths." +category = "dev" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "pathy" +version = "0.10.1" +description = "pathlib.Path subclasses for local and cloud bucket storage" +category = "main" +optional = true +python-versions = ">= 3.6" + +[package.dependencies] +smart-open = ">=5.2.1,<7.0.0" +typer = ">=0.3.0,<1.0.0" + +[package.extras] +all = ["azure-storage-blob", "boto3", "google-cloud-storage (>=1.26.0,<2.0.0)", "mock", "pytest", "pytest-coverage", "typer-cli"] +azure = ["azure-storage-blob"] +gcs = ["google-cloud-storage (>=1.26.0,<2.0.0)"] +s3 = ["boto3"] +test = ["mock", "pytest", "pytest-coverage", "typer-cli"] + +[[package]] +name = "pexpect" +version = "4.8.0" +description = "Pexpect allows easy control of interactive console applications." +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +ptyprocess = ">=0.5" + +[[package]] +name = "pickleshare" +version = "0.7.5" +description = "Tiny 'shelve'-like database with concurrency support" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "pkgutil-resolve-name" +version = "1.3.10" +description = "Resolve a name to an object." +category = "dev" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "platformdirs" +version = "2.6.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
+category = "dev" +optional = false +python-versions = ">=3.7" + +[package.extras] +docs = ["furo (>=2022.9.29)", "proselint (>=0.13)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.4)"] +test = ["appdirs (==1.4.4)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"] + +[[package]] +name = "playwright" +version = "1.28.0" +description = "A high-level API to automate web browsers" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +greenlet = "2.0.1" +pyee = "9.0.4" +typing-extensions = {version = "*", markers = "python_version <= \"3.8\""} + +[[package]] +name = "pluggy" +version = "1.0.0" +description = "plugin and hook calling mechanisms for python" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "preshed" +version = "3.0.8" +description = "Cython hash table that trusts the keys are pre-hashed" +category = "main" +optional = true +python-versions = ">=3.6" + +[package.dependencies] +cymem = ">=2.0.2,<2.1.0" +murmurhash = ">=0.28.0,<1.1.0" + +[[package]] +name = "prometheus-client" +version = "0.15.0" +description = "Python client for the Prometheus monitoring system." +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.extras] +twisted = ["twisted"] + +[[package]] +name = "prompt-toolkit" +version = "3.0.36" +description = "Library for building powerful interactive command lines in Python" +category = "dev" +optional = false +python-versions = ">=3.6.2" + +[package.dependencies] +wcwidth = "*" + +[[package]] +name = "psutil" +version = "5.9.4" +description = "Cross-platform lib for process and system monitoring in Python." +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[package.extras] +test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] + +[[package]] +name = "ptyprocess" +version = "0.7.0" +description = "Run a subprocess in a pseudo terminal" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "pure-eval" +version = "0.2.2" +description = "Safely evaluate AST nodes without side effects" +category = "dev" +optional = false +python-versions = "*" + +[package.extras] +tests = ["pytest"] + +[[package]] +name = "py" +version = "1.11.0" +description = "library with cross-python path, ini-parsing, io, code, log facilities" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[[package]] +name = "pycodestyle" +version = "2.10.0" +description = "Python style guide checker" +category = "dev" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "pycparser" +version = "2.21" +description = "C parser in Python" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[[package]] +name = "pycryptodomex" +version = "3.16.0" +description = "Cryptographic library for Python" +category = "main" +optional = true +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[[package]] +name = "pydantic" +version = "1.10.2" +description = "Data validation and settings management using python type hints" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +typing-extensions = ">=4.1.0" + +[package.extras] +dotenv = ["python-dotenv (>=0.10.4)"] +email = ["email-validator (>=1.0.3)"] + +[[package]] +name = "pydocstyle" +version = "6.1.1" +description 
= "Python docstring style checker" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +snowballstemmer = "*" + +[package.extras] +toml = ["toml"] + +[[package]] +name = "pyee" +version = "9.0.4" +description = "A port of node.js's EventEmitter to python." +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +typing-extensions = "*" + +[[package]] +name = "pyflakes" +version = "3.0.1" +description = "passive checker of Python programs" +category = "dev" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "pygments" +version = "2.13.0" +description = "Pygments is a syntax highlighting package written in Python." +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.extras] +plugins = ["importlib-metadata"] + +[[package]] +name = "pyrsistent" +version = "0.19.2" +description = "Persistent/Functional/Immutable data structures" +category = "dev" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "pytest" +version = "7.2.0" +description = "pytest: simple powerful testing with Python" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +attrs = ">=19.2.0" +colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} + +[package.extras] +testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"] + +[[package]] +name = "pytest-cov" +version = "4.0.0" +description = "Pytest plugin for measuring coverage." +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +coverage = {version = ">=5.2.1", extras = ["toml"]} +pytest = ">=4.6" + +[package.extras] +testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] + +[[package]] +name = "pytest-dotenv" +version = "0.5.2" +description = "A py.test plugin that parses environment files before running tests" +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +pytest = ">=5.0.0" +python-dotenv = ">=0.9.1" + +[[package]] +name = "python-dateutil" +version = "2.8.2" +description = "Extensions to the standard Python datetime module" +category = "dev" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "python-dotenv" +version = "0.21.0" +description = "Read key-value pairs from a .env file and set them as environment variables" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.extras] +cli = ["click (>=5.0)"] + +[[package]] +name = "python-json-logger" +version = "2.0.4" +description = "A python library adding a json log formatter" +category = "dev" +optional = false +python-versions = ">=3.5" + +[[package]] +name = "pywin32" +version = "305" +description = "Python for Window Extensions" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "pywinpty" +version = "2.0.9" +description = "Pseudo terminal support for Windows from Python." 
+category = "dev" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "pyyaml" +version = "6.0" +description = "YAML parser and emitter for Python" +category = "main" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "pyzmq" +version = "24.0.1" +description = "Python bindings for 0MQ" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +cffi = {version = "*", markers = "implementation_name == \"pypy\""} +py = {version = "*", markers = "implementation_name == \"pypy\""} + +[[package]] +name = "qtconsole" +version = "5.4.0" +description = "Jupyter Qt console" +category = "dev" +optional = false +python-versions = ">= 3.7" + +[package.dependencies] +ipykernel = ">=4.1" +ipython-genutils = "*" +jupyter-client = ">=4.1" +jupyter-core = "*" +pygments = "*" +pyzmq = ">=17.1" +qtpy = ">=2.0.1" +traitlets = "<5.2.1 || >5.2.1,<5.2.2 || >5.2.2" + +[package.extras] +doc = ["Sphinx (>=1.3)"] +test = ["flaky", "pytest", "pytest-qt"] + +[[package]] +name = "qtpy" +version = "2.3.0" +description = "Provides an abstraction layer on top of the various Qt bindings (PyQt5/6 and PySide2/6)." +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +packaging = "*" + +[package.extras] +test = ["pytest (>=6,!=7.0.0,!=7.0.1)", "pytest-cov (>=3.0.0)", "pytest-qt"] + +[[package]] +name = "redis" +version = "4.4.0" +description = "Python client for Redis database and key-value store" +category = "main" +optional = true +python-versions = ">=3.7" + +[package.dependencies] +async-timeout = ">=4.0.2" + +[package.extras] +hiredis = ["hiredis (>=1.0.0)"] +ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==20.0.1)", "requests (>=2.26.0)"] + +[[package]] +name = "regex" +version = "2022.10.31" +description = "Alternative regular expression module, to replace re." +category = "main" +optional = true +python-versions = ">=3.6" + +[[package]] +name = "requests" +version = "2.28.1" +description = "Python HTTP for Humans." +category = "main" +optional = false +python-versions = ">=3.7, <4" + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<3" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<1.27" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "rfc3339-validator" +version = "0.1.4" +description = "A pure python RFC3339 validator" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[package.dependencies] +six = "*" + +[[package]] +name = "rfc3986-validator" +version = "0.1.1" +description = "Pure python rfc3986 validator" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[[package]] +name = "send2trash" +version = "1.8.0" +description = "Send file to trash natively under Mac OS X, Windows and Linux." 
+category = "dev" +optional = false +python-versions = "*" + +[package.extras] +nativelib = ["pyobjc-framework-Cocoa", "pywin32"] +objc = ["pyobjc-framework-Cocoa"] +win32 = ["pywin32"] + +[[package]] +name = "setuptools" +version = "65.6.3" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +category = "main" +optional = true +python-versions = ">=3.7" + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" + +[[package]] +name = "smart-open" +version = "6.3.0" +description = "Utils for streaming large files (S3, HDFS, GCS, Azure Blob Storage, gzip, bz2...)" +category = "main" +optional = true +python-versions = ">=3.6,<4.0" + +[package.extras] +all = ["azure-common", "azure-core", "azure-storage-blob", "boto3", "google-cloud-storage (>=2.6.0)", "paramiko", "requests"] +azure = ["azure-common", "azure-core", "azure-storage-blob"] +gcs = ["google-cloud-storage (>=2.6.0)"] +http = ["requests"] +s3 = ["boto3"] +ssh = ["paramiko"] +test = ["azure-common", "azure-core", "azure-storage-blob", "boto3", "google-cloud-storage (>=2.6.0)", "moto[server]", "paramiko", "pytest", "pytest-rerunfailures", "requests", "responses"] +webhdfs = ["requests"] + +[[package]] +name = "sniffio" +version = "1.3.0" +description = "Sniff out which async library your code is running under" +category = "dev" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "snowballstemmer" +version = "2.2.0" +description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "soupsieve" +version = "2.3.2.post1" +description = "A modern CSS selector implementation for Beautiful Soup." 
+category = "main" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "spacy" +version = "3.4.3" +description = "Industrial-strength Natural Language Processing (NLP) in Python" +category = "main" +optional = true +python-versions = ">=3.6" + +[package.dependencies] +catalogue = ">=2.0.6,<2.1.0" +cymem = ">=2.0.2,<2.1.0" +jinja2 = "*" +langcodes = ">=3.2.0,<4.0.0" +murmurhash = ">=0.28.0,<1.1.0" +numpy = ">=1.15.0" +packaging = ">=20.0" +pathy = ">=0.3.5" +preshed = ">=3.0.2,<3.1.0" +pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<1.11.0" +requests = ">=2.13.0,<3.0.0" +setuptools = "*" +spacy-legacy = ">=3.0.10,<3.1.0" +spacy-loggers = ">=1.0.0,<2.0.0" +srsly = ">=2.4.3,<3.0.0" +thinc = ">=8.1.0,<8.2.0" +tqdm = ">=4.38.0,<5.0.0" +typer = ">=0.3.0,<0.8.0" +wasabi = ">=0.9.1,<1.1.0" + +[package.extras] +apple = ["thinc-apple-ops (>=0.1.0.dev0,<1.0.0)"] +cuda = ["cupy (>=5.0.0b4,<12.0.0)"] +cuda-autodetect = ["cupy-wheel (>=11.0.0,<12.0.0)"] +cuda100 = ["cupy-cuda100 (>=5.0.0b4,<12.0.0)"] +cuda101 = ["cupy-cuda101 (>=5.0.0b4,<12.0.0)"] +cuda102 = ["cupy-cuda102 (>=5.0.0b4,<12.0.0)"] +cuda110 = ["cupy-cuda110 (>=5.0.0b4,<12.0.0)"] +cuda111 = ["cupy-cuda111 (>=5.0.0b4,<12.0.0)"] +cuda112 = ["cupy-cuda112 (>=5.0.0b4,<12.0.0)"] +cuda113 = ["cupy-cuda113 (>=5.0.0b4,<12.0.0)"] +cuda114 = ["cupy-cuda114 (>=5.0.0b4,<12.0.0)"] +cuda115 = ["cupy-cuda115 (>=5.0.0b4,<12.0.0)"] +cuda116 = ["cupy-cuda116 (>=5.0.0b4,<12.0.0)"] +cuda117 = ["cupy-cuda117 (>=5.0.0b4,<12.0.0)"] +cuda11x = ["cupy-cuda11x (>=11.0.0,<12.0.0)"] +cuda80 = ["cupy-cuda80 (>=5.0.0b4,<12.0.0)"] +cuda90 = ["cupy-cuda90 (>=5.0.0b4,<12.0.0)"] +cuda91 = ["cupy-cuda91 (>=5.0.0b4,<12.0.0)"] +cuda92 = ["cupy-cuda92 (>=5.0.0b4,<12.0.0)"] +ja = ["sudachidict-core (>=20211220)", "sudachipy (>=0.5.2,!=0.6.1)"] +ko = ["natto-py (>=0.9.0)"] +lookups = ["spacy-lookups-data (>=1.0.3,<1.1.0)"] +ray = ["spacy-ray (>=0.1.0,<1.0.0)"] +th = ["pythainlp (>=2.0)"] +transformers = ["spacy-transformers (>=1.1.2,<1.2.0)"] + +[[package]] +name = "spacy-legacy" +version = "3.0.10" +description = "Legacy registered functions for spaCy backwards compatibility" +category = "main" +optional = true +python-versions = ">=3.6" + +[[package]] +name = "spacy-loggers" +version = "1.0.4" +description = "Logging utilities for SpaCy" +category = "main" +optional = true +python-versions = ">=3.6" + +[[package]] +name = "sqlalchemy" +version = "1.4.45" +description = "Database Abstraction Library" +category = "main" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" + +[package.dependencies] +greenlet = {version = "!=0.4.17", markers = "python_version >= \"3\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"} + +[package.extras] +aiomysql = ["aiomysql", "greenlet (!=0.4.17)"] +aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] +asyncio = ["greenlet (!=0.4.17)"] +asyncmy = ["asyncmy (>=0.2.3,!=0.2.4)", "greenlet (!=0.4.17)"] +mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2)"] +mssql = ["pyodbc"] +mssql-pymssql = ["pymssql"] +mssql-pyodbc = ["pyodbc"] +mypy = ["mypy (>=0.910)", "sqlalchemy2-stubs"] +mysql = ["mysqlclient (>=1.4.0)", "mysqlclient (>=1.4.0,<2)"] +mysql-connector = ["mysql-connector-python"] +oracle = ["cx_oracle (>=7)", "cx_oracle (>=7,<8)"] +postgresql = ["psycopg2 (>=2.7)"] 
+postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] +postgresql-pg8000 = ["pg8000 (>=1.16.6,!=1.29.0)"] +postgresql-psycopg2binary = ["psycopg2-binary"] +postgresql-psycopg2cffi = ["psycopg2cffi"] +pymysql = ["pymysql", "pymysql (<1)"] +sqlcipher = ["sqlcipher3_binary"] + +[[package]] +name = "sqlitedict" +version = "2.1.0" +description = "Persistent dict in Python, backed up by sqlite3 and pickle, multithread-safe." +category = "main" +optional = true +python-versions = "*" + +[[package]] +name = "srsly" +version = "2.4.5" +description = "Modern high-performance serialization utilities for Python" +category = "main" +optional = true +python-versions = ">=3.6" + +[package.dependencies] +catalogue = ">=2.0.3,<2.1.0" + +[[package]] +name = "stack-data" +version = "0.6.2" +description = "Extract data from python stack frames and tracebacks for informative displays" +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +asttokens = ">=2.1.0" +executing = ">=1.2.0" +pure-eval = "*" + +[package.extras] +tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"] + +[[package]] +name = "terminado" +version = "0.17.1" +description = "Tornado websocket backend for the Xterm.js Javascript terminal emulator library." +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +ptyprocess = {version = "*", markers = "os_name != \"nt\""} +pywinpty = {version = ">=1.1.0", markers = "os_name == \"nt\""} +tornado = ">=6.1.0" + +[package.extras] +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] +test = ["pre-commit", "pytest (>=7.0)", "pytest-timeout"] + +[[package]] +name = "thinc" +version = "8.1.5" +description = "A refreshing functional take on deep learning, compatible with your favorite libraries" +category = "main" +optional = true +python-versions = ">=3.6" + +[package.dependencies] +blis = ">=0.7.8,<0.8.0" +catalogue = ">=2.0.4,<2.1.0" +confection = ">=0.0.1,<1.0.0" +cymem = ">=2.0.2,<2.1.0" +murmurhash = ">=1.0.2,<1.1.0" +numpy = ">=1.15.0" +preshed = ">=3.0.2,<3.1.0" +pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<1.11.0" +setuptools = "*" +srsly = ">=2.4.0,<3.0.0" +wasabi = ">=0.8.1,<1.1.0" + +[package.extras] +cuda = ["cupy (>=5.0.0b4)"] +cuda-autodetect = ["cupy-wheel (>=11.0.0)"] +cuda100 = ["cupy-cuda100 (>=5.0.0b4)"] +cuda101 = ["cupy-cuda101 (>=5.0.0b4)"] +cuda102 = ["cupy-cuda102 (>=5.0.0b4)"] +cuda110 = ["cupy-cuda110 (>=5.0.0b4)"] +cuda111 = ["cupy-cuda111 (>=5.0.0b4)"] +cuda112 = ["cupy-cuda112 (>=5.0.0b4)"] +cuda113 = ["cupy-cuda113 (>=5.0.0b4)"] +cuda114 = ["cupy-cuda114 (>=5.0.0b4)"] +cuda115 = ["cupy-cuda115 (>=5.0.0b4)"] +cuda116 = ["cupy-cuda116 (>=5.0.0b4)"] +cuda117 = ["cupy-cuda117 (>=5.0.0b4)"] +cuda11x = ["cupy-cuda11x (>=11.0.0)"] +cuda80 = ["cupy-cuda80 (>=5.0.0b4)"] +cuda90 = ["cupy-cuda90 (>=5.0.0b4)"] +cuda91 = ["cupy-cuda91 (>=5.0.0b4)"] +cuda92 = ["cupy-cuda92 (>=5.0.0b4)"] +datasets = ["ml-datasets (>=0.2.0,<0.3.0)"] +mxnet = ["mxnet (>=1.5.1,<1.6.0)"] +tensorflow = ["tensorflow (>=2.0.0,<2.6.0)"] +torch = ["torch (>=1.6.0)"] + +[[package]] +name = "tiktoken" +version = "0.1.1" +description = "" +category = "main" +optional = true +python-versions = ">=3.9" + +[package.dependencies] +blobfile = ">=2" +regex = ">=2022.1.18" + +[[package]] +name = "tinycss2" +version = "1.2.1" +description = "A tiny CSS parser" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +webencodings = ">=0.4" + +[package.extras] +doc = ["sphinx", "sphinx_rtd_theme"] +test 
= ["flake8", "isort", "pytest"] + +[[package]] +name = "tokenizers" +version = "0.13.2" +description = "Fast and Customizable Tokenizers" +category = "main" +optional = true +python-versions = "*" + +[package.extras] +dev = ["black (==22.3)", "datasets", "numpy", "pytest", "requests"] +docs = ["setuptools-rust", "sphinx", "sphinx-rtd-theme"] +testing = ["black (==22.3)", "datasets", "numpy", "pytest", "requests"] + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +category = "dev" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "tornado" +version = "6.2" +description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." +category = "dev" +optional = false +python-versions = ">= 3.7" + +[[package]] +name = "tqdm" +version = "4.64.1" +description = "Fast, Extensible Progress Meter" +category = "main" +optional = true +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[package.extras] +dev = ["py-make (>=0.1.0)", "twine", "wheel"] +notebook = ["ipywidgets (>=6)"] +slack = ["slack-sdk"] +telegram = ["requests"] + +[[package]] +name = "traitlets" +version = "5.7.1" +description = "Traitlets Python configuration system" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.extras] +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] +lint = ["black (>=22.6.0)", "mdformat (>0.7)", "ruff (>=0.0.156)"] +test = ["pre-commit", "pytest"] +typing = ["mypy (>=0.990)"] + +[[package]] +name = "transformers" +version = "4.25.1" +description = "State-of-the-art Machine Learning for JAX, PyTorch and TensorFlow" +category = "main" +optional = true +python-versions = ">=3.7.0" + +[package.dependencies] +filelock = "*" +huggingface-hub = ">=0.10.0,<1.0" +numpy = ">=1.17" +packaging = ">=20.0" +pyyaml = ">=5.1" +regex = "!=2019.12.17" +requests = "*" +tokenizers = ">=0.11.1,<0.11.3 || >0.11.3,<0.14" +tqdm = ">=4.27" + +[package.extras] +accelerate = ["accelerate (>=0.10.0)"] +all = ["Pillow", "accelerate (>=0.10.0)", "codecarbon (==1.2.0)", "flax (>=0.4.1)", "jax (>=0.2.8,!=0.3.2,<=0.3.6)", "jaxlib (>=0.1.65,<=0.3.6)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "onnxconverter-common", "optax (>=0.0.8)", "optuna", "phonemizer", "protobuf (<=3.20.2)", "pyctcdecode (>=0.4.0)", "ray[tune]", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "tensorflow (>=2.4,<2.11)", "tensorflow-text", "tf2onnx", "timm", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.7,!=1.12.0)", "torchaudio"] +audio = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"] +codecarbon = ["codecarbon (==1.2.0)"] +deepspeed = ["accelerate (>=0.10.0)", "deepspeed (>=0.6.5)"] +deepspeed-testing = ["GitPython (<3.1.19)", "accelerate (>=0.10.0)", "beautifulsoup4", "black (==22.3)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "deepspeed (>=0.6.5)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "hf-doc-builder (>=0.3.0)", "nltk", "optuna", "parameterized", "protobuf (<=3.20.2)", "psutil", "pytest", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "safetensors (>=0.2.1)", "timeout-decorator"] +dev = ["GitPython (<3.1.19)", "Pillow", "accelerate (>=0.10.0)", "beautifulsoup4", "black (==22.3)", "codecarbon (==1.2.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", 
"flake8 (>=3.8.3)", "flax (>=0.4.1)", "fugashi (>=1.0)", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "ipadic (>=1.0.0,<2.0)", "isort (>=5.5.4)", "jax (>=0.2.8,!=0.3.2,<=0.3.6)", "jaxlib (>=0.1.65,<=0.3.6)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "nltk", "onnxconverter-common", "optax (>=0.0.8)", "optuna", "parameterized", "phonemizer", "protobuf (<=3.20.2)", "psutil", "pyctcdecode (>=0.4.0)", "pyknp (>=0.6.1)", "pytest", "pytest-timeout", "pytest-xdist", "ray[tune]", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "safetensors (>=0.2.1)", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "tensorflow (>=2.4,<2.11)", "tensorflow-text", "tf2onnx", "timeout-decorator", "timm", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.7,!=1.12.0)", "torchaudio", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)"] +dev-tensorflow = ["GitPython (<3.1.19)", "Pillow", "beautifulsoup4", "black (==22.3)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "flake8 (>=3.8.3)", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "isort (>=5.5.4)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "nltk", "onnxconverter-common", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "parameterized", "phonemizer", "protobuf (<=3.20.2)", "psutil", "pyctcdecode (>=0.4.0)", "pytest", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "safetensors (>=0.2.1)", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorflow (>=2.4,<2.11)", "tensorflow-text", "tf2onnx", "timeout-decorator", "tokenizers (>=0.11.1,!=0.11.3,<0.14)"] +dev-torch = ["GitPython (<3.1.19)", "Pillow", "beautifulsoup4", "black (==22.3)", "codecarbon (==1.2.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "flake8 (>=3.8.3)", "fugashi (>=1.0)", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "ipadic (>=1.0.0,<2.0)", "isort (>=5.5.4)", "kenlm", "librosa", "nltk", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "optuna", "parameterized", "phonemizer", "protobuf (<=3.20.2)", "psutil", "pyctcdecode (>=0.4.0)", "pyknp (>=0.6.1)", "pytest", "pytest-timeout", "pytest-xdist", "ray[tune]", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "safetensors (>=0.2.1)", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "timeout-decorator", "timm", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.7,!=1.12.0)", "torchaudio", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)"] +docs = ["Pillow", "accelerate (>=0.10.0)", "codecarbon (==1.2.0)", "flax (>=0.4.1)", "hf-doc-builder", "jax (>=0.2.8,!=0.3.2,<=0.3.6)", "jaxlib (>=0.1.65,<=0.3.6)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "onnxconverter-common", "optax (>=0.0.8)", "optuna", "phonemizer", "protobuf (<=3.20.2)", "pyctcdecode (>=0.4.0)", "ray[tune]", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "tensorflow (>=2.4,<2.11)", "tensorflow-text", "tf2onnx", "timm", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.7,!=1.12.0)", "torchaudio"] +docs-specific = ["hf-doc-builder"] +fairscale = ["fairscale (>0.3)"] +flax = ["flax (>=0.4.1)", "jax (>=0.2.8,!=0.3.2,<=0.3.6)", "jaxlib (>=0.1.65,<=0.3.6)", "optax (>=0.0.8)"] +flax-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"] 
+ftfy = ["ftfy"] +integrations = ["optuna", "ray[tune]", "sigopt"] +ja = ["fugashi (>=1.0)", "ipadic (>=1.0.0,<2.0)", "pyknp (>=0.6.1)", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)"] +modelcreation = ["cookiecutter (==1.7.3)"] +natten = ["natten (>=0.14.4)"] +onnx = ["onnxconverter-common", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "tf2onnx"] +onnxruntime = ["onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)"] +optuna = ["optuna"] +quality = ["GitPython (<3.1.19)", "black (==22.3)", "datasets (!=2.5.0)", "flake8 (>=3.8.3)", "hf-doc-builder (>=0.3.0)", "isort (>=5.5.4)"] +ray = ["ray[tune]"] +retrieval = ["datasets (!=2.5.0)", "faiss-cpu"] +sagemaker = ["sagemaker (>=2.31.0)"] +sentencepiece = ["protobuf (<=3.20.2)", "sentencepiece (>=0.1.91,!=0.1.92)"] +serving = ["fastapi", "pydantic", "starlette", "uvicorn"] +sigopt = ["sigopt"] +sklearn = ["scikit-learn"] +speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)", "torchaudio"] +testing = ["GitPython (<3.1.19)", "beautifulsoup4", "black (==22.3)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "hf-doc-builder (>=0.3.0)", "nltk", "parameterized", "protobuf (<=3.20.2)", "psutil", "pytest", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "safetensors (>=0.2.1)", "timeout-decorator"] +tf = ["keras-nlp (>=0.3.1)", "onnxconverter-common", "tensorflow (>=2.4,<2.11)", "tensorflow-text", "tf2onnx"] +tf-cpu = ["keras-nlp (>=0.3.1)", "onnxconverter-common", "tensorflow-cpu (>=2.4,<2.11)", "tensorflow-text", "tf2onnx"] +tf-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"] +timm = ["timm"] +tokenizers = ["tokenizers (>=0.11.1,!=0.11.3,<0.14)"] +torch = ["torch (>=1.7,!=1.12.0)"] +torch-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)", "torchaudio"] +torchhub = ["filelock", "huggingface-hub (>=0.10.0,<1.0)", "importlib-metadata", "numpy (>=1.17)", "packaging (>=20.0)", "protobuf (<=3.20.2)", "regex (!=2019.12.17)", "requests", "sentencepiece (>=0.1.91,!=0.1.92)", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.7,!=1.12.0)", "tqdm (>=4.27)"] +vision = ["Pillow"] + +[[package]] +name = "typer" +version = "0.7.0" +description = "Typer, build great CLIs. Easy to code. Based on Python type hints." 
+category = "main" +optional = true +python-versions = ">=3.6" + +[package.dependencies] +click = ">=7.1.1,<9.0.0" + +[package.extras] +all = ["colorama (>=0.4.3,<0.5.0)", "rich (>=10.11.0,<13.0.0)", "shellingham (>=1.3.0,<2.0.0)"] +dev = ["autoflake (>=1.3.1,<2.0.0)", "flake8 (>=3.8.3,<4.0.0)", "pre-commit (>=2.17.0,<3.0.0)"] +doc = ["cairosvg (>=2.5.2,<3.0.0)", "mdx-include (>=1.4.1,<2.0.0)", "mkdocs (>=1.1.2,<2.0.0)", "mkdocs-material (>=8.1.4,<9.0.0)", "pillow (>=9.3.0,<10.0.0)"] +test = ["black (>=22.3.0,<23.0.0)", "coverage (>=6.2,<7.0)", "isort (>=5.0.6,<6.0.0)", "mypy (==0.910)", "pytest (>=4.4.0,<8.0.0)", "pytest-cov (>=2.10.0,<5.0.0)", "pytest-sugar (>=0.9.4,<0.10.0)", "pytest-xdist (>=1.32.0,<4.0.0)", "rich (>=10.11.0,<13.0.0)", "shellingham (>=1.3.0,<2.0.0)"] + +[[package]] +name = "types-pyyaml" +version = "6.0.12.2" +description = "Typing stubs for PyYAML" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "types-requests" +version = "2.28.11.5" +description = "Typing stubs for requests" +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +types-urllib3 = "<1.27" + +[[package]] +name = "types-toml" +version = "0.10.8.1" +description = "Typing stubs for toml" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "types-urllib3" +version = "1.26.25.4" +description = "Typing stubs for urllib3" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "typing-extensions" +version = "4.4.0" +description = "Backported and Experimental Type Hints for Python 3.7+" +category = "main" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "uri-template" +version = "1.2.0" +description = "RFC 6570 URI Template Processor" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.extras] +dev = ["flake8 (<4.0.0)", "flake8-annotations", "flake8-bugbear", "flake8-commas", "flake8-comprehensions", "flake8-continuation", "flake8-datetimez", "flake8-docstrings", "flake8-import-order", "flake8-literal", "flake8-noqa", "flake8-requirements", "flake8-type-annotations", "flake8-use-fstring", "mypy", "pep8-naming"] + +[[package]] +name = "urllib3" +version = "1.26.13" +description = "HTTP library with thread-safe connection pooling, file post, and more." +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] +secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] +socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] + +[[package]] +name = "wasabi" +version = "0.10.1" +description = "A lightweight console printing and formatting toolkit" +category = "main" +optional = true +python-versions = "*" + +[[package]] +name = "wcwidth" +version = "0.2.5" +description = "Measures the displayed width of unicode strings in a terminal" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "webcolors" +version = "1.12" +description = "A library for working with color names and color values formats defined by HTML and CSS." 
+category = "dev" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "webencodings" +version = "0.5.1" +description = "Character encoding aliases for legacy web content" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "websocket-client" +version = "1.4.2" +description = "WebSocket client for Python with low level API options" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.extras] +docs = ["Sphinx (>=3.4)", "sphinx-rtd-theme (>=0.5)"] +optional = ["python-socks", "wsaccel"] +test = ["websockets"] + +[[package]] +name = "widgetsnbextension" +version = "4.0.4" +description = "Jupyter interactive widgets for Jupyter Notebook" +category = "dev" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "wikipedia" +version = "1.4.0" +description = "Wikipedia API for Python" +category = "main" +optional = true +python-versions = "*" + +[package.dependencies] +beautifulsoup4 = "*" +requests = ">=2.0.0,<3.0.0" + +[[package]] +name = "zipp" +version = "3.11.0" +description = "Backport of pathlib-compatible object wrapper for zip files" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)"] +testing = ["flake8 (<5)", "func-timeout", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] + +[extras] +all = ["manifest-ml", "elasticsearch", "faiss-cpu", "transformers", "spacy", "nltk", "wikipedia", "beautifulsoup4", "tiktoken"] +llms = ["manifest-ml"] + +[metadata] +lock-version = "1.1" +python-versions = ">=3.8.1,<4.0" +content-hash = "45b0665b465cf551601e1a18f5b99c9a66c99313de21292f2b897d2ec42d2f6a" + +[metadata.files] +anyio = [ + {file = "anyio-3.6.2-py3-none-any.whl", hash = "sha256:fbbe32bd270d2a2ef3ed1c5d45041250284e31fc0a4df4a5a6071842051a51e3"}, + {file = "anyio-3.6.2.tar.gz", hash = "sha256:25ea0d673ae30af41a0c442f81cf3b38c7e79fdc7b60335a4c14e05eb0947421"}, +] +appnope = [ + {file = "appnope-0.1.3-py2.py3-none-any.whl", hash = "sha256:265a455292d0bd8a72453494fa24df5a11eb18373a60c7c0430889f22548605e"}, + {file = "appnope-0.1.3.tar.gz", hash = "sha256:02bd91c4de869fbb1e1c50aafc4098827a7a54ab2f39d9dcba6c9547ed920e24"}, +] +argon2-cffi = [ + {file = "argon2-cffi-21.3.0.tar.gz", hash = "sha256:d384164d944190a7dd7ef22c6aa3ff197da12962bd04b17f64d4e93d934dba5b"}, + {file = "argon2_cffi-21.3.0-py3-none-any.whl", hash = "sha256:8c976986f2c5c0e5000919e6de187906cfd81fb1c72bf9d88c01177e77da7f80"}, +] +argon2-cffi-bindings = [ + {file = "argon2-cffi-bindings-21.2.0.tar.gz", hash = "sha256:bb89ceffa6c791807d1305ceb77dbfacc5aa499891d2c55661c6459651fc39e3"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ccb949252cb2ab3a08c02024acb77cfb179492d5701c7cbdbfd776124d4d2367"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9524464572e12979364b7d600abf96181d3541da11e23ddf565a32e70bd4dc0d"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b746dba803a79238e925d9046a63aa26bf86ab2a2fe74ce6b009a1c3f5c8f2ae"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:58ed19212051f49a523abb1dbe954337dc82d947fb6e5a0da60f7c8471a8476c"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:bd46088725ef7f58b5a1ef7ca06647ebaf0eb4baff7d1d0d177c6cc8744abd86"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_i686.whl", hash = "sha256:8cd69c07dd875537a824deec19f978e0f2078fdda07fd5c42ac29668dda5f40f"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:f1152ac548bd5b8bcecfb0b0371f082037e47128653df2e8ba6e914d384f3c3e"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-win32.whl", hash = "sha256:603ca0aba86b1349b147cab91ae970c63118a0f30444d4bc80355937c950c082"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-win_amd64.whl", hash = "sha256:b2ef1c30440dbbcba7a5dc3e319408b59676e2e039e2ae11a8775ecf482b192f"}, + {file = "argon2_cffi_bindings-21.2.0-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:e415e3f62c8d124ee16018e491a009937f8cf7ebf5eb430ffc5de21b900dad93"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3e385d1c39c520c08b53d63300c3ecc28622f076f4c2b0e6d7e796e9f6502194"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c3e3cc67fdb7d82c4718f19b4e7a87123caf8a93fde7e23cf66ac0337d3cb3f"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a22ad9800121b71099d0fb0a65323810a15f2e292f2ba450810a7316e128ee5"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f9f8b450ed0547e3d473fdc8612083fd08dd2120d6ac8f73828df9b7d45bb351"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:93f9bf70084f97245ba10ee36575f0c3f1e7d7724d67d8e5b08e61787c320ed7"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3b9ef65804859d335dc6b31582cad2c5166f0c3e7975f324d9ffaa34ee7e6583"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4966ef5848d820776f5f562a7d45fdd70c2f330c961d0d745b784034bd9f48d"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20ef543a89dee4db46a1a6e206cd015360e5a75822f76df533845c3cbaf72670"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ed2937d286e2ad0cc79a7087d3c272832865f779430e0cc2b4f3718d3159b0cb"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5e00316dabdaea0b2dd82d141cc66889ced0cdcbfa599e8b471cf22c620c329a"}, +] +arrow = [ + {file = "arrow-1.2.3-py3-none-any.whl", hash = "sha256:5a49ab92e3b7b71d96cd6bfcc4df14efefc9dfa96ea19045815914a6ab6b1fe2"}, + {file = "arrow-1.2.3.tar.gz", hash = "sha256:3934b30ca1b9f292376d9db15b19446088d12ec58629bc3f0da28fd55fb633a1"}, +] +asttokens = [ + {file = "asttokens-2.2.1-py2.py3-none-any.whl", hash = "sha256:6b0ac9e93fb0335014d382b8fa9b3afa7df546984258005da0b9e7095b3deb1c"}, + {file = "asttokens-2.2.1.tar.gz", hash = "sha256:4622110b2a6f30b77e1473affaa97e711bc2f07d3f10848420ff1898edbe94f3"}, +] +async-timeout = [ + {file = "async-timeout-4.0.2.tar.gz", hash = "sha256:2163e1640ddb52b7a8c80d0a67a08587e5d245cc9c553a74a847056bc2976b15"}, + {file = 
"async_timeout-4.0.2-py3-none-any.whl", hash = "sha256:8ca1e4fcf50d07413d66d1a5e416e42cfdf5851c981d679a09851a6853383b3c"}, +] +attrs = [ + {file = "attrs-22.1.0-py2.py3-none-any.whl", hash = "sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c"}, + {file = "attrs-22.1.0.tar.gz", hash = "sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6"}, +] +backcall = [ + {file = "backcall-0.2.0-py2.py3-none-any.whl", hash = "sha256:fbbce6a29f263178a1f7915c1940bde0ec2b2a967566fe1c65c1dfb7422bd255"}, + {file = "backcall-0.2.0.tar.gz", hash = "sha256:5cbdbf27be5e7cfadb448baf0aa95508f91f2bbc6c6437cd9cd06e2a4c215e1e"}, +] +beautifulsoup4 = [ + {file = "beautifulsoup4-4.11.1-py3-none-any.whl", hash = "sha256:58d5c3d29f5a36ffeb94f02f0d786cd53014cf9b3b3951d42e0080d8a9498d30"}, + {file = "beautifulsoup4-4.11.1.tar.gz", hash = "sha256:ad9aa55b65ef2808eb405f46cf74df7fcb7044d5cbc26487f96eb2ef2e436693"}, +] +black = [ + {file = "black-22.12.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9eedd20838bd5d75b80c9f5487dbcb06836a43833a37846cf1d8c1cc01cef59d"}, + {file = "black-22.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:159a46a4947f73387b4d83e87ea006dbb2337eab6c879620a3ba52699b1f4351"}, + {file = "black-22.12.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d30b212bffeb1e252b31dd269dfae69dd17e06d92b87ad26e23890f3efea366f"}, + {file = "black-22.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:7412e75863aa5c5411886804678b7d083c7c28421210180d67dfd8cf1221e1f4"}, + {file = "black-22.12.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c116eed0efb9ff870ded8b62fe9f28dd61ef6e9ddd28d83d7d264a38417dcee2"}, + {file = "black-22.12.0-cp37-cp37m-win_amd64.whl", hash = "sha256:1f58cbe16dfe8c12b7434e50ff889fa479072096d79f0a7f25e4ab8e94cd8350"}, + {file = "black-22.12.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77d86c9f3db9b1bf6761244bc0b3572a546f5fe37917a044e02f3166d5aafa7d"}, + {file = "black-22.12.0-cp38-cp38-win_amd64.whl", hash = "sha256:82d9fe8fee3401e02e79767016b4907820a7dc28d70d137eb397b92ef3cc5bfc"}, + {file = "black-22.12.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:101c69b23df9b44247bd88e1d7e90154336ac4992502d4197bdac35dd7ee3320"}, + {file = "black-22.12.0-cp39-cp39-win_amd64.whl", hash = "sha256:559c7a1ba9a006226f09e4916060982fd27334ae1998e7a38b3f33a37f7a2148"}, + {file = "black-22.12.0-py3-none-any.whl", hash = "sha256:436cc9167dd28040ad90d3b404aec22cedf24a6e4d7de221bec2730ec0c97bcf"}, + {file = "black-22.12.0.tar.gz", hash = "sha256:229351e5a18ca30f447bf724d007f890f97e13af070bb6ad4c0a441cd7596a2f"}, +] +bleach = [ + {file = "bleach-5.0.1-py3-none-any.whl", hash = "sha256:085f7f33c15bd408dd9b17a4ad77c577db66d76203e5984b1bd59baeee948b2a"}, + {file = "bleach-5.0.1.tar.gz", hash = "sha256:0d03255c47eb9bd2f26aa9bb7f2107732e7e8fe195ca2f64709fcf3b0a4a085c"}, +] +blis = [ {file = "blis-0.7.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b3ea73707a7938304c08363a0b990600e579bfb52dece7c674eafac4bf2df9f7"}, {file = "blis-0.7.9-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e85993364cae82707bfe7e637bee64ec96e232af31301e5c81a351778cb394b9"}, {file = "blis-0.7.9-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d205a7e69523e2bacdd67ea906b82b84034067e0de83b33bd83eb96b9e844ae3"}, @@ -278,42 +2376,18 @@ files = [ {file = "blis-0.7.9-cp39-cp39-win_amd64.whl", hash = 
"sha256:d81c3f627d33545fc25c9dcb5fee66c476d89288a27d63ac16ea63453401ffd5"}, {file = "blis-0.7.9.tar.gz", hash = "sha256:29ef4c25007785a90ffc2f0ab3d3bd3b75cd2d7856a9a482b7d0dac8d511a09d"}, ] - -[package.dependencies] -numpy = ">=1.15.0" - -[[package]] -name = "catalogue" -version = "2.0.8" -description = "Super lightweight function registries for your library" -category = "main" -optional = true -python-versions = ">=3.6" -files = [ +blobfile = [ + {file = "blobfile-2.0.0-py3-none-any.whl", hash = "sha256:e8701f253c4510edc24290f48198f6b8edde30774387df05bc932ff927519904"}, +] +catalogue = [ {file = "catalogue-2.0.8-py3-none-any.whl", hash = "sha256:2d786e229d8d202b4f8a2a059858e45a2331201d831e39746732daa704b99f69"}, {file = "catalogue-2.0.8.tar.gz", hash = "sha256:b325c77659208bfb6af1b0d93b1a1aa4112e1bb29a4c5ced816758a722f0e388"}, ] - -[[package]] -name = "certifi" -version = "2022.12.7" -description = "Python package for providing Mozilla's CA Bundle." -category = "main" -optional = false -python-versions = ">=3.6" -files = [ +certifi = [ {file = "certifi-2022.12.7-py3-none-any.whl", hash = "sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18"}, {file = "certifi-2022.12.7.tar.gz", hash = "sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3"}, ] - -[[package]] -name = "cffi" -version = "1.15.1" -description = "Foreign Function Interface for Python calling C code." -category = "dev" -optional = false -python-versions = "*" -files = [ +cffi = [ {file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"}, {file = "cffi-1.15.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2"}, {file = "cffi-1.15.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914"}, @@ -379,94 +2453,27 @@ files = [ {file = "cffi-1.15.1-cp39-cp39-win_amd64.whl", hash = "sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c"}, {file = "cffi-1.15.1.tar.gz", hash = "sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9"}, ] - -[package.dependencies] -pycparser = "*" - -[[package]] -name = "charset-normalizer" -version = "2.1.1" -description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." -category = "main" -optional = false -python-versions = ">=3.6.0" -files = [ +charset-normalizer = [ {file = "charset-normalizer-2.1.1.tar.gz", hash = "sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845"}, {file = "charset_normalizer-2.1.1-py3-none-any.whl", hash = "sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f"}, ] - -[package.extras] -unicode-backport = ["unicodedata2"] - -[[package]] -name = "click" -version = "8.1.3" -description = "Composable command line interface toolkit" -category = "main" -optional = false -python-versions = ">=3.7" -files = [ +click = [ {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"}, {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"}, ] - -[package.dependencies] -colorama = {version = "*", markers = "platform_system == \"Windows\""} - -[[package]] -name = "colorama" -version = "0.4.6" -description = "Cross-platform colored terminal text." 
-category = "main" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -files = [ +colorama = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] - -[[package]] -name = "comm" -version = "0.1.2" -description = "Jupyter Python Comm implementation, for usage in ipykernel, xeus-python etc." -category = "dev" -optional = false -python-versions = ">=3.6" -files = [ +comm = [ {file = "comm-0.1.2-py3-none-any.whl", hash = "sha256:9f3abf3515112fa7c55a42a6a5ab358735c9dccc8b5910a9d8e3ef5998130666"}, {file = "comm-0.1.2.tar.gz", hash = "sha256:3e2f5826578e683999b93716285b3b1f344f157bf75fa9ce0a797564e742f062"}, ] - -[package.dependencies] -traitlets = ">=5.3" - -[package.extras] -test = ["pytest"] - -[[package]] -name = "confection" -version = "0.0.3" -description = "The sweetest config system for Python" -category = "main" -optional = true -python-versions = ">=3.6" -files = [ +confection = [ {file = "confection-0.0.3-py3-none-any.whl", hash = "sha256:51af839c1240430421da2b248541ebc95f9d0ee385bcafa768b8acdbd2b0111d"}, {file = "confection-0.0.3.tar.gz", hash = "sha256:4fec47190057c43c9acbecb8b1b87a9bf31c469caa0d6888a5b9384432fdba5a"}, ] - -[package.dependencies] -pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<1.11.0" -srsly = ">=2.4.0,<3.0.0" - -[[package]] -name = "coverage" -version = "6.5.0" -description = "Code coverage measurement for Python" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ +coverage = [ {file = "coverage-6.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ef8674b0ee8cc11e2d574e3e2998aea5df5ab242e012286824ea3c6970580e53"}, {file = "coverage-6.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:784f53ebc9f3fd0e2a3f6a78b2be1bd1f5575d7863e10c6e12504f240fd06660"}, {file = "coverage-6.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4a5be1748d538a710f87542f22c2cad22f80545a847ad91ce45e77417293eb4"}, @@ -518,21 +2525,7 @@ files = [ {file = "coverage-6.5.0-pp36.pp37.pp38-none-any.whl", hash = "sha256:1431986dac3923c5945271f169f59c45b8802a114c8f548d611f2015133df77a"}, {file = "coverage-6.5.0.tar.gz", hash = "sha256:f642e90754ee3e06b0e7e51bce3379590e76b7f76b708e1a71ff043f87025c84"}, ] - -[package.dependencies] -tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} - -[package.extras] -toml = ["tomli"] - -[[package]] -name = "cymem" -version = "2.0.7" -description = "Manage calls to calloc/free through Cython" -category = "main" -optional = true -python-versions = "*" -files = [ +cymem = [ {file = "cymem-2.0.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4981fc9182cc1fe54bfedf5f73bfec3ce0c27582d9be71e130c46e35958beef0"}, {file = "cymem-2.0.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:42aedfd2e77aa0518a24a2a60a2147308903abc8b13c84504af58539c39e52a3"}, {file = "cymem-2.0.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c183257dc5ab237b664f64156c743e788f562417c74ea58c5a3939fe2d48d6f6"}, @@ -562,15 +2555,7 @@ files = [ {file = "cymem-2.0.7-cp39-cp39-win_amd64.whl", hash = "sha256:59a09cf0e71b1b88bfa0de544b801585d81d06ea123c1725e7c5da05b7ca0d20"}, {file = "cymem-2.0.7.tar.gz", hash = "sha256:e6034badb5dd4e10344211c81f16505a55553a7164adc314c75bd80cf07e57a8"}, ] - -[[package]] -name 
= "debugpy" -version = "1.6.4" -description = "An implementation of the Debug Adapter Protocol for Python" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ +debugpy = [ {file = "debugpy-1.6.4-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:6ae238943482c78867ac707c09122688efb700372b617ffd364261e5e41f7a2f"}, {file = "debugpy-1.6.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a39e7da178e1f22f4bc04b57f085e785ed1bcf424aaf318835a1a7129eefe35"}, {file = "debugpy-1.6.4-cp310-cp310-win32.whl", hash = "sha256:143f79d0798a9acea21cd1d111badb789f19d414aec95fa6389cfea9485ddfb1"}, @@ -590,134 +2575,39 @@ files = [ {file = "debugpy-1.6.4-py2.py3-none-any.whl", hash = "sha256:e886a1296cd20a10172e94788009ce74b759e54229ebd64a43fa5c2b4e62cd76"}, {file = "debugpy-1.6.4.zip", hash = "sha256:d5ab9bd3f4e7faf3765fd52c7c43c074104ab1e109621dc73219099ed1a5399d"}, ] - -[[package]] -name = "decorator" -version = "5.1.1" -description = "Decorators for Humans" -category = "dev" -optional = false -python-versions = ">=3.5" -files = [ +decorator = [ {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, ] - -[[package]] -name = "defusedxml" -version = "0.7.1" -description = "XML bomb protection for Python stdlib modules" -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -files = [ +defusedxml = [ {file = "defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61"}, {file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"}, ] - -[[package]] -name = "dill" -version = "0.3.6" -description = "serialize all of python" -category = "main" -optional = true -python-versions = ">=3.7" -files = [ +dill = [ {file = "dill-0.3.6-py3-none-any.whl", hash = "sha256:a07ffd2351b8c678dfc4a856a3005f8067aea51d6ba6c700796a4d9e280f39f0"}, {file = "dill-0.3.6.tar.gz", hash = "sha256:e5db55f3687856d8fbdab002ed78544e1c4559a130302693d839dfe8f93f2373"}, ] - -[package.extras] -graph = ["objgraph (>=1.7.2)"] - -[[package]] -name = "elastic-transport" -version = "8.4.0" -description = "Transport classes and utilities shared among Python Elastic client libraries" -category = "main" -optional = true -python-versions = ">=3.6" -files = [ +elastic-transport = [ {file = "elastic-transport-8.4.0.tar.gz", hash = "sha256:b9ad708ceb7fcdbc6b30a96f886609a109f042c0b9d9f2e44403b3133ba7ff10"}, {file = "elastic_transport-8.4.0-py3-none-any.whl", hash = "sha256:19db271ab79c9f70f8c43f8f5b5111408781a6176b54ab2e54d713b6d9ceb815"}, ] - -[package.dependencies] -certifi = "*" -urllib3 = ">=1.26.2,<2" - -[package.extras] -develop = ["aiohttp", "mock", "pytest", "pytest-asyncio", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests", "trustme"] - -[[package]] -name = "elasticsearch" -version = "8.5.3" -description = "Python client for Elasticsearch" -category = "main" -optional = true -python-versions = ">=3.6, <4" -files = [ +elasticsearch = [ {file = "elasticsearch-8.5.3-py3-none-any.whl", hash = "sha256:f09adbea8caa633ff79e8fe115fb1d2b635426fe1a23e7e8e3bd7cce5ac3eb70"}, {file = "elasticsearch-8.5.3.tar.gz", hash = "sha256:4b71ad05b36243c3b13f1c89b3ede4357011eece68917e293c43d4177d565838"}, ] - -[package.dependencies] -elastic-transport = ">=8,<9" 
- -[package.extras] -async = ["aiohttp (>=3,<4)"] -requests = ["requests (>=2.4.0,<3.0.0)"] - -[[package]] -name = "entrypoints" -version = "0.4" -description = "Discover and load entry points from installed packages." -category = "dev" -optional = false -python-versions = ">=3.6" -files = [ +entrypoints = [ {file = "entrypoints-0.4-py3-none-any.whl", hash = "sha256:f174b5ff827504fd3cd97cc3f8649f3693f51538c7e4bdf3ef002c8429d42f9f"}, {file = "entrypoints-0.4.tar.gz", hash = "sha256:b706eddaa9218a19ebcd67b56818f05bb27589b1ca9e8d797b74affad4ccacd4"}, ] - -[[package]] -name = "exceptiongroup" -version = "1.0.4" -description = "Backport of PEP 654 (exception groups)" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ +exceptiongroup = [ {file = "exceptiongroup-1.0.4-py3-none-any.whl", hash = "sha256:542adf9dea4055530d6e1279602fa5cb11dab2395fa650b8674eaec35fc4a828"}, {file = "exceptiongroup-1.0.4.tar.gz", hash = "sha256:bd14967b79cd9bdb54d97323216f8fdf533e278df937aa2a90089e7d6e06e5ec"}, ] - -[package.extras] -test = ["pytest (>=6)"] - -[[package]] -name = "executing" -version = "1.2.0" -description = "Get the currently executing AST node of a frame, and other information" -category = "dev" -optional = false -python-versions = "*" -files = [ +executing = [ {file = "executing-1.2.0-py2.py3-none-any.whl", hash = "sha256:0314a69e37426e3608aada02473b4161d4caf5a4b244d1d0c48072b8fee7bacc"}, {file = "executing-1.2.0.tar.gz", hash = "sha256:19da64c18d2d851112f09c287f8d3dbbdf725ab0e569077efb6cdcbd3497c107"}, ] - -[package.extras] -tests = ["asttokens", "littleutils", "pytest", "rich"] - -[[package]] -name = "faiss-cpu" -version = "1.7.3" -description = "A library for efficient similarity search and clustering of dense vectors." -category = "main" -optional = true -python-versions = "*" -files = [ +faiss-cpu = [ {file = "faiss-cpu-1.7.3.tar.gz", hash = "sha256:cb71fe3f2934732d157d9d8cfb6ed2dd4020a0065571c84842ff6a3f0beab310"}, {file = "faiss_cpu-1.7.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:343f025e0846239d987d0c719772387ad685b74e5ef62b2e5616cabef9062729"}, {file = "faiss_cpu-1.7.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8b7b1cf693d7c24b5a633ff024717bd715fec501af4854357da0805b4899bcec"}, @@ -744,91 +2634,27 @@ files = [ {file = "faiss_cpu-1.7.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a14d832b5361ce9af21977eb1dcdebe23b9edcc12aad40316df7ca1bd86bc6b5"}, {file = "faiss_cpu-1.7.3-cp39-cp39-win_amd64.whl", hash = "sha256:52df8895c5e59d1c9eda368a63790381a6f7fceddb22bed08f9c90a706d8a148"}, ] - -[[package]] -name = "fastjsonschema" -version = "2.16.2" -description = "Fastest Python implementation of JSON schema" -category = "dev" -optional = false -python-versions = "*" -files = [ +fastjsonschema = [ {file = "fastjsonschema-2.16.2-py3-none-any.whl", hash = "sha256:21f918e8d9a1a4ba9c22e09574ba72267a6762d47822db9add95f6454e51cc1c"}, {file = "fastjsonschema-2.16.2.tar.gz", hash = "sha256:01e366f25d9047816fe3d288cbfc3e10541daf0af2044763f3d0ade42476da18"}, ] - -[package.extras] -devel = ["colorama", "json-spec", "jsonschema", "pylint", "pytest", "pytest-benchmark", "pytest-cache", "validictory"] - -[[package]] -name = "filelock" -version = "3.8.2" -description = "A platform independent file lock." 
-category = "main" -optional = true -python-versions = ">=3.7" -files = [ +filelock = [ {file = "filelock-3.8.2-py3-none-any.whl", hash = "sha256:8df285554452285f79c035efb0c861eb33a4bcfa5b7a137016e32e6a90f9792c"}, {file = "filelock-3.8.2.tar.gz", hash = "sha256:7565f628ea56bfcd8e54e42bdc55da899c85c1abfe1b5bcfd147e9188cebb3b2"}, ] - -[package.extras] -docs = ["furo (>=2022.9.29)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.5)"] -testing = ["covdefaults (>=2.2.2)", "coverage (>=6.5)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-timeout (>=2.1)"] - -[[package]] -name = "flake8" -version = "6.0.0" -description = "the modular source code checker: pep8 pyflakes and co" -category = "dev" -optional = false -python-versions = ">=3.8.1" -files = [ +flake8 = [ {file = "flake8-6.0.0-py2.py3-none-any.whl", hash = "sha256:3833794e27ff64ea4e9cf5d410082a8b97ff1a06c16aa3d2027339cd0f1195c7"}, {file = "flake8-6.0.0.tar.gz", hash = "sha256:c61007e76655af75e6785a931f452915b371dc48f56efd765247c8fe68f2b181"}, ] - -[package.dependencies] -mccabe = ">=0.7.0,<0.8.0" -pycodestyle = ">=2.10.0,<2.11.0" -pyflakes = ">=3.0.0,<3.1.0" - -[[package]] -name = "flake8-docstrings" -version = "1.6.0" -description = "Extension for flake8 which uses pydocstyle to check docstrings" -category = "dev" -optional = false -python-versions = "*" -files = [ +flake8-docstrings = [ {file = "flake8-docstrings-1.6.0.tar.gz", hash = "sha256:9fe7c6a306064af8e62a055c2f61e9eb1da55f84bb39caef2b84ce53708ac34b"}, {file = "flake8_docstrings-1.6.0-py2.py3-none-any.whl", hash = "sha256:99cac583d6c7e32dd28bbfbef120a7c0d1b6dde4adb5a9fd441c4227a6534bde"}, ] - -[package.dependencies] -flake8 = ">=3" -pydocstyle = ">=2.1" - -[[package]] -name = "fqdn" -version = "1.5.1" -description = "Validates fully-qualified domain names against RFC 1123, so that they are acceptable to modern bowsers" -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0, !=3.1, !=3.2, !=3.3, !=3.4, <4" -files = [ +fqdn = [ {file = "fqdn-1.5.1-py3-none-any.whl", hash = "sha256:3a179af3761e4df6eb2e026ff9e1a3033d3587bf980a0b1b2e1e5d08d7358014"}, {file = "fqdn-1.5.1.tar.gz", hash = "sha256:105ed3677e767fb5ca086a0c1f4bb66ebc3c100be518f0e0d755d9eae164d89f"}, ] - -[[package]] -name = "greenlet" -version = "2.0.1" -description = "Lightweight in-process concurrent programming" -category = "main" -optional = false -python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*" -files = [ +greenlet = [ {file = "greenlet-2.0.1-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:9ed358312e63bf683b9ef22c8e442ef6c5c02973f0c2a939ec1d7b50c974015c"}, {file = "greenlet-2.0.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:4f09b0010e55bec3239278f642a8a506b91034f03a4fb28289a7d448a67f1515"}, {file = "greenlet-2.0.1-cp27-cp27m-win32.whl", hash = "sha256:1407fe45246632d0ffb7a3f4a520ba4e6051fc2cbd61ba1f806900c27f47706a"}, @@ -890,576 +2716,193 @@ files = [ {file = "greenlet-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:b23d2a46d53210b498e5b701a1913697671988f4bf8e10f935433f6e7c332fb6"}, {file = "greenlet-2.0.1.tar.gz", hash = "sha256:42e602564460da0e8ee67cb6d7236363ee5e131aa15943b6670e44e5c2ed0f67"}, ] - -[package.extras] -docs = ["Sphinx", "docutils (<0.18)"] -test = ["faulthandler", "objgraph", "psutil"] - -[[package]] -name = "huggingface-hub" -version = "0.11.1" -description = "Client library to download and publish models, datasets and other repos on the huggingface.co hub" -category = "main" -optional = true -python-versions = ">=3.7.0" -files = [ +huggingface-hub = [ 
{file = "huggingface_hub-0.11.1-py3-none-any.whl", hash = "sha256:11eed7aab4fa4d1fb532f2aea3379ef4998d9f6bc24a330834dfedd3dac7f441"}, {file = "huggingface_hub-0.11.1.tar.gz", hash = "sha256:8b9ebf9bbb1782f6f0419ec490973a6487c6c4ed84293a8a325d34c4f898f53f"}, ] - -[package.dependencies] -filelock = "*" -packaging = ">=20.9" -pyyaml = ">=5.1" -requests = "*" -tqdm = "*" -typing-extensions = ">=3.7.4.3" - -[package.extras] -all = ["InquirerPy (==0.3.4)", "Jinja2", "black (==22.3)", "flake8 (>=3.8.3)", "flake8-bugbear", "isort (>=5.5.4)", "jedi", "mypy (==0.982)", "pytest", "pytest-cov", "pytest-env", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3"] -cli = ["InquirerPy (==0.3.4)"] -dev = ["InquirerPy (==0.3.4)", "Jinja2", "black (==22.3)", "flake8 (>=3.8.3)", "flake8-bugbear", "isort (>=5.5.4)", "jedi", "mypy (==0.982)", "pytest", "pytest-cov", "pytest-env", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3"] -fastai = ["fastai (>=2.4)", "fastcore (>=1.3.27)", "toml"] -quality = ["black (==22.3)", "flake8 (>=3.8.3)", "flake8-bugbear", "isort (>=5.5.4)", "mypy (==0.982)"] -tensorflow = ["graphviz", "pydot", "tensorflow"] -testing = ["InquirerPy (==0.3.4)", "Jinja2", "isort (>=5.5.4)", "jedi", "pytest", "pytest-cov", "pytest-env", "soundfile"] -torch = ["torch"] -typing = ["types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3"] - -[[package]] -name = "idna" -version = "3.4" -description = "Internationalized Domain Names in Applications (IDNA)" -category = "main" -optional = false -python-versions = ">=3.5" -files = [ +idna = [ {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, ] - -[[package]] -name = "importlib-metadata" -version = "5.1.0" -description = "Read metadata from Python packages" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ +importlib-metadata = [ {file = "importlib_metadata-5.1.0-py3-none-any.whl", hash = "sha256:d84d17e21670ec07990e1044a99efe8d615d860fd176fc29ef5c306068fda313"}, {file = "importlib_metadata-5.1.0.tar.gz", hash = "sha256:d5059f9f1e8e41f80e9c56c2ee58811450c31984dfa625329ffd7c0dad88a73b"}, ] - -[package.dependencies] -zipp = ">=0.5" - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)"] -perf = ["ipython"] -testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)"] - -[[package]] -name = "importlib-resources" -version = "5.10.1" -description = "Read resources from Python packages" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ +importlib-resources = [ {file = "importlib_resources-5.10.1-py3-none-any.whl", hash = "sha256:c09b067d82e72c66f4f8eb12332f5efbebc9b007c0b6c40818108c9870adc363"}, {file = "importlib_resources-5.10.1.tar.gz", hash = "sha256:32bb095bda29741f6ef0e5278c42df98d135391bee5f932841efc0041f748dc3"}, ] - -[package.dependencies] -zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift 
(>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)"] -testing = ["flake8 (<5)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] - -[[package]] -name = "iniconfig" -version = "1.1.1" -description = "iniconfig: brain-dead simple config-ini parsing" -category = "dev" -optional = false -python-versions = "*" -files = [ +iniconfig = [ {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, ] - -[[package]] -name = "ipykernel" -version = "6.19.2" -description = "IPython Kernel for Jupyter" -category = "dev" -optional = false -python-versions = ">=3.8" -files = [ +ipykernel = [ {file = "ipykernel-6.19.2-py3-none-any.whl", hash = "sha256:1374a55c57ca7a7286c3d8b15799cd76e1a2381b6b1fea99c494b955988926b6"}, {file = "ipykernel-6.19.2.tar.gz", hash = "sha256:1ab68d3d3654196266baa93990055413e167263ffbe4cfe834f871bcd3d3506d"}, ] - -[package.dependencies] -appnope = {version = "*", markers = "platform_system == \"Darwin\""} -comm = ">=0.1.1" -debugpy = ">=1.0" -ipython = ">=7.23.1" -jupyter-client = ">=6.1.12" -matplotlib-inline = ">=0.1" -nest-asyncio = "*" -packaging = "*" -psutil = "*" -pyzmq = ">=17" -tornado = ">=6.1" -traitlets = ">=5.4.0" - -[package.extras] -cov = ["coverage[toml]", "curio", "matplotlib", "pytest-cov", "trio"] -docs = ["myst-parser", "pydata-sphinx-theme", "sphinx", "sphinxcontrib-github-alt"] -lint = ["black (>=22.6.0)", "mdformat (>0.7)", "ruff (>=0.0.156)"] -test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio", "pytest-cov", "pytest-timeout"] -typing = ["mypy (>=0.990)"] - -[[package]] -name = "ipython" -version = "8.7.0" -description = "IPython: Productive Interactive Computing" -category = "dev" -optional = false -python-versions = ">=3.8" -files = [ +ipython = [ {file = "ipython-8.7.0-py3-none-any.whl", hash = "sha256:352042ddcb019f7c04e48171b4dd78e4c4bb67bf97030d170e154aac42b656d9"}, {file = "ipython-8.7.0.tar.gz", hash = "sha256:882899fe78d5417a0aa07f995db298fa28b58faeba2112d2e3a4c95fe14bb738"}, ] - -[package.dependencies] -appnope = {version = "*", markers = "sys_platform == \"darwin\""} -backcall = "*" -colorama = {version = "*", markers = "sys_platform == \"win32\""} -decorator = "*" -jedi = ">=0.16" -matplotlib-inline = "*" -pexpect = {version = ">4.3", markers = "sys_platform != \"win32\""} -pickleshare = "*" -prompt-toolkit = ">=3.0.11,<3.1.0" -pygments = ">=2.4.0" -stack-data = "*" -traitlets = ">=5" - -[package.extras] -all = ["black", "curio", "docrepr", "ipykernel", "ipyparallel", "ipywidgets", "matplotlib", "matplotlib (!=3.2.0)", "nbconvert", "nbformat", "notebook", "numpy (>=1.20)", "pandas", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio", "qtconsole", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "trio", "typing-extensions"] -black = ["black"] -doc = ["docrepr", "ipykernel", "matplotlib", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "typing-extensions"] -kernel = ["ipykernel"] -nbconvert = ["nbconvert"] -nbformat = ["nbformat"] -notebook = ["ipywidgets", "notebook"] -parallel = ["ipyparallel"] -qtconsole = ["qtconsole"] -test = ["pytest (<7.1)", "pytest-asyncio", "testpath"] -test-extra = ["curio", "matplotlib 
(!=3.2.0)", "nbformat", "numpy (>=1.20)", "pandas", "pytest (<7.1)", "pytest-asyncio", "testpath", "trio"] - -[[package]] -name = "ipython-genutils" -version = "0.2.0" -description = "Vestigial utilities from IPython" -category = "dev" -optional = false -python-versions = "*" -files = [ +ipython-genutils = [ {file = "ipython_genutils-0.2.0-py2.py3-none-any.whl", hash = "sha256:72dd37233799e619666c9f639a9da83c34013a73e8bbc79a7a6348d93c61fab8"}, {file = "ipython_genutils-0.2.0.tar.gz", hash = "sha256:eb2e116e75ecef9d4d228fdc66af54269afa26ab4463042e33785b887c628ba8"}, ] - -[[package]] -name = "ipywidgets" -version = "8.0.3" -description = "Jupyter interactive widgets" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ +ipywidgets = [ {file = "ipywidgets-8.0.3-py3-none-any.whl", hash = "sha256:db7dd35fb1217636cbdbe0ba0bd2216d91a7695cb28b5c1dca17e62cd51378de"}, {file = "ipywidgets-8.0.3.tar.gz", hash = "sha256:2ec50df8538a1d4ddd5d454830d010922ad1015e81ac23efb27c0908bbc1eece"}, ] - -[package.dependencies] -ipykernel = ">=4.5.1" -ipython = ">=6.1.0" -jupyterlab-widgets = ">=3.0,<4.0" -traitlets = ">=4.3.1" -widgetsnbextension = ">=4.0,<5.0" - -[package.extras] -test = ["jsonschema", "pytest (>=3.6.0)", "pytest-cov", "pytz"] - -[[package]] -name = "isoduration" -version = "20.11.0" -description = "Operations with ISO 8601 durations" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ +isoduration = [ {file = "isoduration-20.11.0-py3-none-any.whl", hash = "sha256:b2904c2a4228c3d44f409c8ae8e2370eb21a26f7ac2ec5446df141dde3452042"}, {file = "isoduration-20.11.0.tar.gz", hash = "sha256:ac2f9015137935279eac671f94f89eb00584f940f5dc49462a0c4ee692ba1bd9"}, ] - -[package.dependencies] -arrow = ">=0.15.0" - -[[package]] -name = "isort" -version = "5.11.1" -description = "A Python utility / library to sort Python imports." -category = "dev" -optional = false -python-versions = ">=3.7.0" -files = [ +isort = [ {file = "isort-5.11.1-py3-none-any.whl", hash = "sha256:bf02c95f1fe615ebbe13a619cfed1619ddfe8941274c9e3de3143adca406cb02"}, {file = "isort-5.11.1.tar.gz", hash = "sha256:7c5bd998504826b6f1e6f2f98b533976b066baba29b8bae83fdeefd0b89c6b70"}, ] - -[package.extras] -colors = ["colorama (>=0.4.3,<0.5.0)"] -pipfile-deprecated-finder = ["pipreqs", "requirementslib"] -plugins = ["setuptools"] -requirements-deprecated-finder = ["pip-api", "pipreqs"] - -[[package]] -name = "jedi" -version = "0.18.2" -description = "An autocompletion tool for Python that can be used for text editors." 
-category = "dev" -optional = false -python-versions = ">=3.6" -files = [ +jedi = [ {file = "jedi-0.18.2-py2.py3-none-any.whl", hash = "sha256:203c1fd9d969ab8f2119ec0a3342e0b49910045abe6af0a3ae83a5764d54639e"}, {file = "jedi-0.18.2.tar.gz", hash = "sha256:bae794c30d07f6d910d32a7048af09b5a39ed740918da923c6b780790ebac612"}, ] - -[package.dependencies] -parso = ">=0.8.0,<0.9.0" - -[package.extras] -docs = ["Jinja2 (==2.11.3)", "MarkupSafe (==1.1.1)", "Pygments (==2.8.1)", "alabaster (==0.7.12)", "babel (==2.9.1)", "chardet (==4.0.0)", "commonmark (==0.8.1)", "docutils (==0.17.1)", "future (==0.18.2)", "idna (==2.10)", "imagesize (==1.2.0)", "mock (==1.0.1)", "packaging (==20.9)", "pyparsing (==2.4.7)", "pytz (==2021.1)", "readthedocs-sphinx-ext (==2.1.4)", "recommonmark (==0.5.0)", "requests (==2.25.1)", "six (==1.15.0)", "snowballstemmer (==2.1.0)", "sphinx (==1.8.5)", "sphinx-rtd-theme (==0.4.3)", "sphinxcontrib-serializinghtml (==1.1.4)", "sphinxcontrib-websupport (==1.2.4)", "urllib3 (==1.26.4)"] -qa = ["flake8 (==3.8.3)", "mypy (==0.782)"] -testing = ["Django (<3.1)", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] - -[[package]] -name = "jinja2" -version = "3.1.2" -description = "A very fast and expressive template engine." -category = "main" -optional = false -python-versions = ">=3.7" -files = [ +jinja2 = [ {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, ] - -[package.dependencies] -MarkupSafe = ">=2.0" - -[package.extras] -i18n = ["Babel (>=2.7)"] - -[[package]] -name = "joblib" -version = "1.2.0" -description = "Lightweight pipelining with Python functions" -category = "main" -optional = true -python-versions = ">=3.7" -files = [ +joblib = [ {file = "joblib-1.2.0-py3-none-any.whl", hash = "sha256:091138ed78f800342968c523bdde947e7a305b8594b910a0fea2ab83c3c6d385"}, {file = "joblib-1.2.0.tar.gz", hash = "sha256:e1cee4a79e4af22881164f218d4311f60074197fb707e082e803b61f6d137018"}, ] - -[[package]] -name = "jsonpointer" -version = "2.3" -description = "Identify specific nodes in a JSON document (RFC 6901)" -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ +jsonpointer = [ {file = "jsonpointer-2.3-py2.py3-none-any.whl", hash = "sha256:51801e558539b4e9cd268638c078c6c5746c9ac96bc38152d443400e4f3793e9"}, {file = "jsonpointer-2.3.tar.gz", hash = "sha256:97cba51526c829282218feb99dab1b1e6bdf8efd1c43dc9d57be093c0d69c99a"}, ] - -[[package]] -name = "jsonschema" -version = "4.17.3" -description = "An implementation of JSON Schema validation for Python" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ +jsonschema = [ {file = "jsonschema-4.17.3-py3-none-any.whl", hash = "sha256:a870ad254da1a8ca84b6a2905cac29d265f805acc57af304784962a2aa6508f6"}, {file = "jsonschema-4.17.3.tar.gz", hash = "sha256:0f864437ab8b6076ba6707453ef8f98a6a0d512a80e93f8abdb676f737ecb60d"}, ] - -[package.dependencies] -attrs = ">=17.4.0" -fqdn = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} -idna = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} -importlib-resources = {version = ">=1.4.0", markers = "python_version < \"3.9\""} -isoduration = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} -jsonpointer = {version = ">1.13", optional = true, markers = "extra == \"format-nongpl\""} 
-pkgutil-resolve-name = {version = ">=1.3.10", markers = "python_version < \"3.9\""} -pyrsistent = ">=0.14.0,<0.17.0 || >0.17.0,<0.17.1 || >0.17.1,<0.17.2 || >0.17.2" -rfc3339-validator = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} -rfc3986-validator = {version = ">0.1.0", optional = true, markers = "extra == \"format-nongpl\""} -uri-template = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} -webcolors = {version = ">=1.11", optional = true, markers = "extra == \"format-nongpl\""} - -[package.extras] -format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] -format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"] - -[[package]] -name = "jupyter" -version = "1.0.0" -description = "Jupyter metapackage. Install all the Jupyter components in one go." -category = "dev" -optional = false -python-versions = "*" -files = [ +jupyter = [ {file = "jupyter-1.0.0-py2.py3-none-any.whl", hash = "sha256:5b290f93b98ffbc21c0c7e749f054b3267782166d72fa5e3ed1ed4eaf34a2b78"}, {file = "jupyter-1.0.0.tar.gz", hash = "sha256:d9dc4b3318f310e34c82951ea5d6683f67bed7def4b259fafbfe4f1beb1d8e5f"}, {file = "jupyter-1.0.0.zip", hash = "sha256:3e1f86076bbb7c8c207829390305a2b1fe836d471ed54be66a3b8c41e7f46cc7"}, ] - -[package.dependencies] -ipykernel = "*" -ipywidgets = "*" -jupyter-console = "*" -nbconvert = "*" -notebook = "*" -qtconsole = "*" - -[[package]] -name = "jupyter-client" -version = "7.4.8" -description = "Jupyter protocol implementation and client libraries" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ +jupyter-client = [ {file = "jupyter_client-7.4.8-py3-none-any.whl", hash = "sha256:d4a67ae86ee014bcb96bd8190714f6af921f2b0f52f4208b086aa5acfd9f8d65"}, {file = "jupyter_client-7.4.8.tar.gz", hash = "sha256:109a3c33b62a9cf65aa8325850a0999a795fac155d9de4f7555aef5f310ee35a"}, ] - -[package.dependencies] -entrypoints = "*" -jupyter-core = ">=4.9.2" -nest-asyncio = ">=1.5.4" -python-dateutil = ">=2.8.2" -pyzmq = ">=23.0" -tornado = ">=6.2" -traitlets = "*" - -[package.extras] -doc = ["ipykernel", "myst-parser", "sphinx (>=1.3.6)", "sphinx-rtd-theme", "sphinxcontrib-github-alt"] -test = ["codecov", "coverage", "ipykernel (>=6.12)", "ipython", "mypy", "pre-commit", "pytest", "pytest-asyncio (>=0.18)", "pytest-cov", "pytest-timeout"] - -[[package]] -name = "jupyter-console" -version = "6.4.4" -description = "Jupyter terminal console" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ +jupyter-console = [ {file = "jupyter_console-6.4.4-py3-none-any.whl", hash = "sha256:756df7f4f60c986e7bc0172e4493d3830a7e6e75c08750bbe59c0a5403ad6dee"}, {file = "jupyter_console-6.4.4.tar.gz", hash = "sha256:172f5335e31d600df61613a97b7f0352f2c8250bbd1092ef2d658f77249f89fb"}, ] - -[package.dependencies] -ipykernel = "*" -ipython = "*" -jupyter-client = ">=7.0.0" -prompt-toolkit = ">=2.0.0,<3.0.0 || >3.0.0,<3.0.1 || >3.0.1,<3.1.0" -pygments = "*" - -[package.extras] -test = ["pexpect"] - -[[package]] -name = "jupyter-core" -version = "5.1.0" -description = "Jupyter core package. A base package on which Jupyter projects rely." 
-category = "dev" -optional = false -python-versions = ">=3.8" -files = [ +jupyter-core = [ {file = "jupyter_core-5.1.0-py3-none-any.whl", hash = "sha256:f5740d99606958544396914b08e67b668f45e7eff99ab47a7f4bcead419c02f4"}, {file = "jupyter_core-5.1.0.tar.gz", hash = "sha256:a5ae7c09c55c0b26f692ec69323ba2b62e8d7295354d20f6cd57b749de4a05bf"}, ] - -[package.dependencies] -platformdirs = ">=2.5" -pywin32 = {version = ">=1.0", markers = "sys_platform == \"win32\" and platform_python_implementation != \"PyPy\""} -traitlets = ">=5.3" - -[package.extras] -docs = ["myst-parser", "sphinxcontrib-github-alt", "traitlets"] -test = ["ipykernel", "pre-commit", "pytest", "pytest-cov", "pytest-timeout"] - -[[package]] -name = "jupyter-events" -version = "0.5.0" -description = "Jupyter Event System library" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ +jupyter-events = [ {file = "jupyter_events-0.5.0-py3-none-any.whl", hash = "sha256:6f7b67bf42b8a370c992187194ed02847dfa02307a7aebe9913e2d3979b9b6b8"}, {file = "jupyter_events-0.5.0.tar.gz", hash = "sha256:e27ffdd6138699d47d42cb65ae6d79334ff7c0d923694381c991ce56a140f2cd"}, ] - -[package.dependencies] -jsonschema = {version = ">=4.3.0", extras = ["format-nongpl"]} -python-json-logger = "*" -pyyaml = "*" -traitlets = "*" - -[package.extras] -cli = ["click", "rich"] -test = ["click", "coverage", "pre-commit", "pytest (>=6.1.0)", "pytest-asyncio (>=0.19.0)", "pytest-console-scripts", "pytest-cov", "rich"] - -[[package]] -name = "jupyter-server" -version = "2.0.1" -description = "The backend—i.e. core services, APIs, and REST endpoints—to Jupyter web applications." -category = "dev" -optional = false -python-versions = ">=3.8" -files = [ +jupyter-server = [ {file = "jupyter_server-2.0.1-py3-none-any.whl", hash = "sha256:3bc09974a5290249de6924a614933e6f4f3d6d11f3061423a9f4e0271064a8b3"}, {file = "jupyter_server-2.0.1.tar.gz", hash = "sha256:6e71268380ad7e4f2d9dda2f3e51a4fd4d1997b5390d5acdb74c7a195cfe4c00"}, ] - -[package.dependencies] -anyio = ">=3.1.0,<4" -argon2-cffi = "*" -jinja2 = "*" -jupyter-client = ">=7.4.4" -jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" -jupyter-events = ">=0.4.0" -jupyter-server-terminals = "*" -nbconvert = ">=6.4.4" -nbformat = ">=5.3.0" -packaging = "*" -prometheus-client = "*" -pywinpty = {version = "*", markers = "os_name == \"nt\""} -pyzmq = ">=24" -send2trash = "*" -terminado = ">=0.8.3" -tornado = ">=6.2.0" -traitlets = ">=5.6.0" -websocket-client = "*" - -[package.extras] -docs = ["docutils (<0.20)", "ipykernel", "jinja2", "jupyter-client", "jupyter-server", "mistune (<1.0.0)", "myst-parser", "nbformat", "prometheus-client", "pydata-sphinx-theme", "send2trash", "sphinxcontrib-github-alt", "sphinxcontrib-openapi", "sphinxemoji", "tornado"] -lint = ["black (>=22.6.0)", "mdformat (>0.7)", "ruff (>=0.0.156)"] -test = ["ipykernel", "pre-commit", "pytest (>=7.0)", "pytest-console-scripts", "pytest-jupyter[server] (>=0.4)", "pytest-timeout", "requests"] -typing = ["mypy (>=0.990)"] - -[[package]] -name = "jupyter-server-terminals" -version = "0.4.2" -description = "A Jupyter Server Extension Providing Terminals." 
-category = "dev" -optional = false -python-versions = ">=3.8" -files = [ +jupyter-server-terminals = [ {file = "jupyter_server_terminals-0.4.2-py3-none-any.whl", hash = "sha256:c0eaacee6cac21b597c23c38dd523dc4e9b947f97af5101e0396c08f28db3e37"}, {file = "jupyter_server_terminals-0.4.2.tar.gz", hash = "sha256:0e68cba38eb0f9f2d93f1160e0a7f84b943d0d0c4d2f77eeaabbb4a2919c47c6"}, ] - -[package.dependencies] -pywinpty = {version = ">=2.0.3", markers = "os_name == \"nt\""} -terminado = ">=0.8.3" - -[package.extras] -docs = ["jinja2", "jupyter-server", "mistune (<2.0)", "myst-parser", "nbformat", "packaging", "pydata-sphinx-theme", "sphinxcontrib-github-alt", "sphinxcontrib-openapi", "sphinxemoji", "tornado"] -test = ["coverage", "jupyter-server (>=2.0.0rc8)", "pytest (>=7.0)", "pytest-cov", "pytest-jupyter[server] (>=0.5.3)", "pytest-timeout"] - -[[package]] -name = "jupyterlab-pygments" -version = "0.2.2" -description = "Pygments theme using JupyterLab CSS variables" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ +jupyterlab-pygments = [ {file = "jupyterlab_pygments-0.2.2-py2.py3-none-any.whl", hash = "sha256:2405800db07c9f770863bcf8049a529c3dd4d3e28536638bd7c1c01d2748309f"}, {file = "jupyterlab_pygments-0.2.2.tar.gz", hash = "sha256:7405d7fde60819d905a9fa8ce89e4cd830e318cdad22a0030f7a901da705585d"}, ] - -[[package]] -name = "jupyterlab-widgets" -version = "3.0.4" -description = "Jupyter interactive widgets for JupyterLab" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ +jupyterlab-widgets = [ {file = "jupyterlab_widgets-3.0.4-py3-none-any.whl", hash = "sha256:4c9275daa6d20fc96c3aea45756ece7110850d035b0b93a6a40e918016b927da"}, {file = "jupyterlab_widgets-3.0.4.tar.gz", hash = "sha256:9a568e022b8bb53ab23291f6ddb52f8002b789c2c5763378cbc882be1d619be8"}, ] - -[[package]] -name = "langcodes" -version = "3.3.0" -description = "Tools for labeling human languages with IETF language tags" -category = "main" -optional = true -python-versions = ">=3.6" -files = [ +langcodes = [ {file = "langcodes-3.3.0-py3-none-any.whl", hash = "sha256:4d89fc9acb6e9c8fdef70bcdf376113a3db09b67285d9e1d534de6d8818e7e69"}, {file = "langcodes-3.3.0.tar.gz", hash = "sha256:794d07d5a28781231ac335a1561b8442f8648ca07cd518310aeb45d6f0807ef6"}, ] - -[package.extras] -data = ["language-data (>=1.1,<2.0)"] - -[[package]] -name = "manifest-ml" -version = "0.0.1" -description = "Manifest for Prompt Programming Foundation Models." 
-category = "main" -optional = true -python-versions = ">=3.8.0" -files = [ +lxml = [ + {file = "lxml-4.9.2-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:76cf573e5a365e790396a5cc2b909812633409306c6531a6877c59061e42c4f2"}, + {file = "lxml-4.9.2-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b1f42b6921d0e81b1bcb5e395bc091a70f41c4d4e55ba99c6da2b31626c44892"}, + {file = "lxml-4.9.2-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:9f102706d0ca011de571de32c3247c6476b55bb6bc65a20f682f000b07a4852a"}, + {file = "lxml-4.9.2-cp27-cp27m-win32.whl", hash = "sha256:8d0b4612b66ff5d62d03bcaa043bb018f74dfea51184e53f067e6fdcba4bd8de"}, + {file = "lxml-4.9.2-cp27-cp27m-win_amd64.whl", hash = "sha256:4c8f293f14abc8fd3e8e01c5bd86e6ed0b6ef71936ded5bf10fe7a5efefbaca3"}, + {file = "lxml-4.9.2-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2899456259589aa38bfb018c364d6ae7b53c5c22d8e27d0ec7609c2a1ff78b50"}, + {file = "lxml-4.9.2-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6749649eecd6a9871cae297bffa4ee76f90b4504a2a2ab528d9ebe912b101975"}, + {file = "lxml-4.9.2-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:a08cff61517ee26cb56f1e949cca38caabe9ea9fbb4b1e10a805dc39844b7d5c"}, + {file = "lxml-4.9.2-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:85cabf64adec449132e55616e7ca3e1000ab449d1d0f9d7f83146ed5bdcb6d8a"}, + {file = "lxml-4.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:8340225bd5e7a701c0fa98284c849c9b9fc9238abf53a0ebd90900f25d39a4e4"}, + {file = "lxml-4.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:1ab8f1f932e8f82355e75dda5413a57612c6ea448069d4fb2e217e9a4bed13d4"}, + {file = "lxml-4.9.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:699a9af7dffaf67deeae27b2112aa06b41c370d5e7633e0ee0aea2e0b6c211f7"}, + {file = "lxml-4.9.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b9cc34af337a97d470040f99ba4282f6e6bac88407d021688a5d585e44a23184"}, + {file = "lxml-4.9.2-cp310-cp310-win32.whl", hash = "sha256:d02a5399126a53492415d4906ab0ad0375a5456cc05c3fc0fc4ca11771745cda"}, + {file = "lxml-4.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:a38486985ca49cfa574a507e7a2215c0c780fd1778bb6290c21193b7211702ab"}, + {file = "lxml-4.9.2-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:c83203addf554215463b59f6399835201999b5e48019dc17f182ed5ad87205c9"}, + {file = "lxml-4.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:2a87fa548561d2f4643c99cd13131acb607ddabb70682dcf1dff5f71f781a4bf"}, + {file = "lxml-4.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:d6b430a9938a5a5d85fc107d852262ddcd48602c120e3dbb02137c83d212b380"}, + {file = "lxml-4.9.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:3efea981d956a6f7173b4659849f55081867cf897e719f57383698af6f618a92"}, + {file = "lxml-4.9.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:df0623dcf9668ad0445e0558a21211d4e9a149ea8f5666917c8eeec515f0a6d1"}, + {file = "lxml-4.9.2-cp311-cp311-win32.whl", hash = "sha256:da248f93f0418a9e9d94b0080d7ebc407a9a5e6d0b57bb30db9b5cc28de1ad33"}, + {file = "lxml-4.9.2-cp311-cp311-win_amd64.whl", hash = "sha256:3818b8e2c4b5148567e1b09ce739006acfaa44ce3156f8cbbc11062994b8e8dd"}, + {file = 
"lxml-4.9.2-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ca989b91cf3a3ba28930a9fc1e9aeafc2a395448641df1f387a2d394638943b0"}, + {file = "lxml-4.9.2-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:822068f85e12a6e292803e112ab876bc03ed1f03dddb80154c395f891ca6b31e"}, + {file = "lxml-4.9.2-cp35-cp35m-win32.whl", hash = "sha256:be7292c55101e22f2a3d4d8913944cbea71eea90792bf914add27454a13905df"}, + {file = "lxml-4.9.2-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:b26a29f0b7fc6f0897f043ca366142d2b609dc60756ee6e4e90b5f762c6adc53"}, + {file = "lxml-4.9.2-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:ab323679b8b3030000f2be63e22cdeea5b47ee0abd2d6a1dc0c8103ddaa56cd7"}, + {file = "lxml-4.9.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:689bb688a1db722485e4610a503e3e9210dcc20c520b45ac8f7533c837be76fe"}, + {file = "lxml-4.9.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:f49e52d174375a7def9915c9f06ec4e569d235ad428f70751765f48d5926678c"}, + {file = "lxml-4.9.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:36c3c175d34652a35475a73762b545f4527aec044910a651d2bf50de9c3352b1"}, + {file = "lxml-4.9.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a35f8b7fa99f90dd2f5dc5a9fa12332642f087a7641289ca6c40d6e1a2637d8e"}, + {file = "lxml-4.9.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:58bfa3aa19ca4c0f28c5dde0ff56c520fbac6f0daf4fac66ed4c8d2fb7f22e74"}, + {file = "lxml-4.9.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc718cd47b765e790eecb74d044cc8d37d58562f6c314ee9484df26276d36a38"}, + {file = "lxml-4.9.2-cp36-cp36m-win32.whl", hash = "sha256:d5bf6545cd27aaa8a13033ce56354ed9e25ab0e4ac3b5392b763d8d04b08e0c5"}, + {file = "lxml-4.9.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:05ca3f6abf5cf78fe053da9b1166e062ade3fa5d4f92b4ed688127ea7d7b1d03"}, + {file = "lxml-4.9.2-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:a5da296eb617d18e497bcf0a5c528f5d3b18dadb3619fbdadf4ed2356ef8d941"}, + {file = "lxml-4.9.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:04876580c050a8c5341d706dd464ff04fd597095cc8c023252566a8826505726"}, + {file = "lxml-4.9.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:c9ec3eaf616d67db0764b3bb983962b4f385a1f08304fd30c7283954e6a7869b"}, + {file = "lxml-4.9.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2a29ba94d065945944016b6b74e538bdb1751a1db6ffb80c9d3c2e40d6fa9894"}, + {file = "lxml-4.9.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a82d05da00a58b8e4c0008edbc8a4b6ec5a4bc1e2ee0fb6ed157cf634ed7fa45"}, + {file = "lxml-4.9.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:223f4232855ade399bd409331e6ca70fb5578efef22cf4069a6090acc0f53c0e"}, + {file = "lxml-4.9.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d17bc7c2ccf49c478c5bdd447594e82692c74222698cfc9b5daae7ae7e90743b"}, + {file = "lxml-4.9.2-cp37-cp37m-win32.whl", hash = "sha256:b64d891da92e232c36976c80ed7ebb383e3f148489796d8d31a5b6a677825efe"}, + {file = "lxml-4.9.2-cp37-cp37m-win_amd64.whl", hash = "sha256:a0a336d6d3e8b234a3aae3c674873d8f0e720b76bc1d9416866c41cd9500ffb9"}, + {file = "lxml-4.9.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:da4dd7c9c50c059aba52b3524f84d7de956f7fef88f0bafcf4ad7dde94a064e8"}, + 
{file = "lxml-4.9.2-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:821b7f59b99551c69c85a6039c65b75f5683bdc63270fec660f75da67469ca24"}, + {file = "lxml-4.9.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:e5168986b90a8d1f2f9dc1b841467c74221bd752537b99761a93d2d981e04889"}, + {file = "lxml-4.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:8e20cb5a47247e383cf4ff523205060991021233ebd6f924bca927fcf25cf86f"}, + {file = "lxml-4.9.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:13598ecfbd2e86ea7ae45ec28a2a54fb87ee9b9fdb0f6d343297d8e548392c03"}, + {file = "lxml-4.9.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:880bbbcbe2fca64e2f4d8e04db47bcdf504936fa2b33933efd945e1b429bea8c"}, + {file = "lxml-4.9.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7d2278d59425777cfcb19735018d897ca8303abe67cc735f9f97177ceff8027f"}, + {file = "lxml-4.9.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5344a43228767f53a9df6e5b253f8cdca7dfc7b7aeae52551958192f56d98457"}, + {file = "lxml-4.9.2-cp38-cp38-win32.whl", hash = "sha256:925073b2fe14ab9b87e73f9a5fde6ce6392da430f3004d8b72cc86f746f5163b"}, + {file = "lxml-4.9.2-cp38-cp38-win_amd64.whl", hash = "sha256:9b22c5c66f67ae00c0199f6055705bc3eb3fcb08d03d2ec4059a2b1b25ed48d7"}, + {file = "lxml-4.9.2-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:5f50a1c177e2fa3ee0667a5ab79fdc6b23086bc8b589d90b93b4bd17eb0e64d1"}, + {file = "lxml-4.9.2-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:090c6543d3696cbe15b4ac6e175e576bcc3f1ccfbba970061b7300b0c15a2140"}, + {file = "lxml-4.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:63da2ccc0857c311d764e7d3d90f429c252e83b52d1f8f1d1fe55be26827d1f4"}, + {file = "lxml-4.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:5b4545b8a40478183ac06c073e81a5ce4cf01bf1734962577cf2bb569a5b3bbf"}, + {file = "lxml-4.9.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2e430cd2824f05f2d4f687701144556646bae8f249fd60aa1e4c768ba7018947"}, + {file = "lxml-4.9.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6804daeb7ef69e7b36f76caddb85cccd63d0c56dedb47555d2fc969e2af6a1a5"}, + {file = "lxml-4.9.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a6e441a86553c310258aca15d1c05903aaf4965b23f3bc2d55f200804e005ee5"}, + {file = "lxml-4.9.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ca34efc80a29351897e18888c71c6aca4a359247c87e0b1c7ada14f0ab0c0fb2"}, + {file = "lxml-4.9.2-cp39-cp39-win32.whl", hash = "sha256:6b418afe5df18233fc6b6093deb82a32895b6bb0b1155c2cdb05203f583053f1"}, + {file = "lxml-4.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:f1496ea22ca2c830cbcbd473de8f114a320da308438ae65abad6bab7867fe38f"}, + {file = "lxml-4.9.2-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:b264171e3143d842ded311b7dccd46ff9ef34247129ff5bf5066123c55c2431c"}, + {file = "lxml-4.9.2-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:0dc313ef231edf866912e9d8f5a042ddab56c752619e92dfd3a2c277e6a7299a"}, + {file = "lxml-4.9.2-pp38-pypy38_pp73-macosx_10_15_x86_64.whl", hash = "sha256:16efd54337136e8cd72fb9485c368d91d77a47ee2d42b057564aae201257d419"}, + {file = 
"lxml-4.9.2-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:0f2b1e0d79180f344ff9f321327b005ca043a50ece8713de61d1cb383fb8ac05"}, + {file = "lxml-4.9.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:7b770ed79542ed52c519119473898198761d78beb24b107acf3ad65deae61f1f"}, + {file = "lxml-4.9.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:efa29c2fe6b4fdd32e8ef81c1528506895eca86e1d8c4657fda04c9b3786ddf9"}, + {file = "lxml-4.9.2-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7e91ee82f4199af8c43d8158024cbdff3d931df350252288f0d4ce656df7f3b5"}, + {file = "lxml-4.9.2-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:b23e19989c355ca854276178a0463951a653309fb8e57ce674497f2d9f208746"}, + {file = "lxml-4.9.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:01d36c05f4afb8f7c20fd9ed5badca32a2029b93b1750f571ccc0b142531caf7"}, + {file = "lxml-4.9.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7b515674acfdcadb0eb5d00d8a709868173acece5cb0be3dd165950cbfdf5409"}, + {file = "lxml-4.9.2.tar.gz", hash = "sha256:2455cfaeb7ac70338b3257f41e21f0724f4b5b0c0e7702da67ee6c3640835b67"}, +] +manifest-ml = [ {file = "manifest-ml-0.0.1.tar.gz", hash = "sha256:f828faf7de41fad5318254beec08acdf5142196e0e22203a4047412c2d3127a0"}, {file = "manifest_ml-0.0.1-py2.py3-none-any.whl", hash = "sha256:fc4e62e706fd767fd8851d91051fdb71bc79b2df9c66f5879736c46d8163a316"}, ] - -[package.dependencies] -dill = ">=0.3.5" -redis = ">=4.3.1" -requests = ">=2.27.1" -sqlitedict = ">=2.0.0" -tqdm = ">=4.64.0" - -[package.extras] -all = ["Flask (>=2.1.2)", "accelerate (>=0.10.0)", "autopep8 (>=1.6.0)", "black (>=22.3.0)", "docformatter (>=1.4)", "flake8 (>=4.0.0)", "flake8-docstrings (>=1.6.0)", "isort (>=5.9.3)", "mypy (>=0.950)", "nbsphinx (>=0.8.0)", "pep8-naming (>=0.12.1)", "pre-commit (>=2.14.0)", "pytest (>=7.0.0)", "pytest-cov (>=3.0.0)", "python-dotenv (>=0.20.0)", "recommonmark (>=0.7.1)", "sphinx-autobuild", "sphinx-rtd-theme (>=0.5.1)", "torch (>=1.8.0)", "transformers (>=4.20.0)", "twine", "types-PyYAML (>=6.0.7)", "types-protobuf (>=3.19.21)", "types-python-dateutil (>=2.8.16)", "types-redis (>=4.2.6)", "types-requests (>=2.27.29)", "types-setuptools (>=57.4.17)"] -api = ["Flask (>=2.1.2)", "accelerate (>=0.10.0)", "torch (>=1.8.0)", "transformers (>=4.20.0)"] -dev = ["autopep8 (>=1.6.0)", "black (>=22.3.0)", "docformatter (>=1.4)", "flake8 (>=4.0.0)", "flake8-docstrings (>=1.6.0)", "isort (>=5.9.3)", "mypy (>=0.950)", "nbsphinx (>=0.8.0)", "pep8-naming (>=0.12.1)", "pre-commit (>=2.14.0)", "pytest (>=7.0.0)", "pytest-cov (>=3.0.0)", "python-dotenv (>=0.20.0)", "recommonmark (>=0.7.1)", "sphinx-autobuild", "sphinx-rtd-theme (>=0.5.1)", "twine", "types-PyYAML (>=6.0.7)", "types-protobuf (>=3.19.21)", "types-python-dateutil (>=2.8.16)", "types-redis (>=4.2.6)", "types-requests (>=2.27.29)", "types-setuptools (>=57.4.17)"] - -[[package]] -name = "markupsafe" -version = "2.1.1" -description = "Safely add untrusted strings to HTML/XML markup." 
-category = "main" -optional = false -python-versions = ">=3.7" -files = [ +markupsafe = [ {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812"}, {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a"}, {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e"}, @@ -1501,54 +2944,19 @@ files = [ {file = "MarkupSafe-2.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247"}, {file = "MarkupSafe-2.1.1.tar.gz", hash = "sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b"}, ] - -[[package]] -name = "matplotlib-inline" -version = "0.1.6" -description = "Inline Matplotlib backend for Jupyter" -category = "dev" -optional = false -python-versions = ">=3.5" -files = [ +matplotlib-inline = [ {file = "matplotlib-inline-0.1.6.tar.gz", hash = "sha256:f887e5f10ba98e8d2b150ddcf4702c1e5f8b3a20005eb0f74bfdbd360ee6f304"}, {file = "matplotlib_inline-0.1.6-py3-none-any.whl", hash = "sha256:f1f41aab5328aa5aaea9b16d083b128102f8712542f819fe7e6a420ff581b311"}, ] - -[package.dependencies] -traitlets = "*" - -[[package]] -name = "mccabe" -version = "0.7.0" -description = "McCabe checker, plugin for flake8" -category = "dev" -optional = false -python-versions = ">=3.6" -files = [ +mccabe = [ {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, ] - -[[package]] -name = "mistune" -version = "2.0.4" -description = "A sane Markdown parser with useful plugins and renderers" -category = "dev" -optional = false -python-versions = "*" -files = [ +mistune = [ {file = "mistune-2.0.4-py2.py3-none-any.whl", hash = "sha256:182cc5ee6f8ed1b807de6b7bb50155df7b66495412836b9a74c8fbdfc75fe36d"}, {file = "mistune-2.0.4.tar.gz", hash = "sha256:9ee0a66053e2267aba772c71e06891fa8f1af6d4b01d5e84e267b4570d4d9808"}, ] - -[[package]] -name = "murmurhash" -version = "1.0.9" -description = "Cython bindings for MurmurHash" -category = "main" -optional = true -python-versions = ">=3.6" -files = [ +murmurhash = [ {file = "murmurhash-1.0.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:697ed01454d92681c7ae26eb1adcdc654b54062bcc59db38ed03cad71b23d449"}, {file = "murmurhash-1.0.9-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5ef31b5c11be2c064dbbdd0e22ab3effa9ceb5b11ae735295c717c120087dd94"}, {file = "murmurhash-1.0.9-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7a2bd203377a31bbb2d83fe3f968756d6c9bbfa36c64c6ebfc3c6494fc680bc"}, @@ -1578,15 +2986,7 @@ files = [ {file = "murmurhash-1.0.9-cp39-cp39-win_amd64.whl", hash = "sha256:379bf6b414bd27dd36772dd1570565a7d69918e980457370838bd514df0d91e9"}, {file = "murmurhash-1.0.9.tar.gz", hash = "sha256:fe7a38cb0d3d87c14ec9dddc4932ffe2dbc77d75469ab80fd5014689b0e07b58"}, ] - -[[package]] -name = "mypy" -version = "0.991" -description = "Optional static typing for Python" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ +mypy = [ {file = "mypy-0.991-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7d17e0a9707d0772f4a7b878f04b4fd11f6f5bcb9b3813975a9b13c9332153ab"}, {file = 
"mypy-0.991-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0714258640194d75677e86c786e80ccf294972cc76885d3ebbb560f11db0003d"}, {file = "mypy-0.991-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0c8f3be99e8a8bd403caa8c03be619544bc2c77a7093685dcf308c6b109426c6"}, @@ -1618,248 +3018,43 @@ files = [ {file = "mypy-0.991-py3-none-any.whl", hash = "sha256:de32edc9b0a7e67c2775e574cb061a537660e51210fbf6006b0b36ea695ae9bb"}, {file = "mypy-0.991.tar.gz", hash = "sha256:3c0165ba8f354a6d9881809ef29f1a9318a236a6d81c690094c5df32107bde06"}, ] - -[package.dependencies] -mypy-extensions = ">=0.4.3" -tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typing-extensions = ">=3.10" - -[package.extras] -dmypy = ["psutil (>=4.0)"] -install-types = ["pip"] -python2 = ["typed-ast (>=1.4.0,<2)"] -reports = ["lxml"] - -[[package]] -name = "mypy-extensions" -version = "0.4.3" -description = "Experimental type system extensions for programs checked with the mypy typechecker." -category = "dev" -optional = false -python-versions = "*" -files = [ +mypy-extensions = [ {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, ] - -[[package]] -name = "nbclassic" -version = "0.4.8" -description = "A web-based notebook environment for interactive computing" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ +nbclassic = [ {file = "nbclassic-0.4.8-py3-none-any.whl", hash = "sha256:cbf05df5842b420d5cece0143462380ea9d308ff57c2dc0eb4d6e035b18fbfb3"}, {file = "nbclassic-0.4.8.tar.gz", hash = "sha256:c74d8a500f8e058d46b576a41e5bc640711e1032cf7541dde5f73ea49497e283"}, ] - -[package.dependencies] -argon2-cffi = "*" -ipykernel = "*" -ipython-genutils = "*" -jinja2 = "*" -jupyter-client = ">=6.1.1" -jupyter-core = ">=4.6.1" -jupyter-server = ">=1.8" -nbconvert = ">=5" -nbformat = "*" -nest-asyncio = ">=1.5" -notebook-shim = ">=0.1.0" -prometheus-client = "*" -pyzmq = ">=17" -Send2Trash = ">=1.8.0" -terminado = ">=0.8.3" -tornado = ">=6.1" -traitlets = ">=4.2.1" - -[package.extras] -docs = ["myst-parser", "nbsphinx", "sphinx", "sphinx-rtd-theme", "sphinxcontrib-github-alt"] -json-logging = ["json-logging"] -test = ["coverage", "nbval", "pytest", "pytest-cov", "pytest-playwright", "pytest-tornasync", "requests", "requests-unixsocket", "testpath"] - -[[package]] -name = "nbclient" -version = "0.7.2" -description = "A client library for executing notebooks. Formerly nbconvert's ExecutePreprocessor." 
-category = "dev" -optional = false -python-versions = ">=3.7.0" -files = [ +nbclient = [ {file = "nbclient-0.7.2-py3-none-any.whl", hash = "sha256:d97ac6257de2794f5397609df754fcbca1a603e94e924eb9b99787c031ae2e7c"}, {file = "nbclient-0.7.2.tar.gz", hash = "sha256:884a3f4a8c4fc24bb9302f263e0af47d97f0d01fe11ba714171b320c8ac09547"}, ] - -[package.dependencies] -jupyter-client = ">=6.1.12" -jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" -nbformat = ">=5.1" -traitlets = ">=5.3" - -[package.extras] -dev = ["pre-commit"] -docs = ["autodoc-traits", "mock", "moto", "myst-parser", "nbclient[test]", "sphinx (>=1.7)", "sphinx-book-theme"] -test = ["ipykernel", "ipython", "ipywidgets", "nbconvert (>=7.0.0)", "pytest (>=7.0)", "pytest-asyncio", "pytest-cov (>=4.0)", "testpath", "xmltodict"] - -[[package]] -name = "nbconvert" -version = "7.2.6" -description = "Converting Jupyter Notebooks" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ +nbconvert = [ {file = "nbconvert-7.2.6-py3-none-any.whl", hash = "sha256:f933e82fe48b9a421e4252249f6c0a9a9940dc555642b4729f3f1f526bb16779"}, {file = "nbconvert-7.2.6.tar.gz", hash = "sha256:c9c0e4b26326f7658ebf4cda0acc591b9727c4e3ee3ede962f70c11833b71b40"}, ] - -[package.dependencies] -beautifulsoup4 = "*" -bleach = "*" -defusedxml = "*" -importlib-metadata = {version = ">=3.6", markers = "python_version < \"3.10\""} -jinja2 = ">=3.0" -jupyter-core = ">=4.7" -jupyterlab-pygments = "*" -markupsafe = ">=2.0" -mistune = ">=2.0.3,<3" -nbclient = ">=0.5.0" -nbformat = ">=5.1" -packaging = "*" -pandocfilters = ">=1.4.1" -pygments = ">=2.4.1" -tinycss2 = "*" -traitlets = ">=5.0" - -[package.extras] -all = ["nbconvert[docs,qtpdf,serve,test,webpdf]"] -docs = ["ipykernel", "ipython", "myst-parser", "nbsphinx (>=0.2.12)", "pydata-sphinx-theme", "sphinx (==5.0.2)"] -qtpdf = ["nbconvert[qtpng]"] -qtpng = ["pyqtwebengine (>=5.15)"] -serve = ["tornado (>=6.1)"] -test = ["ipykernel", "ipywidgets (>=7)", "pre-commit", "pyppeteer (>=1,<1.1)", "pytest", "pytest-dependency"] -webpdf = ["pyppeteer (>=1,<1.1)"] - -[[package]] -name = "nbformat" -version = "5.7.0" -description = "The Jupyter Notebook format" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ +nbformat = [ {file = "nbformat-5.7.0-py3-none-any.whl", hash = "sha256:1b05ec2c552c2f1adc745f4eddce1eac8ca9ffd59bb9fd859e827eaa031319f9"}, {file = "nbformat-5.7.0.tar.gz", hash = "sha256:1d4760c15c1a04269ef5caf375be8b98dd2f696e5eb9e603ec2bf091f9b0d3f3"}, ] - -[package.dependencies] -fastjsonschema = "*" -jsonschema = ">=2.6" -jupyter-core = "*" -traitlets = ">=5.1" - -[package.extras] -test = ["check-manifest", "pep440", "pre-commit", "pytest", "testpath"] - -[[package]] -name = "nest-asyncio" -version = "1.5.6" -description = "Patch asyncio to allow nested event loops" -category = "dev" -optional = false -python-versions = ">=3.5" -files = [ +nest-asyncio = [ {file = "nest_asyncio-1.5.6-py3-none-any.whl", hash = "sha256:b9a953fb40dceaa587d109609098db21900182b16440652454a146cffb06e8b8"}, {file = "nest_asyncio-1.5.6.tar.gz", hash = "sha256:d267cc1ff794403f7df692964d1d2a3fa9418ffea2a3f6859a439ff482fef290"}, ] - -[[package]] -name = "nltk" -version = "3.8" -description = "Natural Language Toolkit" -category = "main" -optional = true -python-versions = ">=3.7" -files = [ +nltk = [ {file = "nltk-3.8-py3-none-any.whl", hash = "sha256:3306502f487aa9fb0566e23443fa287a85a8d8d0821e2ef1655b4e3f0ea4aeee"}, {file = "nltk-3.8.zip", hash = 
"sha256:74b30826a37d78d53427105bbd037dd880251be269fca64ee530838a46ed55fc"}, ] - -[package.dependencies] -click = "*" -joblib = "*" -regex = ">=2021.8.3" -tqdm = "*" - -[package.extras] -all = ["matplotlib", "numpy", "pyparsing", "python-crfsuite", "requests", "scikit-learn", "scipy", "twython"] -corenlp = ["requests"] -machine-learning = ["numpy", "python-crfsuite", "scikit-learn", "scipy"] -plot = ["matplotlib"] -tgrep = ["pyparsing"] -twitter = ["twython"] - -[[package]] -name = "notebook" -version = "6.5.2" -description = "A web-based notebook environment for interactive computing" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ +notebook = [ {file = "notebook-6.5.2-py3-none-any.whl", hash = "sha256:e04f9018ceb86e4fa841e92ea8fb214f8d23c1cedfde530cc96f92446924f0e4"}, {file = "notebook-6.5.2.tar.gz", hash = "sha256:c1897e5317e225fc78b45549a6ab4b668e4c996fd03a04e938fe5e7af2bfffd0"}, ] - -[package.dependencies] -argon2-cffi = "*" -ipykernel = "*" -ipython-genutils = "*" -jinja2 = "*" -jupyter-client = ">=5.3.4" -jupyter-core = ">=4.6.1" -nbclassic = ">=0.4.7" -nbconvert = ">=5" -nbformat = "*" -nest-asyncio = ">=1.5" -prometheus-client = "*" -pyzmq = ">=17" -Send2Trash = ">=1.8.0" -terminado = ">=0.8.3" -tornado = ">=6.1" -traitlets = ">=4.2.1" - -[package.extras] -docs = ["myst-parser", "nbsphinx", "sphinx", "sphinx-rtd-theme", "sphinxcontrib-github-alt"] -json-logging = ["json-logging"] -test = ["coverage", "nbval", "pytest", "pytest-cov", "requests", "requests-unixsocket", "selenium (==4.1.5)", "testpath"] - -[[package]] -name = "notebook-shim" -version = "0.2.2" -description = "A shim layer for notebook traits and config" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ +notebook-shim = [ {file = "notebook_shim-0.2.2-py3-none-any.whl", hash = "sha256:9c6c30f74c4fbea6fce55c1be58e7fd0409b1c681b075dcedceb005db5026949"}, {file = "notebook_shim-0.2.2.tar.gz", hash = "sha256:090e0baf9a5582ff59b607af523ca2db68ff216da0c69956b62cab2ef4fc9c3f"}, ] - -[package.dependencies] -jupyter-server = ">=1.8,<3" - -[package.extras] -test = ["pytest", "pytest-console-scripts", "pytest-tornasync"] - -[[package]] -name = "numpy" -version = "1.23.5" -description = "NumPy is the fundamental package for array computing with Python." 
-category = "main" -optional = false -python-versions = ">=3.8" -files = [ +numpy = [ {file = "numpy-1.23.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9c88793f78fca17da0145455f0d7826bcb9f37da4764af27ac945488116efe63"}, {file = "numpy-1.23.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e9f4c4e51567b616be64e05d517c79a8a22f3606499941d97bb76f2ca59f982d"}, {file = "numpy-1.23.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7903ba8ab592b82014713c491f6c5d3a1cde5b4a3bf116404e08f5b52f6daf43"}, @@ -1889,145 +3084,43 @@ files = [ {file = "numpy-1.23.5-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:01dd17cbb340bf0fc23981e52e1d18a9d4050792e8fb8363cecbf066a84b827d"}, {file = "numpy-1.23.5.tar.gz", hash = "sha256:1b1766d6f397c18153d40015ddfc79ddb715cabadc04d2d228d4e5a8bc4ded1a"}, ] - -[[package]] -name = "packaging" -version = "22.0" -description = "Core utilities for Python packages" -category = "main" -optional = false -python-versions = ">=3.7" -files = [ +packaging = [ {file = "packaging-22.0-py3-none-any.whl", hash = "sha256:957e2148ba0e1a3b282772e791ef1d8083648bc131c8ab0c1feba110ce1146c3"}, {file = "packaging-22.0.tar.gz", hash = "sha256:2198ec20bd4c017b8f9717e00f0c8714076fc2fd93816750ab48e2c41de2cfd3"}, ] - -[[package]] -name = "pandocfilters" -version = "1.5.0" -description = "Utilities for writing pandoc filters in python" -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ +pandocfilters = [ {file = "pandocfilters-1.5.0-py2.py3-none-any.whl", hash = "sha256:33aae3f25fd1a026079f5d27bdd52496f0e0803b3469282162bafdcbdf6ef14f"}, {file = "pandocfilters-1.5.0.tar.gz", hash = "sha256:0b679503337d233b4339a817bfc8c50064e2eff681314376a47cb582305a7a38"}, ] - -[[package]] -name = "parso" -version = "0.8.3" -description = "A Python Parser" -category = "dev" -optional = false -python-versions = ">=3.6" -files = [ +parso = [ {file = "parso-0.8.3-py2.py3-none-any.whl", hash = "sha256:c001d4636cd3aecdaf33cbb40aebb59b094be2a74c556778ef5576c175e19e75"}, {file = "parso-0.8.3.tar.gz", hash = "sha256:8c07be290bb59f03588915921e29e8a50002acaf2cdc5fa0e0114f91709fafa0"}, ] - -[package.extras] -qa = ["flake8 (==3.8.3)", "mypy (==0.782)"] -testing = ["docopt", "pytest (<6.0.0)"] - -[[package]] -name = "pathspec" -version = "0.10.3" -description = "Utility library for gitignore style pattern matching of file paths." 
-category = "dev" -optional = false -python-versions = ">=3.7" -files = [ +pathspec = [ {file = "pathspec-0.10.3-py3-none-any.whl", hash = "sha256:3c95343af8b756205e2aba76e843ba9520a24dd84f68c22b9f93251507509dd6"}, {file = "pathspec-0.10.3.tar.gz", hash = "sha256:56200de4077d9d0791465aa9095a01d421861e405b5096955051deefd697d6f6"}, ] - -[[package]] -name = "pathy" -version = "0.10.1" -description = "pathlib.Path subclasses for local and cloud bucket storage" -category = "main" -optional = true -python-versions = ">= 3.6" -files = [ +pathy = [ {file = "pathy-0.10.1-py3-none-any.whl", hash = "sha256:a7613ee2d99a0a3300e1d836322e2d947c85449fde59f52906f995dbff67dad4"}, {file = "pathy-0.10.1.tar.gz", hash = "sha256:4cd6e71b4cd5ff875cfbb949ad9fa5519d8d1dbe69d5fc1d1b23aa3cb049618b"}, ] - -[package.dependencies] -smart-open = ">=5.2.1,<7.0.0" -typer = ">=0.3.0,<1.0.0" - -[package.extras] -all = ["azure-storage-blob", "boto3", "google-cloud-storage (>=1.26.0,<2.0.0)", "mock", "pytest", "pytest-coverage", "typer-cli"] -azure = ["azure-storage-blob"] -gcs = ["google-cloud-storage (>=1.26.0,<2.0.0)"] -s3 = ["boto3"] -test = ["mock", "pytest", "pytest-coverage", "typer-cli"] - -[[package]] -name = "pexpect" -version = "4.8.0" -description = "Pexpect allows easy control of interactive console applications." -category = "dev" -optional = false -python-versions = "*" -files = [ +pexpect = [ {file = "pexpect-4.8.0-py2.py3-none-any.whl", hash = "sha256:0b48a55dcb3c05f3329815901ea4fc1537514d6ba867a152b581d69ae3710937"}, {file = "pexpect-4.8.0.tar.gz", hash = "sha256:fc65a43959d153d0114afe13997d439c22823a27cefceb5ff35c2178c6784c0c"}, ] - -[package.dependencies] -ptyprocess = ">=0.5" - -[[package]] -name = "pickleshare" -version = "0.7.5" -description = "Tiny 'shelve'-like database with concurrency support" -category = "dev" -optional = false -python-versions = "*" -files = [ +pickleshare = [ {file = "pickleshare-0.7.5-py2.py3-none-any.whl", hash = "sha256:9649af414d74d4df115d5d718f82acb59c9d418196b7b4290ed47a12ce62df56"}, {file = "pickleshare-0.7.5.tar.gz", hash = "sha256:87683d47965c1da65cdacaf31c8441d12b8044cdec9aca500cd78fc2c683afca"}, ] - -[[package]] -name = "pkgutil-resolve-name" -version = "1.3.10" -description = "Resolve a name to an object." -category = "dev" -optional = false -python-versions = ">=3.6" -files = [ +pkgutil-resolve-name = [ {file = "pkgutil_resolve_name-1.3.10-py3-none-any.whl", hash = "sha256:ca27cc078d25c5ad71a9de0a7a330146c4e014c2462d9af19c6b828280649c5e"}, {file = "pkgutil_resolve_name-1.3.10.tar.gz", hash = "sha256:357d6c9e6a755653cfd78893817c0853af365dd51ec97f3d358a819373bbd174"}, ] - -[[package]] -name = "platformdirs" -version = "2.6.0" -description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
-category = "dev" -optional = false -python-versions = ">=3.7" -files = [ +platformdirs = [ {file = "platformdirs-2.6.0-py3-none-any.whl", hash = "sha256:1a89a12377800c81983db6be069ec068eee989748799b946cce2a6e80dcc54ca"}, {file = "platformdirs-2.6.0.tar.gz", hash = "sha256:b46ffafa316e6b83b47489d240ce17173f123a9b9c83282141c3daf26ad9ac2e"}, ] - -[package.extras] -docs = ["furo (>=2022.9.29)", "proselint (>=0.13)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.4)"] -test = ["appdirs (==1.4.4)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"] - -[[package]] -name = "playwright" -version = "1.28.0" -description = "A high-level API to automate web browsers" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ +playwright = [ {file = "playwright-1.28.0-py3-none-macosx_10_13_x86_64.whl", hash = "sha256:2e101b17e4d5252ef96c9dc8b2ac17f2980dde0420728c1c96a77eeaf6f9b11f"}, {file = "playwright-1.28.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:265f47aaa42c7986316100f5f468f8654e9a1609c2a2578743e25d058bddc1e6"}, {file = "playwright-1.28.0-py3-none-macosx_11_0_universal2.whl", hash = "sha256:a21ddd7b6f6afd434a73471f7cd39673286f0ca88b62b756d90264eb7b5a7daf"}, @@ -2036,36 +3129,11 @@ files = [ {file = "playwright-1.28.0-py3-none-win32.whl", hash = "sha256:8557d92718ce45814aff017fa1774ab92089e40b6c16a8073d5a7c4d583d4aed"}, {file = "playwright-1.28.0-py3-none-win_amd64.whl", hash = "sha256:794b9da616c03354a12e48ddf060a9e776ab59b90662b0131ff74ec1b25739f4"}, ] - -[package.dependencies] -greenlet = "2.0.1" -pyee = "9.0.4" -typing-extensions = {version = "*", markers = "python_version <= \"3.8\""} - -[[package]] -name = "pluggy" -version = "1.0.0" -description = "plugin and hook calling mechanisms for python" -category = "dev" -optional = false -python-versions = ">=3.6" -files = [ +pluggy = [ {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, ] - -[package.extras] -dev = ["pre-commit", "tox"] -testing = ["pytest", "pytest-benchmark"] - -[[package]] -name = "preshed" -version = "3.0.8" -description = "Cython hash table that trusts the keys are pre-hashed" -category = "main" -optional = true -python-versions = ">=3.6" -files = [ +preshed = [ {file = "preshed-3.0.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ea4b6df8ef7af38e864235256793bc3056e9699d991afcf6256fa298858582fc"}, {file = "preshed-3.0.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e945fc814bdc29564a2ce137c237b3a9848aa1e76a1160369b6e0d328151fdd"}, {file = "preshed-3.0.8-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9a4833530fe53001c351974e0c8bb660211b8d0358e592af185fec1ae12b2d0"}, @@ -2095,49 +3163,15 @@ files = [ {file = "preshed-3.0.8-cp39-cp39-win_amd64.whl", hash = "sha256:06793022a56782ef51d74f1399925a2ba958e50c5cfbc6fa5b25c4945e158a07"}, {file = "preshed-3.0.8.tar.gz", hash = "sha256:6c74c70078809bfddda17be96483c41d06d717934b07cab7921011d81758b357"}, ] - -[package.dependencies] -cymem = ">=2.0.2,<2.1.0" -murmurhash = ">=0.28.0,<1.1.0" - -[[package]] -name = "prometheus-client" -version = "0.15.0" -description = "Python client for the Prometheus monitoring system." 
-category = "dev" -optional = false -python-versions = ">=3.6" -files = [ +prometheus-client = [ {file = "prometheus_client-0.15.0-py3-none-any.whl", hash = "sha256:db7c05cbd13a0f79975592d112320f2605a325969b270a94b71dcabc47b931d2"}, {file = "prometheus_client-0.15.0.tar.gz", hash = "sha256:be26aa452490cfcf6da953f9436e95a9f2b4d578ca80094b4458930e5f584ab1"}, ] - -[package.extras] -twisted = ["twisted"] - -[[package]] -name = "prompt-toolkit" -version = "3.0.36" -description = "Library for building powerful interactive command lines in Python" -category = "dev" -optional = false -python-versions = ">=3.6.2" -files = [ +prompt-toolkit = [ {file = "prompt_toolkit-3.0.36-py3-none-any.whl", hash = "sha256:aa64ad242a462c5ff0363a7b9cfe696c20d55d9fc60c11fd8e632d064804d305"}, {file = "prompt_toolkit-3.0.36.tar.gz", hash = "sha256:3e163f254bef5a03b146397d7c1963bd3e2812f0964bb9a24e6ec761fd28db63"}, ] - -[package.dependencies] -wcwidth = "*" - -[[package]] -name = "psutil" -version = "5.9.4" -description = "Cross-platform lib for process and system monitoring in Python." -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ +psutil = [ {file = "psutil-5.9.4-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:c1ca331af862803a42677c120aff8a814a804e09832f166f226bfd22b56feee8"}, {file = "psutil-5.9.4-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:68908971daf802203f3d37e78d3f8831b6d1014864d7a85937941bb35f09aefe"}, {file = "psutil-5.9.4-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:3ff89f9b835100a825b14c2808a106b6fdcc4b15483141482a12c725e7f78549"}, @@ -2153,81 +3187,55 @@ files = [ {file = "psutil-5.9.4-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:6001c809253a29599bc0dfd5179d9f8a5779f9dffea1da0f13c53ee568115e1e"}, {file = "psutil-5.9.4.tar.gz", hash = "sha256:3d7f9739eb435d4b1338944abe23f49584bde5395f27487d2ee25ad9a8774a62"}, ] - -[package.extras] -test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] - -[[package]] -name = "ptyprocess" -version = "0.7.0" -description = "Run a subprocess in a pseudo terminal" -category = "dev" -optional = false -python-versions = "*" -files = [ +ptyprocess = [ {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, ] - -[[package]] -name = "pure-eval" -version = "0.2.2" -description = "Safely evaluate AST nodes without side effects" -category = "dev" -optional = false -python-versions = "*" -files = [ +pure-eval = [ {file = "pure_eval-0.2.2-py3-none-any.whl", hash = "sha256:01eaab343580944bc56080ebe0a674b39ec44a945e6d09ba7db3cb8cec289350"}, {file = "pure_eval-0.2.2.tar.gz", hash = "sha256:2b45320af6dfaa1750f543d714b6d1c520a1688dec6fd24d339063ce0aaa9ac3"}, ] - -[package.extras] -tests = ["pytest"] - -[[package]] -name = "py" -version = "1.11.0" -description = "library with cross-python path, ini-parsing, io, code, log facilities" -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -files = [ +py = [ {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, ] - -[[package]] -name = "pycodestyle" -version = "2.10.0" -description = "Python style guide checker" -category = "dev" -optional 
= false -python-versions = ">=3.6" -files = [ +pycodestyle = [ {file = "pycodestyle-2.10.0-py2.py3-none-any.whl", hash = "sha256:8a4eaf0d0495c7395bdab3589ac2db602797d76207242c17d470186815706610"}, {file = "pycodestyle-2.10.0.tar.gz", hash = "sha256:347187bdb476329d98f695c213d7295a846d1152ff4fe9bacb8a9590b8ee7053"}, ] - -[[package]] -name = "pycparser" -version = "2.21" -description = "C parser in Python" -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ +pycparser = [ {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, ] - -[[package]] -name = "pydantic" -version = "1.10.2" -description = "Data validation and settings management using python type hints" -category = "main" -optional = false -python-versions = ">=3.7" -files = [ +pycryptodomex = [ + {file = "pycryptodomex-3.16.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:b3d04c00d777c36972b539fb79958790126847d84ec0129fce1efef250bfe3ce"}, + {file = "pycryptodomex-3.16.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:e5a670919076b71522c7d567a9043f66f14b202414a63c3a078b5831ae342c03"}, + {file = "pycryptodomex-3.16.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:ce338a9703f54b2305a408fc9890eb966b727ce72b69f225898bb4e9d9ed3f1f"}, + {file = "pycryptodomex-3.16.0-cp27-cp27m-manylinux2014_aarch64.whl", hash = "sha256:a1c0ae7123448ecb034c75c713189cb00ebe2d415b11682865b6c54d200d9c93"}, + {file = "pycryptodomex-3.16.0-cp27-cp27m-win32.whl", hash = "sha256:8851585ff19871e5d69e1790f4ca5f6fd1699d6b8b14413b472a4c0dbc7ea780"}, + {file = "pycryptodomex-3.16.0-cp27-cp27m-win_amd64.whl", hash = "sha256:8dd2d9e3c617d0712ed781a77efd84ea579e76c5f9b2a4bc0b684ebeddf868b2"}, + {file = "pycryptodomex-3.16.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:2ad9bb86b355b6104796567dd44c215b3dc953ef2fae5e0bdfb8516731df92cf"}, + {file = "pycryptodomex-3.16.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:e25a2f5667d91795f9417cb856f6df724ccdb0cdd5cbadb212ee9bf43946e9f8"}, + {file = "pycryptodomex-3.16.0-cp27-cp27mu-manylinux2014_aarch64.whl", hash = "sha256:b0789a8490114a2936ed77c87792cfe77582c829cb43a6d86ede0f9624ba8aa3"}, + {file = "pycryptodomex-3.16.0-cp35-abi3-macosx_10_9_x86_64.whl", hash = "sha256:0da835af786fdd1c9930994c78b23e88d816dc3f99aa977284a21bbc26d19735"}, + {file = "pycryptodomex-3.16.0-cp35-abi3-manylinux2014_aarch64.whl", hash = "sha256:22aed0868622d95179217c298e37ed7410025c7b29dac236d3230617d1e4ed56"}, + {file = "pycryptodomex-3.16.0-cp35-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:1619087fb5b31510b0b0b058a54f001a5ffd91e6ffee220d9913064519c6a69d"}, + {file = "pycryptodomex-3.16.0-cp35-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:70288d9bfe16b2fd0d20b6c365db614428f1bcde7b20d56e74cf88ade905d9eb"}, + {file = "pycryptodomex-3.16.0-cp35-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:7993d26dae4d83b8f4ce605bb0aecb8bee330bb3c95475ef06f3694403621e71"}, + {file = "pycryptodomex-3.16.0-cp35-abi3-musllinux_1_1_i686.whl", hash = "sha256:1cda60207be8c1cf0b84b9138f9e3ca29335013d2b690774a5e94678ff29659a"}, + {file = "pycryptodomex-3.16.0-cp35-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:04610536921c1ec7adba158ef570348550c9f3a40bc24be9f8da2ef7ab387981"}, + {file = 
"pycryptodomex-3.16.0-cp35-abi3-win32.whl", hash = "sha256:daa67f5ebb6fbf1ee9c90decaa06ca7fc88a548864e5e484d52b0920a57fe8a5"}, + {file = "pycryptodomex-3.16.0-cp35-abi3-win_amd64.whl", hash = "sha256:231dc8008cbdd1ae0e34645d4523da2dbc7a88c325f0d4a59635a86ee25b41dd"}, + {file = "pycryptodomex-3.16.0-pp27-pypy_73-macosx_10_9_x86_64.whl", hash = "sha256:4dbbe18cc232b5980c7633972ae5417d0df76fe89e7db246eefd17ef4d8e6d7a"}, + {file = "pycryptodomex-3.16.0-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:893f8a97d533c66cc3a56e60dd3ed40a3494ddb4aafa7e026429a08772f8a849"}, + {file = "pycryptodomex-3.16.0-pp27-pypy_73-win32.whl", hash = "sha256:6a465e4f856d2a4f2a311807030c89166529ccf7ccc65bef398de045d49144b6"}, + {file = "pycryptodomex-3.16.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ba57ac7861fd2c837cdb33daf822f2a052ff57dd769a2107807f52a36d0e8d38"}, + {file = "pycryptodomex-3.16.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:f2b971a7b877348a27dcfd0e772a0343fb818df00b74078e91c008632284137d"}, + {file = "pycryptodomex-3.16.0-pp37-pypy37_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e2453162f473c1eae4826eb10cd7bce19b5facac86d17fb5f29a570fde145abd"}, + {file = "pycryptodomex-3.16.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:0ba28aa97cdd3ff5ed1a4f2b7f5cd04e721166bd75bd2b929e2734433882b583"}, + {file = "pycryptodomex-3.16.0.tar.gz", hash = "sha256:e9ba9d8ed638733c9e95664470b71d624a6def149e2db6cc52c1aca5a6a2df1d"}, +] +pydantic = [ {file = "pydantic-1.10.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bb6ad4489af1bac6955d38ebcb95079a836af31e4c4f74aba1ca05bb9f6027bd"}, {file = "pydantic-1.10.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a1f5a63a6dfe19d719b1b6e6106561869d2efaca6167f84f5ab9347887d78b98"}, {file = "pydantic-1.10.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:352aedb1d71b8b0736c6d56ad2bd34c6982720644b0624462059ab29bd6e5912"}, @@ -2265,82 +3273,23 @@ files = [ {file = "pydantic-1.10.2-py3-none-any.whl", hash = "sha256:1b6ee725bd6e83ec78b1aa32c5b1fa67a3a65badddde3976bca5fe4568f27709"}, {file = "pydantic-1.10.2.tar.gz", hash = "sha256:91b8e218852ef6007c2b98cd861601c6a09f1aa32bbbb74fab5b1c33d4a1e410"}, ] - -[package.dependencies] -typing-extensions = ">=4.1.0" - -[package.extras] -dotenv = ["python-dotenv (>=0.10.4)"] -email = ["email-validator (>=1.0.3)"] - -[[package]] -name = "pydocstyle" -version = "6.1.1" -description = "Python docstring style checker" -category = "dev" -optional = false -python-versions = ">=3.6" -files = [ +pydocstyle = [ {file = "pydocstyle-6.1.1-py3-none-any.whl", hash = "sha256:6987826d6775056839940041beef5c08cc7e3d71d63149b48e36727f70144dc4"}, {file = "pydocstyle-6.1.1.tar.gz", hash = "sha256:1d41b7c459ba0ee6c345f2eb9ae827cab14a7533a88c5c6f7e94923f72df92dc"}, ] - -[package.dependencies] -snowballstemmer = "*" - -[package.extras] -toml = ["toml"] - -[[package]] -name = "pyee" -version = "9.0.4" -description = "A port of node.js's EventEmitter to python." 
-category = "dev" -optional = false -python-versions = "*" -files = [ +pyee = [ {file = "pyee-9.0.4-py2.py3-none-any.whl", hash = "sha256:9f066570130c554e9cc12de5a9d86f57c7ee47fece163bbdaa3e9c933cfbdfa5"}, {file = "pyee-9.0.4.tar.gz", hash = "sha256:2770c4928abc721f46b705e6a72b0c59480c4a69c9a83ca0b00bb994f1ea4b32"}, ] - -[package.dependencies] -typing-extensions = "*" - -[[package]] -name = "pyflakes" -version = "3.0.1" -description = "passive checker of Python programs" -category = "dev" -optional = false -python-versions = ">=3.6" -files = [ +pyflakes = [ {file = "pyflakes-3.0.1-py2.py3-none-any.whl", hash = "sha256:ec55bf7fe21fff7f1ad2f7da62363d749e2a470500eab1b555334b67aa1ef8cf"}, {file = "pyflakes-3.0.1.tar.gz", hash = "sha256:ec8b276a6b60bd80defed25add7e439881c19e64850afd9b346283d4165fd0fd"}, ] - -[[package]] -name = "pygments" -version = "2.13.0" -description = "Pygments is a syntax highlighting package written in Python." -category = "dev" -optional = false -python-versions = ">=3.6" -files = [ +pygments = [ {file = "Pygments-2.13.0-py3-none-any.whl", hash = "sha256:f643f331ab57ba3c9d89212ee4a2dabc6e94f117cf4eefde99a0574720d14c42"}, {file = "Pygments-2.13.0.tar.gz", hash = "sha256:56a8508ae95f98e2b9bdf93a6be5ae3f7d8af858b43e02c5a2ff083726be40c1"}, ] - -[package.extras] -plugins = ["importlib-metadata"] - -[[package]] -name = "pyrsistent" -version = "0.19.2" -description = "Persistent/Functional/Immutable data structures" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ +pyrsistent = [ {file = "pyrsistent-0.19.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d6982b5a0237e1b7d876b60265564648a69b14017f3b5f908c5be2de3f9abb7a"}, {file = "pyrsistent-0.19.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:187d5730b0507d9285a96fca9716310d572e5464cadd19f22b63a6976254d77a"}, {file = "pyrsistent-0.19.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:055ab45d5911d7cae397dc418808d8802fb95262751872c841c170b0dbf51eed"}, @@ -2364,116 +3313,31 @@ files = [ {file = "pyrsistent-0.19.2-py3-none-any.whl", hash = "sha256:ea6b79a02a28550c98b6ca9c35b9f492beaa54d7c5c9e9949555893c8a9234d0"}, {file = "pyrsistent-0.19.2.tar.gz", hash = "sha256:bfa0351be89c9fcbcb8c9879b826f4353be10f58f8a677efab0c017bf7137ec2"}, ] - -[[package]] -name = "pytest" -version = "7.2.0" -description = "pytest: simple powerful testing with Python" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ +pytest = [ {file = "pytest-7.2.0-py3-none-any.whl", hash = "sha256:892f933d339f068883b6fd5a459f03d85bfcb355e4981e146d2c7616c21fef71"}, {file = "pytest-7.2.0.tar.gz", hash = "sha256:c4014eb40e10f11f355ad4e3c2fb2c6c6d1919c73f3b5a433de4708202cade59"}, ] - -[package.dependencies] -attrs = ">=19.2.0" -colorama = {version = "*", markers = "sys_platform == \"win32\""} -exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} -iniconfig = "*" -packaging = "*" -pluggy = ">=0.12,<2.0" -tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} - -[package.extras] -testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"] - -[[package]] -name = "pytest-cov" -version = "4.0.0" -description = "Pytest plugin for measuring coverage." 
-category = "dev" -optional = false -python-versions = ">=3.6" -files = [ +pytest-cov = [ {file = "pytest-cov-4.0.0.tar.gz", hash = "sha256:996b79efde6433cdbd0088872dbc5fb3ed7fe1578b68cdbba634f14bb8dd0470"}, {file = "pytest_cov-4.0.0-py3-none-any.whl", hash = "sha256:2feb1b751d66a8bd934e5edfa2e961d11309dc37b73b0eabe73b5945fee20f6b"}, ] - -[package.dependencies] -coverage = {version = ">=5.2.1", extras = ["toml"]} -pytest = ">=4.6" - -[package.extras] -testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] - -[[package]] -name = "pytest-dotenv" -version = "0.5.2" -description = "A py.test plugin that parses environment files before running tests" -category = "dev" -optional = false -python-versions = "*" -files = [ +pytest-dotenv = [ {file = "pytest-dotenv-0.5.2.tar.gz", hash = "sha256:2dc6c3ac6d8764c71c6d2804e902d0ff810fa19692e95fe138aefc9b1aa73732"}, {file = "pytest_dotenv-0.5.2-py3-none-any.whl", hash = "sha256:40a2cece120a213898afaa5407673f6bd924b1fa7eafce6bda0e8abffe2f710f"}, ] - -[package.dependencies] -pytest = ">=5.0.0" -python-dotenv = ">=0.9.1" - -[[package]] -name = "python-dateutil" -version = "2.8.2" -description = "Extensions to the standard Python datetime module" -category = "dev" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" -files = [ +python-dateutil = [ {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, ] - -[package.dependencies] -six = ">=1.5" - -[[package]] -name = "python-dotenv" -version = "0.21.0" -description = "Read key-value pairs from a .env file and set them as environment variables" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ +python-dotenv = [ {file = "python-dotenv-0.21.0.tar.gz", hash = "sha256:b77d08274639e3d34145dfa6c7008e66df0f04b7be7a75fd0d5292c191d79045"}, {file = "python_dotenv-0.21.0-py3-none-any.whl", hash = "sha256:1684eb44636dd462b66c3ee016599815514527ad99965de77f43e0944634a7e5"}, ] - -[package.extras] -cli = ["click (>=5.0)"] - -[[package]] -name = "python-json-logger" -version = "2.0.4" -description = "A python library adding a json log formatter" -category = "dev" -optional = false -python-versions = ">=3.5" -files = [ +python-json-logger = [ {file = "python-json-logger-2.0.4.tar.gz", hash = "sha256:764d762175f99fcc4630bd4853b09632acb60a6224acb27ce08cd70f0b1b81bd"}, {file = "python_json_logger-2.0.4-py3-none-any.whl", hash = "sha256:3b03487b14eb9e4f77e4fc2a023358b5394b82fd89cecf5586259baed57d8c6f"}, ] - -[[package]] -name = "pywin32" -version = "305" -description = "Python for Window Extensions" -category = "dev" -optional = false -python-versions = "*" -files = [ +pywin32 = [ {file = "pywin32-305-cp310-cp310-win32.whl", hash = "sha256:421f6cd86e84bbb696d54563c48014b12a23ef95a14e0bdba526be756d89f116"}, {file = "pywin32-305-cp310-cp310-win_amd64.whl", hash = "sha256:73e819c6bed89f44ff1d690498c0a811948f73777e5f97c494c152b850fad478"}, {file = "pywin32-305-cp310-cp310-win_arm64.whl", hash = "sha256:742eb905ce2187133a29365b428e6c3b9001d79accdc30aa8969afba1d8470f4"}, @@ -2489,15 +3353,7 @@ files = [ {file = "pywin32-305-cp39-cp39-win32.whl", hash = "sha256:9d968c677ac4d5cbdaa62fd3014ab241718e619d8e36ef8e11fb930515a1e918"}, {file = "pywin32-305-cp39-cp39-win_amd64.whl", hash = "sha256:50768c6b7c3f0b38b7fb14dd4104da93ebced5f1a50dc0e834594bff6fbe1271"}, ] - 
-[[package]] -name = "pywinpty" -version = "2.0.9" -description = "Pseudo terminal support for Windows from Python." -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ +pywinpty = [ {file = "pywinpty-2.0.9-cp310-none-win_amd64.whl", hash = "sha256:30a7b371446a694a6ce5ef906d70ac04e569de5308c42a2bdc9c3bc9275ec51f"}, {file = "pywinpty-2.0.9-cp311-none-win_amd64.whl", hash = "sha256:d78ef6f4bd7a6c6f94dc1a39ba8fb028540cc39f5cb593e756506db17843125f"}, {file = "pywinpty-2.0.9-cp37-none-win_amd64.whl", hash = "sha256:5ed36aa087e35a3a183f833631b3e4c1ae92fe2faabfce0fa91b77ed3f0f1382"}, @@ -2505,15 +3361,7 @@ files = [ {file = "pywinpty-2.0.9-cp39-none-win_amd64.whl", hash = "sha256:ba75ec55f46c9e17db961d26485b033deb20758b1731e8e208e1e8a387fcf70c"}, {file = "pywinpty-2.0.9.tar.gz", hash = "sha256:01b6400dd79212f50a2f01af1c65b781290ff39610853db99bf03962eb9a615f"}, ] - -[[package]] -name = "pyyaml" -version = "6.0" -description = "YAML parser and emitter for Python" -category = "main" -optional = false -python-versions = ">=3.6" -files = [ +pyyaml = [ {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"}, {file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"}, {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc"}, @@ -2555,15 +3403,7 @@ files = [ {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"}, {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"}, ] - -[[package]] -name = "pyzmq" -version = "24.0.1" -description = "Python bindings for 0MQ" -category = "dev" -optional = false -python-versions = ">=3.6" -files = [ +pyzmq = [ {file = "pyzmq-24.0.1-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:28b119ba97129d3001673a697b7cce47fe6de1f7255d104c2f01108a5179a066"}, {file = "pyzmq-24.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bcbebd369493d68162cddb74a9c1fcebd139dfbb7ddb23d8f8e43e6c87bac3a6"}, {file = "pyzmq-24.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ae61446166983c663cee42c852ed63899e43e484abf080089f771df4b9d272ef"}, @@ -2639,82 +3479,19 @@ files = [ {file = "pyzmq-24.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:687700f8371643916a1d2c61f3fdaa630407dd205c38afff936545d7b7466066"}, {file = "pyzmq-24.0.1.tar.gz", hash = "sha256:216f5d7dbb67166759e59b0479bca82b8acf9bed6015b526b8eb10143fb08e77"}, ] - -[package.dependencies] -cffi = {version = "*", markers = "implementation_name == \"pypy\""} -py = {version = "*", markers = "implementation_name == \"pypy\""} - -[[package]] -name = "qtconsole" -version = "5.4.0" -description = "Jupyter Qt console" -category = "dev" -optional = false -python-versions = ">= 3.7" -files = [ +qtconsole = [ {file = "qtconsole-5.4.0-py3-none-any.whl", hash = "sha256:be13560c19bdb3b54ed9741a915aa701a68d424519e8341ac479a91209e694b2"}, {file = "qtconsole-5.4.0.tar.gz", hash = "sha256:57748ea2fd26320a0b77adba20131cfbb13818c7c96d83fafcb110ff55f58b35"}, ] - -[package.dependencies] -ipykernel = ">=4.1" -ipython-genutils = "*" -jupyter-client = ">=4.1" -jupyter-core = "*" -pygments = "*" -pyzmq = ">=17.1" -qtpy = ">=2.0.1" -traitlets = "<5.2.1 || >5.2.1,<5.2.2 || >5.2.2" - -[package.extras] -doc = 
["Sphinx (>=1.3)"] -test = ["flaky", "pytest", "pytest-qt"] - -[[package]] -name = "qtpy" -version = "2.3.0" -description = "Provides an abstraction layer on top of the various Qt bindings (PyQt5/6 and PySide2/6)." -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ +qtpy = [ {file = "QtPy-2.3.0-py3-none-any.whl", hash = "sha256:8d6d544fc20facd27360ea189592e6135c614785f0dec0b4f083289de6beb408"}, {file = "QtPy-2.3.0.tar.gz", hash = "sha256:0603c9c83ccc035a4717a12908bf6bc6cb22509827ea2ec0e94c2da7c9ed57c5"}, ] - -[package.dependencies] -packaging = "*" - -[package.extras] -test = ["pytest (>=6,!=7.0.0,!=7.0.1)", "pytest-cov (>=3.0.0)", "pytest-qt"] - -[[package]] -name = "redis" -version = "4.4.0" -description = "Python client for Redis database and key-value store" -category = "main" -optional = true -python-versions = ">=3.7" -files = [ +redis = [ {file = "redis-4.4.0-py3-none-any.whl", hash = "sha256:cae3ee5d1f57d8caf534cd8764edf3163c77e073bdd74b6f54a87ffafdc5e7d9"}, {file = "redis-4.4.0.tar.gz", hash = "sha256:7b8c87d19c45d3f1271b124858d2a5c13160c4e74d4835e28273400fa34d5228"}, ] - -[package.dependencies] -async-timeout = ">=4.0.2" - -[package.extras] -hiredis = ["hiredis (>=1.0.0)"] -ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==20.0.1)", "requests (>=2.26.0)"] - -[[package]] -name = "regex" -version = "2022.10.31" -description = "Alternative regular expression module, to replace re." -category = "main" -optional = true -python-versions = ">=3.6" -files = [ +regex = [ {file = "regex-2022.10.31-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a8ff454ef0bb061e37df03557afda9d785c905dab15584860f982e88be73015f"}, {file = "regex-2022.10.31-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1eba476b1b242620c266edf6325b443a2e22b633217a9835a52d8da2b5c051f9"}, {file = "regex-2022.10.31-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0e5af9a9effb88535a472e19169e09ce750c3d442fb222254a276d77808620b"}, @@ -2804,168 +3581,47 @@ files = [ {file = "regex-2022.10.31-cp39-cp39-win_amd64.whl", hash = "sha256:957403a978e10fb3ca42572a23e6f7badff39aa1ce2f4ade68ee452dc6807692"}, {file = "regex-2022.10.31.tar.gz", hash = "sha256:a3a98921da9a1bf8457aeee6a551948a83601689e5ecdd736894ea9bbec77e83"}, ] - -[[package]] -name = "requests" -version = "2.28.1" -description = "Python HTTP for Humans." 
-category = "main" -optional = false -python-versions = ">=3.7, <4" -files = [ +requests = [ {file = "requests-2.28.1-py3-none-any.whl", hash = "sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349"}, {file = "requests-2.28.1.tar.gz", hash = "sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983"}, ] - -[package.dependencies] -certifi = ">=2017.4.17" -charset-normalizer = ">=2,<3" -idna = ">=2.5,<4" -urllib3 = ">=1.21.1,<1.27" - -[package.extras] -socks = ["PySocks (>=1.5.6,!=1.5.7)"] -use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] - -[[package]] -name = "rfc3339-validator" -version = "0.1.4" -description = "A pure python RFC3339 validator" -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -files = [ +rfc3339-validator = [ {file = "rfc3339_validator-0.1.4-py2.py3-none-any.whl", hash = "sha256:24f6ec1eda14ef823da9e36ec7113124b39c04d50a4d3d3a3c2859577e7791fa"}, {file = "rfc3339_validator-0.1.4.tar.gz", hash = "sha256:138a2abdf93304ad60530167e51d2dfb9549521a836871b88d7f4695d0022f6b"}, ] - -[package.dependencies] -six = "*" - -[[package]] -name = "rfc3986-validator" -version = "0.1.1" -description = "Pure python rfc3986 validator" -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -files = [ +rfc3986-validator = [ {file = "rfc3986_validator-0.1.1-py2.py3-none-any.whl", hash = "sha256:2f235c432ef459970b4306369336b9d5dbdda31b510ca1e327636e01f528bfa9"}, {file = "rfc3986_validator-0.1.1.tar.gz", hash = "sha256:3d44bde7921b3b9ec3ae4e3adca370438eccebc676456449b145d533b240d055"}, ] - -[[package]] -name = "send2trash" -version = "1.8.0" -description = "Send file to trash natively under Mac OS X, Windows and Linux." 
-category = "dev" -optional = false -python-versions = "*" -files = [ +send2trash = [ {file = "Send2Trash-1.8.0-py3-none-any.whl", hash = "sha256:f20eaadfdb517eaca5ce077640cb261c7d2698385a6a0f072a4a5447fd49fa08"}, {file = "Send2Trash-1.8.0.tar.gz", hash = "sha256:d2c24762fd3759860a0aff155e45871447ea58d2be6bdd39b5c8f966a0c99c2d"}, ] - -[package.extras] -nativelib = ["pyobjc-framework-Cocoa", "pywin32"] -objc = ["pyobjc-framework-Cocoa"] -win32 = ["pywin32"] - -[[package]] -name = "setuptools" -version = "65.6.3" -description = "Easily download, build, install, upgrade, and uninstall Python packages" -category = "main" -optional = true -python-versions = ">=3.7" -files = [ +setuptools = [ {file = "setuptools-65.6.3-py3-none-any.whl", hash = "sha256:57f6f22bde4e042978bcd50176fdb381d7c21a9efa4041202288d3737a0c6a54"}, {file = "setuptools-65.6.3.tar.gz", hash = "sha256:a7620757bf984b58deaf32fc8a4577a9bbc0850cf92c20e1ce41c38c19e5fb75"}, ] - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] - -[[package]] -name = "six" -version = "1.16.0" -description = "Python 2 and 3 compatibility utilities" -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" -files = [ +six = [ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] - -[[package]] -name = "smart-open" -version = "6.3.0" -description = "Utils for streaming large files (S3, HDFS, GCS, Azure Blob Storage, gzip, bz2...)" -category = "main" -optional = true -python-versions = ">=3.6,<4.0" -files = [ +smart-open = [ {file = "smart_open-6.3.0-py3-none-any.whl", hash = "sha256:b4c9ae193ad6d3e7add50944b86afa0d150bd821ab8ec21edb26d9a06b66f6a8"}, {file = "smart_open-6.3.0.tar.gz", hash = "sha256:d5238825fe9a9340645fac3d75b287c08fbb99fb2b422477de781c9f5f09e019"}, ] - -[package.extras] -all = ["azure-common", "azure-core", "azure-storage-blob", "boto3", "google-cloud-storage (>=2.6.0)", "paramiko", "requests"] -azure = ["azure-common", "azure-core", "azure-storage-blob"] -gcs = ["google-cloud-storage (>=2.6.0)"] -http = ["requests"] -s3 = ["boto3"] -ssh = ["paramiko"] -test = ["azure-common", "azure-core", "azure-storage-blob", "boto3", "google-cloud-storage (>=2.6.0)", "moto[server]", "paramiko", "pytest", "pytest-rerunfailures", "requests", "responses"] -webhdfs = ["requests"] - -[[package]] -name = "sniffio" -version = "1.3.0" -description = "Sniff out which async library your code is running under" -category = "dev" -optional = false 
-python-versions = ">=3.7" -files = [ +sniffio = [ {file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"}, {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"}, ] - -[[package]] -name = "snowballstemmer" -version = "2.2.0" -description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." -category = "dev" -optional = false -python-versions = "*" -files = [ +snowballstemmer = [ {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, ] - -[[package]] -name = "soupsieve" -version = "2.3.2.post1" -description = "A modern CSS selector implementation for Beautiful Soup." -category = "main" -optional = false -python-versions = ">=3.6" -files = [ +soupsieve = [ {file = "soupsieve-2.3.2.post1-py3-none-any.whl", hash = "sha256:3b2503d3c7084a42b1ebd08116e5f81aadfaea95863628c80a3b774a11b7c759"}, {file = "soupsieve-2.3.2.post1.tar.gz", hash = "sha256:fc53893b3da2c33de295667a0e19f078c14bf86544af307354de5fcf12a3f30d"}, ] - -[[package]] -name = "spacy" -version = "3.4.3" -description = "Industrial-strength Natural Language Processing (NLP) in Python" -category = "main" -optional = true -python-versions = ">=3.6" -files = [ +spacy = [ {file = "spacy-3.4.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e546b314f619502ae03e5eb9a0cfd09ca7a9db265bcdd8a3af83cfb0f1432e55"}, {file = "spacy-3.4.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ded11aa8966236aab145b4d2d024b3eb61ac50078362d77d9ed7d8c240ef0f4a"}, {file = "spacy-3.4.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:462e141f514d78cff85685b5b12eb8cadac0bad2f7820149cbe18d03ccb2e59c"}, @@ -2995,87 +3651,15 @@ files = [ {file = "spacy-3.4.3-cp39-cp39-win_amd64.whl", hash = "sha256:676ab9ab2cf94ba48caa306f185a166e85bd35b388ec24512c8ba7dfcbc7517e"}, {file = "spacy-3.4.3.tar.gz", hash = "sha256:22698cf5175e2b697e82699fcccee3092b42137a57d352df208d71657fd693bb"}, ] - -[package.dependencies] -catalogue = ">=2.0.6,<2.1.0" -cymem = ">=2.0.2,<2.1.0" -jinja2 = "*" -langcodes = ">=3.2.0,<4.0.0" -murmurhash = ">=0.28.0,<1.1.0" -numpy = ">=1.15.0" -packaging = ">=20.0" -pathy = ">=0.3.5" -preshed = ">=3.0.2,<3.1.0" -pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<1.11.0" -requests = ">=2.13.0,<3.0.0" -setuptools = "*" -spacy-legacy = ">=3.0.10,<3.1.0" -spacy-loggers = ">=1.0.0,<2.0.0" -srsly = ">=2.4.3,<3.0.0" -thinc = ">=8.1.0,<8.2.0" -tqdm = ">=4.38.0,<5.0.0" -typer = ">=0.3.0,<0.8.0" -wasabi = ">=0.9.1,<1.1.0" - -[package.extras] -apple = ["thinc-apple-ops (>=0.1.0.dev0,<1.0.0)"] -cuda = ["cupy (>=5.0.0b4,<12.0.0)"] -cuda-autodetect = ["cupy-wheel (>=11.0.0,<12.0.0)"] -cuda100 = ["cupy-cuda100 (>=5.0.0b4,<12.0.0)"] -cuda101 = ["cupy-cuda101 (>=5.0.0b4,<12.0.0)"] -cuda102 = ["cupy-cuda102 (>=5.0.0b4,<12.0.0)"] -cuda110 = ["cupy-cuda110 (>=5.0.0b4,<12.0.0)"] -cuda111 = ["cupy-cuda111 (>=5.0.0b4,<12.0.0)"] -cuda112 = ["cupy-cuda112 (>=5.0.0b4,<12.0.0)"] -cuda113 = ["cupy-cuda113 (>=5.0.0b4,<12.0.0)"] -cuda114 = ["cupy-cuda114 (>=5.0.0b4,<12.0.0)"] -cuda115 = ["cupy-cuda115 (>=5.0.0b4,<12.0.0)"] -cuda116 = ["cupy-cuda116 (>=5.0.0b4,<12.0.0)"] -cuda117 = ["cupy-cuda117 (>=5.0.0b4,<12.0.0)"] -cuda11x = ["cupy-cuda11x (>=11.0.0,<12.0.0)"] -cuda80 = ["cupy-cuda80 
(>=5.0.0b4,<12.0.0)"] -cuda90 = ["cupy-cuda90 (>=5.0.0b4,<12.0.0)"] -cuda91 = ["cupy-cuda91 (>=5.0.0b4,<12.0.0)"] -cuda92 = ["cupy-cuda92 (>=5.0.0b4,<12.0.0)"] -ja = ["sudachidict-core (>=20211220)", "sudachipy (>=0.5.2,!=0.6.1)"] -ko = ["natto-py (>=0.9.0)"] -lookups = ["spacy-lookups-data (>=1.0.3,<1.1.0)"] -ray = ["spacy-ray (>=0.1.0,<1.0.0)"] -th = ["pythainlp (>=2.0)"] -transformers = ["spacy-transformers (>=1.1.2,<1.2.0)"] - -[[package]] -name = "spacy-legacy" -version = "3.0.10" -description = "Legacy registered functions for spaCy backwards compatibility" -category = "main" -optional = true -python-versions = ">=3.6" -files = [ +spacy-legacy = [ {file = "spacy-legacy-3.0.10.tar.gz", hash = "sha256:16104595d8ab1b7267f817a449ad1f986eb1f2a2edf1050748f08739a479679a"}, {file = "spacy_legacy-3.0.10-py2.py3-none-any.whl", hash = "sha256:8526a54d178dee9b7f218d43e5c21362c59056c5da23380b319b56043e9211f3"}, ] - -[[package]] -name = "spacy-loggers" -version = "1.0.4" -description = "Logging utilities for SpaCy" -category = "main" -optional = true -python-versions = ">=3.6" -files = [ +spacy-loggers = [ {file = "spacy-loggers-1.0.4.tar.gz", hash = "sha256:e6f983bf71230091d5bb7b11bf64bd54415eca839108d5f83d9155d0ba93bf28"}, {file = "spacy_loggers-1.0.4-py3-none-any.whl", hash = "sha256:e050bf2e63208b2f096b777e494971c962ad7c1dc997641c8f95c622550044ae"}, ] - -[[package]] -name = "sqlalchemy" -version = "1.4.45" -description = "Database Abstraction Library" -category = "main" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" -files = [ +sqlalchemy = [ {file = "SQLAlchemy-1.4.45-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:f1d3fb02a4d0b07d1351a4a52f159e5e7b3045c903468b7e9349ebf0020ffdb9"}, {file = "SQLAlchemy-1.4.45-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:9b7025d46aba946272f6b6b357a22f3787473ef27451f342df1a2a6de23743e3"}, {file = "SQLAlchemy-1.4.45-cp27-cp27m-win32.whl", hash = "sha256:26b8424b32eeefa4faad21decd7bdd4aade58640b39407bf43e7d0a7c1bc0453"}, @@ -3118,50 +3702,10 @@ files = [ {file = "SQLAlchemy-1.4.45-cp39-cp39-win_amd64.whl", hash = "sha256:2d6f178ff2923730da271c8aa317f70cf0df11a4d1812f1d7a704b1cf29c5fe3"}, {file = "SQLAlchemy-1.4.45.tar.gz", hash = "sha256:fd69850860093a3f69fefe0ab56d041edfdfe18510b53d9a2eaecba2f15fa795"}, ] - -[package.dependencies] -greenlet = {version = "!=0.4.17", markers = "python_version >= \"3\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"} - -[package.extras] -aiomysql = ["aiomysql", "greenlet (!=0.4.17)"] -aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing-extensions (!=3.10.0.1)"] -asyncio = ["greenlet (!=0.4.17)"] -asyncmy = ["asyncmy (>=0.2.3,!=0.2.4)", "greenlet (!=0.4.17)"] -mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2)"] -mssql = ["pyodbc"] -mssql-pymssql = ["pymssql"] -mssql-pyodbc = ["pyodbc"] -mypy = ["mypy (>=0.910)", "sqlalchemy2-stubs"] -mysql = ["mysqlclient (>=1.4.0)", "mysqlclient (>=1.4.0,<2)"] -mysql-connector = ["mysql-connector-python"] -oracle = ["cx-oracle (>=7)", "cx-oracle (>=7,<8)"] -postgresql = ["psycopg2 (>=2.7)"] -postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] -postgresql-pg8000 = ["pg8000 (>=1.16.6,!=1.29.0)"] -postgresql-psycopg2binary = ["psycopg2-binary"] -postgresql-psycopg2cffi = ["psycopg2cffi"] -pymysql = ["pymysql", "pymysql (<1)"] 
-sqlcipher = ["sqlcipher3-binary"] - -[[package]] -name = "sqlitedict" -version = "2.1.0" -description = "Persistent dict in Python, backed up by sqlite3 and pickle, multithread-safe." -category = "main" -optional = true -python-versions = "*" -files = [ +sqlitedict = [ {file = "sqlitedict-2.1.0.tar.gz", hash = "sha256:03d9cfb96d602996f1d4c2db2856f1224b96a9c431bdd16e78032a72940f9e8c"}, ] - -[[package]] -name = "srsly" -version = "2.4.5" -description = "Modern high-performance serialization utilities for Python" -category = "main" -optional = true -python-versions = ">=3.6" -files = [ +srsly = [ {file = "srsly-2.4.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8fed31ef8acbb5fead2152824ef39e12d749fcd254968689ba5991dd257b63b4"}, {file = "srsly-2.4.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:04d0b4cd91e098cdac12d2c28e256b1181ba98bcd00e460b8e42dee3e8542804"}, {file = "srsly-2.4.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6d83bea1f774b54d9313a374a95f11a776d37bcedcda93c526bf7f1cb5f26428"}, @@ -3191,59 +3735,15 @@ files = [ {file = "srsly-2.4.5-cp39-cp39-win_amd64.whl", hash = "sha256:2d3b0d32be2267fb489da172d71399ac59f763189b47dbe68eedb0817afaa6dc"}, {file = "srsly-2.4.5.tar.gz", hash = "sha256:c842258967baa527cea9367986e42b8143a1a890e7d4a18d25a36edc3c7a33c7"}, ] - -[package.dependencies] -catalogue = ">=2.0.3,<2.1.0" - -[[package]] -name = "stack-data" -version = "0.6.2" -description = "Extract data from python stack frames and tracebacks for informative displays" -category = "dev" -optional = false -python-versions = "*" -files = [ +stack-data = [ {file = "stack_data-0.6.2-py3-none-any.whl", hash = "sha256:cbb2a53eb64e5785878201a97ed7c7b94883f48b87bfb0bbe8b623c74679e4a8"}, {file = "stack_data-0.6.2.tar.gz", hash = "sha256:32d2dd0376772d01b6cb9fc996f3c8b57a357089dec328ed4b6553d037eaf815"}, ] - -[package.dependencies] -asttokens = ">=2.1.0" -executing = ">=1.2.0" -pure-eval = "*" - -[package.extras] -tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"] - -[[package]] -name = "terminado" -version = "0.17.1" -description = "Tornado websocket backend for the Xterm.js Javascript terminal emulator library." 
-category = "dev" -optional = false -python-versions = ">=3.7" -files = [ +terminado = [ {file = "terminado-0.17.1-py3-none-any.whl", hash = "sha256:8650d44334eba354dd591129ca3124a6ba42c3d5b70df5051b6921d506fdaeae"}, {file = "terminado-0.17.1.tar.gz", hash = "sha256:6ccbbcd3a4f8a25a5ec04991f39a0b8db52dfcd487ea0e578d977e6752380333"}, ] - -[package.dependencies] -ptyprocess = {version = "*", markers = "os_name != \"nt\""} -pywinpty = {version = ">=1.1.0", markers = "os_name == \"nt\""} -tornado = ">=6.1.0" - -[package.extras] -docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] -test = ["pre-commit", "pytest (>=7.0)", "pytest-timeout"] - -[[package]] -name = "thinc" -version = "8.1.5" -description = "A refreshing functional take on deep learning, compatible with your favorite libraries" -category = "main" -optional = true -python-versions = ">=3.6" -files = [ +thinc = [ {file = "thinc-8.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5dc6629e4770a13dec34eda3c4d89302f1b5c91ac4663cd53f876a4e761fcc00"}, {file = "thinc-8.1.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8af5639de41a08d358fac073ac116faefe75289d9bed5c1fbf6c7a54724529ea"}, {file = "thinc-8.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d66eeacc29769bf4238a0666f05e38d75dce60ab609eea5089975e6d8b82721"}, @@ -3273,71 +3773,29 @@ files = [ {file = "thinc-8.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:16be051c6f71d967fe87c3bda3a760699539cf75fee6b32527ea38feb3002e56"}, {file = "thinc-8.1.5.tar.gz", hash = "sha256:4d3e4de33d2d0eae7c1455c60c680e453b0204c29e3d2d548d7a9e7fe08ccfbd"}, ] - -[package.dependencies] -blis = ">=0.7.8,<0.8.0" -catalogue = ">=2.0.4,<2.1.0" -confection = ">=0.0.1,<1.0.0" -cymem = ">=2.0.2,<2.1.0" -murmurhash = ">=1.0.2,<1.1.0" -numpy = ">=1.15.0" -preshed = ">=3.0.2,<3.1.0" -pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<1.11.0" -setuptools = "*" -srsly = ">=2.4.0,<3.0.0" -wasabi = ">=0.8.1,<1.1.0" - -[package.extras] -cuda = ["cupy (>=5.0.0b4)"] -cuda-autodetect = ["cupy-wheel (>=11.0.0)"] -cuda100 = ["cupy-cuda100 (>=5.0.0b4)"] -cuda101 = ["cupy-cuda101 (>=5.0.0b4)"] -cuda102 = ["cupy-cuda102 (>=5.0.0b4)"] -cuda110 = ["cupy-cuda110 (>=5.0.0b4)"] -cuda111 = ["cupy-cuda111 (>=5.0.0b4)"] -cuda112 = ["cupy-cuda112 (>=5.0.0b4)"] -cuda113 = ["cupy-cuda113 (>=5.0.0b4)"] -cuda114 = ["cupy-cuda114 (>=5.0.0b4)"] -cuda115 = ["cupy-cuda115 (>=5.0.0b4)"] -cuda116 = ["cupy-cuda116 (>=5.0.0b4)"] -cuda117 = ["cupy-cuda117 (>=5.0.0b4)"] -cuda11x = ["cupy-cuda11x (>=11.0.0)"] -cuda80 = ["cupy-cuda80 (>=5.0.0b4)"] -cuda90 = ["cupy-cuda90 (>=5.0.0b4)"] -cuda91 = ["cupy-cuda91 (>=5.0.0b4)"] -cuda92 = ["cupy-cuda92 (>=5.0.0b4)"] -datasets = ["ml-datasets (>=0.2.0,<0.3.0)"] -mxnet = ["mxnet (>=1.5.1,<1.6.0)"] -tensorflow = ["tensorflow (>=2.0.0,<2.6.0)"] -torch = ["torch (>=1.6.0)"] - -[[package]] -name = "tinycss2" -version = "1.2.1" -description = "A tiny CSS parser" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ +tiktoken = [ + {file = "tiktoken-0.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f4362363d129c0dcfd5acc8c9675a0f2372c23ac1239b11349eac991a814c046"}, + {file = "tiktoken-0.1.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b16afffbc1a546521a2fc04c6ab185b263df71ff77eca8a6ff10effc7562b1af"}, + {file = "tiktoken-0.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50d0f446750bb2390bff3d63d517e3457b9b182b162f6d90586b9c5ac95899ce"}, + {file = "tiktoken-0.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:1c88e6eab36d4cf950da4686149e03b5ef3274fb0b3711cb50aef0e03eb9051d"}, + {file = "tiktoken-0.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:ca0096dca00b0ed60c6eee22a8bbaaee8c8de09fc13417d5cf786c6a910dcd0c"}, + {file = "tiktoken-0.1.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f6151dc8cd871bf24b60788403c0425e3a8c69259aa985fc5c451a4a5c344e0d"}, + {file = "tiktoken-0.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c6af53a1f76cffe9429aa2a2b9091556d157462212ef53e63505177fa9196809"}, + {file = "tiktoken-0.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:580bd89f80fbbe9a5add29d585c94fc72a1c8648d935eddeb677721985efd7b1"}, + {file = "tiktoken-0.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ca6cb6e806261b0f33440c9b064068c1cb2b5c506aff7d9133e6b643b4fc1652"}, + {file = "tiktoken-0.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:c5402c7871aa2507a749374a39a5b88f7e96be3f0cc7bf52e0845e865fc2cf52"}, + {file = "tiktoken-0.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:301e9d61a45846f1751f552eb6501c9ad4086763b55f3fee66852ae3532a9a3c"}, + {file = "tiktoken-0.1.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2450f71b3219bae8be1e55cae7385de48ee96f0dd7490d8de6b7c53e97ba06b7"}, + {file = "tiktoken-0.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc6df2d41e3176cba3015c5c3183b7c83de927db81d6ad44a08c90547774065a"}, + {file = "tiktoken-0.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5c48cc16483ab4313012b5656c8c02c0fd631329b255e0723019f2097d7d4aa1"}, + {file = "tiktoken-0.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:c2f5697a5fe2bd053997728c7522f0d0285b6d4fd6be65602dccf43341e4dd82"}, + {file = "tiktoken-0.1.1.tar.gz", hash = "sha256:328ee64d63c68e04f11d68b45198053134fbe42a169363e6b5405fabf59696e1"}, +] +tinycss2 = [ {file = "tinycss2-1.2.1-py3-none-any.whl", hash = "sha256:2b80a96d41e7c3914b8cda8bc7f705a4d9c49275616e886103dd839dfc847847"}, {file = "tinycss2-1.2.1.tar.gz", hash = "sha256:8cff3a8f066c2ec677c06dbc7b45619804a6938478d9d73c284b29d14ecb0627"}, ] - -[package.dependencies] -webencodings = ">=0.4" - -[package.extras] -doc = ["sphinx", "sphinx_rtd_theme"] -test = ["flake8", "isort", "pytest"] - -[[package]] -name = "tokenizers" -version = "0.13.2" -description = "Fast and Customizable Tokenizers" -category = "main" -optional = true -python-versions = "*" -files = [ +tokenizers = [ {file = "tokenizers-0.13.2-cp310-cp310-macosx_10_11_x86_64.whl", hash = "sha256:a6f36b1b499233bb4443b5e57e20630c5e02fba61109632f5e00dab970440157"}, {file = "tokenizers-0.13.2-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:bc6983282ee74d638a4b6d149e5dadd8bc7ff1d0d6de663d69f099e0c6bddbeb"}, {file = "tokenizers-0.13.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:16756e6ab264b162f99c0c0a8d3d521328f428b33374c5ee161c0ebec42bf3c0"}, @@ -3375,32 +3833,11 @@ files = [ {file = "tokenizers-0.13.2-cp39-cp39-win_amd64.whl", hash = "sha256:486d637b413fddada845a10a45c74825d73d3725da42ccd8796ccd7a1c07a024"}, {file = "tokenizers-0.13.2.tar.gz", hash = "sha256:f9525375582fd1912ac3caa2f727d36c86ff8c0c6de45ae1aaff90f87f33b907"}, ] - -[package.extras] -dev = ["black (==22.3)", "datasets", "numpy", "pytest", "requests"] -docs = ["setuptools-rust", "sphinx", "sphinx-rtd-theme"] -testing = ["black (==22.3)", "datasets", "numpy", "pytest", "requests"] - -[[package]] -name = "tomli" -version = "2.0.1" -description = "A lil' TOML parser" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ 
+tomli = [ {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, ] - -[[package]] -name = "tornado" -version = "6.2" -description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." -category = "dev" -optional = false -python-versions = ">= 3.7" -files = [ +tornado = [ {file = "tornado-6.2-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:20f638fd8cc85f3cbae3c732326e96addff0a15e22d80f049e00121651e82e72"}, {file = "tornado-6.2-cp37-abi3-macosx_10_9_x86_64.whl", hash = "sha256:87dcafae3e884462f90c90ecc200defe5e580a7fbbb4365eda7c7c1eb809ebc9"}, {file = "tornado-6.2-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba09ef14ca9893954244fd872798b4ccb2367c165946ce2dd7376aebdde8e3ac"}, @@ -3413,341 +3850,78 @@ files = [ {file = "tornado-6.2-cp37-abi3-win_amd64.whl", hash = "sha256:e5f923aa6a47e133d1cf87d60700889d7eae68988704e20c75fb2d65677a8e4b"}, {file = "tornado-6.2.tar.gz", hash = "sha256:9b630419bde84ec666bfd7ea0a4cb2a8a651c2d5cccdbdd1972a0c859dfc3c13"}, ] - -[[package]] -name = "tqdm" -version = "4.64.1" -description = "Fast, Extensible Progress Meter" -category = "main" -optional = true -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" -files = [ +tqdm = [ {file = "tqdm-4.64.1-py2.py3-none-any.whl", hash = "sha256:6fee160d6ffcd1b1c68c65f14c829c22832bc401726335ce92c52d395944a6a1"}, {file = "tqdm-4.64.1.tar.gz", hash = "sha256:5f4f682a004951c1b450bc753c710e9280c5746ce6ffedee253ddbcbf54cf1e4"}, ] - -[package.dependencies] -colorama = {version = "*", markers = "platform_system == \"Windows\""} - -[package.extras] -dev = ["py-make (>=0.1.0)", "twine", "wheel"] -notebook = ["ipywidgets (>=6)"] -slack = ["slack-sdk"] -telegram = ["requests"] - -[[package]] -name = "traitlets" -version = "5.7.1" -description = "Traitlets Python configuration system" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ +traitlets = [ {file = "traitlets-5.7.1-py3-none-any.whl", hash = "sha256:57ba2ba951632eeab9388fa45f342a5402060a5cc9f0bb942f760fafb6641581"}, {file = "traitlets-5.7.1.tar.gz", hash = "sha256:fde8f62c05204ead43c2c1b9389cfc85befa7f54acb5da28529d671175bb4108"}, ] - -[package.extras] -docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] -lint = ["black (>=22.6.0)", "mdformat (>0.7)", "ruff (>=0.0.156)"] -test = ["pre-commit", "pytest"] -typing = ["mypy (>=0.990)"] - -[[package]] -name = "transformers" -version = "4.25.1" -description = "State-of-the-art Machine Learning for JAX, PyTorch and TensorFlow" -category = "main" -optional = true -python-versions = ">=3.7.0" -files = [ +transformers = [ {file = "transformers-4.25.1-py3-none-any.whl", hash = "sha256:60f1be15e17e4a54373c787c713ec149dabcc63464131ac45611618fe7c2016e"}, {file = "transformers-4.25.1.tar.gz", hash = "sha256:6dad398b792d45dc04e9ee7e9e06bf758ab19dca2efc119065e661bb0f8f843b"}, ] - -[package.dependencies] -filelock = "*" -huggingface-hub = ">=0.10.0,<1.0" -numpy = ">=1.17" -packaging = ">=20.0" -pyyaml = ">=5.1" -regex = "!=2019.12.17" -requests = "*" -tokenizers = ">=0.11.1,<0.11.3 || >0.11.3,<0.14" -tqdm = ">=4.27" - -[package.extras] -accelerate = ["accelerate (>=0.10.0)"] -all = ["Pillow", "accelerate (>=0.10.0)", "codecarbon (==1.2.0)", "flax (>=0.4.1)", "jax (>=0.2.8,!=0.3.2,<=0.3.6)", "jaxlib (>=0.1.65,<=0.3.6)", "kenlm", "keras-nlp 
(>=0.3.1)", "librosa", "onnxconverter-common", "optax (>=0.0.8)", "optuna", "phonemizer", "protobuf (<=3.20.2)", "pyctcdecode (>=0.4.0)", "ray[tune]", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "tensorflow (>=2.4,<2.11)", "tensorflow-text", "tf2onnx", "timm", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.7,!=1.12.0)", "torchaudio"] -audio = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"] -codecarbon = ["codecarbon (==1.2.0)"] -deepspeed = ["accelerate (>=0.10.0)", "deepspeed (>=0.6.5)"] -deepspeed-testing = ["GitPython (<3.1.19)", "accelerate (>=0.10.0)", "beautifulsoup4", "black (==22.3)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "deepspeed (>=0.6.5)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "hf-doc-builder (>=0.3.0)", "nltk", "optuna", "parameterized", "protobuf (<=3.20.2)", "psutil", "pytest", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "safetensors (>=0.2.1)", "timeout-decorator"] -dev = ["GitPython (<3.1.19)", "Pillow", "accelerate (>=0.10.0)", "beautifulsoup4", "black (==22.3)", "codecarbon (==1.2.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "flake8 (>=3.8.3)", "flax (>=0.4.1)", "fugashi (>=1.0)", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "ipadic (>=1.0.0,<2.0)", "isort (>=5.5.4)", "jax (>=0.2.8,!=0.3.2,<=0.3.6)", "jaxlib (>=0.1.65,<=0.3.6)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "nltk", "onnxconverter-common", "optax (>=0.0.8)", "optuna", "parameterized", "phonemizer", "protobuf (<=3.20.2)", "psutil", "pyctcdecode (>=0.4.0)", "pyknp (>=0.6.1)", "pytest", "pytest-timeout", "pytest-xdist", "ray[tune]", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "safetensors (>=0.2.1)", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "tensorflow (>=2.4,<2.11)", "tensorflow-text", "tf2onnx", "timeout-decorator", "timm", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.7,!=1.12.0)", "torchaudio", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)"] -dev-tensorflow = ["GitPython (<3.1.19)", "Pillow", "beautifulsoup4", "black (==22.3)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "flake8 (>=3.8.3)", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "isort (>=5.5.4)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "nltk", "onnxconverter-common", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "parameterized", "phonemizer", "protobuf (<=3.20.2)", "psutil", "pyctcdecode (>=0.4.0)", "pytest", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "safetensors (>=0.2.1)", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorflow (>=2.4,<2.11)", "tensorflow-text", "tf2onnx", "timeout-decorator", "tokenizers (>=0.11.1,!=0.11.3,<0.14)"] -dev-torch = ["GitPython (<3.1.19)", "Pillow", "beautifulsoup4", "black (==22.3)", "codecarbon (==1.2.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "flake8 (>=3.8.3)", "fugashi (>=1.0)", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "ipadic (>=1.0.0,<2.0)", "isort (>=5.5.4)", "kenlm", "librosa", "nltk", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "optuna", "parameterized", "phonemizer", "protobuf (<=3.20.2)", "psutil", "pyctcdecode (>=0.4.0)", "pyknp 
(>=0.6.1)", "pytest", "pytest-timeout", "pytest-xdist", "ray[tune]", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "safetensors (>=0.2.1)", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "timeout-decorator", "timm", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.7,!=1.12.0)", "torchaudio", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)"] -docs = ["Pillow", "accelerate (>=0.10.0)", "codecarbon (==1.2.0)", "flax (>=0.4.1)", "hf-doc-builder", "jax (>=0.2.8,!=0.3.2,<=0.3.6)", "jaxlib (>=0.1.65,<=0.3.6)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "onnxconverter-common", "optax (>=0.0.8)", "optuna", "phonemizer", "protobuf (<=3.20.2)", "pyctcdecode (>=0.4.0)", "ray[tune]", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "tensorflow (>=2.4,<2.11)", "tensorflow-text", "tf2onnx", "timm", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.7,!=1.12.0)", "torchaudio"] -docs-specific = ["hf-doc-builder"] -fairscale = ["fairscale (>0.3)"] -flax = ["flax (>=0.4.1)", "jax (>=0.2.8,!=0.3.2,<=0.3.6)", "jaxlib (>=0.1.65,<=0.3.6)", "optax (>=0.0.8)"] -flax-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"] -ftfy = ["ftfy"] -integrations = ["optuna", "ray[tune]", "sigopt"] -ja = ["fugashi (>=1.0)", "ipadic (>=1.0.0,<2.0)", "pyknp (>=0.6.1)", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)"] -modelcreation = ["cookiecutter (==1.7.3)"] -natten = ["natten (>=0.14.4)"] -onnx = ["onnxconverter-common", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "tf2onnx"] -onnxruntime = ["onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)"] -optuna = ["optuna"] -quality = ["GitPython (<3.1.19)", "black (==22.3)", "datasets (!=2.5.0)", "flake8 (>=3.8.3)", "hf-doc-builder (>=0.3.0)", "isort (>=5.5.4)"] -ray = ["ray[tune]"] -retrieval = ["datasets (!=2.5.0)", "faiss-cpu"] -sagemaker = ["sagemaker (>=2.31.0)"] -sentencepiece = ["protobuf (<=3.20.2)", "sentencepiece (>=0.1.91,!=0.1.92)"] -serving = ["fastapi", "pydantic", "starlette", "uvicorn"] -sigopt = ["sigopt"] -sklearn = ["scikit-learn"] -speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)", "torchaudio"] -testing = ["GitPython (<3.1.19)", "beautifulsoup4", "black (==22.3)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "hf-doc-builder (>=0.3.0)", "nltk", "parameterized", "protobuf (<=3.20.2)", "psutil", "pytest", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "safetensors (>=0.2.1)", "timeout-decorator"] -tf = ["keras-nlp (>=0.3.1)", "onnxconverter-common", "tensorflow (>=2.4,<2.11)", "tensorflow-text", "tf2onnx"] -tf-cpu = ["keras-nlp (>=0.3.1)", "onnxconverter-common", "tensorflow-cpu (>=2.4,<2.11)", "tensorflow-text", "tf2onnx"] -tf-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"] -timm = ["timm"] -tokenizers = ["tokenizers (>=0.11.1,!=0.11.3,<0.14)"] -torch = ["torch (>=1.7,!=1.12.0)"] -torch-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)", "torchaudio"] -torchhub = ["filelock", "huggingface-hub (>=0.10.0,<1.0)", "importlib-metadata", "numpy (>=1.17)", "packaging (>=20.0)", "protobuf (<=3.20.2)", "regex (!=2019.12.17)", "requests", "sentencepiece (>=0.1.91,!=0.1.92)", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.7,!=1.12.0)", "tqdm (>=4.27)"] -vision = ["Pillow"] - 
-[[package]] -name = "typer" -version = "0.7.0" -description = "Typer, build great CLIs. Easy to code. Based on Python type hints." -category = "main" -optional = true -python-versions = ">=3.6" -files = [ +typer = [ {file = "typer-0.7.0-py3-none-any.whl", hash = "sha256:b5e704f4e48ec263de1c0b3a2387cd405a13767d2f907f44c1a08cbad96f606d"}, {file = "typer-0.7.0.tar.gz", hash = "sha256:ff797846578a9f2a201b53442aedeb543319466870fbe1c701eab66dd7681165"}, ] - -[package.dependencies] -click = ">=7.1.1,<9.0.0" - -[package.extras] -all = ["colorama (>=0.4.3,<0.5.0)", "rich (>=10.11.0,<13.0.0)", "shellingham (>=1.3.0,<2.0.0)"] -dev = ["autoflake (>=1.3.1,<2.0.0)", "flake8 (>=3.8.3,<4.0.0)", "pre-commit (>=2.17.0,<3.0.0)"] -doc = ["cairosvg (>=2.5.2,<3.0.0)", "mdx-include (>=1.4.1,<2.0.0)", "mkdocs (>=1.1.2,<2.0.0)", "mkdocs-material (>=8.1.4,<9.0.0)", "pillow (>=9.3.0,<10.0.0)"] -test = ["black (>=22.3.0,<23.0.0)", "coverage (>=6.2,<7.0)", "isort (>=5.0.6,<6.0.0)", "mypy (==0.910)", "pytest (>=4.4.0,<8.0.0)", "pytest-cov (>=2.10.0,<5.0.0)", "pytest-sugar (>=0.9.4,<0.10.0)", "pytest-xdist (>=1.32.0,<4.0.0)", "rich (>=10.11.0,<13.0.0)", "shellingham (>=1.3.0,<2.0.0)"] - -[[package]] -name = "types-pyyaml" -version = "6.0.12.2" -description = "Typing stubs for PyYAML" -category = "dev" -optional = false -python-versions = "*" -files = [ +types-pyyaml = [ {file = "types-PyYAML-6.0.12.2.tar.gz", hash = "sha256:6840819871c92deebe6a2067fb800c11b8a063632eb4e3e755914e7ab3604e83"}, {file = "types_PyYAML-6.0.12.2-py3-none-any.whl", hash = "sha256:1e94e80aafee07a7e798addb2a320e32956a373f376655128ae20637adb2655b"}, ] - -[[package]] -name = "types-requests" -version = "2.28.11.5" -description = "Typing stubs for requests" -category = "dev" -optional = false -python-versions = "*" -files = [ +types-requests = [ {file = "types-requests-2.28.11.5.tar.gz", hash = "sha256:a7df37cc6fb6187a84097da951f8e21d335448aa2501a6b0a39cbd1d7ca9ee2a"}, {file = "types_requests-2.28.11.5-py3-none-any.whl", hash = "sha256:091d4a5a33c1b4f20d8b1b952aa8fa27a6e767c44c3cf65e56580df0b05fd8a9"}, ] - -[package.dependencies] -types-urllib3 = "<1.27" - -[[package]] -name = "types-toml" -version = "0.10.8.1" -description = "Typing stubs for toml" -category = "dev" -optional = false -python-versions = "*" -files = [ +types-toml = [ {file = "types-toml-0.10.8.1.tar.gz", hash = "sha256:171bdb3163d79a520560f24ba916a9fc9bff81659c5448a9fea89240923722be"}, {file = "types_toml-0.10.8.1-py3-none-any.whl", hash = "sha256:b7b5c4977f96ab7b5ac06d8a6590d17c0bf252a96efc03b109c2711fb3e0eafd"}, ] - -[[package]] -name = "types-urllib3" -version = "1.26.25.4" -description = "Typing stubs for urllib3" -category = "dev" -optional = false -python-versions = "*" -files = [ +types-urllib3 = [ {file = "types-urllib3-1.26.25.4.tar.gz", hash = "sha256:eec5556428eec862b1ac578fb69aab3877995a99ffec9e5a12cf7fbd0cc9daee"}, {file = "types_urllib3-1.26.25.4-py3-none-any.whl", hash = "sha256:ed6b9e8a8be488796f72306889a06a3fc3cb1aa99af02ab8afb50144d7317e49"}, ] - -[[package]] -name = "typing-extensions" -version = "4.4.0" -description = "Backported and Experimental Type Hints for Python 3.7+" -category = "main" -optional = false -python-versions = ">=3.7" -files = [ +typing-extensions = [ {file = "typing_extensions-4.4.0-py3-none-any.whl", hash = "sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e"}, {file = "typing_extensions-4.4.0.tar.gz", hash = "sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa"}, ] - -[[package]] -name = 
"uri-template" -version = "1.2.0" -description = "RFC 6570 URI Template Processor" -category = "dev" -optional = false -python-versions = ">=3.6" -files = [ +uri-template = [ {file = "uri_template-1.2.0-py3-none-any.whl", hash = "sha256:f1699c77b73b925cf4937eae31ab282a86dc885c333f2e942513f08f691fc7db"}, {file = "uri_template-1.2.0.tar.gz", hash = "sha256:934e4d09d108b70eb8a24410af8615294d09d279ce0e7cbcdaef1bd21f932b06"}, ] - -[package.extras] -dev = ["flake8 (<4.0.0)", "flake8-annotations", "flake8-bugbear", "flake8-commas", "flake8-comprehensions", "flake8-continuation", "flake8-datetimez", "flake8-docstrings", "flake8-import-order", "flake8-literal", "flake8-noqa", "flake8-requirements", "flake8-type-annotations", "flake8-use-fstring", "mypy", "pep8-naming"] - -[[package]] -name = "urllib3" -version = "1.26.13" -description = "HTTP library with thread-safe connection pooling, file post, and more." -category = "main" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" -files = [ +urllib3 = [ {file = "urllib3-1.26.13-py2.py3-none-any.whl", hash = "sha256:47cc05d99aaa09c9e72ed5809b60e7ba354e64b59c9c173ac3018642d8bb41fc"}, {file = "urllib3-1.26.13.tar.gz", hash = "sha256:c083dd0dce68dbfbe1129d5271cb90f9447dea7d52097c6e0126120c521ddea8"}, ] - -[package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] -secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] -socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] - -[[package]] -name = "wasabi" -version = "0.10.1" -description = "A lightweight console printing and formatting toolkit" -category = "main" -optional = true -python-versions = "*" -files = [ +wasabi = [ {file = "wasabi-0.10.1-py3-none-any.whl", hash = "sha256:fe862cc24034fbc9f04717cd312ab884f71f51a8ecabebc3449b751c2a649d83"}, {file = "wasabi-0.10.1.tar.gz", hash = "sha256:c8e372781be19272942382b14d99314d175518d7822057cb7a97010c4259d249"}, ] - -[[package]] -name = "wcwidth" -version = "0.2.5" -description = "Measures the displayed width of unicode strings in a terminal" -category = "dev" -optional = false -python-versions = "*" -files = [ +wcwidth = [ {file = "wcwidth-0.2.5-py2.py3-none-any.whl", hash = "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784"}, {file = "wcwidth-0.2.5.tar.gz", hash = "sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83"}, ] - -[[package]] -name = "webcolors" -version = "1.12" -description = "A library for working with color names and color values formats defined by HTML and CSS." 
-category = "dev" -optional = false -python-versions = ">=3.7" -files = [ +webcolors = [ {file = "webcolors-1.12-py3-none-any.whl", hash = "sha256:d98743d81d498a2d3eaf165196e65481f0d2ea85281463d856b1e51b09f62dce"}, {file = "webcolors-1.12.tar.gz", hash = "sha256:16d043d3a08fd6a1b1b7e3e9e62640d09790dce80d2bdd4792a175b35fe794a9"}, ] - -[[package]] -name = "webencodings" -version = "0.5.1" -description = "Character encoding aliases for legacy web content" -category = "dev" -optional = false -python-versions = "*" -files = [ +webencodings = [ {file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"}, {file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"}, ] - -[[package]] -name = "websocket-client" -version = "1.4.2" -description = "WebSocket client for Python with low level API options" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ +websocket-client = [ {file = "websocket-client-1.4.2.tar.gz", hash = "sha256:d6e8f90ca8e2dd4e8027c4561adeb9456b54044312dba655e7cae652ceb9ae59"}, {file = "websocket_client-1.4.2-py3-none-any.whl", hash = "sha256:d6b06432f184438d99ac1f456eaf22fe1ade524c3dd16e661142dc54e9cba574"}, ] - -[package.extras] -docs = ["Sphinx (>=3.4)", "sphinx-rtd-theme (>=0.5)"] -optional = ["python-socks", "wsaccel"] -test = ["websockets"] - -[[package]] -name = "widgetsnbextension" -version = "4.0.4" -description = "Jupyter interactive widgets for Jupyter Notebook" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ +widgetsnbextension = [ {file = "widgetsnbextension-4.0.4-py3-none-any.whl", hash = "sha256:fa0e840719ec95dd2ec85c3a48913f1a0c29d323eacbcdb0b29bfed0cc6da678"}, {file = "widgetsnbextension-4.0.4.tar.gz", hash = "sha256:44c69f18237af0f610557d6c1c7ef76853f5856a0e604c0a517f2320566bb775"}, ] - -[[package]] -name = "wikipedia" -version = "1.4.0" -description = "Wikipedia API for Python" -category = "main" -optional = true -python-versions = "*" -files = [ +wikipedia = [ {file = "wikipedia-1.4.0.tar.gz", hash = "sha256:db0fad1829fdd441b1852306e9856398204dc0786d2996dd2e0c8bb8e26133b2"}, ] - -[package.dependencies] -beautifulsoup4 = "*" -requests = ">=2.0.0,<3.0.0" - -[[package]] -name = "zipp" -version = "3.11.0" -description = "Backport of pathlib-compatible object wrapper for zip files" -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ +zipp = [ {file = "zipp-3.11.0-py3-none-any.whl", hash = "sha256:83a28fcb75844b5c0cdaf5aa4003c2d728c77e05f5aeabe8e95e56727005fbaa"}, {file = "zipp-3.11.0.tar.gz", hash = "sha256:a7a22e05929290a67401440b39690ae6563279bced5f314609d9d03798f56766"}, ] - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)"] -testing = ["flake8 (<5)", "func-timeout", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] - -[extras] -all = ["manifest-ml", "elasticsearch", "faiss-cpu", "transformers", "spacy", "nltk", "wikipedia", "beautifulsoup4"] -llms = ["manifest-ml"] - -[metadata] -lock-version = "2.0" -python-versions = ">=3.8.1,<4.0" -content-hash = "88dd08a95bacc5be4635b713d49733c7eb4cb3e47f5d67bf4da53d373d264fa2" diff --git a/pyproject.toml b/pyproject.toml index 2c59151f..885e78d7 100644 --- a/pyproject.toml +++ b/pyproject.toml 
@@ -22,6 +22,7 @@ spacy = {version = "^3", optional = true} nltk = {version = "^3", optional = true} transformers = {version = "^4", optional = true} beautifulsoup4 = {version = "^4", optional = true} +tiktoken = {version = "^0", optional = true, python="^3.9"} [tool.poetry.group.test.dependencies] pytest = "^7.2.0" @@ -49,7 +50,7 @@ playwright = "^1.28.0" [tool.poetry.extras] llms = ["cohere", "openai", "nlpcloud", "huggingface_hub", "manifest-ml"] -all = ["cohere", "openai", "nlpcloud", "huggingface_hub", "manifest-ml", "elasticsearch", "google-search-results", "faiss-cpu", "sentence_transformers", "transformers", "spacy", "nltk", "wikipedia", "beautifulsoup4"] +all = ["cohere", "openai", "nlpcloud", "huggingface_hub", "manifest-ml", "elasticsearch", "google-search-results", "faiss-cpu", "sentence_transformers", "transformers", "spacy", "nltk", "wikipedia", "beautifulsoup4", "tiktoken"] [tool.isort] profile = "black" From 750edfb440c5284589a3bfa905f549ac7ac08e10 Mon Sep 17 00:00:00 2001 From: Harrison Chase Date: Fri, 16 Dec 2022 06:25:29 -0800 Subject: [PATCH 24/29] add optional collapse prompt (#358) --- langchain/chains/combine_documents/map_reduce.py | 16 +++++++++++++--- langchain/chains/qa_with_sources/__init__.py | 12 +++++++++++- langchain/chains/question_answering/__init__.py | 11 ++++++++++- langchain/chains/summarize/__init__.py | 11 ++++++++++- 4 files changed, 44 insertions(+), 6 deletions(-) diff --git a/langchain/chains/combine_documents/map_reduce.py b/langchain/chains/combine_documents/map_reduce.py index ff74bc4f..8653ef1a 100644 --- a/langchain/chains/combine_documents/map_reduce.py +++ b/langchain/chains/combine_documents/map_reduce.py @@ -2,7 +2,7 @@ from __future__ import annotations -from typing import Any, Callable, Dict, List +from typing import Any, Callable, Dict, List, Optional from pydantic import BaseModel, Extra, root_validator @@ -56,9 +56,12 @@ class MapReduceDocumentsChain(BaseCombineDocumentsChain, BaseModel): """Combining documents by mapping a chain over them, then combining results.""" llm_chain: LLMChain - """Chain to apply to each document individually..""" + """Chain to apply to each document individually.""" combine_document_chain: BaseCombineDocumentsChain """Chain to use to combine results of applying llm_chain to documents.""" + collapse_document_chain: Optional[BaseCombineDocumentsChain] = None + """Chain to use to collapse intermediary results if needed. + If None, will use the combine_document_chain.""" document_variable_name: str """The variable name in the llm_chain to put the documents in. 
If only one variable in the llm_chain, this need not be provided.""" @@ -90,6 +93,13 @@ class MapReduceDocumentsChain(BaseCombineDocumentsChain, BaseModel): ) return values + @property + def _collapse_chain(self) -> BaseCombineDocumentsChain: + if self.collapse_document_chain is not None: + return self.collapse_document_chain + else: + return self.combine_document_chain + def combine_docs( self, docs: List[Document], token_max: int = 3000, **kwargs: Any ) -> str: @@ -117,7 +127,7 @@ class MapReduceDocumentsChain(BaseCombineDocumentsChain, BaseModel): result_docs = [] for docs in new_result_doc_list: new_doc = _collapse_docs( - docs, self.combine_document_chain.combine_docs, **kwargs + docs, self._collapse_chain.combine_docs, **kwargs ) result_docs.append(new_doc) num_tokens = self.combine_document_chain.prompt_length( diff --git a/langchain/chains/qa_with_sources/__init__.py b/langchain/chains/qa_with_sources/__init__.py index 159abd76..82d93f50 100644 --- a/langchain/chains/qa_with_sources/__init__.py +++ b/langchain/chains/qa_with_sources/__init__.py @@ -1,5 +1,5 @@ """Load question answering with sources chains.""" -from typing import Any, Mapping, Protocol +from typing import Any, Mapping, Optional, Protocol from langchain.chains.combine_documents.base import BaseCombineDocumentsChain from langchain.chains.combine_documents.map_reduce import MapReduceDocumentsChain @@ -44,6 +44,7 @@ def _load_map_reduce_chain( document_prompt: BasePromptTemplate = map_reduce_prompt.EXAMPLE_PROMPT, combine_document_variable_name: str = "summaries", map_reduce_document_variable_name: str = "context", + collapse_prompt: Optional[BasePromptTemplate] = None, **kwargs: Any, ) -> MapReduceDocumentsChain: map_chain = LLMChain(llm=llm, prompt=question_prompt) @@ -53,10 +54,19 @@ def _load_map_reduce_chain( document_variable_name=combine_document_variable_name, document_prompt=document_prompt, ) + if collapse_prompt is None: + collapse_chain = None + else: + collapse_chain = StuffDocumentsChain( + llm_chain=LLMChain(llm=llm, prompt=collapse_prompt), + document_variable_name=combine_document_variable_name, + document_prompt=document_prompt, + ) return MapReduceDocumentsChain( llm_chain=map_chain, combine_document_chain=combine_document_chain, document_variable_name=map_reduce_document_variable_name, + collapse_document_chain=collapse_chain, **kwargs, ) diff --git a/langchain/chains/question_answering/__init__.py b/langchain/chains/question_answering/__init__.py index 1591054c..9883e068 100644 --- a/langchain/chains/question_answering/__init__.py +++ b/langchain/chains/question_answering/__init__.py @@ -1,5 +1,5 @@ """Load question answering chains.""" -from typing import Any, Mapping, Protocol +from typing import Any, Mapping, Optional, Protocol from langchain.chains.combine_documents.base import BaseCombineDocumentsChain from langchain.chains.combine_documents.map_reduce import MapReduceDocumentsChain @@ -41,6 +41,7 @@ def _load_map_reduce_chain( combine_prompt: BasePromptTemplate = map_reduce_prompt.COMBINE_PROMPT, combine_document_variable_name: str = "summaries", map_reduce_document_variable_name: str = "context", + collapse_prompt: Optional[BasePromptTemplate] = None, **kwargs: Any, ) -> MapReduceDocumentsChain: map_chain = LLMChain(llm=llm, prompt=question_prompt) @@ -49,10 +50,18 @@ def _load_map_reduce_chain( combine_document_chain = StuffDocumentsChain( llm_chain=reduce_chain, document_variable_name=combine_document_variable_name ) + if collapse_prompt is None: + collapse_chain = None + else: + 
collapse_chain = StuffDocumentsChain( + llm_chain=LLMChain(llm=llm, prompt=collapse_prompt), + document_variable_name=combine_document_variable_name, + ) return MapReduceDocumentsChain( llm_chain=map_chain, combine_document_chain=combine_document_chain, document_variable_name=map_reduce_document_variable_name, + collapse_document_chain=collapse_chain, **kwargs, ) diff --git a/langchain/chains/summarize/__init__.py b/langchain/chains/summarize/__init__.py index ab90c70b..e613ff8d 100644 --- a/langchain/chains/summarize/__init__.py +++ b/langchain/chains/summarize/__init__.py @@ -1,5 +1,5 @@ """Load summarizing chains.""" -from typing import Any, Mapping, Protocol +from typing import Any, Mapping, Optional, Protocol from langchain.chains.combine_documents.base import BaseCombineDocumentsChain from langchain.chains.combine_documents.map_reduce import MapReduceDocumentsChain @@ -37,6 +37,7 @@ def _load_map_reduce_chain( combine_prompt: BasePromptTemplate = map_reduce_prompt.PROMPT, combine_document_variable_name: str = "text", map_reduce_document_variable_name: str = "text", + collapse_prompt: Optional[BasePromptTemplate] = None, **kwargs: Any, ) -> MapReduceDocumentsChain: map_chain = LLMChain(llm=llm, prompt=map_prompt) @@ -45,10 +46,18 @@ def _load_map_reduce_chain( combine_document_chain = StuffDocumentsChain( llm_chain=reduce_chain, document_variable_name=combine_document_variable_name ) + if collapse_prompt is None: + collapse_chain = None + else: + collapse_chain = StuffDocumentsChain( + llm_chain=LLMChain(llm=llm, prompt=collapse_prompt), + document_variable_name=combine_document_variable_name, + ) return MapReduceDocumentsChain( llm_chain=map_chain, combine_document_chain=combine_document_chain, document_variable_name=map_reduce_document_variable_name, + collapse_document_chain=collapse_chain, **kwargs, ) From 809a9f485f577499c67386697d3efaf167416cf6 Mon Sep 17 00:00:00 2001 From: Harrison Chase Date: Fri, 16 Dec 2022 07:42:31 -0800 Subject: [PATCH 25/29] Harrison/new version (#362) --- docs/examples/integrations/textsplitter.ipynb | 60 ++++++++++++++++--- langchain/llms/openai.py | 4 ++ pyproject.toml | 2 +- 3 files changed, 58 insertions(+), 8 deletions(-) diff --git a/docs/examples/integrations/textsplitter.ipynb b/docs/examples/integrations/textsplitter.ipynb index 5f57d110..3103b4c6 100644 --- a/docs/examples/integrations/textsplitter.ipynb +++ b/docs/examples/integrations/textsplitter.ipynb @@ -19,7 +19,7 @@ }, { "cell_type": "code", - "execution_count": 5, + "execution_count": 1, "id": "e82c4685", "metadata": {}, "outputs": [], @@ -42,7 +42,7 @@ }, { "cell_type": "code", - "execution_count": 7, + "execution_count": 2, "id": "79ff6737", "metadata": {}, "outputs": [], @@ -57,7 +57,7 @@ }, { "cell_type": "code", - "execution_count": 8, + "execution_count": 3, "id": "38547666", "metadata": {}, "outputs": [ @@ -67,7 +67,7 @@ "'Madam Speaker, Madam Vice President, our First Lady and Second Gentleman. Members of Congress and the Cabinet. Justices of the Supreme Court. My fellow Americans. \\n\\nLast year COVID-19 kept us apart. This year we are finally together again. \\n\\nTonight, we meet as Democrats Republicans and Independents. But most importantly as Americans. \\n\\nWith a duty to one another to the American people to the Constitution. \\n\\nAnd with an unwavering resolve that freedom will always triumph over tyranny. \\n\\nSix days ago, Russia’s Vladimir Putin sought to shake the foundations of the free world thinking he could make it bend to his menacing ways. 
But he badly miscalculated. \\n\\nHe thought he could roll into Ukraine and the world would roll over. Instead he met a wall of strength he never imagined. \\n\\nHe met the Ukrainian people. \\n\\nFrom President Zelenskyy to every Ukrainian, their fearlessness, their courage, their determination, inspires the world. \\n\\nGroups of citizens blocking tanks with their bodies. Everyone from students to retirees teachers turned soldiers defending their homeland. '" ] }, - "execution_count": 8, + "execution_count": 3, "metadata": {}, "output_type": "execute_result" } @@ -88,7 +88,7 @@ }, { "cell_type": "code", - "execution_count": 9, + "execution_count": 4, "id": "a8ce51d5", "metadata": {}, "outputs": [ { "name": "stdout", @@ -108,7 +108,7 @@ }, { "cell_type": "code", - "execution_count": 10, + "execution_count": 5, "id": "ca5e72c0", "metadata": {}, "outputs": [], @@ -119,7 +119,7 @@ }, { "cell_type": "code", - "execution_count": 11, + "execution_count": 6, "id": "37cdfbeb", "metadata": {}, "outputs": [ @@ -143,6 +143,52 @@ "print(texts[0])" ] }, + { + "cell_type": "markdown", + "id": "7683b36a", + "metadata": {}, + "source": [ + "## tiktoken (OpenAI) Length Function\n", + "You can also use tiktoken, an open-source tokenizer package from OpenAI, to estimate the number of tokens used. It will probably be more accurate for their models." + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "id": "825f7c0a", + "metadata": {}, + "outputs": [], + "source": [ + "text_splitter = CharacterTextSplitter.from_tiktoken_encoder(chunk_size=100, chunk_overlap=0)\n", + "texts = text_splitter.split_text(state_of_the_union)" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "id": "ae35d165", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Madam Speaker, Madam Vice President, our First Lady and Second Gentleman. Members of Congress and the Cabinet. Justices of the Supreme Court. My fellow Americans. \n", + "\n", + "Last year COVID-19 kept us apart. This year we are finally together again. \n", + "\n", + "Tonight, we meet as Democrats Republicans and Independents. But most importantly as Americans. \n", + "\n", + "With a duty to one another to the American people to the Constitution. \n", + "\n", + "And with an unwavering resolve that freedom will always triumph over tyranny. 
\n" + ] + } + ], + "source": [ + "print(texts[0])" + ] + }, { "cell_type": "markdown", "id": "ea2973ac", diff --git a/langchain/llms/openai.py b/langchain/llms/openai.py index c639db63..2b020595 100644 --- a/langchain/llms/openai.py +++ b/langchain/llms/openai.py @@ -1,4 +1,5 @@ """Wrapper around OpenAI APIs.""" +import sys from typing import Any, Dict, List, Mapping, Optional from pydantic import BaseModel, Extra, Field, root_validator @@ -188,6 +189,9 @@ class OpenAI(LLM, BaseModel): def get_num_tokens(self, text: str) -> int: """Calculate num tokens with tiktoken package.""" + # tiktoken NOT supported for Python 3.8 or below + if sys.version_info[1] <= 8: + return super().get_num_tokens(text) try: import tiktoken except ImportError: diff --git a/pyproject.toml b/pyproject.toml index 885e78d7..be4705dc 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "langchain" -version = "0.0.38" +version = "0.0.39" description = "Building applications with LLMs through composability" authors = [] license = "MIT" From 85c1bd2cd04dcdb2367e96fb49b05e40bc755f41 Mon Sep 17 00:00:00 2001 From: Benjamin Date: Sat, 17 Dec 2022 01:47:23 +0100 Subject: [PATCH 26/29] add sqlalchemy generic cache (#361) Created a generic SQLAlchemyCache class to plug any database supported by SQAlchemy. (I am using Postgres). I also based the class SQLiteCache class on this class SQLAlchemyCache. As a side note, I'm questioning the need for two distinct class LLMCache, FullLLMCache. Shouldn't we merge both ? --- docs/examples/prompts/llm_functionality.ipynb | 18 ++++++++++++++--- langchain/cache.py | 20 ++++++++++++++----- 2 files changed, 30 insertions(+), 8 deletions(-) diff --git a/docs/examples/prompts/llm_functionality.ipynb b/docs/examples/prompts/llm_functionality.ipynb index 251e22a6..7edfc841 100644 --- a/docs/examples/prompts/llm_functionality.ipynb +++ b/docs/examples/prompts/llm_functionality.ipynb @@ -373,12 +373,19 @@ "id": "6053408b", "metadata": {}, "outputs": [], - "source": [] + "source": [ + "# You can use SQLAlchemyCache to cache with any SQL database supported by SQLAlchemy.\n", + "from langchain.cache import SQLAlchemyCache\n", + "from sqlalchemy import create_engine\n", + "\n", + "engine = create_engine(\"postgresql://postgres:postgres@localhost:5432/postgres\")\n", + "langchain.llm_cache = SQLAlchemyCache(engine)" + ] } ], "metadata": { "kernelspec": { - "display_name": "Python 3 (ipykernel)", + "display_name": "base", "language": "python", "name": "python3" }, @@ -392,7 +399,12 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.10.8" + "version": "3.9.12 (main, Jun 1 2022, 06:34:44) \n[Clang 12.0.0 ]" + }, + "vscode": { + "interpreter": { + "hash": "1235b9b19e8e9828b5c1fdb2cd89fe8d3de0fcde5ef5f3db36e4b671adb8660f" + } } }, "nbformat": 4, diff --git a/langchain/cache.py b/langchain/cache.py index 86a9dc6b..258c0383 100644 --- a/langchain/cache.py +++ b/langchain/cache.py @@ -3,6 +3,7 @@ from abc import ABC, abstractmethod from typing import Dict, List, Optional, Tuple, Union from sqlalchemy import Column, Integer, String, create_engine, select +from sqlalchemy.engine.base import Engine from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.orm import Session @@ -61,12 +62,12 @@ class FullLLMCache(Base): # type: ignore response = Column(String) -class SQLiteCache(BaseCache): - """Cache that uses SQLite as a backend.""" +class SQLAlchemyCache(BaseCache): + """Cache that uses SQAlchemy as a backend.""" - def 
__init__(self, database_path: str = ".langchain.db"): - """Initialize by creating the engine and all tables.""" - self.engine = create_engine(f"sqlite:///{database_path}") + def __init__(self, engine: Engine): + """Initialize by creating all tables.""" + self.engine = engine Base.metadata.create_all(self.engine) def lookup(self, prompt: str, llm_string: str) -> Optional[RETURN_VAL_TYPE]: @@ -106,3 +107,12 @@ class SQLiteCache(BaseCache): ) with Session(self.engine) as session, session.begin(): session.add(item) + + +class SQLiteCache(SQLAlchemyCache): + """Cache that uses SQLite as a backend.""" + + def __init__(self, database_path: str = ".langchain.db"): + """Initialize by creating the engine and all tables.""" + engine = create_engine(f"sqlite:///{database_path}") + super().__init__(engine) From 2eef76ed3fcc20df66f060e74589f84cb6b62bf6 Mon Sep 17 00:00:00 2001 From: Harrison Chase Date: Fri, 16 Dec 2022 16:48:54 -0800 Subject: [PATCH 27/29] fix documentation (#365) --- docs/explanation/combine_docs.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/explanation/combine_docs.md b/docs/explanation/combine_docs.md index 81a0d13d..2f86ad95 100644 --- a/docs/explanation/combine_docs.md +++ b/docs/explanation/combine_docs.md @@ -113,7 +113,7 @@ asking the LLM to refine the output based on the new document. **Pros:** Can pull in more relevant context, and may be less lossy than `RefineDocumentsChain`. -**Cons:** Requires many more calls to the LLM than `StuffDocumentsChain`. The calls are also NOT independent, meaning they cannot be paralleled like `RefineDocumentsChain`. There is also some potential dependencies on the ordering of the documents. +**Cons:** Requires many more calls to the LLM than `StuffDocumentsChain`. The calls are also NOT independent, meaning they cannot be parallelized as they can in `MapReduceDocumentsChain`. There are also some potential dependencies on the ordering of the documents. ## Use Cases LangChain supports the above three methods of augmenting LLMs with external data. From fe6695b9e7b91770de7a0e33412faf71f94d9689 Mon Sep 17 00:00:00 2001 From: mrbean <43734688+sam-h-bean@users.noreply.github.com> Date: Sat, 17 Dec 2022 10:00:04 -0500 Subject: [PATCH 28/29] Add HuggingFacePipeline LLM (#353) https://github.com/hwchase17/langchain/issues/354 Add support for running your own HF pipeline locally. This gives you a lot more flexibility over which HF features and models you use, since you are not beholden to what is hosted in the HF Hub. You could also use HF Optimum to quantize your models and get fairly fast inference even when running on a laptop. 
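A minimal local-usage sketch (it mirrors the integration test added below; the model id, task, and `max_new_tokens` value are only illustrative, and the weights are downloaded from the Hub on first use):

.. code-block:: python

    from langchain.llms.huggingface_pipeline import HuggingFacePipeline

    # Download gpt2 once, then run generation fully locally; no API token needed.
    local_llm = HuggingFacePipeline.from_model_id(
        model_id="gpt2",
        task="text-generation",
        model_kwargs={"max_new_tokens": 10},
    )
    print(local_llm("Say foo:"))
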
--- langchain/__init__.py | 2 + langchain/llms/__init__.py | 11 +- langchain/llms/huggingface_pipeline.py | 118 +++++++++++ poetry.lock | 192 ++++++++++++++---- pyproject.toml | 5 +- .../llms/test_huggingface_pipeline.py | 41 ++++ tests/integration_tests/llms/utils.py | 2 +- 7 files changed, 332 insertions(+), 39 deletions(-) create mode 100644 langchain/llms/huggingface_pipeline.py create mode 100644 tests/integration_tests/llms/test_huggingface_pipeline.py diff --git a/langchain/__init__.py b/langchain/__init__.py index a32ffa95..c100d0e3 100644 --- a/langchain/__init__.py +++ b/langchain/__init__.py @@ -18,6 +18,7 @@ from langchain.chains import ( ) from langchain.docstore import InMemoryDocstore, Wikipedia from langchain.llms import Cohere, HuggingFaceHub, OpenAI +from langchain.llms.huggingface_pipeline import HuggingFacePipeline from langchain.logger import BaseLogger, StdOutLogger from langchain.prompts import ( BasePromptTemplate, @@ -50,6 +51,7 @@ __all__ = [ "ReActChain", "Wikipedia", "HuggingFaceHub", + "HuggingFacePipeline", "SQLDatabase", "SQLDatabaseChain", "FAISS", diff --git a/langchain/llms/__init__.py b/langchain/llms/__init__.py index 01e3c702..f32ef72d 100644 --- a/langchain/llms/__init__.py +++ b/langchain/llms/__init__.py @@ -5,10 +5,18 @@ from langchain.llms.ai21 import AI21 from langchain.llms.base import LLM from langchain.llms.cohere import Cohere from langchain.llms.huggingface_hub import HuggingFaceHub +from langchain.llms.huggingface_pipeline import HuggingFacePipeline from langchain.llms.nlpcloud import NLPCloud from langchain.llms.openai import OpenAI -__all__ = ["Cohere", "NLPCloud", "OpenAI", "HuggingFaceHub", "AI21"] +__all__ = [ + "Cohere", + "NLPCloud", + "OpenAI", + "HuggingFaceHub", + "HuggingFacePipeline", + "AI21", +] type_to_cls_dict: Dict[str, Type[LLM]] = { "ai21": AI21, @@ -16,4 +24,5 @@ type_to_cls_dict: Dict[str, Type[LLM]] = { "huggingface_hub": HuggingFaceHub, "nlpcloud": NLPCloud, "openai": OpenAI, + "huggingface_pipeline": HuggingFacePipeline, } diff --git a/langchain/llms/huggingface_pipeline.py b/langchain/llms/huggingface_pipeline.py new file mode 100644 index 00000000..6db9bcd0 --- /dev/null +++ b/langchain/llms/huggingface_pipeline.py @@ -0,0 +1,118 @@ +"""Wrapper around HuggingFace Pipeline APIs.""" +from typing import Any, List, Mapping, Optional + +from pydantic import BaseModel, Extra + +from langchain.llms.base import LLM +from langchain.llms.utils import enforce_stop_tokens + +DEFAULT_MODEL_ID = "gpt2" +DEFAULT_TASK = "text-generation" +VALID_TASKS = ("text2text-generation", "text-generation") + + +class HuggingFacePipeline(LLM, BaseModel): + """Wrapper around HuggingFace Pipeline API. + + To use, you should have the ``transformers`` python package installed. + + Only supports `text-generation` and `text2text-generation` for now. + + Example using from_model_id: + .. code-block:: python + + from langchain.llms.huggingface_pipeline import HuggingFacePipeline + hf = HuggingFacePipeline.from_model_id( + model_id="gpt2", task="text-generation" + ) + Example passing pipeline in directly: + .. 
code-block:: python + + from langchain.llms.huggingface_pipeline import HuggingFacePipeline + from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline + + model_id = "gpt2" + tokenizer = AutoTokenizer.from_pretrained(model_id) + model = AutoModelForCausalLM.from_pretrained(model_id) + pipe = pipeline( + "text-generation", model=model, tokenizer=tokenizer, max_new_tokens=10 + ) + hf = HuggingFacePipeline(pipeline=pipe) + """ + + pipeline: Any #: :meta private: + model_id: str = DEFAULT_MODEL_ID + """Model name to use.""" + model_kwargs: Optional[dict] = None + """Keyword arguments to pass to the model.""" + + class Config: + """Configuration for this pydantic object.""" + + extra = Extra.forbid + + @classmethod + def from_model_id( + cls, + model_id: str, + task: str, + model_kwargs: Optional[dict] = None, + **kwargs: Any, + ) -> LLM: + """Construct the pipeline object from model_id and task.""" + try: + from transformers import AutoModelForCausalLM, AutoTokenizer + from transformers import pipeline as hf_pipeline + + tokenizer = AutoTokenizer.from_pretrained(model_id) + model = AutoModelForCausalLM.from_pretrained(model_id) + pipeline = hf_pipeline( + task=task, model=model, tokenizer=tokenizer, **(model_kwargs or {}) + ) + if pipeline.task not in VALID_TASKS: + raise ValueError( + f"Got invalid task {pipeline.task}, " + f"currently only {VALID_TASKS} are supported" + ) + + return cls( + pipeline=pipeline, + model_id=model_id, + model_kwargs=model_kwargs, + **kwargs, + ) + except ImportError: + raise ValueError( + "Could not import transformers python package. " + "Please install it with `pip install transformers`." + ) + + @property + def _identifying_params(self) -> Mapping[str, Any]: + """Get the identifying parameters.""" + return { + **{"model_id": self.model_id}, + **{"model_kwargs": self.model_kwargs}, + } + + @property + def _llm_type(self) -> str: + return "huggingface_pipeline" + + def _call(self, prompt: str, stop: Optional[List[str]] = None) -> str: + response = self.pipeline(text_inputs=prompt) + if self.pipeline.task == "text-generation": + # Text generation return includes the starter text. + text = response[0]["generated_text"][len(prompt) :] + elif self.pipeline.task == "text2text-generation": + text = response[0]["generated_text"] + else: + raise ValueError( + f"Got invalid task {self.pipeline.task}, " + f"currently only {VALID_TASKS} are supported" + ) + if stop is not None: + # This is a bit hacky, but I can't figure out a better way to enforce + # stop tokens when making calls to the pipeline. + text = enforce_stop_tokens(text, stop) + return text diff --git a/poetry.lock b/poetry.lock index 71d31f41..9dcbd09b 100644 --- a/poetry.lock +++ b/poetry.lock @@ -640,7 +640,7 @@ arrow = ">=0.15.0" [[package]] name = "isort" -version = "5.11.1" +version = "5.11.2" description = "A Python utility / library to sort Python imports." 
category = "dev" optional = false @@ -1181,6 +1181,54 @@ category = "main" optional = false python-versions = ">=3.8" +[[package]] +name = "nvidia-cublas-cu11" +version = "11.10.3.66" +description = "CUBLAS native runtime libraries" +category = "main" +optional = true +python-versions = ">=3" + +[package.dependencies] +setuptools = "*" +wheel = "*" + +[[package]] +name = "nvidia-cuda-nvrtc-cu11" +version = "11.7.99" +description = "NVRTC native runtime libraries" +category = "main" +optional = true +python-versions = ">=3" + +[package.dependencies] +setuptools = "*" +wheel = "*" + +[[package]] +name = "nvidia-cuda-runtime-cu11" +version = "11.7.99" +description = "CUDA Runtime native Libraries" +category = "main" +optional = true +python-versions = ">=3" + +[package.dependencies] +setuptools = "*" +wheel = "*" + +[[package]] +name = "nvidia-cudnn-cu11" +version = "8.5.0.96" +description = "cuDNN runtime libraries" +category = "main" +optional = true +python-versions = ">=3" + +[package.dependencies] +setuptools = "*" +wheel = "*" + [[package]] name = "packaging" version = "22.0" @@ -1750,7 +1798,7 @@ python-versions = ">=3.6" [[package]] name = "spacy" -version = "3.4.3" +version = "3.4.4" description = "Industrial-strength Natural Language Processing (NLP) in Python" category = "main" optional = true @@ -1769,6 +1817,7 @@ preshed = ">=3.0.2,<3.1.0" pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<1.11.0" requests = ">=2.13.0,<3.0.0" setuptools = "*" +smart-open = ">=5.2.1,<7.0.0" spacy-legacy = ">=3.0.10,<3.1.0" spacy-loggers = ">=1.0.0,<2.0.0" srsly = ">=2.4.3,<3.0.0" @@ -1997,6 +2046,24 @@ category = "dev" optional = false python-versions = ">=3.7" +[[package]] +name = "torch" +version = "1.13.1" +description = "Tensors and Dynamic neural networks in Python with strong GPU acceleration" +category = "main" +optional = true +python-versions = ">=3.7.0" + +[package.dependencies] +nvidia-cublas-cu11 = {version = "11.10.3.66", markers = "platform_system == \"Linux\""} +nvidia-cuda-nvrtc-cu11 = {version = "11.7.99", markers = "platform_system == \"Linux\""} +nvidia-cuda-runtime-cu11 = {version = "11.7.99", markers = "platform_system == \"Linux\""} +nvidia-cudnn-cu11 = {version = "8.5.0.96", markers = "platform_system == \"Linux\""} +typing-extensions = "*" + +[package.extras] +opt-einsum = ["opt-einsum (>=3.3)"] + [[package]] name = "tornado" version = "6.2" @@ -2227,6 +2294,17 @@ docs = ["Sphinx (>=3.4)", "sphinx-rtd-theme (>=0.5)"] optional = ["python-socks", "wsaccel"] test = ["websockets"] +[[package]] +name = "wheel" +version = "0.38.4" +description = "A built-package format for Python" +category = "main" +optional = true +python-versions = ">=3.7" + +[package.extras] +test = ["pytest (>=3.0.0)"] + [[package]] name = "widgetsnbextension" version = "4.0.4" @@ -2260,13 +2338,13 @@ docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker testing = ["flake8 (<5)", "func-timeout", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] [extras] -all = ["manifest-ml", "elasticsearch", "faiss-cpu", "transformers", "spacy", "nltk", "wikipedia", "beautifulsoup4", "tiktoken"] -llms = ["manifest-ml"] +all = ["manifest-ml", "elasticsearch", "faiss-cpu", "transformers", "spacy", "nltk", "wikipedia", "beautifulsoup4", "tiktoken", "torch"] +llms = ["manifest-ml", "torch", "transformers"] [metadata] lock-version = "1.1" 
python-versions = ">=3.8.1,<4.0" -content-hash = "45b0665b465cf551601e1a18f5b99c9a66c99313de21292f2b897d2ec42d2f6a" +content-hash = "d9ed2c5e1b2c51d7f8a9f74c858ab5058db14b7d6ca542777d7ecd07ccef5ee8" [metadata.files] anyio = [ @@ -2757,8 +2835,8 @@ isoduration = [ {file = "isoduration-20.11.0.tar.gz", hash = "sha256:ac2f9015137935279eac671f94f89eb00584f940f5dc49462a0c4ee692ba1bd9"}, ] isort = [ - {file = "isort-5.11.1-py3-none-any.whl", hash = "sha256:bf02c95f1fe615ebbe13a619cfed1619ddfe8941274c9e3de3143adca406cb02"}, - {file = "isort-5.11.1.tar.gz", hash = "sha256:7c5bd998504826b6f1e6f2f98b533976b066baba29b8bae83fdeefd0b89c6b70"}, + {file = "isort-5.11.2-py3-none-any.whl", hash = "sha256:e486966fba83f25b8045f8dd7455b0a0d1e4de481e1d7ce4669902d9fb85e622"}, + {file = "isort-5.11.2.tar.gz", hash = "sha256:dd8bbc5c0990f2a095d754e50360915f73b4c26fc82733eb5bfc6b48396af4d2"}, ] jedi = [ {file = "jedi-0.18.2-py2.py3-none-any.whl", hash = "sha256:203c1fd9d969ab8f2119ec0a3342e0b49910045abe6af0a3ae83a5764d54639e"}, @@ -3084,6 +3162,23 @@ numpy = [ {file = "numpy-1.23.5-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:01dd17cbb340bf0fc23981e52e1d18a9d4050792e8fb8363cecbf066a84b827d"}, {file = "numpy-1.23.5.tar.gz", hash = "sha256:1b1766d6f397c18153d40015ddfc79ddb715cabadc04d2d228d4e5a8bc4ded1a"}, ] +nvidia-cublas-cu11 = [ + {file = "nvidia_cublas_cu11-11.10.3.66-py3-none-manylinux1_x86_64.whl", hash = "sha256:d32e4d75f94ddfb93ea0a5dda08389bcc65d8916a25cb9f37ac89edaeed3bded"}, + {file = "nvidia_cublas_cu11-11.10.3.66-py3-none-win_amd64.whl", hash = "sha256:8ac17ba6ade3ed56ab898a036f9ae0756f1e81052a317bf98f8c6d18dc3ae49e"}, +] +nvidia-cuda-nvrtc-cu11 = [ + {file = "nvidia_cuda_nvrtc_cu11-11.7.99-2-py3-none-manylinux1_x86_64.whl", hash = "sha256:9f1562822ea264b7e34ed5930567e89242d266448e936b85bc97a3370feabb03"}, + {file = "nvidia_cuda_nvrtc_cu11-11.7.99-py3-none-manylinux1_x86_64.whl", hash = "sha256:f7d9610d9b7c331fa0da2d1b2858a4a8315e6d49765091d28711c8946e7425e7"}, + {file = "nvidia_cuda_nvrtc_cu11-11.7.99-py3-none-win_amd64.whl", hash = "sha256:f2effeb1309bdd1b3854fc9b17eaf997808f8b25968ce0c7070945c4265d64a3"}, +] +nvidia-cuda-runtime-cu11 = [ + {file = "nvidia_cuda_runtime_cu11-11.7.99-py3-none-manylinux1_x86_64.whl", hash = "sha256:cc768314ae58d2641f07eac350f40f99dcb35719c4faff4bc458a7cd2b119e31"}, + {file = "nvidia_cuda_runtime_cu11-11.7.99-py3-none-win_amd64.whl", hash = "sha256:bc77fa59a7679310df9d5c70ab13c4e34c64ae2124dd1efd7e5474b71be125c7"}, +] +nvidia-cudnn-cu11 = [ + {file = "nvidia_cudnn_cu11-8.5.0.96-2-py3-none-manylinux1_x86_64.whl", hash = "sha256:402f40adfc6f418f9dae9ab402e773cfed9beae52333f6d86ae3107a1b9527e7"}, + {file = "nvidia_cudnn_cu11-8.5.0.96-py3-none-manylinux1_x86_64.whl", hash = "sha256:71f8111eb830879ff2836db3cccf03bbd735df9b0d17cd93761732ac50a8a108"}, +] packaging = [ {file = "packaging-22.0-py3-none-any.whl", hash = "sha256:957e2148ba0e1a3b282772e791ef1d8083648bc131c8ab0c1feba110ce1146c3"}, {file = "packaging-22.0.tar.gz", hash = "sha256:2198ec20bd4c017b8f9717e00f0c8714076fc2fd93816750ab48e2c41de2cfd3"}, @@ -3622,34 +3717,34 @@ soupsieve = [ {file = "soupsieve-2.3.2.post1.tar.gz", hash = "sha256:fc53893b3da2c33de295667a0e19f078c14bf86544af307354de5fcf12a3f30d"}, ] spacy = [ - {file = "spacy-3.4.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e546b314f619502ae03e5eb9a0cfd09ca7a9db265bcdd8a3af83cfb0f1432e55"}, - {file = "spacy-3.4.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ded11aa8966236aab145b4d2d024b3eb61ac50078362d77d9ed7d8c240ef0f4a"}, - {file = 
"spacy-3.4.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:462e141f514d78cff85685b5b12eb8cadac0bad2f7820149cbe18d03ccb2e59c"}, - {file = "spacy-3.4.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c966d25b3f3e49f5de08546b3638928f49678c365cbbebd0eec28f74e0adb539"}, - {file = "spacy-3.4.3-cp310-cp310-win_amd64.whl", hash = "sha256:2ddba486c4c981abe6f1e3fd72648dc8811966e5f0e05808f9c9fab155c388d7"}, - {file = "spacy-3.4.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3c87117dd335fba44d1c0d77602f0763c3addf4e7ef9bdbe9a495466c3484c69"}, - {file = "spacy-3.4.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3ce3938720f48eaeeb360a7f623f15a0d9efd1a688d5d740e3d4cdcd6f6da8a3"}, - {file = "spacy-3.4.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ad6bf5e4e7f0bc2ef94b7ff6fe59abd766f74c192bca2f17430a3b3cd5bda5a"}, - {file = "spacy-3.4.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6644c678bd7af567c6ce679f71d64119282e7d6f1a6f787162a91be3ea39333"}, - {file = "spacy-3.4.3-cp311-cp311-win_amd64.whl", hash = "sha256:e6b871de8857a6820140358db3943180fdbe03d44ed792155cee6cb95f4ac4ea"}, - {file = "spacy-3.4.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d211c2b8894354bf8d961af9a9dcab38f764e1dcddd7b80760e438fcd4c9fe43"}, - {file = "spacy-3.4.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ea41f9de30435456235c4182d8bc2eb54a0a64719856e66e780350bb4c8cfbe"}, - {file = "spacy-3.4.3-cp36-cp36m-win_amd64.whl", hash = "sha256:afaf6e716cbac4a0fbfa9e9bf95decff223936597ddd03ea869118a7576aa1b1"}, - {file = "spacy-3.4.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7115da36369b3c537caf2fe08e0b45528bd091c7f56ba3580af1e6fdfa9b1081"}, - {file = "spacy-3.4.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b3e629c889cac9656151286ec1232c6a948ce0d44a39f1ef5e60fed4f183a10"}, - {file = "spacy-3.4.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9277cd0fcb96ee5dd885f7e96c639f21afd96198d61ca32100446afbff4dfbef"}, - {file = "spacy-3.4.3-cp37-cp37m-win_amd64.whl", hash = "sha256:a36bd06a5a147350e5f5f6903c4777296c37b18199251bb41056c3a73aa4494f"}, - {file = "spacy-3.4.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bdafcd0823ca804c39d0bed9e677eb7d0235b1259563d0fd4d3a201c71108af8"}, - {file = "spacy-3.4.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0cdc23a48e6543402b4c56ebf2d36246001175c29fd56d3081efcec684651abc"}, - {file = "spacy-3.4.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:455c2fbd1de24b6fe34fa121d87525134d7498f9f458ebc8274d7940b473999e"}, - {file = "spacy-3.4.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d1c85279fbb6b75d7fb8d7c59c2b734502e51271cad90926e8df1d21b67da5aa"}, - {file = "spacy-3.4.3-cp38-cp38-win_amd64.whl", hash = "sha256:5c0d65f39184f522b4e67b965a42d121a3b2d799362682fe8847b64b0ce5bc7c"}, - {file = "spacy-3.4.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a7b97ec21ed773edb2479ae5d6c7686b8034f418df6bccd9218f5c3c2b7cf888"}, - {file = "spacy-3.4.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:36a9a506029842795099fd97ad95f0da2845c319020fcc7164cbf33650726f83"}, - {file = "spacy-3.4.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ab293eb1423fa05c7ee71b2fedda57c2b4a4ca8dc054ce678809457287b01dc"}, - {file = 
"spacy-3.4.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb6d0f185126decc8392cde7d28eb6e85ba4bca15424713288cccc49c2a3c52b"}, - {file = "spacy-3.4.3-cp39-cp39-win_amd64.whl", hash = "sha256:676ab9ab2cf94ba48caa306f185a166e85bd35b388ec24512c8ba7dfcbc7517e"}, - {file = "spacy-3.4.3.tar.gz", hash = "sha256:22698cf5175e2b697e82699fcccee3092b42137a57d352df208d71657fd693bb"}, + {file = "spacy-3.4.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:07a10999a3e37f896758a92c2eed263638bcbf2747dc3a4aeea929aaa20ea28c"}, + {file = "spacy-3.4.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e6d98511dc8a88d3a96bcae13971a284459362076738c85053d1a3791f6cde92"}, + {file = "spacy-3.4.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f2cad9c5543f03b3375c252e4dd45670ee8ed99c925dca15eadab5084fd1b033"}, + {file = "spacy-3.4.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4ade19c1e676cac2546f268db22bc5eba08d12beafabe80f1b9f06028b3a0b52"}, + {file = "spacy-3.4.4-cp310-cp310-win_amd64.whl", hash = "sha256:e782c8a7c4805cc1b34ed2b11f72a5cf2b9851e20f7afe3e97caf206f19f761b"}, + {file = "spacy-3.4.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:aa027e69ef9fe42c8b02b940872e5bde0ce1bf66b6bf488c6493e3ce660c4b3a"}, + {file = "spacy-3.4.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ddeb5d725b6fa9c9009b1ff645db8f5caab9ed8956ee3a84b8379951caad1d36"}, + {file = "spacy-3.4.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29d6bb428a6bb19e026d8bbb9d4385c25b21e1ce51fcaabadfb5599b2390a79c"}, + {file = "spacy-3.4.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a21187ad4c44e166dc3deed23992ea1a74d731c9a6bdd9fca306d455181577fa"}, + {file = "spacy-3.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:10643c6d335a02805f6676738a3e992323cfd9438115cc253435e5053dc93824"}, + {file = "spacy-3.4.4-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:486228cfa7ced18ec99008388028bd2329262ab8108e7c19252c1a67b2801909"}, + {file = "spacy-3.4.4-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bcb7a213178c298b95532075d6dddfb374bbe56ef8d2687212763b4583048da2"}, + {file = "spacy-3.4.4-cp36-cp36m-win_amd64.whl", hash = "sha256:15e5c41d408d1d30d8f3dd8e4eed9ed28e6174e011b8d61c1345981562e2e8f5"}, + {file = "spacy-3.4.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8979dbd3594c5c268cedad53f456a3ec3a0a2b78a1199788aacedcd68eef3a00"}, + {file = "spacy-3.4.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f4736fea2630e696422dfe38bfb3d0a7864bc6a9072d6e49a906af46870e36e"}, + {file = "spacy-3.4.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:498bf01e8c7ab601c3f8d6c51497817b40a3322a3967c032536b18ce9ea26d0a"}, + {file = "spacy-3.4.4-cp37-cp37m-win_amd64.whl", hash = "sha256:95f880c6fea57d51c448ad84f96d79d8758e5e18bdbaaee060c15af11641079b"}, + {file = "spacy-3.4.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9ccbede9be470c5d795168bf3be41fc86e18892a9247a742b394ba866c005391"}, + {file = "spacy-3.4.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2f1edbecfde9c11b17e87768bb5f2c33948fb1e3bf54b2197031ff9053607277"}, + {file = "spacy-3.4.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:66eaf4764e95699934cbd8f38717b283db185c896cfd3d1fb1ad5c6552e8b3c9"}, + {file = "spacy-3.4.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:0bb7d53f1a780bb8cc1b27a81e02e8b9bc71abb959f4dc13c21af4041fdd2c7a"}, + {file = "spacy-3.4.4-cp38-cp38-win_amd64.whl", hash = "sha256:c1a5ce5c9b19cdfb4469079e710e72bb09c3cab855f21ef6a614b84c765e0311"}, + {file = "spacy-3.4.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f7044dca3542579ea1e3ac6cdd821640c2f65dd0c56230688f36e15aca1b8217"}, + {file = "spacy-3.4.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8a495b0fc00910fb5c1fbe64fdbfe1d3c11b09f421d1ae4e30cdb4c2388a91e4"}, + {file = "spacy-3.4.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:31e9a637960b60c1bb7a36a187271425717e97c14e9d1df613dc4efeffefcbec"}, + {file = "spacy-3.4.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:71f9449ffadef85b048c9735ee235da5dca9d0a87038dba6d4ed20c5188e0f13"}, + {file = "spacy-3.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:1b7791a6c0592615b0566001596cc48c72325d1b97e46e574c91bff34f4e3f4c"}, + {file = "spacy-3.4.4.tar.gz", hash = "sha256:e500cf2cb5f1849461a7928fa269703756069bdfb71559065240af6d0208b08c"}, ] spacy-legacy = [ {file = "spacy-legacy-3.0.10.tar.gz", hash = "sha256:16104595d8ab1b7267f817a449ad1f986eb1f2a2edf1050748f08739a479679a"}, @@ -3837,6 +3932,29 @@ tomli = [ {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, ] +torch = [ + {file = "torch-1.13.1-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:fd12043868a34a8da7d490bf6db66991108b00ffbeecb034228bfcbbd4197143"}, + {file = "torch-1.13.1-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:d9fe785d375f2e26a5d5eba5de91f89e6a3be5d11efb497e76705fdf93fa3c2e"}, + {file = "torch-1.13.1-cp310-cp310-win_amd64.whl", hash = "sha256:98124598cdff4c287dbf50f53fb455f0c1e3a88022b39648102957f3445e9b76"}, + {file = "torch-1.13.1-cp310-none-macosx_10_9_x86_64.whl", hash = "sha256:393a6273c832e047581063fb74335ff50b4c566217019cc6ace318cd79eb0566"}, + {file = "torch-1.13.1-cp310-none-macosx_11_0_arm64.whl", hash = "sha256:0122806b111b949d21fa1a5f9764d1fd2fcc4a47cb7f8ff914204fd4fc752ed5"}, + {file = "torch-1.13.1-cp311-cp311-manylinux1_x86_64.whl", hash = "sha256:22128502fd8f5b25ac1cd849ecb64a418382ae81dd4ce2b5cebaa09ab15b0d9b"}, + {file = "torch-1.13.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:76024be052b659ac1304ab8475ab03ea0a12124c3e7626282c9c86798ac7bc11"}, + {file = "torch-1.13.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:ea8dda84d796094eb8709df0fcd6b56dc20b58fdd6bc4e8d7109930dafc8e419"}, + {file = "torch-1.13.1-cp37-cp37m-win_amd64.whl", hash = "sha256:2ee7b81e9c457252bddd7d3da66fb1f619a5d12c24d7074de91c4ddafb832c93"}, + {file = "torch-1.13.1-cp37-none-macosx_10_9_x86_64.whl", hash = "sha256:0d9b8061048cfb78e675b9d2ea8503bfe30db43d583599ae8626b1263a0c1380"}, + {file = "torch-1.13.1-cp37-none-macosx_11_0_arm64.whl", hash = "sha256:f402ca80b66e9fbd661ed4287d7553f7f3899d9ab54bf5c67faada1555abde28"}, + {file = "torch-1.13.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:727dbf00e2cf858052364c0e2a496684b9cb5aa01dc8a8bc8bbb7c54502bdcdd"}, + {file = "torch-1.13.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:df8434b0695e9ceb8cc70650afc1310d8ba949e6db2a0525ddd9c3b2b181e5fe"}, + {file = "torch-1.13.1-cp38-cp38-win_amd64.whl", hash = "sha256:5e1e722a41f52a3f26f0c4fcec227e02c6c42f7c094f32e49d4beef7d1e213ea"}, + {file = "torch-1.13.1-cp38-none-macosx_10_9_x86_64.whl", hash = 
"sha256:33e67eea526e0bbb9151263e65417a9ef2d8fa53cbe628e87310060c9dcfa312"}, + {file = "torch-1.13.1-cp38-none-macosx_11_0_arm64.whl", hash = "sha256:eeeb204d30fd40af6a2d80879b46a7efbe3cf43cdbeb8838dd4f3d126cc90b2b"}, + {file = "torch-1.13.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:50ff5e76d70074f6653d191fe4f6a42fdbe0cf942fbe2a3af0b75eaa414ac038"}, + {file = "torch-1.13.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:2c3581a3fd81eb1f0f22997cddffea569fea53bafa372b2c0471db373b26aafc"}, + {file = "torch-1.13.1-cp39-cp39-win_amd64.whl", hash = "sha256:0aa46f0ac95050c604bcf9ef71da9f1172e5037fdf2ebe051962d47b123848e7"}, + {file = "torch-1.13.1-cp39-none-macosx_10_9_x86_64.whl", hash = "sha256:6930791efa8757cb6974af73d4996b6b50c592882a324b8fb0589c6a9ba2ddaf"}, + {file = "torch-1.13.1-cp39-none-macosx_11_0_arm64.whl", hash = "sha256:e0df902a7c7dd6c795698532ee5970ce898672625635d885eade9976e5a04949"}, +] tornado = [ {file = "tornado-6.2-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:20f638fd8cc85f3cbae3c732326e96addff0a15e22d80f049e00121651e82e72"}, {file = "tornado-6.2-cp37-abi3-macosx_10_9_x86_64.whl", hash = "sha256:87dcafae3e884462f90c90ecc200defe5e580a7fbbb4365eda7c7c1eb809ebc9"}, @@ -3914,6 +4032,10 @@ websocket-client = [ {file = "websocket-client-1.4.2.tar.gz", hash = "sha256:d6e8f90ca8e2dd4e8027c4561adeb9456b54044312dba655e7cae652ceb9ae59"}, {file = "websocket_client-1.4.2-py3-none-any.whl", hash = "sha256:d6b06432f184438d99ac1f456eaf22fe1ade524c3dd16e661142dc54e9cba574"}, ] +wheel = [ + {file = "wheel-0.38.4-py3-none-any.whl", hash = "sha256:b60533f3f5d530e971d6737ca6d58681ee434818fab630c83a734bb10c083ce8"}, + {file = "wheel-0.38.4.tar.gz", hash = "sha256:965f5259b566725405b05e7cf774052044b1ed30119b5d586b2703aafe8719ac"}, +] widgetsnbextension = [ {file = "widgetsnbextension-4.0.4-py3-none-any.whl", hash = "sha256:fa0e840719ec95dd2ec85c3a48913f1a0c29d323eacbcdb0b29bfed0cc6da678"}, {file = "widgetsnbextension-4.0.4.tar.gz", hash = "sha256:44c69f18237af0f610557d6c1c7ef76853f5856a0e604c0a517f2320566bb775"}, diff --git a/pyproject.toml b/pyproject.toml index be4705dc..26866076 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -22,6 +22,7 @@ spacy = {version = "^3", optional = true} nltk = {version = "^3", optional = true} transformers = {version = "^4", optional = true} beautifulsoup4 = {version = "^4", optional = true} +torch = {version = "^1.13.1", optional = true} tiktoken = {version = "^0", optional = true, python="^3.9"} [tool.poetry.group.test.dependencies] @@ -49,8 +50,8 @@ jupyter = "^1.0.0" playwright = "^1.28.0" [tool.poetry.extras] -llms = ["cohere", "openai", "nlpcloud", "huggingface_hub", "manifest-ml"] -all = ["cohere", "openai", "nlpcloud", "huggingface_hub", "manifest-ml", "elasticsearch", "google-search-results", "faiss-cpu", "sentence_transformers", "transformers", "spacy", "nltk", "wikipedia", "beautifulsoup4", "tiktoken"] +llms = ["cohere", "openai", "nlpcloud", "huggingface_hub", "manifest-ml", "torch", "transformers"] +all = ["cohere", "openai", "nlpcloud", "huggingface_hub", "manifest-ml", "elasticsearch", "google-search-results", "faiss-cpu", "sentence_transformers", "transformers", "spacy", "nltk", "wikipedia", "beautifulsoup4", "tiktoken", "torch"] [tool.isort] profile = "black" diff --git a/tests/integration_tests/llms/test_huggingface_pipeline.py b/tests/integration_tests/llms/test_huggingface_pipeline.py new file mode 100644 index 00000000..7cf3b6d1 --- /dev/null +++ b/tests/integration_tests/llms/test_huggingface_pipeline.py @@ -0,0 +1,41 @@ 
+"""Test HuggingFace Pipeline wrapper.""" + +from pathlib import Path + +from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline + +from langchain.llms.huggingface_pipeline import HuggingFacePipeline +from langchain.llms.loading import load_llm +from tests.integration_tests.llms.utils import assert_llm_equality + + +def test_huggingface_pipeline_text_generation() -> None: + """Test valid call to HuggingFace text generation model.""" + llm = HuggingFacePipeline.from_model_id( + model_id="gpt2", task="text-generation", model_kwargs={"max_new_tokens": 10} + ) + output = llm("Say foo:") + assert isinstance(output, str) + + +def test_saving_loading_llm(tmp_path: Path) -> None: + """Test saving/loading an HuggingFaceHub LLM.""" + llm = HuggingFacePipeline.from_model_id( + model_id="gpt2", task="text-generation", model_kwargs={"max_new_tokens": 10} + ) + llm.save(file_path=tmp_path / "hf.yaml") + loaded_llm = load_llm(tmp_path / "hf.yaml") + assert_llm_equality(llm, loaded_llm) + + +def test_init_with_pipeline() -> None: + """Test initialization with a HF pipeline.""" + model_id = "gpt2" + tokenizer = AutoTokenizer.from_pretrained(model_id) + model = AutoModelForCausalLM.from_pretrained(model_id) + pipe = pipeline( + "text-generation", model=model, tokenizer=tokenizer, max_new_tokens=10 + ) + llm = HuggingFacePipeline(pipeline=pipe) + output = llm("Say foo:") + assert isinstance(output, str) diff --git a/tests/integration_tests/llms/utils.py b/tests/integration_tests/llms/utils.py index 6047e979..c05445d4 100644 --- a/tests/integration_tests/llms/utils.py +++ b/tests/integration_tests/llms/utils.py @@ -10,7 +10,7 @@ def assert_llm_equality(llm: LLM, loaded_llm: LLM) -> None: # Client field can be session based, so hash is different despite # all other values being the same, so just assess all other fields for field in llm.__fields__.keys(): - if field != "client": + if field != "client" and field != "pipeline": val = getattr(llm, field) new_val = getattr(loaded_llm, field) assert new_val == val From 50257fce5946a0d0a06e1f921ff9fb43ee8c520a Mon Sep 17 00:00:00 2001 From: mrbean <43734688+sam-h-bean@users.noreply.github.com> Date: Sat, 17 Dec 2022 10:02:58 -0500 Subject: [PATCH 29/29] Support Streaming Tokens from OpenAI (#364) https://github.com/hwchase17/langchain/issues/363 @hwchase17 how much does this make you want to cry? --- langchain/llms/openai.py | 26 ++++++++++++++++++++- tests/integration_tests/llms/test_openai.py | 19 +++++++++++++++ 2 files changed, 44 insertions(+), 1 deletion(-) diff --git a/langchain/llms/openai.py b/langchain/llms/openai.py index 2b020595..2cca3e2e 100644 --- a/langchain/llms/openai.py +++ b/langchain/llms/openai.py @@ -1,6 +1,6 @@ """Wrapper around OpenAI APIs.""" import sys -from typing import Any, Dict, List, Mapping, Optional +from typing import Any, Dict, Generator, List, Mapping, Optional from pydantic import BaseModel, Extra, Field, root_validator @@ -160,6 +160,30 @@ class OpenAI(LLM, BaseModel): generations=generations, llm_output={"token_usage": token_usage} ) + def stream(self, prompt: str) -> Generator: + """Call OpenAI with streaming flag and return the resulting generator. + + Args: + prompt: The prompts to pass into the model. + + Returns: + A generator representing the stream of tokens from OpenAI. + + Example: + .. 
code-block:: python + + generator = openai.stream("Tell me a joke.") + for token in generator: + yield token + """ + params = self._default_params + if params["best_of"] != 1: + raise ValueError("OpenAI only supports best_of == 1 for streaming") + params["stream"] = True + generator = self.client.create(model=self.model_name, prompt=prompt, **params) + + return generator + @property def _identifying_params(self) -> Mapping[str, Any]: """Get the identifying parameters.""" diff --git a/tests/integration_tests/llms/test_openai.py b/tests/integration_tests/llms/test_openai.py index 721b5b43..9c03b92f 100644 --- a/tests/integration_tests/llms/test_openai.py +++ b/tests/integration_tests/llms/test_openai.py @@ -1,6 +1,7 @@ """Test OpenAI API wrapper.""" from pathlib import Path +from typing import Generator import pytest @@ -55,3 +56,21 @@ def test_saving_loading_llm(tmp_path: Path) -> None: llm.save(file_path=tmp_path / "openai.yaml") loaded_llm = load_llm(tmp_path / "openai.yaml") assert loaded_llm == llm + + +def test_openai_streaming() -> None: + """Test streaming tokens from OpenAI.""" + llm = OpenAI(max_tokens=10) + generator = llm.stream("I'm Pickle Rick") + + assert isinstance(generator, Generator) + + for token in generator: + assert isinstance(token["choices"][0]["text"], str) + + +def test_openai_streaming_error() -> None: + """Test error handling in stream.""" + llm = OpenAI(best_of=2) + with pytest.raises(ValueError): + llm.stream("I'm Pickle Rick")
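
A minimal consumption sketch for the new `stream` method, assuming an OpenAI API key is configured; the chunk format mirrors what the test above asserts:

.. code-block:: python

    from langchain.llms import OpenAI

    llm = OpenAI(max_tokens=10)
    # stream() returns the raw generator from the OpenAI client; each chunk
    # carries the next piece of generated text under choices[0]["text"].
    for chunk in llm.stream("Tell me a joke."):
        print(chunk["choices"][0]["text"], end="", flush=True)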