Add CLI command to create a new project (#7837)

First version of a CLI command to create a new LangChain project from a template
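
Assuming the Typer app below ends up exposed through the `langchain` console script registered at the bottom of this diff (the exact subcommand wiring in `langchain.cli.cli` is not shown here, so treat this as a sketch rather than the confirmed interface), an invocation would look roughly like:

```bash
# Hypothetical invocation; the flags mirror the Typer options defined in the new CLI module.
langchain new ./my-app --author-name "Jane Doe" --author-email jane@example.com --use-poetry
```

Leaving the options off falls back to the `default_factory` values (git config for the author details, `is_poetry_installed()` for the Poetry flag) and prompts interactively.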

Co-authored-by: Bagatur <baskaryan@gmail.com>
Co-authored-by: Eugene Yurtsev <eyurtsev@gmail.com>
pull/11402/head
Nuno Campos 10 months ago committed by GitHub
parent 9cea796671
commit fde19c8667

@ -0,0 +1,54 @@
"""A CLI for creating a new project with LangChain."""
from pathlib import Path
try:
import typer
except ImportError:
raise ImportError(
"Typer must be installed to use the CLI. "
"You can install it with `pip install typer`."
)
from typing_extensions import Annotated
from langchain.cli.create_repo.base import create, is_poetry_installed
from langchain.cli.create_repo.user_info import get_git_user_email, get_git_user_name
app = typer.Typer(no_args_is_help=False, add_completion=False)
AUTHOR_NAME_OPTION = typer.Option(
default_factory=get_git_user_name,
prompt=True,
help="If not specified, will be inferred from git config if possible. ",
)
AUTHOR_EMAIL_OPTION = typer.Option(
default_factory=get_git_user_email,
prompt=True,
help="If not specified, will be inferred from git config if possible. ",
)
USE_POETRY_OPTION = typer.Option(
default_factory=is_poetry_installed,
prompt=True,
help=(
"Whether to use Poetry to manage the project. "
"If not specified, Poetry will be used if poetry is installed."
),
)
@app.command()
def new(
project_directory: Annotated[
Path, typer.Argument(help="The directory to create the project in.")
],
author_name: Annotated[str, AUTHOR_NAME_OPTION],
author_email: Annotated[str, AUTHOR_EMAIL_OPTION],
use_poetry: Annotated[bool, USE_POETRY_OPTION],
) -> None:
"""Create a new project with LangChain."""
create(project_directory, author_name, author_email, use_poetry)
if __name__ == "__main__":
app()

@ -0,0 +1,291 @@
""""""
import os
import pathlib
import string
import subprocess
from pathlib import Path
from typing import List, Sequence
import typer
import langchain
from langchain.cli.create_repo.pypi_name import is_name_taken, lint_name
class UnderscoreTemplate(string.Template):
    """string.Template subclass that uses ``____`` as the placeholder delimiter."""
    delimiter = "____"
def _create_project_dir(
project_directory_path: Path,
use_poetry: bool,
project_name: str,
project_name_identifier: str,
author_name: str,
author_email: str,
) -> None:
project_directory_path.mkdir(parents=True, exist_ok=True)
template_directories = _get_template_directories(use_poetry)
_check_conflicting_files(template_directories, project_directory_path)
_copy_template_files(
template_directories,
project_directory_path,
project_name,
project_name_identifier,
author_name,
author_email,
)
def _get_template_directories(use_poetry: bool) -> List[Path]:
"""Get the directories containing the templates.
Args:
use_poetry: If true, will set up the project with Poetry.
"""
template_parent_path = Path(__file__).parent / "templates"
template_directories = [template_parent_path / "repo"]
if use_poetry:
template_directories.append(template_parent_path / "poetry")
else:
template_directories.append(template_parent_path / "pip")
return template_directories
def _check_conflicting_files(
template_directories: Sequence[Path], project_directory_path: Path
) -> None:
"""Validate project directory doesn't contain conflicting files."""
for template_directory_path in template_directories:
for template_file_path in template_directory_path.glob("**/*"):
relative_template_file_path = template_file_path.relative_to(
template_directory_path
)
project_file_path = project_directory_path / relative_template_file_path
if project_file_path.exists():
typer.echo(
f"{typer.style('Error:', fg=typer.colors.RED)}"
f" The project directory already contains a file"
f" {typer.style(project_file_path, fg=typer.colors.BRIGHT_CYAN)}"
f" that would be overwritten by the template.",
err=True,
)
typer.echo(
"Please remove this file and try again.",
err=True,
)
raise typer.Exit(code=1)
def _copy_template_files(
template_directories: Sequence[Path],
project_directory_path: Path,
project_name: str,
project_name_identifier: str,
author_name: str,
author_email: str,
) -> None:
"""Copy template files to project directory and substitute variables.
Args:
template_directories: The directories containing the templates.
project_directory_path: The destination directory.
project_name: The name of the project.
project_name_identifier: The identifier of the project name.
author_name: The name of the author.
author_email: The email of the author.
"""
for template_directory_path in template_directories:
for template_file_path in template_directory_path.glob("**/*"):
relative_template_file_path = UnderscoreTemplate(
str(template_file_path.relative_to(template_directory_path))
).substitute(project_name_identifier=project_name_identifier)
project_file_path = project_directory_path / relative_template_file_path
if template_file_path.is_dir():
project_file_path.mkdir(parents=True, exist_ok=True)
else:
project_file_path.write_text(
UnderscoreTemplate(template_file_path.read_text()).substitute(
project_name=project_name,
project_name_identifier=project_name_identifier,
author_name=author_name,
author_email=author_email,
langchain_version=langchain.__version__,
)
)
def _poetry_install(project_directory_path: Path) -> None:
"""Install dependencies with Poetry."""
typer.echo(
f"\n{typer.style('2.', bold=True, fg=typer.colors.GREEN)}"
f" Installing dependencies with Poetry..."
)
subprocess.run(["pwd"], cwd=project_directory_path)
subprocess.run(
["poetry", "install"],
cwd=project_directory_path,
env={**os.environ.copy(), "VIRTUAL_ENV": ""},
)
def _pip_install(project_directory_path: Path) -> None:
"""Create virtual environment and install dependencies."""
typer.echo(
f"\n{typer.style('2.', bold=True, fg=typer.colors.GREEN)}"
f" Creating virtual environment..."
)
subprocess.run(["pwd"], cwd=project_directory_path)
subprocess.run(["python", "-m", "venv", ".venv"], cwd=project_directory_path)
# TODO install dependencies
def _init_git(project_directory_path: Path) -> None:
"""Initialize git repository."""
typer.echo(
f"\n{typer.style('3.', bold=True, fg=typer.colors.GREEN)} Initializing git..."
)
subprocess.run(["git", "init"], cwd=project_directory_path)
# 7. Create initial commit
subprocess.run(["git", "add", "."], cwd=project_directory_path)
subprocess.run(
["git", "commit", "-m", "Initial commit"],
cwd=project_directory_path,
)
def _select_project_name(suggested_project_name: str) -> str:
"""Help the user select a valid project name."""
while True:
project_name = typer.prompt(
"Please choose a project name: ", default=suggested_project_name
)
project_name_diagnostics = lint_name(project_name)
if project_name_diagnostics:
typer.echo(
f"{typer.style('Error:', fg=typer.colors.RED)}"
f" The project name"
f" {typer.style(project_name, fg=typer.colors.BRIGHT_CYAN)}"
f" is not valid:",
err=True,
)
for diagnostic in project_name_diagnostics:
typer.echo(f" - {diagnostic}")
if typer.confirm(
"Would you like to choose another name? "
"Choose NO to proceed with existing name.",
default=True,
):
continue
if is_name_taken(project_name):
typer.echo(
f"{typer.style('Error:', fg=typer.colors.RED)}"
f" The project name"
f" {typer.style(project_name, fg=typer.colors.BRIGHT_CYAN)}"
f" is already taken on pypi",
err=True,
)
if typer.confirm(
"Would you like to choose another name? "
"Choose NO to proceed with existing name.",
default=True,
):
continue
# If we got here then the project name is valid and not taken
return project_name
# PUBLIC API
def create(
project_directory: pathlib.Path,
author_name: str,
author_email: str,
use_poetry: bool,
) -> None:
"""Create a new LangChain project.
Args:
project_directory (Path): The directory to create the project in.
author_name (str): The name of the author.
author_email (str): The email of the author.
use_poetry (bool): Whether to use Poetry to manage the project.
"""
project_directory_path = Path(project_directory)
project_name_suggestion = project_directory_path.name.replace("-", "_")
project_name = _select_project_name(project_name_suggestion)
project_name_identifier = project_name
resolved_path = project_directory_path.resolve()
if not typer.confirm(
f"\n{typer.style('>', bold=True, fg=typer.colors.GREEN)} "
f"Creating new LangChain project "
f"{typer.style(project_name, fg=typer.colors.BRIGHT_CYAN)}"
f" in"
f" {typer.style(resolved_path, fg=typer.colors.BRIGHT_CYAN)}",
default=True,
):
typer.echo("OK! Canceling project creation.")
raise typer.Exit(code=0)
_create_project_dir(
project_directory_path,
use_poetry,
project_name,
project_name_identifier,
author_name,
author_email,
)
# TODO(Team): Add installation
# if use_poetry:
# _poetry_install(project_directory_path)
# else:
# _pip_install(project_directory_path)
_init_git(project_directory_path)
typer.echo(
f"\n{typer.style('Done!', bold=True, fg=typer.colors.GREEN)}"
f" Your new LangChain project"
f" {typer.style(project_name, fg=typer.colors.BRIGHT_CYAN)}"
f" has been created in"
f" {typer.style(project_directory_path.resolve(), fg=typer.colors.BRIGHT_CYAN)}"
f"."
)
# TODO(Team): Add surfacing information from make file and installation
# cd_dir = typer.style(
# f"cd {project_directory_path.resolve()}", fg=typer.colors.BRIGHT_CYAN
# )
# typer.echo(
# f"\nChange into the project directory with {cd_dir}."
# f" The following commands are available:"
# )
# subprocess.run(["make"], cwd=project_directory_path)
# if not use_poetry:
# pip_install = typer.style(
# 'pip install -e ".[dev]"', fg=typer.colors.BRIGHT_CYAN
# )
# typer.echo(
# f"\nTo install all dependencies activate your environment run:"
# f"\n{typer.style('source .venv/bin/activate', fg=typer.colors.BRIGHT_CYAN)}"
# f"\n{pip_install}."
# )
def is_poetry_installed() -> bool:
    """Check if Poetry is installed and available on the PATH."""
    try:
        result = subprocess.run(["poetry", "--version"], capture_output=True)
    except FileNotFoundError:
        return False
    return result.returncode == 0

@ -0,0 +1,70 @@
"""Code helps to check availability of the name of the project on PyPi
Adapted from https://github.com/danishprakash/pip-name/blob/master/pip-name
"""
from typing import List, Optional
import requests
BASE_URL = "https://pypi.org/pypi"
UPPERCASE_SUGGESTION = "Use of uppercase letters is discouraged"
SEPARATOR_SUGGESTION = "Use of `-` is discouraged, consider using `_`"
NUMERIC_SUGGESTION = "Use of numbers is discouraged"
def _request_pypi(name: str) -> Optional[dict]:
"""Request response from PyPi API.
Args:
name (str): Name of the project
Returns:
Optional[dict]: Parsed response from the PyPI API, or None if the name is not registered
"""
target_url = f"{BASE_URL}/{name}/json"
response = requests.get(target_url)
return response.json() if response.status_code != 404 else None
# PUBLIC API
def lint_name(name: str) -> List[str]:
"""Check name against PEP8's naming conventions.
Args:
name (str): Name of the project
Returns:
List[str]: List of suggestions
"""
suggestions = []
if "-" in name or " " in name:
suggestions.append(SEPARATOR_SUGGESTION)
if any(x.isupper() for x in name):
suggestions.append(UPPERCASE_SUGGESTION)
if any(x.isnumeric() for x in name):
suggestions.append(NUMERIC_SUGGESTION)
return suggestions
def is_name_taken(name: str) -> bool:
"""Check module filename for conflict.
Args:
name (str): Name of the project
Returns:
bool: True if name is taken, False otherwise
"""
response = _request_pypi(name)
if response:
package_url = response.get("info").get("package_url") # type: ignore
module_name = package_url.split("/")[-2]
return name.lower() == module_name.lower()
return False
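
For reference, the JSON endpoint that `_request_pypi` queries can be probed directly from a shell; a 200 status means the name is already registered, a 404 means it appears to be free (the package names below are only examples):

```bash
# 200 -> name exists on PyPI; 404 -> name appears to be available.
curl -s -o /dev/null -w '%{http_code}\n' https://pypi.org/pypi/requests/json
curl -s -o /dev/null -w '%{http_code}\n' https://pypi.org/pypi/surely-not-a-registered-name-123456/json
```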

@ -0,0 +1,79 @@
# Contributing to ____project_name
Hi there! Thank you for even being interested in contributing to ____project_name.
## 🚀 Quick Start
To install requirements:
```bash
pip install -e ".[dev]"
```
This will install all requirements for running the package, examples, linting, formatting, tests, and coverage.
Now, you should be able to run the common tasks in the following section. To double check, run `make test`, all tests should pass.
## ✅ Common Tasks
Type `make` for a list of common tasks.
### Code Formatting
Formatting for this project is done via a combination of [Black](https://black.readthedocs.io/en/stable/) and [isort](https://pycqa.github.io/isort/).
To run formatting for this project:
```bash
make format
```
Additionally, you can run the formatter only on the files that have been modified in your current branch as compared to the main branch using the format_diff command:
```bash
make format_diff
```
This is especially useful when you have made changes to a subset of the project and want to ensure your changes are properly formatted without affecting the rest of the codebase.
### Linting
Linting for this project is done via a combination of [Black](https://black.readthedocs.io/en/stable/), [isort](https://pycqa.github.io/isort/), [flake8](https://flake8.pycqa.org/en/latest/), and [mypy](http://mypy-lang.org/).
To run linting for this project:
```bash
make lint
```
In addition, you can run the linter only on the files that have been modified in your current branch as compared to the main branch using the lint_diff command:
```bash
make lint_diff
```
This can be very helpful when you've made changes to only certain parts of the project and want to ensure your changes meet the linting standards without having to check the entire codebase.
We recognize linting can be annoying - if you do not want to do it, please contact a project maintainer, and they can help you with it. We do not want this to be a blocker for good code getting contributed.
### Testing
To run unit tests:
```bash
make test
```
If you add new logic, please add a unit test.
## 🏭 Release Process
____project_name follows the [semver](https://semver.org/) versioning standard.
To use the [automated release workflow](./workflows/release.yml) you'll need to set up a PyPI account and [create an API token](https://pypi.org/help/#apitoken). Configure the API token for this GitHub repo by going to settings -> security -> secrets -> actions, creating the `PYPI_API_TOKEN` secret, and setting its value to your PyPI API token.
Once that's set up, you can release a new version of the package by opening a PR that:
1. updates the package version in the [pyproject.toml file](../pyproject.toml),
2. adds the `release` label to the PR.
When the PR is merged into main, a new release will be created.
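If you prefer the terminal over the web UI, the same secret can be created with the GitHub CLI (this assumes `gh` is installed and authenticated against this repository):

```bash
# Prompts for the token value and stores it as an Actions secret.
gh secret set PYPI_API_TOKEN
```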

@ -0,0 +1,10 @@
FROM python:3.11-slim
WORKDIR /app
COPY . /app
RUN pip install --no-cache-dir .
CMD exec uvicorn ____project_name_identifier.server:app --host 0.0.0.0 --port $PORT

@ -0,0 +1,38 @@
.PHONY: all format lint test help
# Default target executed when no arguments are given to make.
all: help
start:
uvicorn ____project_name_identifier.server:app --reload
# Define a variable for the test file path.
TEST_FILE ?= tests/
test:
pytest $(TEST_FILE)
# Define a variable for Python and notebook files.
PYTHON_FILES=.
lint format: PYTHON_FILES=.
lint_diff format_diff: PYTHON_FILES=$(shell git diff --name-only --diff-filter=d main | grep -E '\.py$$|\.ipynb$$')
lint lint_diff:
mypy $(PYTHON_FILES)
black $(PYTHON_FILES) --check
ruff .
format format_diff:
black $(PYTHON_FILES)
ruff --select I --fix $(PYTHON_FILES)
######################
# HELP
######################
help:
@echo '----'
@echo 'make start - start server'
@echo 'make format - run code formatters'
@echo 'make lint - run linters'
@echo 'make test - run unit tests'

@ -0,0 +1,52 @@
[project]
name = "____project_name"
version = "0.0.1"
description = ""
authors = [{name = "____author_name", email = "____author_email"}]
readme = "README.md"
requires-python = ">=3.8,<4.0"
dependencies = [
"langchain~=____langchain_version",
"langserve[server]>=0.0.6",
"tiktoken~=0.4.0",
"openai~=0.27.8",
"fastapi~=0.96.0",
"uvicorn[standard]~=0.22.0",
]
[build-system]
requires = ["setuptools>=61.0"]
build-backend = "setuptools.build_meta"
[project.optional-dependencies]
dev = [
"pytest~=7.4.0",
"pytest-asyncio~=0.21.1",
"mypy~=1.4.1",
"ruff~=0.0.278",
"black~=23.7.0",
"syrupy~=4.0.2",
]
[tool.ruff]
select = [
"E", # pycodestyle
"F", # pyflakes
"I", # isort
]
[tool.mypy]
ignore_missing_imports = "True"
disallow_untyped_defs = "True"
[tool.pytest.ini_options]
# --strict-markers will raise errors on unknown marks.
# https://docs.pytest.org/en/7.1.x/how-to/mark.html#raising-errors-on-unknown-marks
#
# https://docs.pytest.org/en/7.1.x/reference/reference.html
# --strict-config any warnings encountered while parsing the `pytest`
# section of the configuration file raise errors.
#
# https://github.com/tophat/syrupy
# --snapshot-warn-unused Prints a warning on unused snapshots rather than fail the test suite.
addopts = "--strict-markers --strict-config --durations=5 --snapshot-warn-unused"

@ -0,0 +1,89 @@
# Contributing to ____project_name
Hi there! Thank you for even being interested in contributing to ____project_name.
## 🚀 Quick Start
This project uses [Poetry](https://python-poetry.org/) as a dependency manager. Check out Poetry's [documentation on how to install it](https://python-poetry.org/docs/#installation) on your system before proceeding.
❗Note: If you use `Conda` or `Pyenv` as your environment / package manager, avoid dependency conflicts by doing the following first:
1. *Before installing Poetry*, create and activate a new Conda env (e.g. `conda create -n langchain python=3.9`)
2. Install Poetry (see above)
3. Tell Poetry to use the virtualenv python environment (`poetry config virtualenvs.prefer-active-python true`)
4. Continue with the following steps.
To install requirements:
```bash
poetry install
```
This will install all requirements for running the package, examples, linting, formatting, tests, and coverage.
❗Note: If you're running Poetry 1.4.1 and receive a `WheelFileValidationError` for `debugpy` during installation, you can try either downgrading to Poetry 1.4.0 or disabling "modern installation" (`poetry config installer.modern-installation false`) and re-install requirements. See [this `debugpy` issue](https://github.com/microsoft/debugpy/issues/1246) for more details.
Now, you should be able to run the common tasks in the following section.
## ✅ Common Tasks
Type `make` for a list of common tasks.
### Code Formatting
Formatting for this project is done via a combination of [Black](https://black.readthedocs.io/en/stable/) and [isort](https://pycqa.github.io/isort/).
To run formatting for this project:
```bash
make format
```
Additionally, you can run the formatter only on the files that have been modified in your current branch as compared to the main branch using the format_diff command:
```bash
make format_diff
```
This is especially useful when you have made changes to a subset of the project and want to ensure your changes are properly formatted without affecting the rest of the codebase.
### Linting
Linting for this project is done via a combination of [Black](https://black.readthedocs.io/en/stable/), [isort](https://pycqa.github.io/isort/), [flake8](https://flake8.pycqa.org/en/latest/), and [mypy](http://mypy-lang.org/).
To run linting for this project:
```bash
make lint
```
In addition, you can run the linter only on the files that have been modified in your current branch as compared to the main branch using the lint_diff command:
```bash
make lint_diff
```
This can be very helpful when you've made changes to only certain parts of the project and want to ensure your changes meet the linting standards without having to check the entire codebase.
We recognize linting can be annoying - if you do not want to do it, please contact a project maintainer, and they can help you with it. We do not want this to be a blocker for good code getting contributed.
### Testing
To run unit tests:
```bash
make test
```
If you add new logic, please add a unit test.
## 🏭 Release Process
____project_name follows the [semver](https://semver.org/) versioning standard.
To use the [automated release workflow](./workflows/release.yml) you'll need to set up a PyPI account and [create an API token](https://pypi.org/help/#apitoken). Configure the API token for this GitHub repo by going to settings -> security -> secrets -> actions, creating the `PYPI_API_TOKEN` secret, and setting its value to your PyPI API token.
Once that's set up, you can release a new version of the package by opening a PR that:
1. updates the package version in the [pyproject.toml file](../pyproject.toml),
2. adds the `release` label to the PR.
When the PR is merged into main, a new release will be created.

@ -0,0 +1,76 @@
# An action for setting up poetry install with caching.
# Using a custom action since the default action does not
# take poetry install groups into account.
# Action code from:
# https://github.com/actions/setup-python/issues/505#issuecomment-1273013236
name: poetry-install-with-caching
description: Poetry install with support for caching of dependency groups.
inputs:
python-version:
description: Python version, supporting MAJOR.MINOR only
required: true
poetry-version:
description: Poetry version
required: true
install-command:
description: Command run for installing dependencies
required: false
default: poetry install
cache-key:
description: Cache key to use for manual handling of caching
required: true
working-directory:
description: Directory to run install-command in
required: false
default: ""
runs:
using: composite
steps:
- uses: actions/setup-python@v4
name: Setup Python ${{ inputs.python-version }}
with:
python-version: ${{ inputs.python-version }}
- uses: actions/cache@v3
id: cache-pip
name: Cache Pip ${{ inputs.python-version }}
env:
SEGMENT_DOWNLOAD_TIMEOUT_MIN: "15"
with:
path: |
~/.cache/pip
key: pip-${{ runner.os }}-${{ runner.arch }}-py-${{ inputs.python-version }}
- run: pipx install poetry==${{ inputs.poetry-version }} --python python${{ inputs.python-version }}
shell: bash
- name: Check Poetry File
shell: bash
run: |
poetry check
- name: Check lock file
shell: bash
run: |
poetry lock --check
- uses: actions/cache@v3
id: cache-poetry
env:
SEGMENT_DOWNLOAD_TIMEOUT_MIN: "15"
with:
path: |
~/.cache/pypoetry/virtualenvs
~/.cache/pypoetry/cache
~/.cache/pypoetry/artifacts
key: poetry-${{ runner.os }}-${{ runner.arch }}-py-${{ inputs.python-version }}-poetry-${{ inputs.poetry-version }}-${{ hashFiles('poetry.lock') }}
- run: ${{ inputs.install-command }}
working-directory: ${{ inputs.working-directory }}
shell: bash

@ -0,0 +1,36 @@
name: lint
on:
push:
branches: [master]
pull_request:
env:
POETRY_VERSION: "1.4.2"
jobs:
build:
runs-on: ubuntu-latest
strategy:
matrix:
python-version:
- "3.8"
- "3.9"
- "3.10"
- "3.11"
steps:
- uses: actions/checkout@v3
- name: Install poetry
run: |
pipx install poetry==$POETRY_VERSION
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python-version }}
cache: poetry
- name: Install dependencies
run: |
poetry install
- name: Analyse the code with linters
run: |
make lint

@ -0,0 +1,49 @@
name: release
on:
pull_request:
types:
- closed
branches:
- master
paths:
- 'pyproject.toml'
env:
POETRY_VERSION: "1.4.2"
jobs:
if_release:
if: github.event.pull_request.merged == true && contains(github.event.pull_request.labels.*.name, 'release')
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Install poetry
run: pipx install poetry==$POETRY_VERSION
- name: Set up Python 3.10
uses: actions/setup-python@v4
with:
python-version: "3.10"
cache: "poetry"
- name: Build project for distribution
run: poetry build
- name: Check Version
id: check-version
run: |
echo version=$(poetry version --short) >> $GITHUB_OUTPUT
- name: Create Release
uses: ncipollo/release-action@v1
with:
artifacts: "dist/*"
token: ${{ secrets.GITHUB_TOKEN }}
draft: false
generateReleaseNotes: true
tag: v${{ steps.check-version.outputs.version }}
commit: master
- name: Publish to PyPI
env:
POETRY_PYPI_TOKEN_PYPI: ${{ secrets.PYPI_API_TOKEN }}
run: |
poetry publish

@ -0,0 +1,36 @@
name: test
on:
push:
branches: [master]
pull_request:
workflow_dispatch:
env:
POETRY_VERSION: "1.4.2"
jobs:
build:
runs-on: ubuntu-latest
strategy:
matrix:
python-version:
- "3.8"
- "3.9"
- "3.10"
- "3.11"
name: Python ${{ matrix.python-version }}
steps:
- uses: actions/checkout@v3
- name: Set up Python ${{ matrix.python-version }}
uses: "./.github/actions/poetry_setup"
with:
python-version: ${{ matrix.python-version }}
poetry-version: "1.4.2"
install-command: |
echo "Running tests, installing dependencies with poetry..."
poetry install
- name: Run tests
run: |
make test
shell: bash

@ -0,0 +1,11 @@
FROM python:3.11-slim
WORKDIR /app
COPY . /app
RUN pip install poetry && \
poetry config virtualenvs.create false && \
poetry install --no-interaction --no-ansi --only main
CMD exec uvicorn ____project_name_identifier.server:app --host 0.0.0.0 --port $PORT

@ -0,0 +1,38 @@
.PHONY: all format lint test help
# Default target executed when no arguments are given to make.
all: help
start:
poetry run uvicorn ____project_name_identifier.server:app --reload
# Define a variable for the test file path.
TEST_FILE ?= tests/
test:
poetry run pytest $(TEST_FILE)
# Define a variable for Python and notebook files.
PYTHON_FILES=.
lint format: PYTHON_FILES=.
lint_diff format_diff: PYTHON_FILES=$(shell git diff --name-only --diff-filter=d main | grep -E '\.py$$|\.ipynb$$')
lint lint_diff:
poetry run mypy $(PYTHON_FILES)
poetry run black $(PYTHON_FILES) --check
poetry run ruff .
format format_diff:
poetry run black $(PYTHON_FILES)
poetry run ruff --select I --fix $(PYTHON_FILES)
######################
# HELP
######################
help:
@echo '----'
@echo 'make start - start server'
@echo 'make format - run code formatters'
@echo 'make lint - run linters'
@echo 'make test - run unit tests'

@ -0,0 +1,46 @@
# `____project_name`
<!--- This is a LangChain project bootstrapped by [LangChain CLI](https://github.com/langchain-ai/langchain). --->
## Customise
To customise this project, edit the following files:
- `____project_name_identifier/chain.py` contains an example chain, which you can edit to suit your needs.
- `____project_name_identifier/server.py` contains a FastAPI app that serves that chain using `langserve`. You can edit this to add more endpoints or customise your server.
- `tests/test_chain.py` contains tests for the chain. You can edit this to add more tests.
- `pyproject.toml` contains the project metadata, including the project name, version, and dependencies. You can edit this to add more dependencies or customise your project metadata.
## Install dependencies
```bash
poetry install
```
## Usage
To run the project locally, run
```
make start
```
This will launch a webserver on port 8000.
## Deploy
To deploy the project, first build the docker image:
```
docker build . -t ____project_name_identifier:latest
```
Then run the image:
```
docker run -p 8000:8000 ____project_name_identifier:latest
```
## Contributing
For information on how to set up your dev environment and contribute, see [here](.github/CONTRIBUTING.md).

@ -0,0 +1,52 @@
[tool.poetry]
name = "____project_name"
version = "0.0.1"
description = ""
authors = ["____author_name <____author_email>"]
license = "MIT"
readme = "README.md"
packages = [{include = "____project_name_identifier"}]
[tool.poetry.dependencies]
python = "^3.8.1"
langchain = "^____langchain_version"
langserve = { version = ">=0.0.6", extras = ["server"] }
tiktoken = "^0.4.0"
openai = "^0.27.8"
fastapi = "^0.96.0"
uvicorn = {extras = ["standard"], version = "^0.22.0"}
[tool.poetry.group.dev.dependencies]
pytest = "^7.4.0"
pytest-asyncio = "^0.21.1"
mypy = "^1.4.1"
ruff = "^0.0.278"
black = "^23.7.0"
syrupy = "^4.0.2"
[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"
[tool.ruff]
select = [
"E", # pycodestyle
"F", # pyflakes
"I", # isort
]
[tool.mypy]
ignore_missing_imports = "True"
disallow_untyped_defs = "True"
[tool.pytest.ini_options]
# --strict-markers will raise errors on unknown marks.
# https://docs.pytest.org/en/7.1.x/how-to/mark.html#raising-errors-on-unknown-marks
#
# https://docs.pytest.org/en/7.1.x/reference/reference.html
# --strict-config any warnings encountered while parsing the `pytest`
# section of the configuration file raise errors.
#
# https://github.com/tophat/syrupy
# --snapshot-warn-unused Prints a warning on unused snapshots rather than fail the test suite.
addopts = "--strict-markers --strict-config --durations=5 --snapshot-warn-unused"

@ -0,0 +1,157 @@
.vs/
.vscode/
.idea/
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
pip-wheel-metadata/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
docs/docs/_build/
# PyBuilder
target/
# Jupyter Notebook
.ipynb_checkpoints
notebooks/
# IPython
profile_default/
ipython_config.py
# pyenv
.python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.env
.envrc
.venv
.venvs
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# macOS display setting files
.DS_Store
# Wandb directory
wandb/
# asdf tool versions
.tool-versions
/.ruff_cache/
*.pkl
*.bin
# integration test artifacts
data_map*
\[('_type', 'fake'), ('stop', None)]
# Replit files
*replit*

@ -0,0 +1,21 @@
The MIT License
Copyright (c) ____author_name
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

@ -0,0 +1,19 @@
# `____project_name`
<!--- This is a LangChain project bootstrapped by [LangChain CLI](https://github.com/langchain-ai/langchain). --->
## Run locally
To install dependencies, run:
```bash
poetry install
```
## Usage
<!--- Add detailed documentation on how to use this package. --->
## Contributing
For information on how to set up your dev environment and contribute, see [here](.github/CONTRIBUTING.md).

@ -0,0 +1,12 @@
"""____project_name_identifier package."""
from importlib import metadata
from ____project_name_identifier.chain import get_chain
try:
__version__ = metadata.version(__package__)
except metadata.PackageNotFoundError:
# Case where package metadata is not available.
__version__ = ""
__all__ = ["__version__", "get_chain"]

@ -0,0 +1,29 @@
"""This is a template for a custom chain.
Edit this file to implement your chain logic.
"""
from typing import Optional
from langchain.chat_models.openai import ChatOpenAI
from langchain.output_parsers.list import CommaSeparatedListOutputParser
from langchain.prompts.chat import ChatPromptTemplate
from langchain.schema.language_model import BaseLanguageModel
from langchain.schema.runnable import Runnable
template = """You are a helpful assistant who generates comma separated lists.
A user will pass in a category, and you should generate 5 objects in that category in a comma separated list.
ONLY return a comma separated list, and nothing more.""" # noqa: E501
human_template = "{text}"
def get_chain(model: Optional[BaseLanguageModel] = None) -> Runnable:
"""Return a chain."""
model = model or ChatOpenAI()
prompt = ChatPromptTemplate.from_messages(
[
("system", template),
("human", human_template),
]
)
return prompt | model | CommaSeparatedListOutputParser()

@ -0,0 +1,17 @@
from fastapi import FastAPI
from langserve import add_routes
from ____project_name_identifier.chain import get_chain
app = FastAPI()
add_routes(
app,
get_chain(),
config_keys=["tags"],
)
if __name__ == "__main__":
import uvicorn
uvicorn.run(app, host="0.0.0.0", port=8001)
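
Once the server is running (and, for the default `ChatOpenAI` model, `OPENAI_API_KEY` is set), the chain can be exercised over the standard LangServe routes; a rough example against the `__main__` port above:

```bash
# Hypothetical request; LangServe mounts /invoke, /batch and /stream for the runnable.
curl -X POST http://localhost:8001/invoke \
  -H 'Content-Type: application/json' \
  -d '{"input": {"text": "colors"}}'
```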

@ -0,0 +1,10 @@
from ____project_name_identifier import get_chain
def test_my_chain() -> None:
"""Edit this test to test your chain."""
from langchain.llms.human import HumanInputLLM
llm = HumanInputLLM(input_func=lambda *args, **kwargs: "foo")
chain = get_chain(llm)
chain.invoke({"text": "foo"})

@ -0,0 +1,29 @@
"""Look up user information from local git."""
import subprocess
from typing import Optional
def get_git_user_name() -> Optional[str]:
    """Get the user's name from git if it is configured, otherwise None."""
    try:
        name = (
            subprocess.run(["git", "config", "--get", "user.name"], capture_output=True)
            .stdout.decode()
            .strip()
        )
        return name or None
    except FileNotFoundError:
        return None
def get_git_user_email() -> Optional[str]:
    """Get the user's email from git if it is configured, otherwise None."""
    try:
        email = (
            subprocess.run(
                ["git", "config", "--get", "user.email"], capture_output=True
            )
            .stdout.decode()
            .strip()
        )
        return email or None
    except FileNotFoundError:
        return None

@ -2846,6 +2846,7 @@ files = [
{file = "greenlet-2.0.2-cp27-cp27m-win32.whl", hash = "sha256:6c3acb79b0bfd4fe733dff8bc62695283b57949ebcca05ae5c129eb606ff2d74"},
{file = "greenlet-2.0.2-cp27-cp27m-win_amd64.whl", hash = "sha256:283737e0da3f08bd637b5ad058507e578dd462db259f7f6e4c5c365ba4ee9343"},
{file = "greenlet-2.0.2-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:d27ec7509b9c18b6d73f2f5ede2622441de812e7b1a80bbd446cb0633bd3d5ae"},
{file = "greenlet-2.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d967650d3f56af314b72df7089d96cda1083a7fc2da05b375d2bc48c82ab3f3c"},
{file = "greenlet-2.0.2-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:30bcf80dda7f15ac77ba5af2b961bdd9dbc77fd4ac6105cee85b0d0a5fcf74df"},
{file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26fbfce90728d82bc9e6c38ea4d038cba20b7faf8a0ca53a9c07b67318d46088"},
{file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9190f09060ea4debddd24665d6804b995a9c122ef5917ab26e1566dcc712ceeb"},
@ -2854,6 +2855,7 @@ files = [
{file = "greenlet-2.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:76ae285c8104046b3a7f06b42f29c7b73f77683df18c49ab5af7983994c2dd91"},
{file = "greenlet-2.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:2d4686f195e32d36b4d7cf2d166857dbd0ee9f3d20ae349b6bf8afc8485b3645"},
{file = "greenlet-2.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c4302695ad8027363e96311df24ee28978162cdcdd2006476c43970b384a244c"},
{file = "greenlet-2.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d4606a527e30548153be1a9f155f4e283d109ffba663a15856089fb55f933e47"},
{file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c48f54ef8e05f04d6eff74b8233f6063cb1ed960243eacc474ee73a2ea8573ca"},
{file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a1846f1b999e78e13837c93c778dcfc3365902cfb8d1bdb7dd73ead37059f0d0"},
{file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a06ad5312349fec0ab944664b01d26f8d1f05009566339ac6f63f56589bc1a2"},
@ -2883,6 +2885,7 @@ files = [
{file = "greenlet-2.0.2-cp37-cp37m-win32.whl", hash = "sha256:3f6ea9bd35eb450837a3d80e77b517ea5bc56b4647f5502cd28de13675ee12f7"},
{file = "greenlet-2.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:7492e2b7bd7c9b9916388d9df23fa49d9b88ac0640db0a5b4ecc2b653bf451e3"},
{file = "greenlet-2.0.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:b864ba53912b6c3ab6bcb2beb19f19edd01a6bfcbdfe1f37ddd1778abfe75a30"},
{file = "greenlet-2.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1087300cf9700bbf455b1b97e24db18f2f77b55302a68272c56209d5587c12d1"},
{file = "greenlet-2.0.2-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:ba2956617f1c42598a308a84c6cf021a90ff3862eddafd20c3333d50f0edb45b"},
{file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc3a569657468b6f3fb60587e48356fe512c1754ca05a564f11366ac9e306526"},
{file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8eab883b3b2a38cc1e050819ef06a7e6344d4a990d24d45bc6f2cf959045a45b"},
@ -2891,6 +2894,7 @@ files = [
{file = "greenlet-2.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b0ef99cdbe2b682b9ccbb964743a6aca37905fda5e0452e5ee239b1654d37f2a"},
{file = "greenlet-2.0.2-cp38-cp38-win32.whl", hash = "sha256:b80f600eddddce72320dbbc8e3784d16bd3fb7b517e82476d8da921f27d4b249"},
{file = "greenlet-2.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:4d2e11331fc0c02b6e84b0d28ece3a36e0548ee1a1ce9ddde03752d9b79bba40"},
{file = "greenlet-2.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8512a0c38cfd4e66a858ddd1b17705587900dd760c6003998e9472b77b56d417"},
{file = "greenlet-2.0.2-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:88d9ab96491d38a5ab7c56dd7a3cc37d83336ecc564e4e8816dbed12e5aaefc8"},
{file = "greenlet-2.0.2-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:561091a7be172ab497a3527602d467e2b3fbe75f9e783d8b8ce403fa414f71a6"},
{file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:971ce5e14dc5e73715755d0ca2975ac88cfdaefcaab078a284fea6cfabf866df"},
@ -4478,6 +4482,16 @@ files = [
{file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"},
{file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"},
{file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"},
{file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"},
{file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"},
{file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"},
{file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"},
{file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"},
{file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"},
{file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"},
{file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"},
{file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"},
{file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"},
{file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"},
{file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"},
{file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"},
@ -7437,6 +7451,7 @@ files = [
{file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"},
{file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"},
{file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"},
{file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"},
{file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"},
{file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"},
{file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"},
@ -7444,8 +7459,15 @@ files = [
{file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"},
{file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"},
{file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"},
{file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"},
{file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"},
{file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"},
{file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"},
{file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"},
{file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"},
{file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"},
{file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"},
{file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"},
{file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"},
{file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"},
{file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"},
@ -7462,6 +7484,7 @@ files = [
{file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"},
{file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"},
{file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"},
{file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"},
{file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"},
{file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"},
{file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"},
@ -7469,6 +7492,7 @@ files = [
{file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"},
{file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"},
{file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"},
{file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"},
{file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"},
{file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"},
{file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"},
@ -9678,6 +9702,27 @@ torchhub = ["filelock", "huggingface-hub (>=0.15.1,<1.0)", "importlib-metadata",
video = ["av (==9.2.0)", "decord (==0.6.0)"]
vision = ["Pillow (<10.0.0)"]
[[package]]
name = "typer"
version = "0.9.0"
description = "Typer, build great CLIs. Easy to code. Based on Python type hints."
optional = true
python-versions = ">=3.6"
files = [
{file = "typer-0.9.0-py3-none-any.whl", hash = "sha256:5d96d986a21493606a358cae4461bd8cdf83cbf33a5aa950ae629ca3b51467ee"},
{file = "typer-0.9.0.tar.gz", hash = "sha256:50922fd79aea2f4751a8e0408ff10d2662bd0c8bbfa84755a699f3bada2978b2"},
]
[package.dependencies]
click = ">=7.1.1,<9.0.0"
typing-extensions = ">=3.7.4.3"
[package.extras]
all = ["colorama (>=0.4.3,<0.5.0)", "rich (>=10.11.0,<14.0.0)", "shellingham (>=1.3.0,<2.0.0)"]
dev = ["autoflake (>=1.3.1,<2.0.0)", "flake8 (>=3.8.3,<4.0.0)", "pre-commit (>=2.17.0,<3.0.0)"]
doc = ["cairosvg (>=2.5.2,<3.0.0)", "mdx-include (>=1.4.1,<2.0.0)", "mkdocs (>=1.1.2,<2.0.0)", "mkdocs-material (>=8.1.4,<9.0.0)", "pillow (>=9.3.0,<10.0.0)"]
test = ["black (>=22.3.0,<23.0.0)", "coverage (>=6.2,<7.0)", "isort (>=5.0.6,<6.0.0)", "mypy (==0.910)", "pytest (>=4.4.0,<8.0.0)", "pytest-cov (>=2.10.0,<5.0.0)", "pytest-sugar (>=0.9.4,<0.10.0)", "pytest-xdist (>=1.32.0,<4.0.0)", "rich (>=10.11.0,<14.0.0)", "shellingham (>=1.3.0,<2.0.0)"]
[[package]]
name = "types-chardet"
version = "5.0.4.6"
@ -10602,6 +10647,7 @@ cffi = ["cffi (>=1.11)"]
all = ["O365", "aleph-alpha-client", "amadeus", "arxiv", "atlassian-python-api", "awadb", "azure-ai-formrecognizer", "azure-ai-vision", "azure-cognitiveservices-speech", "azure-cosmos", "azure-identity", "beautifulsoup4", "clarifai", "clickhouse-connect", "cohere", "deeplake", "docarray", "duckduckgo-search", "elasticsearch", "esprima", "faiss-cpu", "google-api-python-client", "google-auth", "google-search-results", "gptcache", "html2text", "huggingface_hub", "jinja2", "jq", "lancedb", "langkit", "lark", "libdeeplake", "librosa", "lxml", "manifest-ml", "marqo", "momento", "nebula3-python", "neo4j", "networkx", "nlpcloud", "nltk", "nomic", "openai", "openlm", "opensearch-py", "pdfminer-six", "pexpect", "pgvector", "pinecone-client", "pinecone-text", "psycopg2-binary", "pymongo", "pyowm", "pypdf", "pytesseract", "python-arango", "pyvespa", "qdrant-client", "rdflib", "redis", "requests-toolbelt", "sentence-transformers", "singlestoredb", "tensorflow-text", "tigrisdb", "tiktoken", "torch", "transformers", "weaviate-client", "wikipedia", "wolframalpha"]
azure = ["azure-ai-formrecognizer", "azure-ai-vision", "azure-cognitiveservices-speech", "azure-core", "azure-cosmos", "azure-identity", "azure-search-documents", "openai"]
clarifai = ["clarifai"]
cli = ["typer"]
cohere = ["cohere"]
docarray = ["docarray"]
embeddings = ["sentence-transformers"]
@ -10615,4 +10661,4 @@ text-helpers = ["chardet"]
[metadata]
lock-version = "2.0"
python-versions = ">=3.8.1,<4.0"
content-hash = "d40457accab6666901b6d8c2fd0d911814ab8f28265637c189f2512e1496fd92"
content-hash = "498a5510e617012122596bf4e947f7466d7f574e7c7f1bb69e264ff0990f2277"

@ -9,6 +9,7 @@ repository = "https://github.com/langchain-ai/langchain"
[tool.poetry.scripts]
langchain-server = "langchain.server:main"
langchain = "langchain.cli.cli:app"
[tool.poetry.dependencies]
python = ">=3.8.1,<4.0"
@ -133,6 +134,7 @@ motor = {version = "^3.3.1", optional = true}
anyio = "<4.0"
jsonpatch = "^1.33"
timescale-vector = {version = "^0.0.1", optional = true}
typer = {version= "^0.9.0", optional = true}
anthropic = {version = "^0.3.11", optional = true}
@ -302,6 +304,10 @@ all = [
"python-arango",
]
cli = [
"typer"
]
# An extra used to be able to add extended testing.
# Please use new-line on formatting to make it easier to add new packages without
# merge-conflicts
