{ "cells": [ { "attachments": {}, "cell_type": "markdown", "metadata": {}, "source": [ "# C Transformers\n", "\n", "The [C Transformers](https://github.com/marella/ctransformers) library provides Python bindings for GGML models.\n", "\n", "This example goes over how to use LangChain to interact with `C Transformers` [models](https://github.com/marella/ctransformers#supported-models)." ] }, { "attachments": {}, "cell_type": "markdown", "metadata": {}, "source": [ "**Install**" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "%pip install ctransformers" ] }, { "attachments": {}, "cell_type": "markdown", "metadata": {}, "source": [ "**Load Model**" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "from langchain.llms import CTransformers\n", "\n", "llm = CTransformers(model=\"marella/gpt-2-ggml\")" ] }, { "attachments": {}, "cell_type": "markdown", "metadata": {}, "source": [ "**Generate Text**" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "print(llm(\"AI is going to\"))" ] }, { "attachments": {}, "cell_type": "markdown", "metadata": {}, "source": [ "**Streaming**" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler\n", "\n", "llm = CTransformers(\n", " model=\"marella/gpt-2-ggml\", callbacks=[StreamingStdOutCallbackHandler()]\n", ")\n", "\n", "response = llm(\"AI is going to\")" ] }, { "attachments": {}, "cell_type": "markdown", "metadata": {}, "source": [ "**LLMChain**" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "from langchain import PromptTemplate, LLMChain\n", "\n", "template = \"\"\"Question: {question}\n", "\n", "Answer:\"\"\"\n", "\n", "prompt = PromptTemplate(template=template, input_variables=[\"question\"])\n", "\n", "llm_chain = LLMChain(prompt=prompt, llm=llm)\n", "\n", "response = llm_chain.run(\"What is AI?\")" ] } ], "metadata": { "language_info": { "name": "python" }, "orig_nbformat": 4 }, "nbformat": 4, "nbformat_minor": 2 }