{ "cells": [ { "cell_type": "markdown", "metadata": {}, "source": [ "# Prediction Guard" ], "id": "3f0a201c" }, { "cell_type": "code", "execution_count": null, "metadata": { "id": "3RqWPav7AtKL" }, "outputs": [], "source": [ "! pip install predictionguard langchain" ], "id": "4f810331" }, { "cell_type": "code", "execution_count": null, "metadata": { "id": "2xe8JEUwA7_y" }, "outputs": [], "source": [ "import os\n", "\n", "import predictionguard as pg\n", "from langchain.llms import PredictionGuard\n", "from langchain.prompts import PromptTemplate\n", "from langchain.chains import LLMChain" ], "id": "7191a5ce" }, { "cell_type": "markdown", "metadata": { "id": "mesCTyhnJkNS" }, "source": [ "## Basic LLM usage\n", "\n" ], "id": "a8d356d3" }, { "cell_type": "code", "execution_count": null, "metadata": { "id": "kp_Ymnx1SnDG" }, "outputs": [], "source": [ "# Optional: add your OpenAI API key. Prediction Guard also gives you access to\n", "# the latest open access models without it (see https://docs.predictionguard.com).\n", "os.environ[\"OPENAI_API_KEY\"] = \"\"\n", "\n", "# Your Prediction Guard API key. Get one at predictionguard.com\n", "os.environ[\"PREDICTIONGUARD_TOKEN\"] = \"\"" ], "id": "158b109a" }, { "cell_type": "code", "execution_count": null, "metadata": { "id": "Ua7Mw1N4HcER" }, "outputs": [], "source": [ "pgllm = PredictionGuard(model=\"OpenAI-text-davinci-003\")" ], "id": "140717c9" }, { "cell_type": "code", "execution_count": null, "metadata": { "id": "Qo2p5flLHxrB" }, "outputs": [], "source": [ "pgllm(\"Tell me a joke\")" ], "id": "605f7ab6" }, { "cell_type": "markdown", "metadata": { "id": "EyBYaP_xTMXH" }, "source": [ "## Control the output structure/type of LLMs" ], "id": "99de09f9" }, { "cell_type": "code", "execution_count": null, "metadata": { "id": "55uxzhQSTPqF" }, "outputs": [], "source": [ "template = \"\"\"Respond to the following query based on the context.\n", "\n", "Context: EVERY comment, DM + email suggestion has led us to this EXCITING announcement! 🎉 We have officially added TWO new candle subscription box options! 📦\n", "Exclusive Candle Box - $80 \n", "Monthly Candle Box - $45 (NEW!)\n", "Scent of The Month Box - $28 (NEW!)\n", "Head to stories to get ALLL the deets on each box! 👆 BONUS: Save 50% on your first box with code 50OFF! 🎉\n", "\n", "Query: {query}\n", "\n", "Result: \"\"\"\n", "prompt = PromptTemplate(template=template, input_variables=[\"query\"])" ], "id": "ae6bd8a1" }, { "cell_type": "code", "execution_count": null, "metadata": { "id": "yersskWbTaxU" }, "outputs": [], "source": [ "# Without \"guarding\" or controlling the output of the LLM.\n", "pgllm(prompt.format(query=\"What kind of post is this?\"))" ], "id": "f81be0fb" }, { "cell_type": "code", "execution_count": null, "metadata": { "id": "PzxSbYwqTm2w" }, "outputs": [], "source": [ "# With \"guarding\" or controlling the output of the LLM. See the\n", "# Prediction Guard docs (https://docs.predictionguard.com) to learn how to\n", "# control the output with integer, float, boolean, JSON, and other types and\n", "# structures.\n", "pgllm = PredictionGuard(\n", "    model=\"OpenAI-text-davinci-003\",\n", "    output={\n", "        \"type\": \"categorical\",\n", "        \"categories\": [\"product announcement\", \"apology\", \"relational\"],\n", "    },\n", ")\n", "pgllm(prompt.format(query=\"What kind of post is this?\"))" ], "id": "0cb3b91f" },
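{ "cell_type": "markdown", "metadata": {}, "source": [ "The comment above notes that other output types (integer, float, boolean, JSON) can be enforced as well. The next cell is a minimal, unverified sketch of one of them: it assumes the `output` argument also accepts `{\"type\": \"boolean\"}`, so check the Prediction Guard docs (https://docs.predictionguard.com) for the exact schema before relying on it. The `pgllm_bool` name and the example query are just for illustration." ], "id": "b3a1f2c4" }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "# A minimal sketch, assuming the output config also accepts {\"type\": \"boolean\"}\n", "# as referenced above; adjust if the documented schema differs.\n", "pgllm_bool = PredictionGuard(\n", "    model=\"OpenAI-text-davinci-003\",\n", "    output={\"type\": \"boolean\"},\n", ")\n", "pgllm_bool(prompt.format(query=\"Does this post announce a new product?\"))" ], "id": "c4d5e6f7" },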
{ "cell_type": "markdown", "metadata": { "id": "v3MzIUItJ8kV" }, "source": [ "## Chaining" ], "id": "c3b6211f" }, { "cell_type": "code", "execution_count": null, "metadata": { "id": "pPegEZExILrT" }, "outputs": [], "source": [ "pgllm = PredictionGuard(model=\"OpenAI-text-davinci-003\")" ], "id": "8d57d1b5" }, { "cell_type": "code", "execution_count": null, "metadata": { "id": "suxw62y-J-bg" }, "outputs": [], "source": [ "template = \"\"\"Question: {question}\n", "\n", "Answer: Let's think step by step.\"\"\"\n", "prompt = PromptTemplate(template=template, input_variables=[\"question\"])\n", "llm_chain = LLMChain(prompt=prompt, llm=pgllm, verbose=True)\n", "\n", "question = \"What NFL team won the Super Bowl in the year Justin Bieber was born?\"\n", "\n", "llm_chain.predict(question=question)" ], "id": "7915b7fa" }, { "cell_type": "code", "execution_count": null, "metadata": { "id": "l2bc26KHKr7n" }, "outputs": [], "source": [ "template = \"\"\"Write a {adjective} poem about {subject}.\"\"\"\n", "prompt = PromptTemplate(template=template, input_variables=[\"adjective\", \"subject\"])\n", "llm_chain = LLMChain(prompt=prompt, llm=pgllm, verbose=True)\n", "\n", "llm_chain.predict(adjective=\"sad\", subject=\"ducks\")" ], "id": "32ffd783" }, { "cell_type": "code", "execution_count": null, "metadata": { "id": "I--eSa2PLGqq" }, "outputs": [], "source": [], "id": "408ad1e1" } ], "metadata": { "colab": { "provenance": [] }, "kernelspec": { "display_name": "Python 3 (ipykernel)", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.11.3" } }, "nbformat": 4, "nbformat_minor": 5 }