localai + wip LCEL

master
blob42 1 month ago
parent 89684fbd99
commit 99d440acdb

@@ -0,0 +1,93 @@
#!/usr/bin/env python3
# vim: set expandtab sw=4 ts=4 sts=4 foldmethod=indent filetype=python:
import asyncio
import os
from langchain_core.output_parsers import StrOutputParser
from langchain_core.runnables import RunnablePassthrough
from langchain_openai import ChatOpenAI
# Point the OpenAI-compatible client at the local LocalAI instance
os.environ['OPENAI_API_KEY'] = '2708b7c21129e408899d5a38e6d1af8d'
os.environ['OPENAI_API_BASE'] = 'http://localai.srvlan:8080'
#|%%--%%| <9871Wi18GN|qMj6mA5jLr>
r"""°°°
# Basic invocation
°°°"""
#|%%--%%| <qMj6mA5jLr|LxJxvWuF27>
llm = ChatOpenAI(model="dolphin-mixtral", temperature=0.2, max_tokens=100)
#|%%--%%| <LxJxvWuF27|eTnVsVGPqG>
llm.invoke("how can you help me?")
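#|%%--%%| <eTnVsVGPqG|Xb2kRm9pQa>
# A minimal sketch of inspecting the result: invoke() on a chat model
# returns an AIMessage, and the reply text lives in its `content` field.
# (The question below is just an illustrative placeholder.)
response = llm.invoke("what tools should every maker own?")
print(response.content)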
#|%%--%%| <eTnVsVGPqG|ChQEPJ4k0e>
r"""°°°
# Using prompt templates
°°°"""
#|%%--%%| <ChQEPJ4k0e|Tl3tcWLlrF>
from langchain_core.prompts import ChatPromptTemplate
prompt = ChatPromptTemplate.from_messages([
    ("system", "You are a genius DIY maker assistant."),
    ("user", "{input}")
])
# combine llm and prompt to create a chain
chain = prompt | llm
chain.invoke(dict(input="what are the best adhesives to bind metal to wood?"))
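#|%%--%%| <Tl3tcWLlrF|Jd8rPv5wXe>
# The template itself is a Runnable too; a quick sketch of rendering the
# messages it would send, without calling the model (format_messages is
# the standard ChatPromptTemplate helper):
prompt.format_messages(input="what are the best adhesives to bind metal to wood?")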
#|%%--%%| <Tl3tcWLlrF|z6Tf8SVZ6W>
r"""°°°
# Creating a chain
- Every element in the chain implements the Runnable interface (invoke, stream, ainvoke, batch, ...); a short batch/async sketch follows the joke chain below.
°°°"""
#|%%--%%| <z6Tf8SVZ6W|qMHVOmTbk0>
prompt = ChatPromptTemplate.from_template("Tell me a short joke about {topic}")
output_parser = StrOutputParser()
llm = ChatOpenAI(model="dolphin-mixtral", temperature=0.6, max_tokens=100)
chain = (
    {"topic": RunnablePassthrough()}
    | prompt
    | llm
    | output_parser
)
chain.invoke("ice cream")
#|%%--%%| <RV24rYEjeh|RDvrxXtUVe>
r"""°°°
# Streaming the response
https://python.langchain.com/docs/expression_language/streaming
°°°"""
#|%%--%%| <RDvrxXtUVe|arP98HJHVd>
# `chain` now ends with StrOutputParser, so astream() takes the topic
# string directly and yields plain str chunks (no `.content` attribute):
async for chunk in chain.astream("binding metal to plastic"):
    print(chunk, end="", flush=True)
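#|%%--%%| <arP98HJHVd|Fg7uWq1mRd>
# Top-level `async for` only works in an async-aware REPL such as
# IPython/jukit. A sketch of the same streaming loop as a plain script,
# using the asyncio import at the top of this file:
async def main() -> None:
    async for chunk in chain.astream("binding metal to plastic"):
        print(chunk, end="", flush=True)

asyncio.run(main())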

@@ -0,0 +1,26 @@
{
    "cells": [
        {
            "cell_type": "code",
            "metadata": {
                "jukit_cell_id": "NONE"
            },
            "source": [
                "#!/usr/bin/env python3\n",
                "# vim: set expandtab sw=4 ts=4 sts=4 foldmethod=indent filetype=python:"
            ],
            "outputs": [],
            "execution_count": null
        }
    ],
    "metadata": {
        "anaconda-cloud": {},
        "kernelspec": {
            "display_name": "python",
            "language": "python",
            "name": "python3"
        }
    },
    "nbformat": 4,
    "nbformat_minor": 4
}

@@ -0,0 +1,26 @@
#|%%--%%| <v7QcmovzRW|SzAmGcGuz7>
from langchain_core.messages import HumanMessage, SystemMessage
from langchain_openai import ChatOpenAI
#|%%--%%| <SzAmGcGuz7|5hpjZkupU5>
messages = [
    SystemMessage(content="You are a helpful assistant."),
    HumanMessage(content="what is a good company name for colorful socks?")
]
#|%%--%%| <5hpjZkupU5|qXWlUuUqLc>
chat = ChatOpenAI(temperature=0.2, model="dolphin-mixtral", max_tokens=50)
#|%%--%%| <qXWlUuUqLc|00yiGVKIRw>
result = chat.invoke(messages)
#|%%--%%| <00yiGVKIRw|ZrwfqMozR5>
print(result.content)
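#|%%--%%| <ZrwfqMozR5|Hk2vBn8sTe>
# Chat models implement the same Runnable interface as chains; a minimal
# sketch that streams the reply token by token instead of waiting for
# the full message:
for chunk in chat.stream(messages):
    print(chunk.content, end="", flush=True)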