# gpt4free/interference/app.py — OpenAI-compatible "interference" API server.
# Standard library.
import json
import random
import string
import time
from typing import Any

# Third-party.
from flask import Flask, request
from flask_cors import CORS

# Local project: g4f supplies the actual chat backend.
from g4f import ChatCompletion

app = Flask(__name__)
# Allow cross-origin requests so browser clients can hit this API directly.
CORS(app)
@app.route("/chat/completions", methods=["POST"])
def chat_completions():
    """Handle an OpenAI-style POST /chat/completions request.

    Reads ``model``, ``stream`` and ``messages`` from the JSON request
    body, forwards them to g4f's ``ChatCompletion.create``, and returns
    either a single OpenAI-shaped completion dict (``stream`` falsy) or
    a ``text/event-stream`` response of chunk objects (``stream`` truthy).
    """
    # Parse the request body once instead of calling get_json() per field.
    body = request.get_json()
    model = body.get("model", "gpt-3.5-turbo")
    stream = body.get("stream", False)
    messages = body.get("messages")

    response = ChatCompletion.create(model=model, stream=stream, messages=messages)

    # Random 28-char id mimics OpenAI's "chatcmpl-..." identifiers.
    completion_id = "".join(random.choices(string.ascii_letters + string.digits, k=28))
    completion_timestamp = int(time.time())

    if not stream:
        return {
            "id": f"chatcmpl-{completion_id}",
            "object": "chat.completion",
            "created": completion_timestamp,
            "model": model,
            "choices": [
                {
                    "index": 0,
                    "message": {
                        "role": "assistant",
                        "content": response,
                    },
                    "finish_reason": "stop",
                }
            ],
            # Token counts are unknown: g4f does not report usage figures.
            "usage": {
                "prompt_tokens": None,
                "completion_tokens": None,
                "total_tokens": None,
            },
        }

    def streaming():
        """Yield server-sent events, one OpenAI-style chunk per g4f chunk."""
        for chunk in response:
            completion_data = {
                "id": f"chatcmpl-{completion_id}",
                "object": "chat.completion.chunk",
                "created": completion_timestamp,
                "model": model,
                "choices": [
                    {
                        "index": 0,
                        "delta": {
                            "content": chunk,
                        },
                        "finish_reason": None,
                    }
                ],
            }
            content = json.dumps(completion_data, separators=(",", ":"))
            yield f"data: {content}\n\n"
            # NOTE(review): throttles every chunk by 100ms — confirm this
            # pacing is intentional before removing.
            time.sleep(0.1)

        # Terminal chunk: empty delta + finish_reason "stop" ends the stream.
        end_completion_data: dict[str, Any] = {
            "id": f"chatcmpl-{completion_id}",
            "object": "chat.completion.chunk",
            "created": completion_timestamp,
            "model": model,
            "choices": [
                {
                    "index": 0,
                    "delta": {},
                    "finish_reason": "stop",
                }
            ],
        }
        content = json.dumps(end_completion_data, separators=(",", ":"))
        yield f"data: {content}\n\n"

    return app.response_class(streaming(), mimetype="text/event-stream")
def main():
    """Run the interference API on all interfaces, port 1337."""
    # NOTE(review): debug=True enables the Werkzeug interactive debugger,
    # which allows arbitrary code execution — do not expose this server to
    # untrusted networks with debug enabled.
    app.run(host="0.0.0.0", port=1337, debug=True)
# Script entry point: only start the server when run directly.
if __name__ == "__main__":
    main()