Use the temperature parameter

pull/769/head
grandsonneo@gmail.com 1 year ago
parent 4e86e5b176
commit 0e07ae77cf

@@ -22,8 +22,8 @@ def _create_completion(model: str, messages: list, stream: bool, **kwargs):
"prompt": base,
"options": {},
"systemMessage": "You are ChatGPT, a large language model trained by OpenAI. Follow the user's instructions carefully. Respond using markdown.",
"temperature": 0.8,
"top_p": 1
"temperature": kwargs.get("temperature", 0.8),
"top_p": kwargs.get("top_p", 1)
}
response = requests.post(url, headers=headers, json=data)
if response.status_code == 200:
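
The change above, and most of the hunks below, follow the same pattern: read each sampling option out of **kwargs and fall back to the previously hard-coded value when the caller did not pass one. A minimal sketch of that fallback behaviour in isolation (build_options is an illustrative name, not code from this commit):

def build_options(**kwargs) -> dict:
    # The caller-supplied value wins; otherwise keep the old hard-coded default.
    return {
        "temperature": kwargs.get("temperature", 0.8),
        "top_p": kwargs.get("top_p", 1),
    }

print(build_options())                 # {'temperature': 0.8, 'top_p': 1}
print(build_options(temperature=0.2))  # {'temperature': 0.2, 'top_p': 1}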

@@ -75,7 +75,7 @@ def _create_completion(model: str, messages: list, temperature: float = 0.6, str
json_data = json.dumps(separators=(',', ':'), obj={
'model': 'gpt-3.5-turbo',
- 'temperature': 0.6,
+ 'temperature': temperature,
'stream': True,
'messages': messages} | sig)
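
This provider already declares temperature as a named parameter (default 0.6), so the fix only has to reference it. The surrounding call serializes the payload compactly and merges it with a signature dict via the | operator, which requires Python 3.9 or newer. A small sketch of that serialization, with sig reduced to a placeholder (its real fields are provider-specific and not shown in this diff):

import json

sig = {"signature": "placeholder"}  # stand-in; the real sig values are computed elsewhere
payload = json.dumps(separators=(',', ':'), obj={
    'model': 'gpt-3.5-turbo',
    'temperature': 0.6,  # now taken from the temperature argument
    'stream': True,
    'messages': [{'role': 'user', 'content': 'hi'}]} | sig)

# Compact JSON: no spaces after ',' or ':', with the sig keys merged in.
print(payload)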

@@ -75,7 +75,7 @@ def _create_completion(model: str, messages: list, stream: bool, **kwargs):
'userName': '<div class="mwai-name-text">User:</div>',
'aiName': '<div class="mwai-name-text">AI:</div>',
'model': 'gpt-3.5-turbo',
- 'temperature': 0.8,
+ 'temperature': kwargs.get('temperature', 0.8),
'maxTokens': 1024,
'maxResults': 1,
'apiKey': '',

@@ -32,10 +32,10 @@ def _create_completion(model: str, messages: list, stream: bool, **kwargs):
json_data = {
'messages': messages,
'model': 'gpt-3.5-turbo',
- 'temperature': 0.5,
- 'presence_penalty': 0,
- 'frequency_penalty': 0,
- 'top_p': 1,
+ 'temperature': kwargs.get('temperature', 0.5),
+ 'presence_penalty': kwargs.get('presence_penalty', 0),
+ 'frequency_penalty': kwargs.get('frequency_penalty', 0),
+ 'top_p': kwargs.get('top_p', 1),
"stream": True,
}
response = requests.post('https://chat.dfehub.com/api/openai/v1/chat/completions',

@@ -34,10 +34,10 @@ def _create_completion(model: str, messages: list, stream: bool, **kwargs):
'messages': messages,
'stream': True,
'model': model,
- 'temperature': 0.5,
- 'presence_penalty': 0,
- 'frequency_penalty': 0,
- 'top_p': 1,
+ 'temperature': kwargs.get('temperature', 0.5),
+ 'presence_penalty': kwargs.get('presence_penalty', 0),
+ 'frequency_penalty': kwargs.get('frequency_penalty', 0),
+ 'top_p': kwargs.get('top_p', 1),
}
response = requests.post('https://free.easychat.work/api/openai/v1/chat/completions',

@@ -11,7 +11,7 @@ working = False
def _create_completion(model: str, messages: list, stream: bool, temperature: float = 0.7, **kwargs):
payload = {
"temperature": 0.7,
"temperature": temperature,
"messages": messages,
"model": model,
"stream": True,
