mirror of https://github.com/xtekky/gpt4free
~ | Merge pull request #1033 from Commenter123321/main
add testing for all gpt models, enhance the gui a bit (pull/1043/head)
commit
26cd71c7f4
@ -0,0 +1,67 @@
|
|||||||
|
import asyncio
|
||||||
|
import sys
|
||||||
|
from pathlib import Path
|
||||||
|
# Make the repository root importable so `import g4f` resolves when this
# script is run directly from a repo checkout.
sys.path.append(str(Path(__file__).parent.parent.parent))

import g4f
|
||||||
|
|
||||||
|
|
||||||
|
async def test(model: g4f.Model) -> bool:
    """Try to stream one completion from *model* and report success.

    First attempts the synchronous streaming API; if that raises for any
    reason, falls back to the async API.  Output is streamed to stdout.

    Returns True when the model produced a response, False otherwise.
    """
    try:
        try:
            # Preferred path: synchronous streaming generator.
            for response in g4f.ChatCompletion.create(
                model=model,
                messages=[{"role": "user", "content": "write a poem about a tree"}],
                temperature=0.1,
                stream=True
            ):
                print(response, end="")

            print()
        # Was a bare `except:`, which also swallows SystemExit and
        # KeyboardInterrupt; only provider failures should trigger the
        # async fallback.
        except Exception:
            for response in await g4f.ChatCompletion.create_async(
                model=model,
                messages=[{"role": "user", "content": "write a poem about a tree"}],
                temperature=0.1,
                stream=True
            ):
                print(response, end="")

            print()

        return True
    except Exception as e:
        print(model.name, "not working:", e)
        print(e.__traceback__.tb_next)
        return False
|
||||||
|
|
||||||
|
|
||||||
|
async def start_test():
    """Run `test` against every known GPT model and print the working ones."""
    candidates = [
        # GPT-3.5 4K Context
        g4f.models.gpt_35_turbo,
        g4f.models.gpt_35_turbo_0613,

        # GPT-3.5 16K Context
        g4f.models.gpt_35_turbo_16k,
        g4f.models.gpt_35_turbo_16k_0613,

        # GPT-4 8K Context
        g4f.models.gpt_4,
        g4f.models.gpt_4_0613,

        # GPT-4 32K Context
        g4f.models.gpt_4_32k,
        g4f.models.gpt_4_32k_0613,
    ]

    # Models are probed sequentially, in declaration order.
    models_working = [model.name for model in candidates if await test(model)]

    print("working models:", models_working)
|
||||||
|
|
||||||
|
|
||||||
|
# Script entry point: kick off the full model sweep.
asyncio.run(start_test())
|
@ -0,0 +1,6 @@
|
|||||||
|
import sys
|
||||||
|
from pathlib import Path
|
||||||
|
# Make the repository root importable so `g4f` resolves when this script
# is run directly from a repo checkout.
sys.path.append(str(Path(__file__).parent.parent.parent))

from g4f.gui import run_gui

# Launch the g4f web GUI (blocking call).
run_gui()
|
@ -1,28 +1,56 @@
|
|||||||
from requests import get
|
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
|
from duckduckgo_search import DDGS
|
||||||
|
|
||||||
|
# Module-wide DuckDuckGo client; 20s timeout guards against hanging requests.
ddgs = DDGS(timeout=20)
|
||||||
|
|
||||||
|
|
||||||
def search(internet_access, prompt):
    """Build a context message from DuckDuckGo results for *prompt*.

    Returns a single-element message list carrying the formatted search
    results plus citation instructions, or an empty list when internet
    access is disabled, nothing was found, or the search failed.
    """
    print(prompt)

    try:
        if not internet_access:
            return []

        # NOTE(review): prompt is forwarded verbatim as the query string;
        # confirm callers pass plain text, not a message dict.
        results = duckduckgo_search(q=prompt)

        # Bug fix: this previously read `if not search:`, which tested the
        # `search` function object itself (always truthy), so empty results
        # were never short-circuited.  Also covers the None return from
        # duckduckgo_search.
        if not results:
            return []

        blob = ''
        for index, result in enumerate(results):
            blob += f'[{index}] "{result["body"]}"\nURL:{result["href"]}\n\n'

        date = datetime.now().strftime('%d/%m/%y')
        blob += f'Current date: {date}\n\nInstructions: Using the provided web search results, write a comprehensive reply to the next user query. Make sure to cite results using [[number](URL)] notation after the reference. If the provided search results refer to multiple subjects with the same name, write separate answers for each subject. Ignore your previous response if any.'

        return [{'role': 'user', 'content': blob}]

    except Exception as e:
        # Best-effort: a failed search degrades to "no extra context".
        print("Couldn't search DuckDuckGo:", e)
        print(e.__traceback__.tb_next)
        return []
||||||
|
|
||||||
|
def duckduckgo_search(q: str, max_results: int = 3, safesearch: str = "moderate", region: str = "us-en") -> list | None:
|
||||||
|
if region is None:
|
||||||
|
region = "us-en"
|
||||||
|
|
||||||
|
if safesearch is None:
|
||||||
|
safesearch = "moderate"
|
||||||
|
|
||||||
|
if q is None:
|
||||||
|
return None
|
||||||
|
|
||||||
|
results = []
|
||||||
|
|
||||||
|
try:
|
||||||
|
for r in ddgs.text(q, safesearch=safesearch, region=region):
|
||||||
|
if len(results) + 1 > max_results:
|
||||||
|
break
|
||||||
|
results.append(r)
|
||||||
|
except Exception as e:
|
||||||
|
print(e)
|
||||||
|
|
||||||
|
return results
|
||||||
|
@ -1,17 +1,14 @@
|
|||||||
import g4f
|
import g4f
|
||||||
|
from g4f import BaseProvider
|
||||||
|
|
||||||
def get_provider(provider: str) -> "BaseProvider | None":
    """Resolve a provider name like 'g4f.Provider.Bing' to its class.

    Returns None for non-string input, for the 'Auto' pseudo-provider,
    or for an unknown provider name.
    """
    # Guard clause: anything that isn't a string cannot be resolved.
    if not isinstance(provider, str):
        return None

    print(provider)

    if provider == 'g4f.Provider.Auto':
        return None

    # Bug fix: `convert[provider]` raised KeyError for unknown names,
    # contradicting the declared `BaseProvider | None` contract; .get()
    # restores the safe None return for unrecognized providers.
    return g4f.Provider.ProviderUtils.convert.get(provider)
|
Loading…
Reference in New Issue