HuggingChat: Strip leading whitespace from the first token in the stream

For some reason, the first token from the HuggingChat stream always starts with whitespace. This commit strips the leading whitespace from the first token in the stream to fix the issue.
nullstreak 2023-12-15 23:58:13 +01:00 committed by GitHub
parent 71cd54ce69
commit f554018da2


@@ -47,14 +47,19 @@ class HuggingChat(AsyncGeneratorProvider):
                 "web_search": web_search
             }
             async with session.post(f"{cls.url}/conversation/{conversation_id}", json=send, proxy=proxy) as response:
+                first_token = True
                 async for line in response.content:
                     line = json.loads(line[:-1])
                     if "type" not in line:
                         raise RuntimeError(f"Response: {line}")
                     elif line["type"] == "stream":
-                        yield line["token"]
+                        token = line["token"]
+                        if first_token:
+                            token = token.lstrip()
+                            first_token = False
+                        yield token
                     elif line["type"] == "finalAnswer":
                         break
             async with session.delete(f"{cls.url}/conversation/{conversation_id}", proxy=proxy) as response:
                 response.raise_for_status()
                 response.raise_for_status()
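
For illustration, here is a minimal, self-contained sketch of the same first-token stripping pattern applied to an arbitrary async token stream. The helper strip_first_token and the fake_stream generator are hypothetical names used only for this example; they are not part of the provider and merely reproduce the behaviour the commit addresses.

import asyncio
from typing import AsyncIterator

async def strip_first_token(tokens: AsyncIterator[str]) -> AsyncIterator[str]:
    # Hypothetical wrapper: strip leading whitespace from the first token only,
    # then pass every later token through unchanged.
    first_token = True
    async for token in tokens:
        if first_token:
            token = token.lstrip()
            first_token = False
        yield token

async def demo() -> None:
    async def fake_stream() -> AsyncIterator[str]:
        # Simulates the observed behaviour: the first streamed token
        # begins with a space.
        for token in [" Hello", ",", " world", "!"]:
            yield token

    text = "".join([token async for token in strip_first_token(fake_stream())])
    print(text)  # -> "Hello, world!"

if __name__ == "__main__":
    asyncio.run(demo())

Tracking the "first token" flag inside the generator (as the commit does) avoids buffering the whole response; only the very first chunk is touched, so later tokens keep any whitespace that is genuinely part of the model output.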