pull/854/head
ManishMadan2882 4 months ago
commit 70ad1fb3d8

@@ -39,6 +39,9 @@ class Settings(BaseSettings):
    SAGEMAKER_ACCESS_KEY: Optional[str] = None # SageMaker access key
    SAGEMAKER_SECRET_KEY: Optional[str] = None # SageMaker secret key

    # prem ai project id
    PREMAI_PROJECT_ID: Optional[str] = None


path = Path(__file__).parent.parent.absolute()
settings = Settings(_env_file=path.joinpath(".env"), _env_file_encoding="utf-8")
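For context (not part of the diff): with the field above declared on Settings, pydantic's BaseSettings should pick the value up from the `.env` file referenced below (or from the process environment). A minimal sketch, assuming a hypothetical `PREMAI_PROJECT_ID=1234` entry in that `.env`:

```python
# Minimal sketch, not part of the commit. Assumes application/.env contains a
# line like `PREMAI_PROJECT_ID=1234` (hypothetical value); otherwise the field
# stays None, since it is declared Optional[str] = None.
from application.core.settings import settings

print(settings.PREMAI_PROJECT_ID)  # e.g. "1234", or None if unset
```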

@@ -20,7 +20,7 @@ class DocsGPTAPILLM(BaseLLM):
                "max_new_tokens": 30
            }
        )
        response_clean = response.json()['a'].split("###")[0]
        response_clean = response.json()['a'].replace("###", "")

        return response_clean
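For context (not part of the diff): the change above swaps `split("###")[0]`, which discards everything after the first `###` marker, for `replace("###", "")`, which keeps the whole answer and only strips the markers. A small illustration with a made-up response string:

```python
# Illustration only; the response text is made up.
raw = "The answer starts here ### and continues after the marker"

print(raw.split("###")[0])     # 'The answer starts here '  (tail discarded)
print(raw.replace("###", ""))  # 'The answer starts here  and continues after the marker'
```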

@@ -4,6 +4,7 @@ from application.llm.huggingface import HuggingFaceLLM
from application.llm.llama_cpp import LlamaCpp
from application.llm.anthropic import AnthropicLLM
from application.llm.docsgpt_provider import DocsGPTAPILLM
from application.llm.premai import PremAILLM
@@ -15,7 +16,8 @@ class LLMCreator:
        'huggingface': HuggingFaceLLM,
        'llama.cpp': LlamaCpp,
        'anthropic': AnthropicLLM,
        'docsgpt': DocsGPTAPILLM
        'docsgpt': DocsGPTAPILLM,
        'premai': PremAILLM,
    }

    @classmethod
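For context (not part of the diff): registering `'premai': PremAILLM` in this mapping lets the rest of the application select the provider by name. A minimal sketch, assuming the classmethod cut off above is a factory along the lines of `create_llm(type, *args, **kwargs)` that looks the type up in the dict (the method name and signature are not shown in this hunk):

```python
# Hypothetical usage; `create_llm` and the api_key value are assumptions,
# since the factory method itself is cut off in the hunk above.
from application.llm.llm_creator import LLMCreator

llm = LLMCreator.create_llm('premai', api_key='your-premai-api-key')
# -> an instance of PremAILLM, resolved via the 'premai' entry added above
```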

@@ -0,0 +1,33 @@
from application.llm.base import BaseLLM
from application.core.settings import settings


class PremAILLM(BaseLLM):

    def __init__(self, api_key):
        from premai import Prem

        self.client = Prem(
            api_key=api_key
        )
        self.api_key = api_key
        self.project_id = settings.PREMAI_PROJECT_ID

    def gen(self, model, engine, messages, stream=False, **kwargs):
        response = self.client.chat.completions.create(model=model,
            project_id=self.project_id,
            messages=messages,
            stream=stream,
            **kwargs)

        return response.choices[0].message["content"]

    def gen_stream(self, model, engine, messages, stream=True, **kwargs):
        response = self.client.chat.completions.create(model=model,
            project_id=self.project_id,
            messages=messages,
            stream=stream,
            **kwargs)

        for line in response:
            if line.choices[0].delta["content"] is not None:
                yield line.choices[0].delta["content"]
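For context (not part of the diff): a minimal usage sketch of the new class. It assumes the `premai` package is installed, `PREMAI_PROJECT_ID` is set as in the settings hunk above, and that the API key and model name below are placeholders:

```python
# Minimal sketch, not part of the commit; api_key and model are placeholders.
from application.llm.premai import PremAILLM

llm = PremAILLM(api_key="your-premai-api-key")
messages = [{"role": "user", "content": "What is DocsGPT?"}]

# Blocking call: returns the full completion text.
print(llm.gen(model="some-premai-model", engine=None, messages=messages))

# Streaming call: yields content chunks as they arrive.
for chunk in llm.gen_stream(model="some-premai-model", engine=None, messages=messages):
    print(chunk, end="")
```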