fixed the gui

Jonathan Dunn 2024-04-01 11:33:34 -04:00
parent d7ca76cc5c
commit f30559bc63


@@ -207,23 +207,33 @@ function updateOrAddKey(envContent, keyName, keyValue) {
 }
 async function getOllamaModels() {
-  ollama = new Ollama.Ollama();
-  const _models = await ollama.list();
-  return _models.models.map((x) => x.name);
+  try {
+    ollama = new Ollama.Ollama();
+    const _models = await ollama.list();
+    return _models.models.map((x) => x.name);
+  } catch (error) {
+    if (error.cause && error.cause.code === "ECONNREFUSED") {
+      console.error(
+        "Failed to connect to Ollama. Make sure Ollama is running and accessible."
+      );
+      return []; // Return an empty array instead of throwing an error
+    } else {
+      console.error("Error fetching models from Ollama:", error);
+      throw error; // Re-throw the error for other types of errors
+    }
+  }
 }
 async function getModels() {
-  ollama = new Ollama.Ollama();
   allModels = {
     gptModels: [],
     claudeModels: [],
     ollamaModels: [],
   };
-  let keys = await loadApiKeys(); // Assuming loadApiKeys() is updated to return both keys
+  let keys = await loadApiKeys();
   if (keys.claudeKey) {
-    // Assuming claudeModels do not require an asynchronous call to be fetched
     claudeModels = [
       "claude-3-opus-20240229",
       "claude-3-sonnet-20240229",
@@ -232,14 +242,13 @@ async function getModels() {
     ];
     allModels.claudeModels = claudeModels;
   }
-  let gptModelsPromise = [];
   if (keys.openAIKey) {
     openai = new OpenAI({ apiKey: keys.openAIKey });
-    // Wrap asynchronous call with a Promise to handle it in parallel
     try {
-      gptModelsPromise = openai.models.list();
+      const response = await openai.models.list();
+      allModels.gptModels = response.data;
     } catch (error) {
-      gptModelsPromise = [];
       console.error("Error fetching models from OpenAI:", error);
     }
   }
@@ -250,24 +259,16 @@ async function getModels() {
     ollama.list &&
     typeof ollama.list === "function"
   ) {
-    // Assuming ollama.list() returns a Promise
-    ollamaModelsPromise = getOllamaModels();
+    try {
+      allModels.ollamaModels = await getOllamaModels();
+    } catch (error) {
+      console.error("Error fetching models from Ollama:", error);
+    }
   } else {
-    console.log("ollama is not available or does not support listing models.");
+    console.log("Ollama is not available or does not support listing models.");
   }
-  // Wait for all asynchronous operations to complete
-  try {
-    const results = await Promise.all(
-      [gptModelsPromise, ollamaModelsPromise].filter(Boolean)
-    ); // Filter out undefined promises
-    allModels.gptModels = results[0]?.data || []; // Assuming the first promise is always GPT models if it exists
-    allModels.ollamaModels = results[1] || []; // Assuming the second promise is always Ollama models if it exists
-  } catch (error) {
-    console.error("Error fetching models from OpenAI or Ollama:", error);
-  }
-  return allModels; // Return the aggregated results
+  return allModels;
 }
 async function getPatternContent(patternName) {
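For orientation, a minimal usage sketch of the changed behavior (not part of this commit): because getOllamaModels() now resolves to an empty array when the Ollama connection is refused, and getModels() catches the remaining per-provider errors itself, a caller can await the aggregated model list without its own error handling. The IPC handler and channel name below are hypothetical, shown only to illustrate how the result might be consumed.

const { ipcMain } = require("electron");

// Hypothetical Electron main-process handler; the channel name is illustrative.
// With this change, getModels() resolves even when Ollama is not running:
// allModels.ollamaModels is simply [] and the OpenAI/Claude lists still populate.
ipcMain.handle("fabric:list-models", async () => {
  const models = await getModels();
  return {
    gptModels: models.gptModels.map((m) => m.id), // OpenAI list() returns model objects; .id is the name
    claudeModels: models.claudeModels, // plain name strings
    ollamaModels: models.ollamaModels, // plain name strings, [] if Ollama is unreachable
  };
});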