diff --git a/src/public/app/widgets/type_widgets/options/ai_settings.ts b/src/public/app/widgets/type_widgets/options/ai_settings.ts
index 8eaf95e17..badc45721 100644
--- a/src/public/app/widgets/type_widgets/options/ai_settings.ts
+++ b/src/public/app/widgets/type_widgets/options/ai_settings.ts
@@ -252,45 +252,52 @@ export default class AiSettingsWidget extends OptionsWidget {
         const $refreshModels = this.$widget.find('.refresh-models');
         $refreshModels.on('click', async () => {
             $refreshModels.prop('disabled', true);
-            $refreshModels.text(t("ai_llm.refresh_models"));
-
+            $refreshModels.text(t("ai_llm.refreshing_models"));
+
             try {
                 const ollamaBaseUrl = this.$widget.find('.ollama-base-url').val() as string;
                 const response = await server.post('ollama/list-models', { baseUrl: ollamaBaseUrl });
-
-                if (response && response.models) {
+
+                if (response && response.success && response.models && response.models.length > 0) {
                     const $embedModelSelect = this.$widget.find('.ollama-embedding-model');
                     const currentValue = $embedModelSelect.val();
-
+
                     // Clear existing options
                     $embedModelSelect.empty();
-
+
                     // Add embedding-specific models first
-                    const embeddingModels = response.models.filter(model =>
+                    const embeddingModels = response.models.filter(model =>
                         model.name.includes('embed') || model.name.includes('bert'));
-
+
                     embeddingModels.forEach(model => {
                         $embedModelSelect.append(`<option value="${model.name}">${model.name}</option>`);
                     });
-
-                    // Add separator
-                    $embedModelSelect.append(`<option disabled>──────────</option>`);
-
+
+                    // Add separator if we have both types
+                    if (embeddingModels.length > 0) {
+                        $embedModelSelect.append(`<option disabled>──────────</option>`);
+                    }
+
                     // Add other models (LLMs can also generate embeddings)
-                    const otherModels = response.models.filter(model =>
+                    const otherModels = response.models.filter(model =>
                         !model.name.includes('embed') && !model.name.includes('bert'));
-
+
                     otherModels.forEach(model => {
                         $embedModelSelect.append(`<option value="${model.name}">${model.name}</option>`);
                     });
-
+
                     // Restore previous selection if possible
                     if (currentValue) {
                         $embedModelSelect.val(currentValue);
                     }
+
+                    toastService.showMessage("Models refreshed successfully");
+                } else {
+                    toastService.showError("No models found from Ollama server");
                 }
-            } catch (error) {
+            } catch (error: any) {
                 console.error("Error refreshing Ollama models:", error);
+                toastService.showError(`Error refreshing models: ${error.message || 'Unknown error'}`);
             } finally {
                 $refreshModels.prop('disabled', false);
                 $refreshModels.text(t("ai_llm.refresh_models"));
diff --git a/src/routes/api/ollama.ts b/src/routes/api/ollama.ts
index a945a993f..1486a94c9 100644
--- a/src/routes/api/ollama.ts
+++ b/src/routes/api/ollama.ts
@@ -20,18 +20,18 @@ async function listModels(req: Request, res: Response) {
         });
 
         // Return the models list
-        return res.send({
+        const models = response.data.models || [];
+
+        // Important: don't use "return res.send()" - just return the data
+        return {
             success: true,
-            models: response.data.models || []
-        });
+            models: models
+        };
     } catch (error: any) {
         log.error(`Error listing Ollama models: ${error.message || 'Unknown error'}`);
-        return res.status(500).send({
-            success: false,
-            message: error.message || 'Failed to list Ollama models',
-            error: error.toString()
-        });
+        // Properly throw the error to be handled by the global error handler
+        throw new Error(`Failed to list Ollama models: ${error.message || 'Unknown error'}`);
     }
 }