Mirror of https://github.com/TriliumNext/Notes.git, synced 2025-10-31 21:11:30 +08:00
	fix(llm): don't show embedding models in the chat section
parent 4732d7784f
commit 6fdd0d021c
author perf3ct
@@ -118,7 +118,6 @@ export default class AiSettingsWidget extends OptionsWidget {
         this.setupChangeHandler('.ollama-base-url', 'ollamaBaseUrl');
         this.setupChangeHandler('.ollama-default-model', 'ollamaDefaultModel');
         this.setupChangeHandler('.ollama-embedding-model', 'ollamaEmbeddingModel');
-        this.setupChangeHandler('.ollama-chat-embedding-model', 'ollamaEmbeddingModel');
         this.setupChangeHandler('.ollama-embedding-base-url', 'ollamaEmbeddingBaseUrl');
 
         // Embedding-specific provider options
@@ -671,7 +670,6 @@ export default class AiSettingsWidget extends OptionsWidget {
         this.$widget.find('.ollama-embedding-base-url').val(options.ollamaEmbeddingBaseUrl || 'http://localhost:11434');
         this.setModelDropdownValue('.ollama-default-model', options.ollamaDefaultModel);
         this.setModelDropdownValue('.ollama-embedding-model', options.ollamaEmbeddingModel);
-        this.setModelDropdownValue('.ollama-chat-embedding-model', options.ollamaEmbeddingModel);
 
         // Embedding-specific provider options
         this.$widget.find('.openai-embedding-api-key').val(options.openaiEmbeddingApiKey || '');
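The two removed lines above are the widget-side half of the fix: once the chat section stops rendering a `.ollama-chat-embedding-model` select, both the change handler and the dropdown-value setter for that class point at markup that no longer exists. As a rough illustration only, here is a minimal TypeScript sketch of what helpers like `setupChangeHandler` and `setModelDropdownValue` typically do in an options widget; the class name `AiSettingsSketch`, the `saveOption` callback, and the method bodies are assumptions for illustration, not the actual Trilium implementation.

// Hypothetical, simplified sketch of the helpers referenced in the diff above.
// The real AiSettingsWidget may differ; this only illustrates why the
// '.ollama-chat-embedding-model' bindings became dead code once the chat
// section stopped rendering that <select>.
import $ from "jquery";

export class AiSettingsSketch {
    constructor(
        private $widget: JQuery<HTMLElement>,
        // stand-in for however the widget persists an option server-side (assumption)
        private saveOption: (name: string, value: string) => Promise<void>
    ) {}

    // Persist the mapped option whenever the matching form control changes.
    setupChangeHandler(selector: string, optionName: string) {
        this.$widget.find(selector).on("change", async (e) => {
            await this.saveOption(optionName, String($(e.currentTarget).val() ?? ""));
        });
    }

    // Push the stored option value back into a model dropdown, adding it as an
    // <option> first if the fetched model list does not already include it.
    setModelDropdownValue(selector: string, value?: string) {
        const $select = this.$widget.find(selector);
        if (value && $select.find(`option[value="${value}"]`).length === 0) {
            $select.append($("<option>").attr("value", value).text(value));
        }
        $select.val(value ?? "");
    }
}

Note that `.ollama-embedding-model` (without the `-chat-` infix) keeps both of its bindings in the hunks above, so embedding model selection itself stays wired up; only the duplicate exposed in the chat section goes away.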
@@ -98,14 +98,6 @@ export const TPL = `
                     <div class="form-text">${t("ai_llm.openai_model_description")}</div>
                     <button class="btn btn-sm btn-outline-secondary refresh-openai-models">${t("ai_llm.refresh_models")}</button>
                 </div>
-
-                <div class="form-group">
-                    <label>${t("ai_llm.embedding_model")}</label>
-                    <select class="openai-embedding-model form-control">
-                        <option value="">${t("ai_llm.select_model")}</option>
-                    </select>
-                    <div class="form-text">${t("ai_llm.openai_embedding_model_description")}</div>
-                </div>
             </div>
         </div>
     </div>
@@ -162,14 +154,6 @@ export const TPL = `
                     <div class="form-text">${t("ai_llm.ollama_model_description")}</div>
                     <button class="btn btn-sm btn-outline-secondary refresh-models"><span class="bx bx-refresh"></span></button>
                 </div>
-
-                <div class="form-group">
-                    <label>${t("ai_llm.embedding_model")}</label>
-                    <select class="ollama-chat-embedding-model form-control">
-                        <option value="">${t("ai_llm.select_model")}</option>
-                    </select>
-                    <div class="form-text">${t("ai_llm.ollama_embedding_model_description")}</div>
-                </div>
             </div>
         </div>
     </div>
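If a regression guard were wanted, a small template assertion would capture the intent of this change. The sketch below is hypothetical: the vitest imports, the `./template` module path, and the test itself are not part of this commit; the only class the diff retires from both the template and the widget code is `ollama-chat-embedding-model`.

// Hypothetical regression test, not part of this commit. It asserts that the
// chat-section template no longer contains the retired embedding-model select.
// (Importing TPL evaluates its ${t(...)} calls, so i18n may need to be
// initialised or mocked in the test environment.)
import { describe, expect, it } from "vitest";
import { TPL } from "./template"; // assumed path of the file exporting TPL

describe("AI settings chat section template", () => {
    it("no longer renders the Ollama chat embedding-model dropdown", () => {
        expect(TPL).not.toContain("ollama-chat-embedding-model");
    });
});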