Use OpenAI-compatible /v1/models endpoint by default (#17689)
* refactor: Data fetching via stores
* refactor: Use OpenAI-compatible `/v1/models` endpoint by default to list models
* chore: update webui build output
This commit is contained in:
committed by
GitHub
parent
41c5e02f42
commit
e9f9483464
@@ -677,48 +677,6 @@ export class ChatService {
|
||||
// Utilities
|
||||
// ─────────────────────────────────────────────────────────────────────────────
|
||||
|
||||
/**
|
||||
* Get server properties - static method for API compatibility (to be refactored)
|
||||
*/
|
||||
static async getServerProps(): Promise<ApiLlamaCppServerProps> {
|
||||
try {
|
||||
const response = await fetch(`./props`, {
|
||||
headers: getJsonHeaders()
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`Failed to fetch server props: ${response.status}`);
|
||||
}
|
||||
|
||||
const data = await response.json();
|
||||
return data;
|
||||
} catch (error) {
|
||||
console.error('Error fetching server props:', error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get model information from /models endpoint (to be refactored)
|
||||
*/
|
||||
static async getModels(): Promise<ApiModelListResponse> {
|
||||
try {
|
||||
const response = await fetch(`./models`, {
|
||||
headers: getJsonHeaders()
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`Failed to fetch models: ${response.status} ${response.statusText}`);
|
||||
}
|
||||
|
||||
const data = await response.json();
|
||||
return data;
|
||||
} catch (error) {
|
||||
console.error('Error fetching models:', error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Injects a system message at the beginning of the conversation if provided.
|
||||
* Checks for existing system messages to avoid duplication.
|
||||
|
||||
@@ -7,7 +7,7 @@ import { getJsonHeaders } from '$lib/utils';
|
||||
*
|
||||
* This service handles communication with model-related endpoints:
|
||||
* - `/v1/models` - OpenAI-compatible model list (MODEL + ROUTER mode)
|
||||
* - `/models` - Router-specific model management (ROUTER mode only)
|
||||
* - `/models/load`, `/models/unload` - Router-specific model management (ROUTER mode only)
|
||||
*
|
||||
* **Responsibilities:**
|
||||
* - List available models
|
||||
@@ -43,7 +43,7 @@ export class ModelsService {
|
||||
* Returns models with load status, paths, and other metadata
|
||||
*/
|
||||
static async listRouter(): Promise<ApiRouterModelsListResponse> {
|
||||
const response = await fetch(`${base}/models`, {
|
||||
const response = await fetch(`${base}/v1/models`, {
|
||||
headers: getJsonHeaders()
|
||||
});
|
||||
|
||||
|
||||
Reference in New Issue
Block a user