server: remove default "gpt-3.5-turbo" model name (#17668)

* server: remove default "gpt-3.5-turbo" model name

* do not reflect back model name from request

* fix test
This commit is contained in:
Xuan-Son Nguyen
2025-12-02 11:38:57 +01:00
committed by GitHub
parent fd3abe849e
commit 5d6bd842ea
5 changed files with 40 additions and 18 deletions
+8 -3
View File
@@ -1263,7 +1263,11 @@ json convert_anthropic_to_oai(const json & body) {
return oai_body;
}
-json format_embeddings_response_oaicompat(const json & request, const json & embeddings, bool use_base64) {
+json format_embeddings_response_oaicompat(
+    const json & request,
+    const std::string & model_name,
+    const json & embeddings,
+    bool use_base64) {
json data = json::array();
int32_t n_tokens = 0;
int i = 0;
@@ -1293,7 +1297,7 @@ json format_embeddings_response_oaicompat(const json & request, const json & emb
}
json res = json {
-        {"model", json_value(request, "model", std::string(DEFAULT_OAICOMPAT_MODEL))},
+        {"model", json_value(request, "model", model_name)},
{"object", "list"},
{"usage", json {
{"prompt_tokens", n_tokens},
@@ -1307,6 +1311,7 @@ json format_embeddings_response_oaicompat(const json & request, const json & emb
json format_response_rerank(
const json & request,
+    const std::string & model_name,
const json & ranks,
bool is_tei_format,
std::vector<std::string> & texts,
@@ -1338,7 +1343,7 @@ json format_response_rerank(
if (is_tei_format) return results;
json res = json{
-        {"model", json_value(request, "model", std::string(DEFAULT_OAICOMPAT_MODEL))},
+        {"model", json_value(request, "model", model_name)},
{"object", "list"},
{"usage", json{
{"prompt_tokens", n_tokens},