libs : rename libcommon -> libllama-common (#21936)
* cmake : allow libcommon to be shared
* cmake : rename libcommon to libllama-common
* cont : set -fPIC for httplib
* cont : export all symbols
* cont : fix build_info exports
* libs : add libllama-common-base
* log : add common_log_get_verbosity_thold()
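The recurring code-level change in this commit is the switch from a per-binary `build_info` string to the `llama_build_info()` accessor exported by the library. A minimal sketch of the pattern, assuming `llama_build_info()` returns a process-lifetime `const char *` build string such as "b1234-abcdef0"; the stand-in definition below is hypothetical and only there so the sketch compiles on its own:

#include <cstdio>
#include <string>

// Hypothetical stand-in for the real accessor (declared in build-info.h / llama.h).
static const char * llama_build_info(void) { return "b1234-abcdef0"; }

int main() {
    // Before: each call site kept its own std::string assembled from generated
    // build-info macros. After: ask the library directly and convert only where
    // a std::string is actually required.
    const std::string build_info = std::string(llama_build_info());
    std::printf("build_info: %s\n", build_info.c_str());
    std::printf("build_info: %s\n", llama_build_info()); // printf-style, no .c_str()
    return 0;
}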
@@ -23,7 +23,7 @@ endif()
 target_include_directories(${TARGET} PRIVATE ../mtmd)
 target_include_directories(${TARGET} PRIVATE ${CMAKE_SOURCE_DIR})
-target_link_libraries(${TARGET} PUBLIC common mtmd ${CMAKE_THREAD_LIBS_INIT})
+target_link_libraries(${TARGET} PUBLIC llama-common mtmd ${CMAKE_THREAD_LIBS_INIT})


 # llama-server executable
@@ -68,6 +68,6 @@ install(TARGETS ${TARGET} RUNTIME)
 target_include_directories(${TARGET} PRIVATE ../mtmd)
 target_include_directories(${TARGET} PRIVATE ${CMAKE_SOURCE_DIR})
-target_link_libraries(${TARGET} PRIVATE server-context PUBLIC common cpp-httplib ${CMAKE_THREAD_LIBS_INIT})
+target_link_libraries(${TARGET} PRIVATE server-context PUBLIC llama-common cpp-httplib ${CMAKE_THREAD_LIBS_INIT})

 target_compile_features(${TARGET} PRIVATE cxx_std_17)
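At the build-system level the change is confined to the target name: executables that previously linked the `common` target now link `llama-common`, while include directories and the other dependencies (`mtmd`, `server-context`, `cpp-httplib`, threads) are left as they were.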
@@ -4,6 +4,7 @@
 #include "server-task.h"
 #include "server-queue.h"

+#include "build-info.h"
 #include "common.h"
 #include "llama.h"
 #include "log.h"
@@ -3010,7 +3011,7 @@ server_context_meta server_context::get_meta() const {
     auto eos_token_str = eos_id != LLAMA_TOKEN_NULL ? common_token_to_piece(impl->ctx, eos_id, true) : "";

     return server_context_meta {
-        /* build_info */ build_info,
+        /* build_info */ std::string(llama_build_info()),
         /* model_name */ impl->model_name,
         /* model_aliases */ impl->model_aliases,
         /* model_tags */ impl->model_tags,
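Assuming `llama_build_info()` returns a `const char *`, the explicit `std::string(...)` wrap makes the conversion to the struct's string field visible at the call site; the same wrap recurs in the JSON literals below.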
@@ -1,6 +1,7 @@
 #include "server-common.h"
 #include "server-models.h"

+#include "build-info.h"
 #include "preset.h"
 #include "download.h"
@@ -936,7 +937,7 @@ void server_models_routes::init_routes() {
                 {"n_ctx", 0},
             }},
             {"webui_settings", webui_settings},
-            {"build_info", build_info},
+            {"build_info", std::string(llama_build_info())},
         });
         return res;
     }
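A minimal sketch of the resulting JSON, assuming nlohmann::json, which the surrounding server code appears to use (note the `json::array` literal syntax above); the `llama_build_info()` stand-in is again hypothetical so the example runs on its own:

#include <nlohmann/json.hpp>
#include <iostream>
#include <string>

using json = nlohmann::ordered_json;

// Hypothetical stand-in for the real accessor.
static const char * llama_build_info() { return "b1234-abcdef0"; }

int main() {
    // Mirrors the response shape above: the build string is embedded
    // directly from the library rather than from a server-local copy.
    json res = {
        {"webui_settings", json::object()},
        {"build_info", std::string(llama_build_info())},
    };
    std::cout << res.dump(2) << std::endl;
    return 0;
}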
@@ -1,5 +1,6 @@
 #include "server-task.h"

+#include "build-info.h"
 #include "chat.h"
 #include "common.h"
 #include "json-schema-to-grammar.h"
@@ -791,7 +792,7 @@ json server_task_result_cmpl_final::to_json_oaicompat() {
         })},
         {"created", t},
         {"model", oaicompat_model},
-        {"system_fingerprint", build_info},
+        {"system_fingerprint", std::string(llama_build_info())},
         {"object", "text_completion"},
         {"usage", usage_json_oaicompat()},
         {"id", oaicompat_cmpl_id}
@@ -839,7 +840,7 @@ json server_task_result_cmpl_final::to_json_oaicompat_chat() {
         {"choices", json::array({choice})},
         {"created", t},
         {"model", oaicompat_model},
-        {"system_fingerprint", build_info},
+        {"system_fingerprint", std::string(llama_build_info())},
         {"object", "chat.completion"},
         {"usage", usage_json_oaicompat()},
         {"id", oaicompat_cmpl_id}
@@ -876,7 +877,7 @@ json server_task_result_cmpl_final::to_json_oaicompat_chat_stream() {
         {"created", t},
         {"id", oaicompat_cmpl_id},
         {"model", oaicompat_model},
-        {"system_fingerprint", build_info},
+        {"system_fingerprint", std::string(llama_build_info())},
         {"object", "chat.completion.chunk"},
     });
 }
@@ -892,7 +893,7 @@ json server_task_result_cmpl_final::to_json_oaicompat_chat_stream() {
         {"created", t},
         {"id", oaicompat_cmpl_id},
         {"model", oaicompat_model},
-        {"system_fingerprint", build_info},
+        {"system_fingerprint", std::string(llama_build_info())},
         {"object", "chat.completion.chunk"},
     });

@@ -904,7 +905,7 @@ json server_task_result_cmpl_final::to_json_oaicompat_chat_stream() {
         {"created", t},
         {"id", oaicompat_cmpl_id},
         {"model", oaicompat_model},
-        {"system_fingerprint", build_info},
+        {"system_fingerprint", std::string(llama_build_info())},
         {"object", "chat.completion.chunk"},
         {"usage", usage_json_oaicompat()},
     });
@@ -1469,7 +1470,7 @@ json server_task_result_cmpl_partial::to_json_oaicompat() {
         })},
         {"created", t},
         {"model", oaicompat_model},
-        {"system_fingerprint", build_info},
+        {"system_fingerprint", std::string(llama_build_info())},
         {"object", "text_completion"},
         {"id", oaicompat_cmpl_id}
     };
@@ -1506,7 +1507,7 @@ json server_task_result_cmpl_partial::to_json_oaicompat_chat() {
         {"created", t},
         {"id", oaicompat_cmpl_id},
         {"model", oaicompat_model},
-        {"system_fingerprint", build_info},
+        {"system_fingerprint", std::string(llama_build_info())},
         {"object", "chat.completion.chunk"},
     });
 };

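All of the `system_fingerprint` sites in the OpenAI-compatible completion, chat, and streaming responses change the same way: the field, which the OpenAI API uses to identify the backend configuration that served a request, now carries the library's build string rather than a server-local copy of it.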
@@ -5,6 +5,7 @@
 #include "server-tools.h"

 #include "arg.h"
+#include "build-info.h"
 #include "common.h"
 #include "llama.h"
 #include "log.h"
@@ -108,7 +109,7 @@ int main(int argc, char ** argv) {
     llama_backend_init();
     llama_numa_init(params.numa);

-    LOG_INF("build_info: %s\n", build_info.c_str());
+    LOG_INF("build_info: %s\n", llama_build_info());
     LOG_INF("%s\n", common_params_get_system_info(params).c_str());

     server_http_context ctx_http;
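Note that this log call drops the `.c_str()`: `LOG_INF` takes printf-style arguments, so the `const char *` returned by `llama_build_info()` can be passed straight through.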