make llama server configurable

commit 2aea0bbdac (parent 06eca937bf)
2024-01-26 15:47:02 +01:00
4 changed files with 20 additions and 5 deletions

@@ -18,6 +18,11 @@ static std::string convertToSafeGrammarString(std::string_view input) {
 	return res;
 }
 
+LlamaCppWeb::LlamaCppWeb(
+	ConfigModelI& conf
+) : _conf(conf), _cli(_conf.get_string("LlamaCppWeb", "server").value_or("localhost:8080")) {
+}
+
 LlamaCppWeb::~LlamaCppWeb(void) {
 }
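
The constructor resolves the server address once, at construction time: if the config carries no "LlamaCppWeb"/"server" entry, value_or() falls back to "localhost:8080". A minimal self-contained sketch of that fallback, with ConfigModelI swapped for a hypothetical stub (the real solanaceae interface is not shown in this diff):

// sketch only: FakeConf is a hypothetical stand-in for solanaceae's ConfigModelI
#include <iostream>
#include <optional>
#include <string>
#include <string_view>

struct FakeConf {
	std::optional<std::string> server; // empty -> key not configured

	std::optional<std::string> get_string(std::string_view /*module*/, std::string_view /*key*/) const {
		return server;
	}
};

int main(void) {
	FakeConf conf;

	// no "server" entry set -> the default is used
	std::cout << conf.get_string("LlamaCppWeb", "server").value_or("localhost:8080") << "\n";

	// entry set -> the configured value wins
	conf.server = "192.168.1.10:8080";
	std::cout << conf.get_string("LlamaCppWeb", "server").value_or("localhost:8080") << "\n";
}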

@@ -2,6 +2,8 @@
 #include "./text_completion_interface.hpp"
 
+#include <solanaceae/util/config_model.hpp>
+
 #include <httplib.h>
 #include <nlohmann/json_fwd.hpp>
@@ -9,14 +11,19 @@
 #include <atomic>
 
 struct LlamaCppWeb : public TextCompletionI {
-	// this mutex locks internally
-	httplib::Client _cli{"http://localhost:8080"};
+	ConfigModelI& _conf;
+	// this mutex-locks internally
+	httplib::Client _cli;
 
 	// this is a bad idea
 	static std::minstd_rand thread_local _rng;
 
 	std::atomic<bool> _use_server_cache {true};
 
+	LlamaCppWeb(
+		ConfigModelI& conf
+	);
+	~LlamaCppWeb(void);
 
 	bool isGood(void) override;
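
isGood() itself is not part of this diff; presumably it probes the llama.cpp server the way the test below expects. A hedged sketch of such a probe with cpp-httplib, assuming the server exposes llama.cpp's /health endpoint:

// sketch only: the real isGood() body is not shown in this diff,
// and "/health" is an assumption based on llama.cpp's server API
#include <httplib.h>
#include <iostream>

int main(void) {
	// same construction as the new _cli member: a "host:port" string
	// without a scheme defaults to plain http in cpp-httplib
	httplib::Client cli{"localhost:8080"};
	cli.set_connection_timeout(1, 0); // 1s, keep the probe snappy

	auto res = cli.Get("/health");
	if (res && res->status == 200) {
		std::cout << "endpoint healthy\n";
	} else {
		std::cout << "endpoint not healthy\n";
	}
}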

@@ -1,5 +1,7 @@
 #include <solanaceae/llama-cpp-web/llama_cpp_web_impl.hpp>
 
+#include <solanaceae/util/simple_config_model.hpp>
 
 #include <nlohmann/json.hpp>
 #include <iostream>
@@ -9,7 +11,8 @@
 #include <cstdint>
 
 int main(void) {
-	LlamaCppWeb lcw;
+	SimpleConfigModel scm;
+	LlamaCppWeb lcw{scm};
 
 	if (!lcw.isGood()) {
 		std::cerr << lcw._cli.host() << " " << lcw._cli.port() << " endpoint not healthy\n";
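
With a default-constructed SimpleConfigModel the "LlamaCppWeb"/"server" entry is unset, so the test talks to localhost:8080. Pointing it elsewhere means setting that entry before constructing LlamaCppWeb; a sketch assuming a set(module, key, value) style mutator, which is not shown in this diff and may not match the actual solanaceae_util API:

#include <solanaceae/llama-cpp-web/llama_cpp_web_impl.hpp>
#include <solanaceae/util/simple_config_model.hpp>

int main(void) {
	SimpleConfigModel scm;

	// hypothetical mutator: name and signature are assumptions,
	// check solanaceae_util's ConfigModelI for the real interface
	scm.set("LlamaCppWeb", "server", "192.168.1.10:8080");

	LlamaCppWeb lcw{scm};
	return lcw.isGood() ? 0 : 1;
}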