Compare commits

..

No commits in common. "301900c507d5719374b82a3730c19ad66eb10820" and "f38da08334d1dfb3cdd9fb5f795d6083d808c12a" have entirely different histories.

7 changed files with 16 additions and 21 deletions

View File

@@ -37,7 +37,7 @@ SOLANA_PLUGIN_EXPORT uint32_t solana_plugin_start(struct SolanaAPI* solana_api)
auto* completion = PLUG_RESOLVE_INSTANCE(TextCompletionI); auto* completion = PLUG_RESOLVE_INSTANCE(TextCompletionI);
auto* conf = PLUG_RESOLVE_INSTANCE(ConfigModelI); auto* conf = PLUG_RESOLVE_INSTANCE(ConfigModelI);
auto* cr = PLUG_RESOLVE_INSTANCE_VERSIONED(Contact3Registry, "1"); auto* cr = PLUG_RESOLVE_INSTANCE_VERSIONED(Contact3Registry, "1");
auto* rmm = PLUG_RESOLVE_INSTANCE(RegistryMessageModelI); auto* rmm = PLUG_RESOLVE_INSTANCE(RegistryMessageModel);
auto* mcd = PLUG_RESOLVE_INSTANCE(MessageCommandDispatcher); auto* mcd = PLUG_RESOLVE_INSTANCE(MessageCommandDispatcher);
// static store, could be anywhere tho // static store, could be anywhere tho

View File

@@ -77,7 +77,7 @@ int64_t LlamaCppWeb::completeSelect(const std::string_view prompt, const std::ve
} }
//grammar += ")"; //grammar += ")";
std::cerr << "generated grammar:\n" << grammar << "\n"; //std::cout << "generated grammar:\n" << grammar << "\n";
auto ret = complete(nlohmann::json{ auto ret = complete(nlohmann::json{
{"prompt", prompt}, {"prompt", prompt},
@@ -89,23 +89,19 @@ int64_t LlamaCppWeb::completeSelect(const std::string_view prompt, const std::ve
{"top_p", 1.0}, // disable {"top_p", 1.0}, // disable
{"n_predict", 256}, // unlikely to ever be so high {"n_predict", 256}, // unlikely to ever be so high
{"seed", _rng()}, {"seed", _rng()},
{"ignore_eos", true},
{"cache_prompt", static_cast<bool>(_use_server_cache)}, {"cache_prompt", static_cast<bool>(_use_server_cache)},
}); });
if (ret.empty()) { if (ret.empty()) {
assert("ret empty" && false);
return -2; return -2;
} }
if (!ret.count("content")) { if (!ret.count("content")) {
assert("no content" && false);
return -3; return -3;
} }
std::string selected = ret.at("content"); std::string selected = ret.at("content");
if (selected.empty()) { if (selected.empty()) {
assert("content empty" && false);
return -4; return -4;
} }
@@ -115,7 +111,6 @@ int64_t LlamaCppWeb::completeSelect(const std::string_view prompt, const std::ve
} }
} }
std::cerr << "content does not contain match\n";
std::cerr << "complete failed j:'" << ret.dump() << "'\n"; std::cerr << "complete failed j:'" << ret.dump() << "'\n";
return -5; return -5;
} }
@@ -130,7 +125,7 @@ std::string LlamaCppWeb::completeLine(const std::string_view prompt) {
{"top_p", 1.0}, // disable {"top_p", 1.0}, // disable
{"n_predict", 400}, {"n_predict", 400},
{"seed", _rng()}, {"seed", _rng()},
{"stop", nlohmann::json::array({"\n"})}, {"stop", {"\n"}},
{"cache_prompt", static_cast<bool>(_use_server_cache)}, {"cache_prompt", static_cast<bool>(_use_server_cache)},
}); });
@@ -152,7 +147,7 @@ nlohmann::json LlamaCppWeb::complete(const nlohmann::json& request_j) {
// streaming instead would be better // streaming instead would be better
_cli.set_read_timeout(std::chrono::minutes(10)); _cli.set_read_timeout(std::chrono::minutes(10));
std::cerr << "j dump: '" << request_j.dump(-1, ' ', true) << "'\n"; //std::cout << "j dump: '" << request_j.dump(-1, ' ', true) << "'\n";
auto res = _cli.Post("/completion", request_j.dump(-1, ' ', true), "application/json"); auto res = _cli.Post("/completion", request_j.dump(-1, ' ', true), "application/json");
@@ -164,7 +159,7 @@ nlohmann::json LlamaCppWeb::complete(const nlohmann::json& request_j) {
//res->body.empty() || //res->body.empty() ||
//res->get_header_value("Content-Type") != "application/json" //res->get_header_value("Content-Type") != "application/json"
) { ) {
std::cerr << "error posting: '" << res->body << "'\n"; std::cerr << "error posting\n";
return {}; return {};
} }

View File

@@ -19,7 +19,7 @@ struct LlamaCppWeb : public TextCompletionI {
// this is a bad idea // this is a bad idea
static std::minstd_rand thread_local _rng; static std::minstd_rand thread_local _rng;
std::atomic<bool> _use_server_cache {false}; std::atomic<bool> _use_server_cache {true};
LlamaCppWeb( LlamaCppWeb(
ConfigModelI& conf ConfigModelI& conf

View File

@@ -10,7 +10,7 @@
struct MessagePromptBuilder { struct MessagePromptBuilder {
Contact3Registry& _cr; Contact3Registry& _cr;
const Contact3 _c; const Contact3 _c;
RegistryMessageModelI& _rmm; RegistryMessageModel& _rmm;
// lookup table, string_view since no name-components are changed // lookup table, string_view since no name-components are changed
entt::dense_map<Contact3, std::string_view> names; entt::dense_map<Contact3, std::string_view> names;

View File

@@ -138,7 +138,7 @@ RPBot::RPBot(
TextCompletionI& completion, TextCompletionI& completion,
ConfigModelI& conf, ConfigModelI& conf,
Contact3Registry& cr, Contact3Registry& cr,
RegistryMessageModelI& rmm, RegistryMessageModel& rmm,
MessageCommandDispatcher* mcd MessageCommandDispatcher* mcd
) : _completion(completion), _conf(conf), _cr(cr), _rmm(rmm), _mcd(mcd) { ) : _completion(completion), _conf(conf), _cr(cr), _rmm(rmm), _mcd(mcd) {
//system_prompt = R"sys(Transcript of a group chat, where Bob talks to online strangers. //system_prompt = R"sys(Transcript of a group chat, where Bob talks to online strangers.

View File

@@ -20,7 +20,7 @@ struct RPBot : public RegistryMessageModelEventI {
TextCompletionI& _completion; TextCompletionI& _completion;
ConfigModelI& _conf; ConfigModelI& _conf;
Contact3Registry& _cr; Contact3Registry& _cr;
RegistryMessageModelI& _rmm; RegistryMessageModel& _rmm;
MessageCommandDispatcher* _mcd; MessageCommandDispatcher* _mcd;
std::minstd_rand _rng{std::random_device{}()}; std::minstd_rand _rng{std::random_device{}()};
@@ -30,7 +30,7 @@ struct RPBot : public RegistryMessageModelEventI {
TextCompletionI& completion, TextCompletionI& completion,
ConfigModelI& conf, ConfigModelI& conf,
Contact3Registry& cr, Contact3Registry& cr,
RegistryMessageModelI& rmm, RegistryMessageModel& rmm,
MessageCommandDispatcher* mcd MessageCommandDispatcher* mcd
); );

View File

@@ -20,7 +20,7 @@ int main(void) {
} }
std::cerr << lcw._cli.host() << " " << lcw._cli.port() << " endpoint healthy\n"; std::cerr << lcw._cli.host() << " " << lcw._cli.port() << " endpoint healthy\n";
std::cerr << "The meaning of life is to" std::cout << "The meaning of life is to"
<< lcw.complete(nlohmann::json{ << lcw.complete(nlohmann::json{
{"prompt", "The meaning of life is to"}, {"prompt", "The meaning of life is to"},
{"min_p", 0.1}, // model dependent {"min_p", 0.1}, // model dependent
@@ -34,9 +34,9 @@ int main(void) {
}) })
<< "\n"; << "\n";
std::cerr << "-------------------------\n"; std::cout << "-------------------------\n";
std::cerr << "complete from select:\n"; std::cout << "complete from select:\n";
std::vector<std::string_view> possible { std::vector<std::string_view> possible {
" die", " die",
" die.", " die.",
@@ -46,12 +46,12 @@ int main(void) {
" Hi", " Hi",
}; };
for (size_t i = 0; i < 10; i++) { for (size_t i = 0; i < 10; i++) {
std::cerr << "The meaning of life is to"; std::cout << "The meaning of life is to";
auto res = lcw.completeSelect("The meaning of life is to", possible); auto res = lcw.completeSelect("The meaning of life is to", possible);
if (res < 0) { if (res < 0) {
std::cerr << " error\n"; std::cout << " error--\n";
} else { } else {
std::cerr << possible[res] << "\n"; std::cout << possible[res] << "\n";
} }
} }