Compare commits

..

No commits in common. "301900c507d5719374b82a3730c19ad66eb10820" and "f38da08334d1dfb3cdd9fb5f795d6083d808c12a" have entirely different histories.

7 changed files with 16 additions and 21 deletions

View File

@@ -37,7 +37,7 @@ SOLANA_PLUGIN_EXPORT uint32_t solana_plugin_start(struct SolanaAPI* solana_api)
auto* completion = PLUG_RESOLVE_INSTANCE(TextCompletionI);
auto* conf = PLUG_RESOLVE_INSTANCE(ConfigModelI);
auto* cr = PLUG_RESOLVE_INSTANCE_VERSIONED(Contact3Registry, "1");
auto* rmm = PLUG_RESOLVE_INSTANCE(RegistryMessageModelI);
auto* rmm = PLUG_RESOLVE_INSTANCE(RegistryMessageModel);
auto* mcd = PLUG_RESOLVE_INSTANCE(MessageCommandDispatcher);
// static store, could be anywhere tho

View File

@@ -77,7 +77,7 @@ int64_t LlamaCppWeb::completeSelect(const std::string_view prompt, const std::ve
}
//grammar += ")";
std::cerr << "generated grammar:\n" << grammar << "\n";
//std::cout << "generated grammar:\n" << grammar << "\n";
auto ret = complete(nlohmann::json{
{"prompt", prompt},
@@ -89,23 +89,19 @@ int64_t LlamaCppWeb::completeSelect(const std::string_view prompt, const std::ve
{"top_p", 1.0}, // disable
{"n_predict", 256}, // unlikely to ever be so high
{"seed", _rng()},
{"ignore_eos", true},
{"cache_prompt", static_cast<bool>(_use_server_cache)},
});
if (ret.empty()) {
assert("ret empty" && false);
return -2;
}
if (!ret.count("content")) {
assert("no content" && false);
return -3;
}
std::string selected = ret.at("content");
if (selected.empty()) {
assert("content empty" && false);
return -4;
}
@@ -115,7 +111,6 @@ int64_t LlamaCppWeb::completeSelect(const std::string_view prompt, const std::ve
}
}
std::cerr << "content does not contain match\n";
std::cerr << "complete failed j:'" << ret.dump() << "'\n";
return -5;
}
@@ -130,7 +125,7 @@ std::string LlamaCppWeb::completeLine(const std::string_view prompt) {
{"top_p", 1.0}, // disable
{"n_predict", 400},
{"seed", _rng()},
{"stop", nlohmann::json::array({"\n"})},
{"stop", {"\n"}},
{"cache_prompt", static_cast<bool>(_use_server_cache)},
});
@@ -152,7 +147,7 @@ nlohmann::json LlamaCppWeb::complete(const nlohmann::json& request_j) {
// steaming instead would be better
_cli.set_read_timeout(std::chrono::minutes(10));
std::cerr << "j dump: '" << request_j.dump(-1, ' ', true) << "'\n";
//std::cout << "j dump: '" << request_j.dump(-1, ' ', true) << "'\n";
auto res = _cli.Post("/completion", request_j.dump(-1, ' ', true), "application/json");
@@ -164,7 +159,7 @@ nlohmann::json LlamaCppWeb::complete(const nlohmann::json& request_j) {
//res->body.empty() ||
//res->get_header_value("Content-Type") != "application/json"
) {
std::cerr << "error posting: '" << res->body << "'\n";
std::cerr << "error posting\n";
return {};
}

View File

@@ -19,7 +19,7 @@ struct LlamaCppWeb : public TextCompletionI {
// this is a bad idea
static std::minstd_rand thread_local _rng;
std::atomic<bool> _use_server_cache {false};
std::atomic<bool> _use_server_cache {true};
LlamaCppWeb(
ConfigModelI& conf

View File

@@ -10,7 +10,7 @@
struct MessagePromptBuilder {
Contact3Registry& _cr;
const Contact3 _c;
RegistryMessageModelI& _rmm;
RegistryMessageModel& _rmm;
// lookup table, string_view since no name-components are changed
entt::dense_map<Contact3, std::string_view> names;

View File

@@ -138,7 +138,7 @@ RPBot::RPBot(
TextCompletionI& completion,
ConfigModelI& conf,
Contact3Registry& cr,
RegistryMessageModelI& rmm,
RegistryMessageModel& rmm,
MessageCommandDispatcher* mcd
) : _completion(completion), _conf(conf), _cr(cr), _rmm(rmm), _mcd(mcd) {
//system_prompt = R"sys(Transcript of a group chat, where Bob talks to online strangers.

View File

@@ -20,7 +20,7 @@ struct RPBot : public RegistryMessageModelEventI {
TextCompletionI& _completion;
ConfigModelI& _conf;
Contact3Registry& _cr;
RegistryMessageModelI& _rmm;
RegistryMessageModel& _rmm;
MessageCommandDispatcher* _mcd;
std::minstd_rand _rng{std::random_device{}()};
@@ -30,7 +30,7 @@ struct RPBot : public RegistryMessageModelEventI {
TextCompletionI& completion,
ConfigModelI& conf,
Contact3Registry& cr,
RegistryMessageModelI& rmm,
RegistryMessageModel& rmm,
MessageCommandDispatcher* mcd
);

View File

@@ -20,7 +20,7 @@ int main(void) {
}
std::cerr << lcw._cli.host() << " " << lcw._cli.port() << " endpoint healthy\n";
std::cerr << "The meaning of life is to"
std::cout << "The meaning of life is to"
<< lcw.complete(nlohmann::json{
{"prompt", "The meaning of life is to"},
{"min_p", 0.1}, // model dependent
@@ -34,9 +34,9 @@ int main(void) {
})
<< "\n";
std::cerr << "-------------------------\n";
std::cout << "-------------------------\n";
std::cerr << "complete from select:\n";
std::cout << "complete from select:\n";
std::vector<std::string_view> possible {
" die",
" die.",
@@ -46,12 +46,12 @@ int main(void) {
" Hi",
};
for (size_t i = 0; i < 10; i++) {
std::cerr << "The meaning of life is to";
std::cout << "The meaning of life is to";
auto res = lcw.completeSelect("The meaning of life is to", possible);
if (res < 0) {
std::cerr << " error\n";
std::cout << " error--\n";
} else {
std::cerr << possible[res] << "\n";
std::cout << possible[res] << "\n";
}
}