Skip to content

Commit

Permalink
WIP: ollama and OpenAI preferences WITHOUT MF restart #1539
Browse files Browse the repository at this point in the history
  • Loading branch information
dvorka committed Mar 29, 2024
1 parent ed1b5a5 commit b47802f
Show file tree
Hide file tree
Showing 12 changed files with 91 additions and 50 deletions.
45 changes: 22 additions & 23 deletions app/src/qt/dialogs/configuration_dialog.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -92,6 +92,8 @@ void ConfigurationDialog::saveSlot()
mindTab->save();
wingmanTab->save();

// callback: notify components on config change using signals defined in
// the main window presenter
emit saveConfigSignal();
}

Expand Down Expand Up @@ -686,26 +688,30 @@ ConfigurationDialog::WingmanOpenAiTab::WingmanOpenAiTab(QWidget* parent)
tr(
"<html><a href='https://openai.com'>OpenAI</a> LLM provider configuration:\n"
"<ul>"
"<li><a href='https://platform.openai.com/api-keys'>Generate</a> an OpenAI API key.</li>"
"<li>Generate new OpenAI API key at <a href='https://platform.openai.com/api-keys'>openai.com</a>.</li>"
"<li>Set the API key:"
"<br>a) either set the <b>%1</b> environment variable<br/>"
"with the API key<br/>"
"b) or paste the API key below to save it <font color='#ff0000'>unencrypted</font> to<br/>"
"<b>.mindforger.md</b> file in your home directory.</li>"
"<li><font color='#ff0000'>Restart</font> MindForger to apply the change.</li>"
"</ul>"
).arg(ENV_VAR_OPENAI_API_KEY));
helpLabel->setVisible(!config.canWingmanOpenAi());
apiKeyLabel = new QLabel(tr("<br>API key:"));
apiKeyLabel->setVisible(helpLabel->isVisible());
apiKeyEdit = new QLineEdit(this);
apiKeyEdit->setVisible(helpLabel->isVisible());
clearApiKeyButton = new QPushButton(tr("Clear OpenAI API Key"), this);
setOllamaButton = new QPushButton(tr("Set ollama"), this); // enabled on valid config > add ollama to drop down > choose it in drop down
clearApiKeyButton = new QPushButton(tr("Clear API Key"), this);
clearApiKeyButton->setVisible(helpLabel->isVisible());

configuredLabel = new QLabel(
tr("The OpenAI API key is configured using the environment variable."), this);
configuredLabel->setVisible(!helpLabel->isVisible());

QVBoxLayout* llmProvidersLayout = new QVBoxLayout();
llmProvidersLayout->addWidget(helpLabel);
llmProvidersLayout->addWidget(apiKeyLabel);
llmProvidersLayout->addWidget(apiKeyEdit);
llmProvidersLayout->addWidget(clearApiKeyButton);
llmProvidersLayout->addWidget(configuredLabel);
Expand All @@ -725,6 +731,7 @@ ConfigurationDialog::WingmanOpenAiTab::~WingmanOpenAiTab()
{
delete helpLabel;
delete configuredLabel;
delete apiKeyLabel;
delete apiKeyEdit;
delete clearApiKeyButton;
}
Expand All @@ -737,22 +744,14 @@ void ConfigurationDialog::WingmanOpenAiTab::clearApiKeySlot()
tr("OpenAI API Key Cleared"),
tr(
"API key has been cleared from the configuration. "
"Please close the configuration dialog with the OK button "
"and restart MindForger to apply this change.")
"Please close the configuration dialog with the OK button to finish "
"the reconfiguration")
);
}

void ConfigurationDialog::WingmanOpenAiTab::refresh()
{
apiKeyEdit->setText(QString::fromStdString(config.getWingmanOpenAiApiKey()));

if(apiKeyEdit->text().size() == 0) {
clearApiKeyButton->setVisible(false);
} else {
if(helpLabel->isVisible()) {
clearApiKeyButton->setVisible(true);
}
}
}

void ConfigurationDialog::WingmanOpenAiTab::save()
Expand All @@ -771,18 +770,19 @@ ConfigurationDialog::WingmanOllamaTab::WingmanOllamaTab(QWidget* parent)
{
helpLabel = new QLabel(
tr(
"<html>ollama LLM provider configuration:\n"
"<html><a href='https://ollama.com'>ollama</a> LLM provider configuration:\n"
"<ul>"
"<li>Set ollama server URL - default is <a href='http://localhost:11434'>http://localhost:11434</a></li>"
"<li><font color='#ff0000'>Restart</font> MindForger to apply the change.</li>"
"<li>Set your ollama server URL - default is <a href='http://localhost:11434'>http://localhost:11434</a></li>"
"</ul>"
).arg(ENV_VAR_OPENAI_API_KEY));
helpLabel->setVisible(!config.canWingmanOllama());
urlLabel = new QLabel(tr("<br>ollama server URL:"));
urlEdit = new QLineEdit(this);
clearUrlButton = new QPushButton(tr("Clear URL"), this);

QVBoxLayout* llmProvidersLayout = new QVBoxLayout();
llmProvidersLayout->addWidget(helpLabel);
llmProvidersLayout->addWidget(urlLabel);
llmProvidersLayout->addWidget(urlEdit);
llmProvidersLayout->addWidget(clearUrlButton);
llmProvidersLayout->addStretch();
Expand All @@ -800,6 +800,7 @@ ConfigurationDialog::WingmanOllamaTab::WingmanOllamaTab(QWidget* parent)
ConfigurationDialog::WingmanOllamaTab::~WingmanOllamaTab()
{
delete helpLabel;
delete urlLabel;
delete urlEdit;
delete clearUrlButton;
}
Expand All @@ -813,26 +814,24 @@ void ConfigurationDialog::WingmanOllamaTab::clearUrlSlot()
tr(
"ollama URL has been cleared from the configuration. "
"Please close the configuration dialog with the OK button "
"and restart MindForger to apply this change.")
"to finish the reconfiguration.")
);
}

void ConfigurationDialog::WingmanOllamaTab::refresh()
{
urlEdit->setText(QString::fromStdString(config.getWingmanOllamaUrl()));

if(urlEdit->text().size() == 0) {
clearUrlButton->setVisible(false);
} else {
clearUrlButton->setVisible(true);
}
}

void ConfigurationDialog::WingmanOllamaTab::save()
{
config.setWingmanOllamaUrl(urlEdit->text().toStdString());
}

/*
* TODO: Wingman Open AI API tab
*/

/*
* Wingman tab
*/
Expand Down
2 changes: 2 additions & 0 deletions app/src/qt/dialogs/configuration_dialog.h
Original file line number Diff line number Diff line change
Expand Up @@ -85,6 +85,7 @@ class ConfigurationDialog::WingmanOpenAiTab : public QWidget

QLabel* helpLabel;
QLabel* configuredLabel;
QLabel* apiKeyLabel;
QLineEdit* apiKeyEdit;
QPushButton* clearApiKeyButton;

Expand All @@ -110,6 +111,7 @@ class ConfigurationDialog::WingmanOllamaTab : public QWidget
Configuration& config;

QLabel* helpLabel;
QLabel* urlLabel;
QLineEdit* urlEdit;
QPushButton* clearUrlButton;

Expand Down
9 changes: 6 additions & 3 deletions app/src/qt/main_window_presenter.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -214,7 +214,7 @@ MainWindowPresenter::MainWindowPresenter(MainWindowView& view)
QObject::connect(distributor, SIGNAL(finished()), distributor, SLOT(deleteLater()));
distributor->start();

// send signal to components to be updated on a configuration change
// send signal to components to be updated on a config change (callback)
QObject::connect(configDialog, SIGNAL(saveConfigSignal()), this, SLOT(handleMindPreferences()));
QObject::connect(configDialog, SIGNAL(saveConfigSignal()), orloj->getOutlineHeaderEdit()->getView()->getHeaderEditor(), SLOT(slotConfigurationUpdated()));
QObject::connect(configDialog, SIGNAL(saveConfigSignal()), orloj->getNoteEdit()->getView()->getNoteEditor(), SLOT(slotConfigurationUpdated()));
Expand Down Expand Up @@ -3337,6 +3337,9 @@ void MainWindowPresenter::handleMindPreferences()
{
mdConfigRepresentation->save(config);

// re-initialize Wingman
mind->initWingman();

view.getToolBar()->setVisible(config.isUiShowToolbar());
view.getOrloj()->getNoteView()->setZoomFactor(config.getUiHtmlZoomFactor());
view.getOrloj()->getOutlineHeaderView()->setZoomFactor(config.getUiHtmlZoomFactor());
Expand All @@ -3349,8 +3352,8 @@ void MainWindowPresenter::handleMindPreferences()
view.getOrloj()->getNoteEdit()->getButtonsPanel()->setVisible(!config.isUiExpertMode());
view.getOrloj()->getOutlineHeaderEdit()->getButtonsPanel()->setVisible(!config.isUiExpertMode());

// IMPROVE: highlighter should NOT reference lib configuration to honor MVP, spell check
// setting to be pushed to highlighter from here
// IMPROVE: highlighter should NOT reference lib configuration to honor MVP,
// spell check setting to be pushed to highlighter from here
}

void MainWindowPresenter::doActionViewTerminal()
Expand Down
10 changes: 7 additions & 3 deletions lib/src/config/configuration.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,10 @@ using namespace m8r::filesystem;

namespace m8r {

const string LLM_MODEL_GPT35 = string{"gpt-3.5"};
// TODO: an ollama server does not necessarily host llama2, so llama2 should not be offered as the default model
const string LLM_MODEL_LLAMA2 = string{"llama2"};

const string KnowledgeTool::TOOL_PHRASE = string{"<<PHRASE>>"};

// non-primitive constants initializations
Expand All @@ -38,8 +42,8 @@ const string Configuration::DEFAULT_UI_THEME_NAME = string{UI_DEFAULT_THEME};
const string Configuration::DEFAULT_UI_HTML_CSS_THEME = string{UI_DEFAULT_HTML_CSS_THEME};
const string Configuration::DEFAULT_EDITOR_FONT= string{UI_DEFAULT_EDITOR_FONT};
const string Configuration::DEFAULT_TIME_SCOPE = string{"0y0m0d0h0m"};
const string Configuration::DEFAULT_WINGMAN_LLM_MODEL_OPENAI = LLM_MODEL_OPENAI_GPT35TURBO;
const string Configuration::DEFAULT_WINGMAN_LLM_MODEL_OLLAMA = string{"llama2"};
const string Configuration::DEFAULT_WINGMAN_LLM_MODEL_OPENAI = LLM_MODEL_GPT35;
const string Configuration::DEFAULT_WINGMAN_LLM_MODEL_OLLAMA = LLM_MODEL_LLAMA2;

Configuration::Configuration()
: asyncMindThreshold{},
Expand Down Expand Up @@ -479,7 +483,7 @@ bool Configuration::initWingmanOpenAi() {
bool Configuration::initWingmanOllama() {
MF_DEBUG(" Configuration::initWingmanOllama()" << endl);
if(canWingmanOllama()) {
// OPTIONAL: LLM model
// OPTIONAL: LLM model
// NOTE(review): the assignment below sets wingmanOpenAiLlm, but this is the
// ollama initializer — it should probably set wingmanOllamaLlm instead; verify.
if(wingmanOllamaLlm.size() <= 0) {
MF_DEBUG(" Wingman LLM model for ollama set to default: " << DEFAULT_WINGMAN_LLM_MODEL_OLLAMA << endl);
wingmanOpenAiLlm = DEFAULT_WINGMAN_LLM_MODEL_OLLAMA;
Expand Down
15 changes: 9 additions & 6 deletions lib/src/config/configuration.h
Original file line number Diff line number Diff line change
Expand Up @@ -349,7 +349,9 @@ class Configuration {
- NONE
- MOCK
- OPEN_AI
- configuration detection whether particular Wingman provider is available:
- OLLAMA
- OPEN_AI_API
- configuration detection whether any of Wingman providers is configured (available):
- bool can<provider>()
- Wingman initialization from the configuration perspective
(all fields, like API key, are set ...)
Expand All @@ -359,8 +361,9 @@ class Configuration {
- Wingman is available from the configuration perspective
- MIND:
- constructor:
if Wingman configuration is available,
then instantiate a Wingman @ configured provider
if any Wingman configuration is available,
then instantiate the Wingman using configured provider
else no Wingman
- if configuration.isWingman()
then mind.wingman = <provider>Wingman()
- Wingman AVAILABILITY to the runtime:
Expand All @@ -369,8 +372,8 @@ class Configuration {
- configuration CHANGE detection:
- mind.llmProvider used to detect configuration change
- on change: switch Wingman instance
- APP WINDOW / WINGMAN DIALOG:
- configuration CHANGE detection:
- APP WINDOW / WINGMAN CHAT WINDOW:
- configuration CHANGE detection - windows are registered using a callback so that they are notified on change:
- appWindow.llmProvider used to detect configuration change
- on change: re-init Wingman DIALOG (refresh pre-defined prompts)
*/
Expand All @@ -379,7 +382,7 @@ class Configuration {
std::string wingmanOpenAiLlm;
std::string wingmanOllamaUrl; // base URL like http://localhost:11434
std::string wingmanOllamaLlm;

TimeScope timeScope;
std::string timeScopeAsString;
std::vector<std::string> tagsScope;
Expand Down
2 changes: 2 additions & 0 deletions lib/src/mind/ai/llm/mock_wingman.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -24,8 +24,10 @@ using namespace std;

MockWingman::MockWingman(const string& llmModel)
: Wingman(WingmanLlmProviders::WINGMAN_PROVIDER_MOCK),
llmModels{},
llmModel{llmModel}
{
this->llmModels.push_back(this->llmModel);
}

MockWingman::~MockWingman()
Expand Down
10 changes: 10 additions & 0 deletions lib/src/mind/ai/llm/mock_wingman.h
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,11 @@ namespace m8r {
*/
class MockWingman: Wingman
{
public:
static constexpr const auto LLM_MODEL_MOCK = "mock-llm-model";

private:
std::vector<std::string> llmModels;
std::string llmModel;

public:
Expand All @@ -38,9 +43,14 @@ class MockWingman: Wingman
MockWingman& operator =(const MockWingman&&) = delete;
~MockWingman() override;

virtual std::vector<std::string>& listModels() {
return this->llmModels;
}

std::string getWingmanLlmModel() const { return llmModel; }

virtual void chat(CommandWingmanChat& command) override;

};

}
Expand Down
14 changes: 9 additions & 5 deletions lib/src/mind/ai/llm/openai_wingman.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -53,18 +53,22 @@ OpenAiWingman::OpenAiWingman(const string& apiKey)
{
MF_DEBUG("OpenAiWingman::OpenAiWingman() apiKey: " << apiKey << endl);

// IMPROVE list models using OpenAI API - will many models be confusing for user?
llmModels.push_back(LLM_GPT_35_TURBO);
llmModels.push_back(LLM_GPT_4);
listModels();
}

OpenAiWingman::~OpenAiWingman()
{
}

std::vector<std::string>& OpenAiWingman::listModels()
std::vector<std::string>& OpenAiWingman::listModels()
{
return this->llmModels;
llmModels.clear();

// TODO: list models dynamically using the OpenAI API — would offering many models confuse the user?
llmModels.push_back(LLM_GPT_35_TURBO);
llmModels.push_back(LLM_GPT_4);

return llmModels;
}

// TODO refactor to parent class so that all wingmans can use it
Expand Down
10 changes: 10 additions & 0 deletions lib/src/mind/ai/llm/openai_wingman.h
Original file line number Diff line number Diff line change
Expand Up @@ -42,8 +42,11 @@ class OpenAiWingman: Wingman
static const std::string LLM_MODEL_OPENAI_GPT4;

private:
// API key needed to access OpenAI API endpoint
std::string apiKey;
// Names of LLM models provided by the OpenAI API endpoint
std::vector<std::string> llmModels;
// Name of the LLM model which is used by Wingman - must be one of llmModels ^
std::string defaultLlmModel;

void curlGet(CommandWingmanChat& command);
Expand All @@ -56,7 +59,14 @@ class OpenAiWingman: Wingman
OpenAiWingman& operator =(const OpenAiWingman&&) = delete;
~OpenAiWingman() override;

/**
* @brief List (and cache) LLM model names
*/
virtual std::vector<std::string>& listModels() override;

/**
* @brief Chat with configured LLM model.
*/
virtual void chat(CommandWingmanChat& command) override;
};

Expand Down
4 changes: 2 additions & 2 deletions lib/src/mind/ai/llm/wingman.h
Original file line number Diff line number Diff line change
Expand Up @@ -164,12 +164,12 @@ class Wingman
}

/**
* List available LLM models.
* @brief List available LLM model names.
*/
virtual std::vector<std::string>& listModels() = 0;

/**
* Chat with given LLM model.
* @brief Chat with LLM model specified by the 'command'.
*/
virtual void chat(CommandWingmanChat& command) = 0;
};
Expand Down
Loading

0 comments on commit b47802f

Please sign in to comment.