diff --git a/app/src/qt/dialogs/configuration_dialog.cpp b/app/src/qt/dialogs/configuration_dialog.cpp
index f0846e6a..b0818dfc 100644
--- a/app/src/qt/dialogs/configuration_dialog.cpp
+++ b/app/src/qt/dialogs/configuration_dialog.cpp
@@ -92,6 +92,8 @@ void ConfigurationDialog::saveSlot()
mindTab->save();
wingmanTab->save();
+ // callback: notify components on config change using signals defined in
+ // the main window presenter
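+ // (connected in MainWindowPresenter to handleMindPreferences() and to the
+ // editors' slotConfigurationUpdated() slots)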
emit saveConfigSignal();
}
@@ -686,26 +688,30 @@ ConfigurationDialog::WingmanOpenAiTab::WingmanOpenAiTab(QWidget* parent)
tr(
"OpenAI LLM provider configuration:\n"
"\n"
- "- Generate an OpenAI API key.\n"
+ "- Generate new OpenAI API key at openai.com.\n"
"- Set the API key:"
"\n    a) either set the %1 environment variable\n"
"with the API key\n"
"    b) or paste the API key below to save it unencrypted to\n"
".mindforger.md file in your home directory. "
- "- Restart MindForger to apply the change.\n"
"\n"
).arg(ENV_VAR_OPENAI_API_KEY));
helpLabel->setVisible(!config.canWingmanOpenAi());
+ apiKeyLabel = new QLabel(tr("\nAPI key:"));
+ apiKeyLabel->setVisible(helpLabel->isVisible());
apiKeyEdit = new QLineEdit(this);
apiKeyEdit->setVisible(helpLabel->isVisible());
- clearApiKeyButton = new QPushButton(tr("Clear OpenAI API Key"), this);
+ setOllamaButton = new QPushButton(tr("Set ollama"), this); // enabled on valid config > add ollama to drop down > choose it in drop down
+ clearApiKeyButton = new QPushButton(tr("Clear API Key"), this);
clearApiKeyButton->setVisible(helpLabel->isVisible());
+
configuredLabel = new QLabel(
tr("The OpenAI API key is configured using the environment variable."), this);
configuredLabel->setVisible(!helpLabel->isVisible());
QVBoxLayout* llmProvidersLayout = new QVBoxLayout();
llmProvidersLayout->addWidget(helpLabel);
+ llmProvidersLayout->addWidget(apiKeyLabel);
llmProvidersLayout->addWidget(apiKeyEdit);
llmProvidersLayout->addWidget(clearApiKeyButton);
llmProvidersLayout->addWidget(configuredLabel);
@@ -725,6 +731,7 @@ ConfigurationDialog::WingmanOpenAiTab::~WingmanOpenAiTab()
{
delete helpLabel;
delete configuredLabel;
+ delete apiKeyLabel;
delete apiKeyEdit;
delete clearApiKeyButton;
}
@@ -737,22 +744,14 @@ void ConfigurationDialog::WingmanOpenAiTab::clearApiKeySlot()
tr("OpenAI API Key Cleared"),
tr(
"API key has been cleared from the configuration. "
- "Please close the configuration dialog with the OK button "
- "and restart MindForger to apply this change.")
+ "Please close the configuration dialog with the OK button to finish "
+ "the reconfiguration")
);
}
void ConfigurationDialog::WingmanOpenAiTab::refresh()
{
apiKeyEdit->setText(QString::fromStdString(config.getWingmanOpenAiApiKey()));
-
- if(apiKeyEdit->text().size() == 0) {
- clearApiKeyButton->setVisible(false);
- } else {
- if(helpLabel->isVisible()) {
- clearApiKeyButton->setVisible(true);
- }
- }
}
void ConfigurationDialog::WingmanOpenAiTab::save()
@@ -771,18 +770,19 @@ ConfigurationDialog::WingmanOllamaTab::WingmanOllamaTab(QWidget* parent)
{
helpLabel = new QLabel(
tr(
- "ollama LLM provider configuration:\n"
+ "ollama LLM provider configuration:\n"
""
).arg(ENV_VAR_OPENAI_API_KEY));
helpLabel->setVisible(!config.canWingmanOllama());
+ urlLabel = new QLabel(tr("\nollama server URL:"));
urlEdit = new QLineEdit(this);
clearUrlButton = new QPushButton(tr("Clear URL"), this);
QVBoxLayout* llmProvidersLayout = new QVBoxLayout();
llmProvidersLayout->addWidget(helpLabel);
+ llmProvidersLayout->addWidget(urlLabel);
llmProvidersLayout->addWidget(urlEdit);
llmProvidersLayout->addWidget(clearUrlButton);
llmProvidersLayout->addStretch();
@@ -800,6 +800,7 @@ ConfigurationDialog::WingmanOllamaTab::WingmanOllamaTab(QWidget* parent)
ConfigurationDialog::WingmanOllamaTab::~WingmanOllamaTab()
{
delete helpLabel;
+ delete urlLabel;
delete urlEdit;
delete clearUrlButton;
}
@@ -813,19 +814,13 @@ void ConfigurationDialog::WingmanOllamaTab::clearUrlSlot()
tr(
"ollama URL has been cleared from the configuration. "
"Please close the configuration dialog with the OK button "
- "and restart MindForger to apply this change.")
+ "to finish the reconfiguration.")
);
}
void ConfigurationDialog::WingmanOllamaTab::refresh()
{
urlEdit->setText(QString::fromStdString(config.getWingmanOllamaUrl()));
-
- if(urlEdit->text().size() == 0) {
- clearUrlButton->setVisible(false);
- } else {
- clearUrlButton->setVisible(true);
- }
}
void ConfigurationDialog::WingmanOllamaTab::save()
@@ -833,6 +828,10 @@ void ConfigurationDialog::WingmanOllamaTab::save()
config.setWingmanOllamaUrl(urlEdit->text().toStdString());
}
+/*
+ * TODO: Wingman Open AI API tab
+ */
+
/*
* Wingman tab
*/
diff --git a/app/src/qt/dialogs/configuration_dialog.h b/app/src/qt/dialogs/configuration_dialog.h
index d3a5480f..5cabcc5c 100644
--- a/app/src/qt/dialogs/configuration_dialog.h
+++ b/app/src/qt/dialogs/configuration_dialog.h
@@ -85,6 +85,7 @@ class ConfigurationDialog::WingmanOpenAiTab : public QWidget
QLabel* helpLabel;
QLabel* configuredLabel;
+ QLabel* apiKeyLabel;
QLineEdit* apiKeyEdit;
QPushButton* clearApiKeyButton;
@@ -110,6 +111,7 @@ class ConfigurationDialog::WingmanOllamaTab : public QWidget
Configuration& config;
QLabel* helpLabel;
+ QLabel* urlLabel;
QLineEdit* urlEdit;
QPushButton* clearUrlButton;
diff --git a/app/src/qt/main_window_presenter.cpp b/app/src/qt/main_window_presenter.cpp
index f829e917..0b671e57 100644
--- a/app/src/qt/main_window_presenter.cpp
+++ b/app/src/qt/main_window_presenter.cpp
@@ -214,7 +214,7 @@ MainWindowPresenter::MainWindowPresenter(MainWindowView& view)
QObject::connect(distributor, SIGNAL(finished()), distributor, SLOT(deleteLater()));
distributor->start();
- // send signal to components to be updated on a configuration change
+ // send signal to components to be updated on a config change (callback)
QObject::connect(configDialog, SIGNAL(saveConfigSignal()), this, SLOT(handleMindPreferences()));
QObject::connect(configDialog, SIGNAL(saveConfigSignal()), orloj->getOutlineHeaderEdit()->getView()->getHeaderEditor(), SLOT(slotConfigurationUpdated()));
QObject::connect(configDialog, SIGNAL(saveConfigSignal()), orloj->getNoteEdit()->getView()->getNoteEditor(), SLOT(slotConfigurationUpdated()));
@@ -3337,6 +3337,9 @@ void MainWindowPresenter::handleMindPreferences()
{
mdConfigRepresentation->save(config);
+ // re-initialize Wingman
+ mind->initWingman();
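+ // (LLM provider changes now take effect without restarting MindForger)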
+
view.getToolBar()->setVisible(config.isUiShowToolbar());
view.getOrloj()->getNoteView()->setZoomFactor(config.getUiHtmlZoomFactor());
view.getOrloj()->getOutlineHeaderView()->setZoomFactor(config.getUiHtmlZoomFactor());
@@ -3349,8 +3352,8 @@ void MainWindowPresenter::handleMindPreferences()
view.getOrloj()->getNoteEdit()->getButtonsPanel()->setVisible(!config.isUiExpertMode());
view.getOrloj()->getOutlineHeaderEdit()->getButtonsPanel()->setVisible(!config.isUiExpertMode());
- // IMPROVE: highlighter should NOT reference lib configuration to honor MVP, spell check
- // setting to be pushed to highlighter from here
+ // IMPROVE: highlighter should NOT reference lib configuration to honor MVP,
+ // spell check setting to be pushed to highlighter from here
}
void MainWindowPresenter::doActionViewTerminal()
diff --git a/lib/src/config/configuration.cpp b/lib/src/config/configuration.cpp
index 4539d1d3..dfbbbaa7 100644
--- a/lib/src/config/configuration.cpp
+++ b/lib/src/config/configuration.cpp
@@ -29,6 +29,10 @@ using namespace m8r::filesystem;
namespace m8r {
+const string LLM_MODEL_GPT35 = string{"gpt-3.5"};
+// TODO ollama does NOT have to host llama2 > it should NOT be offered as default model
+const string LLM_MODEL_LLAMA2 = string{"llama2"};
+
const string KnowledgeTool::TOOL_PHRASE = string{"<<PHRASE>>"};
// non-primitive constants initializations
@@ -38,8 +42,8 @@ const string Configuration::DEFAULT_UI_THEME_NAME = string{UI_DEFAULT_THEME};
const string Configuration::DEFAULT_UI_HTML_CSS_THEME = string{UI_DEFAULT_HTML_CSS_THEME};
const string Configuration::DEFAULT_EDITOR_FONT= string{UI_DEFAULT_EDITOR_FONT};
const string Configuration::DEFAULT_TIME_SCOPE = string{"0y0m0d0h0m"};
-const string Configuration::DEFAULT_WINGMAN_LLM_MODEL_OPENAI = LLM_MODEL_OPENAI_GPT35TURBO;
-const string Configuration::DEFAULT_WINGMAN_LLM_MODEL_OLLAMA = string{"llama2"};
+const string Configuration::DEFAULT_WINGMAN_LLM_MODEL_OPENAI = LLM_MODEL_GPT35;
+const string Configuration::DEFAULT_WINGMAN_LLM_MODEL_OLLAMA = LLM_MODEL_LLAMA2;
Configuration::Configuration()
: asyncMindThreshold{},
@@ -479,7 +483,7 @@ bool Configuration::initWingmanOpenAi() {
bool Configuration::initWingmanOllama() {
MF_DEBUG(" Configuration::initWingmanOllama()" << endl);
if(canWingmanOllama()) {
- // OPTIONAL: LLM model
+ // OPTIONAL: LLM model
if(wingmanOllamaLlm.size() <= 0) {
MF_DEBUG(" Wingman LLM model for ollama set to default: " << DEFAULT_WINGMAN_LLM_MODEL_OLLAMA << endl);
wingmanOpenAiLlm = DEFAULT_WINGMAN_LLM_MODEL_OLLAMA;
diff --git a/lib/src/config/configuration.h b/lib/src/config/configuration.h
index 0eb1528f..347279cb 100644
--- a/lib/src/config/configuration.h
+++ b/lib/src/config/configuration.h
@@ -349,7 +349,9 @@ class Configuration {
- NONE
- MOCK
- OPEN_AI
- - configuration detection whether particular Wingman provider is available:
+ - OLLAMA
+ - OPEN_AI_API
+ - configuration detection whether any of the Wingman providers is configured (available):
- bool can()
- Wingman initialization from the configuration perspective
(all fields, like API key, are set ...)
@@ -359,8 +361,9 @@ class Configuration {
- Wingman is available from the configuration perspective
- MIND:
- constructor:
- if Wingman configuration is available,
- then instantiate a Wingman @ configured provider
+ if any Wingman configuration is available,
+ then instantiate the Wingman using the configured provider
+ else no Wingman
- if configuration.isWingman()
then mind.wingman = Wingman()
- Wingman AVAILABILITY to the runtime:
@@ -369,8 +372,8 @@ class Configuration {
- configuration CHANGE detection:
- mind.llmProvider used to detect configuration change
- on change: switch Wingman instance
- - APP WINDOW / WINGMAN DIALOG:
- - configuration CHANGE detection:
+ - APP WINDOW / WINGMAN CHAT WINDOW:
+ - configuration CHANGE detection - windows register a callback to be notified:
- appWindow.llmProvider used to detect configuration change
- on change: re-init Wingman DIALOG (refresh pre-defined prompts)
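+ - in this change: configDialog's saveConfigSignal() > MainWindowPresenter::handleMindPreferences() > mind->initWingman()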
*/
@@ -379,7 +382,7 @@ class Configuration {
std::string wingmanOpenAiLlm;
std::string wingmanOllamaUrl; // base URL like http://localhost:11434
std::string wingmanOllamaLlm;
-
+
TimeScope timeScope;
std::string timeScopeAsString;
std::vector<std::string> tagsScope;
diff --git a/lib/src/mind/ai/llm/mock_wingman.cpp b/lib/src/mind/ai/llm/mock_wingman.cpp
index 481f9379..c1d2883c 100644
--- a/lib/src/mind/ai/llm/mock_wingman.cpp
+++ b/lib/src/mind/ai/llm/mock_wingman.cpp
@@ -24,8 +24,10 @@ using namespace std;
MockWingman::MockWingman(const string& llmModel)
: Wingman(WingmanLlmProviders::WINGMAN_PROVIDER_MOCK),
+ llmModels{},
llmModel{llmModel}
{
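+ // register the mock model so that listModels() returns it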
+ this->llmModels.push_back(this->llmModel);
}
MockWingman::~MockWingman()
diff --git a/lib/src/mind/ai/llm/mock_wingman.h b/lib/src/mind/ai/llm/mock_wingman.h
index 8ca9a8da..a102e1fe 100644
--- a/lib/src/mind/ai/llm/mock_wingman.h
+++ b/lib/src/mind/ai/llm/mock_wingman.h
@@ -28,6 +28,11 @@ namespace m8r {
*/
class MockWingman: Wingman
{
+public:
+ static constexpr const auto LLM_MODEL_MOCK = "mock-llm-model";
+
+private:
+ std::vector<std::string> llmModels;
std::string llmModel;
public:
@@ -38,9 +43,14 @@ class MockWingman: Wingman
MockWingman& operator =(const MockWingman&&) = delete;
~MockWingman() override;
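+ /**
+  * @brief List mock LLM model names (just the single mock model).
+  */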
+ virtual std::vector<std::string>& listModels() {
+ return this->llmModels;
+ }
+
std::string getWingmanLlmModel() const { return llmModel; }
virtual void chat(CommandWingmanChat& command) override;
+
};
}
diff --git a/lib/src/mind/ai/llm/openai_wingman.cpp b/lib/src/mind/ai/llm/openai_wingman.cpp
index 15cdb13e..0fc16b62 100644
--- a/lib/src/mind/ai/llm/openai_wingman.cpp
+++ b/lib/src/mind/ai/llm/openai_wingman.cpp
@@ -53,18 +53,22 @@ OpenAiWingman::OpenAiWingman(const string& apiKey)
{
MF_DEBUG("OpenAiWingman::OpenAiWingman() apiKey: " << apiKey << endl);
- // IMPROVE list models using OpenAI API - will many models be confusing for user?
- llmModels.push_back(LLM_GPT_35_TURBO);
- llmModels.push_back(LLM_GPT_4);
+ listModels();
}
OpenAiWingman::~OpenAiWingman()
{
}
-std::vector<std::string>& OpenAiWingman::listModels()
+std::vector<std::string>& OpenAiWingman::listModels()
{
- return this->llmModels;
+ llmModels.clear();
+
+ // TODO list models using OpenAI API - will many models be confusing for user?
+ llmModels.push_back(LLM_GPT_35_TURBO);
+ llmModels.push_back(LLM_GPT_4);
+
+ return llmModels;
}
// TODO refactor to parent class so that all wingmans can use it
diff --git a/lib/src/mind/ai/llm/openai_wingman.h b/lib/src/mind/ai/llm/openai_wingman.h
index b3b4701d..705a6dd3 100644
--- a/lib/src/mind/ai/llm/openai_wingman.h
+++ b/lib/src/mind/ai/llm/openai_wingman.h
@@ -42,8 +42,11 @@ class OpenAiWingman: Wingman
static const std::string LLM_MODEL_OPENAI_GPT4;
private:
+ // API key needed to access OpenAI API endpoint
std::string apiKey;
+ // Names of LLM models provided by the OpenAI API endpoint
std::vector<std::string> llmModels;
+ // Name of the LLM model which is used by Wingman - must be one of llmModels ^
std::string defaultLlmModel;
void curlGet(CommandWingmanChat& command);
@@ -56,7 +59,14 @@ class OpenAiWingman: Wingman
OpenAiWingman& operator =(const OpenAiWingman&&) = delete;
~OpenAiWingman() override;
+ /**
+ * @brief List (and cache) LLM model names
+ */
virtual std::vector<std::string>& listModels() override;
+
+ /**
+ * @brief Chat with configured LLM model.
+ */
virtual void chat(CommandWingmanChat& command) override;
};
diff --git a/lib/src/mind/ai/llm/wingman.h b/lib/src/mind/ai/llm/wingman.h
index f916fcc7..c8757d3b 100644
--- a/lib/src/mind/ai/llm/wingman.h
+++ b/lib/src/mind/ai/llm/wingman.h
@@ -164,12 +164,12 @@ class Wingman
}
/**
- * List available LLM models.
+ * @brief List available LLM model names.
*/
virtual std::vector<std::string>& listModels() = 0;
/**
- * Chat with given LLM model.
+ * @brief Chat with LLM model specified by the 'command'.
*/
virtual void chat(CommandWingmanChat& command) = 0;
};
diff --git a/lib/src/mind/mind.cpp b/lib/src/mind/mind.cpp
index 29e91ed6..2b58b9c9 100644
--- a/lib/src/mind/mind.cpp
+++ b/lib/src/mind/mind.cpp
@@ -47,10 +47,10 @@ Mind::Mind(Configuration &configuration)
#endif
outlinesMap{},
exclusiveMind{},
+ wingman{nullptr},
timeScopeAspect{},
tagsScopeAspect{ontology},
- scopeAspect{timeScopeAspect, tagsScopeAspect},
- wingman{nullptr}
+ scopeAspect{timeScopeAspect, tagsScopeAspect}
{
ai = new Ai{memory, *this};
@@ -1465,8 +1465,8 @@ void Mind::initWingman()
wingman = nullptr;
}
wingman = (Wingman*)new OpenAiWingman{
- config.getWingmanOpenAiApiKey(),
- config.getWingmanOpenAiLlm()
+ config.getWingmanOpenAiApiKey()
+ // TODO config.getWingmanOpenAiLlm()
};
wingmanLlmProvider = config.getWingmanLlmProvider();
return;
@@ -1478,19 +1478,21 @@ void Mind::initWingman()
}
wingman = (Wingman*)new OllamaWingman{
config.getWingmanOllamaUrl(),
- config.getWingmanOllamaLlm()
+ // TODO config.getWingmanOllamaLlm()
};
wingmanLlmProvider = config.getWingmanLlmProvider();
return;
case WingmanLlmProviders::WINGMAN_PROVIDER_MOCK:
MF_DEBUG(" MIND Wingman init: MOCK" << endl);
wingman = (Wingman*)new MockWingman{
- "mock-llm-model"
+ MockWingman::LLM_MODEL_MOCK
};
wingmanLlmProvider = config.getWingmanLlmProvider();
return;
+ case WingmanLlmProviders::WINGMAN_PROVIDER_NONE:
+ MF_DEBUG(" MIND Wingman init: set to NONE > deinitialize > NO Wingman" << endl);
default:
- MF_DEBUG(" MIND Wingman init: UNKNOWN" << endl);
+ MF_DEBUG(" MIND Wingman init: UNKNOWN > NO Wingman" << endl);
break;
}
}
diff --git a/lib/src/mind/mind.h b/lib/src/mind/mind.h
index 8b6b9061..e2146dd3 100644
--- a/lib/src/mind/mind.h
+++ b/lib/src/mind/mind.h
@@ -196,13 +196,15 @@ class Mind : public OntologyProvider
*/
Ai* ai;
+public:
/**
* Configuration driven Wingman initialization.
*/
void initWingman();
+
+private:
/**
* Wingman LLM provider currently used by Mind.
- * (user to detect configuration changes)
*/
WingmanLlmProviders wingmanLlmProvider;
/**