From 20f62104c5e810d54d3768710a574483abc570f6 Mon Sep 17 00:00:00 2001
From: Martin Dvorak
Date: Mon, 1 Apr 2024 16:13:14 +0200
Subject: [PATCH] WIP: library orphans search

---
 app/src/qt/dialogs/configuration_dialog.cpp |  3 +-
 app/src/qt/main_menu_presenter.cpp          |  1 +
 app/src/qt/main_menu_view.cpp               | 16 +++++--
 app/src/qt/main_menu_view.h                 |  1 +
 app/src/qt/main_window_presenter.cpp        | 10 ++--
 app/src/qt/main_window_presenter.h          |  1 +
 app/src/qt/main_window_view.cpp             |  7 +--
 lib/src/mind/ai/llm/ollama_wingman.cpp      |  6 +--
 lib/src/mind/ai/llm/openai_wingman.cpp      |  4 +-
 lib/src/mind/mind.cpp                       | 47 +++++++++++++++++++
 lib/src/mind/mind.h                         | 10 ++++
 .../markdown_document_representation.cpp    |  5 +-
 .../markdown_document_representation.h      |  6 +++
 lib/test/src/json/json_test.cpp             |  2 +-
 14 files changed, 97 insertions(+), 22 deletions(-)

diff --git a/app/src/qt/dialogs/configuration_dialog.cpp b/app/src/qt/dialogs/configuration_dialog.cpp
index b63572a5..784c01ae 100644
--- a/app/src/qt/dialogs/configuration_dialog.cpp
+++ b/app/src/qt/dialogs/configuration_dialog.cpp
@@ -709,6 +709,7 @@ ConfigurationDialog::WingmanOpenAiTab::WingmanOpenAiTab(QWidget* parent, QComboB
     setOpenAiButton->setVisible(helpLabel->isVisible());
     clearApiKeyButton = new QPushButton(tr("Clear API Key"), this);
     clearApiKeyButton->setVisible(helpLabel->isVisible());
+    // LLM model can be chosen at any time when a valid configuration is available
     llmModelsLabel = new QLabel(tr("LLM model:"));
     llmModelsCombo = new QComboBox();
     llmModelsCombo->addItem(LLM_MODEL_NONE);
@@ -1006,7 +1007,7 @@ void ConfigurationDialog::WingmanTab::handleComboBoxChanged(int index) {
             tr(
                 "You have chosen OpenAI as your Wingman LLM provider. "
                 "Therefore, your data will be sent to OpenAI servers "
-                "for GPT processing when you use Wingman."));
+                "when you use Wingman."));
     }
 }
 
diff --git a/app/src/qt/main_menu_presenter.cpp b/app/src/qt/main_menu_presenter.cpp
index 5635c4a0..3b2e0927 100644
--- a/app/src/qt/main_menu_presenter.cpp
+++ b/app/src/qt/main_menu_presenter.cpp
@@ -115,6 +115,7 @@ MainMenuPresenter::MainMenuPresenter(MainWindowPresenter* mwp)
     // menu: Library
     QObject::connect(view->actionLibraryAdd, SIGNAL(triggered()), mwp, SLOT(doActionLibraryNew()));
     QObject::connect(view->actionLibrarySync, SIGNAL(triggered()), mwp, SLOT(doActionLibrarySync()));
+    QObject::connect(view->actionLibraryOrphans, SIGNAL(triggered()), mwp, SLOT(doActionLibraryOrphans()));
     QObject::connect(view->actionLibraryDeprecate, SIGNAL(triggered()), mwp, SLOT(doActionLibraryRm()));
 
     // menu: Organizer
diff --git a/app/src/qt/main_menu_view.cpp b/app/src/qt/main_menu_view.cpp
index a28d450e..76074cbd 100644
--- a/app/src/qt/main_menu_view.cpp
+++ b/app/src/qt/main_menu_view.cpp
@@ -109,7 +109,7 @@ MainMenuView::MainMenuView(MainWindowView& mainWindowView)
     actionMindAutolink->setVisible(false);
 #endif
 
-    actionMindWingman = new QAction(QIcon(":/menu-icons/wingman-green.svg"), tr("&Wingman GPT"), mainWindow);
+    actionMindWingman = new QAction(QIcon(":/menu-icons/wingman-green.svg"), tr("&Wingman LLM"), mainWindow);
     actionMindWingman->setShortcut(QKeySequence(Qt::CTRL+Qt::Key_Slash));
     actionMindWingman->setStatusTip(tr("Open Wingman dialog..."));
 
@@ -140,6 +140,13 @@ MainMenuView::MainMenuView(MainWindowView& mainWindowView)
         tr(
             "Synchronize library source directory with MindForger notebook(s) which represent"
            "library resources..."));
+    // library: find orphans
+    actionLibraryOrphans = new QAction(
+        QIcon(":/menu-icons/find.svg"),
+        tr("&Find orphans"),
+        mainWindow);
+    actionLibraryOrphans->setStatusTip(
+        tr("Find library Notebooks which reference non-existent documents..."));
     // library: deprecate
     actionLibraryDeprecate = new QAction(
         QIcon(":/menu-icons/delete.svg"), tr("&Delete library"), mainWindow);
@@ -148,6 +155,7 @@ MainMenuView::MainMenuView(MainWindowView& mainWindowView)
     // assemble Library sub-menu
     submenuMindLibrary->addAction(actionLibraryAdd);
     submenuMindLibrary->addAction(actionLibrarySync);
+    submenuMindLibrary->addAction(actionLibraryOrphans);
     submenuMindLibrary->addAction(actionLibraryDeprecate);
 
     // dream ... sanity, integrity, detox, inference, assoc discovery, ...
@@ -565,7 +573,7 @@ MainMenuView::MainMenuView(MainWindowView& mainWindowView)
     actionOutlineTWikiImport->setStatusTip(tr("Import Notebook from an external TWiki file and restart MindForger"));
     submenuOutlineImport->addAction(actionOutlineTWikiImport);
 
-    submenuOutlineWingman = menuOutline->addMenu(QIcon(":/menu-icons/wingman-green.svg"), tr("&Wingman GPT"));
+    submenuOutlineWingman = menuOutline->addMenu(QIcon(":/menu-icons/wingman-green.svg"), tr("&Wingman LLM"));
     actionOutlineWingmanSummarize = new QAction(tr("&Summarize"), mainWindow);
     actionOutlineWingmanSummarize->setStatusTip(tr("Ask Wingman to summarize text of the Notebook..."));
     submenuOutlineWingman->addAction(actionOutlineWingmanSummarize);
@@ -699,7 +707,7 @@ MainMenuView::MainMenuView(MainWindowView& mainWindowView)
     actionNoteImport->setStatusTip(tr("Import Note from an external file in a supported format"));
     actionNoteImport->setEnabled(false);
 
-    submenuNoteWingman = menuNote->addMenu(QIcon(":/menu-icons/wingman-green.svg"), tr("&Wingman GPT"));
+    submenuNoteWingman = menuNote->addMenu(QIcon(":/menu-icons/wingman-green.svg"), tr("&Wingman LLM"));
     actionNoteWingmanSummarize = new QAction(tr("&Summarize"), mainWindow);
     actionNoteWingmanSummarize->setStatusTip(tr("Ask Wingman to summarize text of the Note..."));
     submenuNoteWingman->addAction(actionNoteWingmanSummarize);
@@ -788,7 +796,7 @@ MainMenuView::MainMenuView(MainWindowView& mainWindowView)
     actionEditSpellCheck = new QAction(QIcon(":/menu-icons/paste.svg"), tr("&Spell Check"), mainWindow);
     actionEditSpellCheck->setStatusTip(tr("Spell check Notebook or Note description"));
 
-    submenuEditWingman = menuEdit->addMenu(QIcon(":/menu-icons/wingman-green.svg"), tr("&Wingman GPT"));
+    submenuEditWingman = menuEdit->addMenu(QIcon(":/menu-icons/wingman-green.svg"), tr("&Wingman LLM"));
     actionEditWingmanFixGrammar = new QAction(tr("&Fix Grammar"), mainWindow);
     actionEditWingmanFixGrammar->setStatusTip(tr("Ask Wingman to fix grammar errors in the selected text / word under the cursor..."));
     submenuEditWingman->addAction(actionEditWingmanFixGrammar);
diff --git a/app/src/qt/main_menu_view.h b/app/src/qt/main_menu_view.h
index e31d09b3..5a7e2065 100644
--- a/app/src/qt/main_menu_view.h
+++ b/app/src/qt/main_menu_view.h
@@ -85,6 +85,7 @@ class MainMenuView : public QObject
     QMenu* submenuMindLibrary;
     QAction* actionLibraryAdd;
     QAction* actionLibrarySync;
+    QAction* actionLibraryOrphans;
     QAction* actionLibraryDeprecate;
     QAction* actionMindPreferences;
     QMenu* submenuMindExport;
diff --git a/app/src/qt/main_window_presenter.cpp b/app/src/qt/main_window_presenter.cpp
index 0b671e57..91521567 100644
--- a/app/src/qt/main_window_presenter.cpp
+++ b/app/src/qt/main_window_presenter.cpp
@@ -2176,13 +2176,13 @@ void MainWindowPresenter::slotRunWingmanFromDialog(bool showDialog)
 
     // check the result
     if (future.isFinished()) {
-        statusBar->showInfo(QString(tr("Wingman received an answer from the GPT provider")));
+        statusBar->showInfo(QString(tr("Wingman received an answer from the LLM provider")));
     } else {
-        statusBar->showError(QString(tr("Wingman failed to receive an answer from the GPT provider")));
+        statusBar->showError(QString(tr("Wingman failed to receive an answer from the LLM provider")));
         // PUSH answer to the chat dialog
         this->wingmanDialog->appendAnswerToChat(
-            "Wingman failed to get answer from the GPT provider.<br/><br/>"+commandWingmanChat.answerMarkdown,
+            "Wingman failed to get answer from the LLM provider.<br/><br/>"+commandWingmanChat.answerMarkdown,
"+commandWingmanChat.answerMarkdown, "", this->wingmanDialog->getContextType(), true @@ -3464,6 +3464,10 @@ void MainWindowPresenter::handleSyncLibrary() rmLibraryDialog->reset(); } +void MainWindowPresenter::doActionLibraryOrphans() +{ + mind->findLibraryOrphanOs(); +} void MainWindowPresenter::doActionLibraryRm() { diff --git a/app/src/qt/main_window_presenter.h b/app/src/qt/main_window_presenter.h index 892714fd..1e843c68 100644 --- a/app/src/qt/main_window_presenter.h +++ b/app/src/qt/main_window_presenter.h @@ -240,6 +240,7 @@ public slots: void doActionLibraryNew(); void handleNewLibrary(); void doActionLibrarySync(); + void doActionLibraryOrphans(); void handleSyncLibrary(); void doActionLibraryRm(); void handleRmLibrary(); diff --git a/app/src/qt/main_window_view.cpp b/app/src/qt/main_window_view.cpp index e5ec0c7e..b76b7aff 100644 --- a/app/src/qt/main_window_view.cpp +++ b/app/src/qt/main_window_view.cpp @@ -24,13 +24,8 @@ MainWindowView::MainWindowView(LookAndFeels& lookAndFeel) : QMainWindow(nullptr), // main window has no parent - it is destroyed by main MF class lookAndFeel(lookAndFeel) { -#ifdef MF_LLAMA_CPP - #define MINDFORGER_GPT " GPT" -#else - #define MINDFORGER_GPT "" -#endif windowTitleSkeleton - = "MindForger" MINDFORGER_GPT " - " + = "MindForger - " +tr("Thinking Notebook")+" - " +MINDFORGER_VERSION; diff --git a/lib/src/mind/ai/llm/ollama_wingman.cpp b/lib/src/mind/ai/llm/ollama_wingman.cpp index ccbc9b48..de27784a 100644 --- a/lib/src/mind/ai/llm/ollama_wingman.cpp +++ b/lib/src/mind/ai/llm/ollama_wingman.cpp @@ -55,12 +55,12 @@ void OllamaWingman::curlListModels() { if(!this->llmModels.empty()) { this->llmModels.clear(); } - + // call to ollama API to list available models throw std::runtime_error("OllamaWingman::curlListModels() not implemented"); } -std::vector& OllamaWingman::listModels() +std::vector& OllamaWingman::listModels() { if(this->llmModels.empty()) { this->curlListModels(); @@ -258,7 +258,7 @@ void OllamaWingman::curlGet(CommandWingmanChat& command) { } if(httpResponseJSon.contains("response")) { httpResponseJSon["response"].get_to(command.answerMarkdown); - // TODO ask GPT for HTML formatted response + // TODO ask LLM for HTML formatted response m8r::replaceAll( "\n", "
", diff --git a/lib/src/mind/ai/llm/openai_wingman.cpp b/lib/src/mind/ai/llm/openai_wingman.cpp index f01bf64a..662aa49a 100644 --- a/lib/src/mind/ai/llm/openai_wingman.cpp +++ b/lib/src/mind/ai/llm/openai_wingman.cpp @@ -107,7 +107,7 @@ void OpenAiWingman::curlGet(CommandWingmanChat& command) { */ nlohmann::json messageSystemJSon{}; - messageSystemJSon["role"] = "system"; // system (instruct GPT who it is), user (user prompts), assistant (GPT answers) + messageSystemJSon["role"] = "system"; // system (instruct LLM who it is), user (user prompts), assistant (LLM answers) messageSystemJSon["content"] = // "You are a helpful assistant that returns HTML-formatted answers to the user's prompts." "You are a helpful assistant." @@ -196,6 +196,8 @@ void OpenAiWingman::curlGet(CommandWingmanChat& command) { " '" << command.httpResponse << "'" << endl); } #else + // TODO refactor this section to a generic CURL call which gets: URL, body as C string and returns httpResponse + // set up cURL options command.httpResponse.clear(); curl_easy_setopt( diff --git a/lib/src/mind/mind.cpp b/lib/src/mind/mind.cpp index 02784063..9fc392ed 100644 --- a/lib/src/mind/mind.cpp +++ b/lib/src/mind/mind.cpp @@ -1502,6 +1502,53 @@ void Mind::initWingman() wingmanLlmProvider = WingmanLlmProviders::WINGMAN_PROVIDER_NONE; } +void Mind::findLibraryOrphanOs() +{ + vector orphanOutlines{}; + const vector& outlines = memory.getOutlines(); + const Tag* t = memory.getOntology().findOrCreateTag( + MarkdownDocumentRepresentation::TAG_LIB_DOC); + MF_DEBUG("Searching ORPHAN library outlines..." << endl); + for(Outline* outline:outlines) { + if(!outline->hasTag(t)) { + continue; + } + + const vector& d = outline->getDescription(); + if(d.size()>0) { + if(d[0]->size() > 0) { + // MF_DEBUG(" Orphan: 1st line '" << *d[0] << "'" << endl); + if(stringStartsWith(*d[0], MarkdownDocumentRepresentation::PREFIX_1ST_LINE)) { + // extract Markdown link + size_t i{strlen(MarkdownDocumentRepresentation::PREFIX_1ST_LINE)}; + string s{ + d[0]->substr( + i, + d[0]->size()-i)}; + // MF_DEBUG(" '" << s << "'" << endl); + + // parse Markdown link + string documentPath{}; + if(s.size()>4 && s[0]=='[' && s[s.size()-1]==')') { + size_t i; + if((i=s.find("](")) != std::string::npos) { + documentPath = s.substr(i+2,s.size()-3-i); + } + } + if(documentPath.size()) { + MF_DEBUG(" '" << documentPath << "'" << endl); + if(!isFile(documentPath.c_str())) { + MF_DEBUG(" ORPHAN" << endl); + // TODO + detect whether the file exists + } + } + } + } + } + } +} + Wingman* Mind::getWingman() { if(this->wingmanLlmProvider != config.getWingmanLlmProvider()) { diff --git a/lib/src/mind/mind.h b/lib/src/mind/mind.h index e2146dd3..39854594 100644 --- a/lib/src/mind/mind.h +++ b/lib/src/mind/mind.h @@ -38,6 +38,7 @@ #include "../config/configuration.h" #include "../representations/representation_interceptor.h" #include "../representations/markdown/markdown_configuration_representation.h" +#include "../representations/markdown/markdown_document_representation.h" namespace m8r { @@ -685,6 +686,15 @@ class Mind : public OntologyProvider */ void noteOnRename(const std::string& oldName, const std::string& newName); + /* + * LIBRARY (information source) + */ + + /** + * @brief Find Os which reference non-existent documents. 
+ */ + void findLibraryOrphanOs(); + /* * WINGMAN */ diff --git a/lib/src/representations/markdown/markdown_document_representation.cpp b/lib/src/representations/markdown/markdown_document_representation.cpp index e3d9f499..03625152 100644 --- a/lib/src/representations/markdown/markdown_document_representation.cpp +++ b/lib/src/representations/markdown/markdown_document_representation.cpp @@ -51,12 +51,11 @@ Outline* MarkdownDocumentRepresentation::to( } o->addTag(ontology.findOrCreateTag("pdf")); - o->addTag(ontology.findOrCreateTag("library-document")); + o->addTag(ontology.findOrCreateTag(TAG_LIB_DOC)); o->addDescriptionLine( new string{ - "This is a notebook for the document: " - "[" + documentPath + "](" + documentPath + ")"}); + string{PREFIX_1ST_LINE} + "[" + documentPath + "](" + documentPath + ")"}); o->addDescriptionLine(new string{""}); o->addDescriptionLine(new string{"---"}); o->addDescriptionLine(new string{""}); diff --git a/lib/src/representations/markdown/markdown_document_representation.h b/lib/src/representations/markdown/markdown_document_representation.h index 603f4306..93116abb 100644 --- a/lib/src/representations/markdown/markdown_document_representation.h +++ b/lib/src/representations/markdown/markdown_document_representation.h @@ -29,6 +29,12 @@ namespace m8r { class MarkdownDocumentRepresentation { +public: + + static constexpr const auto TAG_LIB_DOC = "library-document"; + + static constexpr const auto PREFIX_1ST_LINE = "This is a notebook for the document: "; + private: Ontology& ontology; diff --git a/lib/test/src/json/json_test.cpp b/lib/test/src/json/json_test.cpp index 9122ca1f..d269818d 100644 --- a/lib/test/src/json/json_test.cpp +++ b/lib/test/src/json/json_test.cpp @@ -107,7 +107,7 @@ TEST(JSonTestCase, ParseOpenAiResponse) && choice["message"].contains("content") ) { choice["message"]["content"].get_to(answerMarkdown); - // TODO ask GPT for HTML formatted response + // TODO ask LLM for HTML formatted response m8r::replaceAll( "\n", "
",