From 65385d7f60d0134c85c3157cb76ecc9aa61a4a3d Mon Sep 17 00:00:00 2001 From: J S <49557684+svilupp@users.noreply.github.com> Date: Sun, 12 May 2024 21:01:51 +0100 Subject: [PATCH] add prompt builder --- CHANGELOG.md | 20 +++- Project.toml | 3 +- README.md | 15 +++ app.jl | 179 +++++++++++++++++++++++++++- docs/src/index.md | 11 +- docs/src/introduction.md | 12 ++ main.jl | 9 +- src/ProToPortal.jl | 8 +- src/serialization.jl | 4 +- src/speech_to_text.jl | 10 ++ src/utils.jl | 15 +++ src/view.jl | 19 ++- src/view_builder.jl | 78 ++++++++++++ src/view_chat.jl | 123 +++++++++++++------ templates/PromptGeneratorBasic.json | 1 + 15 files changed, 442 insertions(+), 65 deletions(-) create mode 100644 src/speech_to_text.jl create mode 100644 src/view_builder.jl create mode 100644 templates/PromptGeneratorBasic.json diff --git a/CHANGELOG.md b/CHANGELOG.md index 42b2c4f..95ae646 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -8,15 +8,25 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Added +### Fixed + +## [0.2.0] + +### Added + Chat tab -- added delete icon to the last message in the conversation (for easy deletion) -- added a button in "Advanced Settings" to "Fork a conversation" (save to history for reference, but continue from a fresh copy) -- added a focus when a template is selected (on expand, on template selection, etc) +- Added delete icon to the last message in the conversation (for easy deletion) +- Added a button in "Advanced Settings" to "Fork a conversation" (save to history for reference, but continue from a fresh copy) +- Added a focus when a template is selected (on expand, on template selection, etc) +- Added a little edit icon for messages (disables the q-popup-edit that was distracting and jumping out too often) +- Added speech-to-text for the chat (click Record/Stop -> it will paste the text into the chat input and copy it in your clipboard) Meta-prompting tab -- Add an experimental meta-prompting experience 
based on [arxiv](https://arxiv.org/pdf/2401.12954). See the tab "Meta-Prompting" for more details. +- Added an experimental meta-prompting experience based on [arxiv](https://arxiv.org/pdf/2401.12954). See the tab "Meta-Prompting" for more details. -### Fixed +Prompt Builder tab +- Added a tab to generate "prompt templates" that you can then "Apply In Chat" (it jumps to the chat tab and provides template variables to fill in) +- Allows selecting different models (eg, Opus or more powerful ones) and defining how many samples are provided to give you a choice ## [0.1.0] diff --git a/Project.toml b/Project.toml index 193b77d..8fa5921 100644 --- a/Project.toml +++ b/Project.toml @@ -1,9 +1,10 @@ name = "ProToPortal" uuid = "f9496bd6-a3bb-4afc-927d-7268532ebfa9" authors = ["J S <49557684+svilupp@users.noreply.github.com> and contributors"] -version = "0.2.0-DEV" +version = "0.2.0" [deps] +Base64 = "2a0f44e3-6c83-55bd-87e4-b1978d98bd5f" Dates = "ade2ca70-3891-5945-98fb-dc099432e06a" GenieFramework = "a59fdf5c-6bf0-4f5d-949c-a137c9e2f353" GenieSession = "03cc5b98-4f21-4eb6-99f2-22eced81f962" diff --git a/README.md b/README.md index db77ab6..c440d68 100644 --- a/README.md +++ b/README.md @@ -85,18 +85,33 @@ For a preview, see the video: `docs/src/videos/screen-capture-code-fixing.mp4` - Start "new chat" with conversations automatically saved both on disk and in history. ### History Tab + - Browse and load past conversations with a simple click. - Fork past conversations for continued exploration without altering the original history. - View the current session or reload to fetch all saved conversations. ### Templates Tab + - Explore all available templates with previews and metadata to select the most suitable one. - Search functionality for quick filtering by partial names or keywords. ### Configuration Tab + - Change the default model or add new ones from PromptingTools. - Modify the default system prompt used when not employing a template. 
+### Meta-Prompting Tab + +- An experimental meta-prompting experience based on [arxiv](https://arxiv.org/pdf/2401.12954). +- The model calls different "experts" to solve the provided tasks. + +### Prompt Builder Tab + +- Generate prompt templates (for use in Chat) from a brief description of a task. +- Generate multiple templates at once to choose from. +- Iterate all of them by providing more inputs in the text field. +- Once you're done, click "Apply in Chat" to jump to the normal chat (use as any other template, eg, fill in variables at the top). + And rich logging in the REPL to see what the GUI is doing under the hood! ## Screenshots diff --git a/app.jl b/app.jl index 34932d9..585efbd 100644 --- a/app.jl +++ b/app.jl @@ -1,4 +1,5 @@ module App +using Base64 using ProToPortal using PromptingTools const PT = PromptingTools @@ -69,9 +70,19 @@ const HISTORY_SAVE = get(ENV, "PROTO_HISTORY_SAVE", true) @in chat_auto_reply_count = 0 # chat @in conv_displayed = Dict{Symbol, Any}[] + @in chat_edit_show = false + @in chat_edit_content = "" + @in chat_edit_save = false + @in chat_edit_index = 0 + @in is_recording = false + @in audio_chunks = [] + @in mediaRecorder = nothing + @in channel_ = nothing # Enter text @in chat_question = "" @out chat_disabled = false + @out chat_question_tokens = "" + @out chat_convo_tokens = "" # Select template @in chat_advanced_expanded = false @in chat_template_expanded = false @@ -92,6 +103,16 @@ const HISTORY_SAVE = get(ENV, "PROTO_HISTORY_SAVE", true) @in meta_rounds_current = 0 @in meta_displayed = Dict{Symbol, Any}[] @in meta_rm_last_msg = false + ## Prompt Builder + @in builder_apply = false + @in builder_submit = false + @in builder_reset = false + @in builder_disabled = false + @in builder_question = "" + @in builder_tabs = Dict{Symbol, Any}[] + @in builder_tab = "tab1" + @in builder_model = isempty(PT.GROQ_API_KEY) ? 
"gpt4t" : "gllama370" + @in builder_samples = 3 # Template browser @in template_filter = "" @in template_submit = false @@ -231,6 +252,34 @@ const HISTORY_SAVE = get(ENV, "PROTO_HISTORY_SAVE", true) chat_reset = true conv_displayed = conv_displayed_temp end + @onchange conv_displayed begin + chat_convo_tokens = if isempty(conv_displayed) + "" + elseif PT.isaimessage(conv_displayed[end][:message]) + msg = conv_displayed[end][:message] + "Tokens: $(sum(msg.tokens)), Cost: \$$(round(msg.cost;digits=2))" + else + "" + end + end + ## Chat Speech-to-text + @onchange fileuploads begin + if !isempty(fileuploads) + @info "File was uploaded: " fileuploads["path"] + filename = base64encode(fileuploads["name"]) + try + fn_new = fileuploads["path"] * ".wav" + mv(fileuploads["path"], fn_new; force = true) + chat_question = openai_whisper(fn_new) + rm(fn_new; force = true) + Base.run(__model__, "this.copyToClipboardText(this.chat_question);") + catch e + @error "Error processing file: $e" + notify(__model__, "Error processing file: $(fileuploads["name"])") + end + fileuploads = Dict{AbstractString, AbstractString}() + end + end ### Meta-prompting @onbutton meta_submit begin meta_disabled = true @@ -272,6 +321,71 @@ const HISTORY_SAVE = get(ENV, "PROTO_HISTORY_SAVE", true) pop!(meta_displayed) meta_displayed = meta_displayed end + ### Prompt Builder + @onbutton builder_submit begin + builder_disabled = true + @info "> Prompt Builder Triggered - generating $(builder_samples) samples" + first_run = isempty(builder_tabs) + for i in 1:builder_samples + if first_run + ## Generate the first version + conv_current = send_to_model( + :PromptGeneratorBasic; task = builder_question, model = builder_model) + new_sample = Dict(:name => "tab$(i)", + :label => "Sample $(i)", + :display => [msg2display(msg; id) + for (id, msg) in enumerate(conv_current)]) + ## add new sample + builder_tabs = push!(builder_tabs, new_sample) + else + ## Generate the future iterations + current_tab = 
builder_tabs[i] conv = prepare_conversation( current_tab[:display]; question = builder_question) conv_current = send_to_model( conv; model = builder_model) ## update the tab current_tab[:display] = [msg2display(msg; id) for (id, msg) in enumerate(conv_current)] builder_tabs[i] = current_tab builder_tabs = builder_tabs end end builder_disabled, builder_question = false, "" end @onbutton builder_reset begin @info "> Prompt Builder Reset!" builder_tabs = empty!(builder_tabs) builder_disabled, builder_question = false, "" end @onbutton builder_apply begin @info "> Applying Prompt Builder!" builder_msg = filter(x -> x[:name] == builder_tab, builder_tabs) |> only aimsg = builder_msg[:display][end][:message] instructions, inputs = parse_builder(aimsg) if isempty(instructions) && isempty(inputs) notify(__model__, "Parsing failed! Retry...") else conv_current = if isempty(inputs) notify(__model__, "Parsing failed! Expect bad results / edit as needed!") ## slap all instructions into user message [PT.SystemMessage(system_prompt), PT.UserMessage(instructions)] else ## turn into system and user message [PT.SystemMessage(instructions), PT.UserMessage(inputs)] end conv_displayed = [msg2display(msg; id) for (id, msg) in enumerate(conv_current)] ## show the variables to fill in by the user -- use the last message / UserMessage chat_template_expanded = true chat_template_variables = [Dict(:id => id, :variable => String(sym), :content => "") for (id, sym) in enumerate(conv_current[end].variables)] ## change page to chat selected_page = "chat" end end ### Template browsing behavior @onbutton template_submit begin @info "> Template filter: $template_filter" @@ -306,10 +420,7 @@ const HISTORY_SAVE = get(ENV, "PROTO_HISTORY_SAVE", true) end end end -## TODO: add cost tracking on configuration pages + token tracking -## TODO: add RAG/knowledge loading from folder or URL -# Required for the JS events 
- +### JAVASCRIPT SECTION ### # set focus to the first variable when it changes @watch begin raw""" @@ -330,6 +441,16 @@ end console.log("woowza"); } }, + // saves edits made in the chat dialog + saveEdits(index) { + this.chat_edit_show = false; + this.conv_displayed[this.chat_edit_index].content = this.chat_edit_content; + this.chat_edit_content = ""; + }, + updateLengthChat() { + const tokens = Math.round(this.chat_question.length / 3.5); + this.chat_question_tokens = `Approx. tokens: ${tokens}`; + }, focusTemplateSelect() { this.$nextTick(() => { this.$refs.tpl_select.focus(); @@ -387,6 +508,56 @@ end el.select(); // Select the