Skip to content

Commit

Permalink
ensure title matches current server selection, minor cleanups
Browse files Browse the repository at this point in the history
  • Loading branch information
aprxi committed Jul 4, 2024
1 parent 6b7e6b9 commit efc6724
Show file tree
Hide file tree
Showing 7 changed files with 9 additions and 53 deletions.
5 changes: 0 additions & 5 deletions lumni/src/apps/builtin/llm/prompt/src/app.rs
Original file line number Diff line number Diff line change
Expand Up @@ -49,10 +49,6 @@ async fn prompt_app<B: Backend>(
.color_scheme
.unwrap_or_else(|| defaults.get_color_scheme());

// TODO: replace with loaded server and model
let server_name = tab.chat.server_name();
tab.ui.response.set_window_title(tab.chat.server_name());

let (tx, mut rx) = mpsc::channel(CHANNEL_QUEUE_SIZE);
let mut tick = interval(Duration::from_millis(1));
let keep_running = Arc::new(AtomicBool::new(false));
Expand Down Expand Up @@ -136,7 +132,6 @@ async fn prompt_app<B: Backend>(

tab.ui.response.text_append_with_insert(
&formatted_prompt,
//Some(PromptStyle::user()),
Some(color_scheme.get_primary_style()),
);
tab.ui.response.text_append_with_insert(
Expand Down
1 change: 0 additions & 1 deletion lumni/src/apps/builtin/llm/prompt/src/chat/session.rs
Original file line number Diff line number Diff line change
Expand Up @@ -46,7 +46,6 @@ impl ChatSession {
&mut self,
server_name: &str,
) -> Result<(), ApplicationError> {

if self.server_name() != server_name {
log::debug!("switching server: {}", server_name);
self.stop();
Expand Down
42 changes: 0 additions & 42 deletions lumni/src/apps/builtin/llm/prompt/src/server/openai/response.rs
Original file line number Diff line number Diff line change
@@ -1,5 +1,4 @@
use std::collections::HashMap;
use std::error::Error;

use bytes::Bytes;
use serde::Deserialize;
Expand All @@ -18,47 +17,6 @@ pub struct OpenAIResponsePayload {
pub extra: HashMap<String, Value>,
}

impl OpenAIResponsePayload {
    /// Parses a single SSE chunk from the OpenAI streaming API into a payload.
    ///
    /// Strips an optional `data: ` prefix before JSON-decoding. Errors from
    /// UTF-8 conversion and JSON (de)serialization are boxed and propagated.
    ///
    /// # Errors
    /// Returns an error if `bytes` is not valid UTF-8 or if the text (after
    /// prefix stripping) is not JSON matching `OpenAIResponsePayload`.
    // TODO: does not work yet —
    // OpenAI sends back split responses, which we need to concatenate first
    pub fn extract_content(
        bytes: Bytes,
    ) -> Result<OpenAIResponsePayload, Box<dyn Error>> {
        // `?` converts FromUtf8Error / serde_json::Error into Box<dyn Error>
        // via the blanket From impl, replacing the previous manual matches.
        let text = String::from_utf8(bytes.to_vec())?;

        // SSE frames are prefixed with "data: "; pass plain JSON through as-is.
        let json_text = text.strip_prefix("data: ").unwrap_or(&text);

        // Two-step decode (generic Value first) is kept deliberately so a
        // future fix can inspect/concatenate partial chunks before typing.
        let parsed_json: Value = serde_json::from_str(json_text)?;
        Ok(serde_json::from_value(parsed_json)?)
    }
}

#[derive(Debug, Deserialize)]
pub struct Choice {
pub index: u32,
Expand Down
8 changes: 6 additions & 2 deletions lumni/src/apps/builtin/llm/prompt/src/session.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,9 @@ use ratatui::backend::Backend;
use ratatui::Terminal;

use super::chat::ChatSession;
use super::tui::{draw_ui, ColorScheme, ColorSchemeType, TabUi};
use super::tui::{
draw_ui, ColorScheme, ColorSchemeType, TabUi, TextWindowTrait,
};

pub struct TabSession<'a> {
pub ui: TabUi<'a>,
Expand All @@ -16,7 +18,6 @@ impl TabSession<'_> {
pub fn new(chat: ChatSession) -> Self {
let mut tab_ui = TabUi::new();
tab_ui.init();

TabSession {
ui: tab_ui,
chat,
Expand All @@ -28,6 +29,9 @@ impl TabSession<'_> {
&mut self,
terminal: &mut Terminal<B>,
) -> Result<(), io::Error> {
// Set the response window title to current server name
self.ui.response.set_window_title(self.chat.server_name());

draw_ui(terminal, self)
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -183,8 +183,7 @@ impl<'a> TextWindow<'a> {
.border_style(self.window_type.border_style())
.padding(Padding::new(0, 0, 0, 0));

let title = self.window_type.title();
if let Some(title) = title {
if let Some(title) = self.window_type.title() {
block = block
.title(title)
.title_style(Style::default().fg(Color::LightGreen))
Expand Down
1 change: 1 addition & 0 deletions lumni/src/apps/builtin/llm/prompt/src/tui/draw.rs
Original file line number Diff line number Diff line change
Expand Up @@ -62,6 +62,7 @@ pub fn draw_ui<B: Backend>(
tab.ui.prompt.widget(&prompt_edit_area),
prompt_edit_area,
);
tab.ui.response.set_window_title(tab.chat.server_name());
frame.render_widget(
tab.ui.response.widget(&prompt_log_area),
prompt_log_area,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ use crossterm::event::{KeyCode, KeyEvent, KeyModifiers};
use super::handle_command_line::handle_command_line_event;
use super::handle_prompt_window::handle_prompt_window_event;
use super::handle_response_window::handle_response_window_event;
use super::{TabSession, TabUi, WindowEvent};
use super::{TabUi, WindowEvent};
use crate::apps::builtin::llm::prompt::src::chat::ChatSession;

#[derive(Debug, Clone)]
Expand Down

0 comments on commit efc6724

Please sign in to comment.