Skip to content

Commit

Permalink
re-organize chat files, continue last conversation if settings are …
Browse files Browse the repository at this point in the history
…equal
  • Loading branch information
aprxi committed Jul 22, 2024
1 parent aa8344e commit c0d02bb
Show file tree
Hide file tree
Showing 16 changed files with 247 additions and 178 deletions.
33 changes: 24 additions & 9 deletions lumni/src/apps/builtin/llm/prompt/src/app.rs
Original file line number Diff line number Diff line change
Expand Up @@ -24,9 +24,9 @@ use tokio::signal;
use tokio::sync::{mpsc, Mutex};
use tokio::time::{interval, timeout, Duration};

use super::chat::db::ConversationDatabaseStore;
use super::chat::{
AssistantManager, ChatSession, ConversationDatabaseStore, NewConversation,
PromptInstruction,
AssistantManager, ChatSession, NewConversation, PromptInstruction,
};
use super::server::{ModelServer, ModelServerName, ServerTrait};
use super::session::{AppSession, TabSession};
Expand Down Expand Up @@ -409,25 +409,40 @@ pub async fn run_cli(
let mut completion_options =
assistant_manager.get_completion_options().clone();

let model_server = ModelServerName::from_str(&server_name);
completion_options.model_server = Some(model_server.clone());

// overwrite default options with options set by the user
if let Some(s) = user_options {
let user_options_value = serde_json::from_str::<serde_json::Value>(s)?;
completion_options.update(user_options_value)?;
}

}
let new_conversation = NewConversation {
server: ModelServerName::from_str(&server_name),
server: model_server,
model: default_model,
options: Some(serde_json::to_value(completion_options)?),
system_prompt: instruction,
initial_messages: Some(initial_messages),
parent: None,
};

let prompt_instruction = PromptInstruction::new(
new_conversation,
&db_conn,
)?;
// check if the last conversation is the same as the new conversation, if so,
// continue the conversation, otherwise start a new conversation
let prompt_instruction =
if let Some(conversation_id) = db_conn.fetch_last_conversation_id()? {
let reader = db_conn.get_conversation_reader(conversation_id);
let is_equal = new_conversation.is_equal(&reader)?;
if is_equal {
log::debug!("Continuing last conversation");
PromptInstruction::from_reader(&reader)?
} else {
log::debug!("Starting new conversation");
PromptInstruction::new(new_conversation, &db_conn)?
}
} else {
log::debug!("Starting new conversation");
PromptInstruction::new(new_conversation, &db_conn)?
};

let chat_session =
ChatSession::new(&server_name, prompt_instruction, &db_conn).await?;
Expand Down
2 changes: 1 addition & 1 deletion lumni/src/apps/builtin/llm/prompt/src/chat/assistant.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
use lumni::api::error::ApplicationError;

use super::conversation::{ConversationId, Message, MessageId};
use super::db::{ConversationId, Message, MessageId};
use super::options::{AssistantOptions, ChatCompletionOptions};
use super::prompt::Prompt;
use super::{PromptRole, PERSONAS};
Expand Down
Original file line number Diff line number Diff line change
@@ -1,9 +1,10 @@
use std::collections::HashMap;

use super::{
use super::db::{
Attachment, AttachmentId, ConversationId, Message, MessageId,
ModelIdentifier, ModelSpec, PromptRole,
ModelIdentifier, ModelSpec,
};
use super::PromptRole;

#[derive(Debug)]
pub struct ConversationCache {
Expand Down
Original file line number Diff line number Diff line change
@@ -1,80 +1,14 @@
use lumni::api::error::ApplicationError;

use super::conversation::{
ConversationCache, ConversationId, Message, MessageId, ModelServerName,
ModelSpec,
};
use super::db::{
system_time_in_milliseconds, ConversationDatabaseStore, ConversationReader,
system_time_in_milliseconds, ConversationCache, ConversationDatabaseStore,
ConversationId, ConversationReader, Message, MessageId, ModelServerName,
ModelSpec,
};
use super::prepare::NewConversation;
use super::{ChatCompletionOptions, ChatMessage, PromptRole};
pub use crate::external as lumni;

/// Reference to the conversation (and the message within it) that a
/// forked conversation starts from.
#[derive(Debug, Clone)]
pub struct ParentConversation {
    pub id: ConversationId,
    pub fork_message_id: MessageId,
}

/// Settings gathered for a conversation that is about to be created
/// (or forked from an existing one when `parent` is set).
#[derive(Debug, Clone)]
pub struct NewConversation {
    pub server: ModelServerName,
    pub model: Option<ModelSpec>,
    // Completion options as raw JSON, applied on top of the defaults.
    pub options: Option<serde_json::Value>,
    pub system_prompt: Option<String>, // system_prompt ignored if parent is provided
    pub initial_messages: Option<Vec<Message>>, // initial_messages ignored if parent is provided
    pub parent: Option<ParentConversation>, // forked conversation
}

impl NewConversation {
    /// Assemble the settings for a conversation to be started.
    ///
    /// When a `conversation_reader` is supplied, the new conversation
    /// inherits the completion options of the conversation the reader
    /// points at; if that conversation has at least one message, `parent`
    /// records the fork point. Without a reader, a completely fresh
    /// conversation (no inherited options, no parent) is prepared.
    pub fn new(
        new_server: ModelServerName,
        new_model: ModelSpec,
        conversation_reader: Option<&ConversationReader<'_>>,
    ) -> Result<NewConversation, ApplicationError> {
        let reader = match conversation_reader {
            Some(reader) => reader,
            None => {
                // No existing conversation: start completely fresh.
                return Ok(NewConversation {
                    server: new_server,
                    model: Some(new_model),
                    options: None,
                    system_prompt: None,
                    initial_messages: None,
                    parent: None,
                });
            }
        };

        // Fork from an existing conversation: carry its options over.
        let current_conversation_id = reader.get_conversation_id();
        let current_completion_options = reader.get_completion_options()?;

        // With no last message there is nothing to fork from, so `parent`
        // stays `None` and a fresh conversation is started instead. Both
        // system_prompt and initial_messages stay `None` in either case:
        // when forking they are ignored, and when the source conversation
        // is empty they were also `None` upstream.
        let parent =
            reader
                .get_last_message_id()?
                .map(|last_message_id| ParentConversation {
                    id: current_conversation_id,
                    fork_message_id: last_message_id,
                });

        Ok(NewConversation {
            server: new_server,
            model: Some(new_model),
            options: Some(current_completion_options),
            system_prompt: None,
            initial_messages: None,
            parent,
        })
    }
}

#[derive(Debug)]
pub struct PromptInstruction {
cache: ConversationCache,
Expand All @@ -88,16 +22,14 @@ impl PromptInstruction {
new_conversation: NewConversation,
db_conn: &ConversationDatabaseStore,
) -> Result<Self, ApplicationError> {
let mut completion_options = match new_conversation.options {
let completion_options = match new_conversation.options {
Some(opts) => {
let mut options = ChatCompletionOptions::default();
options.update(opts)?;
options
}
None => ChatCompletionOptions::default(),
};
// Update model_server in completion_options
completion_options.model_server = Some(new_conversation.server);

let conversation_id = if let Some(ref model) = new_conversation.model {
Some(db_conn.new_conversation(
Expand Down
81 changes: 10 additions & 71 deletions lumni/src/apps/builtin/llm/prompt/src/chat/conversation/mod.rs
Original file line number Diff line number Diff line change
@@ -1,77 +1,16 @@
use serde::{Deserialize, Serialize};

mod cache;
mod model;

mod instruction;
mod prepare;
pub use cache::ConversationCache;
pub use model::{ModelIdentifier, ModelSpec};

use super::PromptRole;

/// Newtype naming the model server a conversation targets.
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub struct ModelServerName(pub String);

impl ModelServerName {
    /// Build a server name from any string-like value.
    pub fn from_str<T: AsRef<str>>(s: T) -> Self {
        ModelServerName(s.as_ref().to_string())
    }
}

// The previous inherent `to_string` shadowed the `ToString` trait method
// (clippy: inherent_to_string). Implementing `Display` instead keeps
// `.to_string()` working for all callers via the blanket `ToString` impl,
// and additionally allows the type to be used directly in `format!`.
impl std::fmt::Display for ModelServerName {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}", self.0)
    }
}

/// Identifier of a conversation (wraps the `i64` database key).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub struct ConversationId(pub i64);
use db::{ConversationId, MessageId};
pub use instruction::PromptInstruction;
pub use prepare::NewConversation;

/// Identifier of a message (wraps the `i64` database key).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub struct MessageId(pub i64);
pub use super::db;
use super::{ChatCompletionOptions, ChatMessage, PromptRole};

/// Identifier of an attachment (wraps the `i64` database key).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub struct AttachmentId(pub i64);

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Conversation {
#[derive(Debug, Clone)]
pub struct ParentConversation {
pub id: ConversationId,
pub name: String,
pub info: serde_json::Value,
pub model_identifier: ModelIdentifier,
pub parent_conversation_id: Option<ConversationId>,
pub fork_message_id: Option<MessageId>, // New field
pub completion_options: Option<serde_json::Value>,
pub created_at: i64,
pub updated_at: i64,
pub is_deleted: bool,
}

/// One message row of a conversation.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Message {
    pub id: MessageId,
    pub conversation_id: ConversationId,
    // Author of the message (e.g. `PromptRole::System`).
    pub role: PromptRole,
    // NOTE(review): free-form string; the valid set of message types is
    // not visible here — confirm against the DB schema.
    pub message_type: String,
    pub content: String,
    pub has_attachments: bool,
    // Token count of `content`, when known.
    pub token_length: Option<i64>,
    // Link to the preceding message; `None` for the first message.
    pub previous_message_id: Option<MessageId>,
    // Creation timestamp; presumably milliseconds since epoch — TODO confirm.
    pub created_at: i64,
    // Soft-delete flag.
    pub is_deleted: bool,
}

/// Attachment payload: either a reference by URI or the raw bytes inline.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum AttachmentData {
    Uri(String),
    Data(Vec<u8>),
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Attachment {
pub attachment_id: AttachmentId,
pub message_id: MessageId,
pub conversation_id: ConversationId,
pub data: AttachmentData, // file_uri or file_data
pub file_type: String,
pub metadata: Option<serde_json::Value>,
pub created_at: i64,
pub is_deleted: bool,
pub fork_message_id: MessageId,
}
114 changes: 114 additions & 0 deletions lumni/src/apps/builtin/llm/prompt/src/chat/conversation/prepare.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,114 @@
use lumni::api::error::ApplicationError;

use super::db::{ConversationReader, Message, ModelServerName, ModelSpec};
use super::{ParentConversation, PromptRole};
pub use crate::external as lumni;

/// Settings gathered for a conversation that is about to be created
/// (or forked from an existing one when `parent` is set).
#[derive(Debug, Clone)]
pub struct NewConversation {
    pub server: ModelServerName,
    pub model: Option<ModelSpec>,
    // Completion options as raw JSON, applied on top of the defaults.
    pub options: Option<serde_json::Value>,
    pub system_prompt: Option<String>, // system_prompt ignored if parent is provided
    pub initial_messages: Option<Vec<Message>>, // initial_messages ignored if parent is provided
    pub parent: Option<ParentConversation>, // forked conversation
}

impl NewConversation {
    /// Assemble the settings for a conversation to be started.
    ///
    /// With a `conversation_reader` the new conversation forks from the
    /// one the reader points at: its completion options are carried over
    /// (with `model_server` overwritten to `new_server`) and, when the
    /// source conversation has at least one message, `parent` records the
    /// fork point. Without a reader, a fresh conversation is prepared
    /// whose only option is the target `model_server`.
    pub fn new(
        new_server: ModelServerName,
        new_model: ModelSpec,
        conversation_reader: Option<&ConversationReader<'_>>,
    ) -> Result<NewConversation, ApplicationError> {
        if let Some(reader) = conversation_reader {
            // fork from an existing conversation
            let current_conversation_id = reader.get_conversation_id();
            let mut current_completion_options =
                reader.get_completion_options()?;

            // Point the inherited options at the requested server.
            // NOTE(review): index-assignment on a serde_json::Value panics
            // unless the value is a JSON object — assumes
            // get_completion_options always yields an object; confirm.
            current_completion_options["model_server"] =
                serde_json::to_value(new_server.clone())?;

            if let Some(last_message_id) = reader.get_last_message_id()? {
                Ok(NewConversation {
                    server: new_server,
                    model: Some(new_model),
                    options: Some(current_completion_options),
                    system_prompt: None, // ignored when forking
                    initial_messages: None, // ignored when forking
                    parent: Some(ParentConversation {
                        id: current_conversation_id,
                        fork_message_id: last_message_id,
                    }),
                })
            } else {
                // Start a new conversation: with no last message there is
                // nothing to fork from. Both system_prompt and
                // initial_messages stay None, because if no messages exist
                // they were also None in the (empty) parent conversation.
                Ok(NewConversation {
                    server: new_server,
                    model: Some(new_model),
                    options: Some(current_completion_options),
                    system_prompt: None,
                    initial_messages: None,
                    parent: None,
                })
            }
        } else {
            // start a new conversation
            let completion_options = serde_json::json!({
                "model_server": new_server,
            });
            Ok(NewConversation {
                server: new_server,
                model: Some(new_model),
                options: Some(completion_options),
                system_prompt: None,
                initial_messages: None,
                parent: None,
            })
        }
    }

    /// Check whether this prepared conversation matches the conversation
    /// stored in the database behind `reader`, so the caller can continue
    /// it instead of creating a new one.
    ///
    /// Compared: model spec, completion options (which include the server
    /// name and assistant settings), and the system prompt — where an
    /// unset system prompt falls back to the first initial message if that
    /// message has the System role.
    pub fn is_equal(
        &self,
        reader: &ConversationReader,
    ) -> Result<bool, ApplicationError> {
        // check if conversation settings are equal to the conversation stored in the database

        // Compare model
        let last_model = reader.get_model_spec()?;
        if self.model.as_ref() != Some(&last_model) {
            return Ok(false);
        }

        // Compare completion options (which includes server name and assistant).
        // NOTE(review): serde_json object comparison is key-order-insensitive,
        // but any extra/missing key counts as "different" and forces a new
        // conversation.
        let last_options = reader.get_completion_options()?;
        let new_options = match &self.options {
            Some(opts) => opts.clone(),
            None => serde_json::json!({}),
        };
        if last_options != new_options {
            return Ok(false);
        }

        // Compare system prompt. If the system prompt is not set on the new
        // conversation, fall back to the first initial message when it is a
        // System message.
        let last_system_prompt = reader.get_system_prompt()?;
        let new_system_prompt = match &self.system_prompt {
            Some(prompt) => Some(prompt.as_str()),
            None => self.initial_messages.as_ref().and_then(|messages| {
                messages.first().and_then(|msg| {
                    if msg.role == PromptRole::System {
                        Some(msg.content.as_str())
                    } else {
                        None
                    }
                })
            }),
        };
        if last_system_prompt.as_deref() != new_system_prompt {
            return Ok(false);
        }
        // Conversation settings are equal
        Ok(true)
    }
}
3 changes: 1 addition & 2 deletions lumni/src/apps/builtin/llm/prompt/src/chat/db/display.rs
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
use lumni::api::error::ApplicationError;

use super::conversation::{Conversation, ConversationId, Message};
use super::ConversationDatabaseStore;
use super::{Conversation, ConversationDatabaseStore, ConversationId, Message};
pub use crate::external as lumni;

impl ConversationDatabaseStore {
Expand Down
Loading

0 comments on commit c0d02bb

Please sign in to comment.