Skip to content

Commit db65748

Browse files
TOOL-519: Minor tweaks.
1 parent 634450a commit db65748

File tree

9 files changed

+58
-37
lines changed

9 files changed

+58
-37
lines changed

application/apps/indexer/mcp/src/client/conversation.rs

Lines changed: 5 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,4 @@
1-
// Simple conversation state implementation for managing chat messages within the client
2-
// Currently all
1+
// Simple model for storing the state of a chat conversation inside the MCP client
32

43
use rmcp::{
54
model::Content,
@@ -15,7 +14,7 @@ pub enum ChatMessage {
1514
#[derive(Debug)]
1615
pub enum ClientToLlm {
1716
SystemPrompt { content: String },
18-
Prompt { content: String },
17+
UserPrompt { content: String },
1918
ToolResult { content: Vec<Content> },
2019
}
2120

@@ -39,6 +38,9 @@ pub struct Conversation {
3938
}
4039

4140
impl Conversation {
41+
/// Create a new conversation with an initial system prompt
42+
/// # Arguments
43+
/// * `system_prompt`: The system prompt to initialize the conversation with
4244
pub fn new(system_prompt: String) -> Self {
4345
Self {
4446
chat_messages: vec![ChatMessage::ClientToLlm(ClientToLlm::SystemPrompt {

application/apps/indexer/mcp/src/client/llm/mock.rs

Lines changed: 3 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
// A mock LLM client for testing purposes
2-
// It simulates LLM behaviour without making actual API calls / without a HTTP client
2+
// It simulates LLM behavior without making actual API calls / without a HTTP client
33
// The logic for processing messages is as follows:
44
// - User prompts received from the chipmunk core will cause the mock LLM to emit a ToolCall message:
55
// LlmToClient::ToolCall ("apply_search_filter") with the prompt content as filters
@@ -19,10 +19,9 @@ use crate::{
1919
};
2020
pub struct MockLlmClient;
2121

22-
// Implement the LlmClient trait for the MockLlmClient
22+
// Abstraction of LLM clients using the LlmClient trait
2323
impl LlmClient for MockLlmClient {
2424
async fn process(&self, conversation: &Conversation) -> Result<LlmToClient, McpError> {
25-
// Call the inherent implementation to avoid recursive trait call
2625
MockLlmClient::process(self, conversation).await
2726
}
2827
}
@@ -40,7 +39,7 @@ impl MockLlmClient {
4039
ClientToLlm::SystemPrompt { .. } => Err(McpError::Generic {
4140
message: "Mock LLM client received a system prompt; nothing to do".into(),
4241
}),
43-
ClientToLlm::Prompt { content } => {
42+
ClientToLlm::UserPrompt { content } => {
4443
// Simulate LLM reasoning duration
4544
warn!("⏰ Mock LLM client waits 5s ...");
4645
sleep(Duration::from_secs(5)).await;

application/apps/indexer/mcp/src/client/llm/mod.rs

Lines changed: 7 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,7 @@
1-
use crate::{client::conversation::{Conversation, LlmToClient}, types::McpError};
1+
use crate::{
2+
client::conversation::{Conversation, LlmToClient},
3+
types::McpError,
4+
};
25

36
pub mod mock;
47

@@ -24,10 +27,7 @@ pub enum LlmConfig {
2427
// We suppress the warning for now as all LLM clients are internal to the MCP client module.
2528
#[allow(async_fn_in_trait)]
2629
pub trait LlmClient {
27-
async fn process(
28-
&self,
29-
conversation: &Conversation,
30-
) -> Result<LlmToClient, McpError>;
30+
async fn process(&self, conversation: &Conversation) -> Result<LlmToClient, McpError>;
3131
}
3232

3333
// LLM wrapper struct for abstracting over different LLM clients
@@ -41,16 +41,13 @@ impl<C: LlmClient> Llm<C> {
4141
Self { client }
4242
}
4343

44-
pub async fn process(
45-
&self,
46-
conversation: &Conversation,
47-
) -> Result<LlmToClient, McpError> {
44+
pub async fn process(&self, conversation: &Conversation) -> Result<LlmToClient, McpError> {
4845
self.client.process(conversation).await
4946
}
5047
}
5148

5249
// Implementation of LLM creation from configuration for the mock client
53-
// TODO:[MCP] Maybe move this into the trait instead so that each LLM client can implement its own from_config function
50+
// TODO:[MCP] Can this be moved to the client modules? Via trait?
5451
impl Llm<mock::MockLlmClient> {
5552
pub fn from_config(config: LlmConfig) -> Self {
5653
match config {

application/apps/indexer/mcp/src/client/messages.rs

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -2,13 +2,16 @@
22
#[derive(Debug)]
33
pub enum McpClientToChipmunk {
44
Response { response: String },
5+
// TODO:[MCP] add other message types as needed. E.g.:
56
// If the LLM wants to use a tool it should be approved by the user
6-
// ToolApprovalRequets { tool_name, arguments }
7+
// ToolApprovalRequest { tool_name, arguments }
78
}
89

910
/// Messages from chipmunk to the MCP client
1011
#[derive(Debug, Clone)]
1112
pub enum McpChipmunkToClient {
12-
Prompt { prompt: String },
13+
UserPrompt { prompt: String },
14+
// TODO:[MCP] add other message types as needed. E.g.:
15+
// SystemPrompt { prompt: String },
1316
// ToolApprovalResponse { bool }
1417
}

application/apps/indexer/mcp/src/client/mod.rs

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -131,10 +131,11 @@ impl MCPClient {
131131
select! {
132132
Some(chipmunk_request) = chipmunk_request_rx.recv() => {
133133
match chipmunk_request {
134-
McpChipmunkToClient::Prompt { prompt } => {
134+
McpChipmunkToClient::UserPrompt { prompt } => {
135135
warn!("🟢 MCP Client received mock prompt: {}", prompt);
136-
conversation.add_chat_message(ChatMessage::ClientToLlm(ClientToLlm::Prompt { content: prompt.clone() }));
136+
conversation.add_chat_message(ChatMessage::ClientToLlm(ClientToLlm::UserPrompt { content: prompt.clone() }));
137137

138+
// Iteratively process conversation/messages with the LLM until we get a final response
138139
loop {
139140
match llm.process(&conversation).await? {
140141
LlmToClient::ToolCall { tool_name, arguments } => {

application/apps/indexer/mcp/src/server/messages.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,7 @@ use crate::types::SearchFilter;
77
/// Messages from the MCP server to chipmunk
88
#[derive(Debug)]
99
pub enum McpServerToChipmunk {
10-
ApplyFilter {
10+
ApplySearchFilter {
1111
filters: Vec<SearchFilter>,
1212
response_tx: oneshot::Sender<Result<(), McpError>>,
1313
},

application/apps/indexer/mcp/src/server/mod.rs

Lines changed: 8 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -4,24 +4,21 @@ use rmcp::{
44
ErrorData as RmcpError,
55
handler::server::{ServerHandler, tool::ToolRouter, wrapper::Parameters},
66
model::{CallToolResult, Content, ErrorCode, ServerCapabilities, ServerInfo},
7-
87
tool, tool_handler, tool_router,
98
transport::streamable_http_server::{
109
session::local::LocalSessionManager,
1110
tower::{StreamableHttpServerConfig, StreamableHttpService},
1211
},
1312
};
14-
use tokio::{
15-
sync::{mpsc, oneshot},
16-
};
13+
use tokio::sync::{mpsc, oneshot};
1714

1815
pub mod messages;
1916

2017
pub const BIND_ADDRESS: &str = "127.0.0.1:8181";
2118

2219
use messages::McpServerToChipmunk;
2320

24-
use crate::types::{SearchFilters};
21+
use crate::types::SearchFilters;
2522

2623
#[derive(Clone, Debug)]
2724
pub struct McpServer {
@@ -133,12 +130,9 @@ When the user provides natural language instructions, interpret them as follows:
133130
"🟢 MCP server received apply_search_filter tool call with params: {:?}",
134131
params
135132
);
136-
warn!("⏰ MCP server wait 5s");
137-
// Simulate LLM reasoning
138-
// sleep(std::time::Duration::from_secs(5)).await;
139133
let (response_tx, response_rx) = oneshot::channel();
140-
let task = McpServerToChipmunk::ApplyFilter {
141-
filters: params.filters,
134+
let task = McpServerToChipmunk::ApplySearchFilter {
135+
filters: params.filters.clone(),
142136
response_tx,
143137
};
144138
let task_tx_clone = self.server_to_chipmunk_tx.clone();
@@ -156,9 +150,10 @@ When the user provides natural language instructions, interpret them as follows:
156150
// based on the response send back the JSON response to client
157151
match response_rx.await {
158152
Ok(task_response) => match task_response {
159-
Ok(()) => Ok(CallToolResult::success(vec![Content::json(
160-
"🟢 MCP Server received ApplyFilter task response: success",
161-
)?])),
153+
Ok(()) => Ok(CallToolResult::success(vec![Content::json(format!(
154+
"Chipmunk finished task ApplySearchFilter successfully. Applied filter: {}",
155+
params
156+
))?])),
162157
Err(err) => {
163158
let err_msg = format!("Error while applying the search filter: {err}");
164159
Ok(CallToolResult::error(vec![Content::json(err_msg)?]))

application/apps/indexer/mcp/src/types.rs

Lines changed: 18 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,5 @@
1+
use std::fmt;
2+
13
use rmcp::ServiceError;
24
use schemars::JsonSchema;
35
use serde::{Deserialize, Serialize};
@@ -41,3 +43,19 @@ pub struct SearchFilter {
4143
pub struct SearchFilters {
4244
pub filters: Vec<SearchFilter>,
4345
}
46+
47+
impl fmt::Display for SearchFilters {
48+
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
49+
let filters: Vec<String> = self
50+
.filters
51+
.iter()
52+
.map(|filter| {
53+
format!(
54+
"{{ value: {}, is_regex: {}, ignore_case: {}, is_word: {} }}",
55+
filter.value, filter.is_regex, filter.ignore_case, filter.is_word
56+
)
57+
})
58+
.collect();
59+
write!(f, "[{}]", filters.join(", "))
60+
}
61+
}

application/apps/indexer/session/src/mcp_api/mod.rs

Lines changed: 8 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,9 @@
1+
// MCP API for handling communication between the Chipmunk application and the MCP client/server.
2+
// Contains functionality to:
3+
// - create and manage MCP API instances
4+
// - run the MCP event loop to process incoming messages
5+
// - methods to send messages to the MCP client and server
6+
17
use crate::operations::{Operation, OperationKind};
28
use crate::state::SessionStateAPI;
39
use crate::tracker::OperationTrackerAPI;
@@ -37,7 +43,7 @@ impl McpApi {
3743
/// TODO:[MCP] Send a prompt to the Chipmunk MCP client. The prompt would typically come from the UI.
3844
/// The arguments probably need to be tweaked to also include the session
3945
pub async fn send_prompt(&self, prompt: String) -> Result<(), stypes::NativeError> {
40-
let message = McpChipmunkToClient::Prompt { prompt };
46+
let message = McpChipmunkToClient::UserPrompt { prompt };
4147

4248
self.chipmunk_to_client_tx
4349
.send(message.clone())
@@ -61,7 +67,7 @@ pub async fn run(
6167
select! {
6268
Some(server_request) = server_to_chipmunk_rx.recv() => {
6369
match server_request {
64-
McpServerToChipmunk::ApplyFilter {filters, response_tx} => {
70+
McpServerToChipmunk::ApplySearchFilter {filters, response_tx} => {
6571

6672
error!(
6773
"[Chipmunk] received filters: {:?}", filters

0 commit comments

Comments
 (0)