Skip to content

Commit

Permalink
codespace-assistant: configurable file system paths, improved tools support, more output from testing on workspaces-app project (#310)
Browse files Browse the repository at this point in the history
  • Loading branch information
bkrabach authored Jan 29, 2025
1 parent 754432f commit d9913c7
Show file tree
Hide file tree
Showing 13 changed files with 210 additions and 86 deletions.
12 changes: 5 additions & 7 deletions .devcontainer/devcontainer.json
Original file line number Diff line number Diff line change
Expand Up @@ -27,17 +27,17 @@
"ghcr.io/devcontainers/features/docker-in-docker:2": {}
},
// Use 'forwardPorts' to make a list of ports inside the container available locally.
"forwardPorts": [
3000,
4000
],
"forwardPorts": [3000, 4000, 5000],
// Use 'portsAttributes' to configure the behavior of specific port forwarding instances.
"portsAttributes": {
"3000": {
"label": "service:semantic-workbench"
},
"4000": {
"label": "app:semantic-workbench"
},
"5000": {
"label": "app:workspaces"
}
},
// Use 'otherPortsAttributes' to set the defaults that are applied to all ports, unless overridden
Expand All @@ -48,9 +48,7 @@
// Configure tool-specific properties.
"customizations": {
"codespaces": {
"openFiles": [
".devcontainer/POST_SETUP_README.md"
]
"openFiles": [".devcontainer/POST_SETUP_README.md"]
},
"vscode": {
"extensions": [
Expand Down
Original file line number Diff line number Diff line change
@@ -1,54 +1,57 @@
from textwrap import dedent
from typing import List

from .__model import MCPServerConfig
from .__model import MCPServerConfig, ToolsConfigModel

# List of MCP server configurations
# Each configuration includes the server name, command, arguments, and environment variables.
# Tested with 'npx' commands and _should_ work with 'uvx' as well.
# Can also use 'node' (and probably 'python'), but requires the full path to the lib/script called.

mcp_server_configs: List[MCPServerConfig] = [
MCPServerConfig(
name="Filesystem MCP Server",
command="npx",
args=["-y", "@modelcontextprotocol/server-filesystem", "/workspaces/semanticworkbench"],
),
MCPServerConfig(
name="Memory MCP Server",
command="npx",
args=["-y", "@modelcontextprotocol/server-memory"],
prompt=dedent("""
Follow these steps for each interaction:

1. Memory Retrieval:
- Always begin your chat by saying only "Remembering..." and retrieve all relevant information
from your knowledge graph
- Always refer to your knowledge graph as your "memory"
def get_mcp_server_configs(tools_config: ToolsConfigModel) -> List[MCPServerConfig]:
file_system_paths = [f"/workspaces/{file_system_path}" for file_system_path in tools_config.file_system_paths]
return [
MCPServerConfig(
name="Filesystem MCP Server",
command="npx",
args=["-y", "@modelcontextprotocol/server-filesystem", *file_system_paths],
),
MCPServerConfig(
name="Memory MCP Server",
command="npx",
args=["-y", "@modelcontextprotocol/server-memory"],
prompt=dedent("""
Follow these steps for each interaction:
2. Memory
- While conversing with the user, be attentive to any new information that falls into these categories:
a) Basic Identity (age, gender, location, job title, education level, etc.)
b) Behaviors (interests, habits, etc.)
c) Preferences (communication style, preferred language, etc.)
d) Goals (goals, targets, aspirations, etc.)
e) Relationships (personal and professional relationships up to 3 degrees of separation)
1. Memory Retrieval:
- Always begin your chat by saying only "Remembering..." and retrieve all relevant information
from your knowledge graph
- Always refer to your knowledge graph as your "memory"
3. Memory Update:
- If any new information was gathered during the interaction, update your memory as follows:
a) Create entities for recurring organizations, people, and significant events
b) Connect them to the current entities using relations
b) Store facts about them as observations
2. Memory
- While conversing with the user, be attentive to any new information that falls into these categories:
a) Basic Identity (age, gender, location, job title, education level, etc.)
b) Behaviors (interests, habits, etc.)
c) Preferences (communication style, preferred language, etc.)
d) Goals (goals, targets, aspirations, etc.)
e) Relationships (personal and professional relationships up to 3 degrees of separation)
3. Memory Update:
- If any new information was gathered during the interaction, update your memory as follows:
a) Create entities for recurring organizations, people, and significant events
b) Connect them to the current entities using relations
b) Store facts about them as observations
"""),
),
# MCPServerConfig(
# name="Sequential Thinking MCP Server",
# command="npx",
# args=["-y", "@modelcontextprotocol/server-sequential-thinking"],
# ),
# MCPServerConfig(
# name="Web Research MCP Server",
# command="npx",
# args=["-y", "@mzxrai/mcp-webresearch@latest"],
# ),
]
),
# MCPServerConfig(
# name="Sequential Thinking MCP Server",
# command="npx",
# args=["-y", "@modelcontextprotocol/server-sequential-thinking"],
# ),
# MCPServerConfig(
# name="Web Research MCP Server",
# command="npx",
# args=["-y", "@mzxrai/mcp-webresearch@latest"],
# ),
]
Original file line number Diff line number Diff line change
Expand Up @@ -5,8 +5,8 @@
from mcp import ClientSession
from mcp.client.stdio import StdioServerParameters, stdio_client

from .__mcp_server_configs import mcp_server_configs
from .__model import MCPServerConfig
from .__mcp_server_configs import get_mcp_server_configs
from .__model import MCPServerConfig, ToolsConfigModel

logger = logging.getLogger(__name__)

Expand All @@ -29,13 +29,13 @@ async def connect_to_mcp_server(server_config: MCPServerConfig) -> AsyncIterator
yield None # Yield None if connection fails


async def establish_mcp_sessions(stack: AsyncExitStack) -> List[ClientSession]:
async def establish_mcp_sessions(tools_config: ToolsConfigModel, stack: AsyncExitStack) -> List[ClientSession]:
"""
Establish connections to MCP servers using the provided AsyncExitStack.
"""

sessions: List[ClientSession] = []
for server_config in mcp_server_configs:
for server_config in get_mcp_server_configs(tools_config):
session: ClientSession | None = await stack.enter_async_context(connect_to_mcp_server(server_config))
if session:
sessions.append(session)
Expand All @@ -44,6 +44,6 @@ async def establish_mcp_sessions(stack: AsyncExitStack) -> List[ClientSession]:
return sessions


def get_mcp_server_prompts() -> List[str]:
def get_mcp_server_prompts(tools_config: ToolsConfigModel) -> List[str]:
"""Get the prompts for all MCP servers."""
return [server.prompt for server in mcp_server_configs if server.prompt]
return [server.prompt for server in get_mcp_server_configs(tools_config) if server.prompt]
Original file line number Diff line number Diff line change
Expand Up @@ -97,3 +97,11 @@ class ToolsConfigModel(BaseModel):
- Ensure that all values are plain data types (e.g., strings, numbers).
- **Do not** include any additional characters, functions, or expressions within the JSON.
""").strip()

file_system_paths: Annotated[
list[str],
Field(
title="File System Paths",
description="Paths to the file system for tools to use, relative to `/workspaces/` in the container.",
),
] = ["semanticworkbench"]
4 changes: 2 additions & 2 deletions assistants/codespace-assistant/assistant/response/response.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@ async def respond_to_conversation(
# If tools are enabled, establish connections to the MCP servers
mcp_sessions: List[ClientSession] = []
if config.extensions_config.tools.enabled:
mcp_sessions = await establish_mcp_sessions(stack)
mcp_sessions = await establish_mcp_sessions(config.extensions_config.tools, stack)
if not mcp_sessions:
await context.send_messages(
NewConversationMessage(
Expand All @@ -47,7 +47,7 @@ async def respond_to_conversation(
return

# Retrieve prompts from the MCP servers
mcp_prompts = get_mcp_server_prompts()
mcp_prompts = get_mcp_server_prompts(config.extensions_config.tools)

# Retrieve tools from the MCP sessions
mcp_tools = await retrieve_tools_from_sessions(mcp_sessions)
Expand Down
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
# Copyright (c) Microsoft. All rights reserved.

import logging
from typing import Any, List, Tuple
from typing import List, Tuple

import deepmerge
import openai_client
Expand Down Expand Up @@ -48,7 +48,8 @@ async def get_completion(
if tools is not None:
completion_args["tool_choice"] = "auto"

return await client.chat.completions.create(**completion_args)
completion = await client.chat.completions.create(**completion_args)
return completion


def extract_content_from_tool_calls(
Expand Down Expand Up @@ -104,35 +105,36 @@ def convert_mcp_tools_to_openai_tools(mcp_tools: List[Tool] | None) -> List[Chat
if not mcp_tools:
return None

tools_list: List[ChatCompletionToolParam] = []
openai_tools: List[ChatCompletionToolParam] = []
for mcp_tool in mcp_tools:
# add parameter for explaining the step for the user observing the assistant
aiContext: dict[str, Any] = {
"type": "string",
"description": (
"Explanation of why the AI is using this tool and what it expects to accomplish."
"This message is displayed to the user, coming from the point of view of the assistant"
" and should fit within the flow of the ongoing conversation, responding to the"
" preceding user message."
),
}

tools_list.append(
openai_tools.append(
ChatCompletionToolParam(
function=FunctionDefinition(
name=mcp_tool.name,
description=mcp_tool.description if mcp_tool.description else "[no description provided]",
parameters=deepmerge.always_merger.merge(
mcp_tool.inputSchema,
# Make a copy of the input schema so that we don't modify the original schema
mcp_tool.inputSchema.copy(),
{
"properties": {
"aiContext": aiContext,
# Add the "aiContext" parameter to the input schema
"aiContext": {
"type": "string",
"description": (
"Explanation of why the AI is using this tool and what it expects to accomplish."
"This message is displayed to the user, coming from the point of view of the assistant"
" and should fit within the flow of the ongoing conversation, responding to the"
" preceding user message."
),
},
},
# Ensure that the "aiContext" parameter is required
"required": ["aiContext"],
},
),
),
type="function",
)
)
return tools_list
return openai_tools
1 change: 1 addition & 0 deletions workbench-app/.vscode/settings.json
Original file line number Diff line number Diff line change
Expand Up @@ -201,6 +201,7 @@
"reduxjs",
"rehype",
"retriable",
"reverseproxy",
"rjsf",
"rootpath",
"selectin",
Expand Down
28 changes: 28 additions & 0 deletions workspaces-app/.vscode/launch.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
{
"version": "0.2.0",
"configurations": [
{
"type": "node",
"request": "launch",
"name": "app: workspaces-app",
"cwd": "${workspaceFolder}",
"skipFiles": ["<node_internals>/**"],
"console": "integratedTerminal",
"runtimeExecutable": "pnpm",
"runtimeArgs": ["run", "dev"]
},
{
"type": "node",
"request": "launch",
"name": "app: workspaces-app (no strict mode)",
"env": {
"VITE_DISABLE_STRICT_MODE": "true"
},
"cwd": "${workspaceFolder}",
"skipFiles": ["<node_internals>/**"],
"console": "integratedTerminal",
"runtimeExecutable": "pnpm",
"runtimeArgs": ["run", "dev"]
}
]
}
Loading

0 comments on commit d9913c7

Please sign in to comment.