Skip to content

Commit 73e07e4

Browse files
authored
Add headers handling to BasicMCPClient (#18919)
1 parent 69f1a60 commit 73e07e4

File tree

2 files changed

+19
-4
lines changed

2 files changed

+19
-4
lines changed

llama-index-integrations/tools/llama-index-tools-mcp/llama_index/tools/mcp/client.py

Lines changed: 18 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,17 @@
11
import warnings
22
from contextlib import asynccontextmanager
33
from datetime import timedelta
4-
from typing import Optional, List, Dict, Tuple, Callable, AsyncIterator, Awaitable, Dict
4+
from typing import (
5+
Optional,
6+
List,
7+
Dict,
8+
Tuple,
9+
Callable,
10+
AsyncIterator,
11+
Awaitable,
12+
Dict,
13+
Any,
14+
)
515
from urllib.parse import urlparse, parse_qs
616
from mcp.client.session import ClientSession, ProgressFnT
717
from mcp.client.sse import sse_client
@@ -73,6 +83,7 @@ class BasicMCPClient(ClientSession):
7383
timeout: The timeout for the command in seconds.
7484
auth: Optional OAuth client provider for authentication.
7585
sampling_callback: Optional callback for handling sampling messages.
86+
headers: Optional headers to pass to the SSE client or streamable HTTP client
7687
7788
"""
7889

@@ -88,13 +99,15 @@ def __init__(
8899
[types.CreateMessageRequestParams], Awaitable[types.CreateMessageResult]
89100
]
90101
] = None,
102+
headers: Optional[Dict[str, Any]] = None,
91103
):
92104
self.command_or_url = command_or_url
93105
self.args = args or []
94106
self.env = env or {}
95107
self.timeout = timeout
96108
self.auth = auth
97109
self.sampling_callback = sampling_callback
110+
self.headers = headers
98111

99112
@classmethod
100113
def with_oauth(
@@ -161,7 +174,9 @@ async def _run_session(self) -> AsyncIterator[ClientSession]:
161174
# Check if this is a streamable HTTP endpoint (default) or SSE
162175
if enable_sse(self.command_or_url):
163176
# SSE transport
164-
async with sse_client(self.command_or_url, auth=self.auth) as streams:
177+
async with sse_client(
178+
self.command_or_url, auth=self.auth, headers=self.headers
179+
) as streams:
165180
async with ClientSession(
166181
*streams,
167182
read_timeout_seconds=timedelta(seconds=self.timeout),
@@ -172,7 +187,7 @@ async def _run_session(self) -> AsyncIterator[ClientSession]:
172187
else:
173188
# Streamable HTTP transport (recommended)
174189
async with streamablehttp_client(
175-
self.command_or_url, auth=self.auth
190+
self.command_or_url, auth=self.auth, headers=self.headers
176191
) as (read, write, _):
177192
async with ClientSession(
178193
read,

llama-index-integrations/tools/llama-index-tools-mcp/pyproject.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -29,7 +29,7 @@ dev = [
2929

3030
[project]
3131
name = "llama-index-tools-mcp"
32-
version = "0.2.2"
32+
version = "0.2.3"
3333
description = "llama-index tools mcp integration"
3434
authors = [{name = "Chojan Shang", email = "[email protected]"}]
3535
requires-python = ">=3.10,<4.0"

0 commit comments

Comments
 (0)