Merge branch 'main' of github.com:taylorwilsdon/google_workspace_mcp into feat/555-auto-reply-headers

This commit is contained in:
Taylor Wilsdon
2026-03-17 08:37:20 -04:00
15 changed files with 1416 additions and 1083 deletions

View File

@@ -4,6 +4,10 @@ on:
pull_request: pull_request:
types: [opened, synchronize, reopened, edited] types: [opened, synchronize, reopened, edited]
permissions:
pull-requests: read
issues: write
jobs: jobs:
check-maintainer-edits: check-maintainer-edits:
runs-on: ubuntu-latest runs-on: ubuntu-latest

View File

@@ -11,6 +11,8 @@ on:
- main - main
workflow_dispatch: workflow_dispatch:
permissions: {}
env: env:
REGISTRY: ghcr.io REGISTRY: ghcr.io
IMAGE_NAME: ${{ github.repository }} IMAGE_NAME: ${{ github.repository }}

View File

@@ -6,6 +6,8 @@ on:
- "v*" - "v*"
workflow_dispatch: workflow_dispatch:
permissions: {}
jobs: jobs:
publish: publish:
runs-on: ubuntu-latest runs-on: ubuntu-latest

View File

@@ -641,8 +641,8 @@ def get_credentials(
f"[get_credentials] Found OAuth 2.1 credentials for MCP session {session_id}" f"[get_credentials] Found OAuth 2.1 credentials for MCP session {session_id}"
) )
# Refresh expired credentials before checking scopes # Refresh invalid credentials before checking scopes
if credentials.expired and credentials.refresh_token: if (not credentials.valid) and credentials.refresh_token:
try: try:
credentials.refresh(Request()) credentials.refresh(Request())
logger.info( logger.info(
@@ -772,9 +772,9 @@ def get_credentials(
logger.debug( logger.debug(
f"[get_credentials] Credentials are valid. User: '{user_google_email}', Session: '{session_id}'" f"[get_credentials] Credentials are valid. User: '{user_google_email}', Session: '{session_id}'"
) )
elif credentials.expired and credentials.refresh_token: elif credentials.refresh_token:
logger.info( logger.info(
f"[get_credentials] Credentials expired. Attempting refresh. User: '{user_google_email}', Session: '{session_id}'" f"[get_credentials] Credentials not valid. Attempting refresh. User: '{user_google_email}', Session: '{session_id}'"
) )
try: try:
logger.debug( logger.debug(

View File

@@ -348,7 +348,7 @@ def configure_server_for_http():
) )
elif use_disk: elif use_disk:
try: try:
from key_value.aio.stores.disk import DiskStore from key_value.aio.stores.filetree import FileTreeStore
disk_directory = os.getenv( disk_directory = os.getenv(
"WORKSPACE_MCP_OAUTH_PROXY_DISK_DIRECTORY", "" "WORKSPACE_MCP_OAUTH_PROXY_DISK_DIRECTORY", ""
@@ -363,7 +363,7 @@ def configure_server_for_http():
"~/.fastmcp/oauth-proxy" "~/.fastmcp/oauth-proxy"
) )
client_storage = DiskStore(directory=disk_directory) client_storage = FileTreeStore(data_directory=disk_directory)
jwt_signing_key = validate_and_derive_jwt_key( jwt_signing_key = validate_and_derive_jwt_key(
jwt_signing_key_override, config.client_secret jwt_signing_key_override, config.client_secret
@@ -379,7 +379,7 @@ def configure_server_for_http():
fernet=Fernet(key=storage_encryption_key), fernet=Fernet(key=storage_encryption_key),
) )
logger.info( logger.info(
"OAuth 2.1: Using DiskStore for FastMCP OAuth proxy client_storage (directory=%s)", "OAuth 2.1: Using FileTreeStore for FastMCP OAuth proxy client_storage (directory=%s)",
disk_directory, disk_directory,
) )
except ImportError as exc: except ImportError as exc:

View File

@@ -138,9 +138,15 @@ def build_paragraph_style(
if named_style_type is not None: if named_style_type is not None:
valid_styles = [ valid_styles = [
"NORMAL_TEXT", "TITLE", "SUBTITLE", "NORMAL_TEXT",
"HEADING_1", "HEADING_2", "HEADING_3", "TITLE",
"HEADING_4", "HEADING_5", "HEADING_6", "SUBTITLE",
"HEADING_1",
"HEADING_2",
"HEADING_3",
"HEADING_4",
"HEADING_5",
"HEADING_6",
] ]
if named_style_type not in valid_styles: if named_style_type not in valid_styles:
raise ValueError( raise ValueError(

View File

@@ -325,7 +325,9 @@ class BatchOperationManager:
tab_id, tab_id,
) )
style = "bulleted" if list_type == "UNORDERED" else "numbered" style = "bulleted" if list_type == "UNORDERED" else "numbered"
description = f"create {style} list {op['start_index']}-{op['end_index']}" description = (
f"create {style} list {op['start_index']}-{op['end_index']}"
)
if op.get("nesting_level"): if op.get("nesting_level"):
description += f" (nesting level {op['nesting_level']})" description += f" (nesting level {op['nesting_level']})"
@@ -491,7 +493,11 @@ class BatchOperationManager:
}, },
"create_bullet_list": { "create_bullet_list": {
"required": ["start_index", "end_index"], "required": ["start_index", "end_index"],
"optional": ["list_type", "nesting_level", "paragraph_start_indices"], "optional": [
"list_type",
"nesting_level",
"paragraph_start_indices",
],
"description": "Apply or remove native bullet/numbered list formatting (list_type: UNORDERED, ORDERED, or NONE to remove; nesting_level: 0-8)", "description": "Apply or remove native bullet/numbered list formatting (list_type: UNORDERED, ORDERED, or NONE to remove; nesting_level: 0-8)",
}, },
"insert_doc_tab": { "insert_doc_tab": {

View File

@@ -318,9 +318,15 @@ class ValidationManager:
if named_style_type is not None: if named_style_type is not None:
valid_styles = [ valid_styles = [
"NORMAL_TEXT", "TITLE", "SUBTITLE", "NORMAL_TEXT",
"HEADING_1", "HEADING_2", "HEADING_3", "TITLE",
"HEADING_4", "HEADING_5", "HEADING_6", "SUBTITLE",
"HEADING_1",
"HEADING_2",
"HEADING_3",
"HEADING_4",
"HEADING_5",
"HEADING_6",
] ]
if named_style_type not in valid_styles: if named_style_type not in valid_styles:
return ( return (

View File

@@ -36,7 +36,17 @@ logger = logging.getLogger(__name__)
GMAIL_BATCH_SIZE = 25 GMAIL_BATCH_SIZE = 25
GMAIL_REQUEST_DELAY = 0.1 GMAIL_REQUEST_DELAY = 0.1
HTML_BODY_TRUNCATE_LIMIT = 20000 HTML_BODY_TRUNCATE_LIMIT = 20000
GMAIL_METADATA_HEADERS = ["Subject", "From", "To", "Cc", "Message-ID", "Date"]
GMAIL_METADATA_HEADERS = [
"Subject",
"From",
"To",
"Cc",
"Message-ID",
"In-Reply-To",
"References",
"Date",
]
LOW_VALUE_TEXT_PLACEHOLDERS = ( LOW_VALUE_TEXT_PLACEHOLDERS = (
"your client does not support html", "your client does not support html",
"view this email in your browser", "view this email in your browser",
@@ -217,6 +227,114 @@ def _append_signature_to_body(
return f"{body}{separator}{signature_text}" return f"{body}{separator}{signature_text}"
async def _fetch_original_for_quote(
    service, thread_id: str, in_reply_to: Optional[str] = None
) -> Optional[dict]:
    """Fetch the original message from a thread for quoting in a reply.

    When *in_reply_to* is provided, look for that specific Message-ID
    inside the thread; otherwise fall back to the thread's last message.

    Returns a dict with keys: sender, date, text_body, html_body -- or
    *None* when the message cannot be retrieved.
    """
    try:
        thread_data = await asyncio.to_thread(
            service.users()
            .threads()
            .get(userId="me", id=thread_id, format="full")
            .execute
        )
    except Exception as e:
        logger.warning(f"Failed to fetch thread {thread_id} for quoting: {e}")
        return None

    messages = thread_data.get("messages", [])
    if not messages:
        return None

    def _headers_of(message: dict) -> dict:
        # Header names are taken verbatim from the API payload.
        return {
            h["name"]: h["value"]
            for h in message.get("payload", {}).get("headers", [])
        }

    target = None
    if in_reply_to:
        target = next(
            (m for m in messages if _headers_of(m).get("Message-ID") == in_reply_to),
            None,
        )
    if target is None:
        # No match (or no in_reply_to given): quote the most recent message.
        target = messages[-1]

    headers = _headers_of(target)
    bodies = _extract_message_bodies(target.get("payload", {}))
    return {
        "sender": headers.get("From", "unknown"),
        "date": headers.get("Date", ""),
        "text_body": bodies.get("text", ""),
        "html_body": bodies.get("html", ""),
    }
def _build_quoted_reply_body(
reply_body: str,
body_format: Literal["plain", "html"],
signature_html: str,
original: dict,
) -> str:
"""Assemble reply body + signature + quoted original message.
Layout:
reply_body
-- signature --
On {date}, {sender} wrote:
> quoted original
"""
import html as _html_mod
if original.get("date"):
attribution = f"On {original['date']}, {original['sender']} wrote:"
else:
attribution = f"{original['sender']} wrote:"
if body_format == "html":
# Signature
sig_block = ""
if signature_html and signature_html.strip():
sig_block = f"<br><br>{signature_html}"
# Quoted original
orig_html = original.get("html_body") or ""
if not orig_html:
orig_text = original.get("text_body", "")
orig_html = f"<pre>{_html_mod.escape(orig_text)}</pre>"
quote_block = (
'<br><br><div class="gmail_quote">'
f"<span>{_html_mod.escape(attribution)}</span><br>"
'<blockquote style="margin:0 0 0 .8ex;border-left:1px solid #ccc;padding-left:1ex">'
f"{orig_html}"
"</blockquote></div>"
)
return f"{reply_body}{sig_block}{quote_block}"
# Plain text path
sig_block = ""
if signature_html and signature_html.strip():
sig_text = _html_to_text(signature_html).strip()
if sig_text:
sig_block = f"\n\n{sig_text}"
orig_text = original.get("text_body") or ""
if not orig_text and original.get("html_body"):
orig_text = _html_to_text(original["html_body"])
quoted_lines = "\n".join(f"> {line}" for line in orig_text.splitlines())
return f"{reply_body}{sig_block}\n\n{attribution}\n{quoted_lines}"
async def _get_send_as_signature_html(service, from_email: Optional[str] = None) -> str: async def _get_send_as_signature_html(service, from_email: Optional[str] = None) -> str:
""" """
Fetch signature HTML from Gmail send-as settings. Fetch signature HTML from Gmail send-as settings.
@@ -761,6 +879,13 @@ async def get_gmail_message_content(
if rfc822_msg_id: if rfc822_msg_id:
content_lines.append(f"Message-ID: {rfc822_msg_id}") content_lines.append(f"Message-ID: {rfc822_msg_id}")
in_reply_to = headers.get("In-Reply-To", "")
references = headers.get("References", "")
if in_reply_to:
content_lines.append(f"In-Reply-To: {in_reply_to}")
if references:
content_lines.append(f"References: {references}")
if to: if to:
content_lines.append(f"To: {to}") content_lines.append(f"To: {to}")
if cc: if cc:
@@ -926,12 +1051,19 @@ async def get_gmail_messages_content_batch(
cc = headers.get("Cc", "") cc = headers.get("Cc", "")
rfc822_msg_id = headers.get("Message-ID", "") rfc822_msg_id = headers.get("Message-ID", "")
in_reply_to = headers.get("In-Reply-To", "")
references = headers.get("References", "")
msg_output = ( msg_output = (
f"Message ID: {mid}\nSubject: {subject}\nFrom: {sender}\n" f"Message ID: {mid}\nSubject: {subject}\nFrom: {sender}\n"
f"Date: {headers.get('Date', '(unknown date)')}\n" f"Date: {headers.get('Date', '(unknown date)')}\n"
) )
if rfc822_msg_id: if rfc822_msg_id:
msg_output += f"Message-ID: {rfc822_msg_id}\n" msg_output += f"Message-ID: {rfc822_msg_id}\n"
if in_reply_to:
msg_output += f"In-Reply-To: {in_reply_to}\n"
if references:
msg_output += f"References: {references}\n"
if to: if to:
msg_output += f"To: {to}\n" msg_output += f"To: {to}\n"
@@ -957,12 +1089,19 @@ async def get_gmail_messages_content_batch(
# Format body content with HTML fallback # Format body content with HTML fallback
body_data = _format_body_content(text_body, html_body) body_data = _format_body_content(text_body, html_body)
in_reply_to = headers.get("In-Reply-To", "")
references = headers.get("References", "")
msg_output = ( msg_output = (
f"Message ID: {mid}\nSubject: {subject}\nFrom: {sender}\n" f"Message ID: {mid}\nSubject: {subject}\nFrom: {sender}\n"
f"Date: {headers.get('Date', '(unknown date)')}\n" f"Date: {headers.get('Date', '(unknown date)')}\n"
) )
if rfc822_msg_id: if rfc822_msg_id:
msg_output += f"Message-ID: {rfc822_msg_id}\n" msg_output += f"Message-ID: {rfc822_msg_id}\n"
if in_reply_to:
msg_output += f"In-Reply-To: {in_reply_to}\n"
if references:
msg_output += f"References: {references}\n"
if to: if to:
msg_output += f"To: {to}\n" msg_output += f"To: {to}\n"
@@ -1202,7 +1341,7 @@ async def send_gmail_message(
in_reply_to: Annotated[ in_reply_to: Annotated[
Optional[str], Optional[str],
Field( Field(
description="Optional Message-ID of the message being replied to.", description="Optional RFC Message-ID of the message being replied to (e.g., '<message123@gmail.com>').",
), ),
] = None, ] = None,
references: Annotated[ references: Annotated[
@@ -1244,8 +1383,8 @@ async def send_gmail_message(
the email will be sent from the authenticated user's primary email address. the email will be sent from the authenticated user's primary email address.
user_google_email (str): The user's Google email address. Required for authentication. user_google_email (str): The user's Google email address. Required for authentication.
thread_id (Optional[str]): Optional Gmail thread ID to reply within. When provided, sends a reply. thread_id (Optional[str]): Optional Gmail thread ID to reply within. When provided, sends a reply.
in_reply_to (Optional[str]): Optional Message-ID of the message being replied to. Used for proper threading. in_reply_to (Optional[str]): Optional RFC Message-ID of the message being replied to (e.g., '<message123@gmail.com>').
references (Optional[str]): Optional chain of Message-IDs for proper threading. Should include all previous Message-IDs. references (Optional[str]): Optional chain of RFC Message-IDs for proper threading (e.g., '<msg1@gmail.com> <msg2@gmail.com>').
Returns: Returns:
str: Confirmation message with the sent email's message ID. str: Confirmation message with the sent email's message ID.
@@ -1409,7 +1548,7 @@ async def draft_gmail_message(
in_reply_to: Annotated[ in_reply_to: Annotated[
Optional[str], Optional[str],
Field( Field(
description="Optional Message-ID of the message being replied to.", description="Optional RFC Message-ID of the message being replied to (e.g., '<message123@gmail.com>').",
), ),
] = None, ] = None,
references: Annotated[ references: Annotated[
@@ -1430,6 +1569,12 @@ async def draft_gmail_message(
description="Whether to append the Gmail signature from Settings > Signature when available. Defaults to true.", description="Whether to append the Gmail signature from Settings > Signature when available. Defaults to true.",
), ),
] = True, ] = True,
quote_original: Annotated[
bool,
Field(
description="Whether to include the original message as a quoted reply. Requires thread_id. Defaults to false.",
),
] = False,
) -> str: ) -> str:
""" """
Creates a draft email in the user's Gmail account. Supports both new drafts and reply drafts with optional attachments. Creates a draft email in the user's Gmail account. Supports both new drafts and reply drafts with optional attachments.
@@ -1448,8 +1593,8 @@ async def draft_gmail_message(
configured in Gmail settings (Settings > Accounts > Send mail as). If not provided, configured in Gmail settings (Settings > Accounts > Send mail as). If not provided,
the draft will be from the authenticated user's primary email address. the draft will be from the authenticated user's primary email address.
thread_id (Optional[str]): Optional Gmail thread ID to reply within. When provided, creates a reply draft. thread_id (Optional[str]): Optional Gmail thread ID to reply within. When provided, creates a reply draft.
in_reply_to (Optional[str]): Optional Message-ID of the message being replied to. Used for proper threading. in_reply_to (Optional[str]): Optional RFC Message-ID of the message being replied to (e.g., '<message123@gmail.com>').
references (Optional[str]): Optional chain of Message-IDs for proper threading. Should include all previous Message-IDs. references (Optional[str]): Optional chain of RFC Message-IDs for proper threading (e.g., '<msg1@gmail.com> <msg2@gmail.com>').
attachments (List[Dict[str, str]]): Optional list of attachments. Each dict can contain: attachments (List[Dict[str, str]]): Optional list of attachments. Each dict can contain:
Option 1 - File path (auto-encodes): Option 1 - File path (auto-encodes):
- 'path' (required): File path to attach - 'path' (required): File path to attach
@@ -1461,6 +1606,9 @@ async def draft_gmail_message(
- 'mime_type' (optional): MIME type (defaults to 'application/octet-stream') - 'mime_type' (optional): MIME type (defaults to 'application/octet-stream')
include_signature (bool): Whether to append Gmail signature HTML from send-as settings. include_signature (bool): Whether to append Gmail signature HTML from send-as settings.
If unavailable (e.g., missing gmail.settings.basic scope), the draft is still created without signature. If unavailable (e.g., missing gmail.settings.basic scope), the draft is still created without signature.
quote_original (bool): Whether to include the original message as a quoted reply.
Requires thread_id to be provided. When enabled, fetches the original message
and appends it below the signature. Defaults to False.
Returns: Returns:
str: Confirmation message with the created draft's ID. str: Confirmation message with the created draft's ID.
@@ -1525,10 +1673,23 @@ async def draft_gmail_message(
# Use from_email (Send As alias) if provided, otherwise default to authenticated user # Use from_email (Send As alias) if provided, otherwise default to authenticated user
sender_email = from_email or user_google_email sender_email = from_email or user_google_email
draft_body = body draft_body = body
signature_html = ""
if include_signature: if include_signature:
signature_html = await _get_send_as_signature_html( signature_html = await _get_send_as_signature_html(
service, from_email=sender_email service, from_email=sender_email
) )
if quote_original and thread_id:
original = await _fetch_original_for_quote(service, thread_id, in_reply_to)
if original:
draft_body = _build_quoted_reply_body(
draft_body, body_format, signature_html, original
)
else:
draft_body = _append_signature_to_body(
draft_body, body_format, signature_html
)
else:
draft_body = _append_signature_to_body(draft_body, body_format, signature_html) draft_body = _append_signature_to_body(draft_body, body_format, signature_html)
# Auto-populate In-Reply-To and References when thread_id is provided # Auto-populate In-Reply-To and References when thread_id is provided

View File

@@ -7,11 +7,15 @@ conditional formatting helpers.
import asyncio import asyncio
import json import json
import logging
import re import re
from typing import List, Optional, Union from typing import List, Optional, Union
from core.utils import UserInputError from core.utils import UserInputError
logger = logging.getLogger(__name__)
MAX_GRID_METADATA_CELLS = 5000
A1_PART_REGEX = re.compile(r"^([A-Za-z]*)(\d*)$") A1_PART_REGEX = re.compile(r"^([A-Za-z]*)(\d*)$")
SHEET_TITLE_SAFE_RE = re.compile(r"^[A-Za-z0-9_]+$") SHEET_TITLE_SAFE_RE = re.compile(r"^[A-Za-z0-9_]+$")
@@ -877,3 +881,170 @@ def _build_gradient_rule(
rule_body["gradientRule"]["midpoint"] = gradient_points[1] rule_body["gradientRule"]["midpoint"] = gradient_points[1]
rule_body["gradientRule"]["maxpoint"] = gradient_points[2] rule_body["gradientRule"]["maxpoint"] = gradient_points[2]
return rule_body return rule_body
def _extract_cell_notes_from_grid(spreadsheet: dict) -> list[dict[str, str]]:
    """
    Extract cell notes from spreadsheet grid data.

    Returns a list of dictionaries with:
        - "cell": cell A1 reference
        - "note": the note text
    """
    collected: list[dict[str, str]] = []
    for sheet in spreadsheet.get("sheets", []) or []:
        title = sheet.get("properties", {}).get("title") or "Unknown"
        for grid in sheet.get("data", []) or []:
            # Grid blocks carry absolute offsets for their first row/column.
            row_base = _coerce_int(grid.get("startRow"), default=0)
            col_base = _coerce_int(grid.get("startColumn"), default=0)
            for r, row_data in enumerate(grid.get("rowData", []) or []):
                if not row_data:
                    continue
                for c, cell in enumerate(row_data.get("values", []) or []):
                    note = cell.get("note") if cell else None
                    if not note:
                        continue
                    collected.append(
                        {
                            "cell": _format_a1_cell(title, row_base + r, col_base + c),
                            "note": note,
                        }
                    )
    return collected
async def _fetch_sheet_notes(
    service, spreadsheet_id: str, a1_range: str
) -> list[dict[str, str]]:
    """Fetch cell notes for the given range via spreadsheets.get with includeGridData."""
    # Restrict the response to note data only to keep the payload small.
    request = service.spreadsheets().get(
        spreadsheetId=spreadsheet_id,
        ranges=[a1_range],
        includeGridData=True,
        fields="sheets(properties(title),data(startRow,startColumn,rowData(values(note))))",
    )
    spreadsheet = await asyncio.to_thread(request.execute)
    return _extract_cell_notes_from_grid(spreadsheet)
def _format_sheet_notes_section(
*, notes: list[dict[str, str]], range_label: str, max_details: int = 25
) -> str:
"""
Format a list of cell notes into a human-readable section.
"""
if not notes:
return ""
lines = []
for item in notes[:max_details]:
cell = item.get("cell") or "(unknown cell)"
note = item.get("note") or "(empty note)"
lines.append(f"- {cell}: {note}")
suffix = (
f"\n... and {len(notes) - max_details} more notes"
if len(notes) > max_details
else ""
)
return f"\n\nCell notes in range '{range_label}':\n" + "\n".join(lines) + suffix
async def _fetch_grid_metadata(
    service,
    spreadsheet_id: str,
    resolved_range: str,
    values: List[List[object]],
    include_hyperlinks: bool = False,
    include_notes: bool = False,
) -> tuple[str, str]:
    """Fetch hyperlinks and/or notes for a range via a single spreadsheets.get call.

    Computes tight range bounds, enforces the cell-count cap, builds a combined
    ``fields`` selector so only one API round-trip is needed when both flags are
    ``True``, then parses the response into formatted output sections.

    Returns:
        (hyperlink_section, notes_section) — each is an empty string when the
        corresponding flag is ``False`` or no data was found.
    """
    if not (include_hyperlinks or include_notes):
        return "", ""

    tight_range = _a1_range_for_values(resolved_range, values)
    if not tight_range:
        logger.info(
            "[read_sheet_values] Skipping grid metadata fetch for range '%s': "
            "unable to determine tight bounds",
            resolved_range,
        )
        return "", ""

    cell_count = _a1_range_cell_count(tight_range) or sum(len(row) for row in values)
    if cell_count > MAX_GRID_METADATA_CELLS:
        logger.info(
            "[read_sheet_values] Skipping grid metadata fetch for large range "
            "'%s' (%d cells > %d limit)",
            tight_range,
            cell_count,
            MAX_GRID_METADATA_CELLS,
        )
        return "", ""

    # One combined fields selector so we hit the API at most once.
    selectors: list[str] = []
    if include_hyperlinks:
        selectors += ["hyperlink", "textFormatRuns(format(link(uri)))"]
    if include_notes:
        selectors.append("note")
    fields = (
        "sheets(properties(title),data(startRow,startColumn,"
        f"rowData(values({','.join(selectors)}))))"
    )

    try:
        response = await asyncio.to_thread(
            service.spreadsheets()
            .get(
                spreadsheetId=spreadsheet_id,
                ranges=[tight_range],
                includeGridData=True,
                fields=fields,
            )
            .execute
        )
    except Exception as exc:
        logger.warning(
            "[read_sheet_values] Failed fetching grid metadata for range '%s': %s",
            tight_range,
            exc,
        )
        return "", ""

    hyperlink_section = (
        _format_sheet_hyperlink_section(
            hyperlinks=_extract_cell_hyperlinks_from_grid(response),
            range_label=tight_range,
        )
        if include_hyperlinks
        else ""
    )
    notes_section = (
        _format_sheet_notes_section(
            notes=_extract_cell_notes_from_grid(response),
            range_label=tight_range,
        )
        if include_notes
        else ""
    )
    return hyperlink_section, notes_section

View File

@@ -15,16 +15,14 @@ from core.server import server
from core.utils import handle_http_errors, UserInputError from core.utils import handle_http_errors, UserInputError
from core.comments import create_comment_tools from core.comments import create_comment_tools
from gsheets.sheets_helpers import ( from gsheets.sheets_helpers import (
_a1_range_cell_count,
CONDITION_TYPES, CONDITION_TYPES,
_a1_range_for_values, _a1_range_for_values,
_build_boolean_rule, _build_boolean_rule,
_build_gradient_rule, _build_gradient_rule,
_fetch_detailed_sheet_errors, _fetch_detailed_sheet_errors,
_fetch_sheet_hyperlinks, _fetch_grid_metadata,
_fetch_sheets_with_rules, _fetch_sheets_with_rules,
_format_conditional_rules_section, _format_conditional_rules_section,
_format_sheet_hyperlink_section,
_format_sheet_error_section, _format_sheet_error_section,
_parse_a1_range, _parse_a1_range,
_parse_condition_values, _parse_condition_values,
@@ -36,7 +34,6 @@ from gsheets.sheets_helpers import (
# Configure module logger # Configure module logger
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
MAX_HYPERLINK_FETCH_CELLS = 5000
@server.tool() @server.tool()
@@ -179,6 +176,7 @@ async def read_sheet_values(
spreadsheet_id: str, spreadsheet_id: str,
range_name: str = "A1:Z1000", range_name: str = "A1:Z1000",
include_hyperlinks: bool = False, include_hyperlinks: bool = False,
include_notes: bool = False,
) -> str: ) -> str:
""" """
Reads values from a specific range in a Google Sheet. Reads values from a specific range in a Google Sheet.
@@ -189,6 +187,8 @@ async def read_sheet_values(
range_name (str): The range to read (e.g., "Sheet1!A1:D10", "A1:D10"). Defaults to "A1:Z1000". range_name (str): The range to read (e.g., "Sheet1!A1:D10", "A1:D10"). Defaults to "A1:Z1000".
include_hyperlinks (bool): If True, also fetch hyperlink metadata for the range. include_hyperlinks (bool): If True, also fetch hyperlink metadata for the range.
Defaults to False to avoid expensive includeGridData requests. Defaults to False to avoid expensive includeGridData requests.
include_notes (bool): If True, also fetch cell notes for the range.
Defaults to False to avoid expensive includeGridData requests.
Returns: Returns:
str: The formatted values from the specified range. str: The formatted values from the specified range.
@@ -211,41 +211,14 @@ async def read_sheet_values(
resolved_range = result.get("range", range_name) resolved_range = result.get("range", range_name)
detailed_range = _a1_range_for_values(resolved_range, values) or resolved_range detailed_range = _a1_range_for_values(resolved_range, values) or resolved_range
hyperlink_section = "" hyperlink_section, notes_section = await _fetch_grid_metadata(
if include_hyperlinks: service,
# Use a tight A1 range for includeGridData fetches to avoid expensive spreadsheet_id,
# open-ended requests (e.g., A:Z). resolved_range,
hyperlink_range = _a1_range_for_values(resolved_range, values) values,
if not hyperlink_range: include_hyperlinks=include_hyperlinks,
logger.info( include_notes=include_notes,
"[read_sheet_values] Skipping hyperlink fetch for range '%s': unable to determine tight bounds", )
resolved_range,
)
else:
cell_count = _a1_range_cell_count(hyperlink_range) or sum(
len(row) for row in values
)
if cell_count <= MAX_HYPERLINK_FETCH_CELLS:
try:
hyperlinks = await _fetch_sheet_hyperlinks(
service, spreadsheet_id, hyperlink_range
)
hyperlink_section = _format_sheet_hyperlink_section(
hyperlinks=hyperlinks, range_label=hyperlink_range
)
except Exception as exc:
logger.warning(
"[read_sheet_values] Failed fetching hyperlinks for range '%s': %s",
hyperlink_range,
exc,
)
else:
logger.info(
"[read_sheet_values] Skipping hyperlink fetch for large range '%s' (%d cells > %d limit)",
hyperlink_range,
cell_count,
MAX_HYPERLINK_FETCH_CELLS,
)
detailed_errors_section = "" detailed_errors_section = ""
if _values_contain_sheets_errors(values): if _values_contain_sheets_errors(values):
@@ -277,7 +250,7 @@ async def read_sheet_values(
) )
logger.info(f"Successfully read {len(values)} rows for {user_google_email}.") logger.info(f"Successfully read {len(values)} rows for {user_google_email}.")
return text_output + hyperlink_section + detailed_errors_section return text_output + hyperlink_section + notes_section + detailed_errors_section
@server.tool() @server.tool()

View File

@@ -517,7 +517,12 @@ def main():
) )
sys.exit(1) sys.exit(1)
server.run(transport="streamable-http", host=host, port=port) server.run(
transport="streamable-http",
host=host,
port=port,
stateless_http=is_stateless_mode(),
)
else: else:
server.run() server.run()
except KeyboardInterrupt: except KeyboardInterrupt:

View File

@@ -12,13 +12,13 @@ license = "MIT"
requires-python = ">=3.10" requires-python = ">=3.10"
dependencies = [ dependencies = [
"fastapi>=0.115.12", "fastapi>=0.115.12",
"fastmcp>=3.0.2", "fastmcp>=3.1.1",
"google-api-python-client>=2.168.0", "google-api-python-client>=2.168.0",
"google-auth-httplib2>=0.2.0", "google-auth-httplib2>=0.2.0",
"google-auth-oauthlib>=1.2.2", "google-auth-oauthlib>=1.2.2",
"httpx>=0.28.1", "httpx>=0.28.1",
"py-key-value-aio>=0.3.0", "py-key-value-aio>=0.3.0",
"pyjwt>=2.10.1", "pyjwt>=2.12.0",
"python-dotenv>=1.1.0", "python-dotenv>=1.1.0",
"pyyaml>=6.0.2", "pyyaml>=6.0.2",
"cryptography>=45.0.0", "cryptography>=45.0.0",
@@ -59,7 +59,7 @@ workspace-mcp = "main:main"
[project.optional-dependencies] [project.optional-dependencies]
disk = [ disk = [
"py-key-value-aio[disk]>=0.3.0", "py-key-value-aio[filetree]>=0.3.0",
] ]
valkey = [ valkey = [
"py-key-value-aio[valkey]>=0.3.0", "py-key-value-aio[valkey]>=0.3.0",
@@ -84,7 +84,7 @@ dev = [
[dependency-groups] [dependency-groups]
disk = [ disk = [
"py-key-value-aio[disk]>=0.3.0", "py-key-value-aio[filetree]>=0.3.0",
] ]
valkey = [ valkey = [
"py-key-value-aio[valkey]>=0.3.0", "py-key-value-aio[valkey]>=0.3.0",

View File

@@ -3,6 +3,8 @@ Unit tests for Google Chat MCP tools — attachment support
""" """
import base64 import base64
from urllib.parse import urlparse
import pytest import pytest
from unittest.mock import AsyncMock, Mock, patch from unittest.mock import AsyncMock, Mock, patch
import sys import sys
@@ -271,10 +273,12 @@ async def test_download_uses_api_media_endpoint():
# Verify we used the API endpoint with attachmentDataRef.resourceName # Verify we used the API endpoint with attachmentDataRef.resourceName
call_args = mock_client.get.call_args call_args = mock_client.get.call_args
url_used = call_args.args[0] url_used = call_args.args[0]
assert "chat.googleapis.com" in url_used parsed = urlparse(url_used)
assert parsed.scheme == "https"
assert parsed.hostname == "chat.googleapis.com"
assert "alt=media" in url_used assert "alt=media" in url_used
assert "spaces/S/attachments/A" in url_used assert "spaces/S/attachments/A" in parsed.path
assert "/messages/" not in url_used assert "/messages/" not in parsed.path
# Verify Bearer token # Verify Bearer token
assert call_args.kwargs["headers"]["Authorization"] == "Bearer fake-access-token" assert call_args.kwargs["headers"]["Authorization"] == "Bearer fake-access-token"

uv.lock (generated) — 2019 changed lines.

File diff suppressed because it is too large; select "Load Diff" to view it.