Merge branch 'main' of github.com:taylorwilsdon/google_workspace_mcp into fix/567-darwin-stdout

This commit is contained in:
Taylor Wilsdon
2026-03-17 10:12:11 -04:00
17 changed files with 1802 additions and 1088 deletions

View File

@@ -4,6 +4,10 @@ on:
pull_request: pull_request:
types: [opened, synchronize, reopened, edited] types: [opened, synchronize, reopened, edited]
permissions:
pull-requests: read
issues: write
jobs: jobs:
check-maintainer-edits: check-maintainer-edits:
runs-on: ubuntu-latest runs-on: ubuntu-latest

View File

@@ -11,6 +11,8 @@ on:
- main - main
workflow_dispatch: workflow_dispatch:
permissions: {}
env: env:
REGISTRY: ghcr.io REGISTRY: ghcr.io
IMAGE_NAME: ${{ github.repository }} IMAGE_NAME: ${{ github.repository }}

View File

@@ -6,6 +6,8 @@ on:
- "v*" - "v*"
workflow_dispatch: workflow_dispatch:
permissions: {}
jobs: jobs:
publish: publish:
runs-on: ubuntu-latest runs-on: ubuntu-latest

View File

@@ -338,6 +338,85 @@ def create_oauth_flow(
return flow return flow
def _determine_oauth_prompt(
    user_google_email: Optional[str],
    required_scopes: List[str],
    session_id: Optional[str] = None,
) -> str:
    """
    Determine which OAuth prompt to use for a new authorization URL.

    Uses `select_account` for re-auth when existing credentials already cover
    required scopes. Uses `consent` for first-time auth and scope expansion.

    Args:
        user_google_email: Email supplied by the caller; "default", empty, or
            whitespace-only values are treated as absent.
        required_scopes: Scopes the new authorization must cover.
        session_id: Optional MCP session ID used to resolve the user and to
            look up session-bound credentials.

    Returns:
        "select_account" or "consent" — the value for the OAuth `prompt`
        query parameter.
    """
    # Normalize the caller-supplied email: treat "", whitespace, and the
    # literal placeholder "default" as if no email was provided.
    normalized_email = (
        user_google_email.strip()
        if user_google_email
        and user_google_email.strip()
        and user_google_email.lower() != "default"
        else None
    )

    # If no explicit email was provided, attempt to resolve it from session mapping.
    if not normalized_email and session_id:
        try:
            session_user = get_oauth21_session_store().get_user_by_mcp_session(
                session_id
            )
            if session_user:
                normalized_email = session_user
        except Exception as e:
            # Best-effort lookup: fall through to the "consent" default below.
            logger.debug(f"Could not resolve user from session for prompt choice: {e}")

    if not normalized_email:
        logger.info(
            "[start_auth_flow] Using prompt='consent' (no known user email for re-auth detection)."
        )
        return "consent"

    existing_credentials: Optional[Credentials] = None
    # Prefer credentials bound to the current session when available.
    if session_id:
        try:
            session_store = get_oauth21_session_store()
            mapped_user = session_store.get_user_by_mcp_session(session_id)
            # Only trust session credentials if the session maps to this user.
            if mapped_user == normalized_email:
                existing_credentials = session_store.get_credentials_by_mcp_session(
                    session_id
                )
        except Exception as e:
            logger.debug(
                f"Could not read OAuth 2.1 session store for prompt choice: {e}"
            )

    # Fall back to credential file store in stateful mode.
    if not existing_credentials and not is_stateless_mode():
        try:
            existing_credentials = get_credential_store().get_credential(
                normalized_email
            )
        except Exception as e:
            logger.debug(f"Could not read credential store for prompt choice: {e}")

    if not existing_credentials:
        logger.info(
            f"[start_auth_flow] Using prompt='consent' (no existing credentials for {normalized_email})."
        )
        return "consent"

    # Re-auth with already-sufficient scopes: let the user just pick an account.
    if has_required_scopes(existing_credentials.scopes, required_scopes):
        logger.info(
            f"[start_auth_flow] Using prompt='select_account' for re-auth of {normalized_email}."
        )
        return "select_account"

    # Scope expansion: force the consent screen so new scopes are granted.
    logger.info(
        f"[start_auth_flow] Using prompt='consent' (existing credentials for {normalized_email} are missing required scopes)."
    )
    return "consent"
# --- Core OAuth Logic --- # --- Core OAuth Logic ---
@@ -387,15 +466,14 @@ async def start_auth_flow(
os.environ["OAUTHLIB_INSECURE_TRANSPORT"] = "1" os.environ["OAUTHLIB_INSECURE_TRANSPORT"] = "1"
oauth_state = os.urandom(16).hex() oauth_state = os.urandom(16).hex()
current_scopes = get_current_scopes()
flow = create_oauth_flow( flow = create_oauth_flow(
scopes=get_current_scopes(), # Use scopes for enabled tools only scopes=current_scopes, # Use scopes for enabled tools only
redirect_uri=redirect_uri, # Use passed redirect_uri redirect_uri=redirect_uri, # Use passed redirect_uri
state=oauth_state, state=oauth_state,
) )
auth_url, _ = flow.authorization_url(access_type="offline", prompt="consent")
session_id = None session_id = None
try: try:
session_id = get_fastmcp_session_id() session_id = get_fastmcp_session_id()
@@ -404,6 +482,13 @@ async def start_auth_flow(
f"Could not retrieve FastMCP session ID for state binding: {e}" f"Could not retrieve FastMCP session ID for state binding: {e}"
) )
prompt_type = _determine_oauth_prompt(
user_google_email=user_google_email,
required_scopes=current_scopes,
session_id=session_id,
)
auth_url, _ = flow.authorization_url(access_type="offline", prompt=prompt_type)
store = get_oauth21_session_store() store = get_oauth21_session_store()
store.store_oauth_state( store.store_oauth_state(
oauth_state, oauth_state,
@@ -568,12 +653,61 @@ def handle_auth_callback(
user_google_email = user_info["email"] user_google_email = user_info["email"]
logger.info(f"Identified user_google_email: {user_google_email}") logger.info(f"Identified user_google_email: {user_google_email}")
# Save the credentials
credential_store = get_credential_store() credential_store = get_credential_store()
if not credentials.refresh_token:
fallback_refresh_token = None
if session_id:
try:
session_credentials = store.get_credentials_by_mcp_session(
session_id
)
if session_credentials and session_credentials.refresh_token:
fallback_refresh_token = session_credentials.refresh_token
logger.info(
"OAuth callback response omitted refresh token; preserving existing refresh token from session store."
)
except Exception as e:
logger.debug(
f"Could not check session store for existing refresh token: {e}"
)
if not fallback_refresh_token and not is_stateless_mode():
try:
existing_credentials = credential_store.get_credential(
user_google_email
)
if existing_credentials and existing_credentials.refresh_token:
fallback_refresh_token = existing_credentials.refresh_token
logger.info(
"OAuth callback response omitted refresh token; preserving existing refresh token from credential store."
)
except Exception as e:
logger.debug(
f"Could not check credential store for existing refresh token: {e}"
)
if fallback_refresh_token:
credentials = Credentials(
token=credentials.token,
refresh_token=fallback_refresh_token,
id_token=getattr(credentials, "id_token", None),
token_uri=credentials.token_uri,
client_id=credentials.client_id,
client_secret=credentials.client_secret,
scopes=credentials.scopes,
expiry=credentials.expiry,
quota_project_id=getattr(credentials, "quota_project_id", None),
)
else:
logger.warning(
"OAuth callback did not include a refresh token and no previous refresh token was available to preserve."
)
# Save the credentials
credential_store.store_credential(user_google_email, credentials) credential_store.store_credential(user_google_email, credentials)
# Always save to OAuth21SessionStore for centralized management # Always save to OAuth21SessionStore for centralized management
store = get_oauth21_session_store()
store.store_session( store.store_session(
user_email=user_google_email, user_email=user_google_email,
access_token=credentials.token, access_token=credentials.token,
@@ -641,8 +775,8 @@ def get_credentials(
f"[get_credentials] Found OAuth 2.1 credentials for MCP session {session_id}" f"[get_credentials] Found OAuth 2.1 credentials for MCP session {session_id}"
) )
# Refresh expired credentials before checking scopes # Refresh invalid credentials before checking scopes
if credentials.expired and credentials.refresh_token: if (not credentials.valid) and credentials.refresh_token:
try: try:
credentials.refresh(Request()) credentials.refresh(Request())
logger.info( logger.info(
@@ -772,9 +906,9 @@ def get_credentials(
logger.debug( logger.debug(
f"[get_credentials] Credentials are valid. User: '{user_google_email}', Session: '{session_id}'" f"[get_credentials] Credentials are valid. User: '{user_google_email}', Session: '{session_id}'"
) )
elif credentials.expired and credentials.refresh_token: elif credentials.refresh_token:
logger.info( logger.info(
f"[get_credentials] Credentials expired. Attempting refresh. User: '{user_google_email}', Session: '{session_id}'" f"[get_credentials] Credentials not valid. Attempting refresh. User: '{user_google_email}', Session: '{session_id}'"
) )
try: try:
logger.debug( logger.debug(

View File

@@ -348,7 +348,7 @@ def configure_server_for_http():
) )
elif use_disk: elif use_disk:
try: try:
from key_value.aio.stores.disk import DiskStore from key_value.aio.stores.filetree import FileTreeStore
disk_directory = os.getenv( disk_directory = os.getenv(
"WORKSPACE_MCP_OAUTH_PROXY_DISK_DIRECTORY", "" "WORKSPACE_MCP_OAUTH_PROXY_DISK_DIRECTORY", ""
@@ -363,7 +363,7 @@ def configure_server_for_http():
"~/.fastmcp/oauth-proxy" "~/.fastmcp/oauth-proxy"
) )
client_storage = DiskStore(directory=disk_directory) client_storage = FileTreeStore(data_directory=disk_directory)
jwt_signing_key = validate_and_derive_jwt_key( jwt_signing_key = validate_and_derive_jwt_key(
jwt_signing_key_override, config.client_secret jwt_signing_key_override, config.client_secret
@@ -379,7 +379,7 @@ def configure_server_for_http():
fernet=Fernet(key=storage_encryption_key), fernet=Fernet(key=storage_encryption_key),
) )
logger.info( logger.info(
"OAuth 2.1: Using DiskStore for FastMCP OAuth proxy client_storage (directory=%s)", "OAuth 2.1: Using FileTreeStore for FastMCP OAuth proxy client_storage (directory=%s)",
disk_directory, disk_directory,
) )
except ImportError as exc: except ImportError as exc:

View File

@@ -138,9 +138,15 @@ def build_paragraph_style(
if named_style_type is not None: if named_style_type is not None:
valid_styles = [ valid_styles = [
"NORMAL_TEXT", "TITLE", "SUBTITLE", "NORMAL_TEXT",
"HEADING_1", "HEADING_2", "HEADING_3", "TITLE",
"HEADING_4", "HEADING_5", "HEADING_6", "SUBTITLE",
"HEADING_1",
"HEADING_2",
"HEADING_3",
"HEADING_4",
"HEADING_5",
"HEADING_6",
] ]
if named_style_type not in valid_styles: if named_style_type not in valid_styles:
raise ValueError( raise ValueError(

View File

@@ -325,7 +325,9 @@ class BatchOperationManager:
tab_id, tab_id,
) )
style = "bulleted" if list_type == "UNORDERED" else "numbered" style = "bulleted" if list_type == "UNORDERED" else "numbered"
description = f"create {style} list {op['start_index']}-{op['end_index']}" description = (
f"create {style} list {op['start_index']}-{op['end_index']}"
)
if op.get("nesting_level"): if op.get("nesting_level"):
description += f" (nesting level {op['nesting_level']})" description += f" (nesting level {op['nesting_level']})"
@@ -491,7 +493,11 @@ class BatchOperationManager:
}, },
"create_bullet_list": { "create_bullet_list": {
"required": ["start_index", "end_index"], "required": ["start_index", "end_index"],
"optional": ["list_type", "nesting_level", "paragraph_start_indices"], "optional": [
"list_type",
"nesting_level",
"paragraph_start_indices",
],
"description": "Apply or remove native bullet/numbered list formatting (list_type: UNORDERED, ORDERED, or NONE to remove; nesting_level: 0-8)", "description": "Apply or remove native bullet/numbered list formatting (list_type: UNORDERED, ORDERED, or NONE to remove; nesting_level: 0-8)",
}, },
"insert_doc_tab": { "insert_doc_tab": {

View File

@@ -318,9 +318,15 @@ class ValidationManager:
if named_style_type is not None: if named_style_type is not None:
valid_styles = [ valid_styles = [
"NORMAL_TEXT", "TITLE", "SUBTITLE", "NORMAL_TEXT",
"HEADING_1", "HEADING_2", "HEADING_3", "TITLE",
"HEADING_4", "HEADING_5", "HEADING_6", "SUBTITLE",
"HEADING_1",
"HEADING_2",
"HEADING_3",
"HEADING_4",
"HEADING_5",
"HEADING_6",
] ]
if named_style_type not in valid_styles: if named_style_type not in valid_styles:
return ( return (

View File

@@ -36,7 +36,17 @@ logger = logging.getLogger(__name__)
GMAIL_BATCH_SIZE = 25 GMAIL_BATCH_SIZE = 25
GMAIL_REQUEST_DELAY = 0.1 GMAIL_REQUEST_DELAY = 0.1
HTML_BODY_TRUNCATE_LIMIT = 20000 HTML_BODY_TRUNCATE_LIMIT = 20000
GMAIL_METADATA_HEADERS = ["Subject", "From", "To", "Cc", "Message-ID", "Date"]
GMAIL_METADATA_HEADERS = [
"Subject",
"From",
"To",
"Cc",
"Message-ID",
"In-Reply-To",
"References",
"Date",
]
LOW_VALUE_TEXT_PLACEHOLDERS = ( LOW_VALUE_TEXT_PLACEHOLDERS = (
"your client does not support html", "your client does not support html",
"view this email in your browser", "view this email in your browser",
@@ -217,6 +227,114 @@ def _append_signature_to_body(
return f"{body}{separator}{signature_text}" return f"{body}{separator}{signature_text}"
async def _fetch_original_for_quote(
    service, thread_id: str, in_reply_to: Optional[str] = None
) -> Optional[dict]:
    """Fetch the original message from a thread for quoting in a reply.

    When *in_reply_to* is provided the function looks for that specific
    Message-ID inside the thread. Otherwise it falls back to the last
    message in the thread.

    Returns a dict with keys: sender, date, text_body, html_body -- or
    *None* when the message cannot be retrieved.
    """
    try:
        # threads().get().execute is blocking; run it in a worker thread so
        # the event loop stays responsive.
        thread_data = await asyncio.to_thread(
            service.users()
            .threads()
            .get(userId="me", id=thread_id, format="full")
            .execute
        )
    except Exception as e:
        # Quoting is best-effort: a fetch failure must not abort the reply.
        logger.warning(f"Failed to fetch thread {thread_id} for quoting: {e}")
        return None

    messages = thread_data.get("messages", [])
    if not messages:
        return None

    target = None
    if in_reply_to:
        # Find the message whose RFC 822 Message-ID header matches exactly.
        # NOTE(review): assumes the header name is reported as "Message-ID";
        # a mixed-case variant (e.g. "Message-Id") would be missed — confirm.
        for msg in messages:
            headers = {
                h["name"]: h["value"] for h in msg.get("payload", {}).get("headers", [])
            }
            if headers.get("Message-ID") == in_reply_to:
                target = msg
                break
    if target is None:
        # No match (or no in_reply_to given): quote the newest message.
        target = messages[-1]

    headers = {
        h["name"]: h["value"] for h in target.get("payload", {}).get("headers", [])
    }
    bodies = _extract_message_bodies(target.get("payload", {}))
    return {
        "sender": headers.get("From", "unknown"),
        "date": headers.get("Date", ""),
        "text_body": bodies.get("text", ""),
        "html_body": bodies.get("html", ""),
    }
def _build_quoted_reply_body(
reply_body: str,
body_format: Literal["plain", "html"],
signature_html: str,
original: dict,
) -> str:
"""Assemble reply body + signature + quoted original message.
Layout:
reply_body
-- signature --
On {date}, {sender} wrote:
> quoted original
"""
import html as _html_mod
if original.get("date"):
attribution = f"On {original['date']}, {original['sender']} wrote:"
else:
attribution = f"{original['sender']} wrote:"
if body_format == "html":
# Signature
sig_block = ""
if signature_html and signature_html.strip():
sig_block = f"<br><br>{signature_html}"
# Quoted original
orig_html = original.get("html_body") or ""
if not orig_html:
orig_text = original.get("text_body", "")
orig_html = f"<pre>{_html_mod.escape(orig_text)}</pre>"
quote_block = (
'<br><br><div class="gmail_quote">'
f"<span>{_html_mod.escape(attribution)}</span><br>"
'<blockquote style="margin:0 0 0 .8ex;border-left:1px solid #ccc;padding-left:1ex">'
f"{orig_html}"
"</blockquote></div>"
)
return f"{reply_body}{sig_block}{quote_block}"
# Plain text path
sig_block = ""
if signature_html and signature_html.strip():
sig_text = _html_to_text(signature_html).strip()
if sig_text:
sig_block = f"\n\n{sig_text}"
orig_text = original.get("text_body") or ""
if not orig_text and original.get("html_body"):
orig_text = _html_to_text(original["html_body"])
quoted_lines = "\n".join(f"> {line}" for line in orig_text.splitlines())
return f"{reply_body}{sig_block}\n\n{attribution}\n{quoted_lines}"
async def _get_send_as_signature_html(service, from_email: Optional[str] = None) -> str: async def _get_send_as_signature_html(service, from_email: Optional[str] = None) -> str:
""" """
Fetch signature HTML from Gmail send-as settings. Fetch signature HTML from Gmail send-as settings.
@@ -714,6 +832,13 @@ async def get_gmail_message_content(
if rfc822_msg_id: if rfc822_msg_id:
content_lines.append(f"Message-ID: {rfc822_msg_id}") content_lines.append(f"Message-ID: {rfc822_msg_id}")
in_reply_to = headers.get("In-Reply-To", "")
references = headers.get("References", "")
if in_reply_to:
content_lines.append(f"In-Reply-To: {in_reply_to}")
if references:
content_lines.append(f"References: {references}")
if to: if to:
content_lines.append(f"To: {to}") content_lines.append(f"To: {to}")
if cc: if cc:
@@ -879,12 +1004,19 @@ async def get_gmail_messages_content_batch(
cc = headers.get("Cc", "") cc = headers.get("Cc", "")
rfc822_msg_id = headers.get("Message-ID", "") rfc822_msg_id = headers.get("Message-ID", "")
in_reply_to = headers.get("In-Reply-To", "")
references = headers.get("References", "")
msg_output = ( msg_output = (
f"Message ID: {mid}\nSubject: {subject}\nFrom: {sender}\n" f"Message ID: {mid}\nSubject: {subject}\nFrom: {sender}\n"
f"Date: {headers.get('Date', '(unknown date)')}\n" f"Date: {headers.get('Date', '(unknown date)')}\n"
) )
if rfc822_msg_id: if rfc822_msg_id:
msg_output += f"Message-ID: {rfc822_msg_id}\n" msg_output += f"Message-ID: {rfc822_msg_id}\n"
if in_reply_to:
msg_output += f"In-Reply-To: {in_reply_to}\n"
if references:
msg_output += f"References: {references}\n"
if to: if to:
msg_output += f"To: {to}\n" msg_output += f"To: {to}\n"
@@ -910,12 +1042,19 @@ async def get_gmail_messages_content_batch(
# Format body content with HTML fallback # Format body content with HTML fallback
body_data = _format_body_content(text_body, html_body) body_data = _format_body_content(text_body, html_body)
in_reply_to = headers.get("In-Reply-To", "")
references = headers.get("References", "")
msg_output = ( msg_output = (
f"Message ID: {mid}\nSubject: {subject}\nFrom: {sender}\n" f"Message ID: {mid}\nSubject: {subject}\nFrom: {sender}\n"
f"Date: {headers.get('Date', '(unknown date)')}\n" f"Date: {headers.get('Date', '(unknown date)')}\n"
) )
if rfc822_msg_id: if rfc822_msg_id:
msg_output += f"Message-ID: {rfc822_msg_id}\n" msg_output += f"Message-ID: {rfc822_msg_id}\n"
if in_reply_to:
msg_output += f"In-Reply-To: {in_reply_to}\n"
if references:
msg_output += f"References: {references}\n"
if to: if to:
msg_output += f"To: {to}\n" msg_output += f"To: {to}\n"
@@ -1155,7 +1294,7 @@ async def send_gmail_message(
in_reply_to: Annotated[ in_reply_to: Annotated[
Optional[str], Optional[str],
Field( Field(
description="Optional Message-ID of the message being replied to.", description="Optional RFC Message-ID of the message being replied to (e.g., '<message123@gmail.com>').",
), ),
] = None, ] = None,
references: Annotated[ references: Annotated[
@@ -1197,8 +1336,8 @@ async def send_gmail_message(
the email will be sent from the authenticated user's primary email address. the email will be sent from the authenticated user's primary email address.
user_google_email (str): The user's Google email address. Required for authentication. user_google_email (str): The user's Google email address. Required for authentication.
thread_id (Optional[str]): Optional Gmail thread ID to reply within. When provided, sends a reply. thread_id (Optional[str]): Optional Gmail thread ID to reply within. When provided, sends a reply.
in_reply_to (Optional[str]): Optional Message-ID of the message being replied to. Used for proper threading. in_reply_to (Optional[str]): Optional RFC Message-ID of the message being replied to (e.g., '<message123@gmail.com>').
references (Optional[str]): Optional chain of Message-IDs for proper threading. Should include all previous Message-IDs. references (Optional[str]): Optional chain of RFC Message-IDs for proper threading (e.g., '<msg1@gmail.com> <msg2@gmail.com>').
Returns: Returns:
str: Confirmation message with the sent email's message ID. str: Confirmation message with the sent email's message ID.
@@ -1362,7 +1501,7 @@ async def draft_gmail_message(
in_reply_to: Annotated[ in_reply_to: Annotated[
Optional[str], Optional[str],
Field( Field(
description="Optional Message-ID of the message being replied to.", description="Optional RFC Message-ID of the message being replied to (e.g., '<message123@gmail.com>').",
), ),
] = None, ] = None,
references: Annotated[ references: Annotated[
@@ -1383,6 +1522,12 @@ async def draft_gmail_message(
description="Whether to append the Gmail signature from Settings > Signature when available. Defaults to true.", description="Whether to append the Gmail signature from Settings > Signature when available. Defaults to true.",
), ),
] = True, ] = True,
quote_original: Annotated[
bool,
Field(
description="Whether to include the original message as a quoted reply. Requires thread_id. Defaults to false.",
),
] = False,
) -> str: ) -> str:
""" """
Creates a draft email in the user's Gmail account. Supports both new drafts and reply drafts with optional attachments. Creates a draft email in the user's Gmail account. Supports both new drafts and reply drafts with optional attachments.
@@ -1401,8 +1546,8 @@ async def draft_gmail_message(
configured in Gmail settings (Settings > Accounts > Send mail as). If not provided, configured in Gmail settings (Settings > Accounts > Send mail as). If not provided,
the draft will be from the authenticated user's primary email address. the draft will be from the authenticated user's primary email address.
thread_id (Optional[str]): Optional Gmail thread ID to reply within. When provided, creates a reply draft. thread_id (Optional[str]): Optional Gmail thread ID to reply within. When provided, creates a reply draft.
in_reply_to (Optional[str]): Optional Message-ID of the message being replied to. Used for proper threading. in_reply_to (Optional[str]): Optional RFC Message-ID of the message being replied to (e.g., '<message123@gmail.com>').
references (Optional[str]): Optional chain of Message-IDs for proper threading. Should include all previous Message-IDs. references (Optional[str]): Optional chain of RFC Message-IDs for proper threading (e.g., '<msg1@gmail.com> <msg2@gmail.com>').
attachments (List[Dict[str, str]]): Optional list of attachments. Each dict can contain: attachments (List[Dict[str, str]]): Optional list of attachments. Each dict can contain:
Option 1 - File path (auto-encodes): Option 1 - File path (auto-encodes):
- 'path' (required): File path to attach - 'path' (required): File path to attach
@@ -1414,6 +1559,9 @@ async def draft_gmail_message(
- 'mime_type' (optional): MIME type (defaults to 'application/octet-stream') - 'mime_type' (optional): MIME type (defaults to 'application/octet-stream')
include_signature (bool): Whether to append Gmail signature HTML from send-as settings. include_signature (bool): Whether to append Gmail signature HTML from send-as settings.
If unavailable (e.g., missing gmail.settings.basic scope), the draft is still created without signature. If unavailable (e.g., missing gmail.settings.basic scope), the draft is still created without signature.
quote_original (bool): Whether to include the original message as a quoted reply.
Requires thread_id to be provided. When enabled, fetches the original message
and appends it below the signature. Defaults to False.
Returns: Returns:
str: Confirmation message with the created draft's ID. str: Confirmation message with the created draft's ID.
@@ -1478,10 +1626,23 @@ async def draft_gmail_message(
# Use from_email (Send As alias) if provided, otherwise default to authenticated user # Use from_email (Send As alias) if provided, otherwise default to authenticated user
sender_email = from_email or user_google_email sender_email = from_email or user_google_email
draft_body = body draft_body = body
signature_html = ""
if include_signature: if include_signature:
signature_html = await _get_send_as_signature_html( signature_html = await _get_send_as_signature_html(
service, from_email=sender_email service, from_email=sender_email
) )
if quote_original and thread_id:
original = await _fetch_original_for_quote(service, thread_id, in_reply_to)
if original:
draft_body = _build_quoted_reply_body(
draft_body, body_format, signature_html, original
)
else:
draft_body = _append_signature_to_body(
draft_body, body_format, signature_html
)
else:
draft_body = _append_signature_to_body(draft_body, body_format, signature_html) draft_body = _append_signature_to_body(draft_body, body_format, signature_html)
raw_message, thread_id_final, attached_count = _prepare_gmail_message( raw_message, thread_id_final, attached_count = _prepare_gmail_message(

View File

@@ -7,11 +7,15 @@ conditional formatting helpers.
import asyncio import asyncio
import json import json
import logging
import re import re
from typing import List, Optional, Union from typing import List, Optional, Union
from core.utils import UserInputError from core.utils import UserInputError
logger = logging.getLogger(__name__)
MAX_GRID_METADATA_CELLS = 5000
A1_PART_REGEX = re.compile(r"^([A-Za-z]*)(\d*)$") A1_PART_REGEX = re.compile(r"^([A-Za-z]*)(\d*)$")
SHEET_TITLE_SAFE_RE = re.compile(r"^[A-Za-z0-9_]+$") SHEET_TITLE_SAFE_RE = re.compile(r"^[A-Za-z0-9_]+$")
@@ -877,3 +881,170 @@ def _build_gradient_rule(
rule_body["gradientRule"]["midpoint"] = gradient_points[1] rule_body["gradientRule"]["midpoint"] = gradient_points[1]
rule_body["gradientRule"]["maxpoint"] = gradient_points[2] rule_body["gradientRule"]["maxpoint"] = gradient_points[2]
return rule_body return rule_body
def _extract_cell_notes_from_grid(spreadsheet: dict) -> list[dict[str, str]]:
    """
    Collect all cell notes present in a spreadsheet's grid data.

    Returns a list of dictionaries with:
      - "cell": cell A1 reference
      - "note": the note text
    """
    collected: list[dict[str, str]] = []
    for sheet in spreadsheet.get("sheets", []) or []:
        title = sheet.get("properties", {}).get("title") or "Unknown"
        for grid in sheet.get("data", []) or []:
            # Grid data may start at an offset within the sheet.
            row_base = _coerce_int(grid.get("startRow"), default=0)
            col_base = _coerce_int(grid.get("startColumn"), default=0)
            for row_offset, row_data in enumerate(grid.get("rowData", []) or []):
                if not row_data:
                    continue
                for col_offset, cell in enumerate(row_data.get("values", []) or []):
                    if not cell:
                        continue
                    note = cell.get("note")
                    if not note:
                        continue
                    collected.append(
                        {
                            "cell": _format_a1_cell(
                                title,
                                row_base + row_offset,
                                col_base + col_offset,
                            ),
                            "note": note,
                        }
                    )
    return collected
async def _fetch_sheet_notes(
    service, spreadsheet_id: str, a1_range: str
) -> list[dict[str, str]]:
    """Fetch cell notes for the given range via spreadsheets.get with includeGridData.

    The ``fields`` mask restricts the response to note data plus the sheet
    title and grid origin needed to compute A1 cell references.
    """
    # Blocking googleapiclient call — run in a worker thread.
    response = await asyncio.to_thread(
        service.spreadsheets()
        .get(
            spreadsheetId=spreadsheet_id,
            ranges=[a1_range],
            includeGridData=True,
            fields="sheets(properties(title),data(startRow,startColumn,rowData(values(note))))",
        )
        .execute
    )
    return _extract_cell_notes_from_grid(response)
def _format_sheet_notes_section(
*, notes: list[dict[str, str]], range_label: str, max_details: int = 25
) -> str:
"""
Format a list of cell notes into a human-readable section.
"""
if not notes:
return ""
lines = []
for item in notes[:max_details]:
cell = item.get("cell") or "(unknown cell)"
note = item.get("note") or "(empty note)"
lines.append(f"- {cell}: {note}")
suffix = (
f"\n... and {len(notes) - max_details} more notes"
if len(notes) > max_details
else ""
)
return f"\n\nCell notes in range '{range_label}':\n" + "\n".join(lines) + suffix
async def _fetch_grid_metadata(
    service,
    spreadsheet_id: str,
    resolved_range: str,
    values: List[List[object]],
    include_hyperlinks: bool = False,
    include_notes: bool = False,
) -> tuple[str, str]:
    """Fetch hyperlinks and/or notes for a range via a single spreadsheets.get call.

    Computes tight range bounds, enforces the cell-count cap, builds a combined
    ``fields`` selector so only one API round-trip is needed when both flags are
    ``True``, then parses the response into formatted output sections.

    Returns:
        (hyperlink_section, notes_section) — each is an empty string when the
        corresponding flag is ``False`` or no data was found.
    """
    if not include_hyperlinks and not include_notes:
        return "", ""

    # Shrink the requested range to the cells that actually hold values so the
    # includeGridData request stays cheap (avoids open-ended ranges like A:Z).
    tight_range = _a1_range_for_values(resolved_range, values)
    if not tight_range:
        logger.info(
            "[read_sheet_values] Skipping grid metadata fetch for range '%s': "
            "unable to determine tight bounds",
            resolved_range,
        )
        return "", ""

    # Cap the fetch size; fall back to counting the returned values if the
    # range's cell count cannot be computed.
    cell_count = _a1_range_cell_count(tight_range) or sum(len(row) for row in values)
    if cell_count > MAX_GRID_METADATA_CELLS:
        logger.info(
            "[read_sheet_values] Skipping grid metadata fetch for large range "
            "'%s' (%d cells > %d limit)",
            tight_range,
            cell_count,
            MAX_GRID_METADATA_CELLS,
        )
        return "", ""

    # Build a combined fields selector so we hit the API at most once.
    value_fields: list[str] = []
    if include_hyperlinks:
        value_fields.extend(["hyperlink", "textFormatRuns(format(link(uri)))"])
    if include_notes:
        value_fields.append("note")
    fields = (
        "sheets(properties(title),data(startRow,startColumn,"
        f"rowData(values({','.join(value_fields)}))))"
    )

    try:
        # Blocking googleapiclient call — off-load to a worker thread.
        response = await asyncio.to_thread(
            service.spreadsheets()
            .get(
                spreadsheetId=spreadsheet_id,
                ranges=[tight_range],
                includeGridData=True,
                fields=fields,
            )
            .execute
        )
    except Exception as exc:
        # Metadata is a best-effort add-on; never fail the main read for it.
        logger.warning(
            "[read_sheet_values] Failed fetching grid metadata for range '%s': %s",
            tight_range,
            exc,
        )
        return "", ""

    hyperlink_section = ""
    if include_hyperlinks:
        hyperlinks = _extract_cell_hyperlinks_from_grid(response)
        hyperlink_section = _format_sheet_hyperlink_section(
            hyperlinks=hyperlinks, range_label=tight_range
        )

    notes_section = ""
    if include_notes:
        notes = _extract_cell_notes_from_grid(response)
        notes_section = _format_sheet_notes_section(
            notes=notes, range_label=tight_range
        )

    return hyperlink_section, notes_section

View File

@@ -15,16 +15,14 @@ from core.server import server
from core.utils import handle_http_errors, UserInputError from core.utils import handle_http_errors, UserInputError
from core.comments import create_comment_tools from core.comments import create_comment_tools
from gsheets.sheets_helpers import ( from gsheets.sheets_helpers import (
_a1_range_cell_count,
CONDITION_TYPES, CONDITION_TYPES,
_a1_range_for_values, _a1_range_for_values,
_build_boolean_rule, _build_boolean_rule,
_build_gradient_rule, _build_gradient_rule,
_fetch_detailed_sheet_errors, _fetch_detailed_sheet_errors,
_fetch_sheet_hyperlinks, _fetch_grid_metadata,
_fetch_sheets_with_rules, _fetch_sheets_with_rules,
_format_conditional_rules_section, _format_conditional_rules_section,
_format_sheet_hyperlink_section,
_format_sheet_error_section, _format_sheet_error_section,
_parse_a1_range, _parse_a1_range,
_parse_condition_values, _parse_condition_values,
@@ -36,7 +34,6 @@ from gsheets.sheets_helpers import (
# Configure module logger # Configure module logger
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
MAX_HYPERLINK_FETCH_CELLS = 5000
@server.tool() @server.tool()
@@ -179,6 +176,7 @@ async def read_sheet_values(
spreadsheet_id: str, spreadsheet_id: str,
range_name: str = "A1:Z1000", range_name: str = "A1:Z1000",
include_hyperlinks: bool = False, include_hyperlinks: bool = False,
include_notes: bool = False,
) -> str: ) -> str:
""" """
Reads values from a specific range in a Google Sheet. Reads values from a specific range in a Google Sheet.
@@ -189,6 +187,8 @@ async def read_sheet_values(
range_name (str): The range to read (e.g., "Sheet1!A1:D10", "A1:D10"). Defaults to "A1:Z1000". range_name (str): The range to read (e.g., "Sheet1!A1:D10", "A1:D10"). Defaults to "A1:Z1000".
include_hyperlinks (bool): If True, also fetch hyperlink metadata for the range. include_hyperlinks (bool): If True, also fetch hyperlink metadata for the range.
Defaults to False to avoid expensive includeGridData requests. Defaults to False to avoid expensive includeGridData requests.
include_notes (bool): If True, also fetch cell notes for the range.
Defaults to False to avoid expensive includeGridData requests.
Returns: Returns:
str: The formatted values from the specified range. str: The formatted values from the specified range.
@@ -211,41 +211,14 @@ async def read_sheet_values(
resolved_range = result.get("range", range_name) resolved_range = result.get("range", range_name)
detailed_range = _a1_range_for_values(resolved_range, values) or resolved_range detailed_range = _a1_range_for_values(resolved_range, values) or resolved_range
hyperlink_section = "" hyperlink_section, notes_section = await _fetch_grid_metadata(
if include_hyperlinks: service,
# Use a tight A1 range for includeGridData fetches to avoid expensive spreadsheet_id,
# open-ended requests (e.g., A:Z). resolved_range,
hyperlink_range = _a1_range_for_values(resolved_range, values) values,
if not hyperlink_range: include_hyperlinks=include_hyperlinks,
logger.info( include_notes=include_notes,
"[read_sheet_values] Skipping hyperlink fetch for range '%s': unable to determine tight bounds", )
resolved_range,
)
else:
cell_count = _a1_range_cell_count(hyperlink_range) or sum(
len(row) for row in values
)
if cell_count <= MAX_HYPERLINK_FETCH_CELLS:
try:
hyperlinks = await _fetch_sheet_hyperlinks(
service, spreadsheet_id, hyperlink_range
)
hyperlink_section = _format_sheet_hyperlink_section(
hyperlinks=hyperlinks, range_label=hyperlink_range
)
except Exception as exc:
logger.warning(
"[read_sheet_values] Failed fetching hyperlinks for range '%s': %s",
hyperlink_range,
exc,
)
else:
logger.info(
"[read_sheet_values] Skipping hyperlink fetch for large range '%s' (%d cells > %d limit)",
hyperlink_range,
cell_count,
MAX_HYPERLINK_FETCH_CELLS,
)
detailed_errors_section = "" detailed_errors_section = ""
if _values_contain_sheets_errors(values): if _values_contain_sheets_errors(values):
@@ -277,7 +250,7 @@ async def read_sheet_values(
) )
logger.info(f"Successfully read {len(values)} rows for {user_google_email}.") logger.info(f"Successfully read {len(values)} rows for {user_google_email}.")
return text_output + hyperlink_section + detailed_errors_section return text_output + hyperlink_section + notes_section + detailed_errors_section
@server.tool() @server.tool()

View File

@@ -534,7 +534,12 @@ def main():
) )
sys.exit(1) sys.exit(1)
server.run(transport="streamable-http", host=host, port=port) server.run(
transport="streamable-http",
host=host,
port=port,
stateless_http=is_stateless_mode(),
)
else: else:
server.run() server.run()
except KeyboardInterrupt: except KeyboardInterrupt:

View File

@@ -12,13 +12,13 @@ license = "MIT"
requires-python = ">=3.10" requires-python = ">=3.10"
dependencies = [ dependencies = [
"fastapi>=0.115.12", "fastapi>=0.115.12",
"fastmcp>=3.0.2", "fastmcp>=3.1.1",
"google-api-python-client>=2.168.0", "google-api-python-client>=2.168.0",
"google-auth-httplib2>=0.2.0", "google-auth-httplib2>=0.2.0",
"google-auth-oauthlib>=1.2.2", "google-auth-oauthlib>=1.2.2",
"httpx>=0.28.1", "httpx>=0.28.1",
"py-key-value-aio>=0.3.0", "py-key-value-aio>=0.3.0",
"pyjwt>=2.10.1", "pyjwt>=2.12.0",
"python-dotenv>=1.1.0", "python-dotenv>=1.1.0",
"pyyaml>=6.0.2", "pyyaml>=6.0.2",
"cryptography>=45.0.0", "cryptography>=45.0.0",
@@ -59,7 +59,7 @@ workspace-mcp = "main:main"
[project.optional-dependencies] [project.optional-dependencies]
disk = [ disk = [
"py-key-value-aio[disk]>=0.3.0", "py-key-value-aio[filetree]>=0.3.0",
] ]
valkey = [ valkey = [
"py-key-value-aio[valkey]>=0.3.0", "py-key-value-aio[valkey]>=0.3.0",
@@ -84,7 +84,7 @@ dev = [
[dependency-groups] [dependency-groups]
disk = [ disk = [
"py-key-value-aio[disk]>=0.3.0", "py-key-value-aio[filetree]>=0.3.0",
] ]
valkey = [ valkey = [
"py-key-value-aio[valkey]>=0.3.0", "py-key-value-aio[valkey]>=0.3.0",

View File

@@ -0,0 +1,128 @@
from google.oauth2.credentials import Credentials
from auth.google_auth import handle_auth_callback
class _DummyFlow:
def __init__(self, credentials):
self.credentials = credentials
def fetch_token(self, authorization_response): # noqa: ARG002
return None
class _DummyOAuthStore:
def __init__(self, session_credentials=None):
self._session_credentials = session_credentials
self.stored_refresh_token = None
def validate_and_consume_oauth_state(self, state, session_id=None): # noqa: ARG002
return {"session_id": session_id, "code_verifier": "verifier"}
def get_credentials_by_mcp_session(self, mcp_session_id): # noqa: ARG002
return self._session_credentials
def store_session(self, **kwargs):
self.stored_refresh_token = kwargs.get("refresh_token")
class _DummyCredentialStore:
def __init__(self, existing_credentials=None):
self._existing_credentials = existing_credentials
self.saved_credentials = None
def get_credential(self, user_email): # noqa: ARG002
return self._existing_credentials
def store_credential(self, user_email, credentials): # noqa: ARG002
self.saved_credentials = credentials
return True
def _make_credentials(refresh_token):
    """Build a google-auth Credentials object with fixed test-only field values.

    Args:
        refresh_token: Refresh token to embed, or None to simulate a token
            response that omitted one.
    """
    fields = {
        "token": "access-token",
        "refresh_token": refresh_token,
        "token_uri": "https://oauth2.googleapis.com/token",
        "client_id": "client-id",
        "client_secret": "client-secret",
        "scopes": ["scope.a"],
    }
    return Credentials(**fields)
def test_callback_preserves_refresh_token_from_credential_store(monkeypatch):
    """A callback lacking a refresh token should inherit the one persisted on disk."""
    callback_credentials = _make_credentials(refresh_token=None)
    oauth_store = _DummyOAuthStore(session_credentials=None)
    credential_store = _DummyCredentialStore(
        existing_credentials=_make_credentials(refresh_token="file-refresh-token")
    )

    # Swap every collaborator of handle_auth_callback for an in-memory double.
    fakes = {
        "auth.google_auth.create_oauth_flow": (
            lambda **kwargs: _DummyFlow(callback_credentials)  # noqa: ARG005
        ),
        "auth.google_auth.get_oauth21_session_store": lambda: oauth_store,
        "auth.google_auth.get_credential_store": lambda: credential_store,
        "auth.google_auth.get_user_info": (
            lambda credentials: {"email": "user@gmail.com"}  # noqa: ARG005
        ),
        "auth.google_auth.save_credentials_to_session": lambda *args: None,
        "auth.google_auth.is_stateless_mode": lambda: False,
    }
    for target, replacement in fakes.items():
        monkeypatch.setattr(target, replacement)

    _email, credentials = handle_auth_callback(
        scopes=["scope.a"],
        authorization_response="http://localhost/callback?state=abc123&code=code123",
        redirect_uri="http://localhost/callback",
        session_id="session-1",
    )

    # The file-backed refresh token must survive on every persisted copy.
    assert credentials.refresh_token == "file-refresh-token"
    assert credential_store.saved_credentials.refresh_token == "file-refresh-token"
    assert oauth_store.stored_refresh_token == "file-refresh-token"
def test_callback_prefers_session_refresh_token_over_credential_store(monkeypatch):
    """When both the session and the file store hold a refresh token, the session copy wins."""
    callback_credentials = _make_credentials(refresh_token=None)
    oauth_store = _DummyOAuthStore(
        session_credentials=_make_credentials(refresh_token="session-refresh-token")
    )
    credential_store = _DummyCredentialStore(
        existing_credentials=_make_credentials(refresh_token="file-refresh-token")
    )

    # Swap every collaborator of handle_auth_callback for an in-memory double.
    fakes = {
        "auth.google_auth.create_oauth_flow": (
            lambda **kwargs: _DummyFlow(callback_credentials)  # noqa: ARG005
        ),
        "auth.google_auth.get_oauth21_session_store": lambda: oauth_store,
        "auth.google_auth.get_credential_store": lambda: credential_store,
        "auth.google_auth.get_user_info": (
            lambda credentials: {"email": "user@gmail.com"}  # noqa: ARG005
        ),
        "auth.google_auth.save_credentials_to_session": lambda *args: None,
        "auth.google_auth.is_stateless_mode": lambda: False,
    }
    for target, replacement in fakes.items():
        monkeypatch.setattr(target, replacement)

    _email, credentials = handle_auth_callback(
        scopes=["scope.a"],
        authorization_response="http://localhost/callback?state=abc123&code=code123",
        redirect_uri="http://localhost/callback",
        session_id="session-1",
    )

    # The session-held refresh token takes precedence everywhere it is stored.
    assert credentials.refresh_token == "session-refresh-token"
    assert credential_store.saved_credentials.refresh_token == "session-refresh-token"
    assert oauth_store.stored_refresh_token == "session-refresh-token"

View File

@@ -0,0 +1,119 @@
from types import SimpleNamespace
from auth.google_auth import _determine_oauth_prompt
class _DummyCredentialStore:
def __init__(self, credentials_by_email=None):
self._credentials_by_email = credentials_by_email or {}
def get_credential(self, user_email):
return self._credentials_by_email.get(user_email)
class _DummySessionStore:
def __init__(self, user_by_session=None, credentials_by_session=None):
self._user_by_session = user_by_session or {}
self._credentials_by_session = credentials_by_session or {}
def get_user_by_mcp_session(self, mcp_session_id):
return self._user_by_session.get(mcp_session_id)
def get_credentials_by_mcp_session(self, mcp_session_id):
return self._credentials_by_session.get(mcp_session_id)
def _credentials_with_scopes(scopes):
return SimpleNamespace(scopes=scopes)
def test_prompt_select_account_when_existing_credentials_cover_scopes(monkeypatch):
    """Re-auth with full scope coverage should use the lighter select_account prompt."""
    required_scopes = ["scope.a", "scope.b"]
    stored = {"user@gmail.com": _credentials_with_scopes(required_scopes)}

    monkeypatch.setattr(
        "auth.google_auth.get_oauth21_session_store", lambda: _DummySessionStore()
    )
    monkeypatch.setattr(
        "auth.google_auth.get_credential_store", lambda: _DummyCredentialStore(stored)
    )
    monkeypatch.setattr("auth.google_auth.is_stateless_mode", lambda: False)

    assert (
        _determine_oauth_prompt(
            user_google_email="user@gmail.com",
            required_scopes=required_scopes,
            session_id=None,
        )
        == "select_account"
    )
def test_prompt_consent_when_existing_credentials_missing_scopes(monkeypatch):
    """Scope expansion (stored creds lack a required scope) must force full consent."""
    granted = _credentials_with_scopes(["scope.a"])

    monkeypatch.setattr(
        "auth.google_auth.get_oauth21_session_store", lambda: _DummySessionStore()
    )
    monkeypatch.setattr(
        "auth.google_auth.get_credential_store",
        lambda: _DummyCredentialStore({"user@gmail.com": granted}),
    )
    monkeypatch.setattr("auth.google_auth.is_stateless_mode", lambda: False)

    result = _determine_oauth_prompt(
        user_google_email="user@gmail.com",
        required_scopes=["scope.a", "scope.b"],
        session_id=None,
    )

    assert result == "consent"
def test_prompt_consent_when_no_existing_credentials(monkeypatch):
    """First-time users with nothing stored must see the full consent screen."""
    monkeypatch.setattr(
        "auth.google_auth.get_oauth21_session_store", lambda: _DummySessionStore()
    )
    monkeypatch.setattr(
        "auth.google_auth.get_credential_store", lambda: _DummyCredentialStore()
    )
    monkeypatch.setattr("auth.google_auth.is_stateless_mode", lambda: False)

    result = _determine_oauth_prompt(
        user_google_email="new_user@gmail.com",
        required_scopes=["scope.a"],
        session_id=None,
    )

    assert result == "consent"
def test_prompt_uses_session_mapping_when_email_not_provided(monkeypatch):
    """With no explicit email, the session->user mapping drives re-auth detection."""
    session_id = "session-123"
    required_scopes = ["scope.a"]
    session_store = _DummySessionStore(
        user_by_session={session_id: "mapped@gmail.com"},
        credentials_by_session={
            session_id: _credentials_with_scopes(required_scopes)
        },
    )

    monkeypatch.setattr(
        "auth.google_auth.get_oauth21_session_store", lambda: session_store
    )
    monkeypatch.setattr(
        "auth.google_auth.get_credential_store", lambda: _DummyCredentialStore()
    )
    monkeypatch.setattr("auth.google_auth.is_stateless_mode", lambda: False)

    assert (
        _determine_oauth_prompt(
            user_google_email=None,
            required_scopes=required_scopes,
            session_id=session_id,
        )
        == "select_account"
    )

View File

@@ -3,6 +3,8 @@ Unit tests for Google Chat MCP tools — attachment support
""" """
import base64 import base64
from urllib.parse import urlparse
import pytest import pytest
from unittest.mock import AsyncMock, Mock, patch from unittest.mock import AsyncMock, Mock, patch
import sys import sys
@@ -271,10 +273,12 @@ async def test_download_uses_api_media_endpoint():
# Verify we used the API endpoint with attachmentDataRef.resourceName # Verify we used the API endpoint with attachmentDataRef.resourceName
call_args = mock_client.get.call_args call_args = mock_client.get.call_args
url_used = call_args.args[0] url_used = call_args.args[0]
assert "chat.googleapis.com" in url_used parsed = urlparse(url_used)
assert parsed.scheme == "https"
assert parsed.hostname == "chat.googleapis.com"
assert "alt=media" in url_used assert "alt=media" in url_used
assert "spaces/S/attachments/A" in url_used assert "spaces/S/attachments/A" in parsed.path
assert "/messages/" not in url_used assert "/messages/" not in parsed.path
# Verify Bearer token # Verify Bearer token
assert call_args.kwargs["headers"]["Authorization"] == "Bearer fake-access-token" assert call_args.kwargs["headers"]["Authorization"] == "Bearer fake-access-token"

2019
uv.lock generated

File diff suppressed because it is too large Load Diff