Merge branch 'main' of github.com:taylorwilsdon/google_workspace_mcp into add-chat-spaces-readonly-scope

This commit is contained in:
Taylor Wilsdon
2026-02-18 12:36:18 -05:00
20 changed files with 577 additions and 154 deletions

View File

@@ -1,42 +1,58 @@
# ---- Python artefacts -------------------------------------------------- # =============================================================================
__pycache__/ # .dxtignore — defense-in-depth denylist for dxt pack
#
# IMPORTANT: Always use dxt-safe-pack.sh instead of bare `dxt pack`.
# The script guarantees only git-tracked files are packaged.
# This file exists as a safety net in case someone runs `dxt pack` directly.
# =============================================================================
# ---- Caches ----------------------------------------------------------------
.mypy_cache
__pycache__
*.py[cod] *.py[cod]
*.so *.so
.pytest_cache
.ruff_cache
# ---- Packaging --------------------------------------------------------- # ---- Build / packaging -----------------------------------------------------
*.egg-info/ *.egg-info
build/ build/
dist/ dist/
# ---- Environments & tooling ------------------------------------------- # ---- Environments & tooling ------------------------------------------------
.env .env
.venv/ .venv
venv/ venv/
.idea/ .idea/
.vscode/ .vscode/
.claude/ .claude/
.serena/
node_modules/
# ---- macOS clutter ----------------------------------------------------- # ---- macOS -----------------------------------------------------------------
.DS_Store .DS_Store
# ---- Secrets & Credentials -------------------------------------------- # ---- Secrets & credentials — CRITICAL --------------------------------------
client_secret.json client_secret.json
.credentials/ .credentials
credentials.json
token.pickle
*_token
*_secret
.mcpregistry_*
*.key *.key
*.pem *.pem
*.p12 *.p12
*.crt *.crt
*.der *.der
token.pickle
credentials.json
# ---- Test & Debug Files ----------------------------------------------- # ---- Test & debug -----------------------------------------------------------
.coverage .coverage
pytest_out.txt pytest_out.txt
mcp_server_debug.log mcp_server_debug.log
diff_output.txt diff_output.txt
# ---- Temporary & Build Files ------------------------------------------ # ---- Temp & editor files ----------------------------------------------------
*.tmp *.tmp
*.log *.log
*.pid *.pid
@@ -44,3 +60,8 @@ diff_output.txt
*.swo *.swo
*~ *~
# ---- Development artifacts not for distribution -----------------------------
scripts/
.beads
.github/
tests/

19
.github/FUNDING.yml vendored Normal file
View File

@@ -0,0 +1,19 @@
# .github/FUNDING.yml
github: taylorwilsdon
# --- Optional platforms (one value per platform) ---
# patreon: REPLACE_ME
# open_collective: REPLACE_ME
# ko_fi: REPLACE_ME
# liberapay: REPLACE_ME
# issuehunt: REPLACE_ME
# polar: REPLACE_ME
# buy_me_a_coffee: REPLACE_ME
# thanks_dev: u/gh/REPLACE_ME_GITHUB_USERNAME
# Tidelift uses platform/package (npm, pypi, rubygems, maven, packagist, nuget)
# tidelift: pypi/REPLACE_ME_PACKAGE_NAME
# Up to 4 custom URLs (wrap in quotes if they contain :)
# Good pattern: link to a SUPPORT.md that describes how to sponsor, or your donation page.
# custom: ["https://REPLACE_ME_DOMAIN/sponsor", "https://github.com/REPLACE_ME_OWNER/REPLACE_ME_REPO/blob/main/SUPPORT.md"]

View File

@@ -14,10 +14,10 @@ jobs:
id-token: write id-token: write
steps: steps:
- name: Checkout - name: Checkout
uses: actions/checkout@v4 uses: actions/checkout@v6
- name: Set up Python - name: Set up Python
uses: actions/setup-python@v5 uses: actions/setup-python@v6
with: with:
python-version: "3.11" python-version: "3.11"

View File

@@ -1,22 +0,0 @@
name: Ruff Format
on:
pull_request:
branches: [ main ]
push:
branches: [ main ]
jobs:
ruff-format:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v6
- uses: actions/setup-python@v6
with:
python-version: '3.11'
- name: Install uv
uses: astral-sh/setup-uv@v7
- name: Install dependencies
run: uv sync
- name: Run ruff format check
run: uv run ruff format --check

View File

@@ -1,4 +1,4 @@
name: Ruff Check name: Ruff
on: on:
pull_request: pull_request:
@@ -7,13 +7,16 @@ on:
branches: [ main ] branches: [ main ]
permissions: permissions:
contents: read contents: write
jobs: jobs:
ruff: ruff:
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- uses: actions/checkout@v6 - uses: actions/checkout@v6
with:
ref: ${{ github.head_ref || github.ref }}
token: ${{ secrets.GITHUB_TOKEN }}
- uses: actions/setup-python@v6 - uses: actions/setup-python@v6
with: with:
python-version: '3.11' python-version: '3.11'
@@ -21,5 +24,21 @@ jobs:
uses: astral-sh/setup-uv@v7 uses: astral-sh/setup-uv@v7
- name: Install dependencies - name: Install dependencies
run: uv sync run: uv sync
- name: Run ruff check - name: Auto-fix ruff lint and format
run: uv run ruff check if: github.event_name == 'pull_request'
run: |
uv run ruff check --fix
uv run ruff format
- name: Commit and push fixes
if: github.event_name == 'pull_request'
run: |
git diff --quiet && exit 0
git config user.name "github-actions[bot]"
git config user.email "github-actions[bot]@users.noreply.github.com"
git add -A
git commit -m "style: auto-fix ruff lint and format"
git push || echo "::warning::Could not push auto-fixes — fork PRs require 'Allow edits from maintainers' enabled"
- name: Validate
run: |
uv run ruff check
uv run ruff format --check

View File

@@ -930,7 +930,7 @@ Saved files expire after 1 hour and are cleaned up automatically.
|------|------|-------------| |------|------|-------------|
| `get_doc_content` | **Core** | Extract document text | | `get_doc_content` | **Core** | Extract document text |
| `create_doc` | **Core** | Create new documents | | `create_doc` | **Core** | Create new documents |
| `modify_doc_text` | **Core** | Modify document text | | `modify_doc_text` | **Core** | Modify document text (formatting + links) |
| `search_docs` | Extended | Find documents by name | | `search_docs` | Extended | Find documents by name |
| `find_and_replace_doc` | Extended | Find and replace text | | `find_and_replace_doc` | Extended | Find and replace text |
| `list_docs_in_folder` | Extended | List docs in folder | | `list_docs_in_folder` | Extended | List docs in folder |

View File

@@ -97,7 +97,7 @@ export OAUTHLIB_INSECURE_TRANSPORT=1 # Development only
|------|------|-------------| |------|------|-------------|
| `get_doc_content` | Core | Extract text from Docs or .docx files (supports tabs) | | `get_doc_content` | Core | Extract text from Docs or .docx files (supports tabs) |
| `create_doc` | Core | Create new documents with optional initial content | | `create_doc` | Core | Create new documents with optional initial content |
| `modify_doc_text` | Core | Insert, replace, format text (bold, italic, colors, fonts) | | `modify_doc_text` | Core | Insert, replace, format text (bold, italic, colors, fonts, links) |
| `search_docs` | Extended | Find documents by name | | `search_docs` | Extended | Find documents by name |
| `find_and_replace_doc` | Extended | Global find/replace with case matching | | `find_and_replace_doc` | Extended | Global find/replace with case matching |
| `list_docs_in_folder` | Extended | List Docs in a specific folder | | `list_docs_in_folder` | Extended | List Docs in a specific folder |

View File

@@ -15,7 +15,7 @@ from google.auth.transport.requests import Request
from google.auth.exceptions import RefreshError from google.auth.exceptions import RefreshError
from googleapiclient.discovery import build from googleapiclient.discovery import build
from googleapiclient.errors import HttpError from googleapiclient.errors import HttpError
from auth.scopes import SCOPES, get_current_scopes # noqa from auth.scopes import SCOPES, get_current_scopes, has_required_scopes # noqa
from auth.oauth21_session_store import get_oauth21_session_store from auth.oauth21_session_store import get_oauth21_session_store
from auth.credential_store import get_credential_store from auth.credential_store import get_credential_store
from auth.oauth_config import get_oauth_config, is_stateless_mode from auth.oauth_config import get_oauth_config, is_stateless_mode
@@ -586,20 +586,8 @@ def get_credentials(
f"[get_credentials] Found OAuth 2.1 credentials for MCP session {session_id}" f"[get_credentials] Found OAuth 2.1 credentials for MCP session {session_id}"
) )
# Check scopes # Refresh expired credentials before checking scopes
if not all( if credentials.expired and credentials.refresh_token:
scope in credentials.scopes for scope in required_scopes
):
logger.warning(
f"[get_credentials] OAuth 2.1 credentials lack required scopes. Need: {required_scopes}, Have: {credentials.scopes}"
)
return None
# Return if valid
if credentials.valid:
return credentials
elif credentials.expired and credentials.refresh_token:
# Try to refresh
try: try:
credentials.refresh(Request()) credentials.refresh(Request())
logger.info( logger.info(
@@ -631,12 +619,23 @@ def get_credentials(
logger.warning( logger.warning(
f"[get_credentials] Failed to persist refreshed OAuth 2.1 credentials for user {user_email}: {persist_error}" f"[get_credentials] Failed to persist refreshed OAuth 2.1 credentials for user {user_email}: {persist_error}"
) )
return credentials
except Exception as e: except Exception as e:
logger.error( logger.error(
f"[get_credentials] Failed to refresh OAuth 2.1 credentials: {e}" f"[get_credentials] Failed to refresh OAuth 2.1 credentials: {e}"
) )
return None return None
# Check scopes after refresh so stale metadata doesn't block valid tokens
if not has_required_scopes(credentials.scopes, required_scopes):
logger.warning(
f"[get_credentials] OAuth 2.1 credentials lack required scopes. Need: {required_scopes}, Have: {credentials.scopes}"
)
return None
if credentials.valid:
return credentials
return None
except ImportError: except ImportError:
pass # OAuth 2.1 store not available pass # OAuth 2.1 store not available
except Exception as e: except Exception as e:
@@ -710,21 +709,14 @@ def get_credentials(
f"[get_credentials] Credentials found. Scopes: {credentials.scopes}, Valid: {credentials.valid}, Expired: {credentials.expired}" f"[get_credentials] Credentials found. Scopes: {credentials.scopes}, Valid: {credentials.valid}, Expired: {credentials.expired}"
) )
if not all(scope in credentials.scopes for scope in required_scopes): # Attempt refresh before checking scopes — the scope check validates against
logger.warning( # credentials.scopes which is set at authorization time and not updated by the
f"[get_credentials] Credentials lack required scopes. Need: {required_scopes}, Have: {credentials.scopes}. User: '{user_google_email}', Session: '{session_id}'" # google-auth library on refresh. Checking scopes first would block a valid
) # refresh attempt when stored scope metadata is stale.
return None # Re-authentication needed for scopes
logger.debug(
f"[get_credentials] Credentials have sufficient scopes. User: '{user_google_email}', Session: '{session_id}'"
)
if credentials.valid: if credentials.valid:
logger.debug( logger.debug(
f"[get_credentials] Credentials are valid. User: '{user_google_email}', Session: '{session_id}'" f"[get_credentials] Credentials are valid. User: '{user_google_email}', Session: '{session_id}'"
) )
return credentials
elif credentials.expired and credentials.refresh_token: elif credentials.expired and credentials.refresh_token:
logger.info( logger.info(
f"[get_credentials] Credentials expired. Attempting refresh. User: '{user_google_email}', Session: '{session_id}'" f"[get_credentials] Credentials expired. Attempting refresh. User: '{user_google_email}', Session: '{session_id}'"
@@ -733,7 +725,6 @@ def get_credentials(
logger.debug( logger.debug(
"[get_credentials] Refreshing token using embedded client credentials" "[get_credentials] Refreshing token using embedded client credentials"
) )
# client_config = load_client_secrets(client_secrets_path) # Not strictly needed if creds have client_id/secret
credentials.refresh(Request()) credentials.refresh(Request())
logger.info( logger.info(
f"[get_credentials] Credentials refreshed successfully. User: '{user_google_email}', Session: '{session_id}'" f"[get_credentials] Credentials refreshed successfully. User: '{user_google_email}', Session: '{session_id}'"
@@ -766,7 +757,6 @@ def get_credentials(
if session_id: # Update session cache if it was the source or is active if session_id: # Update session cache if it was the source or is active
save_credentials_to_session(session_id, credentials) save_credentials_to_session(session_id, credentials)
return credentials
except RefreshError as e: except RefreshError as e:
logger.warning( logger.warning(
f"[get_credentials] RefreshError - token expired/revoked: {e}. User: '{user_google_email}', Session: '{session_id}'" f"[get_credentials] RefreshError - token expired/revoked: {e}. User: '{user_google_email}', Session: '{session_id}'"
@@ -785,6 +775,19 @@ def get_credentials(
) )
return None return None
# Check scopes after refresh so stale scope metadata doesn't block valid tokens.
# Uses hierarchy-aware check (e.g. gmail.modify satisfies gmail.readonly).
if not has_required_scopes(credentials.scopes, required_scopes):
logger.warning(
f"[get_credentials] Credentials lack required scopes. Need: {required_scopes}, Have: {credentials.scopes}. User: '{user_google_email}', Session: '{session_id}'"
)
return None # Re-authentication needed for scopes
logger.debug(
f"[get_credentials] Credentials have sufficient scopes. User: '{user_google_email}', Session: '{session_id}'"
)
return credentials
def get_user_info( def get_user_info(
credentials: Credentials, *, skip_valid_check: bool = False credentials: Credentials, *, skip_valid_check: bool = False

View File

@@ -81,6 +81,52 @@ SCRIPT_DEPLOYMENTS_READONLY_SCOPE = (
SCRIPT_PROCESSES_READONLY_SCOPE = "https://www.googleapis.com/auth/script.processes" SCRIPT_PROCESSES_READONLY_SCOPE = "https://www.googleapis.com/auth/script.processes"
SCRIPT_METRICS_SCOPE = "https://www.googleapis.com/auth/script.metrics" SCRIPT_METRICS_SCOPE = "https://www.googleapis.com/auth/script.metrics"
# Google scope hierarchy: broader scopes that implicitly cover narrower ones.
# See https://developers.google.com/gmail/api/auth/scopes,
# https://developers.google.com/drive/api/guides/api-specific-auth, etc.
SCOPE_HIERARCHY = {
GMAIL_MODIFY_SCOPE: {
GMAIL_READONLY_SCOPE,
GMAIL_SEND_SCOPE,
GMAIL_COMPOSE_SCOPE,
GMAIL_LABELS_SCOPE,
},
DRIVE_SCOPE: {DRIVE_READONLY_SCOPE, DRIVE_FILE_SCOPE},
CALENDAR_SCOPE: {CALENDAR_READONLY_SCOPE, CALENDAR_EVENTS_SCOPE},
DOCS_WRITE_SCOPE: {DOCS_READONLY_SCOPE},
SHEETS_WRITE_SCOPE: {SHEETS_READONLY_SCOPE},
SLIDES_SCOPE: {SLIDES_READONLY_SCOPE},
TASKS_SCOPE: {TASKS_READONLY_SCOPE},
CONTACTS_SCOPE: {CONTACTS_READONLY_SCOPE},
CHAT_WRITE_SCOPE: {CHAT_READONLY_SCOPE},
FORMS_BODY_SCOPE: {FORMS_BODY_READONLY_SCOPE},
SCRIPT_PROJECTS_SCOPE: {SCRIPT_PROJECTS_READONLY_SCOPE},
SCRIPT_DEPLOYMENTS_SCOPE: {SCRIPT_DEPLOYMENTS_READONLY_SCOPE},
}
def has_required_scopes(available_scopes, required_scopes):
"""
Check if available scopes satisfy all required scopes, accounting for
Google's scope hierarchy (e.g., gmail.modify covers gmail.readonly).
Args:
available_scopes: Scopes the credentials have (set, list, or frozenset).
required_scopes: Scopes that are required (set, list, or frozenset).
Returns:
True if all required scopes are satisfied.
"""
available = set(available_scopes or [])
required = set(required_scopes or [])
# Expand available scopes with implied narrower scopes
expanded = set(available)
for broad_scope, covered in SCOPE_HIERARCHY.items():
if broad_scope in available:
expanded.update(covered)
return all(scope in expanded for scope in required)
# Base OAuth scopes required for user identification # Base OAuth scopes required for user identification
BASE_SCOPES = [USERINFO_EMAIL_SCOPE, USERINFO_PROFILE_SCOPE, OPENID_SCOPE] BASE_SCOPES = [USERINFO_EMAIL_SCOPE, USERINFO_PROFILE_SCOPE, OPENID_SCOPE]

View File

@@ -54,6 +54,7 @@ from auth.scopes import (
SCRIPT_PROJECTS_READONLY_SCOPE, SCRIPT_PROJECTS_READONLY_SCOPE,
SCRIPT_DEPLOYMENTS_SCOPE, SCRIPT_DEPLOYMENTS_SCOPE,
SCRIPT_DEPLOYMENTS_READONLY_SCOPE, SCRIPT_DEPLOYMENTS_READONLY_SCOPE,
has_required_scopes,
) )
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@@ -275,7 +276,7 @@ async def get_authenticated_google_service_oauth21(
if not scopes_available and getattr(access_token, "scopes", None): if not scopes_available and getattr(access_token, "scopes", None):
scopes_available = set(access_token.scopes) scopes_available = set(access_token.scopes)
if not all(scope in scopes_available for scope in required_scopes): if not has_required_scopes(scopes_available, required_scopes):
raise GoogleAuthenticationError( raise GoogleAuthenticationError(
f"OAuth credentials lack required scopes. Need: {required_scopes}, Have: {sorted(scopes_available)}" f"OAuth credentials lack required scopes. Need: {required_scopes}, Have: {sorted(scopes_available)}"
) )
@@ -305,7 +306,7 @@ async def get_authenticated_google_service_oauth21(
else: else:
scopes_available = set(credentials.scopes) scopes_available = set(credentials.scopes)
if not all(scope in scopes_available for scope in required_scopes): if not has_required_scopes(scopes_available, required_scopes):
raise GoogleAuthenticationError( raise GoogleAuthenticationError(
f"OAuth 2.1 credentials lack required scopes. Need: {required_scopes}, Have: {sorted(scopes_available)}" f"OAuth 2.1 credentials lack required scopes. Need: {required_scopes}, Have: {sorted(scopes_available)}"
) )

View File

@@ -6,17 +6,97 @@ This module provides MCP tools for interacting with Google Chat API.
import logging import logging
import asyncio import asyncio
from typing import Optional from typing import Dict, List, Optional
from googleapiclient.errors import HttpError from googleapiclient.errors import HttpError
# Auth & server utilities # Auth & server utilities
from auth.service_decorator import require_google_service from auth.service_decorator import require_google_service, require_multiple_services
from core.server import server from core.server import server
from core.utils import handle_http_errors from core.utils import handle_http_errors
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
# In-memory cache for user ID → display name (bounded to avoid unbounded growth)
_SENDER_CACHE_MAX_SIZE = 256
_sender_name_cache: Dict[str, str] = {}
def _cache_sender(user_id: str, name: str) -> None:
"""Store a resolved sender name, evicting oldest entries if cache is full."""
if len(_sender_name_cache) >= _SENDER_CACHE_MAX_SIZE:
to_remove = list(_sender_name_cache.keys())[: _SENDER_CACHE_MAX_SIZE // 2]
for k in to_remove:
del _sender_name_cache[k]
_sender_name_cache[user_id] = name
async def _resolve_sender(people_service, sender_obj: dict) -> str:
"""Resolve a Chat message sender to a display name.
Fast path: use displayName if the API already provided it.
Slow path: look up the user via the People API directory and cache the result.
"""
# Fast path — Chat API sometimes provides displayName directly
display_name = sender_obj.get("displayName")
if display_name:
return display_name
user_id = sender_obj.get("name", "") # e.g. "users/123456789"
if not user_id:
return "Unknown Sender"
# Check cache
if user_id in _sender_name_cache:
return _sender_name_cache[user_id]
# Try People API directory lookup
# Chat API uses "users/ID" but People API expects "people/ID"
people_resource = user_id.replace("users/", "people/", 1)
if people_service:
try:
person = await asyncio.to_thread(
people_service.people()
.get(resourceName=people_resource, personFields="names,emailAddresses")
.execute
)
names = person.get("names", [])
if names:
resolved = names[0].get("displayName", user_id)
_cache_sender(user_id, resolved)
return resolved
# Fall back to email if no name
emails = person.get("emailAddresses", [])
if emails:
resolved = emails[0].get("value", user_id)
_cache_sender(user_id, resolved)
return resolved
except HttpError as e:
logger.debug(f"People API lookup failed for {user_id}: {e}")
except Exception as e:
logger.debug(f"Unexpected error resolving {user_id}: {e}")
# Final fallback
_cache_sender(user_id, user_id)
return user_id
def _extract_rich_links(msg: dict) -> List[str]:
"""Extract URLs from RICH_LINK annotations (smart chips).
When a user pastes a Google Workspace URL in Chat and it renders as a
smart chip, the URL is NOT in the text field — it's only available in
the annotations array as a RICH_LINK with richLinkMetadata.uri.
"""
text = msg.get("text", "")
urls = []
for ann in msg.get("annotations", []):
if ann.get("type") == "RICH_LINK":
uri = ann.get("richLinkMetadata", {}).get("uri", "")
if uri and uri not in text:
urls.append(uri)
return urls
@server.tool() @server.tool()
@require_google_service("chat", "chat_spaces_readonly") @require_google_service("chat", "chat_spaces_readonly")
@@ -63,10 +143,20 @@ async def list_spaces(
@server.tool() @server.tool()
@require_google_service("chat", "chat_read") @require_multiple_services(
[
{"service_type": "chat", "scopes": "chat_read", "param_name": "chat_service"},
{
"service_type": "people",
"scopes": "contacts_read",
"param_name": "people_service",
},
]
)
@handle_http_errors("get_messages", service_type="chat") @handle_http_errors("get_messages", service_type="chat")
async def get_messages( async def get_messages(
service, chat_service,
people_service,
user_google_email: str, user_google_email: str,
space_id: str, space_id: str,
page_size: int = 50, page_size: int = 50,
@@ -81,12 +171,14 @@ async def get_messages(
logger.info(f"[get_messages] Space ID: '{space_id}' for user '{user_google_email}'") logger.info(f"[get_messages] Space ID: '{space_id}' for user '{user_google_email}'")
# Get space info first # Get space info first
space_info = await asyncio.to_thread(service.spaces().get(name=space_id).execute) space_info = await asyncio.to_thread(
chat_service.spaces().get(name=space_id).execute
)
space_name = space_info.get("displayName", "Unknown Space") space_name = space_info.get("displayName", "Unknown Space")
# Get messages # Get messages
response = await asyncio.to_thread( response = await asyncio.to_thread(
service.spaces() chat_service.spaces()
.messages() .messages()
.list(parent=space_id, pageSize=page_size, orderBy=order_by) .list(parent=space_id, pageSize=page_size, orderBy=order_by)
.execute .execute
@@ -96,15 +188,51 @@ async def get_messages(
if not messages: if not messages:
return f"No messages found in space '{space_name}' (ID: {space_id})." return f"No messages found in space '{space_name}' (ID: {space_id})."
# Pre-resolve unique senders in parallel
sender_lookup = {}
for msg in messages:
s = msg.get("sender", {})
key = s.get("name", "")
if key and key not in sender_lookup:
sender_lookup[key] = s
resolved_names = await asyncio.gather(
*[_resolve_sender(people_service, s) for s in sender_lookup.values()]
)
sender_map = dict(zip(sender_lookup.keys(), resolved_names))
output = [f"Messages from '{space_name}' (ID: {space_id}):\n"] output = [f"Messages from '{space_name}' (ID: {space_id}):\n"]
for msg in messages: for msg in messages:
sender = msg.get("sender", {}).get("displayName", "Unknown Sender") sender_obj = msg.get("sender", {})
sender_key = sender_obj.get("name", "")
sender = sender_map.get(sender_key) or await _resolve_sender(
people_service, sender_obj
)
create_time = msg.get("createTime", "Unknown Time") create_time = msg.get("createTime", "Unknown Time")
text_content = msg.get("text", "No text content") text_content = msg.get("text", "No text content")
msg_name = msg.get("name", "") msg_name = msg.get("name", "")
output.append(f"[{create_time}] {sender}:") output.append(f"[{create_time}] {sender}:")
output.append(f" {text_content}") output.append(f" {text_content}")
rich_links = _extract_rich_links(msg)
for url in rich_links:
output.append(f" [linked: {url}]")
# Show thread info if this is a threaded reply
thread = msg.get("thread", {})
if msg.get("threadReply") and thread.get("name"):
output.append(f" [thread: {thread['name']}]")
# Show emoji reactions
reactions = msg.get("emojiReactionSummaries", [])
if reactions:
parts = []
for r in reactions:
emoji = r.get("emoji", {})
symbol = emoji.get("unicode", "")
if not symbol:
ce = emoji.get("customEmoji", {})
symbol = f":{ce.get('uid', '?')}:"
count = r.get("reactionCount", 0)
parts.append(f"{symbol}x{count}")
output.append(f" [reactions: {', '.join(parts)}]")
output.append(f" (Message ID: {msg_name})\n") output.append(f" (Message ID: {msg_name})\n")
return "\n".join(output) return "\n".join(output)
@@ -119,10 +247,15 @@ async def send_message(
space_id: str, space_id: str,
message_text: str, message_text: str,
thread_key: Optional[str] = None, thread_key: Optional[str] = None,
thread_name: Optional[str] = None,
) -> str: ) -> str:
""" """
Sends a message to a Google Chat space. Sends a message to a Google Chat space.
Args:
thread_name: Reply in an existing thread by its resource name (e.g. spaces/X/threads/Y).
thread_key: Reply in a thread by app-defined key (creates thread if not found).
Returns: Returns:
str: Confirmation message with sent message details. str: Confirmation message with sent message details.
""" """
@@ -130,10 +263,15 @@ async def send_message(
message_body = {"text": message_text} message_body = {"text": message_text}
# Add thread key if provided (for threaded replies)
request_params = {"parent": space_id, "body": message_body} request_params = {"parent": space_id, "body": message_body}
if thread_key:
request_params["threadKey"] = thread_key # Thread reply support
if thread_name:
message_body["thread"] = {"name": thread_name}
request_params["messageReplyOption"] = "REPLY_MESSAGE_FALLBACK_TO_NEW_THREAD"
elif thread_key:
message_body["thread"] = {"threadKey": thread_key}
request_params["messageReplyOption"] = "REPLY_MESSAGE_FALLBACK_TO_NEW_THREAD"
message = await asyncio.to_thread( message = await asyncio.to_thread(
service.spaces().messages().create(**request_params).execute service.spaces().messages().create(**request_params).execute
@@ -150,10 +288,20 @@ async def send_message(
@server.tool() @server.tool()
@require_google_service("chat", "chat_read") @require_multiple_services(
[
{"service_type": "chat", "scopes": "chat_read", "param_name": "chat_service"},
{
"service_type": "people",
"scopes": "contacts_read",
"param_name": "people_service",
},
]
)
@handle_http_errors("search_messages", service_type="chat") @handle_http_errors("search_messages", service_type="chat")
async def search_messages( async def search_messages(
service, chat_service,
people_service,
user_google_email: str, user_google_email: str,
query: str, query: str,
space_id: Optional[str] = None, space_id: Optional[str] = None,
@@ -170,7 +318,7 @@ async def search_messages(
# If specific space provided, search within that space # If specific space provided, search within that space
if space_id: if space_id:
response = await asyncio.to_thread( response = await asyncio.to_thread(
service.spaces() chat_service.spaces()
.messages() .messages()
.list(parent=space_id, pageSize=page_size, filter=f'text:"{query}"') .list(parent=space_id, pageSize=page_size, filter=f'text:"{query}"')
.execute .execute
@@ -181,7 +329,7 @@ async def search_messages(
# Search across all accessible spaces (this may require iterating through spaces) # Search across all accessible spaces (this may require iterating through spaces)
# For simplicity, we'll search the user's spaces first # For simplicity, we'll search the user's spaces first
spaces_response = await asyncio.to_thread( spaces_response = await asyncio.to_thread(
service.spaces().list(pageSize=100).execute chat_service.spaces().list(pageSize=100).execute
) )
spaces = spaces_response.get("spaces", []) spaces = spaces_response.get("spaces", [])
@@ -189,7 +337,7 @@ async def search_messages(
for space in spaces[:10]: # Limit to first 10 spaces to avoid timeout for space in spaces[:10]: # Limit to first 10 spaces to avoid timeout
try: try:
space_messages = await asyncio.to_thread( space_messages = await asyncio.to_thread(
service.spaces() chat_service.spaces()
.messages() .messages()
.list( .list(
parent=space.get("name"), pageSize=5, filter=f'text:"{query}"' parent=space.get("name"), pageSize=5, filter=f'text:"{query}"'
@@ -200,16 +348,35 @@ async def search_messages(
for msg in space_msgs: for msg in space_msgs:
msg["_space_name"] = space.get("displayName", "Unknown") msg["_space_name"] = space.get("displayName", "Unknown")
messages.extend(space_msgs) messages.extend(space_msgs)
except HttpError: except HttpError as e:
continue # Skip spaces we can't access logger.debug(
"Skipping space %s during search: %s", space.get("name"), e
)
continue
context = "all accessible spaces" context = "all accessible spaces"
if not messages: if not messages:
return f"No messages found matching '{query}' in {context}." return f"No messages found matching '{query}' in {context}."
# Pre-resolve unique senders in parallel
sender_lookup = {}
for msg in messages:
s = msg.get("sender", {})
key = s.get("name", "")
if key and key not in sender_lookup:
sender_lookup[key] = s
resolved_names = await asyncio.gather(
*[_resolve_sender(people_service, s) for s in sender_lookup.values()]
)
sender_map = dict(zip(sender_lookup.keys(), resolved_names))
output = [f"Found {len(messages)} messages matching '{query}' in {context}:"] output = [f"Found {len(messages)} messages matching '{query}' in {context}:"]
for msg in messages: for msg in messages:
sender = msg.get("sender", {}).get("displayName", "Unknown Sender") sender_obj = msg.get("sender", {})
sender_key = sender_obj.get("name", "")
sender = sender_map.get(sender_key) or await _resolve_sender(
people_service, sender_obj
)
create_time = msg.get("createTime", "Unknown Time") create_time = msg.get("createTime", "Unknown Time")
text_content = msg.get("text", "No text content") text_content = msg.get("text", "No text content")
space_name = msg.get("_space_name", "Unknown Space") space_name = msg.get("_space_name", "Unknown Space")
@@ -218,6 +385,46 @@ async def search_messages(
if len(text_content) > 100: if len(text_content) > 100:
text_content = text_content[:100] + "..." text_content = text_content[:100] + "..."
output.append(f"- [{create_time}] {sender} in '{space_name}': {text_content}") rich_links = _extract_rich_links(msg)
links_suffix = "".join(f" [linked: {url}]" for url in rich_links)
output.append(
f"- [{create_time}] {sender} in '{space_name}': {text_content}{links_suffix}"
)
return "\n".join(output) return "\n".join(output)
@server.tool()
@require_google_service("chat", "chat_write")
@handle_http_errors("create_reaction", service_type="chat")
async def create_reaction(
service,
user_google_email: str,
message_id: str,
emoji_unicode: str,
) -> str:
"""
Adds an emoji reaction to a Google Chat message.
Args:
message_id: The message resource name (e.g. spaces/X/messages/Y).
emoji_unicode: The emoji character to react with (e.g. 👍).
Returns:
str: Confirmation message.
"""
logger.info(f"[create_reaction] Message: '{message_id}', Emoji: '{emoji_unicode}'")
reaction = await asyncio.to_thread(
service.spaces()
.messages()
.reactions()
.create(
parent=message_id,
body={"emoji": {"unicode": emoji_unicode}},
)
.execute
)
reaction_name = reaction.get("name", "")
return f"Reacted with {emoji_unicode} on message {message_id}. Reaction ID: {reaction_name}"

View File

@@ -46,6 +46,7 @@ def build_text_style(
font_family: str = None, font_family: str = None,
text_color: str = None, text_color: str = None,
background_color: str = None, background_color: str = None,
link_url: str = None,
) -> tuple[Dict[str, Any], list[str]]: ) -> tuple[Dict[str, Any], list[str]]:
""" """
Build text style object for Google Docs API requests. Build text style object for Google Docs API requests.
@@ -58,6 +59,7 @@ def build_text_style(
font_family: Font family name font_family: Font family name
text_color: Text color as hex string "#RRGGBB" text_color: Text color as hex string "#RRGGBB"
background_color: Background (highlight) color as hex string "#RRGGBB" background_color: Background (highlight) color as hex string "#RRGGBB"
link_url: Hyperlink URL (http/https)
Returns: Returns:
Tuple of (text_style_dict, list_of_field_names) Tuple of (text_style_dict, list_of_field_names)
@@ -95,6 +97,10 @@ def build_text_style(
text_style["backgroundColor"] = {"color": {"rgbColor": rgb}} text_style["backgroundColor"] = {"color": {"rgbColor": rgb}}
fields.append("backgroundColor") fields.append("backgroundColor")
if link_url is not None:
text_style["link"] = {"url": link_url}
fields.append("link")
return text_style, fields return text_style, fields
@@ -242,6 +248,7 @@ def create_format_text_request(
font_family: str = None, font_family: str = None,
text_color: str = None, text_color: str = None,
background_color: str = None, background_color: str = None,
link_url: str = None,
) -> Optional[Dict[str, Any]]: ) -> Optional[Dict[str, Any]]:
""" """
Create an updateTextStyle request for Google Docs API. Create an updateTextStyle request for Google Docs API.
@@ -256,12 +263,20 @@ def create_format_text_request(
font_family: Font family name font_family: Font family name
text_color: Text color as hex string "#RRGGBB" text_color: Text color as hex string "#RRGGBB"
background_color: Background (highlight) color as hex string "#RRGGBB" background_color: Background (highlight) color as hex string "#RRGGBB"
link_url: Hyperlink URL (http/https)
Returns: Returns:
Dictionary representing the updateTextStyle request, or None if no styles provided Dictionary representing the updateTextStyle request, or None if no styles provided
""" """
text_style, fields = build_text_style( text_style, fields = build_text_style(
bold, italic, underline, font_size, font_family, text_color, background_color bold,
italic,
underline,
font_size,
font_family,
text_color,
background_color,
link_url,
) )
if not text_style: if not text_style:

View File

@@ -367,6 +367,7 @@ async def modify_doc_text(
font_family: str = None, font_family: str = None,
text_color: str = None, text_color: str = None,
background_color: str = None, background_color: str = None,
link_url: str = None,
) -> str: ) -> str:
""" """
Modifies text in a Google Doc - can insert/replace text and/or apply formatting in a single operation. Modifies text in a Google Doc - can insert/replace text and/or apply formatting in a single operation.
@@ -384,13 +385,14 @@ async def modify_doc_text(
font_family: Font family name (e.g., "Arial", "Times New Roman") font_family: Font family name (e.g., "Arial", "Times New Roman")
text_color: Foreground text color (#RRGGBB) text_color: Foreground text color (#RRGGBB)
background_color: Background/highlight color (#RRGGBB) background_color: Background/highlight color (#RRGGBB)
link_url: Hyperlink URL (http/https)
Returns: Returns:
str: Confirmation message with operation details str: Confirmation message with operation details
""" """
logger.info( logger.info(
f"[modify_doc_text] Doc={document_id}, start={start_index}, end={end_index}, text={text is not None}, " f"[modify_doc_text] Doc={document_id}, start={start_index}, end={end_index}, text={text is not None}, "
f"formatting={any([bold, italic, underline, font_size, font_family, text_color, background_color])}" f"formatting={any(p is not None for p in [bold, italic, underline, font_size, font_family, text_color, background_color, link_url])}"
) )
# Input validation # Input validation
@@ -401,31 +403,21 @@ async def modify_doc_text(
return f"Error: {error_msg}" return f"Error: {error_msg}"
# Validate that we have something to do # Validate that we have something to do
if text is None and not any( formatting_params = [
[ bold,
bold is not None, italic,
italic is not None, underline,
underline is not None, font_size,
font_size, font_family,
font_family, text_color,
text_color, background_color,
background_color, link_url,
] ]
): if text is None and not any(p is not None for p in formatting_params):
return "Error: Must provide either 'text' to insert/replace, or formatting parameters (bold, italic, underline, font_size, font_family, text_color, background_color)." return "Error: Must provide either 'text' to insert/replace, or formatting parameters (bold, italic, underline, font_size, font_family, text_color, background_color, link_url)."
# Validate text formatting params if provided # Validate text formatting params if provided
if any( if any(p is not None for p in formatting_params):
[
bold is not None,
italic is not None,
underline is not None,
font_size,
font_family,
text_color,
background_color,
]
):
is_valid, error_msg = validator.validate_text_formatting_params( is_valid, error_msg = validator.validate_text_formatting_params(
bold, bold,
italic, italic,
@@ -434,6 +426,7 @@ async def modify_doc_text(
font_family, font_family,
text_color, text_color,
background_color, background_color,
link_url,
) )
if not is_valid: if not is_valid:
return f"Error: {error_msg}" return f"Error: {error_msg}"
@@ -482,17 +475,7 @@ async def modify_doc_text(
operations.append(f"Inserted text at index {start_index}") operations.append(f"Inserted text at index {start_index}")
# Handle formatting # Handle formatting
if any( if any(p is not None for p in formatting_params):
[
bold is not None,
italic is not None,
underline is not None,
font_size,
font_family,
text_color,
background_color,
]
):
# Adjust range for formatting based on text operations # Adjust range for formatting based on text operations
format_start = start_index format_start = start_index
format_end = end_index format_end = end_index
@@ -524,24 +507,24 @@ async def modify_doc_text(
font_family, font_family,
text_color, text_color,
background_color, background_color,
link_url,
) )
) )
format_details = [] format_details = [
if bold is not None: f"{name}={value}"
format_details.append(f"bold={bold}") for name, value in [
if italic is not None: ("bold", bold),
format_details.append(f"italic={italic}") ("italic", italic),
if underline is not None: ("underline", underline),
format_details.append(f"underline={underline}") ("font_size", font_size),
if font_size: ("font_family", font_family),
format_details.append(f"font_size={font_size}") ("text_color", text_color),
if font_family: ("background_color", background_color),
format_details.append(f"font_family={font_family}") ("link_url", link_url),
if text_color: ]
format_details.append(f"text_color={text_color}") if value is not None
if background_color: ]
format_details.append(f"background_color={background_color}")
operations.append( operations.append(
f"Applied formatting ({', '.join(format_details)}) to range {format_start}-{format_end}" f"Applied formatting ({', '.join(format_details)}) to range {format_start}-{format_end}"

View File

@@ -190,6 +190,7 @@ class BatchOperationManager:
op.get("font_family"), op.get("font_family"),
op.get("text_color"), op.get("text_color"),
op.get("background_color"), op.get("background_color"),
op.get("link_url"),
) )
if not request: if not request:
@@ -205,6 +206,7 @@ class BatchOperationManager:
("font_family", "font family"), ("font_family", "font family"),
("text_color", "text color"), ("text_color", "text color"),
("background_color", "background color"), ("background_color", "background color"),
("link_url", "link"),
]: ]:
if op.get(param) is not None: if op.get(param) is not None:
value = f"{op[param]}pt" if param == "font_size" else op[param] value = f"{op[param]}pt" if param == "font_size" else op[param]
@@ -370,6 +372,7 @@ class BatchOperationManager:
"font_family", "font_family",
"text_color", "text_color",
"background_color", "background_color",
"link_url",
], ],
"description": "Apply formatting to text range", "description": "Apply formatting to text range",
}, },

View File

@@ -7,6 +7,7 @@ extracting validation patterns from individual tool functions.
import logging import logging
from typing import Dict, Any, List, Tuple, Optional from typing import Dict, Any, List, Tuple, Optional
from urllib.parse import urlparse
from gdocs.docs_helpers import validate_operation from gdocs.docs_helpers import validate_operation
@@ -159,6 +160,7 @@ class ValidationManager:
font_family: Optional[str] = None, font_family: Optional[str] = None,
text_color: Optional[str] = None, text_color: Optional[str] = None,
background_color: Optional[str] = None, background_color: Optional[str] = None,
link_url: Optional[str] = None,
) -> Tuple[bool, str]: ) -> Tuple[bool, str]:
""" """
Validate text formatting parameters. Validate text formatting parameters.
@@ -171,6 +173,7 @@ class ValidationManager:
font_family: Font family name font_family: Font family name
text_color: Text color in "#RRGGBB" format text_color: Text color in "#RRGGBB" format
background_color: Background color in "#RRGGBB" format background_color: Background color in "#RRGGBB" format
link_url: Hyperlink URL (http/https)
Returns: Returns:
Tuple of (is_valid, error_message) Tuple of (is_valid, error_message)
@@ -184,11 +187,12 @@ class ValidationManager:
font_family, font_family,
text_color, text_color,
background_color, background_color,
link_url,
] ]
if all(param is None for param in formatting_params): if all(param is None for param in formatting_params):
return ( return (
False, False,
"At least one formatting parameter must be provided (bold, italic, underline, font_size, font_family, text_color, or background_color)", "At least one formatting parameter must be provided (bold, italic, underline, font_size, font_family, text_color, background_color, or link_url)",
) )
# Validate boolean parameters # Validate boolean parameters
@@ -240,6 +244,30 @@ class ValidationManager:
if not is_valid: if not is_valid:
return False, error_msg return False, error_msg
is_valid, error_msg = self.validate_link_url(link_url)
if not is_valid:
return False, error_msg
return True, ""
def validate_link_url(self, link_url: Optional[str]) -> Tuple[bool, str]:
    """Validate hyperlink URL parameters."""
    # None means "no link requested" — valid by definition.
    if link_url is None:
        return True, ""
    if not isinstance(link_url, str):
        return False, f"link_url must be a string, got {type(link_url).__name__}"
    # Reject empty / whitespace-only strings before attempting to parse.
    if not link_url.strip():
        return False, "link_url cannot be empty"
    parts = urlparse(link_url)
    scheme_allowed = parts.scheme in ("http", "https")
    if not scheme_allowed:
        return False, "link_url must start with http:// or https://"
    if not parts.netloc:
        return False, "link_url must include a valid host"
    return True, ""
def validate_paragraph_style_params( def validate_paragraph_style_params(
@@ -578,6 +606,7 @@ class ValidationManager:
op.get("font_family"), op.get("font_family"),
op.get("text_color"), op.get("text_color"),
op.get("background_color"), op.get("background_color"),
op.get("link_url"),
) )
if not is_valid: if not is_valid:
return False, f"Operation {i + 1} (format_text): {error_msg}" return False, f"Operation {i + 1} (format_text): {error_msg}"

View File

@@ -2,7 +2,7 @@
"dxt_version": "0.1", "dxt_version": "0.1",
"name": "workspace-mcp", "name": "workspace-mcp",
"display_name": "Google Workspace MCP", "display_name": "Google Workspace MCP",
"version": "1.6.2", "version": "1.11.5",
"description": "Full natural language control over Google Calendar, Drive, Gmail, Docs, Sheets, Slides, Forms, Tasks, Chat and Custom Search through all MCP clients, AI assistants and developer tools", "description": "Full natural language control over Google Calendar, Drive, Gmail, Docs, Sheets, Slides, Forms, Tasks, Chat and Custom Search through all MCP clients, AI assistants and developer tools",
"long_description": "A production-ready MCP server that integrates all major Google Workspace services with AI assistants. Includes Google PSE integration for custom web searches.", "long_description": "A production-ready MCP server that integrates all major Google Workspace services with AI assistants. Includes Google PSE integration for custom web searches.",
"author": { "author": {

View File

@@ -4,10 +4,11 @@ build-backend = "setuptools.build_meta"
[project] [project]
name = "workspace-mcp" name = "workspace-mcp"
version = "1.11.2" version = "1.11.5"
description = "Comprehensive, highly performant Google Workspace Streamable HTTP & SSE MCP Server for Calendar, Gmail, Docs, Sheets, Slides & Drive" description = "Comprehensive, highly performant Google Workspace Streamable HTTP & SSE MCP Server for Calendar, Gmail, Docs, Sheets, Slides & Drive"
readme = "README.md" readme = "README.md"
keywords = [ "mcp", "google", "workspace", "llm", "ai", "claude", "model", "context", "protocol", "server"] keywords = [ "mcp", "google", "workspace", "llm", "ai", "claude", "model", "context", "protocol", "server"]
license = "MIT"
requires-python = ">=3.10" requires-python = ">=3.10"
dependencies = [ dependencies = [
"fastapi>=0.115.12", "fastapi>=0.115.12",
@@ -26,7 +27,6 @@ classifiers = [
"Development Status :: 4 - Beta", "Development Status :: 4 - Beta",
"Environment :: Console", "Environment :: Console",
"Intended Audience :: Developers", "Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Natural Language :: English", "Natural Language :: English",
"Operating System :: OS Independent", "Operating System :: OS Independent",
"Programming Language :: Python", "Programming Language :: Python",
@@ -46,9 +46,6 @@ classifiers = [
name = "Taylor Wilsdon" name = "Taylor Wilsdon"
email = "taylor@taylorwilsdon.com" email = "taylor@taylorwilsdon.com"
[project.license]
text = "MIT"
[project.urls] [project.urls]
Homepage = "https://workspacemcp.com" Homepage = "https://workspacemcp.com"
Repository = "https://github.com/taylorwilsdon/google_workspace_mcp" Repository = "https://github.com/taylorwilsdon/google_workspace_mcp"

View File

@@ -3,7 +3,7 @@
"name": "io.github.taylorwilsdon/workspace-mcp", "name": "io.github.taylorwilsdon/workspace-mcp",
"description": "Google Workspace MCP server for Gmail, Drive, Calendar, Docs, Sheets, Slides, Forms, Tasks, Chat.", "description": "Google Workspace MCP server for Gmail, Drive, Calendar, Docs, Sheets, Slides, Forms, Tasks, Chat.",
"status": "active", "status": "active",
"version": "1.11.2", "version": "1.11.5",
"packages": [ "packages": [
{ {
"registryType": "pypi", "registryType": "pypi",
@@ -11,7 +11,7 @@
"transport": { "transport": {
"type": "stdio" "type": "stdio"
}, },
"version": "1.11.2" "version": "1.11.5"
} }
] ]
} }

View File

@@ -12,10 +12,23 @@ import os
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))) sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), "..")))
from auth.scopes import ( from auth.scopes import (
CALENDAR_READONLY_SCOPE,
CALENDAR_SCOPE,
CONTACTS_READONLY_SCOPE,
CONTACTS_SCOPE,
DRIVE_FILE_SCOPE, DRIVE_FILE_SCOPE,
DRIVE_READONLY_SCOPE, DRIVE_READONLY_SCOPE,
DRIVE_SCOPE, DRIVE_SCOPE,
GMAIL_COMPOSE_SCOPE,
GMAIL_LABELS_SCOPE,
GMAIL_MODIFY_SCOPE,
GMAIL_READONLY_SCOPE,
GMAIL_SEND_SCOPE,
GMAIL_SETTINGS_BASIC_SCOPE,
SHEETS_READONLY_SCOPE,
SHEETS_WRITE_SCOPE,
get_scopes_for_tools, get_scopes_for_tools,
has_required_scopes,
set_read_only, set_read_only,
) )
@@ -93,3 +106,92 @@ class TestReadOnlyScopes:
set_read_only(True) set_read_only(True)
scopes = get_scopes_for_tools(["sheets"]) scopes = get_scopes_for_tools(["sheets"])
assert DRIVE_READONLY_SCOPE in scopes assert DRIVE_READONLY_SCOPE in scopes
class TestHasRequiredScopes:
    """Tests for hierarchy-aware scope checking."""

    def test_exact_match(self):
        """An exactly-matching granted scope satisfies the requirement."""
        granted = [GMAIL_READONLY_SCOPE]
        assert has_required_scopes(granted, [GMAIL_READONLY_SCOPE])

    def test_missing_scope_fails(self):
        """A required scope with no covering broader scope is not satisfied."""
        granted = [GMAIL_READONLY_SCOPE]
        assert not has_required_scopes(granted, [GMAIL_SEND_SCOPE])

    def test_empty_available_fails(self):
        """An empty grant set cannot satisfy any requirement."""
        assert not has_required_scopes([], [GMAIL_READONLY_SCOPE])

    def test_empty_required_passes(self):
        """With nothing required, any grant set (even empty) passes."""
        assert has_required_scopes([], [])
        assert has_required_scopes([GMAIL_READONLY_SCOPE], [])

    def test_none_available_fails(self):
        """A None grant set fails when something is required."""
        assert not has_required_scopes(None, [GMAIL_READONLY_SCOPE])

    def test_none_available_empty_required_passes(self):
        """A None grant set still passes when nothing is required."""
        assert has_required_scopes(None, [])

    # Gmail hierarchy: gmail.modify covers readonly, send, compose, labels
    def test_gmail_modify_covers_readonly(self):
        granted = [GMAIL_MODIFY_SCOPE]
        assert has_required_scopes(granted, [GMAIL_READONLY_SCOPE])

    def test_gmail_modify_covers_send(self):
        granted = [GMAIL_MODIFY_SCOPE]
        assert has_required_scopes(granted, [GMAIL_SEND_SCOPE])

    def test_gmail_modify_covers_compose(self):
        granted = [GMAIL_MODIFY_SCOPE]
        assert has_required_scopes(granted, [GMAIL_COMPOSE_SCOPE])

    def test_gmail_modify_covers_labels(self):
        granted = [GMAIL_MODIFY_SCOPE]
        assert has_required_scopes(granted, [GMAIL_LABELS_SCOPE])

    def test_gmail_modify_does_not_cover_settings(self):
        """gmail.modify does NOT imply gmail.settings.basic."""
        granted = [GMAIL_MODIFY_SCOPE]
        assert not has_required_scopes(granted, [GMAIL_SETTINGS_BASIC_SCOPE])

    def test_gmail_modify_covers_multiple_children(self):
        """gmail.modify satisfies several child scopes simultaneously."""
        needed = [GMAIL_READONLY_SCOPE, GMAIL_SEND_SCOPE, GMAIL_LABELS_SCOPE]
        assert has_required_scopes([GMAIL_MODIFY_SCOPE], needed)

    # Drive hierarchy: drive covers drive.readonly and drive.file
    def test_drive_covers_readonly(self):
        granted = [DRIVE_SCOPE]
        assert has_required_scopes(granted, [DRIVE_READONLY_SCOPE])

    def test_drive_covers_file(self):
        granted = [DRIVE_SCOPE]
        assert has_required_scopes(granted, [DRIVE_FILE_SCOPE])

    def test_drive_readonly_does_not_cover_full(self):
        """A narrower scope never satisfies its broader parent."""
        granted = [DRIVE_READONLY_SCOPE]
        assert not has_required_scopes(granted, [DRIVE_SCOPE])

    # Other hierarchies
    def test_calendar_covers_readonly(self):
        granted = [CALENDAR_SCOPE]
        assert has_required_scopes(granted, [CALENDAR_READONLY_SCOPE])

    def test_sheets_write_covers_readonly(self):
        granted = [SHEETS_WRITE_SCOPE]
        assert has_required_scopes(granted, [SHEETS_READONLY_SCOPE])

    def test_contacts_covers_readonly(self):
        granted = [CONTACTS_SCOPE]
        assert has_required_scopes(granted, [CONTACTS_READONLY_SCOPE])

    # Mixed: some exact, some via hierarchy
    def test_mixed_exact_and_hierarchy(self):
        """Exact matches and hierarchy-implied scopes combine."""
        granted = [GMAIL_MODIFY_SCOPE, DRIVE_READONLY_SCOPE]
        needed = [GMAIL_READONLY_SCOPE, DRIVE_READONLY_SCOPE]
        assert has_required_scopes(granted, needed)

    def test_mixed_partial_failure(self):
        """Fails when the hierarchy covers only some required scopes."""
        granted = [GMAIL_MODIFY_SCOPE]
        needed = [GMAIL_READONLY_SCOPE, DRIVE_READONLY_SCOPE]
        assert not has_required_scopes(granted, needed)

2
uv.lock generated
View File

@@ -2195,7 +2195,7 @@ wheels = [
[[package]] [[package]]
name = "workspace-mcp" name = "workspace-mcp"
version = "1.11.1" version = "1.11.5"
source = { editable = "." } source = { editable = "." }
dependencies = [ dependencies = [
{ name = "cryptography" }, { name = "cryptography" },