merge conflicts

This commit is contained in:
Taylor Wilsdon
2026-02-28 17:53:39 -04:00
20 changed files with 1591 additions and 127 deletions

View File

@@ -1,5 +1,6 @@
{"id":"google_workspace_mcp-016","title":"fix: correct MCP registry PyPI ownership metadata","description":"Twine/PyPI rejects project.urls mcp-name because URL values must be valid URLs. For MCP registry PyPI verification, use README marker mcp-name: \u003cserver-name\u003e and ensure server.json name uses io.github.\u003cuser\u003e/\u003cserver\u003e format.","status":"closed","priority":1,"issue_type":"bug","owner":"tbarrettwilsdon@gmail.com","created_at":"2026-02-08T20:04:06.49156-05:00","created_by":"Taylor Wilsdon","updated_at":"2026-02-08T20:05:35.18854-05:00","closed_at":"2026-02-08T20:05:35.18854-05:00","close_reason":"Closed"} {"id":"google_workspace_mcp-016","title":"fix: correct MCP registry PyPI ownership metadata","description":"Twine/PyPI rejects project.urls mcp-name because URL values must be valid URLs. For MCP registry PyPI verification, use README marker mcp-name: \u003cserver-name\u003e and ensure server.json name uses io.github.\u003cuser\u003e/\u003cserver\u003e format.","status":"closed","priority":1,"issue_type":"bug","owner":"tbarrettwilsdon@gmail.com","created_at":"2026-02-08T20:04:06.49156-05:00","created_by":"Taylor Wilsdon","updated_at":"2026-02-08T20:05:35.18854-05:00","closed_at":"2026-02-08T20:05:35.18854-05:00","close_reason":"Closed"}
{"id":"google_workspace_mcp-0fl","title":"enh: add MCP registry publish to local release.py flow","description":"Extend scripts/release.py to sync server.json version and publish to MCP Registry via mcp-publisher during local release process, so publishing does not depend on GitHub Actions.","status":"closed","priority":2,"issue_type":"task","owner":"tbarrettwilsdon@gmail.com","created_at":"2026-02-08T21:03:51.388408-05:00","created_by":"Taylor Wilsdon","updated_at":"2026-02-08T21:06:05.334395-05:00","closed_at":"2026-02-08T21:06:05.334395-05:00","close_reason":"Closed"} {"id":"google_workspace_mcp-0fl","title":"enh: add MCP registry publish to local release.py flow","description":"Extend scripts/release.py to sync server.json version and publish to MCP Registry via mcp-publisher during local release process, so publishing does not depend on GitHub Actions.","status":"closed","priority":2,"issue_type":"task","owner":"tbarrettwilsdon@gmail.com","created_at":"2026-02-08T21:03:51.388408-05:00","created_by":"Taylor Wilsdon","updated_at":"2026-02-08T21:06:05.334395-05:00","closed_at":"2026-02-08T21:06:05.334395-05:00","close_reason":"Closed"}
{"id":"google_workspace_mcp-0lv","title":"fix: preserve PKCE code_verifier across legacy OAuth callback","status":"closed","priority":1,"issue_type":"bug","owner":"tbarrettwilsdon@gmail.com","created_at":"2026-02-28T09:56:17.914665-04:00","created_by":"Taylor Wilsdon","updated_at":"2026-02-28T09:58:46.892165-04:00","closed_at":"2026-02-28T09:58:46.892165-04:00","close_reason":"Implemented PKCE code_verifier state continuity in legacy OAuth callback path and added tests"}
{"id":"google_workspace_mcp-2mc","title":"release: cut next PyPI version and publish MCP registry entry","description":"Run local release flow: uv run python scripts/release.py to publish PyPI + MCP Registry via mcp-publisher. Verify package version on PyPI and server listing in registry search endpoint.","status":"open","priority":2,"issue_type":"task","owner":"tbarrettwilsdon@gmail.com","created_at":"2026-02-08T20:00:39.779476-05:00","created_by":"Taylor Wilsdon","updated_at":"2026-02-08T21:06:15.613447-05:00"} {"id":"google_workspace_mcp-2mc","title":"release: cut next PyPI version and publish MCP registry entry","description":"Run local release flow: uv run python scripts/release.py to publish PyPI + MCP Registry via mcp-publisher. Verify package version on PyPI and server listing in registry search endpoint.","status":"open","priority":2,"issue_type":"task","owner":"tbarrettwilsdon@gmail.com","created_at":"2026-02-08T20:00:39.779476-05:00","created_by":"Taylor Wilsdon","updated_at":"2026-02-08T21:06:15.613447-05:00"}
{"id":"google_workspace_mcp-3bn","title":"Fix AppScript run_script_function schema for Gemini API","description":"The run_script_function tool has a 'parameters' parameter defined as Optional[List[Any]] which causes schema generation issues with the Gemini API. The error is: 'GenerateContentRequest.tools[0].function_declarations[125].parameters.properties[parameters].items: missing field'. Need to fix the type annotation to generate proper JSON schema with items field.","status":"closed","priority":2,"issue_type":"bug","owner":"tbarrettwilsdon@gmail.com","created_at":"2026-02-09T14:16:48.857746-05:00","created_by":"Taylor Wilsdon","updated_at":"2026-02-09T14:21:43.5927-05:00","closed_at":"2026-02-09T14:21:43.5927-05:00","close_reason":"Fixed by changing parameters type from Optional[List[Any]] to Optional[list] in run_script_function. This ensures proper JSON schema generation with items field for Gemini API compatibility."} {"id":"google_workspace_mcp-3bn","title":"Fix AppScript run_script_function schema for Gemini API","description":"The run_script_function tool has a 'parameters' parameter defined as Optional[List[Any]] which causes schema generation issues with the Gemini API. The error is: 'GenerateContentRequest.tools[0].function_declarations[125].parameters.properties[parameters].items: missing field'. Need to fix the type annotation to generate proper JSON schema with items field.","status":"closed","priority":2,"issue_type":"bug","owner":"tbarrettwilsdon@gmail.com","created_at":"2026-02-09T14:16:48.857746-05:00","created_by":"Taylor Wilsdon","updated_at":"2026-02-09T14:21:43.5927-05:00","closed_at":"2026-02-09T14:21:43.5927-05:00","close_reason":"Fixed by changing parameters type from Optional[List[Any]] to Optional[list] in run_script_function. This ensures proper JSON schema generation with items field for Gemini API compatibility."}
{"id":"google_workspace_mcp-631","title":"Address copilot feedback for docs/sheets hyperlink range and extraction","status":"closed","priority":2,"issue_type":"task","owner":"tbarrettwilsdon@gmail.com","created_at":"2026-02-08T18:36:22.330879-05:00","created_by":"Taylor Wilsdon","updated_at":"2026-02-08T18:38:04.356856-05:00","closed_at":"2026-02-08T18:38:04.356856-05:00","close_reason":"Closed"} {"id":"google_workspace_mcp-631","title":"Address copilot feedback for docs/sheets hyperlink range and extraction","status":"closed","priority":2,"issue_type":"task","owner":"tbarrettwilsdon@gmail.com","created_at":"2026-02-08T18:36:22.330879-05:00","created_by":"Taylor Wilsdon","updated_at":"2026-02-08T18:38:04.356856-05:00","closed_at":"2026-02-08T18:38:04.356856-05:00","close_reason":"Closed"}
@@ -12,6 +13,7 @@
{"id":"google_workspace_mcp-gpb","title":"Address PR feedback for docs list nesting and sheets hyperlink fetch","status":"closed","priority":2,"issue_type":"task","owner":"tbarrettwilsdon@gmail.com","created_at":"2026-02-08T17:48:48.31354-05:00","created_by":"Taylor Wilsdon","updated_at":"2026-02-08T17:51:53.608353-05:00","closed_at":"2026-02-08T17:51:53.608353-05:00","close_reason":"Closed"} {"id":"google_workspace_mcp-gpb","title":"Address PR feedback for docs list nesting and sheets hyperlink fetch","status":"closed","priority":2,"issue_type":"task","owner":"tbarrettwilsdon@gmail.com","created_at":"2026-02-08T17:48:48.31354-05:00","created_by":"Taylor Wilsdon","updated_at":"2026-02-08T17:51:53.608353-05:00","closed_at":"2026-02-08T17:51:53.608353-05:00","close_reason":"Closed"}
{"id":"google_workspace_mcp-ic8","title":"enh: support writing hyperlink URLs in modify_sheet_values","description":"Issue #434 also requested hyperlink creation/writes. Current implementation reads hyperlinks in read_sheet_values but modify_sheet_values does not expose first-class hyperlink writes.","status":"open","priority":3,"issue_type":"task","owner":"tbarrettwilsdon@gmail.com","created_at":"2026-02-08T17:42:10.590658-05:00","created_by":"Taylor Wilsdon","updated_at":"2026-02-08T17:42:10.590658-05:00"} {"id":"google_workspace_mcp-ic8","title":"enh: support writing hyperlink URLs in modify_sheet_values","description":"Issue #434 also requested hyperlink creation/writes. Current implementation reads hyperlinks in read_sheet_values but modify_sheet_values does not expose first-class hyperlink writes.","status":"open","priority":3,"issue_type":"task","owner":"tbarrettwilsdon@gmail.com","created_at":"2026-02-08T17:42:10.590658-05:00","created_by":"Taylor Wilsdon","updated_at":"2026-02-08T17:42:10.590658-05:00"}
{"id":"google_workspace_mcp-jf2","title":"ci: make PyPI publish step rerun-safe with skip-existing","description":"GitHub Actions reruns on same tag fail because PyPI rejects duplicate file uploads. Add skip-existing=true to pypa/gh-action-pypi-publish so reruns proceed to MCP publish.","status":"closed","priority":2,"issue_type":"bug","owner":"tbarrettwilsdon@gmail.com","created_at":"2026-02-08T20:59:58.461102-05:00","created_by":"Taylor Wilsdon","updated_at":"2026-02-08T21:00:32.121469-05:00","closed_at":"2026-02-08T21:00:32.121469-05:00","close_reason":"Closed"} {"id":"google_workspace_mcp-jf2","title":"ci: make PyPI publish step rerun-safe with skip-existing","description":"GitHub Actions reruns on same tag fail because PyPI rejects duplicate file uploads. Add skip-existing=true to pypa/gh-action-pypi-publish so reruns proceed to MCP publish.","status":"closed","priority":2,"issue_type":"bug","owner":"tbarrettwilsdon@gmail.com","created_at":"2026-02-08T20:59:58.461102-05:00","created_by":"Taylor Wilsdon","updated_at":"2026-02-08T21:00:32.121469-05:00","closed_at":"2026-02-08T21:00:32.121469-05:00","close_reason":"Closed"}
{"id":"google_workspace_mcp-le6","title":"test: stabilize oauth callback redirect URI tests with OAuthConfig singleton reset","description":"tests/test_oauth_callback_server.py currently fails in this environment because get_oauth_redirect_uri uses cached OAuthConfig state that ignores per-test env var mutations. Add deterministic config reset/fixture strategy.","status":"open","priority":3,"issue_type":"task","owner":"tbarrettwilsdon@gmail.com","created_at":"2026-02-28T09:59:11.402699-04:00","created_by":"Taylor Wilsdon","updated_at":"2026-02-28T09:59:11.402699-04:00"}
{"id":"google_workspace_mcp-qfl","title":"Fix stdio multi-account session binding","status":"in_progress","priority":1,"issue_type":"task","owner":"tbarrettwilsdon@gmail.com","created_at":"2026-02-07T13:27:09.466282-05:00","created_by":"Taylor Wilsdon","updated_at":"2026-02-07T13:27:22.857227-05:00"} {"id":"google_workspace_mcp-qfl","title":"Fix stdio multi-account session binding","status":"in_progress","priority":1,"issue_type":"task","owner":"tbarrettwilsdon@gmail.com","created_at":"2026-02-07T13:27:09.466282-05:00","created_by":"Taylor Wilsdon","updated_at":"2026-02-07T13:27:22.857227-05:00"}
{"id":"google_workspace_mcp-qr5","title":"fix: include RFC Message-ID threading headers in thread content output","status":"closed","priority":2,"issue_type":"bug","owner":"tbarrettwilsdon@gmail.com","created_at":"2026-02-11T11:44:41.966911-05:00","created_by":"Taylor Wilsdon","updated_at":"2026-02-11T11:46:11.355237-05:00","closed_at":"2026-02-11T11:46:11.355237-05:00","close_reason":"Closed"} {"id":"google_workspace_mcp-qr5","title":"fix: include RFC Message-ID threading headers in thread content output","status":"closed","priority":2,"issue_type":"bug","owner":"tbarrettwilsdon@gmail.com","created_at":"2026-02-11T11:44:41.966911-05:00","created_by":"Taylor Wilsdon","updated_at":"2026-02-11T11:46:11.355237-05:00","closed_at":"2026-02-11T11:46:11.355237-05:00","close_reason":"Closed"}
{"id":"google_workspace_mcp-xia","title":"fix: CLI should unwrap FastAPI Body defaults when invoking tools","description":"CLI mode invokes tool functions directly and currently passes FastAPI Body marker objects as defaults for omitted args. This breaks gmail send/draft with errors like Body has no attribute lower/len. Update CLI invocation to normalize Param defaults and return clear missing-required errors.","status":"closed","priority":1,"issue_type":"bug","owner":"tbarrettwilsdon@gmail.com","created_at":"2026-02-10T12:33:06.83139-05:00","created_by":"Taylor Wilsdon","updated_at":"2026-02-10T12:36:35.051947-05:00","closed_at":"2026-02-10T12:36:35.051947-05:00","close_reason":"Implemented CLI FastAPI default normalization + regression tests","labels":["cli","gmail"]} {"id":"google_workspace_mcp-xia","title":"fix: CLI should unwrap FastAPI Body defaults when invoking tools","description":"CLI mode invokes tool functions directly and currently passes FastAPI Body marker objects as defaults for omitted args. This breaks gmail send/draft with errors like Body has no attribute lower/len. Update CLI invocation to normalize Param defaults and return clear missing-required errors.","status":"closed","priority":1,"issue_type":"bug","owner":"tbarrettwilsdon@gmail.com","created_at":"2026-02-10T12:33:06.83139-05:00","created_by":"Taylor Wilsdon","updated_at":"2026-02-10T12:36:35.051947-05:00","closed_at":"2026-02-10T12:36:35.051947-05:00","close_reason":"Implemented CLI FastAPI default normalization + regression tests","labels":["cli","gmail"]}

View File

@@ -147,16 +147,7 @@ uv run main.py --tools gmail drive
</details> </details>
### 1. One-Click Claude Desktop Install (Recommended)
1. **Download:** Grab the latest `google_workspace_mcp.dxt` from the “Releases” page
2. **Install:** Double-click the file — Claude Desktop opens and prompts you to **Install**
3. **Configure:** In Claude Desktop → **Settings → Extensions → Google Workspace MCP**, paste your Google OAuth credentials
4. **Use it:** Start a new Claude chat and call any Google Workspace tool
> **Why DXT?**
> Desktop Extensions (`.dxt`) bundle the server, dependencies, and manifest so users go from download → working MCP in **one click** — no terminal, no JSON editing, no version conflicts.
#### Required Configuration #### Required Configuration
<details> <details>
@@ -192,6 +183,17 @@ Claude Desktop stores these securely in the OS keychain; set them once in the ex
--- ---
### One-Click Claude Desktop Install (Claude Desktop Only, Stdio, Single User)
1. **Download:** Grab the latest `google_workspace_mcp.dxt` from the “Releases” page
2. **Install:** Double-click the file — Claude Desktop opens and prompts you to **Install**
3. **Configure:** In Claude Desktop → **Settings → Extensions → Google Workspace MCP**, paste your Google OAuth credentials
4. **Use it:** Start a new Claude chat and call any Google Workspace tool
> **Why DXT?**
> Desktop Extensions (`.dxt`) bundle the server, dependencies, and manifest so users go from download → working MCP in **one click** — no terminal, no JSON editing, no version conflicts.
<div align="center"> <div align="center">
<video width="832" src="https://github.com/user-attachments/assets/83cca4b3-5e94-448b-acb3-6e3a27341d3a"></video> <video width="832" src="https://github.com/user-attachments/assets/83cca4b3-5e94-448b-acb3-6e3a27341d3a"></video>
</div> </div>
@@ -558,6 +560,22 @@ Read-only mode provides secure, restricted access by:
- Automatically filtering out tools that require write permissions at startup - Automatically filtering out tools that require write permissions at startup
- Allowing read operations: list, get, search, and export across all services - Allowing read operations: list, get, search, and export across all services
**🔐 Granular Permissions**
```bash
# Per-service permission levels
uv run main.py --permissions gmail:organize drive:readonly
# Combine permissions with tier filtering
uv run main.py --permissions gmail:send drive:full --tool-tier core
```
Granular permissions mode provides service-by-service scope control:
- Format: `service:level` (one entry per service)
- Gmail levels: `readonly`, `organize`, `drafts`, `send`, `full` (cumulative)
- Other services currently support: `readonly`, `full`
- `--permissions` and `--read-only` are mutually exclusive
- `--permissions` cannot be combined with `--tools`; enabled services are determined by the `--permissions` entries (optionally filtered by `--tool-tier`)
- With `--tool-tier`, only tier-matched tools are enabled and only services that have tools in the selected tier are imported
**★ Tool Tiers** **★ Tool Tiers**
```bash ```bash
uv run main.py --tool-tier core # ● Essential tools only uv run main.py --tool-tier core # ● Essential tools only
@@ -736,6 +754,9 @@ uv run main.py --tool-tier complete # Enable all availabl
uv run main.py --tools gmail drive --tool-tier core # Core tools for specific services uv run main.py --tools gmail drive --tool-tier core # Core tools for specific services
uv run main.py --tools gmail --tool-tier extended # Extended Gmail functionality only uv run main.py --tools gmail --tool-tier extended # Extended Gmail functionality only
uv run main.py --tools docs sheets --tool-tier complete # Full access to Docs and Sheets uv run main.py --tools docs sheets --tool-tier complete # Full access to Docs and Sheets
# Combine tier selection with granular permission levels
uv run main.py --permissions gmail:organize drive:full --tool-tier core
``` ```
## 📋 Credential Configuration ## 📋 Credential Configuration

View File

@@ -291,16 +291,27 @@ def check_client_secrets() -> Optional[str]:
def create_oauth_flow( def create_oauth_flow(
scopes: List[str], redirect_uri: str, state: Optional[str] = None scopes: List[str],
redirect_uri: str,
state: Optional[str] = None,
code_verifier: Optional[str] = None,
) -> Flow: ) -> Flow:
"""Creates an OAuth flow using environment variables or client secrets file.""" """Creates an OAuth flow using environment variables or client secrets file."""
flow_kwargs = {
"scopes": scopes,
"redirect_uri": redirect_uri,
"state": state,
}
if code_verifier:
flow_kwargs["code_verifier"] = code_verifier
# Preserve the original verifier when re-creating the flow in callback.
flow_kwargs["autogenerate_code_verifier"] = False
# Try environment variables first # Try environment variables first
env_config = load_client_secrets_from_env() env_config = load_client_secrets_from_env()
if env_config: if env_config:
# Use client config directly # Use client config directly
flow = Flow.from_client_config( flow = Flow.from_client_config(env_config, **flow_kwargs)
env_config, scopes=scopes, redirect_uri=redirect_uri, state=state
)
logger.debug("Created OAuth flow from environment variables") logger.debug("Created OAuth flow from environment variables")
return flow return flow
@@ -312,9 +323,7 @@ def create_oauth_flow(
flow = Flow.from_client_secrets_file( flow = Flow.from_client_secrets_file(
CONFIG_CLIENT_SECRETS_PATH, CONFIG_CLIENT_SECRETS_PATH,
scopes=scopes, **flow_kwargs,
redirect_uri=redirect_uri,
state=state,
) )
logger.debug( logger.debug(
f"Created OAuth flow from client secrets file: {CONFIG_CLIENT_SECRETS_PATH}" f"Created OAuth flow from client secrets file: {CONFIG_CLIENT_SECRETS_PATH}"
@@ -389,7 +398,11 @@ async def start_auth_flow(
) )
store = get_oauth21_session_store() store = get_oauth21_session_store()
store.store_oauth_state(oauth_state, session_id=session_id) store.store_oauth_state(
oauth_state,
session_id=session_id,
code_verifier=flow.code_verifier,
)
logger.info( logger.info(
f"Auth flow started for {user_display_name}. Advise user to visit: {auth_url}" f"Auth flow started for {user_display_name}. Advise user to visit: {auth_url}"
@@ -482,6 +495,12 @@ def handle_auth_callback(
) )
os.environ["OAUTHLIB_INSECURE_TRANSPORT"] = "1" os.environ["OAUTHLIB_INSECURE_TRANSPORT"] = "1"
# Allow partial scope grants without raising an exception.
# When users decline some scopes on Google's consent screen,
# oauthlib raises because the granted scopes differ from requested.
if "OAUTHLIB_RELAX_TOKEN_SCOPE" not in os.environ:
os.environ["OAUTHLIB_RELAX_TOKEN_SCOPE"] = "1"
store = get_oauth21_session_store() store = get_oauth21_session_store()
parsed_response = urlparse(authorization_response) parsed_response = urlparse(authorization_response)
state_values = parse_qs(parsed_response.query).get("state") state_values = parse_qs(parsed_response.query).get("state")
@@ -496,7 +515,12 @@ def handle_auth_callback(
state_info.get("session_id") or "<unknown>", state_info.get("session_id") or "<unknown>",
) )
flow = create_oauth_flow(scopes=scopes, redirect_uri=redirect_uri, state=state) flow = create_oauth_flow(
scopes=scopes,
redirect_uri=redirect_uri,
state=state,
code_verifier=state_info.get("code_verifier"),
)
# Exchange the authorization code for credentials # Exchange the authorization code for credentials
# Note: fetch_token will use the redirect_uri configured in the flow # Note: fetch_token will use the redirect_uri configured in the flow
@@ -504,6 +528,29 @@ def handle_auth_callback(
credentials = flow.credentials credentials = flow.credentials
logger.info("Successfully exchanged authorization code for tokens.") logger.info("Successfully exchanged authorization code for tokens.")
# Handle partial OAuth grants: if the user declined some scopes on
# Google's consent screen, credentials.granted_scopes contains only
# what was actually authorized. Store those instead of the inflated
# requested scopes so that refresh() sends the correct scope set.
granted = getattr(credentials, "granted_scopes", None)
if granted and set(granted) != set(credentials.scopes or []):
logger.warning(
"Partial OAuth grant detected. Requested: %s, Granted: %s",
credentials.scopes,
granted,
)
credentials = Credentials(
token=credentials.token,
refresh_token=credentials.refresh_token,
id_token=getattr(credentials, "id_token", None),
token_uri=credentials.token_uri,
client_id=credentials.client_id,
client_secret=credentials.client_secret,
scopes=list(granted),
expiry=credentials.expiry,
quota_project_id=getattr(credentials, "quota_project_id", None),
)
# Get user info to determine user_id (using email here) # Get user info to determine user_id (using email here)
user_info = get_user_info(credentials) user_info = get_user_info(credentials)
if not user_info or "email" not in user_info: if not user_info or "email" not in user_info:

View File

@@ -221,6 +221,7 @@ class OAuth21SessionStore:
state: str, state: str,
session_id: Optional[str] = None, session_id: Optional[str] = None,
expires_in_seconds: int = 600, expires_in_seconds: int = 600,
code_verifier: Optional[str] = None,
) -> None: ) -> None:
"""Persist an OAuth state value for later validation.""" """Persist an OAuth state value for later validation."""
if not state: if not state:
@@ -236,6 +237,7 @@ class OAuth21SessionStore:
"session_id": session_id, "session_id": session_id,
"expires_at": expiry, "expires_at": expiry,
"created_at": now, "created_at": now,
"code_verifier": code_verifier,
} }
logger.debug( logger.debug(
"Stored OAuth state %s (expires at %s)", "Stored OAuth state %s (expires at %s)",

248
auth/permissions.py Normal file
View File

@@ -0,0 +1,248 @@
"""
Granular per-service permission levels.
Each service has named permission levels (cumulative), mapping to a list of
OAuth scopes. The levels for a service are ordered from least to most
permissive — requesting level N implicitly includes all scopes from levels < N.
Usage:
--permissions gmail:organize drive:readonly
Gmail levels: readonly, organize, drafts, send, full
Other services: readonly, full (extensible by adding entries to SERVICE_PERMISSION_LEVELS)
"""
import logging
from typing import Dict, List, Optional, Tuple
from auth.scopes import (
GMAIL_READONLY_SCOPE,
GMAIL_LABELS_SCOPE,
GMAIL_MODIFY_SCOPE,
GMAIL_COMPOSE_SCOPE,
GMAIL_SEND_SCOPE,
GMAIL_SETTINGS_BASIC_SCOPE,
DRIVE_READONLY_SCOPE,
DRIVE_FILE_SCOPE,
DRIVE_SCOPE,
CALENDAR_READONLY_SCOPE,
CALENDAR_EVENTS_SCOPE,
CALENDAR_SCOPE,
DOCS_READONLY_SCOPE,
DOCS_WRITE_SCOPE,
SHEETS_READONLY_SCOPE,
SHEETS_WRITE_SCOPE,
CHAT_READONLY_SCOPE,
CHAT_WRITE_SCOPE,
CHAT_SPACES_SCOPE,
CHAT_SPACES_READONLY_SCOPE,
FORMS_BODY_SCOPE,
FORMS_BODY_READONLY_SCOPE,
FORMS_RESPONSES_READONLY_SCOPE,
SLIDES_SCOPE,
SLIDES_READONLY_SCOPE,
TASKS_SCOPE,
TASKS_READONLY_SCOPE,
CONTACTS_SCOPE,
CONTACTS_READONLY_SCOPE,
CUSTOM_SEARCH_SCOPE,
SCRIPT_PROJECTS_SCOPE,
SCRIPT_PROJECTS_READONLY_SCOPE,
SCRIPT_DEPLOYMENTS_SCOPE,
SCRIPT_DEPLOYMENTS_READONLY_SCOPE,
SCRIPT_PROCESSES_READONLY_SCOPE,
SCRIPT_METRICS_SCOPE,
)
logger = logging.getLogger(__name__)
# Ordered permission levels per service.
# Each entry is (level_name, [additional_scopes_at_this_level]).
# Scopes are CUMULATIVE: level N includes all scopes from levels 0..N.
SERVICE_PERMISSION_LEVELS: Dict[str, List[Tuple[str, List[str]]]] = {
    # Each entry lists only the scopes ADDED at that level; callers
    # (get_scopes_for_permission) accumulate all earlier levels too.
    "gmail": [
        ("readonly", [GMAIL_READONLY_SCOPE]),
        ("organize", [GMAIL_LABELS_SCOPE, GMAIL_MODIFY_SCOPE]),
        ("drafts", [GMAIL_COMPOSE_SCOPE]),
        ("send", [GMAIL_SEND_SCOPE]),
        ("full", [GMAIL_SETTINGS_BASIC_SCOPE]),
    ],
    "drive": [
        ("readonly", [DRIVE_READONLY_SCOPE]),
        ("full", [DRIVE_SCOPE, DRIVE_FILE_SCOPE]),
    ],
    "calendar": [
        ("readonly", [CALENDAR_READONLY_SCOPE]),
        ("full", [CALENDAR_SCOPE, CALENDAR_EVENTS_SCOPE]),
    ],
    # Docs/Sheets tools also need Drive scopes to locate files.
    "docs": [
        ("readonly", [DOCS_READONLY_SCOPE, DRIVE_READONLY_SCOPE]),
        ("full", [DOCS_WRITE_SCOPE, DRIVE_READONLY_SCOPE, DRIVE_FILE_SCOPE]),
    ],
    "sheets": [
        ("readonly", [SHEETS_READONLY_SCOPE, DRIVE_READONLY_SCOPE]),
        ("full", [SHEETS_WRITE_SCOPE, DRIVE_READONLY_SCOPE]),
    ],
    "chat": [
        ("readonly", [CHAT_READONLY_SCOPE, CHAT_SPACES_READONLY_SCOPE]),
        ("full", [CHAT_WRITE_SCOPE, CHAT_SPACES_SCOPE]),
    ],
    "forms": [
        ("readonly", [FORMS_BODY_READONLY_SCOPE, FORMS_RESPONSES_READONLY_SCOPE]),
        ("full", [FORMS_BODY_SCOPE, FORMS_RESPONSES_READONLY_SCOPE]),
    ],
    "slides": [
        ("readonly", [SLIDES_READONLY_SCOPE]),
        ("full", [SLIDES_SCOPE]),
    ],
    "tasks": [
        ("readonly", [TASKS_READONLY_SCOPE]),
        ("full", [TASKS_SCOPE]),
    ],
    "contacts": [
        ("readonly", [CONTACTS_READONLY_SCOPE]),
        ("full", [CONTACTS_SCOPE]),
    ],
    # Custom Search has a single scope, so both levels are identical.
    "search": [
        ("readonly", [CUSTOM_SEARCH_SCOPE]),
        ("full", [CUSTOM_SEARCH_SCOPE]),
    ],
    "appscript": [
        (
            "readonly",
            [
                SCRIPT_PROJECTS_READONLY_SCOPE,
                SCRIPT_DEPLOYMENTS_READONLY_SCOPE,
                SCRIPT_PROCESSES_READONLY_SCOPE,
                SCRIPT_METRICS_SCOPE,
                DRIVE_READONLY_SCOPE,
            ],
        ),
        (
            "full",
            [
                SCRIPT_PROJECTS_SCOPE,
                SCRIPT_DEPLOYMENTS_SCOPE,
                SCRIPT_PROCESSES_READONLY_SCOPE,
                SCRIPT_METRICS_SCOPE,
                DRIVE_FILE_SCOPE,
            ],
        ),
    ],
}
# Module-level state: parsed --permissions config
# Dict mapping service_name -> level_name, e.g. {"gmail": "organize"}
# None means granular-permissions mode is inactive; set via set_permissions().
_PERMISSIONS: Optional[Dict[str, str]] = None
def set_permissions(permissions: Dict[str, str]) -> None:
    """Install the parsed --permissions mapping as module-level state.

    After this call is_permissions_mode() reports True and scope lookups
    use the given service -> level mapping.
    """
    global _PERMISSIONS
    _PERMISSIONS = permissions
    logger.info("Granular permissions set: %s", permissions)
def get_permissions() -> Optional[Dict[str, str]]:
    """Expose the active service -> level mapping, or None when granular mode is off."""
    return _PERMISSIONS
def is_permissions_mode() -> bool:
    """True once a --permissions mapping has been installed via set_permissions()."""
    return _PERMISSIONS is not None
def get_scopes_for_permission(service: str, level: str) -> List[str]:
    """Return the cumulative OAuth scopes for *service* at *level*.

    Scopes from every level up to and including the named one are combined,
    de-duplicated, and returned in sorted order.

    Raises:
        ValueError: if the service or the level name is not recognized.
    """
    tiers = SERVICE_PERMISSION_LEVELS.get(service)
    if tiers is None:
        raise ValueError(f"Unknown service: '{service}'")

    collected: set = set()
    for tier_name, tier_scopes in tiers:
        collected.update(tier_scopes)
        if tier_name == level:
            return sorted(collected)

    # Fell off the end without matching the requested level.
    valid = [name for name, _ in tiers]
    raise ValueError(
        f"Unknown permission level '{level}' for service '{service}'. "
        f"Valid levels: {valid}"
    )
def get_all_permission_scopes() -> List[str]:
    """
    Get the combined scopes for all services at their configured permission levels.

    Only meaningful when is_permissions_mode() is True; returns [] otherwise.

    Returns:
        Sorted, de-duplicated list of OAuth scope URLs. The result is sorted
        (rather than plain ``list(set)``) so the ordering is deterministic
        across runs — string-hash randomization would otherwise shuffle it,
        destabilizing anything derived from it (scope fingerprints, consent
        URLs) between server restarts. Also matches the sorted output of
        get_scopes_for_permission().
    """
    if _PERMISSIONS is None:
        return []
    all_scopes: set = set()
    for service, level in _PERMISSIONS.items():
        all_scopes.update(get_scopes_for_permission(service, level))
    return sorted(all_scopes)
def get_allowed_scopes_set() -> Optional[set]:
    """Set of scopes allowed under permissions mode (for tool filtering).

    Returns None when granular permissions mode is not active.
    """
    if _PERMISSIONS is None:
        return None
    return set(get_all_permission_scopes())
def get_valid_levels(service: str) -> List[str]:
    """Return the ordered permission level names for *service* ([] if unknown)."""
    return [name for name, _ in SERVICE_PERMISSION_LEVELS.get(service, [])]
def parse_permissions_arg(permissions_list: List[str]) -> Dict[str, str]:
    """
    Parse --permissions arguments like ["gmail:organize", "drive:full"].

    Both halves of each entry are whitespace-stripped and lower-cased
    before validation, so e.g. "Gmail:Readonly" or a padded " drive:full "
    produced by shell quoting still parses (all registry keys are lowercase).

    Returns:
        Dict mapping service -> level.

    Raises:
        ValueError: on bad format (expected 'service:level'), unknown
        service, unknown level, or a service listed more than once.
    """
    result: Dict[str, str] = {}
    for entry in permissions_list:
        if ":" not in entry:
            raise ValueError(
                f"Invalid permission format: '{entry}'. "
                f"Expected 'service:level' (e.g., 'gmail:organize', 'drive:readonly')"
            )
        service, level = entry.split(":", 1)
        # Normalize: tolerate stray whitespace and mixed case from the CLI.
        service = service.strip().lower()
        level = level.strip().lower()
        if service in result:
            raise ValueError(f"Duplicate service in permissions: '{service}'")
        if service not in SERVICE_PERMISSION_LEVELS:
            raise ValueError(
                f"Unknown service: '{service}'. "
                f"Valid services: {sorted(SERVICE_PERMISSION_LEVELS.keys())}"
            )
        valid = get_valid_levels(service)
        if level not in valid:
            raise ValueError(
                f"Unknown level '{level}' for service '{service}'. "
                f"Valid levels: {valid}"
            )
        result[service] = level
    return result

View File

@@ -291,6 +291,24 @@ def get_scopes_for_tools(enabled_tools=None):
Returns: Returns:
List of unique scopes for the enabled tools plus base scopes. List of unique scopes for the enabled tools plus base scopes.
""" """
# Granular permissions mode overrides both full and read-only scope maps.
# Lazy import with guard to avoid circular dependency during module init
# (SCOPES = get_scopes_for_tools() runs at import time before auth.permissions
# is fully loaded, but permissions mode is never active at that point).
try:
from auth.permissions import is_permissions_mode, get_all_permission_scopes
if is_permissions_mode():
scopes = BASE_SCOPES.copy()
scopes.extend(get_all_permission_scopes())
logger.debug(
"Generated scopes from granular permissions: %d unique scopes",
len(set(scopes)),
)
return list(set(scopes))
except ImportError:
pass
if enabled_tools is None: if enabled_tools is None:
# Default behavior - return all scopes # Default behavior - return all scopes
enabled_tools = TOOL_SCOPES_MAP.keys() enabled_tools = TOOL_SCOPES_MAP.keys()

View File

@@ -1,3 +1,4 @@
import hashlib
import logging import logging
import os import os
from typing import List, Optional from typing import List, Optional
@@ -6,6 +7,7 @@ from importlib import metadata
from fastapi.responses import HTMLResponse, JSONResponse, FileResponse from fastapi.responses import HTMLResponse, JSONResponse, FileResponse
from starlette.applications import Starlette from starlette.applications import Starlette
from starlette.requests import Request from starlette.requests import Request
from starlette.responses import Response
from starlette.middleware import Middleware from starlette.middleware import Middleware
from fastmcp import FastMCP from fastmcp import FastMCP
@@ -38,6 +40,34 @@ _legacy_callback_registered = False
session_middleware = Middleware(MCPSessionMiddleware) session_middleware = Middleware(MCPSessionMiddleware)
def _compute_scope_fingerprint() -> str:
    """Short, stable hash of the active scope set, used for cache-busting ETags."""
    canonical = ",".join(sorted(get_current_scopes()))
    digest = hashlib.sha256(canonical.encode())
    return digest.hexdigest()[:12]
def _wrap_well_known_endpoint(endpoint, etag: str):
    """Wrap a well-known metadata endpoint so its responses are never cached.

    The MCP SDK stamps ``Cache-Control: public, max-age=3600`` onto discovery
    responses, so restarting the server with different ``--permissions`` or
    ``--read-only`` flags leaves clients holding stale metadata that
    advertises the wrong scopes, silently breaking OAuth.

    The returned coroutine forces ``no-store`` and attaches an ``ETag``
    derived from the current scope set, so intermediary caches that ignore
    ``no-store`` still observe a fingerprint change.
    """

    async def _no_cache_endpoint(request: Request) -> Response:
        response = await endpoint(request)
        # Override the SDK's public/max-age header and fingerprint the body.
        response.headers["Cache-Control"] = "no-store, must-revalidate"
        response.headers["ETag"] = etag
        return response

    return _no_cache_endpoint
# Custom FastMCP that adds secure middleware stack for OAuth 2.1 # Custom FastMCP that adds secure middleware stack for OAuth 2.1
class SecureFastMCP(FastMCP): class SecureFastMCP(FastMCP):
def http_app(self, **kwargs) -> "Starlette": def http_app(self, **kwargs) -> "Starlette":
@@ -387,14 +417,18 @@ def configure_server_for_http():
"OAuth 2.1 enabled using FastMCP GoogleProvider with protocol-level auth" "OAuth 2.1 enabled using FastMCP GoogleProvider with protocol-level auth"
) )
# Explicitly mount well-known routes from the OAuth provider # Mount well-known routes with cache-busting headers.
# These should be auto-mounted but we ensure they're available # The MCP SDK hardcodes Cache-Control: public, max-age=3600
# on discovery responses which causes stale-scope bugs when
# the server is restarted with a different --permissions config.
try: try:
scope_etag = f'"{_compute_scope_fingerprint()}"'
well_known_routes = provider.get_well_known_routes() well_known_routes = provider.get_well_known_routes()
for route in well_known_routes: for route in well_known_routes:
logger.info(f"Mounting OAuth well-known route: {route.path}") logger.info(f"Mounting OAuth well-known route: {route.path}")
wrapped = _wrap_well_known_endpoint(route.endpoint, scope_etag)
server.custom_route(route.path, methods=list(route.methods))( server.custom_route(route.path, methods=list(route.methods))(
route.endpoint wrapped
) )
except Exception as e: except Exception as e:
logger.warning(f"Could not mount well-known routes: {e}") logger.warning(f"Could not mount well-known routes: {e}")
@@ -438,10 +472,11 @@ async def health_check(request: Request):
@server.custom_route("/attachments/{file_id}", methods=["GET"]) @server.custom_route("/attachments/{file_id}", methods=["GET"])
async def serve_attachment(file_id: str): async def serve_attachment(request: Request):
"""Serve a stored attachment file.""" """Serve a stored attachment file."""
from core.attachment_storage import get_attachment_storage from core.attachment_storage import get_attachment_storage
file_id = request.path_params["file_id"]
storage = get_attachment_storage() storage = get_attachment_storage()
metadata = storage.get_attachment_metadata(file_id) metadata = storage.get_attachment_metadata(file_id)

View File

@@ -9,6 +9,7 @@ import logging
from typing import Set, Optional, Callable from typing import Set, Optional, Callable
from auth.oauth_config import is_oauth21_enabled from auth.oauth_config import is_oauth21_enabled
from auth.permissions import is_permissions_mode, get_allowed_scopes_set
from auth.scopes import is_read_only_mode, get_all_read_only_scopes from auth.scopes import is_read_only_mode, get_all_read_only_scopes
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@@ -104,7 +105,13 @@ def filter_server_tools(server):
"""Remove disabled tools from the server after registration.""" """Remove disabled tools from the server after registration."""
enabled_tools = get_enabled_tools() enabled_tools = get_enabled_tools()
oauth21_enabled = is_oauth21_enabled() oauth21_enabled = is_oauth21_enabled()
if enabled_tools is None and not oauth21_enabled and not is_read_only_mode(): permissions_mode = is_permissions_mode()
if (
enabled_tools is None
and not oauth21_enabled
and not is_read_only_mode()
and not permissions_mode
):
return return
tools_removed = 0 tools_removed = 0
@@ -126,8 +133,8 @@ def filter_server_tools(server):
tools_to_remove.add("start_google_auth") tools_to_remove.add("start_google_auth")
logger.info("OAuth 2.1 enabled: disabling start_google_auth tool") logger.info("OAuth 2.1 enabled: disabling start_google_auth tool")
# 3. Read-only mode filtering # 3. Read-only mode filtering (skipped when granular permissions are active)
if read_only_mode: if read_only_mode and not permissions_mode:
for tool_name, tool_obj in tool_components.items(): for tool_name, tool_obj in tool_components.items():
if tool_name in tools_to_remove: if tool_name in tools_to_remove:
continue continue
@@ -147,6 +154,32 @@ def filter_server_tools(server):
) )
tools_to_remove.add(tool_name) tools_to_remove.add(tool_name)
# 4. Granular permissions filtering
# No scope hierarchy expansion here — permission levels are already cumulative
# and explicitly define allowed scopes. Hierarchy expansion would defeat the
# purpose (e.g. gmail.modify in the hierarchy covers gmail.send, but the
# "organize" permission level intentionally excludes gmail.send).
if permissions_mode:
perm_allowed = get_allowed_scopes_set() or set()
for tool_name, tool_obj in tool_components.items():
if tool_name in tools_to_remove:
continue
func_to_check = tool_obj
if hasattr(tool_obj, "fn"):
func_to_check = tool_obj.fn
required_scopes = getattr(func_to_check, "_required_google_scopes", [])
if required_scopes:
if not all(scope in perm_allowed for scope in required_scopes):
logger.info(
"Permissions mode: Disabling tool '%s' (requires: %s)",
tool_name,
required_scopes,
)
tools_to_remove.add(tool_name)
for tool_name in tools_to_remove: for tool_name in tools_to_remove:
try: try:
server.local_provider.remove_tool(tool_name) server.local_provider.remove_tool(tool_name)
@@ -167,7 +200,12 @@ def filter_server_tools(server):
if tools_removed > 0: if tools_removed > 0:
enabled_count = len(enabled_tools) if enabled_tools is not None else "all" enabled_count = len(enabled_tools) if enabled_tools is not None else "all"
mode = "Read-Only" if is_read_only_mode() else "Full" if permissions_mode:
mode = "Permissions"
elif is_read_only_mode():
mode = "Read-Only"
else:
mode = "Full"
logger.info( logger.info(
f"Tool filtering: removed {tools_removed} tools, {enabled_count} enabled. Mode: {mode}" f"Tool filtering: removed {tools_removed} tools, {enabled_count} enabled. Mode: {mode}"
) )

View File

@@ -181,6 +181,8 @@ def build_drive_list_params(
drive_id: Optional[str] = None, drive_id: Optional[str] = None,
include_items_from_all_drives: bool = True, include_items_from_all_drives: bool = True,
corpora: Optional[str] = None, corpora: Optional[str] = None,
page_token: Optional[str] = None,
detailed: bool = True,
) -> Dict[str, Any]: ) -> Dict[str, Any]:
""" """
Helper function to build common list parameters for Drive API calls. Helper function to build common list parameters for Drive API calls.
@@ -191,18 +193,28 @@ def build_drive_list_params(
drive_id: Optional shared drive ID drive_id: Optional shared drive ID
include_items_from_all_drives: Whether to include items from all drives include_items_from_all_drives: Whether to include items from all drives
corpora: Optional corpus specification corpora: Optional corpus specification
page_token: Optional page token for pagination (from a previous nextPageToken)
detailed: Whether to request size, modifiedTime, and webViewLink fields.
Defaults to True to preserve existing behavior.
Returns: Returns:
Dictionary of parameters for Drive API list calls Dictionary of parameters for Drive API list calls
""" """
if detailed:
fields = "nextPageToken, files(id, name, mimeType, webViewLink, iconLink, modifiedTime, size)"
else:
fields = "nextPageToken, files(id, name, mimeType)"
list_params = { list_params = {
"q": query, "q": query,
"pageSize": page_size, "pageSize": page_size,
"fields": "nextPageToken, files(id, name, mimeType, webViewLink, iconLink, modifiedTime, size)", "fields": fields,
"supportsAllDrives": True, "supportsAllDrives": True,
"includeItemsFromAllDrives": include_items_from_all_drives, "includeItemsFromAllDrives": include_items_from_all_drives,
} }
if page_token:
list_params["pageToken"] = page_token
if drive_id: if drive_id:
list_params["driveId"] = drive_id list_params["driveId"] = drive_id
if corpora: if corpora:

View File

@@ -58,10 +58,12 @@ async def search_drive_files(
user_google_email: str, user_google_email: str,
query: str, query: str,
page_size: int = 10, page_size: int = 10,
page_token: Optional[str] = None,
drive_id: Optional[str] = None, drive_id: Optional[str] = None,
include_items_from_all_drives: bool = True, include_items_from_all_drives: bool = True,
corpora: Optional[str] = None, corpora: Optional[str] = None,
file_type: Optional[str] = None, file_type: Optional[str] = None,
detailed: bool = True,
) -> str: ) -> str:
""" """
Searches for files and folders within a user's Google Drive, including shared drives. Searches for files and folders within a user's Google Drive, including shared drives.
@@ -70,6 +72,7 @@ async def search_drive_files(
user_google_email (str): The user's Google email address. Required. user_google_email (str): The user's Google email address. Required.
query (str): The search query string. Supports Google Drive search operators. query (str): The search query string. Supports Google Drive search operators.
page_size (int): The maximum number of files to return. Defaults to 10. page_size (int): The maximum number of files to return. Defaults to 10.
page_token (Optional[str]): Page token from a previous response's nextPageToken to retrieve the next page of results.
drive_id (Optional[str]): ID of the shared drive to search. If None, behavior depends on `corpora` and `include_items_from_all_drives`. drive_id (Optional[str]): ID of the shared drive to search. If None, behavior depends on `corpora` and `include_items_from_all_drives`.
include_items_from_all_drives (bool): Whether shared drive items should be included in results. Defaults to True. This is effective when not specifying a `drive_id`. include_items_from_all_drives (bool): Whether shared drive items should be included in results. Defaults to True. This is effective when not specifying a `drive_id`.
corpora (Optional[str]): Bodies of items to query (e.g., 'user', 'domain', 'drive', 'allDrives'). corpora (Optional[str]): Bodies of items to query (e.g., 'user', 'domain', 'drive', 'allDrives').
@@ -80,9 +83,11 @@ async def search_drive_files(
'presentation'/'slides', 'form', 'drawing', 'pdf', 'shortcut', 'presentation'/'slides', 'form', 'drawing', 'pdf', 'shortcut',
'script', 'site', 'jam'/'jamboard') or any raw MIME type 'script', 'site', 'jam'/'jamboard') or any raw MIME type
string (e.g. 'application/pdf'). Defaults to None (all types). string (e.g. 'application/pdf'). Defaults to None (all types).
detailed (bool): Whether to include size, modified time, and link in results. Defaults to True.
Returns: Returns:
str: A formatted list of found files/folders with their details (ID, name, type, size, modified time, link). str: A formatted list of found files/folders with their details (ID, name, type, and optionally size, modified time, link).
Includes a nextPageToken line when more results are available.
""" """
logger.info( logger.info(
f"[search_drive_files] Invoked. Email: '{user_google_email}', Query: '{query}', file_type: '{file_type}'" f"[search_drive_files] Invoked. Email: '{user_google_email}', Query: '{query}', file_type: '{file_type}'"
@@ -116,6 +121,8 @@ async def search_drive_files(
drive_id=drive_id, drive_id=drive_id,
include_items_from_all_drives=include_items_from_all_drives, include_items_from_all_drives=include_items_from_all_drives,
corpora=corpora, corpora=corpora,
page_token=page_token,
detailed=detailed,
) )
results = await asyncio.to_thread(service.files().list(**list_params).execute) results = await asyncio.to_thread(service.files().list(**list_params).execute)
@@ -123,14 +130,21 @@ async def search_drive_files(
if not files: if not files:
return f"No files found for '{query}'." return f"No files found for '{query}'."
formatted_files_text_parts = [ next_token = results.get("nextPageToken")
f"Found {len(files)} files for {user_google_email} matching '{query}':" header = f"Found {len(files)} files for {user_google_email} matching '{query}':"
] formatted_files_text_parts = [header]
for item in files: for item in files:
size_str = f", Size: {item.get('size', 'N/A')}" if "size" in item else "" if detailed:
formatted_files_text_parts.append( size_str = f", Size: {item.get('size', 'N/A')}" if "size" in item else ""
f'- Name: "{item["name"]}" (ID: {item["id"]}, Type: {item["mimeType"]}{size_str}, Modified: {item.get("modifiedTime", "N/A")}) Link: {item.get("webViewLink", "#")}' formatted_files_text_parts.append(
) f'- Name: "{item["name"]}" (ID: {item["id"]}, Type: {item["mimeType"]}{size_str}, Modified: {item.get("modifiedTime", "N/A")}) Link: {item.get("webViewLink", "#")}'
)
else:
formatted_files_text_parts.append(
f'- Name: "{item["name"]}" (ID: {item["id"]}, Type: {item["mimeType"]})'
)
if next_token:
formatted_files_text_parts.append(f"nextPageToken: {next_token}")
text_output = "\n".join(formatted_files_text_parts) text_output = "\n".join(formatted_files_text_parts)
return text_output return text_output
@@ -423,10 +437,12 @@ async def list_drive_items(
user_google_email: str, user_google_email: str,
folder_id: str = "root", folder_id: str = "root",
page_size: int = 100, page_size: int = 100,
page_token: Optional[str] = None,
drive_id: Optional[str] = None, drive_id: Optional[str] = None,
include_items_from_all_drives: bool = True, include_items_from_all_drives: bool = True,
corpora: Optional[str] = None, corpora: Optional[str] = None,
file_type: Optional[str] = None, file_type: Optional[str] = None,
detailed: bool = True,
) -> str: ) -> str:
""" """
Lists files and folders, supporting shared drives. Lists files and folders, supporting shared drives.
@@ -437,6 +453,7 @@ async def list_drive_items(
user_google_email (str): The user's Google email address. Required. user_google_email (str): The user's Google email address. Required.
folder_id (str): The ID of the Google Drive folder. Defaults to 'root'. For a shared drive, this can be the shared drive's ID to list its root, or a folder ID within that shared drive. folder_id (str): The ID of the Google Drive folder. Defaults to 'root'. For a shared drive, this can be the shared drive's ID to list its root, or a folder ID within that shared drive.
page_size (int): The maximum number of items to return. Defaults to 100. page_size (int): The maximum number of items to return. Defaults to 100.
page_token (Optional[str]): Page token from a previous response's nextPageToken to retrieve the next page of results.
drive_id (Optional[str]): ID of the shared drive. If provided, the listing is scoped to this drive. drive_id (Optional[str]): ID of the shared drive. If provided, the listing is scoped to this drive.
include_items_from_all_drives (bool): Whether items from all accessible shared drives should be included if `drive_id` is not set. Defaults to True. include_items_from_all_drives (bool): Whether items from all accessible shared drives should be included if `drive_id` is not set. Defaults to True.
corpora (Optional[str]): Corpus to query ('user', 'drive', 'allDrives'). If `drive_id` is set and `corpora` is None, 'drive' is used. If None and no `drive_id`, API defaults apply. corpora (Optional[str]): Corpus to query ('user', 'drive', 'allDrives'). If `drive_id` is set and `corpora` is None, 'drive' is used. If None and no `drive_id`, API defaults apply.
@@ -445,9 +462,11 @@ async def list_drive_items(
'presentation'/'slides', 'form', 'drawing', 'pdf', 'shortcut', 'presentation'/'slides', 'form', 'drawing', 'pdf', 'shortcut',
'script', 'site', 'jam'/'jamboard') or any raw MIME type 'script', 'site', 'jam'/'jamboard') or any raw MIME type
string (e.g. 'application/pdf'). Defaults to None (all types). string (e.g. 'application/pdf'). Defaults to None (all types).
detailed (bool): Whether to include size, modified time, and link in results. Defaults to True.
Returns: Returns:
str: A formatted list of files/folders in the specified folder. str: A formatted list of files/folders in the specified folder.
Includes a nextPageToken line when more results are available.
""" """
logger.info( logger.info(
f"[list_drive_items] Invoked. Email: '{user_google_email}', Folder ID: '{folder_id}', File Type: '{file_type}'" f"[list_drive_items] Invoked. Email: '{user_google_email}', Folder ID: '{folder_id}', File Type: '{file_type}'"
@@ -467,6 +486,8 @@ async def list_drive_items(
drive_id=drive_id, drive_id=drive_id,
include_items_from_all_drives=include_items_from_all_drives, include_items_from_all_drives=include_items_from_all_drives,
corpora=corpora, corpora=corpora,
page_token=page_token,
detailed=detailed,
) )
results = await asyncio.to_thread(service.files().list(**list_params).execute) results = await asyncio.to_thread(service.files().list(**list_params).execute)
@@ -474,14 +495,23 @@ async def list_drive_items(
if not files: if not files:
return f"No items found in folder '{folder_id}'." return f"No items found in folder '{folder_id}'."
formatted_items_text_parts = [ next_token = results.get("nextPageToken")
header = (
f"Found {len(files)} items in folder '{folder_id}' for {user_google_email}:" f"Found {len(files)} items in folder '{folder_id}' for {user_google_email}:"
] )
formatted_items_text_parts = [header]
for item in files: for item in files:
size_str = f", Size: {item.get('size', 'N/A')}" if "size" in item else "" if detailed:
formatted_items_text_parts.append( size_str = f", Size: {item.get('size', 'N/A')}" if "size" in item else ""
f'- Name: "{item["name"]}" (ID: {item["id"]}, Type: {item["mimeType"]}{size_str}, Modified: {item.get("modifiedTime", "N/A")}) Link: {item.get("webViewLink", "#")}' formatted_items_text_parts.append(
) f'- Name: "{item["name"]}" (ID: {item["id"]}, Type: {item["mimeType"]}{size_str}, Modified: {item.get("modifiedTime", "N/A")}) Link: {item.get("webViewLink", "#")}'
)
else:
formatted_items_text_parts.append(
f'- Name: "{item["name"]}" (ID: {item["id"]}, Type: {item["mimeType"]})'
)
if next_token:
formatted_items_text_parts.append(f"nextPageToken: {next_token}")
text_output = "\n".join(formatted_items_text_parts) text_output = "\n".join(formatted_items_text_parts)
return text_output return text_output

92
main.py
View File

@@ -91,6 +91,32 @@ def configure_safe_logging():
handler.setFormatter(safe_formatter) handler.setFormatter(safe_formatter)
def resolve_permissions_mode_selection(
permission_services: list[str], tool_tier: str | None
) -> tuple[list[str], set[str] | None]:
"""
Resolve service imports and optional tool-name filtering for --permissions mode.
When a tier is specified, both:
- imported services are narrowed to services with tier-matched tools
- registered tools are narrowed to the resolved tool names
"""
if tool_tier is None:
return permission_services, None
tier_tools, tier_services = resolve_tools_from_tier(tool_tier, permission_services)
return tier_services, set(tier_tools)
def narrow_permissions_to_services(
    permissions: dict[str, str], services: list[str]
) -> dict[str, str]:
    """Restrict permission entries to the provided service list order."""
    narrowed: dict[str, str] = {}
    for service in services:
        if service in permissions:
            narrowed[service] = permissions[service]
    return narrowed
def main(): def main():
""" """
Main entry point for the Google Workspace MCP server. Main entry point for the Google Workspace MCP server.
@@ -155,6 +181,18 @@ def main():
action="store_true", action="store_true",
help="Run in read-only mode - requests only read-only scopes and disables tools requiring write permissions", help="Run in read-only mode - requests only read-only scopes and disables tools requiring write permissions",
) )
parser.add_argument(
"--permissions",
nargs="+",
metavar="SERVICE:LEVEL",
help=(
"Granular per-service permission levels. Format: service:level. "
"Example: --permissions gmail:organize drive:readonly. "
"Gmail levels: readonly, organize, drafts, send, full (cumulative). "
"Other services: readonly, full. "
"Mutually exclusive with --read-only and --tools."
),
)
args = parser.parse_args() args = parser.parse_args()
# Clean up CLI args - argparse.REMAINDER may include leading dashes from first arg # Clean up CLI args - argparse.REMAINDER may include leading dashes from first arg
@@ -162,6 +200,22 @@ def main():
# Filter out empty strings that might appear # Filter out empty strings that might appear
args.cli = [a for a in args.cli if a] args.cli = [a for a in args.cli if a]
# Validate mutually exclusive flags
if args.permissions and args.read_only:
print(
"Error: --permissions and --read-only are mutually exclusive. "
"Use service:readonly within --permissions instead.",
file=sys.stderr,
)
sys.exit(1)
if args.permissions and args.tools is not None:
print(
"Error: --permissions and --tools cannot be combined. "
"Select services via --permissions (optionally with --tool-tier).",
file=sys.stderr,
)
sys.exit(1)
# Set port and base URI once for reuse throughout the function # Set port and base URI once for reuse throughout the function
port = int(os.getenv("PORT", os.getenv("WORKSPACE_MCP_PORT", 8000))) port = int(os.getenv("PORT", os.getenv("WORKSPACE_MCP_PORT", 8000)))
base_uri = os.getenv("WORKSPACE_MCP_BASE_URI", "http://localhost") base_uri = os.getenv("WORKSPACE_MCP_BASE_URI", "http://localhost")
@@ -184,6 +238,8 @@ def main():
safe_print(f" 👤 Mode: {'Single-user' if args.single_user else 'Multi-user'}") safe_print(f" 👤 Mode: {'Single-user' if args.single_user else 'Multi-user'}")
if args.read_only: if args.read_only:
safe_print(" 🔒 Read-Only: Enabled") safe_print(" 🔒 Read-Only: Enabled")
if args.permissions:
safe_print(" 🔒 Permissions: Granular mode")
safe_print(f" 🐍 Python: {sys.version.split()[0]}") safe_print(f" 🐍 Python: {sys.version.split()[0]}")
safe_print("") safe_print("")
@@ -265,7 +321,36 @@ def main():
} }
# Determine which tools to import based on arguments # Determine which tools to import based on arguments
if args.tool_tier is not None: perms = None
if args.permissions:
# Granular permissions mode — parse and activate before tool selection
from auth.permissions import parse_permissions_arg, set_permissions
try:
perms = parse_permissions_arg(args.permissions)
except ValueError as e:
print(f"Error: {e}", file=sys.stderr)
sys.exit(1)
# Permissions implicitly defines which services to load
tools_to_import = list(perms.keys())
set_enabled_tool_names(None)
if args.tool_tier is not None:
# Combine with tier filtering within the permission-selected services
try:
tools_to_import, tier_tool_filter = resolve_permissions_mode_selection(
tools_to_import, args.tool_tier
)
set_enabled_tool_names(tier_tool_filter)
perms = narrow_permissions_to_services(perms, tools_to_import)
except Exception as e:
print(
f"Error loading tools for tier '{args.tool_tier}': {e}",
file=sys.stderr,
)
sys.exit(1)
set_permissions(perms)
elif args.tool_tier is not None:
# Use tier-based tool selection, optionally filtered by services # Use tier-based tool selection, optionally filtered by services
try: try:
tier_tools, suggested_services = resolve_tools_from_tier( tier_tools, suggested_services = resolve_tools_from_tier(
@@ -314,6 +399,11 @@ def main():
except ModuleNotFoundError as exc: except ModuleNotFoundError as exc:
logger.error("Failed to import tool '%s': %s", tool, exc, exc_info=True) logger.error("Failed to import tool '%s': %s", tool, exc, exc_info=True)
safe_print(f" ⚠️ Failed to load {tool.title()} tool module ({exc}).") safe_print(f" ⚠️ Failed to load {tool.title()} tool module ({exc}).")
if perms:
safe_print("🔒 Permission Levels:")
for svc, lvl in sorted(perms.items()):
safe_print(f" {tool_icons.get(svc, ' ')} {svc}: {lvl}")
safe_print("") safe_print("")
# Filter tools based on tier configuration (if tier-based loading is enabled) # Filter tools based on tier configuration (if tier-based loading is enabled)

View File

@@ -108,7 +108,7 @@ where = ["."]
exclude = ["tests*", "docs*", "build", "dist"] exclude = ["tests*", "docs*", "build", "dist"]
[tool.pytest.ini_options] [tool.pytest.ini_options]
collect_ignore_glob = ["**/manual_test.py"] addopts = "--ignore=tests/gappsscript/manual_test.py"
[tool.setuptools.package-data] [tool.setuptools.package-data]
core = ["tool_tiers.yaml"] core = ["tool_tiers.yaml"]

View File

@@ -0,0 +1,69 @@
import pytest
from starlette.requests import Request
from starlette.responses import FileResponse, JSONResponse
from core.server import serve_attachment
def _build_request(file_id: str) -> Request:
    """Construct a minimal ASGI GET Request targeting /attachments/{file_id}."""
    path = f"/attachments/{file_id}"
    scope = {
        "type": "http",
        "asgi": {"version": "3.0"},
        "http_version": "1.1",
        "method": "GET",
        "scheme": "http",
        "path": path,
        "raw_path": path.encode(),
        "query_string": b"",
        "headers": [],
        "client": ("127.0.0.1", 12345),
        "server": ("localhost", 8000),
        # The endpoint under test reads file_id from here, not from the path.
        "path_params": {"file_id": file_id},
    }

    async def receive():
        # Empty request body; the attachment endpoint never reads it.
        return {"type": "http.request", "body": b"", "more_body": False}

    return Request(scope, receive)
@pytest.mark.asyncio
async def test_serve_attachment_uses_path_param_file_id(monkeypatch, tmp_path):
    """serve_attachment must read file_id from the request path parameters."""
    pdf_path = tmp_path / "sample.pdf"
    pdf_path.write_bytes(b"%PDF-1.3\n")
    seen_ids = []

    class FakeStorage:
        def get_attachment_metadata(self, file_id):
            seen_ids.append(file_id)
            return {"filename": "sample.pdf", "mime_type": "application/pdf"}

        def get_attachment_path(self, _file_id):
            return pdf_path

    monkeypatch.setattr(
        "core.attachment_storage.get_attachment_storage", lambda: FakeStorage()
    )

    response = await serve_attachment(_build_request("abc123"))

    assert seen_ids == ["abc123"]
    assert isinstance(response, FileResponse)
    assert response.status_code == 200
@pytest.mark.asyncio
async def test_serve_attachment_404_when_metadata_missing(monkeypatch):
    """Unknown or expired attachment ids must yield a JSON 404 response."""

    class EmptyStorage:
        def get_attachment_metadata(self, _file_id):
            # Simulates a missing/expired attachment record.
            return None

    monkeypatch.setattr(
        "core.attachment_storage.get_attachment_storage", lambda: EmptyStorage()
    )

    response = await serve_attachment(_build_request("missing"))

    assert isinstance(response, JSONResponse)
    assert response.status_code == 404
    assert b"Attachment not found or expired" in response.body

View File

@@ -43,7 +43,7 @@ def _make_attachment(
def _unwrap(tool): def _unwrap(tool):
"""Unwrap a FunctionTool + decorator chain to the original async function.""" """Unwrap a FunctionTool + decorator chain to the original async function."""
fn = tool.fn # FunctionTool stores the wrapped callable in .fn fn = getattr(tool, "fn", tool)
while hasattr(fn, "__wrapped__"): while hasattr(fn, "__wrapped__"):
fn = fn.__wrapped__ fn = fn.__wrapped__
return fn return fn

View File

@@ -0,0 +1,147 @@
"""
Unit tests for create_drive_folder tool.
"""
import os
import sys
from unittest.mock import AsyncMock, MagicMock, patch
import pytest
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), "../..")))
from gdrive.drive_tools import _create_drive_folder_impl as _raw_create_drive_folder
def _make_service(created_response):
"""Build a mock Drive service whose files().create().execute returns *created_response*."""
execute = MagicMock(return_value=created_response)
create = MagicMock()
create.return_value.execute = execute
files = MagicMock()
files.return_value.create = create
service = MagicMock()
service.files = files
return service
@pytest.mark.asyncio
async def test_create_folder_root_skips_resolve():
    """Parent 'root' should pass through resolve_folder_id and produce correct output."""
    created = {
        "id": "new-folder-id",
        "name": "My Folder",
        "webViewLink": "https://drive.google.com/drive/folders/new-folder-id",
    }
    drive_service = _make_service(created)

    with patch(
        "gdrive.drive_tools.resolve_folder_id",
        new_callable=AsyncMock,
        return_value="root",
    ):
        message = await _raw_create_drive_folder(
            drive_service,
            user_google_email="user@example.com",
            folder_name="My Folder",
            parent_folder_id="root",
        )

    # The success message should surface id, name, and link of the new folder.
    for expected in (
        "new-folder-id",
        "My Folder",
        "https://drive.google.com/drive/folders/new-folder-id",
    ):
        assert expected in message
@pytest.mark.asyncio
async def test_create_folder_custom_parent_resolves():
    """A non-root parent_folder_id should go through resolve_folder_id."""
    created = {
        "id": "new-folder-id",
        "name": "Sub Folder",
        "webViewLink": "https://drive.google.com/drive/folders/new-folder-id",
    }
    drive_service = _make_service(created)

    with patch(
        "gdrive.drive_tools.resolve_folder_id",
        new_callable=AsyncMock,
        return_value="resolved-parent-id",
    ) as mock_resolve:
        message = await _raw_create_drive_folder(
            drive_service,
            user_google_email="user@example.com",
            folder_name="Sub Folder",
            parent_folder_id="shortcut-id",
        )

    mock_resolve.assert_awaited_once_with(drive_service, "shortcut-id")
    # The output message uses the original parent_folder_id, not the resolved one
    assert "shortcut-id" in message
    # But the API call should use the resolved ID
    drive_service.files().create.assert_called_once_with(
        body={
            "name": "Sub Folder",
            "mimeType": "application/vnd.google-apps.folder",
            "parents": ["resolved-parent-id"],
        },
        fields="id, name, webViewLink",
        supportsAllDrives=True,
    )
@pytest.mark.asyncio
async def test_create_folder_passes_correct_metadata():
    """Verify the metadata dict sent to the Drive API is correct."""
    created = {
        "id": "abc123",
        "name": "Test",
        "webViewLink": "https://drive.google.com/drive/folders/abc123",
    }
    drive_service = _make_service(created)

    with patch(
        "gdrive.drive_tools.resolve_folder_id",
        new_callable=AsyncMock,
        return_value="resolved-id",
    ):
        await _raw_create_drive_folder(
            drive_service,
            user_google_email="user@example.com",
            folder_name="Test",
            parent_folder_id="some-parent",
        )

    expected_body = {
        "name": "Test",
        "mimeType": "application/vnd.google-apps.folder",
        "parents": ["resolved-id"],
    }
    drive_service.files().create.assert_called_once_with(
        body=expected_body,
        fields="id, name, webViewLink",
        supportsAllDrives=True,
    )
@pytest.mark.asyncio
async def test_create_folder_missing_webviewlink():
    """When the API omits webViewLink, the result should have an empty link."""
    created = {"id": "abc123", "name": "NoLink"}  # no webViewLink key
    drive_service = _make_service(created)

    with patch(
        "gdrive.drive_tools.resolve_folder_id",
        new_callable=AsyncMock,
        return_value="root",
    ):
        message = await _raw_create_drive_folder(
            drive_service,
            user_google_email="user@example.com",
            folder_name="NoLink",
            parent_folder_id="root",
        )

    assert "abc123" in message
    assert "NoLink" in message

View File

@@ -1,8 +1,9 @@
""" """
Unit tests for Google Drive MCP tools. Unit tests for Google Drive MCP tools.
Tests create_drive_folder with mocked API responses, Tests create_drive_folder with mocked API responses, plus coverage for
and the list_drive_items and search_drive_files tools `search_drive_files` and `list_drive_items` pagination, `detailed` output,
and `file_type` filtering behaviors.
""" """
import pytest import pytest
@@ -12,6 +13,221 @@ import os
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), "../.."))) sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), "../..")))
from gdrive.drive_helpers import build_drive_list_params
from gdrive.drive_tools import list_drive_items, search_drive_files
def _unwrap(tool):
"""Unwrap a FunctionTool + decorator chain to the original async function.
Handles both older FastMCP (FunctionTool with .fn) and newer FastMCP
(server.tool() returns the function directly).
"""
fn = tool.fn if hasattr(tool, "fn") else tool
while hasattr(fn, "__wrapped__"):
fn = fn.__wrapped__
return fn
# ---------------------------------------------------------------------------
# search_drive_files — page_token
# ---------------------------------------------------------------------------
@pytest.mark.asyncio
async def test_search_drive_files_page_token_passed_to_api():
    """page_token is forwarded to the Drive API as pageToken."""
    drive = Mock()
    entry = {
        "id": "f1",
        "name": "Report.pdf",
        "mimeType": "application/pdf",
        "webViewLink": "https://drive.google.com/file/f1",
        "modifiedTime": "2024-01-01T00:00:00Z",
    }
    drive.files().list().execute.return_value = {"files": [entry]}

    await _unwrap(search_drive_files)(
        service=drive,
        user_google_email="user@example.com",
        query="budget",
        page_token="tok_abc123",
    )

    list_kwargs = drive.files.return_value.list.call_args.kwargs
    assert list_kwargs.get("pageToken") == "tok_abc123"
@pytest.mark.asyncio
async def test_search_drive_files_next_page_token_in_output():
"""nextPageToken from the API response is appended at the end of the output."""
mock_service = Mock()
mock_service.files().list().execute.return_value = {
"files": [
{
"id": "f2",
"name": "Notes.docx",
"mimeType": "application/vnd.openxmlformats-officedocument.wordprocessingml.document",
"webViewLink": "https://drive.google.com/file/f2",
"modifiedTime": "2024-02-01T00:00:00Z",
}
],
"nextPageToken": "next_tok_xyz",
}
result = await _unwrap(search_drive_files)(
service=mock_service,
user_google_email="user@example.com",
query="notes",
)
assert result.endswith("nextPageToken: next_tok_xyz")
@pytest.mark.asyncio
async def test_search_drive_files_no_next_page_token_when_absent():
"""nextPageToken does not appear in output when the API has no more pages."""
mock_service = Mock()
mock_service.files().list().execute.return_value = {
"files": [
{
"id": "f3",
"name": "Summary.txt",
"mimeType": "text/plain",
"webViewLink": "https://drive.google.com/file/f3",
"modifiedTime": "2024-03-01T00:00:00Z",
}
]
# no nextPageToken key
}
result = await _unwrap(search_drive_files)(
service=mock_service,
user_google_email="user@example.com",
query="summary",
)
assert "nextPageToken" not in result
# ---------------------------------------------------------------------------
# list_drive_items — page_token
# ---------------------------------------------------------------------------
@pytest.mark.asyncio
@patch("gdrive.drive_tools.resolve_folder_id", new_callable=AsyncMock)
async def test_list_drive_items_page_token_passed_to_api(mock_resolve_folder):
"""page_token is forwarded to the Drive API as pageToken."""
mock_resolve_folder.return_value = "root"
mock_service = Mock()
mock_service.files().list().execute.return_value = {
"files": [
{
"id": "folder1",
"name": "Archive",
"mimeType": "application/vnd.google-apps.folder",
"webViewLink": "https://drive.google.com/drive/folders/folder1",
"modifiedTime": "2024-01-15T00:00:00Z",
}
]
}
await _unwrap(list_drive_items)(
service=mock_service,
user_google_email="user@example.com",
page_token="tok_page2",
)
call_kwargs = mock_service.files.return_value.list.call_args.kwargs
assert call_kwargs.get("pageToken") == "tok_page2"
@pytest.mark.asyncio
@patch("gdrive.drive_tools.resolve_folder_id", new_callable=AsyncMock)
async def test_list_drive_items_next_page_token_in_output(mock_resolve_folder):
"""nextPageToken from the API response is appended at the end of the output."""
mock_resolve_folder.return_value = "root"
mock_service = Mock()
mock_service.files().list().execute.return_value = {
"files": [
{
"id": "file99",
"name": "data.csv",
"mimeType": "text/csv",
"webViewLink": "https://drive.google.com/file/file99",
"modifiedTime": "2024-04-01T00:00:00Z",
}
],
"nextPageToken": "next_list_tok",
}
result = await _unwrap(list_drive_items)(
service=mock_service,
user_google_email="user@example.com",
)
assert result.endswith("nextPageToken: next_list_tok")
@pytest.mark.asyncio
@patch("gdrive.drive_tools.resolve_folder_id", new_callable=AsyncMock)
async def test_list_drive_items_no_next_page_token_when_absent(mock_resolve_folder):
"""nextPageToken does not appear in output when the API has no more pages."""
mock_resolve_folder.return_value = "root"
mock_service = Mock()
mock_service.files().list().execute.return_value = {
"files": [
{
"id": "file100",
"name": "readme.txt",
"mimeType": "text/plain",
"webViewLink": "https://drive.google.com/file/file100",
"modifiedTime": "2024-05-01T00:00:00Z",
}
]
# no nextPageToken key
}
result = await _unwrap(list_drive_items)(
service=mock_service,
user_google_email="user@example.com",
)
assert "nextPageToken" not in result
# Helpers
# ---------------------------------------------------------------------------
def _make_file(
file_id: str,
name: str,
mime_type: str,
link: str = "http://link",
modified: str = "2024-01-01T00:00:00Z",
size: str | None = None,
) -> dict:
item = {
"id": file_id,
"name": name,
"mimeType": mime_type,
"webViewLink": link,
"modifiedTime": modified,
}
if size is not None:
item["size"] = size
return item
# ---------------------------------------------------------------------------
# create_drive_folder
# ---------------------------------------------------------------------------
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_create_drive_folder(): async def test_create_drive_folder():
@@ -47,40 +263,324 @@ async def test_create_drive_folder():
assert "https://drive.google.com/drive/folders/folder123" in result assert "https://drive.google.com/drive/folders/folder123" in result
# ---------------------------------------------------------------------------
# build_drive_list_params — detailed flag (pure unit tests, no I/O)
# ---------------------------------------------------------------------------
from gdrive.drive_tools import list_drive_items, search_drive_files
def test_build_params_detailed_true_includes_extra_fields():
"""detailed=True requests modifiedTime, webViewLink, and size from the API."""
params = build_drive_list_params(query="name='x'", page_size=10, detailed=True)
assert "modifiedTime" in params["fields"]
assert "webViewLink" in params["fields"]
assert "size" in params["fields"]
def test_build_params_detailed_false_omits_extra_fields():
"""detailed=False omits modifiedTime, webViewLink, and size from the API request."""
params = build_drive_list_params(query="name='x'", page_size=10, detailed=False)
assert "modifiedTime" not in params["fields"]
assert "webViewLink" not in params["fields"]
assert "size" not in params["fields"]
def test_build_params_detailed_false_keeps_core_fields():
"""detailed=False still requests id, name, and mimeType."""
params = build_drive_list_params(query="name='x'", page_size=10, detailed=False)
assert "id" in params["fields"]
assert "name" in params["fields"]
assert "mimeType" in params["fields"]
def test_build_params_default_is_detailed():
"""Omitting detailed behaves identically to detailed=True."""
params_default = build_drive_list_params(query="q", page_size=5)
params_true = build_drive_list_params(query="q", page_size=5, detailed=True)
assert params_default["fields"] == params_true["fields"]
# --------------------------------------------------------------------------- # ---------------------------------------------------------------------------
# Helpers # search_drive_files — detailed flag
# --------------------------------------------------------------------------- # ---------------------------------------------------------------------------
def _unwrap(fn):
"""Unwrap a FunctionTool or plain-function decorator chain to the original async function."""
if hasattr(fn, "fn"):
fn = fn.fn # FunctionTool wrapper (some server versions)
while hasattr(fn, "__wrapped__"):
fn = fn.__wrapped__
return fn
def _make_file( @pytest.mark.asyncio
file_id: str, async def test_search_detailed_true_output_includes_metadata():
name: str, """detailed=True (default) includes modified time and link in output."""
mime_type: str, mock_service = Mock()
link: str = "http://link", mock_service.files().list().execute.return_value = {
modified: str = "2024-01-01T00:00:00Z", "files": [
) -> dict: _make_file(
return { "f1",
"id": file_id, "My Doc",
"name": name, "application/vnd.google-apps.document",
"mimeType": mime_type, modified="2024-06-01T12:00:00Z",
"webViewLink": link, link="http://link/f1",
"modifiedTime": modified, )
]
} }
result = await _unwrap(search_drive_files)(
service=mock_service,
user_google_email="user@example.com",
query="my doc",
detailed=True,
)
assert "My Doc" in result
assert "2024-06-01T12:00:00Z" in result
assert "http://link/f1" in result
@pytest.mark.asyncio
async def test_search_detailed_false_output_excludes_metadata():
"""detailed=False omits modified time and link from output."""
mock_service = Mock()
mock_service.files().list().execute.return_value = {
"files": [
_make_file(
"f1",
"My Doc",
"application/vnd.google-apps.document",
modified="2024-06-01T12:00:00Z",
link="http://link/f1",
)
]
}
result = await _unwrap(search_drive_files)(
service=mock_service,
user_google_email="user@example.com",
query="my doc",
detailed=False,
)
assert "My Doc" in result
assert "f1" in result
assert "2024-06-01T12:00:00Z" not in result
assert "http://link/f1" not in result
@pytest.mark.asyncio
async def test_search_detailed_true_with_size():
"""When the item has a size field, detailed=True includes it in output."""
mock_service = Mock()
mock_service.files().list().execute.return_value = {
"files": [
_make_file("f2", "Big File", "application/pdf", size="102400"),
]
}
result = await _unwrap(search_drive_files)(
service=mock_service,
user_google_email="user@example.com",
query="big",
detailed=True,
)
assert "102400" in result
@pytest.mark.asyncio
async def test_search_detailed_true_requests_extra_api_fields():
"""detailed=True passes full fields string to the Drive API."""
mock_service = Mock()
mock_service.files().list().execute.return_value = {"files": []}
await _unwrap(search_drive_files)(
service=mock_service,
user_google_email="user@example.com",
query="anything",
detailed=True,
)
call_kwargs = mock_service.files.return_value.list.call_args.kwargs
assert "modifiedTime" in call_kwargs["fields"]
assert "webViewLink" in call_kwargs["fields"]
assert "size" in call_kwargs["fields"]
@pytest.mark.asyncio
async def test_search_detailed_false_requests_compact_api_fields():
"""detailed=False passes compact fields string to the Drive API."""
mock_service = Mock()
mock_service.files().list().execute.return_value = {"files": []}
await _unwrap(search_drive_files)(
service=mock_service,
user_google_email="user@example.com",
query="anything",
detailed=False,
)
call_kwargs = mock_service.files.return_value.list.call_args.kwargs
assert "modifiedTime" not in call_kwargs["fields"]
assert "webViewLink" not in call_kwargs["fields"]
assert "size" not in call_kwargs["fields"]
@pytest.mark.asyncio
async def test_search_default_detailed_matches_detailed_true():
"""Omitting detailed produces the same output as detailed=True."""
file = _make_file(
"f1",
"Doc",
"application/vnd.google-apps.document",
modified="2024-01-01T00:00:00Z",
link="http://l",
)
mock_service = Mock()
mock_service.files().list().execute.return_value = {"files": [file]}
result_default = await _unwrap(search_drive_files)(
service=mock_service,
user_google_email="user@example.com",
query="doc",
)
mock_service.files().list().execute.return_value = {"files": [file]}
result_true = await _unwrap(search_drive_files)(
service=mock_service,
user_google_email="user@example.com",
query="doc",
detailed=True,
)
assert result_default == result_true
# --------------------------------------------------------------------------- # ---------------------------------------------------------------------------
# search_drive_files # list_drive_items — detailed flag
# ---------------------------------------------------------------------------
@pytest.mark.asyncio
@patch("gdrive.drive_tools.resolve_folder_id", new_callable=AsyncMock)
async def test_list_detailed_true_output_includes_metadata(mock_resolve_folder):
"""detailed=True (default) includes modified time and link in output."""
mock_resolve_folder.return_value = "resolved_root"
mock_service = Mock()
mock_service.files().list().execute.return_value = {
"files": [
_make_file(
"id1",
"Report",
"application/vnd.google-apps.document",
modified="2024-03-15T08:00:00Z",
link="http://link/id1",
)
]
}
result = await _unwrap(list_drive_items)(
service=mock_service,
user_google_email="user@example.com",
folder_id="root",
detailed=True,
)
assert "Report" in result
assert "2024-03-15T08:00:00Z" in result
assert "http://link/id1" in result
@pytest.mark.asyncio
@patch("gdrive.drive_tools.resolve_folder_id", new_callable=AsyncMock)
async def test_list_detailed_false_output_excludes_metadata(mock_resolve_folder):
"""detailed=False omits modified time and link from output."""
mock_resolve_folder.return_value = "resolved_root"
mock_service = Mock()
mock_service.files().list().execute.return_value = {
"files": [
_make_file(
"id1",
"Report",
"application/vnd.google-apps.document",
modified="2024-03-15T08:00:00Z",
link="http://link/id1",
)
]
}
result = await _unwrap(list_drive_items)(
service=mock_service,
user_google_email="user@example.com",
folder_id="root",
detailed=False,
)
assert "Report" in result
assert "id1" in result
assert "2024-03-15T08:00:00Z" not in result
assert "http://link/id1" not in result
@pytest.mark.asyncio
@patch("gdrive.drive_tools.resolve_folder_id", new_callable=AsyncMock)
async def test_list_detailed_true_with_size(mock_resolve_folder):
"""When item has a size field, detailed=True includes it in output."""
mock_resolve_folder.return_value = "resolved_root"
mock_service = Mock()
mock_service.files().list().execute.return_value = {
"files": [
_make_file("id2", "Big File", "application/pdf", size="204800"),
]
}
result = await _unwrap(list_drive_items)(
service=mock_service,
user_google_email="user@example.com",
folder_id="root",
detailed=True,
)
assert "204800" in result
@pytest.mark.asyncio
@patch("gdrive.drive_tools.resolve_folder_id", new_callable=AsyncMock)
async def test_list_detailed_true_requests_extra_api_fields(mock_resolve_folder):
"""detailed=True passes full fields string to the Drive API."""
mock_resolve_folder.return_value = "resolved_root"
mock_service = Mock()
mock_service.files().list().execute.return_value = {"files": []}
await _unwrap(list_drive_items)(
service=mock_service,
user_google_email="user@example.com",
folder_id="root",
detailed=True,
)
call_kwargs = mock_service.files.return_value.list.call_args.kwargs
assert "modifiedTime" in call_kwargs["fields"]
assert "webViewLink" in call_kwargs["fields"]
assert "size" in call_kwargs["fields"]
@pytest.mark.asyncio
@patch("gdrive.drive_tools.resolve_folder_id", new_callable=AsyncMock)
async def test_list_detailed_false_requests_compact_api_fields(mock_resolve_folder):
"""detailed=False passes compact fields string to the Drive API."""
mock_resolve_folder.return_value = "resolved_root"
mock_service = Mock()
mock_service.files().list().execute.return_value = {"files": []}
await _unwrap(list_drive_items)(
service=mock_service,
user_google_email="user@example.com",
folder_id="root",
detailed=False,
)
call_kwargs = mock_service.files.return_value.list.call_args.kwargs
assert "modifiedTime" not in call_kwargs["fields"]
assert "webViewLink" not in call_kwargs["fields"]
assert "size" not in call_kwargs["fields"]
# ---------------------------------------------------------------------------
# Existing behavior coverage
# --------------------------------------------------------------------------- # ---------------------------------------------------------------------------
@@ -120,15 +620,59 @@ async def test_search_no_results():
assert "No files found" in result assert "No files found" in result
@pytest.mark.asyncio
@patch("gdrive.drive_tools.resolve_folder_id", new_callable=AsyncMock)
async def test_list_items_basic(mock_resolve_folder):
"""Basic listing without filters returns all items."""
mock_resolve_folder.return_value = "resolved_root"
mock_service = Mock()
mock_service.files().list().execute.return_value = {
"files": [
_make_file("id1", "Folder A", "application/vnd.google-apps.folder"),
_make_file("id2", "Doc B", "application/vnd.google-apps.document"),
]
}
result = await _unwrap(list_drive_items)(
service=mock_service,
user_google_email="user@example.com",
folder_id="root",
)
assert "Found 2 items" in result
assert "Folder A" in result
assert "Doc B" in result
@pytest.mark.asyncio
@patch("gdrive.drive_tools.resolve_folder_id", new_callable=AsyncMock)
async def test_list_items_no_results(mock_resolve_folder):
"""Empty folder returns a clear message."""
mock_resolve_folder.return_value = "resolved_root"
mock_service = Mock()
mock_service.files().list().execute.return_value = {"files": []}
result = await _unwrap(list_drive_items)(
service=mock_service,
user_google_email="user@example.com",
folder_id="root",
)
assert "No items found" in result
# ---------------------------------------------------------------------------
# file_type filtering
# ---------------------------------------------------------------------------
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_search_file_type_folder_adds_mime_filter(): async def test_search_file_type_folder_adds_mime_filter():
"""file_type='folder' appends the folder MIME type to the query.""" """file_type='folder' appends the folder MIME type to the query."""
mock_service = Mock() mock_service = Mock()
mock_service.files().list().execute.return_value = { mock_service.files().list().execute.return_value = {
"files": [ "files": [
_make_file( _make_file("fold1", "My Folder", "application/vnd.google-apps.folder")
"fold1", "My Folder", "application/vnd.google-apps.folder"
)
] ]
} }
@@ -142,7 +686,6 @@ async def test_search_file_type_folder_adds_mime_filter():
assert "Found 1 files" in result assert "Found 1 files" in result
assert "My Folder" in result assert "My Folder" in result
# Verify the API was called with the mimeType filter in the query
call_kwargs = mock_service.files.return_value.list.call_args.kwargs call_kwargs = mock_service.files.return_value.list.call_args.kwargs
assert "mimeType = 'application/vnd.google-apps.folder'" in call_kwargs["q"] assert "mimeType = 'application/vnd.google-apps.folder'" in call_kwargs["q"]
@@ -227,7 +770,7 @@ async def test_search_file_type_structured_query_combined():
await _unwrap(search_drive_files)( await _unwrap(search_drive_files)(
service=mock_service, service=mock_service,
user_google_email="user@example.com", user_google_email="user@example.com",
query="name contains 'budget'", # structured query query="name contains 'budget'",
file_type="spreadsheet", file_type="spreadsheet",
) )
@@ -251,52 +794,6 @@ async def test_search_file_type_unknown_raises_value_error():
) )
# ---------------------------------------------------------------------------
# list_drive_items
# ---------------------------------------------------------------------------
@pytest.mark.asyncio
@patch("gdrive.drive_tools.resolve_folder_id", new_callable=AsyncMock)
async def test_list_items_basic(mock_resolve_folder):
"""Basic listing without filters returns all items."""
mock_resolve_folder.return_value = "resolved_root"
mock_service = Mock()
mock_service.files().list().execute.return_value = {
"files": [
_make_file("id1", "Folder A", "application/vnd.google-apps.folder"),
_make_file("id2", "Doc B", "application/vnd.google-apps.document"),
]
}
result = await _unwrap(list_drive_items)(
service=mock_service,
user_google_email="user@example.com",
folder_id="root",
)
assert "Found 2 items" in result
assert "Folder A" in result
assert "Doc B" in result
@pytest.mark.asyncio
@patch("gdrive.drive_tools.resolve_folder_id", new_callable=AsyncMock)
async def test_list_items_no_results(mock_resolve_folder):
"""Empty folder returns a clear message."""
mock_resolve_folder.return_value = "resolved_root"
mock_service = Mock()
mock_service.files().list().execute.return_value = {"files": []}
result = await _unwrap(list_drive_items)(
service=mock_service,
user_google_email="user@example.com",
folder_id="root",
)
assert "No items found" in result
@pytest.mark.asyncio @pytest.mark.asyncio
@patch("gdrive.drive_tools.resolve_folder_id", new_callable=AsyncMock) @patch("gdrive.drive_tools.resolve_folder_id", new_callable=AsyncMock)
async def test_list_items_file_type_folder_adds_mime_filter(mock_resolve_folder): async def test_list_items_file_type_folder_adds_mime_filter(mock_resolve_folder):
@@ -304,9 +801,7 @@ async def test_list_items_file_type_folder_adds_mime_filter(mock_resolve_folder)
mock_resolve_folder.return_value = "resolved_root" mock_resolve_folder.return_value = "resolved_root"
mock_service = Mock() mock_service = Mock()
mock_service.files().list().execute.return_value = { mock_service.files().list().execute.return_value = {
"files": [ "files": [_make_file("sub1", "SubFolder", "application/vnd.google-apps.folder")]
_make_file("sub1", "SubFolder", "application/vnd.google-apps.folder")
]
} }
result = await _unwrap(list_drive_items)( result = await _unwrap(list_drive_items)(
@@ -406,7 +901,7 @@ async def test_list_items_file_type_unknown_raises(mock_resolve_folder):
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_search_or_query_is_grouped_before_mime_filter(): async def test_search_or_query_is_grouped_before_mime_filter():
"""An OR structured query is wrapped in parentheses so the MIME filter binds correctly.""" """An OR structured query is wrapped in parentheses so MIME filter precedence is correct."""
mock_service = Mock() mock_service = Mock()
mock_service.files().list().execute.return_value = {"files": []} mock_service.files().list().execute.return_value = {"files": []}
@@ -418,8 +913,6 @@ async def test_search_or_query_is_grouped_before_mime_filter():
) )
q = mock_service.files.return_value.list.call_args.kwargs["q"] q = mock_service.files.return_value.list.call_args.kwargs["q"]
# Without grouping this would be: name contains 'a' or name contains 'b' and mimeType = ...
# The 'and' would only bind to the second term, leaking the first term through unfiltered.
assert q.startswith("(") assert q.startswith("(")
assert "name contains 'a' or name contains 'b'" in q assert "name contains 'a' or name contains 'b'" in q
assert ") and mimeType = 'application/vnd.google-apps.document'" in q assert ") and mimeType = 'application/vnd.google-apps.document'" in q

View File

@@ -0,0 +1,60 @@
import os
import sys
import pytest
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), "..")))
import main
def test_resolve_permissions_mode_selection_without_tier():
services = ["gmail", "drive"]
resolved_services, tier_tool_filter = main.resolve_permissions_mode_selection(
services, None
)
assert resolved_services == services
assert tier_tool_filter is None
def test_resolve_permissions_mode_selection_with_tier_filters_services(monkeypatch):
def fake_resolve_tools_from_tier(tier, services):
assert tier == "core"
assert services == ["gmail", "drive", "slides"]
return ["search_gmail_messages"], ["gmail"]
monkeypatch.setattr(main, "resolve_tools_from_tier", fake_resolve_tools_from_tier)
resolved_services, tier_tool_filter = main.resolve_permissions_mode_selection(
["gmail", "drive", "slides"], "core"
)
assert resolved_services == ["gmail"]
assert tier_tool_filter == {"search_gmail_messages"}
def test_narrow_permissions_to_services_keeps_selected_order():
permissions = {"drive": "full", "gmail": "readonly", "calendar": "readonly"}
narrowed = main.narrow_permissions_to_services(permissions, ["gmail", "drive"])
assert narrowed == {"gmail": "readonly", "drive": "full"}
def test_narrow_permissions_to_services_drops_non_selected_services():
permissions = {"gmail": "send", "drive": "full"}
narrowed = main.narrow_permissions_to_services(permissions, ["gmail"])
assert narrowed == {"gmail": "send"}
def test_permissions_and_tools_flags_are_rejected(monkeypatch, capsys):
monkeypatch.setattr(main, "configure_safe_logging", lambda: None)
monkeypatch.setattr(
sys,
"argv",
["main.py", "--permissions", "gmail:readonly", "--tools", "gmail"],
)
with pytest.raises(SystemExit) as exc:
main.main()
assert exc.value.code == 1
captured = capsys.readouterr()
assert "--permissions and --tools cannot be combined" in captured.err

118
tests/test_permissions.py Normal file
View File

@@ -0,0 +1,118 @@
"""
Unit tests for granular per-service permission parsing and scope resolution.
Covers parse_permissions_arg() validation (format, duplicates, unknown
service/level) and cumulative scope expansion in get_scopes_for_permission().
"""
import sys
import os
import pytest
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), "..")))
from auth.permissions import (
get_scopes_for_permission,
parse_permissions_arg,
SERVICE_PERMISSION_LEVELS,
)
from auth.scopes import (
GMAIL_READONLY_SCOPE,
GMAIL_LABELS_SCOPE,
GMAIL_MODIFY_SCOPE,
GMAIL_COMPOSE_SCOPE,
DRIVE_READONLY_SCOPE,
DRIVE_SCOPE,
DRIVE_FILE_SCOPE,
)
class TestParsePermissionsArg:
"""Tests for parse_permissions_arg()."""
def test_single_valid_entry(self):
result = parse_permissions_arg(["gmail:readonly"])
assert result == {"gmail": "readonly"}
def test_multiple_valid_entries(self):
result = parse_permissions_arg(["gmail:organize", "drive:full"])
assert result == {"gmail": "organize", "drive": "full"}
def test_all_services_at_readonly(self):
entries = [f"{svc}:readonly" for svc in SERVICE_PERMISSION_LEVELS]
result = parse_permissions_arg(entries)
assert set(result.keys()) == set(SERVICE_PERMISSION_LEVELS.keys())
def test_missing_colon_raises(self):
with pytest.raises(ValueError, match="Invalid permission format"):
parse_permissions_arg(["gmail_readonly"])
def test_duplicate_service_raises(self):
with pytest.raises(ValueError, match="Duplicate service"):
parse_permissions_arg(["gmail:readonly", "gmail:full"])
def test_unknown_service_raises(self):
with pytest.raises(ValueError, match="Unknown service"):
parse_permissions_arg(["fakesvc:readonly"])
def test_unknown_level_raises(self):
with pytest.raises(ValueError, match="Unknown level"):
parse_permissions_arg(["gmail:superadmin"])
def test_empty_list_returns_empty(self):
assert parse_permissions_arg([]) == {}
def test_extra_colon_in_value(self):
"""A level containing a colon should fail as unknown level."""
with pytest.raises(ValueError, match="Unknown level"):
parse_permissions_arg(["gmail:read:only"])
class TestGetScopesForPermission:
"""Tests for get_scopes_for_permission() cumulative scope expansion."""
def test_gmail_readonly_returns_readonly_scope(self):
scopes = get_scopes_for_permission("gmail", "readonly")
assert GMAIL_READONLY_SCOPE in scopes
def test_gmail_organize_includes_readonly(self):
"""Organize level should cumulatively include readonly scopes."""
scopes = get_scopes_for_permission("gmail", "organize")
assert GMAIL_READONLY_SCOPE in scopes
assert GMAIL_LABELS_SCOPE in scopes
assert GMAIL_MODIFY_SCOPE in scopes
def test_gmail_drafts_includes_organize_and_readonly(self):
scopes = get_scopes_for_permission("gmail", "drafts")
assert GMAIL_READONLY_SCOPE in scopes
assert GMAIL_LABELS_SCOPE in scopes
assert GMAIL_COMPOSE_SCOPE in scopes
def test_drive_readonly_excludes_full(self):
scopes = get_scopes_for_permission("drive", "readonly")
assert DRIVE_READONLY_SCOPE in scopes
assert DRIVE_SCOPE not in scopes
assert DRIVE_FILE_SCOPE not in scopes
def test_drive_full_includes_readonly(self):
scopes = get_scopes_for_permission("drive", "full")
assert DRIVE_READONLY_SCOPE in scopes
assert DRIVE_SCOPE in scopes
def test_unknown_service_raises(self):
with pytest.raises(ValueError, match="Unknown service"):
get_scopes_for_permission("nonexistent", "readonly")
def test_unknown_level_raises(self):
with pytest.raises(ValueError, match="Unknown permission level"):
get_scopes_for_permission("gmail", "nonexistent")
def test_no_duplicate_scopes(self):
"""Cumulative expansion should deduplicate scopes."""
for service, levels in SERVICE_PERMISSION_LEVELS.items():
for level_name, _ in levels:
scopes = get_scopes_for_permission(service, level_name)
assert len(scopes) == len(set(scopes)), (
f"Duplicate scopes for {service}:{level_name}"
)

View File

@@ -12,6 +12,7 @@ import os
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))) sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), "..")))
from auth.scopes import ( from auth.scopes import (
BASE_SCOPES,
CALENDAR_READONLY_SCOPE, CALENDAR_READONLY_SCOPE,
CALENDAR_SCOPE, CALENDAR_SCOPE,
CONTACTS_READONLY_SCOPE, CONTACTS_READONLY_SCOPE,
@@ -31,6 +32,8 @@ from auth.scopes import (
has_required_scopes, has_required_scopes,
set_read_only, set_read_only,
) )
from auth.permissions import get_scopes_for_permission, set_permissions
import auth.permissions as permissions_module
class TestDocsScopes: class TestDocsScopes:
@@ -195,3 +198,34 @@ class TestHasRequiredScopes:
available = [GMAIL_MODIFY_SCOPE] available = [GMAIL_MODIFY_SCOPE]
required = [GMAIL_READONLY_SCOPE, DRIVE_READONLY_SCOPE] required = [GMAIL_READONLY_SCOPE, DRIVE_READONLY_SCOPE]
assert not has_required_scopes(available, required) assert not has_required_scopes(available, required)
class TestGranularPermissionsScopes:
"""Tests for granular permissions scope generation path."""
def setup_method(self):
set_read_only(False)
permissions_module._PERMISSIONS = None
def teardown_method(self):
set_read_only(False)
permissions_module._PERMISSIONS = None
def test_permissions_mode_returns_base_plus_permission_scopes(self):
set_permissions({"gmail": "send", "drive": "readonly"})
scopes = get_scopes_for_tools(["calendar"]) # ignored in permissions mode
expected = set(BASE_SCOPES)
expected.update(get_scopes_for_permission("gmail", "send"))
expected.update(get_scopes_for_permission("drive", "readonly"))
assert set(scopes) == expected
def test_permissions_mode_overrides_read_only_and_full_maps(self):
set_read_only(True)
without_permissions = get_scopes_for_tools(["drive"])
assert DRIVE_READONLY_SCOPE in without_permissions
set_permissions({"gmail": "readonly"})
with_permissions = get_scopes_for_tools(["drive"])
assert GMAIL_READONLY_SCOPE in with_permissions
assert DRIVE_READONLY_SCOPE not in with_permissions

2
uv.lock generated
View File

@@ -2035,7 +2035,7 @@ wheels = [
[[package]] [[package]]
name = "workspace-mcp" name = "workspace-mcp"
version = "1.12.0" version = "1.13.0"
source = { editable = "." } source = { editable = "." }
dependencies = [ dependencies = [
{ name = "cryptography" }, { name = "cryptography" },