diff --git a/deploy/docker/server.py b/deploy/docker/server.py
index 7ae1adb8b..dbcbe4eea 100644
--- a/deploy/docker/server.py
+++ b/deploy/docker/server.py
@@ -83,6 +83,11 @@
 # Hooks are disabled by default for security (RCE risk). Set to "true" to enable.
 HOOKS_ENABLED = os.environ.get("CRAWL4AI_HOOKS_ENABLED", "false").lower() == "true"
 
+# /execute_js runs arbitrary JavaScript in the headless browser, which can
+# make unrestricted network requests (bypassing any Python-level SSRF checks).
+# Disabled by default. Set to "true" to enable.
+EXECUTE_JS_ENABLED = os.environ.get("CRAWL4AI_EXECUTE_JS_ENABLED", "false").lower() == "true"
+
 # ── default browser config helper ─────────────────────────────
 def get_default_browser_config() -> BrowserConfig:
     """Get default BrowserConfig from config.yml."""
@@ -494,5 +499,7 @@ class MarkdownGenerationResult(BaseModel):
     ```
     """
+    if not EXECUTE_JS_ENABLED:
+        raise HTTPException(403, "/execute_js is disabled. Set CRAWL4AI_EXECUTE_JS_ENABLED=true to enable.")
     validate_url_scheme(body.url)
     from crawler_pool import get_crawler
     try:
@@ -507,5 +514,10 @@ class MarkdownGenerationResult(BaseModel):
     except Exception as e:
         raise HTTPException(500, detail=str(e))
 
+# Hide from the MCP tool list when disabled. Use dict.pop() rather than a bare
+# `del`: if the __mcp_kind__ marker was never set (e.g. MCP decoration is off),
+# `del` would raise AttributeError at import time and crash server startup.
+if not EXECUTE_JS_ENABLED:
+    execute_js.__dict__.pop("__mcp_kind__", None)
+
 @app.get("/llm/{url:path}")
 async def llm_endpoint(