Skip to content
Open
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
11 changes: 11 additions & 0 deletions deploy/docker/server.py
Original file line number Diff line number Diff line change
Expand Up @@ -83,6 +83,11 @@
def _env_flag(var_name: str) -> bool:
    """Return True iff environment variable *var_name* is set to "true" (case-insensitive).

    Missing variables default to "false", i.e. the feature stays off unless
    explicitly opted into.
    """
    return os.environ.get(var_name, "false").lower() == "true"


# Hooks are disabled by default for security (RCE risk). Set to "true" to enable.
HOOKS_ENABLED = _env_flag("CRAWL4AI_HOOKS_ENABLED")

# /execute_js runs arbitrary JavaScript in the headless browser, which can
# make unrestricted network requests (bypassing any Python-level SSRF checks).
# Disabled by default. Set to "true" to enable.
EXECUTE_JS_ENABLED = _env_flag("CRAWL4AI_EXECUTE_JS_ENABLED")

# ── default browser config helper ─────────────────────────────
def get_default_browser_config() -> BrowserConfig:
"""Get default BrowserConfig from config.yml."""
Expand Down Expand Up @@ -494,6 +499,8 @@ class MarkdownGenerationResult(BaseModel):
```

"""
if not EXECUTE_JS_ENABLED:
raise HTTPException(403, "/execute_js is disabled. Set CRAWL4AI_EXECUTE_JS_ENABLED=true to enable.")
validate_url_scheme(body.url)
from crawler_pool import get_crawler
try:
Expand All @@ -507,6 +514,10 @@ class MarkdownGenerationResult(BaseModel):
except Exception as e:
raise HTTPException(500, detail=str(e))

# When /execute_js is disabled, strip the MCP marker attribute so the endpoint
# is not advertised in the MCP tool list (the endpoint itself also refuses
# requests with a 403, so this only affects discoverability).
if not EXECUTE_JS_ENABLED:
    delattr(execute_js, "__mcp_kind__")


@app.get("/llm/{url:path}")
async def llm_endpoint(
Expand Down