Skip to content
Open
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
12 changes: 12 additions & 0 deletions deploy/docker/api.py
Original file line number Diff line number Diff line change
Expand Up @@ -538,6 +538,12 @@ async def handle_crawl_request(
hook_manager = None

try:
# Reject URLs with an explicit scheme outside the allowlist, then default
# bare hosts to https://. Schemes are case-insensitive (RFC 3986 §3.1), so
# all prefix tests run against a lowercased copy — otherwise a legitimate
# "HTTP://example.com" would be rejected (or double-prefixed below).
_allowed_schemes = ('http://', 'https://', 'raw:', 'raw://')
_normalized_urls = []
for url in urls:
    low = url.lower()
    # Detect explicit scheme: contains :// or starts with a known no-// scheme
    has_scheme = '://' in url or low.startswith(('javascript:', 'data:', 'vbscript:'))
    if has_scheme and not low.startswith(_allowed_schemes):
        raise HTTPException(400, f"URL scheme not allowed: {url[:50]}")
    # Bare host (no allowed scheme): assume https. Preserve original casing.
    _normalized_urls.append(url if low.startswith(_allowed_schemes) else 'https://' + url)
urls = _normalized_urls
browser_config = BrowserConfig.load(browser_config)
crawler_config = CrawlerRunConfig.load(crawler_config)
Expand Down Expand Up @@ -720,6 +726,12 @@ async def handle_stream_crawl_request(
"""Handle streaming crawl requests with optional hooks."""
hooks_info = None
try:
# Reject URLs with an explicit scheme outside the allowlist. Schemes are
# case-insensitive (RFC 3986 §3.1), so the allowlist test runs against the
# lowercased URL — matching the lowercased detection check — otherwise a
# legitimate "HTTP://example.com" would be rejected with a 400.
_allowed_schemes = ('http://', 'https://', 'raw:', 'raw://')
for url in urls:
    low = url.lower()
    # Detect explicit scheme: contains :// or starts with a known no-// scheme
    has_scheme = '://' in url or low.startswith(('javascript:', 'data:', 'vbscript:'))
    if has_scheme and not low.startswith(_allowed_schemes):
        raise HTTPException(400, f"URL scheme not allowed: {url[:50]}")
browser_config = BrowserConfig.load(browser_config)
# browser_config.verbose = True # Set to False or remove for production stress testing
browser_config.verbose = False
Expand Down