Created
July 9, 2025 22:54
-
-
Save pirate/b701c0607070ca7d15d3b451d15f20a6 to your computer and use it in GitHub Desktop.
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
"""Test to ensure navigation timeouts don't block subsequent operations""" | |
import asyncio
import time

import pytest
from pytest_httpserver import HTTPServer

from browser_use.browser.profile import BrowserProfile
from browser_use.browser.session import BrowserSession
@pytest.fixture
async def browser_session():
    """Yield a started, headless BrowserSession configured for fast test runs.

    The session is killed in a ``finally`` block so cleanup happens even if
    the test body aborts the generator (cancellation / GeneratorExit), which
    the original bare ``yield`` did not guarantee.
    """
    session = BrowserSession(
        browser_profile=BrowserProfile(
            headless=True,
            keep_alive=False,
            default_navigation_timeout=30_000,  # 30 seconds default
            minimum_wait_page_load_time=0.1,  # Short wait for testing
        )
    )
    await session.start()
    try:
        yield session
    finally:
        # Always tear the browser down, even on test failure or cancellation.
        await session.kill()
def test_blocking_javascript_handled_gracefully(httpserver: HTTPServer, browser_session: BrowserSession):
    """Test that pages with blocking JavaScript are handled gracefully without crashing.

    Serves a page whose inline <script> busy-loops for 2 minutes (worst case:
    a permanently unresponsive renderer), then verifies the session can still
    navigate away, read content, and take screenshots afterwards.
    """
    # Create a page with synchronous blocking JavaScript that makes the page
    # completely unresponsive.
    httpserver.expect_request('/blocking').respond_with_data(
        """
        <html>
        <head>
            <title>Blocking Page</title>
        </head>
        <body>
            <h1>This page will block</h1>
            <script>
                // Synchronous blocking script that makes page unresponsive
                console.log('Starting blocking script...');
                const start = Date.now();
                while (Date.now() - start < 120000) { // Block for 2 minutes
                    // This will make the page completely unresponsive
                }
            </script>
        </body>
        </html>
        """,
        content_type='text/html',
    )

    async def run_test():
        # Track whether the session attempted recovery by opening a new tab.
        recovery_happened = False
        original_create_new_tab = browser_session.create_new_tab

        async def track_recovery(*args, **kwargs):
            nonlocal recovery_happened
            recovery_happened = True
            print(f"🔄 Recovery triggered! Creating new tab with args: {args}, kwargs: {kwargs}")
            result = await original_create_new_tab(*args, **kwargs)
            print("✅ New tab created successfully")
            return result

        browser_session.create_new_tab = track_recovery
        try:
            # Navigate to the blocking page - should timeout and handle gracefully
            await browser_session.navigate_to(httpserver.url_for('/blocking'))

            # Test 1: Take screenshot - should attempt recovery but may fail due
            # to the permanent block; failing with an ordinary exception is OK.
            try:
                screenshot = await browser_session.take_screenshot()
                print(f"Screenshot result: {len(screenshot) if screenshot else 'None'} bytes")
            except Exception as e:
                # This is expected for permanently blocking pages
                print(f"Screenshot failed as expected: {type(e).__name__}")

            # The key test: we must still be able to navigate to a working page.
            # Test 2: Navigate somewhere else - should work even after blocking page
            httpserver.expect_request('/normal').respond_with_data(
                "<html><body><h1>Normal Page</h1></body></html>",
                content_type='text/html',
            )
            await browser_session.navigate_to(httpserver.url_for('/normal'))

            # Test 3: Verify we can interact with the new page
            page = await browser_session.get_current_page()
            content = await page.content()
            assert 'Normal Page' in content, "Should be able to navigate to new page after blocking page"

            # Test 4: Screenshot should work on the normal page
            screenshot = await browser_session.take_screenshot()
            assert screenshot is not None, "Screenshot should work on normal page"
            assert len(screenshot) > 100, "Screenshot should have content"

            print("✅ Successfully handled blocking JavaScript without crashing!")
            print(f"✅ Recovery was {'triggered' if recovery_happened else 'not triggered'}")
        finally:
            # BUGFIX: restore the monkeypatched method so fixture teardown and
            # any later use of the session sees the real create_new_tab.
            browser_session.create_new_tab = original_create_new_tab

    asyncio.run(run_test())
def test_multiple_sessions_with_slow_pages(httpserver: HTTPServer):
    """Test multiple browser sessions navigating to slow pages simultaneously.

    Five sessions with a 1s navigation timeout all load a page that never
    finishes loading; every session must still be able to take a screenshot.
    """
    # Create a page that never finishes loading: it keeps mutating the DOM
    # forever, so it never reaches a quiescent "loaded" state.
    httpserver.expect_request('/neverending').respond_with_data(
        """
        <html>
        <head><title>Never Ending Page</title></head>
        <body>
            <h1>Loading Forever...</h1>
            <script>
                // Create an infinite loading situation
                async function loadForever() {
                    while (true) {
                        await new Promise(resolve => setTimeout(resolve, 100));
                        document.body.innerHTML += '.';
                    }
                }
                loadForever();
            </script>
        </body>
        </html>
        """,
        content_type='text/html',
    )

    async def run_test():
        # Create 5 browser sessions with an aggressive navigation timeout.
        sessions = [
            BrowserSession(
                browser_profile=BrowserProfile(
                    headless=True,
                    keep_alive=False,
                    default_navigation_timeout=1000,  # 1 second timeout
                )
            )
            for _ in range(5)
        ]

        async def take_screenshot_with_timeout(sess, idx):
            # Bound each screenshot so one stuck session can't hang the test.
            try:
                screenshot = await asyncio.wait_for(sess.take_screenshot(), timeout=5.0)
                return (idx, screenshot is not None)
            except asyncio.TimeoutError:
                # BUGFIX: on Python < 3.11 asyncio.wait_for raises
                # asyncio.TimeoutError, which is NOT the builtin TimeoutError
                # the original caught (they became aliases only in 3.11).
                return (idx, False)

        try:
            # Start all sessions concurrently.
            await asyncio.gather(*(s.start() for s in sessions))
            # Navigate all to the slow page.
            await asyncio.gather(*(s.navigate_to(httpserver.url_for('/neverending')) for s in sessions))
            # All sessions should be able to take screenshots without timeout.
            results = await asyncio.gather(
                *(take_screenshot_with_timeout(session, i) for i, session in enumerate(sessions))
            )
            # Check that all screenshots succeeded.
            for idx, success in results:
                assert success, f'Session {idx} failed to take screenshot due to timeout'
        finally:
            # Clean up all sessions. return_exceptions=True so one failed
            # kill() doesn't mask the others (or the real test failure).
            await asyncio.gather(*(s.kill() for s in sessions), return_exceptions=True)

    asyncio.run(run_test())
def test_navigation_timeout_warning_appears(httpserver: HTTPServer, browser_session: BrowserSession):
    """Test that the timeout warning appears in logs when navigation times out."""

    # BUGFIX: pytest-httpserver is WSGI-based and invokes handlers
    # synchronously; the original `async def` handler returned an un-awaited
    # coroutine, so the 5-second delay never actually happened and the
    # response body was the coroutine repr, not the HTML. Use a plain
    # blocking handler instead.
    def slow_response(request):
        time.sleep(5)  # 5 second delay so navigation exceeds its timeout
        return '<html><body>Finally loaded!</body></html>'

    httpserver.expect_request('/delayed').respond_with_handler(slow_response)

    async def run_test():
        # Navigate with a short timeout - should see warning in logs.
        old_method = browser_session.navigate
        warning_logged = False

        async def navigate_with_log_check(url):
            # Intercept logger warnings emitted during navigation.
            import logging

            class WarningHandler(logging.Handler):
                def emit(self, record):
                    # nonlocal skips the class scope and binds to run_test's
                    # variable, flagging that the timeout warning was seen.
                    nonlocal warning_logged
                    message = record.getMessage()
                    if "didn't finish after" in message and 'continuing anyway' in message:
                        warning_logged = True

            handler = WarningHandler()
            browser_session.logger.addHandler(handler)
            try:
                await old_method(url)
            finally:
                browser_session.logger.removeHandler(handler)

        browser_session.navigate = navigate_with_log_check
        try:
            # This should timeout and log a warning.
            await browser_session.navigate(httpserver.url_for('/delayed'))

            # But we should still be able to interact with the page.
            page = await browser_session.get_current_page()
            assert page is not None

            # The warning should have been logged.
            assert warning_logged, 'Navigation timeout warning was not logged'
        finally:
            # BUGFIX: restore the monkeypatched navigate method so the
            # session is left in its original state for teardown.
            browser_session.navigate = old_method

    asyncio.run(run_test())
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment