crawl4ai/docs/examples/docker_python_sdk.py

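# Prerequisite: a Crawl4ai Docker server running and reachable at the base_url
# used below (http://localhost:8000 in this example).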
import asyncio
from crawl4ai.docker_client import Crawl4aiDockerClient
from crawl4ai import (
    BrowserConfig,
    CrawlerRunConfig
)


async def main():
    async with Crawl4aiDockerClient(base_url="http://localhost:8000", verbose=True) as client:
        # If JWT is enabled, authenticate first
        # await client.authenticate("test@example.com")

        # Non-streaming crawl
        results = await client.crawl(
            ["https://example.com", "https://python.org"],
            browser_config=BrowserConfig(headless=True),
            crawler_config=CrawlerRunConfig()
        )
        print(f"Non-streaming results: {results}")

        # Streaming crawl
        crawler_config = CrawlerRunConfig(stream=True)
        async for result in await client.crawl(
            ["https://example.com", "https://python.org"],
            browser_config=BrowserConfig(headless=True),
            crawler_config=crawler_config
        ):
            print(f"Streamed result: {result}")

        # Get schema
        schema = await client.get_schema()
        print(f"Schema: {schema}")

if __name__ == "__main__":
    asyncio.run(main())