Sponsors/new (#1637)
This commit is contained in:
@@ -1034,7 +1034,7 @@ Our enterprise sponsors and technology partners help scale Crawl4AI to power pro
|
||||
|
||||
| Company | About | Sponsorship Tier |
|
||||
|------|------|----------------------------|
|
||||
| <a href="https://app.scrapeless.com/passport/register?utm_source=official&utm_term=crawl4ai" target="_blank"><picture><source width="250" media="(prefers-color-scheme: dark)" srcset="https://gist.githubusercontent.com/aravindkarnam/0d275b942705604263e5c32d2db27bc1/raw/Scrapeless-light-logo.svg"><source width="250" media="(prefers-color-scheme: light)" srcset="https://gist.githubusercontent.com/aravindkarnam/22d0525cc0f3021bf19ebf6e11a69ccd/raw/Scrapeless-dark-logo.svg"><img alt="Scrapeless" src="https://gist.githubusercontent.com/aravindkarnam/22d0525cc0f3021bf19ebf6e11a69ccd/raw/Scrapeless-dark-logo.svg"></picture></a> | Scrapeless is the best full-stack web scraping toolkit offering Scraping API, Scraping Browser, Web Unlocker, Captcha Solver, and Proxies, designed to handle all your data collection needs. | 🥈 Silver |
|
||||
| <a href="https://app.scrapeless.com/passport/register?utm_source=official&utm_term=crawl4ai" target="_blank"><picture><source width="250" media="(prefers-color-scheme: dark)" srcset="https://gist.githubusercontent.com/aravindkarnam/0d275b942705604263e5c32d2db27bc1/raw/Scrapeless-light-logo.svg"><source width="250" media="(prefers-color-scheme: light)" srcset="https://gist.githubusercontent.com/aravindkarnam/22d0525cc0f3021bf19ebf6e11a69ccd/raw/Scrapeless-dark-logo.svg"><img alt="Scrapeless" src="https://gist.githubusercontent.com/aravindkarnam/22d0525cc0f3021bf19ebf6e11a69ccd/raw/Scrapeless-dark-logo.svg"></picture></a> | Scrapeless provides production-grade infrastructure for Crawling, Automation, and AI Agents, offering Scraping Browser, 4 Proxy Types and Universal Scraping API. | 🥈 Silver |
|
||||
| <a href="https://dashboard.capsolver.com/passport/register?inviteCode=ESVSECTX5Q23" target="_blank"><picture><source width="120" media="(prefers-color-scheme: dark)" srcset="https://docs.crawl4ai.com/uploads/sponsors/20251013045338_72a71fa4ee4d2f40.png"><source width="120" media="(prefers-color-scheme: light)" srcset="https://www.capsolver.com/assets/images/logo-text.png"><img alt="Capsolver" src="https://www.capsolver.com/assets/images/logo-text.png"></picture></a> | AI-powered Captcha solving service. Supports all major Captcha types, including reCAPTCHA, Cloudflare, and more | 🥉 Bronze |
|
||||
| <a href="https://kipo.ai" target="_blank"><img src="https://docs.crawl4ai.com/uploads/sponsors/20251013045751_2d54f57f117c651e.png" alt="DataSync" width="120"/></a> | Helps engineers and buyers find, compare, and source electronic & industrial parts in seconds, with specs, pricing, lead times & alternatives. | 🥇 Gold |
|
||||
| <a href="https://www.kidocode.com/" target="_blank"><img src="https://docs.crawl4ai.com/uploads/sponsors/20251013045045_bb8dace3f0440d65.svg" alt="Kidocode" width="120"/><p align="center">KidoCode</p></a> | Kidocode is a hybrid technology and entrepreneurship school for kids aged 5–18, offering both online and on-campus education. | 🥇 Gold |
|
||||
|
||||
61
docs/examples/cloud_browser/scrapeless_browser.py
Normal file
61
docs/examples/cloud_browser/scrapeless_browser.py
Normal file
@@ -0,0 +1,61 @@
|
||||
import json
|
||||
import asyncio
|
||||
from urllib.parse import quote, urlencode
|
||||
from crawl4ai import CrawlerRunConfig, BrowserConfig, AsyncWebCrawler
|
||||
|
||||
# Scrapeless provides a free anti-detection fingerprint browser client and cloud browsers:
# https://www.scrapeless.com/en/blog/scrapeless-nstbrowser-strategic-integration


async def main():
    """Crawl a page through a Scrapeless cloud browser over CDP.

    Builds a Scrapeless WebSocket connection URL that carries a custom
    browser fingerprint, attaches Crawl4AI to the remote browser in CDP
    mode, crawls the target page, and prints the result's status code
    and page metadata.
    """
    # Customize the browser fingerprint presented by the cloud browser.
    fingerprint = {
        "userAgent": (
            "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) "
            "AppleWebKit/537.36 (KHTML, like Gecko) "
            "Chrome/134.1.2.3 Safari/537.36"
        ),
        "platform": "Windows",
        "screen": {"width": 1280, "height": 1024},
        "localization": {
            "languages": ["zh-HK", "en-US", "en"],
            "timezone": "Asia/Hong_Kong",
        },
    }

    scrapeless_params = {
        "token": "your token",  # replace with your Scrapeless API token
        "sessionTTL": 1000,
        "sessionName": "Demo",
        # Pass the raw JSON here: urlencode() percent-encodes each value
        # exactly once. Pre-quoting the JSON with urllib.parse.quote()
        # would double-encode the fingerprint parameter (e.g. '"' would
        # arrive as %2522 instead of %22), corrupting it server-side.
        "fingerprint": json.dumps(fingerprint),
        # proxyCountry: target country/region for the proxy; requests are
        # sent via an IP from that region. Use a country code such as "US"
        # or "GB", or "ANY" for any country. See the docs for all options.
        # "proxyCountry": "ANY",
        # profileId: create a profile on Scrapeless first, then reference it.
        # "profileId": "your profileId",
        # For more usage details, please refer to
        # https://docs.scrapeless.com/en/scraping-browser/quickstart/getting-started
    }
    query_string = urlencode(scrapeless_params)
    scrapeless_connection_url = (
        f"wss://browser.scrapeless.com/api/v2/browser?{query_string}"
    )

    # Attach to the remote Scrapeless browser over the Chrome DevTools
    # Protocol instead of launching a local browser.
    async with AsyncWebCrawler(
        config=BrowserConfig(
            headless=False,
            browser_mode="cdp",
            cdp_url=scrapeless_connection_url,
        )
    ) as crawler:
        result = await crawler.arun(
            url="https://www.scrapeless.com/en",
            config=CrawlerRunConfig(
                wait_for="css:.content",
                scan_full_page=True,
            ),
        )
        print("-" * 20)
        print(f'Status Code: {result.status_code}')
        print("-" * 20)
        print(f'Title: {result.metadata["title"]}')
        print(f'Description: {result.metadata["description"]}')
        print("-" * 20)
|
||||
|
||||
# Script entry point: run the async Scrapeless crawl demo.
if __name__ == "__main__":
    asyncio.run(main())
|
||||
|
||||
Reference in New Issue
Block a user