//! cargo run --example webdriver --features="webdriver webdriver_stealth"
//!
//! This example demonstrates basic WebDriver usage with spider.
//! You need to have a WebDriver server running (e.g., chromedriver, geckodriver, or Selenium).
//!
//! To start chromedriver: `chromedriver --port=4444`
//! To start geckodriver: `geckodriver --port=4444`
extern crate spider;
use spider::features::webdriver_common::{WebDriverBrowser, WebDriverConfig};
use spider::tokio;
use spider::website::Website;
use std::io::Result;
/// Crawl a single website through a WebDriver session, printing each visited
/// page URL as it streams in, then report the elapsed time and page count.
///
/// Expects a WebDriver server (chromedriver, geckodriver, or Selenium)
/// listening on `http://localhost:4444` — see the module docs for startup
/// commands.
///
/// # Errors
///
/// Returns `std::io::Result` to match the example's `main`; the crawl itself
/// reports no I/O errors here.
async fn crawl_website(url: &str) -> Result<()> {
    // Point spider at the local WebDriver endpoint, running headless Chrome.
    let webdriver_config = WebDriverConfig::new()
        .with_server_url("http://localhost:4444")
        .with_browser(WebDriverBrowser::Chrome)
        .with_headless(true);

    let mut website: Website = Website::new(url)
        .with_limit(10)
        .with_webdriver(webdriver_config)
        .build()
        // A failed build here is a programming error in this example's
        // hard-coded configuration, so panicking with context is appropriate.
        .expect("website configuration should be valid");

    // Subscribe before crawling so no page events are missed.
    let mut rx2 = website.subscribe(16);
    let handle = tokio::spawn(async move {
        while let Ok(page) = rx2.recv().await {
            println!("{:?}", page.get_url());
        }
    });

    // Previously `crate::tokio::time::Instant::now()`; use the bare `tokio`
    // path for consistency with every other `tokio::` use in this file.
    let start = tokio::time::Instant::now();
    website.crawl().await;

    // Unsubscribing closes the channel, letting the printer task drain and exit.
    website.unsubscribe();
    let _ = handle.await;

    let duration = start.elapsed();
    let links = website.get_all_links_visited().await;
    println!(
        "Time elapsed in website.crawl({url}) is: {:?} for total pages: {:?}",
        duration,
        links.len()
    );
    Ok(())
}
/// Entry point: initialize logging, then crawl three sites concurrently
/// over WebDriver. Per-site results are deliberately ignored — this is a
/// best-effort demo, and each crawl prints its own summary.
#[tokio::main]
async fn main() -> Result<()> {
    env_logger::init();

    // `tokio::join!` drives all three crawls concurrently on the runtime
    // and completes when every one of them has finished.
    let (first, second, third) = tokio::join!(
        crawl_website("https://choosealicense.com"),
        crawl_website("https://jeffmendez.com"),
        crawl_website("https://example.com"),
    );
    // Explicitly discard the per-site results.
    let _ = (first, second, third);

    Ok(())
}