Structs as asynchronous web crawlers.

The goal of this library is to help crabs with web crawling.
```rust
extern crate crabler;
use crabler::*;

#[derive(WebScraper)]
#[on_response(response_handler)]
#[on_html("a[href]", print_handler)]
struct Scraper {}

impl Scraper {
    async fn response_handler(&self, response: Response) -> Result<()> {
        println!("Status {}", response.status);
        Ok(())
    }

    async fn print_handler(&self, response: Response, a: Element) -> Result<()> {
        if let Some(href) = a.attr("href") {
            println!("Found link {} on {}", href, response.url);
        }
        Ok(())
    }
}

#[async_std::main]
async fn main() -> Result<()> {
    let scraper = Scraper {};

    // Run scraper starting from the given url and using 20 worker threads
    scraper.run(Opts::new().with_urls(vec!["https://news.ycombinator.com/"]).with_threads(20)).await
}
```
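
When run, the crawler fetches the Hacker News front page with 20 worker threads: `response_handler` fires for every response and prints its HTTP status, while `print_handler` fires for every element matching the `a[href]` selector and prints the link it found. To crawl beyond the start page, a handler can queue discovered links for fetching; below is a minimal sketch of such a handler, assuming the `Response::navigate` helper used in the crate's other examples (treat the exact method name as an assumption).

```rust
// Hypothetical drop-in variant of print_handler for the same Scraper impl.
// Assumes Response::navigate, which queues a URL for crawling and respects
// the crawler's same-domain policy.
async fn walk_handler(&self, response: Response, a: Element) -> Result<()> {
    if let Some(href) = a.attr("href") {
        println!("Following {} from {}", href, response.url);
        response.navigate(href).await?;
    }
    Ok(())
}
```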