Clone of 4chan-downloader written in Rust
CLI to download all images/webms of a 4chan thread.
If you use the reload flag, previously saved images won't be redownloaded.
Best results are obtained with the option -c 4 (4 concurrent downloads).
```
USAGE:
    chan-downloader [FLAGS] [OPTIONS] --thread <thread>

FLAGS:
    -h, --help       Prints help information
    -r, --reload     Reload thread every t minutes to get new images
    -V, --version    Prints version information

OPTIONS:
    -c, --concurrent <concurrent>    Number of concurrent downloads
```
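A typical run against the thread used in the library examples below, with four concurrent downloads and the reload flag, would look something like this:
```
chan-downloader --thread https://boards.4chan.org/wg/thread/6872254 -c 4 -r
```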
You can also use chan_downloader, the library used by the CLI.
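To use the library in your own project, add it as a dependency, for instance with cargo add (assuming the crate is published under this name; check crates.io for the exact name and current version):
```
cargo add chan_downloader
```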
save_image saves the image from the url to the given path and returns the path on success.
```
use reqwest::Client;
use std::env;
use std::fs::remove_file;

let client = Client::new();
let workpath = env::current_dir().unwrap().join("1489266570954.jpg");
let url = "https://i.4cdn.org/wg/1489266570954.jpg";
let answer = chan_downloader::save_image(url, workpath.to_str().unwrap(), &client).unwrap();
assert_eq!(workpath.to_str().unwrap(), answer);
remove_file(answer).unwrap();
```
get_page_content returns the page content from the given url.
```
use reqwest::Client;

let client = Client::new();
let url = "https://boards.4chan.org/wg/thread/6872254";
match chan_downloader::get_page_content(url, &client) {
    Ok(page) => println!("Content: {}", page),
    Err(err) => eprintln!("Error: {}", err),
}
```
get_thread_infos returns the board name and thread id from the thread url.
```
let url = "https://boards.4chan.org/wg/thread/6872254";
let (board_name, thread_id) = chan_downloader::get_thread_infos(url);

assert_eq!(board_name, "wg");
assert_eq!(thread_id, "6872254");
```
get_image_links returns the links and the number of links found on a page. Note that the links are doubled.
```
use reqwest::Client;

let client = Client::new();
let url = "https://boards.4chan.org/wg/thread/6872254";
match chan_downloader::get_page_content(url, &client) {
    Ok(page_string) => {
        let (links_iter, number_of_links) = chan_downloader::get_image_links(page_string.as_str());
        assert_eq!(number_of_links, 4);
        for cap in links_iter.step_by(2) {
            println!("{} and {}", &cap[1], &cap[2]);
        }
    },
    Err(err) => eprintln!("Error: {}", err),
}
```
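Putting the calls above together, a minimal single-threaded downloader could be sketched as follows. This is only a sketch built on the examples in this README: it assumes the first capture group is the full image url and the second its file name, and it leaves out the error handling and concurrency that the CLI provides.
```
use reqwest::Client;
use std::env;
use std::fs::create_dir_all;

fn main() {
    let client = Client::new();
    let url = "https://boards.4chan.org/wg/thread/6872254";

    // Name the output directory after the board and thread id
    let (board_name, thread_id) = chan_downloader::get_thread_infos(url);
    let output = env::current_dir()
        .unwrap()
        .join(format!("{}-{}", board_name, thread_id));
    create_dir_all(&output).unwrap();

    match chan_downloader::get_page_content(url, &client) {
        Ok(page_string) => {
            let (links_iter, _number_of_links) =
                chan_downloader::get_image_links(page_string.as_str());
            // Links are doubled, so step over every other capture
            for cap in links_iter.step_by(2) {
                // Assumption: cap[1] is the full image url, cap[2] the file name
                let target = output.join(&cap[2]);
                match chan_downloader::save_image(&cap[1], target.to_str().unwrap(), &client) {
                    Ok(saved) => println!("Saved {}", saved),
                    Err(err) => eprintln!("Error: {}", err),
                }
            }
        }
        Err(err) => eprintln!("Error: {}", err),
    }
}
```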