Skip to content

Commit 104ab9c

Browse files
djmitchepwnall
authored and committed
add a few rustdoc
1 parent 96fe2e8 commit 104ab9c

File tree

1 file changed

+6
-2
lines changed

1 file changed

+6
-2
lines changed

src/exercises/concurrency/link-checker.rs

Lines changed: 6 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -73,6 +73,7 @@ struct CrawlState {
7373
domain: String,
7474
visited_pages: std::collections::HashSet<String>,
7575
}
76+
7677
impl CrawlState {
7778
fn new(start_url: &Url) -> CrawlState {
7879
let mut visited_pages = std::collections::HashSet::new();
@@ -83,13 +84,16 @@ impl CrawlState {
8384
}
8485
}
8586

86-
fn visit_links(&self, url: &Url) -> bool {
87+
/// Determine whether links within the given page should be extracted.
88+
fn should_extract_links(&self, url: &Url) -> bool {
8789
let Some(url_domain) = url.domain() else {
8890
return false;
8991
};
9092
url_domain == self.domain
9193
}
9294

95+
/// Mark the given page as visited, returning true if it had not
96+
/// been visited before.
9397
fn mark_visited(&mut self, url: &Url) -> bool {
9498
self.visited_pages.insert(url.as_str().to_string())
9599
}
@@ -146,7 +150,7 @@ fn control_crawl(
146150
Ok(link_urls) => {
147151
for url in link_urls {
148152
if crawl_state.mark_visited(&url) {
149-
let extract_links = crawl_state.visit_links(&url);
153+
let extract_links = crawl_state.should_extract_links(&url);
150154
let crawl_command = CrawlCommand { url, extract_links };
151155
command_sender.send(crawl_command).unwrap();
152156
pending_urls += 1;

0 commit comments

Comments
 (0)