A Tour of Go: Concurrency #9 - Web Crawler
package main

import (
	"fmt"
)

type Fetcher interface {
	// Fetch returns the body of URL and
	// a slice of URLs found on that page.
	Fetch(url string) (body string, urls []string, err error)
}

type URLInfo struct {
	URL   string
	Depth int
}
// Crawl uses fetcher to recursively crawl
// pages starting with url, to a maximum of depth.
// It fetches URLs in parallel and never fetches the same URL twice.
func Crawl(url string, depth int, fetcher Fetcher) {
	done := make(map[string]bool) // URLs already scheduled for crawling
	urlsch := make(chan URLInfo)  // URLs found by the fetch goroutines
	donech := make(chan bool)     // completion signals from the fetch goroutines
	live := 1                     // number of fetch goroutines still running

	// fetch crawls a single URL in its own goroutine, pushes every URL it
	// finds onto uch, and always signals completion on dch (via defer) so
	// the live counter stays accurate even when depth is exhausted.
	fetch := func(url string, depth int, fetcher Fetcher, uch chan URLInfo, dch chan bool) {
		defer func() { dch <- true }()
		if depth <= 0 {
			return
		}
		body, urls, err := fetcher.Fetch(url)
		if err != nil {
			fmt.Println(err)
			return
		}
		fmt.Printf("found: %s %q\n", url, body)
		for _, v := range urls {
			uch <- URLInfo{v, depth}
		}
	}

	// Fetch the root URL.
	done[url] = true
	go fetch(url, depth, fetcher, urlsch, donech)

	// Dispatch newly found URLs until every fetch goroutine has reported back.
	for live > 0 {
		select {
		case <-donech:
			live--
		case u := <-urlsch:
			if _, ok := done[u.URL]; !ok {
				done[u.URL] = true
				live++
				go fetch(u.URL, u.Depth-1, fetcher, urlsch, donech)
			}
		}
	}
	fmt.Println("Bye!")
}
func main() {
	Crawl("http://golang.org/", 4, fetcher)
}

// fakeFetcher is a Fetcher that returns canned results.
type fakeFetcher map[string]*fakeResult

type fakeResult struct {
	body string
	urls []string
}

func (f fakeFetcher) Fetch(url string) (string, []string, error) {
	if res, ok := f[url]; ok {
		return res.body, res.urls, nil
	}
	return "", nil, fmt.Errorf("not found: %s", url)
}

// fetcher is a populated fakeFetcher.
var fetcher = fakeFetcher{
	"http://golang.org/": &fakeResult{
		"The Go Programming Language",
		[]string{
			"http://golang.org/pkg/",
			"http://golang.org/cmd/",
		},
	},
	"http://golang.org/pkg/": &fakeResult{
		"Packages",
		[]string{
			"http://golang.org/",
			"http://golang.org/cmd/",
			"http://golang.org/pkg/fmt/",
			"http://golang.org/pkg/os/",
		},
	},
	"http://golang.org/pkg/fmt/": &fakeResult{
		"Package fmt",
		[]string{
			"http://golang.org/",
			"http://golang.org/pkg/",
		},
	},
	"http://golang.org/pkg/os/": &fakeResult{
		"Package os",
		[]string{
			"http://golang.org/",
			"http://golang.org/pkg/",
		},
	},
}
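For comparison, here is a minimal alternative sketch of Crawl (a drop-in replacement, not part of the original gist) that coordinates with sync.WaitGroup and a mutex-guarded set instead of channels and a live counter. It assumes the same Fetcher interface and fetcher variable defined above, and it would need "sync" added to the import block.

// Alternative Crawl: a sketch using sync primitives instead of channels.
func Crawl(url string, depth int, fetcher Fetcher) {
	var (
		wg   sync.WaitGroup
		mu   sync.Mutex
		seen = map[string]bool{url: true} // URLs already scheduled for crawling
	)

	// crawl is a recursive closure: it fetches one URL and launches a
	// goroutine for each URL it has not seen before.
	var crawl func(url string, depth int)
	crawl = func(url string, depth int) {
		defer wg.Done()
		if depth <= 0 {
			return
		}
		body, urls, err := fetcher.Fetch(url)
		if err != nil {
			fmt.Println(err)
			return
		}
		fmt.Printf("found: %s %q\n", url, body)
		for _, u := range urls {
			mu.Lock()
			isNew := !seen[u]
			seen[u] = true
			mu.Unlock()
			if isNew {
				wg.Add(1)
				go crawl(u, depth-1)
			}
		}
	}

	wg.Add(1)
	go crawl(url, depth)
	wg.Wait()
	fmt.Println("Bye!")
}

The trade-off: wg.Wait replaces the live counter and dispatcher loop, but the seen map becomes shared state that the mutex must serialize, whereas the channel-based version above keeps its done map confined to a single goroutine.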