go tour crawler
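A parallel solution to the Go Tour web-crawler exercise: a mutex-guarded map deduplicates URLs and caches fetched page bodies, while an unbuffered channel feeds newly discovered URLs to a small pool of worker goroutines.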
package main

import (
	"fmt"
	"sync"
	"time"
)
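
// crawlData is one unit of work: a URL to fetch and the crawl depth remaining below it.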
type crawlData struct {
	url   string
	depth int
}
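
// Fetcher is the interface given by the Go Tour exercise statement.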
type Fetcher interface {
	// Fetch returns the body of URL and
	// a slice of URLs found on that page.
	Fetch(url string) (body string, urls []string, err error)
}
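
// searchCache bundles the fetcher with everything the crawl shares between
// goroutines: the visited/result map, the mutex guarding it, and the work queue.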
type searchCache struct {
	Fetcher                   // embedded field that implements the Fetcher interface
	vMux sync.Mutex           // guards v
	v    map[string]string    // url -> fetched body ("" until the fetch completes)
	in   chan crawlData       // work queue consumed by the crawler workers
}
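
// AskCrawlToken reserves url for the caller: it returns true exactly once per
// URL, so only the first goroutine to ask actually crawls the page.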
func (c *searchCache) AskCrawlToken(url string) bool {
	c.vMux.Lock()
	defer c.vMux.Unlock()
	if _, ok := c.v[url]; !ok {
		c.v[url] = ""
		return true
	}
	return false
}
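
// GetValue returns the cached body for url, if one has been stored.
// (Nothing in main calls it, but it completes the cache API.)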
func (c *searchCache) GetValue(url string) (string, bool) {
	c.vMux.Lock()
	defer c.vMux.Unlock()
	v, ok := c.v[url]
	return v, ok
}
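
// SaveResult stores the fetched body for url.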
func (c *searchCache) SaveResult(url string, v string) {
	// Lock so only one goroutine at a time can access the map c.v.
	c.vMux.Lock()
	c.v[url] = v
	c.vMux.Unlock()
}
// Crawler uses fetcher to recursively crawl
// pages starting with url, to a maximum of depth.
func (sc *searchCache) Crawler(url string, depth int) {
	if depth <= 0 {
		return
	}
	// Check depth before claiming the token: otherwise a URL first seen at
	// depth 0 would be marked visited without ever being fetched.
	if ok := sc.AskCrawlToken(url); !ok {
		return
	}

	// TEST parallel: simulate slow I/O so the effect of the worker count is
	// visible -- force every crawl to take roughly one second.
	time.Sleep(1000 * time.Millisecond)

	body, urls, err := sc.Fetch(url)
	sc.SaveResult(url, body)
	if err != nil {
		fmt.Println(err)
		return
	}
	for _, u := range urls {
		sc.in <- crawlData{url: u, depth: depth - 1}
	}
}
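
// Krawl is the worker loop: it pulls crawl jobs off sc.in and runs them one at a time.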
func (sc *searchCache) Krawl() {
	for cd := range sc.in {
		sc.Crawler(cd.url, cd.depth)
	}
}
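
// fanOutWorker starts the given number of worker goroutines, all reading from sc.in.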
func (sc *searchCache) fanOutWorker(worker int) {
	for i := 0; i < worker; i++ {
		go sc.Krawl()
	}
}
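
// Crawl builds the shared cache and kicks off the crawl at the root URL.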
func Crawl(url string, depth int, fetcher Fetcher) *searchCache {
	sc := &searchCache{
		Fetcher: fetcher,
		v:       make(map[string]string),
		in:      make(chan crawlData),
	}
	// Crawl the root directly in its own goroutine; queuing it here with
	// sc.in <- crawlData{url: url, depth: depth} would block, because no
	// worker is reading from the unbuffered channel yet.
	go sc.Crawler(url, depth)
	return sc
}
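
// main starts the crawl plus two workers, snapshots the cache immediately
// (mostly empty, since each fetch takes ~1s), then again after two seconds.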
func main() {
	sc := Crawl("http://golang.org/", 4, fetcher)
	sc.fanOutWorker(2) // try changing the number of workers

	fmt.Println("-----1-----")
	sc.vMux.Lock() // the workers write sc.v concurrently, so lock before ranging
	for k, v := range sc.v {
		fmt.Println(k, v)
	}
	sc.vMux.Unlock()

	// Let the workers run for 2 seconds.
	time.Sleep(2000 * time.Millisecond)

	fmt.Println("-----2-----")
	sc.vMux.Lock()
	for k, v := range sc.v {
		fmt.Println(k, v)
	}
	sc.vMux.Unlock()
}
// fakeFetcher is a Fetcher that returns canned results.
type fakeFetcher map[string]*fakeResult

type fakeResult struct {
	body string
	urls []string
}

func (f fakeFetcher) Fetch(url string) (string, []string, error) {
	if res, ok := f[url]; ok {
		return res.body, res.urls, nil
	}
	return "", nil, fmt.Errorf("not found: %s", url)
}
// fetcher is a populated fakeFetcher.
var fetcher = fakeFetcher{
	"http://golang.org/": &fakeResult{
		"The Go Programming Language",
		[]string{
			"http://golang.org/pkg/",
			"http://golang.org/cmd/",
		},
	},
	"http://golang.org/pkg/": &fakeResult{
		"Packages",
		[]string{
			"http://golang.org/",
			"http://golang.org/cmd/",
			"http://golang.org/pkg/fmt/",
			"http://golang.org/pkg/os/",
		},
	},
	"http://golang.org/pkg/fmt/": &fakeResult{
		"Package fmt",
		[]string{
			"http://golang.org/",
			"http://golang.org/pkg/",
		},
	},
	"http://golang.org/pkg/os/": &fakeResult{
		"Package os",
		[]string{
			"http://golang.org/",
			"http://golang.org/pkg/",
		},
	},
}
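
One caveat before reusing this sketch: sc.in is unbuffered, so a worker sending newly found URLs blocks until another worker is free to receive them, and with a small pool every goroutine can end up stuck on a send. main never notices because it exits after a fixed two-second sleep, printing whatever reached the cache by then; a buffered channel, or a sync.WaitGroup-based shutdown that closes sc.in, would let the crawl run to completion instead.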