diff --git a/README.md b/README.md
index e8d4690..f9efd1b 100644
--- a/README.md
+++ b/README.md
@@ -14,6 +14,9 @@ Geziyor is a blazing fast web crawling and web scraping framework, used to crawl
 
 See scraper [Options](https://godoc.org/github.com/geziyor/geziyor#Options) for customization.
 
+## Status
+We highly recommend using Go modules, as this project is currently in the **development stage** and its **API is not stable**.
+
 ## Usage
 
 Simplest usage
@@ -26,10 +29,6 @@ geziyor.NewGeziyor(geziyor.Options{
 }).Start()
 ```
 
-## Status
-We highly recommend you to use go modules. As this project is in **development stage** right now and **API is not stable**.
-
-
 ## Installation
 
     go get github.com/geziyor/geziyor
diff --git a/geziyor.go b/geziyor.go
index b942b45..2a5aba4 100644
--- a/geziyor.go
+++ b/geziyor.go
@@ -28,6 +28,8 @@ type Geziyor struct {
 	wg  sync.WaitGroup
 	opt Options
 
+	Requests chan *Request
+
 	visitedURLS []string
 	semGlobal   chan struct{}
 	semHosts    struct {
@@ -48,7 +50,8 @@ func NewGeziyor(opt Options) *Geziyor {
 		client: &http.Client{
 			Timeout: time.Second * 180, // Google's timeout
 		},
-		opt: opt,
+		Requests: make(chan *Request),
+		opt:      opt,
 	}
 
 	if opt.Cache != nil {
@@ -88,9 +91,12 @@ func (g *Geziyor) Start() {
 			go g.Get(startURL, g.opt.ParseFunc)
 		}
 	} else {
-		for _, req := range g.opt.StartRequestsFunc() {
-			go g.Do(req, g.opt.ParseFunc)
-		}
+		go func() {
+			for req := range g.Requests {
+				go g.Do(req, g.opt.ParseFunc)
+			}
+		}()
+		g.opt.StartRequestsFunc(g)
 	}
 
 	time.Sleep(time.Millisecond)
@@ -190,12 +196,12 @@ func (g *Geziyor) Do(req *Request, callback func(resp *Response)) {
 		response.DocHTML, _ = goquery.NewDocumentFromReader(bytes.NewReader(body))
 	}
 
-	// Export Functions
+	// Exporter functions
 	for _, exp := range g.opt.Exporters {
 		go exp.Export(&response)
 	}
 
-	// Drain exports chan if no exporters added
+	// Drain exports chan if no exporter functions added
 	if len(g.opt.Exporters) == 0 {
 		go func() {
 			for range response.Exports {
diff --git a/geziyor_test.go b/geziyor_test.go
index 6485bc3..f84878b 100644
--- a/geziyor_test.go
+++ b/geziyor_test.go
@@ -92,9 +92,9 @@ func TestRandomDelay(t *testing.T) {
 
 func TestStartRequestsFunc(t *testing.T) {
 	geziyor.NewGeziyor(geziyor.Options{
-		StartRequestsFunc: func() []*geziyor.Request {
+		StartRequestsFunc: func(g *geziyor.Geziyor) {
 			req, _ := http.NewRequest("GET", "http://quotes.toscrape.com/", nil)
-			return []*geziyor.Request{{Request: req}}
+			g.Requests <- &geziyor.Request{Request: req}
 		},
 		ParseFunc: func(r *geziyor.Response) {
 			r.Exports <- []string{r.Status}
@@ -108,16 +108,14 @@ func TestAlmaany(t *testing.T) {
 
 	geziyor.NewGeziyor(geziyor.Options{
 		AllowedDomains: []string{"www.almaany.com"},
-		StartRequestsFunc: func() []*geziyor.Request {
+		StartRequestsFunc: func(g *geziyor.Geziyor) {
 			base := "http://www.almaany.com/suggest.php?term=%c%c&lang=turkish&t=d"
-			var requests []*geziyor.Request
 			for _, c1 := range alphabet {
 				for _, c2 := range alphabet {
 					req, _ := http.NewRequest("GET", fmt.Sprintf(base, c1, c2), nil)
-					requests = append(requests, &geziyor.Request{Request: req, Meta: map[string]interface{}{"word": string(c1) + string(c2)}})
+					g.Requests <- &geziyor.Request{Request: req, Meta: map[string]interface{}{"word": string(c1) + string(c2)}}
 				}
 			}
-			return requests
 		},
 		ConcurrentRequests: 10,
 		ParseFunc:          parseAlmaany,
diff --git a/options.go b/options.go
index 537aefd..95e411f 100644
--- a/options.go
+++ b/options.go
@@ -15,7 +15,7 @@ type Options struct {
 	StartURLs []string
 
 	// StartRequestsFunc called on scraper start
-	StartRequestsFunc func() []*Request
+	StartRequestsFunc func(g *Geziyor)
 
 	// ParseFunc is callback of StartURLs response.
 	ParseFunc func(r *Response)
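
For reference, here is a minimal sketch of how a caller drives the reworked API after this change, modeled on `TestStartRequestsFunc` above. The quotes.toscrape.com URL and the `ParseFunc` body are illustrative placeholders, not part of the patch:

```go
package main

import (
	"fmt"
	"net/http"

	"github.com/geziyor/geziyor"
)

func main() {
	geziyor.NewGeziyor(geziyor.Options{
		// StartRequestsFunc now receives the *Geziyor instance and streams
		// requests into g.Requests instead of returning a []*Request slice.
		StartRequestsFunc: func(g *geziyor.Geziyor) {
			req, _ := http.NewRequest("GET", "http://quotes.toscrape.com/", nil)
			g.Requests <- &geziyor.Request{Request: req}
		},
		ParseFunc: func(r *geziyor.Response) {
			fmt.Println(r.Status) // placeholder: report each response's HTTP status
		},
	}).Start()
}
```

Because `Start` launches the goroutine that ranges over `g.Requests` before invoking `StartRequestsFunc`, sends on the unbuffered channel are consumed as they arrive. This lets `StartRequestsFunc` emit an arbitrarily long stream of requests, as `TestAlmaany` does with its generated two-letter queries, without first materializing a slice.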