Making new requests in StartRequestsFunc is simplified by using channels

Musab Gültekin 2019-06-12 21:54:57 +03:00
parent f7f4e401e2
commit d56ea161a5
4 changed files with 20 additions and 17 deletions
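In short: `StartRequestsFunc` no longer builds and returns a `[]*Request`; it now receives the `*Geziyor` instance and sends each request into the new exported `Requests` channel, where `Start` dispatches it. A minimal sketch of the new usage, mirroring the updated test below (the quotes.toscrape.com URL comes from the test suite; error handling elided):

```go
package main

import (
	"net/http"

	"github.com/geziyor/geziyor"
)

func main() {
	geziyor.NewGeziyor(geziyor.Options{
		// New signature: receive the scraper and push requests
		// into g.Requests instead of returning a slice.
		StartRequestsFunc: func(g *geziyor.Geziyor) {
			req, _ := http.NewRequest("GET", "http://quotes.toscrape.com/", nil)
			g.Requests <- &geziyor.Request{Request: req}
		},
		ParseFunc: func(r *geziyor.Response) {
			r.Exports <- []string{r.Status}
		},
	}).Start()
}
```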

README.md

@@ -14,6 +14,9 @@ Geziyor is a blazing fast web crawling and web scraping framework, used to crawl
 See scraper [Options](https://godoc.org/github.com/geziyor/geziyor#Options) for customization.
+## Status
+We highly recommend you to use go modules. As this project is in **development stage** right now and **API is not stable**.
 ## Usage
 Simplest usage
@@ -26,10 +29,6 @@ geziyor.NewGeziyor(geziyor.Options{
 }).Start()
 ```
-## Status
-We highly recommend you to use go modules. As this project is in **development stage** right now and **API is not stable**.
 ## Installation
     go get github.com/geziyor/geziyor

geziyor.go

@@ -28,6 +28,8 @@ type Geziyor struct {
 	wg  sync.WaitGroup
 	opt Options
+	Requests chan *Request
 	visitedURLS []string
 	semGlobal   chan struct{}
 	semHosts    struct {
@@ -48,7 +50,8 @@ func NewGeziyor(opt Options) *Geziyor {
 		client: &http.Client{
 			Timeout: time.Second * 180, // Google's timeout
 		},
-		opt: opt,
+		Requests: make(chan *Request),
+		opt:      opt,
 	}
 	if opt.Cache != nil {
@@ -88,9 +91,12 @@ func (g *Geziyor) Start() {
 			go g.Get(startURL, g.opt.ParseFunc)
 		}
 	} else {
-		for _, req := range g.opt.StartRequestsFunc() {
-			go g.Do(req, g.opt.ParseFunc)
-		}
+		go func() {
+			for req := range g.Requests {
+				go g.Do(req, g.opt.ParseFunc)
+			}
+		}()
+		g.opt.StartRequestsFunc(g)
 	}
 	time.Sleep(time.Millisecond)
@@ -190,12 +196,12 @@ func (g *Geziyor) Do(req *Request, callback func(resp *Response)) {
 		response.DocHTML, _ = goquery.NewDocumentFromReader(bytes.NewReader(body))
 	}
-	// Export Functions
+	// Exporter functions
 	for _, exp := range g.opt.Exporters {
 		go exp.Export(&response)
 	}
-	// Drain exports chan if no exporters added
+	// Drain exports chan if no exporter functions added
 	if len(g.opt.Exporters) == 0 {
 		go func() {
 			for range response.Exports {
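A note on the `Start` change above: `Requests` is created unbuffered (`make(chan *Request)`), so every send inside `StartRequestsFunc` blocks until the dispatch goroutine receives it. That is why the receiver loop is started before `g.opt.StartRequestsFunc(g)` is called. The same pattern in isolation (hypothetical names, plain strings standing in for requests):

```go
package main

import "fmt"

func main() {
	requests := make(chan string) // unbuffered, like g.Requests

	// Start the consumer first; with no receiver running,
	// the first send below would block forever.
	done := make(chan struct{})
	go func() {
		for r := range requests {
			fmt.Println("dispatching", r) // stands in for go g.Do(req, ...)
		}
		close(done)
	}()

	// The producer, analogous to StartRequestsFunc: each send
	// blocks until the dispatcher picks the value up.
	for _, u := range []string{"http://a.example/", "http://b.example/"} {
		requests <- u
	}
	close(requests)
	<-done
}
```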

geziyor_test.go

@@ -92,9 +92,9 @@ func TestRandomDelay(t *testing.T) {
 func TestStartRequestsFunc(t *testing.T) {
 	geziyor.NewGeziyor(geziyor.Options{
-		StartRequestsFunc: func() []*geziyor.Request {
+		StartRequestsFunc: func(g *geziyor.Geziyor) {
 			req, _ := http.NewRequest("GET", "http://quotes.toscrape.com/", nil)
-			return []*geziyor.Request{{Request: req}}
+			g.Requests <- &geziyor.Request{Request: req}
 		},
 		ParseFunc: func(r *geziyor.Response) {
 			r.Exports <- []string{r.Status}
@@ -108,16 +108,14 @@ func TestAlmaany(t *testing.T) {
 	geziyor.NewGeziyor(geziyor.Options{
 		AllowedDomains: []string{"www.almaany.com"},
-		StartRequestsFunc: func() []*geziyor.Request {
+		StartRequestsFunc: func(g *geziyor.Geziyor) {
 			base := "http://www.almaany.com/suggest.php?term=%c%c&lang=turkish&t=d"
-			var requests []*geziyor.Request
 			for _, c1 := range alphabet {
 				for _, c2 := range alphabet {
 					req, _ := http.NewRequest("GET", fmt.Sprintf(base, c1, c2), nil)
-					requests = append(requests, &geziyor.Request{Request: req, Meta: map[string]interface{}{"word": string(c1) + string(c2)}})
+					g.Requests <- &geziyor.Request{Request: req, Meta: map[string]interface{}{"word": string(c1) + string(c2)}}
 				}
 			}
-			return requests
 		},
 		ConcurrentRequests: 10,
 		ParseFunc:          parseAlmaany,

options.go

@@ -15,7 +15,7 @@ type Options struct {
 	StartURLs []string
 	// StartRequestsFunc called on scraper start
-	StartRequestsFunc func() []*Request
+	StartRequestsFunc func(g *Geziyor)
 	// ParseFunc is callback of StartURLs response.
 	ParseFunc func(r *Response)
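The practical effect of this signature change is that request generation becomes streaming rather than batch: callers like the `TestAlmaany` loop no longer accumulate every request in a slice before the scraper can begin. A hedged sketch of that pattern against a hypothetical paginated endpoint (the example.com URL and page count are illustrative, not from the project):

```go
package main

import (
	"fmt"
	"net/http"

	"github.com/geziyor/geziyor"
)

func main() {
	geziyor.NewGeziyor(geziyor.Options{
		StartRequestsFunc: func(g *geziyor.Geziyor) {
			// Each request is handed off as soon as it is built;
			// nothing like the old []*geziyor.Request slice exists.
			for page := 1; page <= 100; page++ {
				url := fmt.Sprintf("http://example.com/items?page=%d", page)
				req, _ := http.NewRequest("GET", url, nil)
				g.Requests <- &geziyor.Request{Request: req}
			}
		},
		ParseFunc: func(r *geziyor.Response) {
			r.Exports <- []string{r.Status}
		},
	}).Start()
}
```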