Global and Domain Concurrency limit implemented. Updated README

This commit is contained in:
Musab Gültekin 2019-06-09 11:53:40 +03:00
parent a9aaf86df3
commit d967555b62
4 changed files with 80 additions and 9 deletions

View File

@ -1,5 +1,5 @@
# Geziyor # Geziyor
Geziyor is a blazing fast web crawling and web scraping framework, used to crawl websites and extract structured data from their pages. It can be used for a wide range of purposes, from data mining to monitoring and automated testing.
[![GoDoc](https://godoc.org/github.com/geziyor/geziyor?status.svg)](https://godoc.org/github.com/geziyor/geziyor) [![GoDoc](https://godoc.org/github.com/geziyor/geziyor?status.svg)](https://godoc.org/github.com/geziyor/geziyor)
[![report card](https://goreportcard.com/badge/github.com/geziyor/geziyor)](http://goreportcard.com/report/geziyor/geziyor) [![report card](https://goreportcard.com/badge/github.com/geziyor/geziyor)](http://goreportcard.com/report/geziyor/geziyor)
@ -8,9 +8,24 @@ Geziyor is a fast web crawling and web scraping framework, used to crawl website
- 1.000+ Requests/Sec - 1.000+ Requests/Sec
- Caching - Caching
- Automatic Data Exporting - Automatic Data Exporting
- Limit Concurrency Global/Per Domain
- Automatic response decoding to UTF-8
## Usage
Simplest usage
```go
geziyor.NewGeziyor(geziyor.Options{
StartURLs: []string{"http://api.ipify.org"},
ParseFunc: func(r *geziyor.Response) {
fmt.Println(r.Doc.Text())
},
}).Start()
```
Export all quotes and authors to out.json file.
```go ```go
geziyor := NewGeziyor(Opt{ geziyor := NewGeziyor(Opt{
StartURLs: []string{"http://quotes.toscrape.com/"}, StartURLs: []string{"http://quotes.toscrape.com/"},

View File

@ -21,6 +21,11 @@ type Geziyor struct {
opt Options opt Options
visitedURLS []string visitedURLS []string
semGlobal chan struct{}
semHosts struct {
sync.RWMutex
hostSems map[string]chan struct{}
}
} }
func init() { func init() {
@ -43,6 +48,15 @@ func NewGeziyor(opt Options) *Geziyor {
if opt.Timeout != 0 { if opt.Timeout != 0 {
geziyor.client.Timeout = opt.Timeout geziyor.client.Timeout = opt.Timeout
} }
if opt.ConcurrentRequests != 0 {
geziyor.semGlobal = make(chan struct{}, opt.ConcurrentRequests)
}
if opt.ConcurrentRequestsPerDomain != 0 {
geziyor.semHosts = struct {
sync.RWMutex
hostSems map[string]chan struct{}
}{hostSems: make(map[string]chan struct{})}
}
return geziyor return geziyor
} }
@ -86,12 +100,15 @@ func (g *Geziyor) Do(req *http.Request) {
return return
} }
// Log
log.Println("Fetching: ", req.URL.String())
// Modify Request // Modify Request
req.Header.Set("Accept-Charset", "utf-8") req.Header.Set("Accept-Charset", "utf-8")
// Acquire Semaphore
g.acquire(req)
// Log
log.Println("Fetching: ", req.URL.String())
// Do request // Do request
resp, err := g.client.Do(req) resp, err := g.client.Do(req)
if resp != nil { if resp != nil {
@ -99,6 +116,7 @@ func (g *Geziyor) Do(req *http.Request) {
} }
if err != nil { if err != nil {
log.Printf("Response error: %v\n", err) log.Printf("Response error: %v\n", err)
g.release(req)
return return
} }
@ -106,6 +124,7 @@ func (g *Geziyor) Do(req *http.Request) {
reader, err := charset.NewReader(resp.Body, resp.Header.Get("Content-Type")) reader, err := charset.NewReader(resp.Body, resp.Header.Get("Content-Type"))
if err != nil { if err != nil {
log.Printf("Determine encoding error: %v\n", err) log.Printf("Determine encoding error: %v\n", err)
g.release(req)
return return
} }
@ -113,9 +132,13 @@ func (g *Geziyor) Do(req *http.Request) {
body, err := ioutil.ReadAll(reader) body, err := ioutil.ReadAll(reader)
if err != nil { if err != nil {
log.Printf("Reading Body error: %v\n", err) log.Printf("Reading Body error: %v\n", err)
g.release(req)
return return
} }
// Release Semaphore
g.release(req)
// Create Document // Create Document
doc, _ := goquery.NewDocumentFromReader(bytes.NewReader(body)) doc, _ := goquery.NewDocumentFromReader(bytes.NewReader(body))
@ -136,18 +159,45 @@ func (g *Geziyor) Do(req *http.Request) {
time.Sleep(time.Millisecond) time.Sleep(time.Millisecond)
} }
// acquire blocks until req is allowed to proceed under the configured
// concurrency limits. It takes one slot from the global semaphore (if
// ConcurrentRequests is set) and one slot from the per-host semaphore
// (if ConcurrentRequestsPerDomain is set), creating the per-host
// semaphore lazily on first use. Every successful acquire must be
// paired with exactly one release for the same request.
func (g *Geziyor) acquire(req *http.Request) {
	if g.opt.ConcurrentRequests != 0 {
		g.semGlobal <- struct{}{}
	}
	if g.opt.ConcurrentRequestsPerDomain != 0 {
		// Fast path: the host's semaphore usually already exists.
		g.semHosts.RLock()
		hostSem, exists := g.semHosts.hostSems[req.Host]
		g.semHosts.RUnlock()
		if !exists {
			g.semHosts.Lock()
			// Re-check under the write lock: another goroutine may have
			// created the semaphore between RUnlock and Lock. Without this
			// second check, two goroutines could install different channels
			// for the same host; a request acquired on the overwritten
			// channel would then be released on the new one, silently
			// breaking the per-domain limit.
			hostSem, exists = g.semHosts.hostSems[req.Host]
			if !exists {
				hostSem = make(chan struct{}, g.opt.ConcurrentRequestsPerDomain)
				g.semHosts.hostSems[req.Host] = hostSem
			}
			g.semHosts.Unlock()
		}
		hostSem <- struct{}{}
	}
}
// release frees the semaphore slots taken by acquire for req: one slot
// on the global semaphore and one on the per-host semaphore, depending
// on which limits are configured. It must be called exactly once after
// each successful acquire.
func (g *Geziyor) release(req *http.Request) {
	if g.opt.ConcurrentRequests != 0 {
		<-g.semGlobal
	}
	if g.opt.ConcurrentRequestsPerDomain != 0 {
		// Guard the map read: acquire may be inserting a semaphore for
		// another host concurrently, and unsynchronized read/write on a
		// Go map is a data race.
		g.semHosts.RLock()
		hostSem := g.semHosts.hostSems[req.Host]
		g.semHosts.RUnlock()
		<-hostSem
	}
}
func checkURL(parsedURL *url.URL, g *Geziyor) bool { func checkURL(parsedURL *url.URL, g *Geziyor) bool {
rawURL := parsedURL.String() rawURL := parsedURL.String()
// Check for allowed domains // Check for allowed domains
if len(g.opt.AllowedDomains) != 0 && !contains(g.opt.AllowedDomains, parsedURL.Host) { if len(g.opt.AllowedDomains) != 0 && !contains(g.opt.AllowedDomains, parsedURL.Host) {
log.Printf("Domain not allowed: %s\n", parsedURL.Host) //log.Printf("Domain not allowed: %s\n", parsedURL.Host)
return false return false
} }
// Check for duplicate requests // Check for duplicate requests
if contains(g.visitedURLS, rawURL) { if contains(g.visitedURLS, rawURL) {
log.Printf("URL already visited %s\n", rawURL) //log.Printf("URL already visited %s\n", rawURL)
return false return false
} }
g.visitedURLS = append(g.visitedURLS, rawURL) g.visitedURLS = append(g.visitedURLS, rawURL)

View File

@ -12,7 +12,7 @@ func TestGeziyor_Simple(t *testing.T) {
geziyor.NewGeziyor(geziyor.Options{ geziyor.NewGeziyor(geziyor.Options{
StartURLs: []string{"http://api.ipify.org"}, StartURLs: []string{"http://api.ipify.org"},
ParseFunc: func(r *geziyor.Response) { ParseFunc: func(r *geziyor.Response) {
fmt.Println(string(r.Body)) fmt.Println(r.Doc.Text())
}, },
}).Start() }).Start()
} }

View File

@ -21,4 +21,10 @@ type Options struct {
// Memory Cache: httpcache.NewMemoryCache() // Memory Cache: httpcache.NewMemoryCache()
// Disk Cache: diskcache.New(".cache") // Disk Cache: diskcache.New(".cache")
Cache httpcache.Cache Cache httpcache.Cache
// Concurrent requests limit
ConcurrentRequests int
// Concurrent requests per domain limit
ConcurrentRequestsPerDomain int
} }