Request callbacks added.

Recover from all panics and continue scraping.
Only parse HTML if response is HTML.
This commit is contained in:
Musab Gültekin
2019-06-09 21:13:30 +03:00
parent 7abc7a370d
commit ca2414c5c8
5 changed files with 48 additions and 18 deletions

View File

@@ -14,7 +14,7 @@ func TestGeziyor_Simple(t *testing.T) {
geziyor.NewGeziyor(geziyor.Options{
StartURLs: []string{"http://api.ipify.org"},
ParseFunc: func(r *geziyor.Response) {
- fmt.Println(r.Doc.Text())
+ fmt.Println(string(r.Body))
},
}).Start()
}
@@ -24,8 +24,8 @@ func TestGeziyor_IP(t *testing.T) {
StartURLs: []string{"http://api.ipify.org"},
Cache: httpcache.NewMemoryCache(),
ParseFunc: func(r *geziyor.Response) {
- fmt.Println(r.Doc.Text())
- r.Exports <- r.Doc.Text()
+ fmt.Println(string(r.Body))
+ r.Exports <- string(r.Body)
r.Geziyor.Get("http://api.ipify.org")
},
})
@@ -36,7 +36,7 @@ func TestGeziyor_HTML(t *testing.T) {
gez := geziyor.NewGeziyor(geziyor.Options{
StartURLs: []string{"http://quotes.toscrape.com/"},
ParseFunc: func(r *geziyor.Response) {
- r.Doc.Find("div.quote").Each(func(i int, s *goquery.Selection) {
+ r.DocHTML.Find("div.quote").Each(func(i int, s *goquery.Selection) {
// Export Data
r.Exports <- map[string]interface{}{
"text": s.Find("span.text").Text(),
@@ -48,7 +48,7 @@ func TestGeziyor_HTML(t *testing.T) {
})
// Next Page
- if href, ok := r.Doc.Find("li.next > a").Attr("href"); ok {
+ if href, ok := r.DocHTML.Find("li.next > a").Attr("href"); ok {
go r.Geziyor.Get(r.JoinURL(href))
}
},
@@ -62,7 +62,7 @@ func TestGeziyor_Concurrent_Requests(t *testing.T) {
StartURLs: []string{"http://quotes.toscrape.com/"},
ParseFunc: func(r *geziyor.Response) {
//r.Exports <- map[string]interface{}{"href": r.Request.URL.String()}
- r.Doc.Find("a").Each(func(i int, s *goquery.Selection) {
+ r.DocHTML.Find("a").Each(func(i int, s *goquery.Selection) {
if href, ok := s.Attr("href"); ok {
go r.Geziyor.Get(r.JoinURL(href))
}