package main

import (
	"fmt"
	"log"
	"math/rand"
	"net/http"
	"strings"
	"sync"
	"time"

	"github.com/PuerkitoBio/goquery"
)

// SeoData is a struct of useful SEO data
type SeoData struct {
	URL             string
	Title           string
	H1              string
	MetaDescription string
	StatusCode      int
}

// Parser defines the parsing interface
type Parser interface {
	GetSeoData(resp *http.Response) (SeoData, error)
}

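// Because Parser is an interface, custom extraction logic can be swapped in
// without touching the crawler. As a rough sketch (not part of the original
// file; StatusParser is a hypothetical name), a parser that only records the
// URL and status code might look like:
//
//	type StatusParser struct{}
//
//	func (s StatusParser) GetSeoData(resp *http.Response) (SeoData, error) {
//		return SeoData{
//			URL:        resp.Request.URL.String(),
//			StatusCode: resp.StatusCode,
//		}, nil
//	}
//
// It could then be passed to ScrapeSitemap in place of DefaultParser.
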
// DefaultParser is an empty struct for implementing the default parser
type DefaultParser struct {
}

var userAgents = []string{
	"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/61.0.3163.100 Safari/537.36",
	"Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/61.0.3163.100 Safari/537.36",
	"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/61.0.3163.100 Safari/537.36",
	"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) AppleWebKit/604.1.38 (KHTML, like Gecko) Version/11.0 Safari/604.1.38",
	"Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:56.0) Gecko/20100101 Firefox/56.0",
	"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13) AppleWebKit/604.1.38 (KHTML, like Gecko) Version/11.0 Safari/604.1.38",
}

// randomUserAgent picks a random user agent string so requests do not all
// share the same header. The global rand source is seeded automatically on
// Go 1.20+, so no explicit rand.Seed call is needed.
func randomUserAgent() string {
	return userAgents[rand.Intn(len(userAgents))]
}

// isSitemap splits a list of URLs into nested sitemap files and regular pages.
func isSitemap(urls []string) ([]string, []string) {
	sitemapFiles := []string{}
	pages := []string{}
	for _, page := range urls {
		if strings.Contains(page, "xml") {
			fmt.Println("Found Sitemap", page)
			sitemapFiles = append(sitemapFiles, page)
		} else {
			pages = append(pages, page)
		}
	}
	return sitemapFiles, pages
}

// extractSitemapURLs walks the sitemap starting at startURL, following nested
// sitemap files concurrently and collecting the page URLs they contain.
// The counter n tracks outstanding work: every spawned goroutine sends exactly
// one message back on worklist, so the loop exits once everything is processed.
func extractSitemapURLs(startURL string) []string {
	worklist := make(chan []string)
	toCrawl := []string{}
	var mu sync.Mutex
	n := 1
	go func() { worklist <- []string{startURL} }()
	for ; n > 0; n-- {
		list := <-worklist
		for _, link := range list {
			n++
			go func(link string) {
				response, err := makeRequest(link)
				if err != nil {
					log.Printf("Error retrieving URL: %s", link)
					worklist <- nil
					return
				}
				urls, err := extractUrls(response)
				if err != nil {
					log.Printf("Error extracting document from response, URL: %s", link)
					worklist <- nil
					return
				}
				sitemapFiles, pages := isSitemap(urls)
				mu.Lock()
				toCrawl = append(toCrawl, pages...)
				mu.Unlock()
				worklist <- sitemapFiles
			}(link)
		}
	}
	return toCrawl
}

// makeRequest performs a GET request with a timeout and a randomized user agent.
func makeRequest(url string) (*http.Response, error) {
	client := http.Client{
		Timeout: 10 * time.Second,
	}
	req, err := http.NewRequest("GET", url, nil)
	if err != nil {
		return nil, err
	}
	req.Header.Set("User-Agent", randomUserAgent())
	res, err := client.Do(req)
	if err != nil {
		return nil, err
	}
	return res, nil
}

// scrapeUrls fetches every URL, with at most `concurrency` requests in flight,
// and collects the parsed SEO data. The results slice is guarded by a mutex
// because the worker goroutines append to it concurrently.
func scrapeUrls(urls []string, parser Parser, concurrency int) []SeoData {
	tokens := make(chan struct{}, concurrency)
	worklist := make(chan []string)
	results := []SeoData{}
	var mu sync.Mutex
	n := 1
	go func() { worklist <- urls }()
	for ; n > 0; n-- {
		list := <-worklist
		for _, url := range list {
			if url != "" {
				n++
				go func(url string) {
					log.Printf("Requesting URL: %s", url)
					res, err := scrapePage(url, tokens, parser)
					if err != nil {
						log.Printf("Encountered error, URL: %s", url)
					} else {
						mu.Lock()
						results = append(results, res)
						mu.Unlock()
					}
					worklist <- []string{}
				}(url)
			}
		}
	}
	return results
}

// extractUrls pulls every <loc> entry out of a sitemap XML response.
func extractUrls(response *http.Response) ([]string, error) {
	doc, err := goquery.NewDocumentFromResponse(response)
	if err != nil {
		return nil, err
	}
	results := []string{}
	sel := doc.Find("loc")
	for i := range sel.Nodes {
		loc := sel.Eq(i)
		results = append(results, loc.Text())
	}
	return results, nil
}

// scrapePage fetches a single page, respecting the concurrency limit, and
// hands the response to the parser.
func scrapePage(url string, token chan struct{}, parser Parser) (SeoData, error) {
	res, err := crawlPage(url, token)
	if err != nil {
		return SeoData{}, err
	}
	data, err := parser.GetSeoData(res)
	if err != nil {
		return SeoData{}, err
	}
	return data, nil
}

// crawlPage acquires a slot in the tokens channel, which acts as a counting
// semaphore limiting the number of concurrent requests, makes the request,
// then releases the slot.
func crawlPage(url string, tokens chan struct{}) (*http.Response, error) {
	tokens <- struct{}{}
	resp, err := makeRequest(url)
	<-tokens
	if err != nil {
		return nil, err
	}
	return resp, nil
}

// GetSeoData is the concrete implementation of the default parser
func (d DefaultParser) GetSeoData(resp *http.Response) (SeoData, error) {
	doc, err := goquery.NewDocumentFromResponse(resp)
	if err != nil {
		return SeoData{}, err
	}
	result := SeoData{}
	result.URL = resp.Request.URL.String()
	result.StatusCode = resp.StatusCode
	result.Title = doc.Find("title").First().Text()
	result.H1 = doc.Find("h1").First().Text()
	result.MetaDescription, _ = doc.Find("meta[name^=description]").Attr("content")
	return result, nil
}

// ScrapeSitemap scrapes a given sitemap
func ScrapeSitemap(url string, parser Parser, concurrency int) []SeoData {
	results := extractSitemapURLs(url)
	res := scrapeUrls(results, parser, concurrency)
	return res
}

func main() {
	p := DefaultParser{}
	results := ScrapeSitemap("https://www.google.com/sitemap.xml", p, 10)
	for _, res := range results {
		fmt.Println(res)
	}
}