main.go
package main

import (
	"fmt"
	"io"
	"net/http"
	"regexp"
)

func main() {
	// foundURLs records every distinct URL encountered during the crawl.
	foundURLs := map[string]string{}
	Crawl("http://www.google.com", 1, foundURLs)
	fmt.Println("Found URL count:", len(foundURLs))
}

// Crawl fetches the page at url and recursively collects the URLs found on it,
// recording each distinct URL in foundURLs.
func Crawl(url string, depth int, foundURLs map[string]string) {
	if depth < 0 {
		return
	}
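
	// depth tracks how many more levels of links may still be followed; it is
	// decremented once per fetched page further below.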

	// Fetch the page.
	response, err := http.Get(url)
	if err != nil {
		fmt.Println(err)
		return
	}
	defer response.Body.Close()
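	// The deferred Close releases the response body when Crawl returns,
	// including on the early error return below.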

	// Read the whole response body and convert it to a string for matching.
	dataBytes, err := io.ReadAll(response.Body)
	if err != nil {
		fmt.Println(err)
		return
	}
	pageContent := string(dataBytes)

	// Regular expression that matches absolute http, https, and ftp URLs.
	regex := regexp.MustCompile(`(http|ftp|https):\/\/([\w\-_]+(?:(?:\.[\w\-_]+)+))([\w\-\.,@?^=%&:/~\+#]*[\w\-\@?^=%&/~\+#])?`)
	URLs := regex.FindAllStringSubmatch(pageContent, -1)
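	// Each entry of URLs is a submatch slice: index 0 holds the full matched
	// URL, while the capture groups split it into scheme, host, and the rest.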

	depth--
	for i := range URLs {
		foundURL := URLs[i][0]
		if _, ok := foundURLs[foundURL]; !ok {
			// Record and report the URL, then follow it one level deeper.
			foundURLs[foundURL] = ""
			fmt.Printf("URL: %s\n", foundURL)
			Crawl(foundURL, depth, foundURLs)
		}
	}
}