main.go
package main

import (
	"os"
	"strings"

	"go-link-crawler/config"
	"go-link-crawler/log"
	"go-link-crawler/services"
)
func main() {
	// set log level
	log.SetLevel(log.TraceLevel)

	conf := config.Init()
	crawler := services.NewCrawlerService(conf.CrawlerConfig)

	if len(os.Args) < 2 {
		log.Fatalf("use filepath as first argument")
	}
	arg := os.Args[1]

	// os.ReadFile replaces the deprecated os.Open + ioutil.ReadAll pair
	// and closes the file itself
	data, err := os.ReadFile(arg)
	if err != nil {
		log.Fatalf("cannot read %s err: %v", arg, err)
	}

	// the input file holds one link per line; blank lines are skipped below
	links := strings.Split(string(data), "\n")
	crawlerProcesses := make([]*services.CrawlerProcess, 0)
	for _, link := range links {
		link = strings.TrimSpace(link)
		if link == "" {
			continue // ignore blank lines in the input file
		}
		p, err := crawler.Start(link)
		if err != nil {
			log.Errorf("crawler.Start err: %v", err)
			continue // don't collect a nil process; GetResult would panic on it
		}
		crawlerProcesses = append(crawlerProcesses, p)
	}
	// get results: log per-domain stats and accumulate the request rate
	var req float32
	count := 0
	for _, p := range crawlerProcesses {
		res := p.GetResult()
		log.Infof("Domain: %s, Links count: %d, External links count: %d, req/sec: %.2f", res.Domain, res.InnerLinksCount, res.ExternalLinksCount, res.RequestsPerSec)
		count++
		req += res.RequestsPerSec
	}

	// report the average requests/sec across all finished processes
	if count > 0 {
		req /= float32(count)
	}
	log.Infof("requests/sec: %.2f", req)

	crawler.Close()
}
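
For reference, a minimal run might look like the following; links.txt is only an example name, since any path passed as the first argument works, one URL per line:

$ cat links.txt
https://example.com
https://golang.org

$ go run main.go links.txt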
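
The config, log, and services packages are not shown on this page. From the calls above, main.go assumes roughly the following shape for the services package; this is a sketch inferred from usage, and any name not visible in main.go (such as CrawlerResult, or defining CrawlerConfig here rather than in the config package) is an assumption, not the project's actual code:

package services

// CrawlerConfig stands in for the value of conf.CrawlerConfig; its real
// definition would live in the project's config package (assumption).
type CrawlerConfig struct{}

// CrawlerResult carries the fields main.go reads from p.GetResult();
// the type name itself is assumed.
type CrawlerResult struct {
	Domain             string
	InnerLinksCount    int
	ExternalLinksCount int
	RequestsPerSec     float32
}

// CrawlerProcess is the handle returned by Start for one crawl.
type CrawlerProcess struct {
	result CrawlerResult
}

// GetResult returns the stats collected for the crawl.
func (p *CrawlerProcess) GetResult() CrawlerResult { return p.result }

// CrawlerService owns shared crawler state that Close releases.
type CrawlerService struct{}

// NewCrawlerService builds a service from the crawler section of the config.
func NewCrawlerService(conf CrawlerConfig) *CrawlerService { return &CrawlerService{} }

// Start begins crawling link and returns a handle to its eventual result.
func (s *CrawlerService) Start(link string) (*CrawlerProcess, error) {
	return &CrawlerProcess{}, nil
}

// Close releases whatever the service holds open.
func (s *CrawlerService) Close() {}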