main.go
package main

import (
	"flag"
	"log"
	"net/http"
	"net/url"
	"time"

	"github.com/RonniSkansing/go-rip-git/scraper"
)

func main() {
	// Command-line flags controlling the target and scraper behaviour.
	var (
		target          = flag.String("u", "", "URL to scan")
		scrape          = flag.Bool("s", false, "scrape source files")
		idleConnTimeout = flag.Int("t", 5, "request connection idle timeout in seconds")
		gitPath         = flag.String("p", "/.git/", "the absolute path to the git folder")
		concurrency     = flag.Int("c", 100, "concurrent scrape requests")
		wait            = flag.Duration("w", 0*time.Second, "time to wait between each request, for example 5s")
		veryVerbose     = flag.Bool("vv", false, "very verbose output")
	)
	flag.Parse()

	// A target URL is required; without one, print usage and exit.
	if len(*target) == 0 {
		flag.PrintDefaults()
		return
	}

	c := scraper.Config{
		ConcurrentRequests:     *concurrency,
		WaitTimeBetweenRequest: *wait,
		VeryVerbose:            *veryVerbose,
	}
	// The scraper gets its own HTTP client and an error handler that logs
	// individual scrape errors without stopping the run.
	sr := scraper.NewScraper(
		&http.Client{Timeout: time.Duration(*idleConnTimeout) * time.Second},
		&c,
		func(err error) {
			log.Printf("scrape error: %v", err)
		},
	)

	// Build the full URL to the exposed .git directory.
	uri, err := url.ParseRequestURI(*target + *gitPath)
	if err != nil {
		log.Fatalf("invalid URL: %v", err)
	}

	if *scrape {
		// Download the source files reachable through the git index.
		if err := sr.Scrape(uri); err != nil {
			log.Fatalf("failed to scrape: %v", err)
		}
	} else {
		// Only list the index entries (SHA and file name) without downloading.
		entries, err := sr.GetEntries(uri)
		if err != nil {
			log.Fatalf("failed to get index entries: %v", err)
		}
		log.Println("Contents of " + uri.String())
		for _, entry := range entries {
			log.Println(entry.Sha + " " + entry.FileName)
		}
	}
}
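
// A minimal usage sketch, assuming the tool is run directly from this file;
// the target host is hypothetical. The first command only lists the exposed
// git index entries, the second scrapes the source files with 50 concurrent
// requests and a 2-second pause between requests (flags as defined above):
//
//   go run main.go -u https://example.com
//   go run main.go -u https://example.com -s -c 50 -w 2s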