// ex8.10 is a web-mirroring tool that can be gracefully interrupted using ctrl-c.
//
// Based on ex8.7.
package main

import (
"bytes"
"flag"
"fmt"
"io"
"log"
"net/http"
"net/url"
"os"
"os/signal"
"path/filepath"
"strings"
"sync"
"golang.org/x/net/html"
)

// tokens is a counting semaphore used to
// enforce a limit of 20 concurrent requests.
var tokens = make(chan struct{}, 20)
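// maxDepth bounds the crawl depth; set by the -d flag.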
var maxDepth int
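// seen records URLs that have already been scheduled for crawling;
// guarded by seenLock.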
var seen = make(map[string]bool)
var seenLock = sync.Mutex{}
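// base is the URL being mirrored; links to other hosts are not followed.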
var base *url.URL
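// cancel is closed on interrupt to abort all in-flight HTTP requests.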
var cancel = make(chan struct{})
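
// crawl fetches url, saves it locally, and, up to maxDepth levels deep,
// crawls the local links found in it.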
func crawl(url string, depth int, wg *sync.WaitGroup) {
defer wg.Done()
tokens <- struct{}{} // acquire a token
urls, err := visit(url)
<-tokens // release token
if err != nil {
log.Printf("visit %s: %s", url, err)
}
if depth >= maxDepth {
return
}
for _, link := range urls {
seenLock.Lock()
if seen[link] {
seenLock.Unlock()
continue
}
seen[link] = true
seenLock.Unlock()
wg.Add(1)
go crawl(link, depth+1, wg)
}
}
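
// forEachNode calls pre(x) and post(x) for each node x in the tree rooted
// at n, in pre- and post-order respectively; either function may be nil.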
// Copied from gopl.io/ch5/outline2.
func forEachNode(n *html.Node, pre, post func(n *html.Node)) {
if pre != nil {
pre(n)
}
for c := n.FirstChild; c != nil; c = c.NextSibling {
forEachNode(c, pre, post)
}
if post != nil {
post(n)
}
}
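
// linkNodes returns all <a> element nodes in the document rooted at n.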
func linkNodes(n *html.Node) []*html.Node {
var links []*html.Node
visitNode := func(n *html.Node) {
if n.Type == html.ElementNode && n.Data == "a" {
links = append(links, n)
}
}
forEachNode(n, visitNode, nil)
return links
}
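
// linkURLs resolves the href attribute of each link node against base and
// returns the resulting URLs, skipping bad and non-local ones.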
func linkURLs(linkNodes []*html.Node, base *url.URL) []string {
var urls []string
for _, n := range linkNodes {
for _, a := range n.Attr {
if a.Key != "href" {
continue
}
link, err := base.Parse(a.Val)
// ignore bad and non-local URLs
if err != nil {
log.Printf("skipping %q: %s", a.Val, err)
continue
}
if link.Host != base.Host {
//log.Printf("skipping %q: non-local host", a.Val)
continue
}
urls = append(urls, link.String())
}
}
return urls
}

// rewriteLocalLinks rewrites local links to be relative; save stores
// extensionless paths as index.html files, e.g. /hi/there ->
// /hi/there/index.html.
func rewriteLocalLinks(linkNodes []*html.Node, base *url.URL) {
for _, n := range linkNodes {
for i, a := range n.Attr {
if a.Key != "href" {
continue
}
link, err := base.Parse(a.Val)
if err != nil || link.Host != base.Host {
continue // ignore bad and non-local URLs
}
// Clear fields so the url is formatted as /PATH?QUERY#FRAGMENT
link.Scheme = ""
link.Host = ""
link.User = nil
a.Val = link.String()
n.Attr[i] = a
}
}
}
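
// visit fetches rawurl and saves it to a local file. For HTML pages it
// rewrites local links to relative ones and returns the URLs they refer to.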
func visit(rawurl string) (urls []string, err error) {
	fmt.Println(rawurl)
	req, err := http.NewRequest("GET", rawurl, nil)
	if err != nil {
		return nil, err
	}
	req.Cancel = cancel // closing cancel aborts this request
	resp, err := http.DefaultClient.Do(req)
if err != nil {
return nil, err
}
defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		return nil, fmt.Errorf("GET %s: %s", rawurl, resp.Status)
	}
u, err := base.Parse(rawurl)
if err != nil {
return nil, err
}
if base.Host != u.Host {
log.Printf("not saving %s: non-local", rawurl)
return nil, nil
}
var body io.Reader
contentType := resp.Header["Content-Type"]
if strings.Contains(strings.Join(contentType, ","), "text/html") {
doc, err := html.Parse(resp.Body)
resp.Body.Close()
if err != nil {
return nil, fmt.Errorf("parsing %s as HTML: %v", u, err)
}
nodes := linkNodes(doc)
urls = linkURLs(nodes, u) // Extract links before they're rewritten.
rewriteLocalLinks(nodes, u)
b := &bytes.Buffer{}
err = html.Render(b, doc)
if err != nil {
log.Printf("render %s: %s", u, err)
}
body = b
}
err = save(resp, body)
return urls, err
}
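
// save writes the response body to a file mirroring the request URL's host
// and path, creating directories as needed.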
// If resp.Body has already been consumed, `body` can be passed and will be
// read instead.
func save(resp *http.Response, body io.Reader) error {
u := resp.Request.URL
filename := filepath.Join(u.Host, u.Path)
if filepath.Ext(u.Path) == "" {
filename = filepath.Join(u.Host, u.Path, "index.html")
}
err := os.MkdirAll(filepath.Dir(filename), 0777)
if err != nil {
return err
}
fmt.Println("filename:", filename)
file, err := os.Create(filename)
if err != nil {
return err
}
	if body != nil {
		_, err = io.Copy(file, body)
	} else {
		_, err = io.Copy(file, resp.Body)
	}
	if err != nil {
		log.Print("save: ", err)
	}
	// Check for delayed write errors, as mentioned at the end of section 5.8,
	// and report them to the caller.
	if closeErr := file.Close(); err == nil {
		err = closeErr
	}
	return err
}
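
// main starts a crawl goroutine per command-line URL and waits for them to
// finish, cancelling all requests if the user hits ctrl-c first.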
func main() {
flag.IntVar(&maxDepth, "d", 3, "max crawl depth")
flag.Parse()
wg := &sync.WaitGroup{}
if len(flag.Args()) == 0 {
fmt.Fprintln(os.Stderr, "usage: mirror URL ...")
os.Exit(1)
}
	u, err := url.Parse(flag.Arg(0))
	if err != nil {
		fmt.Fprintf(os.Stderr, "invalid url: %s\n", err)
		os.Exit(1)
	}
base = u
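	// Start a crawl goroutine for each root URL; each schedules more
	// goroutines for the links it finds.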
for _, link := range flag.Args() {
wg.Add(1)
go crawl(link, 1, wg)
}
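	// Signal done once every crawl goroutine has finished.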
done := make(chan struct{})
go func() {
wg.Wait()
done <- struct{}{}
}()
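	// Notify interrupt on SIGINT (ctrl-c).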
interrupt := make(chan os.Signal, 1)
signal.Notify(interrupt, os.Interrupt)
	select {
	case <-done:
		return
	case <-interrupt:
		close(cancel) // abort all in-flight requests
		<-done        // wait for the crawl goroutines to wind down
	}
}