-
Notifications
You must be signed in to change notification settings - Fork 0
/
main.go
175 lines (148 loc) · 4.23 KB
/
main.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
package main
// This is a nice challenge to demonstrate different ways to fetch multiple urls concurrently
// and display on the terminal the response status code with the associated url.
// We could improve the fetch result by adding response body size and time taken to fetch.
// start := time.Now() then n, err := io.Copy(ioutil.Discard, resp.Body)
// secs := time.Since(start).Seconds() and fmt.Sprintf("%s :: %.2fs :: %7d", resp.Status, secs, n)
// Version : 1.0
// Author : Jerome AMON
// Created : 26 August 2021
import (
"fmt"
"net/http"
"runtime"
"sync"
"time"
)
// fetchClient is shared by all Fetch calls so the underlying transport
// can reuse TCP/TLS connections instead of dialing per request.
// The timeout bounds the whole request (dial + headers + body).
var fetchClient = &http.Client{Timeout: 5 * time.Second}

// Fetch performs an HTTP GET on link and returns the response status
// line (e.g. "200 OK"). It returns "n/a" when the request fails
// (bad URL, connection error, or timeout).
func Fetch(link string) string {
	resp, err := fetchClient.Get(link)
	if err != nil {
		// failed to fetch.
		return "n/a"
	}
	defer resp.Body.Close()
	// Drain the body so the keep-alive connection can be reused by the
	// shared client. The error is irrelevant here: we only report status.
	io.Copy(io.Discard, resp.Body)
	return resp.Status
}
// FirstWorker demonstrates an approach to use Fetch concurrently.
// One goroutine is spawned per URL; each goroutine fetches its link
// and prints the result itself. The link is made visible to the
// closure by shadowing the loop variable.
func FirstWorker(links []string) {
	var wg sync.WaitGroup
	for _, l := range links {
		wg.Add(1)
		// shadow the loop variable so each closure captures its own copy.
		target := l
		go func() {
			defer wg.Done()
			fmt.Printf("%s : %s\n", target, Fetch(target))
		}()
	}
	// block until every goroutine has printed its result.
	wg.Wait()
}
// SecondWorker demonstrates an approach to use Fetch concurrently.
// One goroutine is spawned per URL; each goroutine fetches its link
// and prints the result itself. The link is handed to the closure
// as an explicit argument rather than captured from the loop.
func SecondWorker(links []string) {
	var wg sync.WaitGroup
	// register every goroutine up front.
	wg.Add(len(links))
	for _, l := range links {
		go func(target string) {
			defer wg.Done()
			fmt.Printf("%s : %s\n", target, Fetch(target))
		}(l)
	}
	// block until every goroutine has printed its result.
	wg.Wait()
}
// ThirdWorker demonstrates an approach to use Fetch concurrently.
// Fetching goroutines publish formatted results on a channel; a single
// printer goroutine consumes exactly one result per link and then
// signals completion.
func ThirdWorker(links []string) {
	total := len(links)
	results := make(chan string)
	finished := make(chan bool)
	// printer: drain exactly total results, then signal the caller.
	go func() {
		for received := 0; received < total; received++ {
			fmt.Print(<-results)
		}
		finished <- true
	}()
	// one fetching goroutine per link; the link travels as an argument.
	for _, l := range links {
		go func(target string) {
			results <- fmt.Sprintf("%s : %s\n", target, Fetch(target))
		}(l)
	}
	// block until all results displayed.
	<-finished
}
// FourthWorker demonstrates an approach to use Fetch concurrently.
// A fixed pool of workers (at most the number of CPU cores) consumes
// URLs from a jobs channel, fetches each one, and sends a formatted
// result on a results channel. The caller then reads exactly one
// result per link.
func FourthWorker(links []string) {
	n := len(links)
	// cap the pool at the number of cores, and never above the job count.
	numberOfWorkers := runtime.NumCPU()
	if n < numberOfWorkers {
		numberOfWorkers = n
	}
	// both channels are buffered to n so neither feeding jobs nor
	// publishing results can ever block.
	jobsChannel := make(chan string, n)
	resultsChannel := make(chan string, n)
	// spin up all workers; each exits when jobsChannel is closed and drained.
	for i := 0; i < numberOfWorkers; i++ {
		go func(id int) {
			for url := range jobsChannel {
				status := Fetch(url)
				// build result and add to results channel for displaying.
				resultsChannel <- fmt.Sprintf("worker %d :: %s : %s\n", id, url, status)
			}
		}(i)
	}
	// Feed all jobs synchronously — the buffer holds every link, so this
	// cannot block — and close immediately. Closing here keeps the close
	// on the (sole) sender side and guarantees workers terminate, instead
	// of closing from the reader after the fact.
	for _, url := range links {
		jobsChannel <- url
	}
	close(jobsChannel)
	// ensure to read n number of results.
	for r := 0; r < n; r++ {
		fmt.Print(<-resultsChannel)
	}
}
// main runs each of the four concurrency techniques in turn against the
// same list of URLs, printing a blank line before each run.
func main() {
	// list of urls for testing.
	links := []string{
		"https://cisco.com",
		"https://google.com",
		"https://facebook.com",
		"https://microsoft.com",
		"https://amazon.com",
		"https://twitter.com",
	}
	// dispatch table over the four techniques, in demonstration order.
	techniques := []func([]string){
		FirstWorker,
		SecondWorker,
		ThirdWorker,
		FourthWorker,
	}
	for _, run := range techniques {
		fmt.Println()
		run(links)
	}
}