Effectively Maximizing Concurrent HTTP Requests in Go
In your code, you attempted to send 1 million HTTP requests concurrently, but the program failed because it exhausted the operating system's file descriptor limit. Here's how to effectively 'flood' your laptop with requests while staying within those system constraints:
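Before picking a concurrency level, it helps to know the ceiling you are working against. On Unix-like systems, the per-process open-file limit (which also caps open sockets) can be read with syscall.Getrlimit; the short sketch below is an illustrative helper under that assumption, not part of the modified program:

```go
package main

import (
	"fmt"
	"log"
	"syscall"
)

func main() {
	var rl syscall.Rlimit
	// RLIMIT_NOFILE is the per-process cap on open file descriptors,
	// which also bounds how many sockets can be open at once.
	if err := syscall.Getrlimit(syscall.RLIMIT_NOFILE, &rl); err != nil {
		log.Fatal(err)
	}
	fmt.Printf("open-file limit: soft=%d hard=%d\n", rl.Cur, rl.Max)
}
```

Keeping the number of in-flight requests below the soft limit is what the worker-pool approach below is designed to guarantee.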
Modified Code Using Channel-Based Concurrency:
```go
package main

import (
	"flag"
	"fmt"
	"log"
	"net/http"
	"runtime"
	"sync"
	"time"
)

var (
	reqs int
	max  int
)

func init() {
	flag.IntVar(&reqs, "reqs", 1000000, "Total requests")
	flag.IntVar(&max, "concurrent", 200, "Maximum concurrent requests")
}

// Response pairs an HTTP response with the error (if any) from sending it.
type Response struct {
	*http.Response
	err error
}

// dispatcher builds the requests and feeds them into reqChan, closing the
// channel once every request has been queued.
func dispatcher(reqChan chan *http.Request) {
	defer close(reqChan)
	for i := 0; i < reqs; i++ {
		req, err := http.NewRequest("GET", "http://localhost/", nil)
		if err != nil {
			log.Println(err)
		}
		reqChan <- req
	}
}

// workerPool starts max workers that all share a single Transport,
// so connections are pooled and reused instead of opened per request.
func workerPool(reqChan chan *http.Request, respChan chan Response, wg *sync.WaitGroup) {
	t := &http.Transport{}
	for i := 0; i < max; i++ {
		go worker(t, reqChan, respChan, wg)
	}
}

// worker sends requests until reqChan is closed, forwarding each result.
func worker(t *http.Transport, reqChan chan *http.Request, respChan chan Response, wg *sync.WaitGroup) {
	for req := range reqChan {
		resp, err := t.RoundTrip(req)
		respChan <- Response{resp, err}
	}
	wg.Done()
}

// consumer tallies completed requests and total bytes received.
func consumer(respChan chan Response) (int64, int64) {
	var conns, size int64
	for conns < int64(reqs) {
		r, ok := <-respChan
		if !ok {
			continue
		}
		if r.err != nil {
			log.Println(r.err)
		} else {
			size += r.ContentLength
			if err := r.Body.Close(); err != nil {
				log.Println(err)
			}
		}
		conns++
	}
	return conns, size
}

func main() {
	flag.Parse()
	runtime.GOMAXPROCS(runtime.NumCPU())

	reqChan := make(chan *http.Request, max)
	respChan := make(chan Response)
	wg := sync.WaitGroup{}
	wg.Add(max)

	start := time.Now()
	go dispatcher(reqChan)
	go workerPool(reqChan, respChan, &wg)
	conns, size := consumer(respChan)
	wg.Wait()
	took := time.Since(start)

	av := took.Nanoseconds() / conns
	average, err := time.ParseDuration(fmt.Sprintf("%dns", av))
	if err != nil {
		log.Println(err)
	}

	fmt.Printf("Connections:\t%d\nConcurrent:\t%d\nTotal size:\t%d bytes\nTotal time:\t%s\nAverage time:\t%s\n",
		conns, max, size, took, average)
}
```
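The dispatcher targets http://localhost/, so something must be listening locally for the benchmark to produce responses. If nothing is, a throwaway server along the lines of the sketch below can act as the target; it is an assumed helper that listens on :8080, so the request URL in dispatcher would need to be changed to http://localhost:8080/ to match:

```go
package main

import (
	"fmt"
	"log"
	"net/http"
)

func main() {
	// Respond to every path with a tiny fixed body.
	http.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
		fmt.Fprintln(w, "ok")
	})
	log.Fatal(http.ListenAndServe(":8080", nil))
}
```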
Explanation:

- dispatcher builds the requests and feeds them into a buffered channel (reqChan), closing the channel once every request has been queued.
- workerPool starts a fixed number of workers (the -concurrent flag, 200 by default) that all share one http.Transport, so keep-alive connections are pooled and reused.
- Each worker ranges over reqChan, sends the request with Transport.RoundTrip, and pushes the result onto respChan. Because only max workers exist, at most max requests (and therefore sockets and file descriptors) are open at any moment.
- consumer drains respChan, logging errors, summing response sizes, and closing each response body so the underlying connection can return to the pool.
- main wires everything together, waits for the workers to finish, and prints the connection count, total bytes, total time, and average time per request.
Benefits of Modifications:

- Concurrency is capped by the size of the worker pool, so the program never tries to open more sockets than the operating system's file descriptor limit allows.
- A single shared Transport reuses keep-alive connections instead of opening a new connection per request, which cuts both descriptor usage and per-request latency.
- The buffered request channel decouples request creation from request sending, keeping every worker busy without unbounded memory growth.
- The built-in timing and byte counters let you tune -reqs and -concurrent to whatever your machine can actually sustain.
By following these modifications, you can effectively 'flood' your laptop with as many HTTP requests as possible within the limitations of your system's resources.
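For comparison, the same bounded-concurrency idea can be expressed with a buffered channel used as a counting semaphore around a shared http.Client. This is a simplified sketch of the pattern, not the code above, and the request count and target URL are illustrative:

```go
package main

import (
	"io"
	"log"
	"net/http"
	"sync"
)

func main() {
	const (
		total      = 10000 // total requests (illustrative value)
		concurrent = 200   // upper bound on in-flight requests
	)

	client := &http.Client{}               // its Transport pools and reuses connections
	sem := make(chan struct{}, concurrent) // buffered channel acts as a counting semaphore
	var wg sync.WaitGroup

	for i := 0; i < total; i++ {
		wg.Add(1)
		sem <- struct{}{} // blocks once `concurrent` requests are in flight
		go func() {
			defer wg.Done()
			defer func() { <-sem }() // release the slot

			resp, err := client.Get("http://localhost/")
			if err != nil {
				log.Println(err)
				return
			}
			// Drain and close the body so the connection can be reused.
			io.Copy(io.Discard, resp.Body)
			resp.Body.Close()
		}()
	}
	wg.Wait()
}
```

The worker-pool version above avoids spawning one goroutine per request, which matters at the 1-million-request scale; the semaphore sketch is simpler but creates a goroutine for every request.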