Make concurrent requests in Go

Our API is designed to allow you to have multiple concurrent scraping operations. That means you can speed up scraping for hundreds, thousands or even millions of pages per day, depending on your plan.

The more concurrent requests limit you have the more calls you can have active in parallel, and the faster you can scrape.

Making concurrent requests in GoLang is as easy as adding a “go” keyword before our scraping functions! The code below will make two concurrent requests to ScrapingBee’s pages, and save the content in an HTML file.

package main

import (
  "fmt"
  "io"
  "net/http"
  "os"
  "sync"
)

const API_KEY = "YOUR-API-KEY"
const SCRAPINGBEE_URL = "https://app.scrapingbee.com/api/v1"

func save_page_to_html(target_url string, file_path string, wg *sync.WaitGroup) (interface{}, error) { // Using sync.Waitgroup to wait for goroutines to finish

  req, err := http.NewRequest("GET", SCRAPINGBEE_URL, nil)
  if err != nil {
    return nil, fmt.Errorf("Failed to build the request: %s", err)
  }

  q := req.URL.Query()
  q.Add("api_key", API_KEY)
  q.Add("url", target_url)
  req.URL.RawQuery = q.Encode()

  client := &http.Client{}
  resp, err := client.Do(req)
  if err != nil {
    return nil, fmt.Errorf("Failed to request ScrapingBee: %s", err)
  }
  defer resp.Body.Close()

  if resp.StatusCode != http.StatusOK {
    return nil, fmt.Errorf("Error request response with status code %d", resp.StatusCode)
  }

  bodyBytes, err := io.ReadAll(resp.Body)

  file, err := os.Create(file_path)
  if err != nil {
      return nil, fmt.Errorf("Couldn't create the file ", err)
  }

  l, err := file.Write(bodyBytes) // Write content to the file.
  if err != nil {
      file.Close()
      return nil, fmt.Errorf("Couldn't write content to the file ", err)
  }
  err = file.Close()
  if err != nil {
      return nil, fmt.Errorf("Couldn't close the file ", err)
  }

  wg.Done()
  return l, nil
}

func main() {

    var wg sync.WaitGroup // New wait group
    wg.Add(2) // Using two goroutines

    go save_page_to_html("https://scrapingbee.com/blog", "blog.html", &wg)
    go save_page_to_html("https://scrapingbee.com/documentation", "documentation.html", &wg)

    wg.Wait()

}
Go back to tutorials