For most websites, the vast majority of your requests will succeed, but some will inevitably fail. For those failed requests, the API returns a 500 status code and does not charge you for them.
In that case, we can make our code retry the request until it succeeds or until a maximum number of retries is reached:
package main

import (
    "fmt"
    "io"
    "net/http"
    "os"
)

const API_KEY = "YOUR-API-KEY"
const SCRAPINGBEE_URL = "https://app.scrapingbee.com/api/v1"

// save_page_to_html fetches target_url through ScrapingBee and writes the HTML to file_path.
// It returns the number of bytes written.
func save_page_to_html(target_url string, file_path string) (int, error) {
    req, err := http.NewRequest("GET", SCRAPINGBEE_URL, nil)
    if err != nil {
        return 0, fmt.Errorf("failed to build the request: %s", err)
    }

    // Add the API key and the target URL as query parameters.
    q := req.URL.Query()
    q.Add("api_key", API_KEY)
    q.Add("url", target_url)
    req.URL.RawQuery = q.Encode()

    client := &http.Client{}
    resp, err := client.Do(req)
    if err != nil {
        return 0, fmt.Errorf("failed to request ScrapingBee: %s", err)
    }
    defer resp.Body.Close()

    if resp.StatusCode != http.StatusOK {
        return 0, fmt.Errorf("request failed with status code %d", resp.StatusCode)
    }

    bodyBytes, err := io.ReadAll(resp.Body)
    if err != nil {
        return 0, fmt.Errorf("couldn't read the response body: %s", err)
    }

    file, err := os.Create(file_path)
    if err != nil {
        return 0, fmt.Errorf("couldn't create the file: %s", err)
    }

    l, err := file.Write(bodyBytes) // Write the HTML content to the file.
    if err != nil {
        file.Close()
        return 0, fmt.Errorf("couldn't write content to the file: %s", err)
    }

    err = file.Close()
    if err != nil {
        return 0, fmt.Errorf("couldn't close the file: %s", err)
    }

    return l, nil
}

func main() {
    MAX_RETRIES := 5 // Set a maximum number of retries.
    target_url := "https://www.scrapingbee.com"

    for i := 0; i < MAX_RETRIES; i++ {
        bytes_written, err := save_page_to_html(target_url, "./scrapingbee.html")
        if err != nil {
            fmt.Println(err)
            fmt.Println("Retrying...")
        } else {
            fmt.Println("Done!", bytes_written)
            break
        }
    }
}
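If the request keeps failing, it can also be worth waiting a little between attempts instead of retrying immediately. Below is a minimal sketch of an alternative main function that reuses the save_page_to_html function above and adds a simple exponential backoff with time.Sleep (you'll need to add "time" to the imports); the one-second starting delay is an arbitrary choice, not a value required by the API.

func main() {
    MAX_RETRIES := 5
    target_url := "https://www.scrapingbee.com"
    delay := 1 * time.Second // Arbitrary starting delay between retries.

    for i := 0; i < MAX_RETRIES; i++ {
        bytes_written, err := save_page_to_html(target_url, "./scrapingbee.html")
        if err != nil {
            fmt.Println(err)
            fmt.Printf("Retrying in %s...\n", delay)
            time.Sleep(delay)
            delay *= 2 // Double the delay after each failed attempt.
        } else {
            fmt.Println("Done!", bytes_written)
            break
        }
    }
}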