web-crawler/main.go

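// Command web-crawler fetches the HTML of a single page and prints it
// to stdout. Usage:
//
//	go run . https://example.com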
package main

import (
	"errors"
	"fmt"
	"io"
	"net/http"
	"os"
	"strings"
	"time"
)
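
// Build metadata. These are presumably injected at link time, e.g.:
//
//	go build -ldflags "-X main.gitCommit=$(git rev-parse HEAD)"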
var (
	binaryVersion string
	buildTime     string
	goVersion     string
	gitCommit     string
)

func main() {
	if err := run(); err != nil {
		// Report failures on stderr so stdout stays clean for the HTML output.
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
}
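
// run validates that exactly one URL was passed on the command line,
// fetches that page's HTML, and prints it.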
func run() error {
	args := os.Args[1:]
	if len(args) == 0 {
		return errors.New("no website provided")
	}
	if len(args) > 1 {
		return errors.New("too many arguments provided")
	}
	baseURL := args[0]

	htmlBody, err := getHTML(baseURL)
	if err != nil {
		return err
	}
	fmt.Println(htmlBody)
	return nil
}
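
// getHTML issues a GET request to rawURL and returns the response body as a
// string. It fails on network errors, 4xx/5xx status codes, and non-HTML
// content types.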
func getHTML(rawURL string) (string, error) {
	req, err := http.NewRequest(http.MethodGet, rawURL, nil)
	if err != nil {
		return "", fmt.Errorf("error creating the request: %w", err)
	}

	// 10 * time.Second is already a time.Duration; no conversion needed.
	client := http.Client{
		Timeout: 10 * time.Second,
	}
	resp, err := client.Do(req)
	if err != nil {
		return "", fmt.Errorf("error getting the response: %w", err)
	}
	defer resp.Body.Close()

	// resp.Status already includes the numeric code (e.g. "404 Not Found"),
	// so printing the code separately would duplicate it.
	if resp.StatusCode >= 400 {
		return "", fmt.Errorf("received a bad status from %s: %s", rawURL, resp.Status)
	}

	// Header.Get canonicalizes the key, so "content-type" would also work;
	// the canonical form is used here for readability.
	contentType := resp.Header.Get("Content-Type")
	if !strings.Contains(contentType, "text/html") {
		return "", fmt.Errorf("unexpected content type received: want text/html, got %s", contentType)
	}

	data, err := io.ReadAll(resp.Body)
	if err != nil {
		return "", fmt.Errorf("error reading the data from the response: %w", err)
	}
	return string(data), nil
}