web-crawler/main.go

// Command web-crawler counts how often each page of a single website is
// reached, starting from the base URL passed as the sole argument.
package main

import (
	"errors"
	"fmt"
	"io"
	"maps"
	"net/http"
	"net/url"
	"os"
	"regexp"
	"strings"
)
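
// Sentinel errors for command-line argument validation.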
var (
	errNoWebsiteProvided = errors.New("no website provided")
	errTooManyArgs       = errors.New("too many arguments provided")
)
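
// main delegates to run so all error handling funnels through one exit path.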
func main() {
	if err := run(); err != nil {
		os.Stderr.WriteString("ERROR: " + err.Error() + "\n")
		os.Exit(1)
	}
}
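
// run validates the arguments, crawls the site, and prints a visit report.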
func run() error {
	args := os.Args[1:]
	if len(args) == 0 {
		return errNoWebsiteProvided
	}
	if len(args) > 1 {
		return errTooManyArgs
	}

	baseURL := args[0]
	pages := make(map[string]int)
	pages, err := crawlPage(baseURL, baseURL, pages)
	if err != nil {
		return fmt.Errorf("received an error while crawling the website: %w", err)
	}

	fmt.Printf("\n\nRESULTS:\n")
	// maps.All requires Go 1.23 or newer; a plain range over pages works too.
	for page, count := range maps.All(pages) {
		fmt.Printf("%s: %d\n", page, count)
	}
	return nil
}
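
// What follows is a minimal, stdlib-only sketch of the crawlPage function the
// call in run assumes; the project's real implementation presumably lives in a
// sibling file of package main, in which case this sketch (and the extra
// imports above) should be dropped. Extracting links with a regular expression
// is a deliberate simplification; a real crawler would parse the HTML.

// hrefPattern naively matches double-quoted href attributes. It misses
// single-quoted and unquoted forms, which is acceptable for a sketch.
var hrefPattern = regexp.MustCompile(`href="([^"]+)"`)

// crawlPage fetches rawCurrentURL, records the visit in pages keyed by
// host+path, and recursively follows links that stay on rawBaseURL's host.
// It returns the updated map so the caller can keep threading it through.
func crawlPage(rawBaseURL, rawCurrentURL string, pages map[string]int) (map[string]int, error) {
	base, err := url.Parse(rawBaseURL)
	if err != nil {
		return pages, fmt.Errorf("parsing base URL %q: %w", rawBaseURL, err)
	}
	current, err := url.Parse(rawCurrentURL)
	if err != nil {
		return pages, fmt.Errorf("parsing current URL %q: %w", rawCurrentURL, err)
	}

	// Stay on the starting site: skip external hosts entirely.
	if base.Host != current.Host {
		return pages, nil
	}

	// Key on host+path so scheme and fragment variants collapse to one entry.
	key := current.Host + current.Path
	if _, visited := pages[key]; visited {
		pages[key]++
		return pages, nil
	}
	pages[key] = 1

	resp, err := http.Get(rawCurrentURL)
	if err != nil {
		return pages, fmt.Errorf("fetching %s: %w", rawCurrentURL, err)
	}
	defer resp.Body.Close()

	// Only follow links out of successful HTML responses.
	if resp.StatusCode >= 400 || !strings.Contains(resp.Header.Get("Content-Type"), "text/html") {
		return pages, nil
	}
	body, err := io.ReadAll(resp.Body)
	if err != nil {
		return pages, fmt.Errorf("reading %s: %w", rawCurrentURL, err)
	}

	for _, match := range hrefPattern.FindAllStringSubmatch(string(body), -1) {
		// current.Parse resolves relative links against the page's own URL.
		next, err := current.Parse(match[1])
		if err != nil {
			continue // skip malformed links rather than aborting the crawl
		}
		if pages, err = crawlPage(rawBaseURL, next.String(), pages); err != nil {
			return pages, err
		}
	}
	return pages, nil
}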