web-crawler/main.go

package main

import (
	"errors"
	"fmt"
	"io"
	"maps"
	"net/http"
	"net/url"
	"os"
	"strings"
)

var (
	errNoWebsiteProvided = errors.New("no website provided")
	errTooManyArgs       = errors.New("too many arguments provided")
)

// main delegates all work to run so every failure path funnels through a
// single error message and exit code.
func main() {
	if err := run(); err != nil {
		os.Stderr.WriteString("ERROR: " + err.Error() + "\n")
		os.Exit(1)
	}
}

// run validates the command-line arguments and prints the per-page counts.
// The crawl call itself is left commented out; a hypothetical sketch of
// crawlPage follows at the bottom of the file.
func run() error {
	args := os.Args[1:]
	if len(args) == 0 {
		return errNoWebsiteProvided
	}
	if len(args) > 1 {
		return errTooManyArgs
	}

	// baseURL := args[0]

	pages := make(map[string]int)

	// var err error
	// pages, err = crawlPage(baseURL, baseURL, pages)
	// if err != nil {
	// 	return fmt.Errorf("received an error while crawling the website: %w", err)
	// }

	fmt.Printf("\n\nRESULTS:\n")
	// maps.All yields key/value pairs in unspecified order (Go 1.23+).
	for page, count := range maps.All(pages) {
		fmt.Printf("%s: %d\n", page, count)
	}

	return nil
}
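
// The commented-out call in run assumes a crawlPage function with the
// signature below. What follows is only a minimal sketch under that
// assumption, not the project's actual implementation: it stays on the
// base domain, counts each normalized page, fetches unseen pages once, and
// recurses into links found by naively scanning for href="..." attributes.
// A real crawler would parse the HTML properly (for example with
// golang.org/x/net/html) and check status codes and Content-Type.
func crawlPage(rawBaseURL, rawCurrentURL string, pages map[string]int) (map[string]int, error) {
	baseURL, err := url.Parse(rawBaseURL)
	if err != nil {
		return pages, fmt.Errorf("parsing base URL: %w", err)
	}
	currentURL, err := url.Parse(rawCurrentURL)
	if err != nil {
		return pages, fmt.Errorf("parsing current URL: %w", err)
	}

	// Never leave the website we started on.
	if currentURL.Host != baseURL.Host {
		return pages, nil
	}

	// Normalize so /path and /path/ count as the same page, then record the
	// visit; already-seen pages are counted but not refetched.
	normalized := currentURL.Host + strings.TrimSuffix(currentURL.Path, "/")
	if _, seen := pages[normalized]; seen {
		pages[normalized]++
		return pages, nil
	}
	pages[normalized] = 1

	resp, err := http.Get(rawCurrentURL)
	if err != nil {
		return pages, fmt.Errorf("fetching %s: %w", rawCurrentURL, err)
	}
	body, err := io.ReadAll(resp.Body)
	// Close before recursing so connections are not held open down the stack.
	resp.Body.Close()
	if err != nil {
		return pages, fmt.Errorf("reading %s: %w", rawCurrentURL, err)
	}

	// Naive link extraction: everything between href=" and the next quote.
	for _, chunk := range strings.Split(string(body), `href="`)[1:] {
		end := strings.IndexByte(chunk, '"')
		if end < 0 {
			continue
		}
		link, err := currentURL.Parse(chunk[:end]) // resolves relative URLs
		if err != nil {
			continue
		}
		if pages, err = crawlPage(rawBaseURL, link.String(), pages); err != nil {
			return pages, err
		}
	}

	return pages, nil
}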