// main.go — entry point for the web-crawler command.
package main

import (
	"errors"
	"fmt"
	"os"

	"codeflow.dananglin.me.uk/apollo/web-crawler/internal/crawler"
)
var (
2024-08-27 07:38:20 +01:00
errNoWebsiteProvided = errors.New("no website provided")
errTooManyArgs = errors.New("too many arguments provided")
2024-08-26 10:30:14 +01:00
)
func main() {
if err := run(); err != nil {
2024-08-27 07:38:20 +01:00
os.Stderr.WriteString("ERROR: " + err.Error() + "\n")
2024-08-26 10:30:14 +01:00
os.Exit(1)
}
}
func run() error {
2024-08-26 18:37:45 +01:00
args := os.Args[1:]
if len(args) == 0 {
2024-08-27 07:38:20 +01:00
return errNoWebsiteProvided
2024-08-26 18:37:45 +01:00
}
if len(args) > 1 {
2024-08-27 07:38:20 +01:00
return errTooManyArgs
2024-08-26 18:37:45 +01:00
}
2024-08-27 13:11:16 +01:00
baseURL := args[0]
2024-08-27 07:38:20 +01:00
2024-08-27 13:11:16 +01:00
c, err := crawler.NewCrawler(baseURL)
if err != nil {
return fmt.Errorf("unable to create the crawler: %w", err)
}
2024-08-27 07:38:20 +01:00
2024-08-27 13:11:16 +01:00
go c.Crawl(baseURL)
2024-08-26 19:00:44 +01:00
2024-08-27 13:11:16 +01:00
c.Wait()
2024-08-27 07:38:20 +01:00
2024-08-27 13:11:16 +01:00
c.PrintReport()
2024-08-26 18:37:45 +01:00
2024-08-26 10:30:14 +01:00
return nil
}