web-crawler/main.go

package main

import (
"errors"
"fmt"
"os"
)
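
// The variables below are a common hook for build metadata. They are not set
// anywhere in this file; a typical (assumed) way to populate them is the
// linker's -X flag at build time, e.g.:
//
//	go build -ldflags "-X main.binaryVersion=v0.1.0 -X main.gitCommit=$(git rev-parse HEAD)"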
var (
	binaryVersion string
	buildTime     string
	goVersion     string
	gitCommit     string
)

func main() {
	if err := run(); err != nil {
		fmt.Println(err)
		os.Exit(1)
	}
}

// run expects exactly one command-line argument, the base URL to crawl.
func run() error {
	args := os.Args[1:]
	if len(args) == 0 {
		return errors.New("no website provided")
	}
	if len(args) > 1 {
		return errors.New("too many arguments provided")
	}

	baseURL := args[0]
	fmt.Printf("starting crawl of: %s\n", baseURL)
	return nil
}
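
// Usage sketch (the URL below is a placeholder, not taken from this repo):
//
//	$ go run . https://example.com
//	starting crawl of: https://example.com
//
// With no argument the program prints "no website provided" and exits with
// status 1; more than one argument yields "too many arguments provided".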