web-crawler/main.go

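// Command web-crawler is the CLI entry point for the crawler: it takes a
// single argument, the base URL to crawl, and reports a usage error
// otherwise. An assumed example invocation (illustrative, not part of the
// original file):
//
//	go run . https://example.com
//	starting crawl of: https://example.com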
package main

import (
	"errors"
	"fmt"
	"os"
)

// Build metadata. These are left empty in source and are intended to be
// injected at build time (for example via the Go linker's -X flag).
var (
	binaryVersion string
	buildTime     string
	goVersion     string
	gitCommit     string
)
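
// A minimal sketch (assumed, not part of the original build setup) of how the
// values above could be populated when compiling; the version string and
// commit lookup are placeholders:
//
//	go build -ldflags "-X main.binaryVersion=v0.1.0 -X main.gitCommit=$(git rev-parse --short HEAD)" .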

// main delegates to run so that every error path shares a single exit point.
func main() {
	if err := run(); err != nil {
		fmt.Println(err)
		os.Exit(1)
	}
}

// run validates the command-line arguments and starts the crawl. It expects
// exactly one argument: the base URL to crawl.
func run() error {
	args := os.Args[1:]
	if len(args) == 0 {
		return errors.New("no website provided")
	}
	if len(args) > 1 {
		return errors.New("too many arguments provided")
	}

	baseURL := args[0]
	fmt.Printf("starting crawl of: %s\n", baseURL)
	return nil
}