web-crawler/internal/util/url.go
Dan Anglin 4519de764e
All checks were successful
Tests / test (pull_request) Successful in 13s
feat: add the web crawler
Add the source code for the web crawler. The web crawler is a simple Go
CLI application that traverses through a website and generates a report
of all the internal links found in the site.
2024-08-27 15:42:26 +01:00

18 lines
382 B
Go

package util
import (
"fmt"
"net/url"
"strings"
)
// NormaliseURL reduces rawURL to a canonical "hostname/path" form:
// the scheme, port, query string and fragment are dropped, and any
// trailing slash is trimmed from the path. The hostname's case is
// preserved as parsed. An error is returned only when rawURL cannot
// be parsed at all.
func NormaliseURL(rawURL string) (string, error) {
	parsed, err := url.Parse(rawURL)
	if err != nil {
		return "", fmt.Errorf("error parsing the URL %q: %w", rawURL, err)
	}

	// "%s%s" in the original was plain concatenation; do it directly.
	trimmedPath := strings.TrimSuffix(parsed.Path, "/")

	return parsed.Hostname() + trimmedPath, nil
}