Normalize URL
This commit is contained in:
parent
8ff36ca84d
commit
bdcbc8ddbb
3 changed files with 43 additions and 1 deletions
|
|
@ -6,6 +6,7 @@ import (
|
||||||
"fmt"
|
"fmt"
|
||||||
"github.com/dustin/go-humanize"
|
"github.com/dustin/go-humanize"
|
||||||
"html/template"
|
"html/template"
|
||||||
|
"media-roller/src/utils"
|
||||||
"net/http"
|
"net/http"
|
||||||
"path/filepath"
|
"path/filepath"
|
||||||
"regexp"
|
"regexp"
|
||||||
|
|
@ -94,6 +95,8 @@ func getMediaResults(url string) ([]Media, string, error) {
|
||||||
return nil, "", errors.New("missing URL")
|
return nil, "", errors.New("missing URL")
|
||||||
}
|
}
|
||||||
|
|
||||||
|
url = utils.NormalizeUrl(url)
|
||||||
|
|
||||||
// NOTE: This system is for a simple use case, meant to run at home. This is not a great design for a robust system.
|
// NOTE: This system is for a simple use case, meant to run at home. This is not a great design for a robust system.
|
||||||
// We are hashing the URL here and writing files to disk to a consistent directory based on the ID. You can imagine
|
// We are hashing the URL here and writing files to disk to a consistent directory based on the ID. You can imagine
|
||||||
// concurrent users would break this for the same URL. That's fine given this is for a simple home system.
|
// concurrent users would break this for the same URL. That's fine given this is for a simple home system.
|
||||||
|
|
@ -128,7 +131,7 @@ func downloadMedia(url string) (string, string, error) {
|
||||||
cmd := exec.Command("yt-dlp",
|
cmd := exec.Command("yt-dlp",
|
||||||
"--format", "bestvideo[ext=mp4]+bestaudio[ext=m4a]/best[ext=mp4]/best",
|
"--format", "bestvideo[ext=mp4]+bestaudio[ext=m4a]/best[ext=mp4]/best",
|
||||||
"--merge-output-format", "mp4",
|
"--merge-output-format", "mp4",
|
||||||
"--trim-filenames", "200",
|
"--trim-filenames", "200",
|
||||||
"--restrict-filenames",
|
"--restrict-filenames",
|
||||||
"--write-info-json",
|
"--write-info-json",
|
||||||
"--verbose",
|
"--verbose",
|
||||||
|
|
|
||||||
18
src/utils/urls.go
Normal file
18
src/utils/urls.go
Normal file
|
|
@ -0,0 +1,18 @@
|
||||||
|
package utils
|
||||||
|
|
||||||
|
import "strings"
|
||||||
|
|
||||||
|
// NormalizeUrl trims surrounding whitespace from url and, when the input
// contains several whitespace-separated tokens, returns the first token
// that looks like a URL (one prefixed with "http" or "www").
// If no token matches, the trimmed input is returned unchanged.
func NormalizeUrl(url string) string {
	url = strings.TrimSpace(url)
	// strings.Fields splits on any run of whitespace (spaces, tabs, ...),
	// which also skips the empty tokens a plain Split on " " would yield.
	for _, part := range strings.Fields(url) {
		// Take the first token that looks like a URL.
		// TODO: We could try to parse the url, but will save that for later
		if strings.HasPrefix(part, "http") || strings.HasPrefix(part, "www") {
			return part
		}
	}
	return url
}
|
||||||
21
src/utils/urls_test.go
Normal file
21
src/utils/urls_test.go
Normal file
|
|
@ -0,0 +1,21 @@
|
||||||
|
package utils
|
||||||
|
|
||||||
|
import "testing"
|
||||||
|
|
||||||
|
func TestNormalizeUrl(t *testing.T) {
|
||||||
|
tests := []struct {
|
||||||
|
url string
|
||||||
|
want string
|
||||||
|
}{
|
||||||
|
{url: "example.com", want: "example.com"},
|
||||||
|
{url: "https://example.com", want: "https://example.com"},
|
||||||
|
{url: "https://example.com this is an example", want: "https://example.com"},
|
||||||
|
}
|
||||||
|
for _, tt := range tests {
|
||||||
|
t.Run(tt.url, func(t *testing.T) {
|
||||||
|
if got := NormalizeUrl(tt.url); got != tt.want {
|
||||||
|
t.Errorf("NormalizeUrl() = %v, want %v", got, tt.want)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
Loading…
Reference in a new issue