Compare commits
16 commits
7bc230a8fd
...
2a893f89d6
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
2a893f89d6 | ||
|
|
e63a9c664c | ||
|
|
66bccae7e2 | ||
|
|
9e96869f8d | ||
|
|
87d53f06da | ||
|
|
b67dbe577d | ||
|
|
5e77d799e1 | ||
|
|
c23395e225 | ||
|
|
74748ec86f | ||
|
|
701b146dc2 | ||
|
|
a41b9d2143 | ||
|
|
61aae31ea3 | ||
|
|
a65ff6785b | ||
|
|
3264286f7f | ||
|
|
20f48529ba | ||
|
|
5f68476c76 |
38 changed files with 1330 additions and 980 deletions
|
|
@ -5,6 +5,7 @@ RUN apt-get update && apt-get install -y \
|
|||
curl \
|
||||
gnupg \
|
||||
ffmpeg \
|
||||
git \
|
||||
&& curl -fsSL https://deb.nodesource.com/setup_18.x | bash - \
|
||||
&& apt-get install -y nodejs \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
|
@ -40,15 +41,7 @@ WORKDIR /app
|
|||
COPY backend/ ./backend/
|
||||
|
||||
# Create a start script
|
||||
# We also implement a "seed data" check.
|
||||
# If the volume mount is empty (missing data.json), we copy from our backup.
|
||||
RUN mkdir -p backend/data_seed && cp -r backend/data/* backend/data_seed/ || true
|
||||
|
||||
RUN echo '#!/bin/bash\n\
|
||||
if [ ! -f backend/data/data.json ]; then\n\
|
||||
echo "Data volume appears empty. Seeding with bundled data..."\n\
|
||||
cp -r backend/data_seed/* backend/data/\n\
|
||||
fi\n\
|
||||
uvicorn backend.main:app --host 0.0.0.0 --port 8000 &\n\
|
||||
cd frontend && npm start -- -p 3000\n\
|
||||
' > start.sh && chmod +x start.sh
|
||||
|
|
|
|||
0
backend/api/endpoints/__init__.py
Normal file
0
backend/api/endpoints/__init__.py
Normal file
15
backend/api/endpoints/browse.py
Normal file
15
backend/api/endpoints/browse.py
Normal file
|
|
@ -0,0 +1,15 @@
|
|||
from fastapi import APIRouter, Depends
from backend.services.youtube import YouTubeService

router = APIRouter()


def get_youtube_service():
    """Dependency provider: a fresh YouTubeService per request."""
    return YouTubeService()


@router.get("/browse")
async def get_browse_content(yt: YouTubeService = Depends(get_youtube_service)):
    """Return the YouTube Music home feed."""
    return yt.get_home()


@router.get("/trending")
async def get_trending(yt: YouTubeService = Depends(get_youtube_service)):
    """Return the current trending feed."""
    return yt.get_trending()
|
||||
11
backend/api/endpoints/lyrics.py
Normal file
11
backend/api/endpoints/lyrics.py
Normal file
|
|
@ -0,0 +1,11 @@
|
|||
from fastapi import APIRouter, Depends
from backend.services.lyrics import LyricsService

router = APIRouter()


def get_lyrics_service():
    """Dependency provider: a fresh LyricsService per request."""
    return LyricsService()


@router.get("/lyrics")
async def get_lyrics(id: str, title: str = None, artist: str = None, ls: LyricsService = Depends(get_lyrics_service)):
    """Fetch lyrics for a track id, optionally hinted by title/artist."""
    return await ls.get_lyrics(id, title, artist)
|
||||
146
backend/api/endpoints/playlists.py
Normal file
146
backend/api/endpoints/playlists.py
Normal file
|
|
@ -0,0 +1,146 @@
|
|||
from fastapi import APIRouter, HTTPException, Depends
|
||||
from typing import List
|
||||
from backend.services.playlist_manager import PlaylistManager
|
||||
from backend.services.youtube import YouTubeService
|
||||
from backend.api.schemas import CreatePlaylistRequest, UpdatePlaylistRequest, AddTrackRequest
|
||||
|
||||
router = APIRouter()


# Simple dependency-injection providers (one instance per request).
def get_playlist_manager():
    """Dependency provider for the user-playlist store."""
    return PlaylistManager()


def get_youtube_service():
    """Dependency provider for the YouTube Music client wrapper."""
    return YouTubeService()


# Browse categories shown in the UI, each mapped to the YTMusic search
# query and result type that backs it.
CATEGORIES_MAP = {
    "Trending Vietnam": {"query": "Top 50 Vietnam", "type": "playlists"},
    "Just released Songs": {"query": "New Released Songs", "type": "playlists"},
    "Albums": {"query": "New Albums 2024", "type": "albums"},
    "Vietnamese DJs": {"query": "Vinahouse Remix", "type": "playlists"},
    "Global Hits": {"query": "Global Top 50", "type": "playlists"},
    "Chill Vibes": {"query": "Chill Lofi", "type": "playlists"},
    "Party Time": {"query": "Party EDM Hits", "type": "playlists"},
    "Best of Ballad": {"query": "Vietnamese Ballad", "type": "playlists"},
    "Hip Hop & Rap": {"query": "Vietnamese Rap", "type": "playlists"},
}
|
||||
|
||||
@router.get("/browse")
async def get_browse_content(yt: YouTubeService = Depends(get_youtube_service)):
    """Serve the pre-fetched browse playlists from a local JSON file.

    Returns [] when the file is missing, unreadable, or not valid JSON —
    the browse page is best-effort and must never 500 here.
    """
    import json
    from pathlib import Path

    data_path = Path("backend/data/browse_playlists.json")
    try:
        if data_path.exists():
            with open(data_path, "r") as f:
                return json.load(f)
        return []
    # Narrowed from a bare `except Exception`: only file/parse errors are expected.
    except (OSError, json.JSONDecodeError) as e:
        print(f"Browse Error: {e}")
        return []
|
||||
|
||||
@router.get("/browse/category")
async def get_browse_category(name: str, yt: YouTubeService = Depends(get_youtube_service)):
    """Fetch live items for a named browse category (up to 50 results).

    Raises 404 for an unknown category. Results are cached for one hour;
    on any upstream failure the endpoint degrades to an empty list so the
    UI keeps rendering.
    """
    if name not in CATEGORIES_MAP:
        raise HTTPException(status_code=404, detail="Category not found")

    info = CATEGORIES_MAP[name]
    query = info["query"]
    search_type = info["type"]

    # NOTE(review): this reaches into yt.yt (the raw YTMusic client) and the
    # service cache directly; a `browse_category` method on YouTubeService
    # would be a cleaner home for this logic.
    cache_key = f"browse_category:{name}"
    cached = yt.cache.get(cache_key)
    if cached:
        return cached

    try:
        results = yt.yt.search(query, filter=search_type, limit=50)
        category_items = []

        for result in results:
            item_id = result.get('browseId')
            if not item_id:
                continue

            title = result.get('title', 'Unknown')
            cover_url = yt._get_high_res_thumbnail(result.get('thumbnails', []))

            if search_type == "albums":
                artists_text = ", ".join([a.get('name') for a in result.get('artists', [])])
                year = result.get('year', '')
                description = f"Album by {artists_text} • {year}"
                is_album = True
            else:
                is_album = False
                description = f"Playlist • {result.get('itemCount', '')} tracks"

            category_items.append({
                "id": item_id,
                "title": title,
                "description": description,
                "cover_url": cover_url,
                "type": "album" if is_album else "playlist",
                # Tracks are fetched lazily when the user opens the item.
                "tracks": []
            })

        yt.cache.set(cache_key, category_items, ttl_seconds=3600)
        return category_items
    except Exception as e:
        # Best-effort endpoint: log and degrade to an empty category.
        print(f"Category Fetch Error: {e}")
        return []
|
||||
|
||||
@router.get("/playlists")
async def get_user_playlists(pm: PlaylistManager = Depends(get_playlist_manager)):
    """List every user-created playlist."""
    return pm.get_all()


@router.post("/playlists")
async def create_user_playlist(playlist: CreatePlaylistRequest, pm: PlaylistManager = Depends(get_playlist_manager)):
    """Create a playlist and return its stored representation."""
    return pm.create(playlist.name, playlist.description)


@router.delete("/playlists/{id}")
async def delete_user_playlist(id: str, pm: PlaylistManager = Depends(get_playlist_manager)):
    """Delete a user playlist; 404 if the id is unknown."""
    if not pm.delete(id):
        raise HTTPException(status_code=404, detail="Playlist not found")
    return {"status": "ok"}


@router.get("/playlists/{id}")
async def get_playlist(id: str, pm: PlaylistManager = Depends(get_playlist_manager), yt: YouTubeService = Depends(get_youtube_service)):
    """Resolve a playlist id: user playlists first, then YouTube Music."""
    for candidate in pm.get_all():
        if candidate['id'] == id:
            return candidate
    return yt.get_playlist(id)


@router.put("/playlists/{id}")
async def update_user_playlist(id: str, playlist: UpdatePlaylistRequest, pm: PlaylistManager = Depends(get_playlist_manager)):
    """Rename / re-describe a user playlist; 404 if the id is unknown."""
    updated = pm.update(id, name=playlist.name, description=playlist.description)
    if not updated:
        raise HTTPException(status_code=404, detail="Playlist not found")
    return updated


@router.post("/playlists/{id}/tracks")
async def add_track_to_playlist(id: str, track: AddTrackRequest, pm: PlaylistManager = Depends(get_playlist_manager)):
    """Append a track to a user playlist; 404 if the id is unknown."""
    if not pm.add_track(id, track.dict()):
        raise HTTPException(status_code=404, detail="Playlist not found")
    return {"status": "ok"}
|
||||
86
backend/api/endpoints/search.py
Normal file
86
backend/api/endpoints/search.py
Normal file
|
|
@ -0,0 +1,86 @@
|
|||
from fastapi import APIRouter, HTTPException, Depends
|
||||
from backend.services.youtube import YouTubeService
|
||||
from backend.api.schemas import SearchRequest
|
||||
from backend.core.config import settings
|
||||
import json
|
||||
from pathlib import Path
|
||||
|
||||
router = APIRouter()


def get_youtube_service():
    """Dependency provider: a fresh YouTubeService per request."""
    return YouTubeService()


@router.get("/search")
async def search_tracks(query: str, yt: YouTubeService = Depends(get_youtube_service)):
    """Full-text track search, delegated to the YouTube service."""
    return yt.search(query)
|
||||
|
||||
@router.get("/recommendations")
async def get_recommendations(seed_id: str = None, yt: YouTubeService = Depends(get_youtube_service)):
    """Recommendations for a seed track; falls back to trending when unseeded."""
    if seed_id:
        return yt.get_recommendations(seed_id)
    return await get_trending()
|
||||
|
||||
@router.get("/recommendations/albums")
async def get_recommended_albums(seed_artist: str = None, yt: YouTubeService = Depends(get_youtube_service)):
    """Album recommendations seeded by an artist name.

    Results are cached for 24 hours. Returns [] when unseeded or on any
    upstream failure (best-effort endpoint).
    """
    if not seed_artist:
        return []

    # NOTE(review): uses the service's raw client (yt.yt) and cache directly;
    # consider promoting this to a YouTubeService method.
    cache_key = f"rec_albums:{seed_artist.lower().strip()}"
    cached = yt.cache.get(cache_key)
    if cached:
        return cached

    try:
        results = yt.yt.search(seed_artist, filter="albums", limit=10)
        albums = []
        for album in results:
            cover_url = yt._get_high_res_thumbnail(album.get('thumbnails', []))
            # `or` guards: a present-but-None 'year'/'artist' previously raised
            # TypeError on string concatenation, collapsing the whole response to [].
            year = album.get('year') or ''
            artist = album.get('artist') or seed_artist
            albums.append({
                "title": album.get('title', 'Unknown Album'),
                "description": f"{year} • {artist}",
                "cover_url": cover_url,
                "id": album.get('browseId'),
                "type": "Album"
            })
        yt.cache.set(cache_key, albums, ttl_seconds=86400)
        return albums
    except Exception as e:
        print(f"Album Rec Error: {e}")
        return []
|
||||
|
||||
@router.get("/artist/info")
async def get_artist_info(name: str, yt: YouTubeService = Depends(get_youtube_service)):
    """Look up an artist's photo URL (cached for 7 days).

    Always returns {"photo": <url-or-None>} so the frontend can render
    unconditionally.
    """
    if not name:
        return {"photo": None}

    cache_key = f"artist_info:{name.lower().strip()}"
    cached = yt.cache.get(cache_key)
    if cached:
        return cached

    try:
        results = yt.yt.search(name, filter="artists", limit=1)
        if results:
            photo_url = yt._get_high_res_thumbnail(results[0].get('thumbnails', []))
            result = {"photo": photo_url}
            yt.cache.set(cache_key, result, ttl_seconds=86400 * 7)
            return result
        return {"photo": None}
    except Exception as e:
        # Previously swallowed silently; log so lookup failures are visible.
        print(f"Artist Info Error: {e}")
        return {"photo": None}
|
||||
|
||||
@router.get("/trending")
async def get_trending():
    """Serve the pre-fetched trending payload from backend/data.json.

    Returns an explanatory error object when the data file has not been
    generated yet; raises 500 on read/parse failure.
    """
    try:
        data_path = settings.DATA_DIR.parent / "data.json"  # backend/data.json
        if not data_path.exists():
            return {"error": "Trending data not found. Run fetch_data.py first."}
        with open(data_path, "r") as f:
            return json.load(f)
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
|
||||
76
backend/api/endpoints/settings.py
Normal file
76
backend/api/endpoints/settings.py
Normal file
|
|
@ -0,0 +1,76 @@
|
|||
import subprocess
|
||||
import os
|
||||
import sys
|
||||
from fastapi import APIRouter, HTTPException, BackgroundTasks
|
||||
import logging
|
||||
|
||||
router = APIRouter()
logger = logging.getLogger(__name__)


def restart_server():
    """Terminate the current process so an external supervisor restarts it.

    Only effective when a restart policy exists (e.g. Docker `restart: always`);
    otherwise the server simply exits.
    """
    import signal  # local import: keeps module-level deps unchanged

    logger.info("Restarting server...")
    # Named constant instead of the magic number 15.
    os.kill(os.getpid(), signal.SIGTERM)


@router.get("/check")
async def check_settings_health():
    """Debug endpoint to verify settings router is mounted."""
    return {"status": "ok", "message": "Settings router is active"}
|
||||
|
||||
def _pip_upgrade(args, what):
    """Run `python -m pip install --upgrade <args>` against the public PyPI index.

    Shared helper for the two update endpoints (they were near-duplicates).
    Raises subprocess.CalledProcessError on a non-zero exit.
    """
    logger.info(f"Starting {what} update...")
    # Force PyPI index via environment variable to override any global pip config.
    env = os.environ.copy()
    env["PIP_INDEX_URL"] = "https://pypi.org/simple"
    process = subprocess.run(
        [sys.executable, "-m", "pip", "install", "--upgrade", *args],
        capture_output=True,
        text=True,
        check=True,
        env=env
    )
    logger.info(f"Update Output: {process.stdout}")
    return process


@router.post("/update-ytdlp")
async def update_ytdlp(background_tasks: BackgroundTasks):
    """Upgrade yt-dlp to current git master, then schedule a server restart."""
    try:
        _pip_upgrade(
            ["--force-reinstall", "git+https://github.com/yt-dlp/yt-dlp.git@master"],
            "yt-dlp",
        )
        # Restart after the response has been sent.
        background_tasks.add_task(restart_server)
        return {"status": "success", "message": "yt-dlp updated. Server restarting..."}
    except subprocess.CalledProcessError as e:
        logger.error(f"Update Failed: {e.stderr}")
        raise HTTPException(status_code=500, detail=f"Update failed: {e.stderr}")
    except Exception as e:
        logger.error(f"Unexpected Error: {e}")
        raise HTTPException(status_code=500, detail=str(e))


@router.post("/update-spotdl")
async def update_spotdl(background_tasks: BackgroundTasks):
    """Upgrade spotdl from PyPI, then schedule a server restart."""
    try:
        _pip_upgrade(["spotdl"], "spotdl")
        background_tasks.add_task(restart_server)
        return {"status": "success", "message": "spotdl updated. Server restarting..."}
    except subprocess.CalledProcessError as e:
        logger.error(f"Update Failed: {e.stderr}")
        raise HTTPException(status_code=500, detail=f"Update failed: {e.stderr}")
    except Exception as e:
        logger.error(f"Unexpected Error: {e}")
        raise HTTPException(status_code=500, detail=str(e))
|
||||
92
backend/api/endpoints/stream.py
Normal file
92
backend/api/endpoints/stream.py
Normal file
|
|
@ -0,0 +1,92 @@
|
|||
from fastapi import APIRouter, HTTPException, Depends
|
||||
from fastapi.responses import StreamingResponse
|
||||
from backend.services.youtube import YouTubeService
|
||||
import requests
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
def get_youtube_service():
|
||||
return YouTubeService()
|
||||
|
||||
@router.get("/stream")
async def stream_audio(id: str, yt: YouTubeService = Depends(get_youtube_service)):
    """Proxy-stream the audio for a video id.

    Resolves the upstream URL via the service (cached); on a 403 the cache
    entry is invalidated and the URL is re-resolved once before giving up.
    """
    # Fallback UA for legacy cache entries that stored a bare URL string.
    default_headers = {
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36"
    }

    try:
        data = yt.get_stream_url(id)
        if isinstance(data, dict):
            stream_url = data.get("url")
            headers = data.get("headers", {})
        else:
            stream_url = data
            headers = default_headers

        def get_stream(url, headers):
            """Open the upstream audio response in streaming mode."""
            return requests.get(url, headers=headers, stream=True, timeout=10)

        try:
            r = get_stream(stream_url, headers)
            r.raise_for_status()
        except requests.exceptions.HTTPError as e:
            if e.response.status_code != 403:
                raise  # bare raise preserves the original traceback (was `raise e`)
            # Expired/revoked URL: drop the cache entry and retry once fresh.
            print("Got 403 Forbidden. Invalidating cache and retrying...")
            yt.invalidate_stream_cache(id)
            data = yt.get_stream_url(id)
            if isinstance(data, dict):
                stream_url = data.get("url")
                headers = data.get("headers", {})
            else:
                stream_url = data
            r = get_stream(stream_url, headers)
            r.raise_for_status()

        def iterfile():
            # 'r' is already open; forward its chunks and always close it.
            try:
                for chunk in r.iter_content(chunk_size=64 * 1024):
                    yield chunk
            except Exception as e:
                print(f"Chunk Error: {e}")
            finally:
                r.close()

        return StreamingResponse(iterfile(), media_type="audio/mpeg")
    except Exception as e:
        print(f"Stream Error: {e}")
        if isinstance(e, requests.exceptions.HTTPError):
            print(f"Upstream Status: {e.response.status_code}")
        raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
@router.get("/download")
async def download_audio(id: str, title: str = "audio", yt: YouTubeService = Depends(get_youtube_service)):
    """Stream the audio as an attachment download named after the title."""
    try:
        data = yt.get_stream_url(id)
        if isinstance(data, dict):
            stream_url = data.get("url")
            headers = data.get("headers", {})
        else:
            # Legacy cache entries stored a bare URL string.
            stream_url = data
            headers = {
                "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36"
            }

        def iterfile():
            with requests.get(stream_url, headers=headers, stream=True, timeout=10) as r:
                r.raise_for_status()
                yield from r.iter_content(chunk_size=1024 * 1024)

        # Keep only filesystem-safe characters in the download name.
        safe_filename = "".join(c for c in title if c.isalnum() or c in (' ', '-', '_')).strip()
        disposition = {
            "Content-Disposition": f'attachment; filename="{safe_filename}.mp3"'
        }
        return StreamingResponse(iterfile(), media_type="audio/mpeg", headers=disposition)
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
|
||||
|
|
@ -1,826 +0,0 @@
|
|||
from fastapi import APIRouter, HTTPException
|
||||
from fastapi.responses import StreamingResponse
|
||||
from pydantic import BaseModel
|
||||
import json
|
||||
from pathlib import Path
|
||||
import yt_dlp
|
||||
import requests
|
||||
from backend.cache_manager import CacheManager
|
||||
from backend.playlist_manager import PlaylistManager
|
||||
|
||||
import re
|
||||
|
||||
router = APIRouter()
|
||||
cache = CacheManager()
|
||||
playlist_manager = PlaylistManager()
|
||||
|
||||
def get_high_res_thumbnail(thumbnails: list) -> str:
    """Pick the largest thumbnail and upgrade Google-hosted URLs to 544x544.

    YouTube Music thumbnails encode their size as `=w<N>-h<N>...`; rewriting
    the dimensions to 544x544 (the standard YTM high-res size) yields a
    sharper image without another API call. Non-Google URLs are returned
    unchanged; an empty list yields a placeholder image URL.
    """
    if not thumbnails:
        return "https://placehold.co/300x300"

    # ytmusicapi orders thumbnails smallest-to-largest; take the last.
    best_url = thumbnails[-1]['url']

    # Only Google User Content hosts use the size-suffix convention.
    # (Removed: redundant local `import re`, a vacuous `"w" in url and "h" in url`
    # guard, and a dead `elif ...: pass` branch — re.sub is already a no-op
    # when the URL carries no =w<N>-h<N> suffix.)
    if "googleusercontent.com" in best_url or "ggpht.com" in best_url:
        best_url = re.sub(r'=w\d+-h\d+', '=w544-h544', best_url)

    return best_url
|
||||
|
||||
def extract_artist_names(track: dict) -> str:
    """Join a track's artist names into one display string.

    Tolerates ytmusicapi's mixed shapes: each artist entry may be a dict
    with a 'name' key or a bare string. Falls back to "Unknown Artist"
    when nothing usable is present.
    """
    artists = track.get('artists') or []
    if not isinstance(artists, list):
        return "Unknown Artist"

    names = [
        entry.get('name', 'Unknown') if isinstance(entry, dict) else entry
        for entry in artists
        if isinstance(entry, (dict, str))
    ]
    return ", ".join(names) if names else "Unknown Artist"
|
||||
|
||||
def extract_album_name(track: dict, default="Single") -> str:
    """Return the track's album name, tolerating dict / str / missing shapes."""
    album = track.get('album')
    if isinstance(album, str):
        return album
    if isinstance(album, dict):
        return album.get('name', default)
    return default
|
||||
|
||||
def clean_text(text: str) -> str:
    """Normalize scraped display text.

    Strips non-ASCII characters (drops emoji), removes URLs and long runs
    of separator punctuation (***, ___, ===), then trims whitespace.
    Returns "" for falsy input. (Dead brainstorming comments about spam
    keywords and truncation removed — they described work never done here.)
    """
    if not text:
        return ""

    # Drop anything outside ASCII (cheap emoji removal).
    text = text.encode('ascii', 'ignore').decode('ascii')
    # Strip URLs.
    text = re.sub(r'http\S+|www\.\S+', '', text)
    # Strip long separator lines like "*****" or "=====".
    text = re.sub(r'[*_=]{3,}', '', text)

    return text.strip()
|
||||
|
||||
def clean_title(title: str) -> str:
    """Clean a scraped playlist/video title for display.

    Drops non-ASCII characters, removes common spam keywords
    (case-insensitively), collapses whitespace, and strips leftover
    '*' / '-' decoration. Empty input yields the generic "Playlist".
    """
    if not title:
        return "Playlist"

    # Cheap emoji removal.
    title = title.encode('ascii', 'ignore').decode('ascii')

    spam_words = ["Playlist", "Music Chart", "Full SPOTIFY Video", "Updated Weekly", "Official", "Video"]
    for word in spam_words:
        # re.escape: treat the keyword as literal text, not a regex pattern
        # (previously raw words were compiled as patterns).
        title = re.sub(re.escape(word), "", title, flags=re.IGNORECASE)

    # Collapse whitespace runs, then strip residual decoration.
    title = re.sub(r'\s+', ' ', title).strip()
    return title.strip('*- ')
|
||||
|
||||
def clean_description(desc: str) -> str:
    """Sanitize a scraped description: drop URLs and divider lines, cap at 300 chars."""
    if not desc:
        return ""

    cleaned = re.sub(r'http\S+', '', desc)
    cleaned = re.sub(r'[*_=]{3,}', '', cleaned)

    # Long spammy descriptions get truncated with an ellipsis.
    if len(cleaned) > 300:
        cleaned = cleaned[:300] + "..."

    return cleaned.strip()
|
||||
|
||||
CACHE_DIR = Path("backend/cache")


class SearchRequest(BaseModel):
    """Body for search-by-URL requests."""
    url: str


class CreatePlaylistRequest(BaseModel):
    """Body for creating a user playlist."""
    # Field is `name` (not `title`) to match the sidebar form usage.
    name: str
    description: str = ""
||||
|
||||
@router.get("/browse")
|
||||
async def get_browse_content():
|
||||
"""
|
||||
Returns the real fetched playlists from browse_playlists.json
|
||||
"""
|
||||
try:
|
||||
data_path = Path("backend/data/browse_playlists.json")
|
||||
if data_path.exists():
|
||||
with open(data_path, "r") as f:
|
||||
return json.load(f)
|
||||
else:
|
||||
return []
|
||||
except Exception as e:
|
||||
print(f"Browse Error: {e}")
|
||||
return []
|
||||
|
||||
CATEGORIES_MAP = {
|
||||
"Trending Vietnam": {"query": "Top 50 Vietnam", "type": "playlists"},
|
||||
"Just released Songs": {"query": "New Released Songs", "type": "playlists"},
|
||||
"Albums": {"query": "New Albums 2024", "type": "albums"},
|
||||
"Vietnamese DJs": {"query": "Vinahouse Remix", "type": "playlists"},
|
||||
"Global Hits": {"query": "Global Top 50", "type": "playlists"},
|
||||
"Chill Vibes": {"query": "Chill Lofi", "type": "playlists"},
|
||||
"Party Time": {"query": "Party EDM Hits", "type": "playlists"},
|
||||
"Best of Ballad": {"query": "Vietnamese Ballad", "type": "playlists"},
|
||||
"Hip Hop & Rap": {"query": "Vietnamese Rap", "type": "playlists"},
|
||||
}
|
||||
|
||||
@router.get("/browse/category")
|
||||
async def get_browse_category(name: str):
|
||||
"""
|
||||
Fetch live data for a specific category (infinite scroll support).
|
||||
Fetches up to 50-100 items.
|
||||
"""
|
||||
if name not in CATEGORIES_MAP:
|
||||
raise HTTPException(status_code=404, detail="Category not found")
|
||||
|
||||
info = CATEGORIES_MAP[name]
|
||||
query = info["query"]
|
||||
search_type = info["type"]
|
||||
|
||||
# Check Cache
|
||||
cache_key = f"browse_category:{name}"
|
||||
cached = cache.get(cache_key)
|
||||
if cached:
|
||||
return cached
|
||||
|
||||
try:
|
||||
from ytmusicapi import YTMusic
|
||||
yt = YTMusic()
|
||||
|
||||
# Search for more items (e.g. 50)
|
||||
results = yt.search(query, filter=search_type, limit=50)
|
||||
|
||||
category_items = []
|
||||
|
||||
for result in results:
|
||||
item_id = result.get('browseId')
|
||||
if not item_id: continue
|
||||
|
||||
title = result.get('title', 'Unknown')
|
||||
|
||||
# Simple item structure for list view (we don't need full track list for every item immediately)
|
||||
# But frontend expects some structure.
|
||||
|
||||
# Extract basic thumbnails
|
||||
thumbnails = result.get('thumbnails', [])
|
||||
cover_url = get_high_res_thumbnail(thumbnails)
|
||||
|
||||
# description logic
|
||||
description = ""
|
||||
if search_type == "albums":
|
||||
artists_text = ", ".join([a.get('name') for a in result.get('artists', [])])
|
||||
year = result.get('year', '')
|
||||
description = f"Album by {artists_text} • {year}"
|
||||
is_album = True
|
||||
else:
|
||||
is_album = False
|
||||
# For playlists result, description might be missing in search result
|
||||
description = f"Playlist • {result.get('itemCount', '')} tracks"
|
||||
|
||||
category_items.append({
|
||||
"id": item_id,
|
||||
"title": title,
|
||||
"description": description,
|
||||
"cover_url": cover_url,
|
||||
"type": "album" if is_album else "playlist",
|
||||
# Note: We are NOT fetching full tracks for each item here to save speed/quota.
|
||||
# The frontend only needs cover, title, description, id.
|
||||
# Tracks are fetched when user clicks the item (via get_playlist).
|
||||
"tracks": []
|
||||
})
|
||||
|
||||
cache.set(cache_key, category_items, ttl_seconds=3600) # Cache for 1 hour
|
||||
return category_items
|
||||
|
||||
except Exception as e:
|
||||
print(f"Category Fetch Error: {e}")
|
||||
return []
|
||||
|
||||
@router.get("/playlists")
|
||||
async def get_user_playlists():
|
||||
return playlist_manager.get_all()
|
||||
|
||||
@router.post("/playlists")
|
||||
async def create_user_playlist(playlist: CreatePlaylistRequest):
|
||||
return playlist_manager.create(playlist.name, playlist.description)
|
||||
|
||||
@router.delete("/playlists/{id}")
|
||||
async def delete_user_playlist(id: str):
|
||||
success = playlist_manager.delete(id)
|
||||
if not success:
|
||||
raise HTTPException(status_code=404, detail="Playlist not found")
|
||||
return {"status": "ok"}
|
||||
|
||||
@router.get("/playlists/{id}")
|
||||
async def get_playlist(id: str):
|
||||
"""
|
||||
Get a specific playlist by ID.
|
||||
1. Check if it's a User Playlist.
|
||||
2. If not, fetch from YouTube Music (Browse/External).
|
||||
"""
|
||||
# 1. Try User Playlist
|
||||
user_playlists = playlist_manager.get_all()
|
||||
user_playlist = next((p for p in user_playlists if p['id'] == id), None)
|
||||
if user_playlist:
|
||||
return user_playlist
|
||||
|
||||
# 2. Try External (YouTube Music)
|
||||
# Check Cache first
|
||||
cache_key = f"playlist:{id}"
|
||||
cached_playlist = cache.get(cache_key)
|
||||
if cached_playlist:
|
||||
return cached_playlist
|
||||
|
||||
try:
|
||||
from ytmusicapi import YTMusic
|
||||
yt = YTMusic()
|
||||
|
||||
playlist_data = None
|
||||
is_album = False
|
||||
|
||||
if id.startswith("MPREb"):
|
||||
try:
|
||||
playlist_data = yt.get_album(id)
|
||||
is_album = True
|
||||
except Exception as e:
|
||||
print(f"DEBUG: get_album(1) failed: {e}")
|
||||
pass
|
||||
|
||||
if not playlist_data:
|
||||
try:
|
||||
# ytmusicapi returns a dict with 'tracks' list
|
||||
playlist_data = yt.get_playlist(id, limit=100)
|
||||
except Exception as e:
|
||||
print(f"DEBUG: get_playlist failed: {e}")
|
||||
import traceback, sys
|
||||
traceback.print_exc(file=sys.stdout)
|
||||
# Fallback: Try as album if not tried yet
|
||||
if not is_album:
|
||||
try:
|
||||
playlist_data = yt.get_album(id)
|
||||
is_album = True
|
||||
except Exception as e2:
|
||||
print(f"DEBUG: get_album(2) failed: {e2}")
|
||||
traceback.print_exc(file=sys.stdout)
|
||||
raise e # Re-raise if both fail
|
||||
|
||||
if not isinstance(playlist_data, dict):
|
||||
print(f"DEBUG: Validation Failed! playlist_data type: {type(playlist_data)}", flush=True)
|
||||
raise ValueError(f"Invalid playlist_data: {playlist_data}")
|
||||
|
||||
# Format to match our app's Protocol
|
||||
formatted_tracks = []
|
||||
if 'tracks' in playlist_data:
|
||||
for track in playlist_data['tracks']:
|
||||
artist_names = extract_artist_names(track)
|
||||
|
||||
# Safely extract thumbnails
|
||||
thumbnails = track.get('thumbnails', [])
|
||||
if not thumbnails and is_album:
|
||||
# Albums sometimes have thumbnails at root level, not per track
|
||||
thumbnails = playlist_data.get('thumbnails', [])
|
||||
|
||||
cover_url = get_high_res_thumbnail(thumbnails)
|
||||
|
||||
# Safely extract album
|
||||
album_name = extract_album_name(track, playlist_data.get('title', 'Single'))
|
||||
|
||||
formatted_tracks.append({
|
||||
"title": track.get('title', 'Unknown Title'),
|
||||
"artist": artist_names,
|
||||
"album": album_name,
|
||||
"duration": track.get('duration_seconds', track.get('length_seconds', 0)),
|
||||
"cover_url": cover_url,
|
||||
"id": track.get('videoId'),
|
||||
"url": f"https://music.youtube.com/watch?v={track.get('videoId')}"
|
||||
})
|
||||
|
||||
# Get Playlist Cover (usually highest res)
|
||||
thumbnails = playlist_data.get('thumbnails', [])
|
||||
p_cover = get_high_res_thumbnail(thumbnails)
|
||||
|
||||
# Safely extract author/artists
|
||||
author = "YouTube Music"
|
||||
if is_album:
|
||||
artists = playlist_data.get('artists', [])
|
||||
names = []
|
||||
for a in artists:
|
||||
if isinstance(a, dict): names.append(a.get('name', 'Unknown'))
|
||||
elif isinstance(a, str): names.append(a)
|
||||
author = ", ".join(names)
|
||||
else:
|
||||
author_data = playlist_data.get('author', {})
|
||||
if isinstance(author_data, dict):
|
||||
author = author_data.get('name', 'YouTube Music')
|
||||
else:
|
||||
author = str(author_data)
|
||||
|
||||
formatted_playlist = {
|
||||
"id": playlist_data.get('browseId', playlist_data.get('id')),
|
||||
"title": clean_title(playlist_data.get('title', 'Unknown')),
|
||||
"description": clean_description(playlist_data.get('description', '')),
|
||||
"author": author,
|
||||
"cover_url": p_cover,
|
||||
"tracks": formatted_tracks
|
||||
}
|
||||
|
||||
# Cache it (1 hr)
|
||||
cache.set(cache_key, formatted_playlist, ttl_seconds=3600)
|
||||
return formatted_playlist
|
||||
|
||||
except Exception as e:
|
||||
import traceback
|
||||
print(f"Playlist Fetch Error (NEW CODE): {e}", flush=True)
|
||||
print(traceback.format_exc(), flush=True)
|
||||
try:
|
||||
print(f"Playlist Data Type: {type(playlist_data)}")
|
||||
if 'tracks' in playlist_data and playlist_data['tracks']:
|
||||
print(f"First Track Type: {type(playlist_data['tracks'][0])}")
|
||||
except:
|
||||
pass
|
||||
raise HTTPException(status_code=404, detail="Playlist not found")
|
||||
|
||||
class UpdatePlaylistRequest(BaseModel):
    """Partial-update payload for a user playlist.

    A field left as None means "do not change this attribute".
    """
    # Fix: the fields are nullable, so annotate them as such instead of
    # declaring `str` with a None default.
    name: str | None = None
    description: str | None = None
|
||||
|
||||
@router.put("/playlists/{id}")
async def update_user_playlist(id: str, playlist: UpdatePlaylistRequest):
    """Apply a partial update (name and/or description) to a local playlist.

    Raises HTTP 404 when the playlist id is unknown to the manager.
    """
    result = playlist_manager.update(
        id,
        name=playlist.name,
        description=playlist.description,
    )
    if not result:
        raise HTTPException(status_code=404, detail="Playlist not found")
    return result
|
||||
|
||||
class AddTrackRequest(BaseModel):
    """Payload describing one track to append to a user playlist."""
    id: str             # YouTube video id
    title: str
    artist: str
    album: str
    cover_url: str
    duration: int = 0   # seconds; 0 when unknown
    url: str = ""       # canonical YT Music watch URL, optional
|
||||
|
||||
@router.post("/playlists/{id}/tracks")
async def add_track_to_playlist(id: str, track: AddTrackRequest):
    """Append a track to a local user playlist.

    Raises HTTP 404 when the playlist id does not exist.
    """
    # Pydantic v2 (pinned in requirements): .dict() is deprecated,
    # .model_dump() is the supported replacement.
    track_data = track.model_dump()
    success = playlist_manager.add_track(id, track_data)
    if not success:
        raise HTTPException(status_code=404, detail="Playlist not found")
    return {"status": "ok"}
|
||||
|
||||
|
||||
@router.get("/search")
async def search_tracks(query: str):
    """Search YouTube Music for songs matching `query`.

    Returns {"tracks": [...]} — results are cached for 24 hours.
    Raises HTTP 500 on upstream API failure.
    """
    if not query:
        # Fix: previously returned a bare [], a different shape from the
        # success payload, which broke clients reading `.tracks`.
        return {"tracks": []}

    # Check Cache
    cache_key = f"search:{query.lower().strip()}"
    cached_result = cache.get(cache_key)
    if cached_result:
        print(f"DEBUG: Returning cached search results for '{query}'")
        return cached_result

    try:
        from ytmusicapi import YTMusic
        yt = YTMusic()
        results = yt.search(query, filter="songs", limit=20)

        tracks = []
        for track in results:
            artist_names = extract_artist_names(track)
            # Safely extract thumbnails / album
            cover_url = get_high_res_thumbnail(track.get('thumbnails', []))
            album_name = extract_album_name(track, "Single")

            tracks.append({
                "title": track.get('title', 'Unknown Title'),
                "artist": artist_names,
                "album": album_name,
                "duration": track.get('duration_seconds', 0),
                "cover_url": cover_url,
                "id": track.get('videoId'),
                "url": f"https://music.youtube.com/watch?v={track.get('videoId')}"
            })

        response_data = {"tracks": tracks}
        # Cache for 24 hours (86400 seconds)
        cache.set(cache_key, response_data, ttl_seconds=86400)
        return response_data

    except Exception as e:
        print(f"Search Error: {e}")
        raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
@router.get("/recommendations")
async def get_recommendations(seed_id: str = None):
    """Recommend tracks from the 'Up Next' / radio queue of a seed video.

    Without a seed_id (no play history yet) falls back to the trending
    playlist. Per-seed results are cached for one hour. On any upstream
    error, degrades gracefully to trending instead of failing.
    """
    try:
        from ytmusicapi import YTMusic
        yt = YTMusic()

        if not seed_id:
            # No listening history available -> serve trending instead.
            return await get_trending()

        cache_key = f"rec:{seed_id}"
        cached = cache.get(cache_key)
        if cached:
            return cached

        # Radio / 'Up Next' tracks for the seed video.
        watch_playlist = yt.get_watch_playlist(videoId=seed_id, limit=20)

        tracks = []
        if 'tracks' in watch_playlist:
            seen_ids = {seed_id}  # never recommend the seed itself
            for item in watch_playlist['tracks']:
                video_id = item.get('videoId')
                # De-duplicate and drop entries without a playable id.
                if not video_id or video_id in seen_ids:
                    continue
                seen_ids.add(video_id)

                cover_url = get_high_res_thumbnail(
                    item.get('thumbnails') or item.get('thumbnail') or []
                )
                tracks.append({
                    "title": item.get('title', 'Unknown Title'),
                    "artist": extract_artist_names(item),
                    "album": extract_album_name(item, "Single"),
                    "duration": item.get('length_seconds', item.get('duration_seconds', 0)),
                    "cover_url": cover_url,
                    "id": video_id,
                    "url": f"https://music.youtube.com/watch?v={video_id}"
                })

        response_data = {"tracks": tracks}
        cache.set(cache_key, response_data, ttl_seconds=3600)  # 1 hour cache
        return response_data

    except Exception as e:
        print(f"Recommendation Error: {e}")
        # Fallback to trending on error
        return await get_trending()
|
||||
|
||||
@router.get("/recommendations/albums")
async def get_recommended_albums(seed_artist: str = None):
    """Recommend albums by searching YT Music for the given artist name.

    Returns a list of album cards (empty when no artist given or on
    upstream failure). Cached per-artist for 24 hours.
    """
    if not seed_artist:
        return []

    cache_key = f"rec_albums:{seed_artist.lower().strip()}"
    cached = cache.get(cache_key)
    if cached:
        return cached

    try:
        from ytmusicapi import YTMusic
        yt = YTMusic()

        # Search for albums by this artist
        results = yt.search(seed_artist, filter="albums", limit=10)

        albums = []
        for album in results:
            cover_url = get_high_res_thumbnail(album.get('thumbnails', []))

            # Fix: .get('year', '') still yields None when the key exists
            # with a None value, making the string concatenation raise
            # TypeError. Coalesce both parts explicitly.
            year = album.get('year') or ''
            artist = album.get('artist') or seed_artist
            albums.append({
                "title": album.get('title', 'Unknown Album'),
                "description": year + " • " + artist,
                "cover_url": cover_url,
                "id": album.get('browseId'),
                "type": "Album"
            })

        cache.set(cache_key, albums, ttl_seconds=86400)
        return albums

    except Exception as e:
        print(f"Album Rec Error: {e}")
        return []
|
||||
|
||||
@router.get("/artist/info")
async def get_artist_info(name: str):
    """Look up an artist's profile photo by name (first search hit).

    Returns {"photo": url-or-None}; successful lookups are cached for a week.
    """
    if not name:
        return {"photo": None}

    cache_key = f"artist_info:{name.lower().strip()}"
    cached = cache.get(cache_key)
    if cached:
        return cached

    try:
        from ytmusicapi import YTMusic
        yt = YTMusic()

        matches = yt.search(name, filter="artists", limit=1)
        if not matches:
            return {"photo": None}

        top_hit = matches[0]
        photo_url = get_high_res_thumbnail(top_hit.get('thumbnails', []))
        result = {"photo": photo_url}

        cache.set(cache_key, result, ttl_seconds=86400 * 7)  # Cache for 1 week
        return result
    except Exception as e:
        print(f"Artist Info Error: {e}")
        return {"photo": None}
|
||||
|
||||
@router.get("/trending")
async def get_trending():
    """Serve the pre-fetched Trending Vietnam playlist from disk.

    The JSON file is produced offline by fetch_data.py. If it is missing,
    an explanatory error payload is returned instead of raising.
    """
    try:
        data_path = Path("backend/data.json")
        if data_path.exists():
            # Fix: explicit encoding — the trending data contains
            # Vietnamese text and the platform default codec may not
            # be UTF-8 (e.g. on Windows).
            with open(data_path, "r", encoding="utf-8") as f:
                return json.load(f)
        else:
            return {"error": "Trending data not found. Run fetch_data.py first."}
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
@router.get("/stream")
async def stream_audio(id: str):
    """Proxy-stream audio for a YouTube video id.

    Resolves a direct media URL with yt-dlp (cached for ~1 h, since
    googlevideo URLs expire) and relays the bytes to the client.
    """
    try:
        cache_key = f"stream:{id}"
        stream_url = cache.get(cache_key)

        if stream_url:
            print(f"DEBUG: Using cached stream URL for '{id}'")
        else:
            print(f"DEBUG: Fetching new stream URL for '{id}'")
            watch_url = f"https://www.youtube.com/watch?v={id}"
            ydl_opts = {
                # Prefer m4a/aac so iOS Safari can play natively.
                'format': 'bestaudio[ext=m4a]/best[ext=mp4]/best',
                'quiet': True,
                'noplaylist': True,
            }

            # Extract the direct media URL without downloading.
            with yt_dlp.YoutubeDL(ydl_opts) as ydl:
                info = ydl.extract_info(watch_url, download=False)
                stream_url = info.get('url')

            if stream_url:
                # Direct URLs expire; keep them for only an hour.
                cache.set(cache_key, stream_url, ttl_seconds=3600)

        if not stream_url:
            raise HTTPException(status_code=404, detail="Audio stream not found")

        def iterfile():
            # Best effort: if the (possibly cached) URL has gone stale the
            # upstream returns 403 and raise_for_status aborts the stream.
            with requests.get(stream_url, stream=True) as upstream:
                upstream.raise_for_status()
                # 64 KB chunks keep time-to-first-byte low.
                for chunk in upstream.iter_content(chunk_size=64 * 1024):
                    yield chunk

        # Content may actually be opus/webm; browsers sniff the real type.
        return StreamingResponse(iterfile(), media_type="audio/mpeg")

    except Exception as e:
        print(f"Stream Error: {e}")
        # If a cached URL failed (likely 403) we could invalidate it here,
        # but for now we just surface the error.
        raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
@router.get("/download")
async def download_audio(id: str, title: str = "audio"):
    """Download audio for a YouTube video id as a file attachment.

    Resolves (and caches) the direct media URL, then proxies the bytes
    with a Content-Disposition header so browsers save it as a file.
    """
    try:
        # Check Cache for stream URL (shared with /stream).
        cache_key = f"stream:{id}"
        stream_url = cache.get(cache_key)

        if not stream_url:
            url = f"https://www.youtube.com/watch?v={id}"
            ydl_opts = {
                'format': 'bestaudio/best',
                'quiet': True,
                'noplaylist': True,
            }
            with yt_dlp.YoutubeDL(ydl_opts) as ydl:
                info = ydl.extract_info(url, download=False)
                stream_url = info.get('url')

            if stream_url:
                # Direct URLs expire — cache for one hour only.
                cache.set(cache_key, stream_url, ttl_seconds=3600)

        if not stream_url:
            raise HTTPException(status_code=404, detail="Audio stream not found")

        # Stream the content with attachment header
        def iterfile():
            with requests.get(stream_url, stream=True) as r:
                r.raise_for_status()
                for chunk in r.iter_content(chunk_size=1024 * 1024):
                    yield chunk

        # Keep only filesystem-safe characters for the filename.
        safe_filename = "".join(
            c for c in title if c.isalnum() or c in (' ', '-', '_')
        ).strip()
        if not safe_filename:
            # Fix: an all-symbol title previously produced the filename ".mp3".
            safe_filename = "audio"
        headers = {
            "Content-Disposition": f'attachment; filename="{safe_filename}.mp3"'
        }

        return StreamingResponse(iterfile(), media_type="audio/mpeg", headers=headers)

    except Exception as e:
        print(f"Download Error: {e}")
        raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
@router.get("/lyrics")
async def get_lyrics(id: str, title: str = None, artist: str = None):
    """Fetch synchronized lyrics for a video, trying providers in order.

    Provider hierarchy:
      1. Cache (fastest)
      2. yt-dlp official/auto captions (best sync for the exact video)
      3. LRCLIB search API (open database, good fuzzy matching)
      4. syncedlyrics aggregator (Musixmatch/NetEase, widest coverage)
    Returns a list of {"time": seconds, "text": line} dicts (possibly empty).
    """
    if not id:
        return []

    cache_key = f"lyrics:{id}"
    cached_lyrics = cache.get(cache_key)
    if cached_lyrics:
        return cached_lyrics

    parsed_lines = []

    # Providers do blocking network/file IO; run them off the event loop.
    from starlette.concurrency import run_in_threadpool
    import syncedlyrics

    try:
        # --- Strategy 1: yt-dlp (Official Captions) ---
        def fetch_ytdlp_subs():
            parsed = []
            try:
                lyrics_dir = CACHE_DIR / "lyrics"
                lyrics_dir.mkdir(parents=True, exist_ok=True)
                out_tmpl = str(lyrics_dir / f"{id}")
                ydl_opts = {
                    'skip_download': True, 'writesubtitles': True, 'writeautomaticsub': True,
                    'subtitleslangs': ['en', 'vi'], 'subtitlesformat': 'json3',
                    'outtmpl': out_tmpl, 'quiet': True
                }
                url = f"https://www.youtube.com/watch?v={id}"
                import glob
                with yt_dlp.YoutubeDL(ydl_opts) as ydl:
                    ydl.download([url])

                pattern = str(lyrics_dir / f"{id}.*.json3")
                found_files = glob.glob(pattern)
                if found_files:
                    # Prefer the English track when several languages exist.
                    best_file = next(
                        (f for f in found_files if f.endswith(f"{id}.en.json3")),
                        found_files[0],
                    )
                    with open(best_file, 'r', encoding='utf-8') as f:
                        data = json.load(f)
                        for event in data.get('events', []):
                            if 'segs' in event and 'tStartMs' in event:
                                text = "".join(s.get('utf8', '') for s in event['segs']).strip()
                                # Drop stage directions ("[Music]") and blanks.
                                if text and not text.startswith('[') and text != '\n':
                                    parsed.append({"time": float(event['tStartMs']) / 1000.0, "text": text})
            except Exception as e:
                print(f"yt-dlp sub error: {e}")
            return parsed

        parsed_lines = await run_in_threadpool(fetch_ytdlp_subs)

        # --- Strategy 2: LRCLIB (Search API) ---
        if not parsed_lines and title and artist:
            print(f"Trying LRCLIB Search for: {title} {artist}")

            def fetch_lrclib():
                try:
                    # Use the fuzzy /search endpoint rather than exact /get.
                    cleaned_title = re.sub(r'\(.*?\)', '', title)
                    clean_query = f"{artist} {cleaned_title}".strip()
                    resp = requests.get("https://lrclib.net/api/search", params={"q": clean_query}, timeout=5)
                    if resp.status_code == 200:
                        # First hit with timestamped lyrics wins.
                        for item in resp.json():
                            if item.get("syncedLyrics"):
                                return parse_lrc_string(item["syncedLyrics"])
                except Exception as e:
                    print(f"LRCLIB error: {e}")
                return []

            parsed_lines = await run_in_threadpool(fetch_lrclib)

        # --- Strategy 3: syncedlyrics (Aggregator) ---
        if not parsed_lines and title and artist:
            print(f"Trying SyncedLyrics Aggregator for: {title} {artist}")

            def fetch_syncedlyrics():
                try:
                    # syncedlyrics.search returns an LRC string or None.
                    clean_query = f"{title} {artist}".strip()
                    lrc_str = syncedlyrics.search(clean_query)
                    if lrc_str:
                        return parse_lrc_string(lrc_str)
                except Exception as e:
                    print(f"SyncedLyrics error: {e}")
                return []

            parsed_lines = await run_in_threadpool(fetch_syncedlyrics)

        # Cache successful lookups for 30 days.
        if parsed_lines:
            cache.set(cache_key, parsed_lines, ttl_seconds=86400 * 30)
            return parsed_lines

        return []

    except Exception as e:
        print(f"Global Lyrics Error: {e}")
        return []
|
||||
|
||||
def parse_lrc_string(lrc_content: str):
    """Parse an LRC document into [{"time": seconds, "text": line}, ...].

    Lines look like "[mm:ss.xx] Text"; untimed or empty-text lines are
    skipped.
    """
    parsed = []
    if not lrc_content:
        return parsed

    timestamp = re.compile(r'\[(\d+):(\d+\.?\d*)\](.*)')
    for raw_line in lrc_content.split('\n'):
        match = timestamp.search(raw_line)
        if not match:
            continue
        minutes, seconds, text = match.groups()
        text = text.strip()
        if text:
            parsed.append({"time": float(minutes) * 60 + float(seconds), "text": text})
    return parsed
|
||||
22
backend/api/schemas.py
Normal file
22
backend/api/schemas.py
Normal file
|
|
@ -0,0 +1,22 @@
|
|||
from pydantic import BaseModel
|
||||
from typing import Optional
|
||||
|
||||
class SearchRequest(BaseModel):
|
||||
url: str
|
||||
|
||||
class CreatePlaylistRequest(BaseModel):
|
||||
name: str
|
||||
description: str = ""
|
||||
|
||||
class UpdatePlaylistRequest(BaseModel):
    """Partial playlist update; a None field means "leave unchanged"."""
    # Fix: the fields are nullable — annotate them Optional (the module
    # already imports Optional but did not use it here).
    name: Optional[str] = None
    description: Optional[str] = None
|
||||
|
||||
class AddTrackRequest(BaseModel):
|
||||
id: str
|
||||
title: str
|
||||
artist: str
|
||||
album: str
|
||||
cover_url: str
|
||||
duration: int = 0
|
||||
url: str = ""
|
||||
22
backend/core/config.py
Normal file
22
backend/core/config.py
Normal file
|
|
@ -0,0 +1,22 @@
|
|||
from pydantic_settings import BaseSettings
|
||||
from pathlib import Path
|
||||
|
||||
class Settings(BaseSettings):
    """Application configuration, overridable via environment / .env file."""
    APP_NAME: str = "Spotify Clone Backend"
    API_V1_STR: str = "/api"
    CACHE_DIR: Path = Path("backend/cache")
    DATA_DIR: Path = Path("backend/data")

    # Origins allowed through the CORS middleware (dev hosts + LAN test IPs).
    BACKEND_CORS_ORIGINS: list[str] = [
        "http://localhost:3000",
        "http://127.0.0.1:3000",
        "http://192.168.1.5",  # Common local IP for testing
        "http://192.168.1.13",
    ]

    class Config:
        case_sensitive = True
        env_file = ".env"


# Single shared settings instance for the whole backend.
settings = Settings()
|
||||
8
backend/core/exceptions.py
Normal file
8
backend/core/exceptions.py
Normal file
|
|
@ -0,0 +1,8 @@
|
|||
class BackendException(Exception):
    """Base class for all backend-domain errors."""


class ResourceNotFound(BackendException):
    """A requested entity (playlist, track, ...) does not exist."""


class ExternalAPIError(BackendException):
    """An upstream service (YouTube, LRCLIB, ...) failed."""
|
||||
|
|
@ -1,45 +1,50 @@
|
|||
from fastapi import FastAPI
|
||||
from fastapi import FastAPI, APIRouter
|
||||
from fastapi.middleware.cors import CORSMiddleware
|
||||
from backend.api.routes import router as api_router
|
||||
from fastapi.staticfiles import StaticFiles
|
||||
from fastapi.responses import FileResponse
|
||||
import os
|
||||
|
||||
app = FastAPI(title="Spotify Clone Backend")
|
||||
from backend.core.config import settings as settings_config # Renamed to settings_config to avoid conflict
|
||||
from backend.api.endpoints import playlists, search, stream, lyrics, settings as settings_router, browse # Aliased settings router
|
||||
|
||||
app = FastAPI(title=settings_config.APP_NAME, openapi_url=f"{settings_config.API_V1_STR}/openapi.json") # Used settings_config
|
||||
|
||||
# CORS setup
|
||||
origins = [
|
||||
"http://localhost:3000",
|
||||
"http://127.0.0.1:3000",
|
||||
]
|
||||
|
||||
app.add_middleware(
|
||||
CORSMiddleware,
|
||||
allow_origins=origins,
|
||||
allow_origins=settings_config.BACKEND_CORS_ORIGINS, # Used settings_config
|
||||
allow_credentials=True,
|
||||
allow_methods=["*"],
|
||||
allow_headers=["*"],
|
||||
)
|
||||
|
||||
app.include_router(api_router, prefix="/api")
|
||||
# Include Routers
|
||||
api_router = APIRouter()
|
||||
api_router.include_router(playlists.router, prefix="/playlists", tags=["playlists"])
|
||||
api_router.include_router(search.router, tags=["search"])
|
||||
api_router.include_router(stream.router, tags=["stream"])
|
||||
api_router.include_router(lyrics.router, tags=["lyrics"])
|
||||
api_router.include_router(browse.router, tags=["browse"])
|
||||
api_router.include_router(settings_router.router, prefix="/settings", tags=["settings"]) # Included settings_router
|
||||
|
||||
from fastapi.staticfiles import StaticFiles
|
||||
from fastapi.responses import FileResponse
|
||||
app.include_router(api_router, prefix=settings_config.API_V1_STR) # Corrected prefix and removed extra tags
|
||||
|
||||
# Serve Static Frontend (Production Mode)
|
||||
STATIC_DIR = "static"
|
||||
if settings_config.CACHE_DIR.parent.name == "backend":
|
||||
# assuming running from root
|
||||
STATIC_DIR = "static"
|
||||
else:
|
||||
STATIC_DIR = "../static"
|
||||
|
||||
if os.path.exists(STATIC_DIR):
|
||||
app.mount("/_next", StaticFiles(directory=os.path.join(STATIC_DIR, "_next")), name="next_assets")
|
||||
|
||||
# Serve other static files (favicons etc) if they exist in root of static
|
||||
# Or just fallback everything else to index.html for SPA
|
||||
|
||||
@app.get("/{full_path:path}")
|
||||
async def serve_spa(full_path: str):
|
||||
# Check if file exists in static folder
|
||||
file_path = os.path.join(STATIC_DIR, full_path)
|
||||
if os.path.isfile(file_path):
|
||||
return FileResponse(file_path)
|
||||
|
||||
# Otherwise return index.html
|
||||
index_path = os.path.join(STATIC_DIR, "index.html")
|
||||
if os.path.exists(index_path):
|
||||
return FileResponse(index_path)
|
||||
|
|
@ -53,3 +58,16 @@ else:
|
|||
@app.get("/health")
|
||||
def health_check():
|
||||
return {"status": "ok"}
|
||||
|
||||
from fastapi import Request
|
||||
from fastapi.responses import JSONResponse
|
||||
|
||||
@app.exception_handler(Exception)
|
||||
async def global_exception_handler(request: Request, exc: Exception):
|
||||
import traceback
|
||||
error_detail = "".join(traceback.format_exception(None, exc, exc.__traceback__))
|
||||
print(f"Global 500 Error: {error_detail}")
|
||||
return JSONResponse(
|
||||
status_code=500,
|
||||
content={"message": "Internal Server Error", "detail": error_detail}
|
||||
)
|
||||
|
|
|
|||
|
|
@ -4,6 +4,7 @@ spotdl
|
|||
pydantic==2.10.4
|
||||
python-multipart==0.0.20
|
||||
requests==2.32.3
|
||||
yt-dlp==2024.12.23
|
||||
yt-dlp @ git+https://github.com/yt-dlp/yt-dlp.git@master
|
||||
ytmusicapi==1.9.1
|
||||
syncedlyrics
|
||||
pydantic-settings
|
||||
|
|
|
|||
0
backend/services/__init__.py
Normal file
0
backend/services/__init__.py
Normal file
103
backend/services/lyrics.py
Normal file
103
backend/services/lyrics.py
Normal file
|
|
@ -0,0 +1,103 @@
|
|||
import json
|
||||
import re
|
||||
import requests
|
||||
import yt_dlp
|
||||
import syncedlyrics
|
||||
from starlette.concurrency import run_in_threadpool
|
||||
from backend.core.cache import CacheManager
|
||||
from backend.core.config import settings
|
||||
|
||||
class LyricsService:
    """Fetch and cache synchronized lyrics from several providers.

    Lookup order: disk cache -> yt-dlp captions -> LRCLIB search ->
    syncedlyrics aggregator. Results are [{"time": seconds, "text": str}].
    """

    def __init__(self):
        self.cache = CacheManager(str(settings.CACHE_DIR))
        # Subtitle files downloaded by yt-dlp land here.
        self.lyrics_cache_dir = settings.CACHE_DIR / "lyrics"
        self.lyrics_cache_dir.mkdir(parents=True, exist_ok=True)

    def _parse_lrc_string(self, lrc_string: str):
        """Parse LRC text ("[mm:ss.xx] line") into timed entries.

        Fix: the previous pattern required a fractional seconds part and so
        silently dropped "[mm:ss]" lines; it also kept empty-text entries.
        The pattern now allows an optional fraction and skips blank lines,
        matching the parser used by the API routes.
        """
        parsed = []
        for line in lrc_string.split('\n'):
            match = re.search(r'\[(\d+):(\d+\.?\d*)\](.*)', line)
            if match:
                minutes = int(match.group(1))
                seconds = float(match.group(2))
                text = match.group(3).strip()
                if text:
                    parsed.append({"time": minutes * 60 + seconds, "text": text})
        return parsed

    async def get_lyrics(self, id: str, title: str = None, artist: str = None):
        """Return synced lyrics for a video id, trying providers in order."""
        if not id:
            return []

        cache_key = f"lyrics:{id}"
        cached = self.cache.get(cache_key)
        if cached:
            return cached

        parsed_lines = []

        # Strategy 1: yt-dlp (official/auto captions for this exact video)
        def fetch_ytdlp():
            parsed = []
            try:
                out_tmpl = str(self.lyrics_cache_dir / f"{id}")
                ydl_opts = {
                    'skip_download': True, 'writesubtitles': True, 'writeautomaticsub': True,
                    'subtitleslangs': ['en', 'vi'], 'subtitlesformat': 'json3',
                    'outtmpl': out_tmpl, 'quiet': True
                }
                url = f"https://www.youtube.com/watch?v={id}"
                import glob
                with yt_dlp.YoutubeDL(ydl_opts) as ydl:
                    ydl.download([url])

                pattern = str(self.lyrics_cache_dir / f"{id}.*.json3")
                found_files = glob.glob(pattern)
                if found_files:
                    # Prefer the English track when several languages exist.
                    best_file = next((f for f in found_files if f.endswith(f"{id}.en.json3")), found_files[0])
                    with open(best_file, 'r', encoding='utf-8') as f:
                        data = json.load(f)
                        for event in data.get('events', []):
                            if 'segs' in event and 'tStartMs' in event:
                                text = "".join([s.get('utf8', '') for s in event['segs']]).strip()
                                # Drop stage directions ("[Music]") and blanks.
                                if text and not text.startswith('[') and text != '\n':
                                    parsed.append({"time": float(event['tStartMs']) / 1000.0, "text": text})
            except Exception as e:
                print(f"yt-dlp sub error: {e}")
            return parsed

        parsed_lines = await run_in_threadpool(fetch_ytdlp)

        if not parsed_lines and title and artist:
            # Strategy 2: LRCLIB fuzzy search
            def fetch_lrclib():
                try:
                    cleaned_title = re.sub(r'\(.*?\)', '', title)
                    clean_query = f"{artist} {cleaned_title}".strip()
                    resp = requests.get("https://lrclib.net/api/search", params={"q": clean_query}, timeout=5)
                    if resp.status_code == 200:
                        results = resp.json()
                        # First hit with timestamped lyrics wins.
                        for item in results:
                            if item.get("syncedLyrics"):
                                return self._parse_lrc_string(item["syncedLyrics"])
                except Exception:
                    pass
                return []

            parsed_lines = await run_in_threadpool(fetch_lrclib)

        if not parsed_lines and title and artist:
            # Strategy 3: syncedlyrics aggregator (Musixmatch/NetEase)
            def fetch_syncedlyrics():
                try:
                    clean_query = f"{title} {artist}".strip()
                    lrc_str = syncedlyrics.search(clean_query)
                    if lrc_str:
                        return self._parse_lrc_string(lrc_str)
                except Exception:
                    pass
                return []

            parsed_lines = await run_in_threadpool(fetch_syncedlyrics)

        if parsed_lines:
            self.cache.set(cache_key, parsed_lines, ttl_seconds=86400)

        return parsed_lines
|
||||
411
backend/services/youtube.py
Normal file
411
backend/services/youtube.py
Normal file
|
|
@ -0,0 +1,411 @@
|
|||
import re
|
||||
import json
|
||||
import requests
|
||||
import yt_dlp
|
||||
from ytmusicapi import YTMusic
|
||||
from backend.core.cache import CacheManager
|
||||
from backend.core.config import settings
|
||||
from backend.core.exceptions import ResourceNotFound, ExternalAPIError
|
||||
|
||||
class YouTubeService:
|
||||
def __init__(self):
    """Create an anonymous YTMusic client and a disk-backed cache."""
    self.yt = YTMusic()
    self.cache = CacheManager(str(settings.CACHE_DIR))
|
||||
|
||||
def _get_high_res_thumbnail(self, thumbnails: list) -> str:
|
||||
if not thumbnails:
|
||||
return "https://placehold.co/300x300"
|
||||
|
||||
best_url = thumbnails[-1]['url']
|
||||
|
||||
if "googleusercontent.com" in best_url or "ggpht.com" in best_url:
|
||||
if "w" in best_url and "h" in best_url:
|
||||
best_url = re.sub(r'=w\d+-h\d+', '=w544-h544', best_url)
|
||||
return best_url
|
||||
|
||||
def _extract_artist_names(self, track: dict) -> str:
|
||||
artists = track.get('artists') or []
|
||||
if isinstance(artists, list):
|
||||
names = []
|
||||
for a in artists:
|
||||
if isinstance(a, dict):
|
||||
names.append(a.get('name', 'Unknown'))
|
||||
elif isinstance(a, str):
|
||||
names.append(a)
|
||||
return ", ".join(names) if names else "Unknown Artist"
|
||||
return "Unknown Artist"
|
||||
|
||||
def _extract_album_name(self, track: dict, default="Single") -> str:
|
||||
album = track.get('album')
|
||||
if isinstance(album, dict):
|
||||
return album.get('name', default)
|
||||
if isinstance(album, str):
|
||||
return album
|
||||
return default
|
||||
|
||||
def _clean_title(self, title: str) -> str:
|
||||
if not title: return "Playlist"
|
||||
title = title.encode('ascii', 'ignore').decode('ascii')
|
||||
spam_words = ["Playlist", "Music Chart", "Full SPOTIFY Video", "Updated Weekly", "Official", "Video"]
|
||||
for word in spam_words:
|
||||
title = re.sub(word, "", title, flags=re.IGNORECASE)
|
||||
title = re.sub(r'\s+', ' ', title).strip()
|
||||
title = title.strip('*- ')
|
||||
return title
|
||||
|
||||
def _clean_description(self, desc: str) -> str:
|
||||
if not desc: return ""
|
||||
desc = re.sub(r'http\S+', '', desc)
|
||||
desc = re.sub(r'[*_=]{3,}', '', desc)
|
||||
if len(desc) > 300:
|
||||
desc = desc[:300] + "..."
|
||||
return desc.strip()
|
||||
|
||||
def get_playlist(self, id: str):
    """Fetch a YT Music playlist or album by id, normalized for the UI.

    Album browse ids start with "MPREb"; other ids are tried as playlists
    first with an album fallback. Results are cached for one hour.
    Raises ResourceNotFound when neither lookup succeeds.
    """
    cache_key = f"playlist:{id}"
    hit = self.cache.get(cache_key)
    if hit:
        return hit

    try:
        playlist_data = None
        is_album = False

        # "MPREb..." ids are album browse ids -> try the album API first.
        if id.startswith("MPREb"):
            try:
                playlist_data = self.yt.get_album(id)
                is_album = True
            except:
                pass

        if not playlist_data:
            try:
                playlist_data = self.yt.get_playlist(id, limit=100)
            except Exception:
                # Last resort: some album ids lack the MPREb prefix.
                if not is_album:
                    playlist_data = self.yt.get_album(id)
                    is_album = True

        formatted_tracks = []
        if 'tracks' in playlist_data:
            for track in playlist_data['tracks']:
                # Album tracks often lack per-track art; fall back to the
                # album-level thumbnails in that case.
                track_thumbs = track.get('thumbnails', []) or (
                    playlist_data.get('thumbnails', []) if is_album else []
                )
                formatted_tracks.append({
                    "title": track.get('title', 'Unknown Title'),
                    "artist": self._extract_artist_names(track),
                    "album": self._extract_album_name(track, playlist_data.get('title', 'Single')),
                    "duration": track.get('duration_seconds', track.get('length_seconds', 0)),
                    "cover_url": self._get_high_res_thumbnail(track_thumbs),
                    "id": track.get('videoId'),
                    "url": f"https://music.youtube.com/watch?v={track.get('videoId')}"
                })

        playlist_cover = self._get_high_res_thumbnail(playlist_data.get('thumbnails', []))

        # Albums carry an artist list; playlists carry a single author dict.
        if is_album:
            raw_artists = playlist_data.get('artists', [])
            author = ", ".join(
                a.get('name', 'Unknown') if isinstance(a, dict) else a
                for a in raw_artists
            )
        else:
            author_data = playlist_data.get('author', {})
            if isinstance(author_data, dict):
                author = author_data.get('name', 'YouTube Music')
            else:
                author = str(author_data)

        formatted_playlist = {
            "id": playlist_data.get('browseId', playlist_data.get('id')),
            "title": self._clean_title(playlist_data.get('title', 'Unknown')),
            "description": self._clean_description(playlist_data.get('description', '')),
            "author": author,
            "cover_url": playlist_cover,
            "tracks": formatted_tracks
        }

        self.cache.set(cache_key, formatted_playlist, ttl_seconds=3600)
        return formatted_playlist

    except Exception as e:
        print(f"Playlist Fetch Error: {e}")
        raise ResourceNotFound(f"Playlist {id} not found")
|
||||
|
||||
def search(self, query: str):
    """Search YT Music songs; returns {"tracks": [...]}, cached for 24 h.

    Returns [] for an empty query (legacy shape kept for compatibility).
    Raises ExternalAPIError on upstream failure.
    """
    if not query:
        return []
    cache_key = f"search:{query.lower().strip()}"
    hit = self.cache.get(cache_key)
    if hit:
        return hit

    try:
        results = self.yt.search(query, filter="songs", limit=20)
        tracks = [
            {
                "title": item.get('title', 'Unknown Title'),
                "artist": self._extract_artist_names(item),
                "album": self._extract_album_name(item, "Single"),
                "duration": item.get('duration_seconds', 0),
                "cover_url": self._get_high_res_thumbnail(item.get('thumbnails', [])),
                "id": item.get('videoId'),
                "url": f"https://music.youtube.com/watch?v={item.get('videoId')}"
            }
            for item in results
        ]

        response = {"tracks": tracks}
        self.cache.set(cache_key, response, ttl_seconds=86400)
        return response
    except Exception as e:
        print(f"Search Error: {e}")
        raise ExternalAPIError(str(e))
|
||||
|
||||
def get_stream_url(self, id: str):
    """Resolve a direct audio stream URL for the given video id.

    Walks a preference-ordered list of yt-dlp player clients until one
    yields a usable URL.  Successful results are cached for an hour.

    Returns:
        dict with ``url`` (the stream URL) and ``headers`` (HTTP headers
        yt-dlp says must accompany the request).

    Raises:
        ExternalAPIError: when every client fails.
    """
    cache_key = f"stream:{id}"
    cached = self.cache.get(cache_key)
    if cached:
        return cached

    # Client preference order: iOS often serves the cleanest audio,
    # Android is robust, web is standard but prone to 403s, and TV
    # sometimes gets past age-gating.
    client_configs = [
        {'extractor_args': {'youtube': {'player_client': [name]}}}
        for name in ('ios', 'android', 'web', 'tv')
    ]

    watch_url = f"https://www.youtube.com/watch?v={id}"
    base_opts = {
        'format': 'bestaudio[ext=m4a]/best[ext=mp4]/best',
        'quiet': True,
        'noplaylist': True,
        'force_ipv4': True,
    }
    last_error = None

    for client_config in client_configs:
        options = dict(base_opts)
        options.update(client_config)
        try:
            with yt_dlp.YoutubeDL(options) as ydl:
                info = ydl.extract_info(watch_url, download=False)
            stream_url = info.get('url')
            if stream_url:
                result = {
                    "url": stream_url,
                    "headers": info.get('http_headers', {})
                }
                self.cache.set(cache_key, result, ttl_seconds=3600)
                return result
        except Exception as e:
            last_error = e
            print(f"Fetch failed with client {client_config}: {e}")
            continue

    # Every client failed (or returned no URL) — surface the last error.
    print(f"All clients failed for {id}. Last error: {last_error}")
    raise ExternalAPIError(str(last_error))
|
||||
|
||||
def invalidate_stream_cache(self, id: str):
    """Drop any cached stream URL for *id* (best effort).

    Stream URLs expire upstream, so callers invalidate after a playback
    failure.  A missing or undeletable cache file is not an error.
    """
    cache_key = f"stream:{id}"
    path = self.cache._get_path(cache_key)
    if path.exists():
        try:
            path.unlink()
        except OSError:
            # Best-effort cleanup only; the file may have been removed
            # concurrently.  (Was a bare `except:`, which also swallowed
            # KeyboardInterrupt/SystemExit.)
            pass
|
||||
|
||||
def get_recommendations(self, seed_id: str):
    """Return up to ~20 tracks related to *seed_id*.

    Uses the YT Music watch-playlist ("radio") for the seed video.
    Always returns ``{"tracks": [...]}`` — the old empty-seed path
    returned a bare ``[]``, unlike the success and error paths.
    Recommendations are non-critical, so upstream failures yield an
    empty list rather than raising.  Cached for one hour.
    """
    if not seed_id:
        # Same shape as the success/error paths for consistency.
        return {"tracks": []}
    cache_key = f"rec:{seed_id}"
    cached = self.cache.get(cache_key)
    if cached:
        return cached

    try:
        watch_playlist = self.yt.get_watch_playlist(videoId=seed_id, limit=20)
        tracks = []
        if 'tracks' in watch_playlist:
            # Exclude the seed itself and any duplicate entries.
            seen_ids = {seed_id}
            for track in watch_playlist['tracks']:
                t_id = track.get('videoId')
                if not t_id or t_id in seen_ids:
                    continue
                seen_ids.add(t_id)

                tracks.append({
                    "title": track.get('title', 'Unknown Title'),
                    "artist": self._extract_artist_names(track),
                    "album": self._extract_album_name(track, "Single"),
                    # Watch-playlist items use 'length_seconds'; fall back
                    # to the search-style 'duration_seconds'.
                    "duration": track.get('length_seconds', track.get('duration_seconds', 0)),
                    # Ditto for 'thumbnails' vs the singular 'thumbnail'.
                    "cover_url": self._get_high_res_thumbnail(track.get('thumbnails') or track.get('thumbnail') or []),
                    "id": t_id,
                    "url": f"https://music.youtube.com/watch?v={t_id}"
                })

        response = {"tracks": tracks}
        self.cache.set(cache_key, response, ttl_seconds=3600)
        return response
    except Exception as e:
        print(f"Rec Error: {e}")
        return {"tracks": []}
|
||||
|
||||
def get_home(self):
    """Assemble the home/browse page: named sections of playlist shelves.

    Content comes from the YT Music charts (with a hardcoded fallback so
    the page is never empty), plus derived "Top Hits" / "New Releases"
    shelves built from the same pool and two static editorial shelves.
    The assembled payload is cached for one hour.

    Returns {} if section assembly fails unexpectedly, so the UI can
    still render.

    Note: the previous version defined an unused ``async def
    get_search_shelf`` helper inside this sync method (never awaited,
    bare ``except``); it has been removed as dead code.
    """
    cache_key = "home:browse"
    cached = self.cache.get(cache_key)
    if cached:
        return cached

    try:
        trending_songs = self._fetch_trending_songs()

        # --- FALLBACK IF API FAILS OR RETURNS EMPTY ---
        if not trending_songs:
            print("Using HARDCODED fallback for trending songs.")
            trending_songs = self._fallback_trending_songs()

        # "Top Hits": same pool, shuffled so it looks distinct.
        import random
        top_hits_tracks = list(trending_songs)
        if len(top_hits_tracks) > 5:
            random.shuffle(top_hits_tracks)

        # "New Releases": same pool rotated by two for variety.
        new_releases_tracks = list(trending_songs)
        if len(new_releases_tracks) > 2:
            new_releases_tracks = new_releases_tracks[2:] + new_releases_tracks[:2]

        response = {
            "Trending": [self._make_shelf(
                "trending", "Trending Now", "Top music videos right now",
                trending_songs, "YouTube Charts")],
            "Top Hits": [self._make_shelf(
                "top_hits", "Top Hits Today", "The hottest tracks right now.",
                top_hits_tracks, "Editors")],
            "New Releases": [self._make_shelf(
                "new_releases", "New Releases", "Brand new music found for you.",
                new_releases_tracks, "Spotify Clone")],
            "Focus & Chill": [
                # Static shelves: empty track lists force a fetch when opened.
                self._make_shelf(
                    "lofi_beats", "Lofi Beats", "Chill beats to study/relax to",
                    [], "Lofi Girl",
                    cover_url="https://i.ytimg.com/vi/jfKfPfyJRdk/hqdefault.jpg"),
                self._make_shelf(
                    "jazz_vibes", "Jazz Vibes", "Relaxing Jazz instrumental",
                    [], "Jazz Cafe",
                    cover_url="https://i.ytimg.com/vi/DX7W7WUI6w8/hqdefault.jpg"),
            ],
        }

        self.cache.set(cache_key, response, ttl_seconds=3600)
        return response
    except Exception as e:
        print(f"Home Error: {e}")
        return {}

def _fetch_trending_songs(self):
    """Fetch the VN chart videos and map them to track dicts.

    Returns whatever accumulated before any failure (possibly []);
    errors are logged, never raised — the caller has a fallback.
    """
    songs = []
    try:
        # Get charts
        trending = self.yt.get_charts(country='VN')
        if 'videos' in trending and trending['videos']:
            for item in trending['videos']['items']:
                # Extract high-res thumbnail (last entry is largest).
                thumbnails = item.get('thumbnails', [])
                cover_url = thumbnails[-1]['url'] if thumbnails else ""

                songs.append({
                    "id": item.get('videoId'),
                    "title": item.get('title'),
                    "artist": item.get('artists', [{'name': 'Unknown'}])[0]['name'],
                    "album": "Trending",  # Charts don't usually have album info, stick to generic
                    "cover_url": cover_url,
                    "duration": 0  # Charts might not have duration
                })
    except Exception as e:
        print(f"Error fetching trending: {e}")
    return songs

def _fallback_trending_songs(self):
    """Static shelf used when the charts API fails or returns nothing."""
    return [
        {
            "id": "Da4P2uT4ikU", "title": "Angel Baby", "artist": "Troye Sivan", "album": "Angel Baby",
            "cover_url": "https://lh3.googleusercontent.com/Fj_JpwC1QGEFkH3y973Xv7w7tqVw5C_V-1o7g1gX_c4X_1o7g1gX_c4X_1o7g1=w544-h544-l90-rj"
        },
        {
            "id": "fJ9rUzIMcZQ", "title": "Bohemian Rhapsody", "artist": "Queen", "album": "A Night at the Opera",
            "cover_url": "https://lh3.googleusercontent.com/yFj_JpwC1QGEFkH3y973Xv7w7tqVw5C_V-1o7g1gX_c4X_1o7g1gX_c4X_1o7g1=w544-h544-l90-rj"
        },
        {
            "id": "4NRXx6U8ABQ", "title": "Blinding Lights", "artist": "The Weeknd", "album": "After Hours",
            "cover_url": "https://lh3.googleusercontent.com/Fj_JpwC1QGEFkH3y973Xv7w7tqVw5C_V-1o7g1gX_c4X_1o7g1gX_c4X_1o7g1=w544-h544-l90-rj"
        },
        {
            "id": "OPf0YbXqDm0", "title": "Uptown Funk", "artist": "Mark Ronson", "album": "Uptown Special",
            "cover_url": "https://lh3.googleusercontent.com/Fj_JpwC1QGEFkH3y973Xv7w7tqVw5C_V-1o7g1gX_c4X_1o7g1gX_c4X_1o7g1=w544-h544-l90-rj"
        }
    ]

def _make_shelf(self, shelf_id, title, description, tracks, creator, cover_url=None):
    """Build one playlist-shaped home shelf.

    cover_url defaults to the first track's cover (or "") when not given.
    """
    if cover_url is None:
        cover_url = tracks[0]['cover_url'] if tracks else ""
    return {
        "id": shelf_id,
        "title": title,
        "description": description,
        "cover_url": cover_url,
        "tracks": tracks,
        "type": "Playlist",
        "creator": creator
    }
|
||||
|
||||
def get_trending(self):
    """Dedicated trending endpoint: just the tracks of the home page's
    "Trending" shelf, or an empty list when it is absent."""
    home_sections = self.get_home()
    trending_shelves = home_sections.get("Trending") or []
    if trending_shelves:
        return {"tracks": trending_shelves[0]["tracks"]}
    return {"tracks": []}
|
||||
19
debug_browse.py
Normal file
19
debug_browse.py
Normal file
|
|
@ -0,0 +1,19 @@
|
|||
# Ad-hoc debug script: dump the shape of the /api/browse payload from a
# locally running backend so the frontend's expectations can be checked.
# Not part of the application; run manually with the server up.
import requests
import json  # NOTE(review): appears unused here — kept for manual tinkering?

try:
    r = requests.get('http://localhost:8000/api/browse')
    data = r.json()
    # Top-level section names (e.g. "Trending", "Top Hits", ...).
    print("Keys:", data.keys())
    for key, val in data.items():
        print(f"Key: {key}, Type: {type(val)}")
        if isinstance(val, list) and len(val) > 0:
            # Inspect the first shelf of each section.
            item = val[0]
            print(f" Item 0 keys: {item.keys()}")
            if 'tracks' in item:
                print(f" Tracks length: {len(item['tracks'])}")
                if len(item['tracks']) > 0:
                    # One full track so the field names are visible.
                    print(f" Track 0 keys: {item['tracks'][0].keys()}")
                    print(f" Track 0 sample: {item['tracks'][0]}")
except Exception as e:
    # Connection errors etc. are just printed; this is throwaway tooling.
    print(e)
|
||||
|
|
@ -1,17 +0,0 @@
|
|||
# Ad-hoc debug script: inspect the raw ytmusicapi watch-playlist response
# for a known seed video, to verify recommendation field names
# (videoId, length_seconds, thumbnail vs thumbnails, ...).
from ytmusicapi import YTMusic
import json

yt = YTMusic()
seed_id = "hDrFd1W8fvU"
print(f"Fetching watch playlist for {seed_id}...")
results = yt.get_watch_playlist(videoId=seed_id, limit=5)

if 'tracks' in results:
    print(f"Found {len(results['tracks'])} tracks.")
    if len(results['tracks']) > 0:
        first_track = results['tracks'][0]
        # Pretty-print one track so its schema is readable.
        print(json.dumps(first_track, indent=2))
        print("Keys:", first_track.keys())
else:
    # Unexpected shape — show what came back instead.
    print("No 'tracks' key in results")
    print(results.keys())
|
||||
|
|
@ -1,6 +1,6 @@
|
|||
services:
|
||||
spotify-clone:
|
||||
image: vndangkhoa/spotify-clone:latest
|
||||
image: git.khoavo.myds.me/vndangkhoa/spotify-clone:latest
|
||||
container_name: spotify-clone
|
||||
restart: always
|
||||
network_mode: bridge # Synology often prefers explicit bridge or host
|
||||
|
|
@ -9,13 +9,3 @@ services:
|
|||
|
||||
volumes:
|
||||
- ./data:/app/backend/data
|
||||
|
||||
watchtower:
|
||||
image: containrrr/watchtower
|
||||
container_name: spotify-watchtower
|
||||
restart: always
|
||||
volumes:
|
||||
- /var/run/docker.sock:/var/run/docker.sock
|
||||
command: --interval 3600 --cleanup
|
||||
environment:
|
||||
- WATCHTOWER_INCLUDE_RESTARTING=true
|
||||
|
|
|
|||
|
|
@ -20,10 +20,12 @@ export const metadata: Metadata = {
|
|||
manifest: "/manifest.json",
|
||||
referrer: "no-referrer",
|
||||
appleWebApp: {
|
||||
capable: true,
|
||||
statusBarStyle: "black-translucent",
|
||||
title: "Audiophile Web Player",
|
||||
},
|
||||
other: {
|
||||
"mobile-web-app-capable": "yes",
|
||||
},
|
||||
icons: {
|
||||
icon: "/icons/icon-192x192.png",
|
||||
apple: "/icons/icon-512x512.png",
|
||||
|
|
|
|||
|
|
@ -76,7 +76,7 @@ export default function LibraryPage() {
|
|||
{playlists.map((playlist) => (
|
||||
<Link href={`/playlist?id=${playlist.id}`} key={playlist.id}>
|
||||
<div className="bg-[#181818] p-2 md:p-3 rounded-md hover:bg-[#282828] transition aspect-[3/4] flex flex-col">
|
||||
<div className="aspect-square w-full mb-2 md:mb-3 overflow-hidden rounded-md shadow-lg">
|
||||
<div className="aspect-square w-full mb-1 md:mb-3 overflow-hidden rounded-md shadow-lg">
|
||||
<CoverImage
|
||||
src={playlist.cover_url}
|
||||
alt={playlist.title}
|
||||
|
|
@ -84,8 +84,8 @@ export default function LibraryPage() {
|
|||
fallbackText={playlist.title?.substring(0, 2).toUpperCase()}
|
||||
/>
|
||||
</div>
|
||||
<h3 className="text-white font-bold text-xs md:text-sm truncate">{playlist.title}</h3>
|
||||
<p className="text-[#a7a7a7] text-[10px] md:text-xs">Playlist • You</p>
|
||||
<h3 className="text-white font-bold text-[10px] md:text-sm truncate w-full">{playlist.title}</h3>
|
||||
<p className="text-[#a7a7a7] text-[9px] md:text-xs truncate w-full">Playlist • You</p>
|
||||
</div>
|
||||
</Link>
|
||||
))}
|
||||
|
|
@ -93,7 +93,7 @@ export default function LibraryPage() {
|
|||
{browsePlaylists.map((playlist) => (
|
||||
<Link href={`/playlist?id=${playlist.id}`} key={playlist.id}>
|
||||
<div className="bg-[#181818] p-2 md:p-3 rounded-md hover:bg-[#282828] transition aspect-[3/4] flex flex-col">
|
||||
<div className="aspect-square w-full mb-2 md:mb-3 overflow-hidden rounded-md shadow-lg">
|
||||
<div className="aspect-square w-full mb-1 md:mb-3 overflow-hidden rounded-md shadow-lg">
|
||||
<CoverImage
|
||||
src={playlist.cover_url}
|
||||
alt={playlist.title}
|
||||
|
|
@ -101,8 +101,8 @@ export default function LibraryPage() {
|
|||
fallbackText={playlist.title?.substring(0, 2).toUpperCase()}
|
||||
/>
|
||||
</div>
|
||||
<h3 className="text-white font-bold text-xs md:text-sm truncate">{playlist.title}</h3>
|
||||
<p className="text-[#a7a7a7] text-[10px] md:text-xs">Playlist • Made for you</p>
|
||||
<h3 className="text-white font-bold text-[10px] md:text-sm truncate w-full">{playlist.title}</h3>
|
||||
<p className="text-[#a7a7a7] text-[9px] md:text-xs truncate w-full">Playlist • Made for you</p>
|
||||
</div>
|
||||
</Link>
|
||||
))}
|
||||
|
|
@ -113,7 +113,7 @@ export default function LibraryPage() {
|
|||
{showArtists && artists.map((artist) => (
|
||||
<Link href={`/artist?name=${encodeURIComponent(artist.title)}`} key={artist.id}>
|
||||
<div className="bg-[#181818] p-2 md:p-3 rounded-md hover:bg-[#282828] transition aspect-[3/4] flex flex-col items-center text-center">
|
||||
<div className="aspect-square w-full mb-2 md:mb-3 overflow-hidden rounded-full shadow-lg">
|
||||
<div className="aspect-square w-full mb-1 md:mb-3 overflow-hidden rounded-full shadow-lg">
|
||||
<CoverImage
|
||||
src={artist.cover_url}
|
||||
alt={artist.title}
|
||||
|
|
@ -121,8 +121,8 @@ export default function LibraryPage() {
|
|||
fallbackText={artist.title?.substring(0, 2).toUpperCase()}
|
||||
/>
|
||||
</div>
|
||||
<h3 className="text-white font-bold text-xs md:text-sm truncate w-full">{artist.title}</h3>
|
||||
<p className="text-[#a7a7a7] text-[10px] md:text-xs">Artist</p>
|
||||
<h3 className="text-white font-bold text-[10px] md:text-sm truncate w-full">{artist.title}</h3>
|
||||
<p className="text-[#a7a7a7] text-[9px] md:text-xs truncate w-full">Artist</p>
|
||||
</div>
|
||||
</Link>
|
||||
))}
|
||||
|
|
@ -131,7 +131,7 @@ export default function LibraryPage() {
|
|||
{showAlbums && albums.map((album) => (
|
||||
<Link href={`/playlist?id=${album.id}`} key={album.id}>
|
||||
<div className="bg-[#181818] p-2 md:p-3 rounded-md hover:bg-[#282828] transition aspect-[3/4] flex flex-col">
|
||||
<div className="aspect-square w-full mb-2 md:mb-3 overflow-hidden rounded-md shadow-lg">
|
||||
<div className="aspect-square w-full mb-1 md:mb-3 overflow-hidden rounded-md shadow-lg">
|
||||
<CoverImage
|
||||
src={album.cover_url}
|
||||
alt={album.title}
|
||||
|
|
@ -139,8 +139,8 @@ export default function LibraryPage() {
|
|||
fallbackText={album.title?.substring(0, 2).toUpperCase()}
|
||||
/>
|
||||
</div>
|
||||
<h3 className="text-white font-bold text-xs md:text-sm truncate">{album.title}</h3>
|
||||
<p className="text-[#a7a7a7] text-[10px] md:text-xs">Album • {album.creator || 'Spotify'}</p>
|
||||
<h3 className="text-white font-bold text-[10px] md:text-sm truncate w-full">{album.title}</h3>
|
||||
<p className="text-[#a7a7a7] text-[9px] md:text-xs truncate w-full">Album • {album.creator || 'Spotify'}</p>
|
||||
</div>
|
||||
</Link>
|
||||
))}
|
||||
|
|
|
|||
|
|
@ -183,22 +183,22 @@ export default function Home() {
|
|||
<div className="grid grid-cols-3 md:grid-cols-3 lg:grid-cols-4 xl:grid-cols-5 gap-6">
|
||||
{sortPlaylists(playlists).slice(0, 5).map((playlist: any) => (
|
||||
<Link href={`/playlist?id=${playlist.id}`} key={playlist.id}>
|
||||
<div className="bg-[#181818] p-4 rounded-md hover:bg-[#282828] transition duration-300 group cursor-pointer relative h-full flex flex-col">
|
||||
<div className="relative mb-4">
|
||||
<div className="bg-[#181818] p-2 md:p-4 rounded-md hover:bg-[#282828] transition duration-300 group cursor-pointer relative h-full flex flex-col">
|
||||
<div className="relative mb-1 md:mb-4">
|
||||
<CoverImage
|
||||
src={playlist.cover_url}
|
||||
alt={playlist.title}
|
||||
className="w-full aspect-square object-cover rounded-md shadow-lg"
|
||||
fallbackText={playlist.title.substring(0, 2).toUpperCase()}
|
||||
/>
|
||||
<div className="absolute bottom-2 right-2 translate-y-4 opacity-0 group-hover:translate-y-0 group-hover:opacity-100 transition duration-300 shadow-xl">
|
||||
<div className="absolute bottom-2 right-2 translate-y-4 opacity-0 group-hover:translate-y-0 group-hover:opacity-100 transition duration-300 shadow-xl hidden md:block">
|
||||
<div className="w-12 h-12 bg-[#1DB954] rounded-full flex items-center justify-center hover:scale-105">
|
||||
<Play className="fill-black text-black ml-1" />
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<h3 className="font-bold mb-1 truncate">{playlist.title}</h3>
|
||||
<p className="text-sm text-[#a7a7a7] line-clamp-2">{playlist.description}</p>
|
||||
<h3 className="font-bold mb-0.5 md:mb-1 truncate text-[10px] md:text-base">{playlist.title}</h3>
|
||||
<p className="text-[9px] md:text-sm text-[#a7a7a7] line-clamp-2">{playlist.description}</p>
|
||||
</div>
|
||||
</Link>
|
||||
))}
|
||||
|
|
@ -316,22 +316,22 @@ function MadeForYouSection() {
|
|||
) : (
|
||||
<div className="grid grid-cols-3 md:grid-cols-3 lg:grid-cols-4 xl:grid-cols-5 gap-6">
|
||||
{recommendations.slice(0, 5).map((track, i) => (
|
||||
<div key={i} onClick={() => playTrack(track, recommendations)} className="bg-[#181818] p-4 rounded-md hover:bg-[#282828] transition duration-300 group cursor-pointer relative h-full flex flex-col">
|
||||
<div className="relative mb-4">
|
||||
<div key={i} onClick={() => playTrack(track, recommendations)} className="bg-[#181818] p-2 md:p-4 rounded-md hover:bg-[#282828] transition duration-300 group cursor-pointer relative h-full flex flex-col">
|
||||
<div className="relative mb-1 md:mb-4">
|
||||
<CoverImage
|
||||
src={track.cover_url}
|
||||
alt={track.title}
|
||||
className="w-full aspect-square object-cover rounded-md shadow-lg"
|
||||
fallbackText={track.title?.substring(0, 2).toUpperCase()}
|
||||
/>
|
||||
<div className="absolute bottom-2 right-2 translate-y-4 opacity-0 group-hover:translate-y-0 group-hover:opacity-100 transition duration-300 shadow-xl">
|
||||
<div className="absolute bottom-2 right-2 translate-y-4 opacity-0 group-hover:translate-y-0 group-hover:opacity-100 transition duration-300 shadow-xl hidden md:block">
|
||||
<div className="w-12 h-12 bg-[#1DB954] rounded-full flex items-center justify-center hover:scale-105">
|
||||
<Play className="fill-black text-black ml-1" />
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<h3 className="font-bold mb-1 truncate">{track.title}</h3>
|
||||
<p className="text-sm text-[#a7a7a7] line-clamp-2">{track.artist}</p>
|
||||
<h3 className="font-bold mb-0.5 md:mb-1 truncate text-[10px] md:text-base">{track.title}</h3>
|
||||
<p className="text-[9px] md:text-sm text-[#a7a7a7] line-clamp-2">{track.artist}</p>
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
|
|
@ -391,21 +391,21 @@ function RecommendedAlbumsSection() {
|
|||
<div className="grid grid-cols-3 md:grid-cols-3 lg:grid-cols-4 xl:grid-cols-5 gap-4 md:gap-6">
|
||||
{albums.slice(0, 5).map((album, i) => (
|
||||
<Link href={`/playlist?id=${album.id}`} key={i}>
|
||||
<div className="bg-[#181818] p-4 rounded-md hover:bg-[#282828] transition duration-300 group cursor-pointer relative h-full flex flex-col">
|
||||
<div className="relative mb-4">
|
||||
<div className="bg-[#181818] p-2 md:p-4 rounded-md hover:bg-[#282828] transition duration-300 group cursor-pointer relative h-full flex flex-col">
|
||||
<div className="relative mb-1 md:mb-4">
|
||||
<CoverImage
|
||||
src={album.cover_url}
|
||||
alt={album.title}
|
||||
className="w-full aspect-square object-cover rounded-md shadow-lg"
|
||||
/>
|
||||
<div className="absolute bottom-2 right-2 translate-y-4 opacity-0 group-hover:translate-y-0 group-hover:opacity-100 transition duration-300 shadow-xl">
|
||||
<div className="absolute bottom-2 right-2 translate-y-4 opacity-0 group-hover:translate-y-0 group-hover:opacity-100 transition duration-300 shadow-xl hidden md:block">
|
||||
<div className="w-12 h-12 bg-[#1DB954] rounded-full flex items-center justify-center hover:scale-105">
|
||||
<Play className="fill-black text-black ml-1" />
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<h3 className="font-bold mb-1 truncate">{album.title}</h3>
|
||||
<p className="text-sm text-[#a7a7a7] line-clamp-2">{album.description}</p>
|
||||
<h3 className="font-bold mb-0.5 md:mb-1 truncate text-[10px] md:text-base">{album.title}</h3>
|
||||
<p className="text-[9px] md:text-sm text-[#a7a7a7] line-clamp-2">{album.description}</p>
|
||||
</div>
|
||||
</Link>
|
||||
))}
|
||||
|
|
|
|||
|
|
@ -2,6 +2,7 @@
|
|||
|
||||
import { useEffect, useState } from "react";
|
||||
import { Plus, X } from "lucide-react";
|
||||
import { api } from '@/services/apiClient';
|
||||
|
||||
interface AddToPlaylistModalProps {
|
||||
track: any;
|
||||
|
|
@ -14,9 +15,7 @@ export default function AddToPlaylistModal({ track, isOpen, onClose }: AddToPlay
|
|||
|
||||
useEffect(() => {
|
||||
if (isOpen) {
|
||||
const apiUrl = process.env.NEXT_PUBLIC_API_URL || '';
|
||||
fetch(`${apiUrl}/api/playlists`)
|
||||
.then(res => res.json())
|
||||
api.get<any[]>('/playlists')
|
||||
.then(data => setPlaylists(data))
|
||||
.catch(err => console.error(err));
|
||||
}
|
||||
|
|
@ -24,12 +23,7 @@ export default function AddToPlaylistModal({ track, isOpen, onClose }: AddToPlay
|
|||
|
||||
const handleAddToPlaylist = async (playlistId: string) => {
|
||||
try {
|
||||
const apiUrl = process.env.NEXT_PUBLIC_API_URL || '';
|
||||
await fetch(`${apiUrl}/api/playlists/${playlistId}/tracks`, {
|
||||
method: "POST",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: JSON.stringify(track)
|
||||
});
|
||||
await api.post(`/playlists/${playlistId}/tracks`, track);
|
||||
alert(`Added to playlist!`);
|
||||
onClose();
|
||||
} catch (error) {
|
||||
|
|
@ -77,18 +71,12 @@ export default function AddToPlaylistModal({ track, isOpen, onClose }: AddToPlay
|
|||
onClick={() => {
|
||||
const name = prompt("New Playlist Name");
|
||||
if (name) {
|
||||
const apiUrl = process.env.NEXT_PUBLIC_API_URL || '';
|
||||
fetch(`${apiUrl}/api/playlists`, {
|
||||
method: "POST",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: JSON.stringify({ name })
|
||||
}).then(() => {
|
||||
// Refresh list
|
||||
const apiUrl = process.env.NEXT_PUBLIC_API_URL || '';
|
||||
fetch(`${apiUrl}/api/playlists`)
|
||||
.then(res => res.json())
|
||||
.then(data => setPlaylists(data));
|
||||
});
|
||||
api.post('/playlists', { name })
|
||||
.then(() => {
|
||||
// Refresh list
|
||||
return api.get<any[]>('/playlists');
|
||||
})
|
||||
.then(data => setPlaylists(data));
|
||||
}
|
||||
}}
|
||||
className="w-full py-2 bg-white text-black font-bold rounded-full hover:scale-105 transition flex items-center justify-center gap-2"
|
||||
|
|
|
|||
|
|
@ -1,4 +1,5 @@
|
|||
import React, { useEffect, useState, useRef } from 'react';
|
||||
import { api } from '@/services/apiClient';
|
||||
|
||||
interface Metric {
|
||||
time: number;
|
||||
|
|
@ -27,10 +28,14 @@ const LyricsDetail: React.FC<LyricsDetailProps> = ({ track, currentTime, onClose
|
|||
setIsLoading(true);
|
||||
try {
|
||||
// Pass title and artist for LRCLIB fallback
|
||||
const apiUrl = process.env.NEXT_PUBLIC_API_URL || '';
|
||||
const url = `${apiUrl}/api/lyrics?id=${track.id}&title=${encodeURIComponent(track.title)}&artist=${encodeURIComponent(track.artist)}`;
|
||||
const res = await fetch(url);
|
||||
const data = await res.json();
|
||||
const data = await api.get<Metric[]>(
|
||||
'/lyrics',
|
||||
{
|
||||
id: track.id,
|
||||
title: track.title,
|
||||
artist: track.artist
|
||||
}
|
||||
);
|
||||
setLyrics(data || []);
|
||||
} catch (error) {
|
||||
console.error("Error fetching lyrics:", error);
|
||||
|
|
|
|||
|
|
@ -1,11 +1,14 @@
|
|||
"use client";
|
||||
|
||||
import { Home, Search, Library } from "lucide-react";
|
||||
import { Home, Search, Library, Settings } from "lucide-react";
|
||||
import Link from "next/link";
|
||||
import { usePathname } from "next/navigation";
|
||||
import { useState } from "react";
|
||||
import SettingsModal from "./SettingsModal";
|
||||
|
||||
export default function MobileNav() {
|
||||
const pathname = usePathname();
|
||||
const [isSettingsOpen, setIsSettingsOpen] = useState(false);
|
||||
|
||||
const isActive = (path: string) => pathname === path;
|
||||
|
||||
|
|
@ -25,6 +28,11 @@ export default function MobileNav() {
|
|||
<Library size={24} />
|
||||
<span className="text-[10px]">Library</span>
|
||||
</Link>
|
||||
<button onClick={() => setIsSettingsOpen(true)} className={`flex flex-col items-center gap-1 text-neutral-400 hover:text-white`}>
|
||||
<Settings size={24} />
|
||||
<span className="text-[10px]">Settings</span>
|
||||
</button>
|
||||
<SettingsModal isOpen={isSettingsOpen} onClose={() => setIsSettingsOpen(false)} />
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
|
|
|||
106
frontend/components/SettingsModal.tsx
Normal file
106
frontend/components/SettingsModal.tsx
Normal file
|
|
@ -0,0 +1,106 @@
|
|||
"use client";
|
||||
|
||||
import { useState } from "react";
|
||||
import { X, RefreshCw, CheckCircle, AlertCircle } from "lucide-react";
|
||||
import { api } from "@/services/apiClient";
|
||||
|
||||
interface SettingsModalProps {
|
||||
isOpen: boolean;
|
||||
onClose: () => void;
|
||||
}
|
||||
|
||||
export default function SettingsModal({ isOpen, onClose }: SettingsModalProps) {
|
||||
const [updating, setUpdating] = useState(false);
|
||||
const [status, setStatus] = useState<{ type: "success" | "error" | null; message: string }>({ type: null, message: "" });
|
||||
|
||||
if (!isOpen) return null;
|
||||
|
||||
const handleUpdate = async (module: 'ytdlp' | 'spotdl') => {
|
||||
setUpdating(true);
|
||||
setStatus({ type: null, message: "" });
|
||||
try {
|
||||
const endpoint = module === 'ytdlp' ? "/settings/update-ytdlp" : "/settings/update-spotdl";
|
||||
await api.post(endpoint, {});
|
||||
setStatus({ type: "success", message: `${module} updated! Server is restarting...` });
|
||||
// Reload page after a delay to reflect restart
|
||||
setTimeout(() => {
|
||||
window.location.reload();
|
||||
}, 5000);
|
||||
} catch (e: any) {
|
||||
console.error(e); // Debugging
|
||||
setStatus({ type: "error", message: e.message || "Update failed. Check console." });
|
||||
} finally {
|
||||
setUpdating(false);
|
||||
}
|
||||
};
|
||||
|
||||
return (
|
||||
<div className="fixed inset-0 bg-black/50 z-50 flex items-center justify-center p-4">
|
||||
<div className="bg-[#1e1e1e] rounded-xl p-6 w-full max-w-md shadow-2xl border border-white/10">
|
||||
<div className="flex justify-between items-center mb-6">
|
||||
<h2 className="text-xl font-bold">Settings</h2>
|
||||
<button onClick={onClose} className="p-2 hover:bg-white/10 rounded-full transition">
|
||||
<X size={20} />
|
||||
</button>
|
||||
</div>
|
||||
|
||||
<div className="space-y-6">
|
||||
<div className="bg-[#2a2a2a] p-4 rounded-lg flex flex-col gap-4">
|
||||
<div>
|
||||
<h3 className="font-semibold mb-2">Core Components</h3>
|
||||
<p className="text-sm text-gray-400 mb-2">
|
||||
Update core libraries to fix playback or download issues.
|
||||
</p>
|
||||
</div>
|
||||
|
||||
<button
|
||||
onClick={() => handleUpdate('ytdlp')}
|
||||
disabled={updating}
|
||||
className={`w-full py-3 rounded-lg font-medium flex items-center justify-center gap-2 transition ${updating ? "bg-blue-600/50 cursor-not-allowed" : "bg-blue-600 hover:bg-blue-500"
|
||||
}`}
|
||||
>
|
||||
{updating ? (
|
||||
<>
|
||||
<RefreshCw className="animate-spin" size={18} />
|
||||
Updating yt-dlp...
|
||||
</>
|
||||
) : (
|
||||
<>
|
||||
<RefreshCw size={18} />
|
||||
Update yt-dlp (Nightly)
|
||||
</>
|
||||
)}
|
||||
</button>
|
||||
|
||||
<button
|
||||
onClick={() => handleUpdate('spotdl')}
|
||||
disabled={updating}
|
||||
className={`w-full py-3 rounded-lg font-medium flex items-center justify-center gap-2 transition ${updating ? "bg-green-600/50 cursor-not-allowed" : "bg-green-600 hover:bg-green-500"
|
||||
}`}
|
||||
>
|
||||
{updating ? (
|
||||
<>
|
||||
<RefreshCw className="animate-spin" size={18} />
|
||||
Updating spotdl...
|
||||
</>
|
||||
) : (
|
||||
<>
|
||||
<RefreshCw size={18} />
|
||||
Update spotdl (Latest)
|
||||
</>
|
||||
)}
|
||||
</button>
|
||||
</div>
|
||||
|
||||
{status.message && (
|
||||
<div className={`p-4 rounded-lg flex items-start gap-3 ${status.type === "success" ? "bg-green-500/10 text-green-400" : "bg-red-500/10 text-red-400"
|
||||
}`}>
|
||||
{status.type === "success" ? <CheckCircle size={20} /> : <AlertCircle size={20} />}
|
||||
<p className="text-sm">{status.message}</p>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
|
@ -1,10 +1,11 @@
|
|||
"use client";
|
||||
|
||||
import { Home, Search, Library, Plus, Heart } from "lucide-react";
|
||||
import { Home, Search, Library, Plus, Heart, Settings } from "lucide-react";
|
||||
import Link from "next/link";
|
||||
import { usePlayer } from "@/context/PlayerContext";
|
||||
import { useState } from "react";
|
||||
import CreatePlaylistModal from "./CreatePlaylistModal";
|
||||
import SettingsModal from "./SettingsModal";
|
||||
import { dbService } from "@/services/db";
|
||||
import { useLibrary } from "@/context/LibraryContext";
|
||||
import Logo from "./Logo";
|
||||
|
|
@ -14,6 +15,7 @@ export default function Sidebar() {
|
|||
const { likedTracks } = usePlayer();
|
||||
const { userPlaylists, libraryItems, refreshLibrary: refresh, activeFilter, setActiveFilter } = useLibrary();
|
||||
const [isCreateModalOpen, setIsCreateModalOpen] = useState(false);
|
||||
const [isSettingsOpen, setIsSettingsOpen] = useState(false);
|
||||
|
||||
const handleCreatePlaylist = async (name: string) => {
|
||||
await dbService.createPlaylist(name);
|
||||
|
|
@ -49,6 +51,13 @@ export default function Sidebar() {
|
|||
<Search className="w-6 h-6" />
|
||||
<span className="font-bold">Search</span>
|
||||
</Link>
|
||||
<button
|
||||
onClick={() => setIsSettingsOpen(true)}
|
||||
className="flex items-center gap-4 text-spotify-text-muted hover:text-white transition cursor-pointer text-left"
|
||||
>
|
||||
<Settings className="w-6 h-6" />
|
||||
<span className="font-bold">Settings</span>
|
||||
</button>
|
||||
</div>
|
||||
|
||||
<div className="bg-[#121212] rounded-lg flex-1 flex flex-col overflow-hidden">
|
||||
|
|
@ -185,6 +194,10 @@ export default function Sidebar() {
|
|||
onClose={() => setIsCreateModalOpen(false)}
|
||||
onCreate={handleCreatePlaylist}
|
||||
/>
|
||||
<SettingsModal
|
||||
isOpen={isSettingsOpen}
|
||||
onClose={() => setIsSettingsOpen(false)}
|
||||
/>
|
||||
</aside>
|
||||
);
|
||||
}
|
||||
|
|
|
|||
|
|
@ -2,6 +2,7 @@
|
|||
|
||||
import { createContext, useContext, useState, useEffect, ReactNode } from "react";
|
||||
import { dbService } from "@/services/db";
|
||||
import { api } from '@/services/apiClient';
|
||||
import { Track, AudioQuality } from "@/types";
|
||||
import * as mm from 'music-metadata-browser';
|
||||
|
||||
|
|
@ -139,11 +140,9 @@ export function PlayerProvider({ children }: { children: ReactNode }) {
|
|||
if (!preloadedBlobs.has(track.id) && track.url) {
|
||||
try {
|
||||
// Construct the correct stream URL for preloading if it's external
|
||||
const fetchUrl = track.url.startsWith('http') ? `/api/stream?id=${track.id}` : track.url;
|
||||
const fetchUrl = track.url.startsWith('http') ? `/stream?id=${track.id}` : track.url;
|
||||
|
||||
const res = await fetch(fetchUrl);
|
||||
if (!res.ok) throw new Error("Fetch failed");
|
||||
const blob = await res.blob();
|
||||
const blob = await api.getBlob(fetchUrl);
|
||||
const blobUrl = URL.createObjectURL(blob);
|
||||
setPreloadedBlobs(prev => new Map(prev).set(track.id, blobUrl));
|
||||
console.log(`Buffered ${track.title}`);
|
||||
|
|
|
|||
|
|
@ -21,7 +21,10 @@ const nextConfig = {
|
|||
{ source: '/api/download-status/:path*', destination: 'http://127.0.0.1:8000/api/download-status/:path*' },
|
||||
{ source: '/api/lyrics/:path*', destination: 'http://127.0.0.1:8000/api/lyrics/:path*' },
|
||||
{ source: '/api/trending/:path*', destination: 'http://127.0.0.1:8000/api/trending/:path*' },
|
||||
{ source: '/api/settings/:path*', destination: 'http://127.0.0.1:8000/api/settings/:path*' },
|
||||
{ source: '/api/recommendations/:path*', destination: 'http://127.0.0.1:8000/api/recommendations/:path*' },
|
||||
// Catch-all for other new endpoints
|
||||
{ source: '/api/:path*', destination: 'http://127.0.0.1:8000/api/:path*' },
|
||||
];
|
||||
},
|
||||
images: {
|
||||
|
|
|
|||
73
frontend/services/apiClient.ts
Normal file
73
frontend/services/apiClient.ts
Normal file
|
|
@ -0,0 +1,73 @@
|
|||
interface RequestOptions extends RequestInit {
|
||||
params?: Record<string, string>;
|
||||
}
|
||||
|
||||
class ApiClient {
|
||||
private baseUrl: string = '/api';
|
||||
|
||||
async get<T>(url: string, params?: Record<string, string>): Promise<T> {
|
||||
return this.request<T>(url, { method: 'GET', params });
|
||||
}
|
||||
|
||||
async post<T>(url: string, body: any): Promise<T> {
|
||||
return this.request<T>(url, {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify(body)
|
||||
});
|
||||
}
|
||||
|
||||
async put<T>(url: string, body: any): Promise<T> {
|
||||
return this.request<T>(url, {
|
||||
method: 'PUT',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify(body)
|
||||
});
|
||||
}
|
||||
|
||||
async delete<T>(url: string): Promise<T> {
|
||||
return this.request<T>(url, { method: 'DELETE' });
|
||||
}
|
||||
|
||||
async getBlob(url: string): Promise<Blob> {
|
||||
// Ensure endpoint starts with / if not empty and is relative
|
||||
if (url && !url.startsWith('/') && !url.startsWith('http')) {
|
||||
url = '/' + url;
|
||||
}
|
||||
|
||||
// Handle absolute URLs (like preloading external)
|
||||
const fetchUrl = url.startsWith('http') ? url : `${this.baseUrl}${url}`;
|
||||
|
||||
const response = await fetch(fetchUrl);
|
||||
if (!response.ok) throw new Error("Fetch failed");
|
||||
return response.blob();
|
||||
}
|
||||
|
||||
private async request<T>(endpoint: string, options: RequestOptions = {}): Promise<T> {
|
||||
// Ensure endpoint starts with / if not empty
|
||||
if (endpoint && !endpoint.startsWith('/')) {
|
||||
endpoint = '/' + endpoint;
|
||||
}
|
||||
|
||||
let url = `${this.baseUrl}${endpoint}`;
|
||||
if (options.params) {
|
||||
const query = new URLSearchParams(options.params).toString();
|
||||
url += `?${query}`;
|
||||
}
|
||||
|
||||
const response = await fetch(url, options);
|
||||
|
||||
if (!response.ok) {
|
||||
const errorBody = await response.json().catch(() => ({}));
|
||||
throw new Error(errorBody.detail || `HTTP Error ${response.status}`);
|
||||
}
|
||||
|
||||
if (response.status === 204) {
|
||||
return {} as T;
|
||||
}
|
||||
|
||||
return response.json();
|
||||
}
|
||||
}
|
||||
|
||||
export const api = new ApiClient();
|
||||
|
|
@ -1,5 +1,6 @@
|
|||
import { openDB, DBSchema } from 'idb';
|
||||
import { Track, Playlist } from '@/types';
|
||||
import { api } from '@/services/apiClient';
|
||||
|
||||
export type { Track, Playlist };
|
||||
|
||||
|
|
@ -46,11 +47,7 @@ export const dbService = {
|
|||
async seedInitialData() {
|
||||
try {
|
||||
// Fetch real data from backend to seed valid playlists
|
||||
// We use the 'api' prefix assuming this runs in browser
|
||||
const res = await fetch('/api/trending');
|
||||
if (!res.ok) return [];
|
||||
|
||||
const data = await res.json();
|
||||
const data = await api.get<{ tracks: Track[] }>('/trending');
|
||||
const allTracks: Track[] = data.tracks || [];
|
||||
|
||||
if (allTracks.length === 0) return [];
|
||||
|
|
|
|||
|
|
@ -1,4 +1,5 @@
|
|||
import { Track } from "./db";
|
||||
import { api } from "./apiClient";
|
||||
|
||||
export interface StaticPlaylist {
|
||||
id: string;
|
||||
|
|
@ -10,18 +11,11 @@ export interface StaticPlaylist {
|
|||
creator?: string;
|
||||
}
|
||||
|
||||
// Helper to fetch from backend
|
||||
const apiFetch = async (endpoint: string) => {
|
||||
const res = await fetch(`/api${endpoint}`);
|
||||
if (!res.ok) throw new Error(`API Error: ${res.statusText}`);
|
||||
return res.json();
|
||||
};
|
||||
|
||||
export const libraryService = {
|
||||
async getLibrary(): Promise<StaticPlaylist> {
|
||||
// Fetch "Liked Songs" or main library from backend
|
||||
// Assuming backend has an endpoint or we treat "Trending" as default
|
||||
return await apiFetch('/browse'); // Simplified fallback
|
||||
return await api.get<StaticPlaylist>('/browse'); // Simplified fallback
|
||||
},
|
||||
|
||||
async _generateMockContent(): Promise<void> {
|
||||
|
|
@ -29,12 +23,12 @@ export const libraryService = {
|
|||
},
|
||||
|
||||
async getBrowseContent(): Promise<Record<string, StaticPlaylist[]>> {
|
||||
return await apiFetch('/browse');
|
||||
return await api.get<Record<string, StaticPlaylist[]>>('/browse');
|
||||
},
|
||||
|
||||
async getPlaylist(id: string): Promise<StaticPlaylist | null> {
|
||||
try {
|
||||
return await apiFetch(`/playlists/${id}`);
|
||||
return await api.get<StaticPlaylist>(`/playlists/${id}`);
|
||||
} catch (e) {
|
||||
console.error("Failed to fetch playlist", id, e);
|
||||
return null;
|
||||
|
|
@ -43,12 +37,12 @@ export const libraryService = {
|
|||
|
||||
async getRecommendations(seedTrackId?: string): Promise<Track[]> {
|
||||
// Use trending as recommendations for now
|
||||
const data = await apiFetch('/trending');
|
||||
const data = await api.get<{ tracks: Track[] }>('/trending');
|
||||
return data.tracks || [];
|
||||
},
|
||||
|
||||
async getRecommendedAlbums(seedArtist?: string): Promise<StaticPlaylist[]> {
|
||||
const data = await apiFetch('/browse');
|
||||
const data = await api.get<Record<string, any>>('/browse');
|
||||
// Flatten all albums from categories
|
||||
const albums: StaticPlaylist[] = [];
|
||||
Object.values(data).forEach((list: any) => {
|
||||
|
|
@ -59,7 +53,10 @@ export const libraryService = {
|
|||
|
||||
async search(query: string): Promise<Track[]> {
|
||||
try {
|
||||
return await apiFetch(`/search?q=${encodeURIComponent(query)}`);
|
||||
// Query encoding is handled by URLSearchParams inside apiClient when the
// value is supplied via the `params` argument, so no manual
// encodeURIComponent is needed here. The backend expects a 'query' param.
|
||||
const res = await api.get<{ tracks: Track[] }>('/search', { query }); // Backend expects 'query' param
|
||||
return res.tracks || [];
|
||||
} catch (e) {
|
||||
return [];
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1 +0,0 @@
|
|||
{"id":null,"title":"Thch Th n (Remix)","description":"","author":"Lê bảo bình","cover_url":"https://lh3.googleusercontent.com/wlJb64jqoA3KHokhIxN0FzWdJXvBgTYx6bdvrqGSqP_Ux7uLmQTA0MLfsM5AsYFL2Hl6J83SMfw9njj5=w544-h544-l90-rj","tracks":[{"title":"Thích Thì Đến (Lofi)","artist":"Lê bảo bình","album":"Thích Thì Đến (Remix)","duration":197,"cover_url":"https://lh3.googleusercontent.com/wlJb64jqoA3KHokhIxN0FzWdJXvBgTYx6bdvrqGSqP_Ux7uLmQTA0MLfsM5AsYFL2Hl6J83SMfw9njj5=w544-h544-l90-rj","id":"NLBiuA2TuXs","url":"https://music.youtube.com/watch?v=NLBiuA2TuXs"},{"title":"Thích Thì Đến (Beat Lofi)","artist":"Lê bảo bình","album":"Thích Thì Đến (Remix)","duration":197,"cover_url":"https://lh3.googleusercontent.com/wlJb64jqoA3KHokhIxN0FzWdJXvBgTYx6bdvrqGSqP_Ux7uLmQTA0MLfsM5AsYFL2Hl6J83SMfw9njj5=w544-h544-l90-rj","id":"MMWZclWtfOw","url":"https://music.youtube.com/watch?v=MMWZclWtfOw"},{"title":"Thích Thì Đến (Remix)","artist":"Lê bảo bình","album":"Thích Thì Đến (Remix)","duration":248,"cover_url":"https://lh3.googleusercontent.com/wlJb64jqoA3KHokhIxN0FzWdJXvBgTYx6bdvrqGSqP_Ux7uLmQTA0MLfsM5AsYFL2Hl6J83SMfw9njj5=w544-h544-l90-rj","id":"PJ3xRwSAG88","url":"https://music.youtube.com/watch?v=PJ3xRwSAG88"},{"title":"Thích Thì Đến (Beat Remix)","artist":"Lê bảo bình","album":"Thích Thì Đến (Remix)","duration":248,"cover_url":"https://lh3.googleusercontent.com/wlJb64jqoA3KHokhIxN0FzWdJXvBgTYx6bdvrqGSqP_Ux7uLmQTA0MLfsM5AsYFL2Hl6J83SMfw9njj5=w544-h544-l90-rj","id":"nPUucoJkMq8","url":"https://music.youtube.com/watch?v=nPUucoJkMq8"},{"title":"Thích Thì Đến (Deephouse)","artist":"Lê bảo bình","album":"Thích Thì Đến (Remix)","duration":216,"cover_url":"https://lh3.googleusercontent.com/wlJb64jqoA3KHokhIxN0FzWdJXvBgTYx6bdvrqGSqP_Ux7uLmQTA0MLfsM5AsYFL2Hl6J83SMfw9njj5=w544-h544-l90-rj","id":"xRG4IivcvTg","url":"https://music.youtube.com/watch?v=xRG4IivcvTg"},{"title":"Thích Thì Đến (Beat Deephouse)","artist":"Lê bảo bình","album":"Thích Thì Đến 
(Remix)","duration":216,"cover_url":"https://lh3.googleusercontent.com/wlJb64jqoA3KHokhIxN0FzWdJXvBgTYx6bdvrqGSqP_Ux7uLmQTA0MLfsM5AsYFL2Hl6J83SMfw9njj5=w544-h544-l90-rj","id":"pifCyHStEgs","url":"https://music.youtube.com/watch?v=pifCyHStEgs"}]}
|
||||
|
|
@ -1,21 +0,0 @@
|
|||
import yt_dlp
import json

# Smoke-test: can yt-dlp resolve a direct audio stream URL for a known
# video ID from our data (e.g., "Khóa Ly Biệt")?
video_id = "s0OMNH-N5D8"
url = "https://www.youtube.com/watch?v=" + video_id

# Audio-only extraction, no playlist expansion, minimal console noise.
ydl_opts = dict(
    format='bestaudio/best',
    quiet=True,
    noplaylist=True,
)

try:
    with yt_dlp.YoutubeDL(ydl_opts) as ydl:
        # download=False: only resolve metadata + the direct stream URL.
        info = ydl.extract_info(url, download=False)
        print(f"Title: {info.get('title')}")
        print(f"URL: {info.get('url')}")  # the direct stream URL
        print("Success: Extracted audio URL")
except Exception as e:
    # Top-level script boundary: report and exit normally.
    print(f"Error: {e}")
|
||||
11
update.bat
Normal file
11
update.bat
Normal file
|
|
@ -0,0 +1,11 @@
|
|||
@echo off
rem Update script: pull the newest published image and restart the stack.
echo Updating Spotify Clone...

rem Fetch the latest image from the registry.
docker-compose pull

rem Recreate the container with the freshly pulled image.
docker-compose up -d

echo Update complete!
pause
|
||||
Loading…
Reference in a new issue