From a7e1692bdbd9f777ad1fa465a65a4dc67df7701c Mon Sep 17 00:00:00 2001
From: Khoa Vo
Date: Thu, 1 Jan 2026 16:44:29 +0700
Subject: [PATCH] Optimize stream for NAS: prioritize iOS client, flush logs

---
 backend/api/routes.py | 27 ++++++++++++++-------------
 1 file changed, 14 insertions(+), 13 deletions(-)

diff --git a/backend/api/routes.py b/backend/api/routes.py
index 7626a40..7426d4a 100644
--- a/backend/api/routes.py
+++ b/backend/api/routes.py
@@ -608,7 +608,8 @@ async def stream_audio(id: str):
     """
     try:
         # Check Cache for stream URL
-        cache_key = f"v4:stream:{id}"  # v4 cache key for clean slate
+        # Check Cache for stream URL
+        cache_key = f"v5:stream:{id}"  # v5 cache key for ios client priority
         cached_data = cache.get(cache_key)

         stream_url = None
@@ -634,7 +635,7 @@ async def stream_audio(id: str):
             'socket_timeout': 30,
             'retries': 3,
             'force_ipv4': True,
-            'extractor_args': {'youtube': {'player_client': ['android', 'web', 'ios']}},  # Android often gives good progressive streams
+            'extractor_args': {'youtube': {'player_client': ['ios', 'web', 'android']}},  # Prioritize iOS for server-like environments
         }

         try:
@@ -679,30 +680,30 @@ async def stream_audio(id: str):
             if 'Cookie' in base_headers:
                 req_headers['Cookie'] = base_headers['Cookie']

-            print(f"DEBUG_STREAM [{id}]: Opening connection to {stream_url[:50]}...")
-            print(f"DEBUG_STREAM [{id}]: Headers: User-Agent={req_headers.get('User-Agent')[:30]}..., Cookie Present={'Cookie' in req_headers}")
+            print(f"DEBUG_STREAM [{id}]: Opening connection to {stream_url[:50]}...", flush=True)
+            print(f"DEBUG_STREAM [{id}]: Headers: User-Agent={req_headers.get('User-Agent')[:30]}..., Cookie Present={'Cookie' in req_headers}", flush=True)

             # Disable SSL verify to match yt-dlp 'nocheckcertificate' (fixes NAS CA issues)
             external_req = requests.get(stream_url, stream=True, timeout=30, headers=req_headers, verify=False)
-            print(f"DEBUG_STREAM [{id}]: Upstream Status: {external_req.status_code}")
+            print(f"DEBUG_STREAM [{id}]: Upstream Status: {external_req.status_code}", flush=True)
             external_req.raise_for_status()
         except requests.exceptions.HTTPError as http_err:
             error_details = f"Upstream error: {http_err.response.status_code}"
-            print(f"DEBUG_STREAM [{id}]: FAILED {error_details}")
+            print(f"DEBUG_STREAM [{id}]: FAILED {error_details}", flush=True)
             try:
-                print(f"DEBUG_STREAM [{id}]: Response Headers: {http_err.response.headers}")
-                print(f"DEBUG_STREAM [{id}]: Response Body keys: {http_err.response.text[:200]}")
+                print(f"DEBUG_STREAM [{id}]: Response Headers: {http_err.response.headers}", flush=True)
+                print(f"DEBUG_STREAM [{id}]: Response Body keys: {http_err.response.text[:200]}", flush=True)
             except:
                 pass
             # If 403/404/410, invalidate cache
             if http_err.response.status_code in [403, 404, 410]:
-                print(f"DEBUG_STREAM [{id}]: Invalidating cache key {cache_key}")
+                print(f"DEBUG_STREAM [{id}]: Invalidating cache key {cache_key}", flush=True)
                 cache.delete(cache_key)
             raise HTTPException(status_code=500, detail=error_details)
         except Exception as e:
-            print(f"DEBUG_STREAM [{id}]: Connection Failed: {e}")
+            print(f"DEBUG_STREAM [{id}]: Connection Failed: {e}", flush=True)
             import traceback
             traceback.print_exc()
             raise HTTPException(status_code=500, detail=f"Stream connection failed: {str(e)}")
@@ -711,7 +712,7 @@ async def stream_audio(id: str):
         headers = {}
         if "Content-Length" in external_req.headers:
             headers["Content-Length"] = external_req.headers["Content-Length"]
-            print(f"DEBUG_STREAM [{id}]: Content-Length: {headers['Content-Length']}")
+            print(f"DEBUG_STREAM [{id}]: Content-Length: {headers['Content-Length']}", flush=True)

         def iterfile():
             try:
@@ -719,9 +720,9 @@ async def stream_audio(id: str):
                 for chunk in external_req.iter_content(chunk_size=64*1024):
                     yield chunk
                 external_req.close()
-                print(f"DEBUG_STREAM [{id}]: Stream finished successfully")
+                print(f"DEBUG_STREAM [{id}]: Stream finished successfully", flush=True)
             except Exception as e:
-                print(f"DEBUG_STREAM [{id}]: Stream Iterator Error: {e}")
+                print(f"DEBUG_STREAM [{id}]: Stream Iterator Error: {e}", flush=True)
                 pass

         return StreamingResponse(iterfile(), media_type=mime_type, headers=headers)