fix: remove ollama proxy for standalone operation

This commit is contained in:
SysVis AI 2025-12-28 20:39:08 +07:00
parent b27158ef03
commit 5bada75e33

View file

@@ -12,35 +12,10 @@ server {
     try_files $uri $uri/ /index.html;
   }

-  # Proxy Ollama API requests
-  # Uses dynamic resolution so container can start without Ollama available
-  location /api/ {
-    # Use Docker's internal DNS resolver with a short cache
-    resolver 127.0.0.11 valid=10s ipv6=off;
-
-    # Use a variable to force runtime DNS resolution (not startup)
-    set $upstream_ollama http://ollama:11434;
-    proxy_pass $upstream_ollama/api/;
-    proxy_set_header Host $host;
-    proxy_set_header X-Real-IP $remote_addr;
-    proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
-    proxy_set_header X-Forwarded-Proto $scheme;
-
-    # CORS headers
-    add_header 'Access-Control-Allow-Origin' '*' always;
-    add_header 'Access-Control-Allow-Methods' 'GET, POST, OPTIONS' always;
-    add_header 'Access-Control-Allow-Headers' 'DNT,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Range' always;
-
-    if ($request_method = 'OPTIONS') {
-      add_header 'Access-Control-Max-Age' 1728000;
-      add_header 'Content-Type' 'text/plain; charset=utf-8';
-      add_header 'Content-Length' 0;
-      return 204;
-    }
-
-    # Handle case where Ollama isn't available
-    proxy_connect_timeout 5s;
-    proxy_read_timeout 300s;
-  }
+  # NOTE: Ollama proxy is NOT included by default to allow standalone operation.
+  # The app works with Browser AI (WebLLM/Transformers.js) without any external services.
+  #
+  # If you need to proxy requests to Ollama, either:
+  # 1. Set the Ollama URL directly in the app settings (e.g., http://your-nas-ip:11434)
+  # 2. Or mount a custom nginx.conf with your proxy configuration
 }