- Add Python FastAPI backend with Pydantic validation - Port WhiskClient and MetaAIClient to Python - Create API routers for all endpoints - Add Swagger/ReDoc documentation at /docs - Update Dockerfile for multi-service container - Add lib/api.ts frontend client - Update README for V3
151 lines
4.5 KiB
Python
151 lines
4.5 KiB
Python
"""
|
|
Prompts Service for Python/FastAPI
|
|
Port of lib/prompts-service.ts
|
|
|
|
Handles:
|
|
- Read/write prompts.json
|
|
- Sync prompts from crawlers (placeholder - crawlers complex to port)
|
|
"""
|
|
import json
|
|
import os
|
|
import asyncio
|
|
from pathlib import Path
|
|
from typing import List, Dict, Any, Optional
|
|
from datetime import datetime
|
|
|
|
# Path to prompts data file (relative to project root)
# Resolved three levels up from this module: <project root>/data/prompts.json.
DATA_DIR = Path(__file__).parent.parent.parent / "data"
DATA_FILE = DATA_DIR / "prompts.json"
|
|
|
|
|
|
class Prompt:
    """In-memory representation of one prompt record from prompts.json.

    Mirrors the JSON schema of the TypeScript service: camelCase keys
    (useCount, lastUsedAt, createdAt) plus snake_case source_url.
    """

    def __init__(self, data: Dict[str, Any]):
        """Build a Prompt from a raw dict, applying safe defaults for missing keys."""
        self.id = data.get("id", 0)
        self.title = data.get("title", "")
        self.description = data.get("description", "")
        self.prompt = data.get("prompt", "")
        self.category = data.get("category", "")
        self.source = data.get("source", "")
        self.source_url = data.get("source_url", "")
        # Guard against an explicit null in the JSON ("images": null) as well
        # as a missing key, so callers can always append without a None check.
        self.images = data.get("images") or []
        self.use_count = data.get("useCount", 0)
        self.last_used_at = data.get("lastUsedAt")
        self.created_at = data.get("createdAt")

    def to_dict(self) -> Dict[str, Any]:
        """Serialize back to the JSON-compatible dict shape (original key casing preserved)."""
        return {
            "id": self.id,
            "title": self.title,
            "description": self.description,
            "prompt": self.prompt,
            "category": self.category,
            "source": self.source,
            "source_url": self.source_url,
            "images": self.images,
            "useCount": self.use_count,
            "lastUsedAt": self.last_used_at,
            "createdAt": self.created_at
        }
|
|
|
|
|
|
class PromptCache:
    """Container mirroring the full on-disk structure of prompts.json."""

    def __init__(self, data: Dict[str, Any]):
        """Hydrate the cache from a raw dict, wrapping each entry in a Prompt."""
        raw_entries = data.get("prompts", [])
        self.prompts = [Prompt(entry) for entry in raw_entries]
        self.last_updated = data.get("last_updated")
        self.last_sync = data.get("lastSync")
        self.categories = data.get("categories", {})
        self.total_count = data.get("total_count", 0)
        self.sources = data.get("sources", [])

    def to_dict(self) -> Dict[str, Any]:
        """Serialize the cache (and all contained prompts) back to the JSON shape."""
        serialized: Dict[str, Any] = {
            "prompts": [entry.to_dict() for entry in self.prompts],
            "last_updated": self.last_updated,
            "lastSync": self.last_sync,
            "categories": self.categories,
            "total_count": self.total_count,
            "sources": self.sources
        }
        return serialized
|
|
|
|
|
|
async def get_prompts() -> PromptCache:
    """Load the prompt cache from disk.

    Best-effort: a missing, unreadable, or corrupt file degrades to an
    empty cache instead of raising.
    """
    try:
        if DATA_FILE.exists():
            raw = DATA_FILE.read_text(encoding='utf-8')
            return PromptCache(json.loads(raw))
    except Exception as exc:
        print(f"[PromptsService] Error reading prompts: {exc}")

    # Fall through to an empty cache on missing file or any read/parse error.
    empty_state = {
        "prompts": [],
        "last_updated": None,
        "categories": {},
        "total_count": 0,
        "sources": []
    }
    return PromptCache(empty_state)
|
|
|
|
|
|
async def save_prompts(cache: PromptCache) -> None:
    """Persist the cache to prompts.json, creating the data directory if needed.

    Logs and re-raises on failure so callers can surface the error.
    """
    try:
        DATA_DIR.mkdir(parents=True, exist_ok=True)
        serialized = json.dumps(cache.to_dict(), indent=2, ensure_ascii=False)
        DATA_FILE.write_text(serialized, encoding='utf-8')
    except Exception as exc:
        print(f"[PromptsService] Error saving prompts: {exc}")
        raise
|
|
|
|
|
|
async def sync_prompts() -> Dict[str, Any]:
    """Refresh sync timestamps on the cached prompts.

    The JavaScript crawlers have not been ported, so no new prompts are
    fetched; this only stamps lastSync / last_updated and rewrites the file.
    """
    print("[PromptsService] Starting sync...")

    cache = await get_prompts()

    # Millisecond epoch, matching the JS Date.now() convention used on disk.
    cache.last_sync = int(datetime.now().timestamp() * 1000)
    cache.last_updated = datetime.now().isoformat()

    await save_prompts(cache)

    return {"success": True, "count": len(cache.prompts), "added": 0}
|
|
|
|
|
|
async def track_prompt_use(prompt_id: int) -> Optional[Prompt]:
    """Increment a prompt's use counter and stamp its last-used time.

    Returns the updated Prompt, or None when no prompt matches prompt_id.
    """
    cache = await get_prompts()

    hit = next((p for p in cache.prompts if p.id == prompt_id), None)
    if hit is None:
        return None

    hit.use_count += 1
    # Millisecond epoch, matching the on-disk lastUsedAt format.
    hit.last_used_at = int(datetime.now().timestamp() * 1000)
    await save_prompts(cache)
    return hit
|
|
|
|
|
|
async def upload_prompt_image(prompt_id: int, image_base64: str) -> Optional[Prompt]:
    """Attach a base64-encoded PNG (as a data URI) to the matching prompt.

    Returns the updated Prompt, or None when no prompt matches prompt_id.
    """
    cache = await get_prompts()

    target = next((p for p in cache.prompts if p.id == prompt_id), None)
    if target is None:
        return None

    # Defensive: images may be null in older data files.
    if target.images is None:
        target.images = []
    target.images.append(f"data:image/png;base64,{image_base64}")
    await save_prompts(cache)
    return target
|