feat: updates before deployment
This commit is contained in:
parent
0efc0dc2e1
commit
2a4bf8b58b
37 changed files with 4024 additions and 1339 deletions
5
.gitignore
vendored
5
.gitignore
vendored
|
|
@ -40,3 +40,8 @@ yarn-error.log*
|
|||
# typescript
|
||||
*.tsbuildinfo
|
||||
next-env.d.ts
|
||||
|
||||
# local env
|
||||
.venv
|
||||
error.log
|
||||
__pycache__
|
||||
|
|
|
|||
|
|
@ -62,10 +62,20 @@ export async function POST(req: NextRequest) {
|
|||
return NextResponse.json({ images });
|
||||
|
||||
} catch (error: any) {
|
||||
console.error("Generate API Error:", error);
|
||||
console.error("Generate API Error Details:", {
|
||||
message: error.message,
|
||||
stack: error.stack,
|
||||
fullError: error
|
||||
});
|
||||
|
||||
const msg = error.message || "";
|
||||
const isAuthError = msg.includes("401") || msg.includes("403") ||
|
||||
msg.includes("Auth") || msg.includes("auth") ||
|
||||
msg.includes("cookies") || msg.includes("expired");
|
||||
|
||||
return NextResponse.json(
|
||||
{ error: error.message || "Generation failed" },
|
||||
{ status: 500 }
|
||||
{ status: isAuthError ? 401 : 500 }
|
||||
);
|
||||
}
|
||||
}
|
||||
|
|
|
|||
51
app/api/grok-chat/route.ts
Normal file
51
app/api/grok-chat/route.ts
Normal file
|
|
@ -0,0 +1,51 @@
|
|||
import { NextRequest, NextResponse } from 'next/server';
|
||||
|
||||
const CRAWL_SERVICE_URL = 'http://127.0.0.1:8000';
|
||||
|
||||
export async function POST(req: NextRequest) {
|
||||
try {
|
||||
const body = await req.json();
|
||||
const { message, history } = body;
|
||||
|
||||
console.log(`[Grok API] Incoming body:`, JSON.stringify(body, null, 2));
|
||||
|
||||
const proxyPayload = {
|
||||
message,
|
||||
history,
|
||||
cookies: body.cookies
|
||||
};
|
||||
console.log(`[Grok API] Proxy payload:`, JSON.stringify(proxyPayload, null, 2));
|
||||
|
||||
const response = await fetch(`${CRAWL_SERVICE_URL}/grok/chat`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
body: JSON.stringify(proxyPayload),
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
const errorText = await response.text();
|
||||
console.error(`[Grok API] Service error: ${response.status} ${errorText}`);
|
||||
try {
|
||||
const errorJson = JSON.parse(errorText);
|
||||
return NextResponse.json(errorJson, { status: response.status });
|
||||
} catch {
|
||||
return NextResponse.json(
|
||||
{ error: `Service error: ${response.status} - ${errorText}` },
|
||||
{ status: response.status }
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
const data = await response.json();
|
||||
return NextResponse.json(data);
|
||||
|
||||
} catch (error: any) {
|
||||
console.error('[Grok API] Proxy error:', error);
|
||||
return NextResponse.json(
|
||||
{ error: error.message || 'Internal Server Error' },
|
||||
{ status: 500 }
|
||||
);
|
||||
}
|
||||
}
|
||||
54
app/api/grok-debug/route.ts
Normal file
54
app/api/grok-debug/route.ts
Normal file
|
|
@ -0,0 +1,54 @@
|
|||
import { NextRequest, NextResponse } from 'next/server';
|
||||
|
||||
const CRAWL_SERVICE_URL = 'http://127.0.0.1:8000';
|
||||
|
||||
export async function POST(req: NextRequest) {
|
||||
try {
|
||||
const body = await req.json();
|
||||
const { message, history } = body;
|
||||
|
||||
console.log(`[Grok Debug API] Incoming body:`, JSON.stringify(body, null, 2));
|
||||
|
||||
const proxyPayload = {
|
||||
message,
|
||||
history: history || [],
|
||||
cookies: body.cookies || null,
|
||||
user_agent: body.userAgent || null
|
||||
};
|
||||
console.log(`[Grok Debug API] Proxy payload:`, JSON.stringify(proxyPayload, null, 2));
|
||||
|
||||
const response = await fetch(`${CRAWL_SERVICE_URL}/grok/chat`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
body: JSON.stringify(proxyPayload),
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
const errorText = await response.text();
|
||||
console.error(`[Grok Debug API] Service error: ${response.status} ${errorText}`);
|
||||
try {
|
||||
// Try to parse detailed JSON error from FastAPI
|
||||
const errorJson = JSON.parse(errorText);
|
||||
return NextResponse.json(errorJson, { status: response.status });
|
||||
} catch {
|
||||
// Fallback to text
|
||||
return NextResponse.json(
|
||||
{ error: `Service error: ${response.status} - ${errorText}` },
|
||||
{ status: response.status }
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
const data = await response.json();
|
||||
return NextResponse.json(data);
|
||||
|
||||
} catch (error: any) {
|
||||
console.error('[Grok Debug API] Internal error:', error);
|
||||
return NextResponse.json(
|
||||
{ error: error.message || 'Internal Server Error' },
|
||||
{ status: 500 }
|
||||
);
|
||||
}
|
||||
}
|
||||
130
app/api/meta-crawl/route.ts
Normal file
130
app/api/meta-crawl/route.ts
Normal file
|
|
@ -0,0 +1,130 @@
|
|||
import { NextRequest, NextResponse } from 'next/server';
|
||||
import { MetaCrawlClient } from '@/lib/providers/meta-crawl-client';
|
||||
|
||||
/**
|
||||
* API Route: /api/meta-crawl
|
||||
*
|
||||
* Proxies image generation requests to the Crawl4AI Python service
|
||||
* which uses browser automation to interact with Meta AI.
|
||||
*/
|
||||
|
||||
const client = new MetaCrawlClient();
|
||||
|
||||
export async function POST(req: NextRequest) {
|
||||
try {
|
||||
const body = await req.json();
|
||||
// Support both numImages (camelCase) and num_images (snake_case)
|
||||
const { prompt, cookies, numImages, num_images, async = false } = body;
|
||||
const imageCount = num_images || numImages || 4;
|
||||
|
||||
if (!prompt) {
|
||||
return NextResponse.json(
|
||||
{ error: "Prompt is required" },
|
||||
{ status: 400 }
|
||||
);
|
||||
}
|
||||
|
||||
if (!cookies) {
|
||||
return NextResponse.json(
|
||||
{ error: "Meta AI cookies are required. Please configure in settings." },
|
||||
{ status: 401 }
|
||||
);
|
||||
}
|
||||
|
||||
// Check if service is healthy
|
||||
const isHealthy = await client.healthCheck();
|
||||
if (!isHealthy) {
|
||||
return NextResponse.json(
|
||||
{ error: "Crawl4AI service is not available. Please try again later." },
|
||||
{ status: 503 }
|
||||
);
|
||||
}
|
||||
|
||||
if (async) {
|
||||
// Async mode: return task_id for polling
|
||||
const taskId = await client.generateAsync(prompt, cookies, imageCount);
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
task_id: taskId
|
||||
});
|
||||
}
|
||||
|
||||
// Sync mode: wait for completion
|
||||
console.log(`[MetaCrawl API] Generating images for: "${prompt.substring(0, 50)}..."`);
|
||||
|
||||
const images = await client.generate(prompt, cookies, imageCount);
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
images: images.map(img => ({
|
||||
url: img.url,
|
||||
data: img.data,
|
||||
prompt: img.prompt,
|
||||
model: img.model
|
||||
}))
|
||||
});
|
||||
|
||||
} catch (error: any) {
|
||||
console.error("[MetaCrawl API] Error:", error);
|
||||
return NextResponse.json(
|
||||
{ error: error.message || "Image generation failed" },
|
||||
{ status: 500 }
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* GET /api/meta-crawl?task_id=xxx
|
||||
*
|
||||
* Get status of an async generation task
|
||||
*/
|
||||
export async function GET(req: NextRequest) {
|
||||
const taskId = req.nextUrl.searchParams.get('task_id');
|
||||
|
||||
if (!taskId) {
|
||||
// Return rate limit status
|
||||
try {
|
||||
const status = await client.getRateLimitStatus();
|
||||
return NextResponse.json(status);
|
||||
} catch {
|
||||
return NextResponse.json({ error: "Service not available" }, { status: 503 });
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
const status = await client.getTaskStatus(taskId);
|
||||
return NextResponse.json(status);
|
||||
} catch (error: any) {
|
||||
return NextResponse.json(
|
||||
{ error: error.message },
|
||||
{ status: error.message === 'Task not found' ? 404 : 500 }
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* DELETE /api/meta-crawl?task_id=xxx
|
||||
*
|
||||
* Clean up a completed task
|
||||
*/
|
||||
export async function DELETE(req: NextRequest) {
|
||||
const taskId = req.nextUrl.searchParams.get('task_id');
|
||||
|
||||
if (!taskId) {
|
||||
return NextResponse.json({ error: "task_id is required" }, { status: 400 });
|
||||
}
|
||||
|
||||
try {
|
||||
const response = await fetch(`${process.env.CRAWL4AI_URL || 'http://localhost:8000'}/status/${taskId}`, {
|
||||
method: 'DELETE'
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
return NextResponse.json({ error: "Failed to delete task" }, { status: response.status });
|
||||
}
|
||||
|
||||
return NextResponse.json({ deleted: true });
|
||||
} catch (error: any) {
|
||||
return NextResponse.json({ error: error.message }, { status: 500 });
|
||||
}
|
||||
}
|
||||
|
|
@ -9,6 +9,9 @@ import { PromptHero } from "@/components/PromptHero";
|
|||
import { Settings } from "@/components/Settings";
|
||||
import { PromptLibrary } from "@/components/PromptLibrary";
|
||||
import { UploadHistory } from "@/components/UploadHistory";
|
||||
import { GrokChat } from "@/components/GrokChat";
|
||||
import { CookieExpiredDialog } from "@/components/CookieExpiredDialog";
|
||||
|
||||
|
||||
export default function Home() {
|
||||
const { currentView, setCurrentView, loadGallery } = useStore();
|
||||
|
|
@ -48,6 +51,10 @@ export default function Home() {
|
|||
</div>
|
||||
</div>
|
||||
</main>
|
||||
|
||||
{/* Floating Chat */}
|
||||
{/* <GrokChat /> */}
|
||||
<CookieExpiredDialog />
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
|
|
|||
80
components/CookieExpiredDialog.tsx
Normal file
80
components/CookieExpiredDialog.tsx
Normal file
|
|
@ -0,0 +1,80 @@
|
|||
"use client";
|
||||
|
||||
import React from 'react';
|
||||
import { useStore } from '@/lib/store';
|
||||
import { AlertTriangle, Settings, X, Cookie, ExternalLink } from 'lucide-react';
|
||||
import { cn } from '@/lib/utils';
|
||||
|
||||
export function CookieExpiredDialog() {
|
||||
const {
|
||||
showCookieExpired,
|
||||
setShowCookieExpired,
|
||||
setCurrentView,
|
||||
settings
|
||||
} = useStore();
|
||||
|
||||
if (!showCookieExpired) return null;
|
||||
|
||||
const providerName = settings.provider === 'meta' ? 'Meta AI' :
|
||||
settings.provider === 'grok' ? 'Grok' :
|
||||
'Google Whisk';
|
||||
|
||||
const providerUrl = settings.provider === 'meta' ? 'https://www.meta.ai' :
|
||||
settings.provider === 'grok' ? 'https://grok.com' :
|
||||
'https://labs.google/fx/tools/whisk/project';
|
||||
|
||||
const handleFixIssues = () => {
|
||||
setShowCookieExpired(false);
|
||||
setCurrentView('settings');
|
||||
};
|
||||
|
||||
return (
|
||||
<div className="fixed inset-0 z-50 flex items-center justify-center p-4 bg-black/60 backdrop-blur-sm animate-in fade-in duration-200">
|
||||
<div className="relative w-full max-w-md bg-[#18181B] border border-white/10 rounded-2xl shadow-2xl animate-in zoom-in-95 duration-200 overflow-hidden">
|
||||
|
||||
{/* Decorative header background */}
|
||||
<div className="absolute top-0 left-0 right-0 h-32 bg-gradient-to-br from-amber-500/10 to-red-500/10 pointer-events-none" />
|
||||
|
||||
<div className="relative p-6 px-8 flex flex-col items-center text-center">
|
||||
<button
|
||||
onClick={() => setShowCookieExpired(false)}
|
||||
className="absolute top-4 right-4 p-2 text-white/40 hover:text-white rounded-full hover:bg-white/5 transition-colors"
|
||||
>
|
||||
<X className="h-4 w-4" />
|
||||
</button>
|
||||
|
||||
<div className="h-16 w-16 mb-6 rounded-full bg-amber-500/10 flex items-center justify-center ring-1 ring-amber-500/20 shadow-lg shadow-amber-900/20">
|
||||
<Cookie className="h-8 w-8 text-amber-500" />
|
||||
</div>
|
||||
|
||||
<h2 className="text-xl font-bold text-white mb-2">Cookies Expired</h2>
|
||||
|
||||
<p className="text-muted-foreground text-sm mb-6 leading-relaxed">
|
||||
Your <span className="text-white font-medium">{providerName}</span> session has timed out.
|
||||
To continue generating images, please refresh your cookies.
|
||||
</p>
|
||||
|
||||
<div className="w-full space-y-3">
|
||||
<button
|
||||
onClick={handleFixIssues}
|
||||
className="w-full py-3 px-4 bg-primary text-primary-foreground font-semibold rounded-xl hover:bg-primary/90 transition-all flex items-center justify-center gap-2 shadow-lg shadow-primary/20"
|
||||
>
|
||||
<Settings className="h-4 w-4" />
|
||||
Update Settings
|
||||
</button>
|
||||
|
||||
<a
|
||||
href={providerUrl}
|
||||
target="_blank"
|
||||
rel="noopener noreferrer"
|
||||
className="w-full py-3 px-4 bg-white/5 hover:bg-white/10 text-white font-medium rounded-xl transition-all flex items-center justify-center gap-2 border border-white/5"
|
||||
>
|
||||
<ExternalLink className="h-4 w-4" />
|
||||
Open {providerName}
|
||||
</a>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
|
@ -2,14 +2,25 @@
|
|||
|
||||
import React from 'react';
|
||||
import { useStore } from '@/lib/store';
|
||||
import { cn } from "@/lib/utils";
|
||||
import { motion, AnimatePresence } from 'framer-motion';
|
||||
import { Download, Maximize2, Sparkles, Trash2, X, ChevronLeft, ChevronRight, Copy, Film, Wand2 } from 'lucide-react';
|
||||
import { VideoPromptModal } from './VideoPromptModal';
|
||||
import { EditPromptModal } from './EditPromptModal';
|
||||
|
||||
// Helper function to get proper image src (handles URLs vs base64)
|
||||
const getImageSrc = (data: string): string => {
|
||||
if (!data) return '';
|
||||
// If it's already a URL, use it directly
|
||||
if (data.startsWith('http://') || data.startsWith('https://') || data.startsWith('data:')) {
|
||||
return data;
|
||||
}
|
||||
// Otherwise, treat as base64
|
||||
return `data:image/png;base64,${data}`;
|
||||
};
|
||||
|
||||
export function Gallery() {
|
||||
const { gallery, clearGallery, removeFromGallery, setPrompt, addVideo, addToGallery, settings, videos, removeVideo } = useStore();
|
||||
const { gallery, clearGallery, removeFromGallery, setPrompt, addVideo, addToGallery, settings, videos, removeVideo, isGenerating } = useStore();
|
||||
const [selectedIndex, setSelectedIndex] = React.useState<number | null>(null);
|
||||
const [videoModalOpen, setVideoModalOpen] = React.useState(false);
|
||||
const [videoSource, setVideoSource] = React.useState<{ data: string, prompt: string } | null>(null);
|
||||
|
|
@ -70,6 +81,8 @@ export function Gallery() {
|
|||
errorMessage = '🚫 Content Policy: Video blocked because the image contains a recognizable person. Try using a different image.';
|
||||
} else if (data.error?.includes('safety') || data.error?.includes('SAFETY')) {
|
||||
errorMessage = '⚠️ Content Policy: Video blocked by Google\'s safety filters. Try a different source image.';
|
||||
} else if (data.error?.includes('401') || data.error?.includes('UNAUTHENTICATED')) {
|
||||
errorMessage = '🔐 Authentication Error: Your Whisk (Google) cookies have expired. Please go to Settings and update them.';
|
||||
}
|
||||
alert(errorMessage);
|
||||
throw new Error(data.error);
|
||||
|
|
@ -219,6 +232,19 @@ export function Gallery() {
|
|||
|
||||
{/* Gallery Grid */}
|
||||
<div className="columns-1 sm:columns-2 md:columns-3 lg:columns-4 gap-4 space-y-4">
|
||||
{/* Skeleton Loading State */}
|
||||
{isGenerating && (
|
||||
<>
|
||||
{Array.from({ length: settings.imageCount || 4 }).map((_, i) => (
|
||||
<div key={`skeleton-${i}`} className="break-inside-avoid rounded-xl overflow-hidden bg-white/5 border border-white/5 shadow-sm mb-4 relative aspect-[2/3] animate-pulse">
|
||||
<div className="absolute inset-0 bg-gradient-to-t from-white/10 to-transparent" />
|
||||
<div className="absolute bottom-4 left-4 right-4 h-4 bg-white/20 rounded w-3/4" />
|
||||
<div className="absolute top-2 left-2 w-12 h-4 bg-white/20 rounded" />
|
||||
</div>
|
||||
))}
|
||||
</>
|
||||
)}
|
||||
|
||||
<AnimatePresence mode='popLayout'>
|
||||
{gallery.map((img, i) => (
|
||||
<motion.div
|
||||
|
|
@ -231,13 +257,25 @@ export function Gallery() {
|
|||
className="group relative break-inside-avoid rounded-xl overflow-hidden bg-card border shadow-sm"
|
||||
>
|
||||
<img
|
||||
src={"data:image/png;base64," + img.data}
|
||||
src={getImageSrc(img.data)}
|
||||
alt={img.prompt}
|
||||
className="w-full h-auto object-cover transition-transform group-hover:scale-105 cursor-pointer"
|
||||
onClick={() => setSelectedIndex(i)}
|
||||
loading="lazy"
|
||||
/>
|
||||
|
||||
{/* Provider Tag */}
|
||||
{img.provider && (
|
||||
<div className={cn(
|
||||
"absolute top-2 left-2 px-2 py-0.5 rounded-md text-[10px] font-bold uppercase tracking-wider text-white shadow-sm backdrop-blur-md border border-white/10 z-10",
|
||||
img.provider === 'meta' ? "bg-blue-500/80" :
|
||||
img.provider === 'grok' ? "bg-yellow-500/80 text-black" :
|
||||
"bg-amber-500/80"
|
||||
)}>
|
||||
{img.provider}
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Delete button - Top right */}
|
||||
<button
|
||||
onClick={(e) => { e.stopPropagation(); if (img.id) removeFromGallery(img.id); }}
|
||||
|
|
@ -265,7 +303,7 @@ export function Gallery() {
|
|||
<Copy className="h-4 w-4" />
|
||||
</button>
|
||||
<a
|
||||
href={"data:image/png;base64," + img.data}
|
||||
href={getImageSrc(img.data)}
|
||||
download={"generated-" + i + "-" + Date.now() + ".png"}
|
||||
className="p-1.5 bg-white/10 hover:bg-white/20 rounded-full text-white backdrop-blur-md transition-colors"
|
||||
title="Download"
|
||||
|
|
@ -273,6 +311,8 @@ export function Gallery() {
|
|||
>
|
||||
<Download className="h-4 w-4" />
|
||||
</a>
|
||||
{/* Remix button - only for Whisk (base64) images for now */}
|
||||
{(!img.provider || img.provider === 'whisk') && (
|
||||
<button
|
||||
onClick={(e) => {
|
||||
e.stopPropagation();
|
||||
|
|
@ -283,8 +323,9 @@ export function Gallery() {
|
|||
>
|
||||
<Wand2 className="h-4 w-4" />
|
||||
</button>
|
||||
{/* Video button - only for 16:9 images */}
|
||||
{img.aspectRatio === '16:9' ? (
|
||||
)}
|
||||
{/* Video button - only for 16:9 images AND Whisk provider (base64) */}
|
||||
{img.aspectRatio === '16:9' && (!img.provider || img.provider === 'whisk') ? (
|
||||
<button
|
||||
onClick={(e) => {
|
||||
e.stopPropagation();
|
||||
|
|
@ -299,7 +340,7 @@ export function Gallery() {
|
|||
<button
|
||||
disabled
|
||||
className="p-2 bg-gray-500/30 rounded-full text-white/30 cursor-not-allowed border border-white/5"
|
||||
title="Video generation requires 16:9 images"
|
||||
title="Video generation requires 16:9 images and Whisk provider"
|
||||
>
|
||||
<Film className="h-4 w-4" />
|
||||
</button>
|
||||
|
|
@ -364,7 +405,7 @@ export function Gallery() {
|
|||
>
|
||||
|
||||
<img
|
||||
src={"data:image/png;base64," + selectedImage.data}
|
||||
src={getImageSrc(selectedImage.data)}
|
||||
alt={selectedImage.prompt}
|
||||
className="max-w-full max-h-[85vh] object-contain rounded-lg shadow-2xl"
|
||||
/>
|
||||
|
|
@ -375,13 +416,14 @@ export function Gallery() {
|
|||
</p>
|
||||
<div className="flex gap-3">
|
||||
<a
|
||||
href={"data:image/png;base64," + selectedImage.data}
|
||||
href={getImageSrc(selectedImage.data)}
|
||||
download={"generated-" + selectedIndex + "-" + Date.now() + ".png"}
|
||||
className="flex items-center gap-2 px-4 py-2 bg-primary text-primary-foreground hover:bg-primary/90 rounded-full font-medium transition-colors"
|
||||
>
|
||||
<Download className="h-4 w-4" />
|
||||
Download Current
|
||||
</a>
|
||||
{(!selectedImage.provider || selectedImage.provider === 'whisk') && (
|
||||
<button
|
||||
onClick={() => {
|
||||
if (selectedImage) openVideoModal(selectedImage);
|
||||
|
|
@ -391,6 +433,7 @@ export function Gallery() {
|
|||
<Film className="h-4 w-4" />
|
||||
Generate Video
|
||||
</button>
|
||||
)}
|
||||
<button
|
||||
onClick={() => {
|
||||
setPrompt(selectedImage.prompt);
|
||||
|
|
|
|||
243
components/GrokChat.tsx
Normal file
243
components/GrokChat.tsx
Normal file
|
|
@ -0,0 +1,243 @@
|
|||
"use client";
|
||||
|
||||
import React, { useState, useRef, useEffect } from 'react';
|
||||
import { motion, AnimatePresence } from 'framer-motion';
|
||||
import { MessageCircle, X, Send, MinusSquare, Maximize2, Minimize2, Loader2, Bot } from 'lucide-react';
|
||||
import { cn } from '@/lib/utils';
|
||||
import { useStore } from '@/lib/store';
|
||||
|
||||
interface Message {
|
||||
role: 'user' | 'assistant';
|
||||
content: string;
|
||||
}
|
||||
|
||||
export function GrokChat() {
|
||||
const [isOpen, setIsOpen] = useState(false);
|
||||
const [isMinimized, setIsMinimized] = useState(false);
|
||||
const [messages, setMessages] = useState<Message[]>([]);
|
||||
const [input, setInput] = useState('');
|
||||
const [isLoading, setIsLoading] = useState(false);
|
||||
const messagesEndRef = useRef<HTMLDivElement>(null);
|
||||
const inputRef = useRef<HTMLInputElement>(null);
|
||||
|
||||
// Auto-scroll to bottom
|
||||
useEffect(() => {
|
||||
messagesEndRef.current?.scrollIntoView({ behavior: 'smooth' });
|
||||
}, [messages, isOpen]);
|
||||
|
||||
// Focus input when opened
|
||||
useEffect(() => {
|
||||
if (isOpen && !isMinimized) {
|
||||
setTimeout(() => inputRef.current?.focus(), 100);
|
||||
}
|
||||
}, [isOpen, isMinimized]);
|
||||
|
||||
const handleSend = async () => {
|
||||
if (!input.trim() || isLoading) return;
|
||||
|
||||
const userMsg = input.trim();
|
||||
setInput('');
|
||||
setMessages(prev => [...prev, { role: 'user', content: userMsg }]);
|
||||
setIsLoading(true);
|
||||
|
||||
try {
|
||||
// Retrieve history for context (optional, limiting to last 10 messages)
|
||||
const history = messages.slice(-10).map(m => ({ role: m.role, content: m.content }));
|
||||
|
||||
// Get cookies from store
|
||||
const { settings } = useStore.getState();
|
||||
const grokCookies = settings.grokCookies;
|
||||
|
||||
// Parse cookies string to object if retrieved from text area (simple key=value parsing)
|
||||
let cookieObj: Record<string, string> = {};
|
||||
if (grokCookies) {
|
||||
// Basic parsing for "name=value; name2=value2" or JSON
|
||||
try {
|
||||
// Try JSON first
|
||||
const parsed = JSON.parse(grokCookies);
|
||||
|
||||
if (Array.isArray(parsed)) {
|
||||
// Handle standard cookie export format (list of objects)
|
||||
parsed.forEach((c: any) => {
|
||||
if (c.name && c.value) {
|
||||
cookieObj[c.name] = c.value;
|
||||
}
|
||||
});
|
||||
} else if (typeof parsed === 'object' && parsed !== null) {
|
||||
// Handle direct key-value object
|
||||
// Cast to ensure type compatibility if needed, though 'parsed' is anyish here
|
||||
cookieObj = parsed as Record<string, string>;
|
||||
}
|
||||
} catch {
|
||||
// Try semicolon separated
|
||||
grokCookies.split(';').forEach((c: string) => {
|
||||
const parts = c.trim().split('=');
|
||||
if (parts.length >= 2) {
|
||||
const key = parts[0].trim();
|
||||
const val = parts.slice(1).join('=').trim();
|
||||
if (key && val) cookieObj[key] = val;
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
const res = await fetch('/api/grok-debug', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({
|
||||
message: userMsg,
|
||||
history: history,
|
||||
cookies: cookieObj,
|
||||
userAgent: navigator.userAgent
|
||||
})
|
||||
});
|
||||
|
||||
const data = await res.json();
|
||||
|
||||
if (data.error || data.detail) {
|
||||
// Handle both simple error string and FastAPI detail array
|
||||
const errorMsg = data.error || JSON.stringify(data.detail);
|
||||
throw new Error(errorMsg);
|
||||
}
|
||||
|
||||
setMessages(prev => [...prev, { role: 'assistant', content: data.response }]);
|
||||
|
||||
} catch (error: any) {
|
||||
console.error('Grok Chat Error:', error);
|
||||
setMessages(prev => [...prev, {
|
||||
role: 'assistant',
|
||||
content: `Error: ${error.message || 'Failed to connect to Grok.'}`
|
||||
}]);
|
||||
} finally {
|
||||
setIsLoading(false);
|
||||
}
|
||||
};
|
||||
|
||||
const handleKeyDown = (e: React.KeyboardEvent) => {
|
||||
if (e.key === 'Enter' && !e.shiftKey) {
|
||||
e.preventDefault();
|
||||
handleSend();
|
||||
}
|
||||
};
|
||||
|
||||
return (
|
||||
<div className="fixed bottom-4 right-4 z-[100] flex flex-col items-end pointer-events-none">
|
||||
|
||||
{/* Toggle Button */}
|
||||
{!isOpen && (
|
||||
<motion.button
|
||||
initial={{ scale: 0 }}
|
||||
animate={{ scale: 1 }}
|
||||
whileHover={{ scale: 1.1 }}
|
||||
whileTap={{ scale: 0.9 }}
|
||||
onClick={() => setIsOpen(true)}
|
||||
className="pointer-events-auto bg-black border border-white/20 text-white p-4 rounded-full shadow-2xl hover:shadow-purple-500/20 hover:border-purple-500/50 transition-all group"
|
||||
>
|
||||
<Bot className="h-8 w-8 group-hover:text-purple-400 transition-colors" />
|
||||
</motion.button>
|
||||
)}
|
||||
|
||||
{/* Chat Window */}
|
||||
<AnimatePresence>
|
||||
{isOpen && (
|
||||
<motion.div
|
||||
initial={{ opacity: 0, y: 20, scale: 0.9 }}
|
||||
animate={{
|
||||
opacity: 1,
|
||||
y: 0,
|
||||
scale: 1,
|
||||
height: isMinimized ? 'auto' : '500px',
|
||||
width: isMinimized ? '300px' : '380px'
|
||||
}}
|
||||
exit={{ opacity: 0, y: 20, scale: 0.9 }}
|
||||
className="pointer-events-auto bg-black/90 backdrop-blur-xl border border-white/10 rounded-2xl shadow-2xl overflow-hidden flex flex-col"
|
||||
>
|
||||
{/* Header */}
|
||||
<div className="flex items-center justify-between p-4 border-b border-white/10 bg-white/5 cursor-pointer"
|
||||
onClick={() => setIsMinimized(!isMinimized)}>
|
||||
<div className="flex items-center gap-2">
|
||||
<Bot className="h-5 w-5 text-purple-400" />
|
||||
<span className="font-bold text-white tracking-wide">Grok AI</span>
|
||||
</div>
|
||||
<div className="flex items-center gap-1">
|
||||
<button
|
||||
onClick={(e) => { e.stopPropagation(); setIsMinimized(!isMinimized); }}
|
||||
className="p-1.5 hover:bg-white/10 rounded-md text-white/70 hover:text-white transition-colors"
|
||||
>
|
||||
{isMinimized ? <Maximize2 className="h-4 w-4" /> : <Minimize2 className="h-4 w-4" />}
|
||||
</button>
|
||||
<button
|
||||
onClick={(e) => { e.stopPropagation(); setIsOpen(false); }}
|
||||
className="p-1.5 hover:bg-red-500/20 hover:text-red-400 rounded-md text-white/70 transition-colors"
|
||||
>
|
||||
<X className="h-4 w-4" />
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Messages Area */}
|
||||
{!isMinimized && (
|
||||
<>
|
||||
<div className="flex-1 overflow-y-auto p-4 space-y-4 custom-scrollbar">
|
||||
{messages.length === 0 && (
|
||||
<div className="flex flex-col items-center justify-center h-full text-center text-white/30 space-y-2">
|
||||
<Bot className="h-12 w-12 opacity-20" />
|
||||
<p className="text-sm">Ask Grok anything...</p>
|
||||
</div>
|
||||
)}
|
||||
{messages.map((msg, idx) => (
|
||||
<div key={idx} className={cn(
|
||||
"flex w-full",
|
||||
msg.role === 'user' ? "justify-end" : "justify-start"
|
||||
)}>
|
||||
<div className={cn(
|
||||
"max-w-[85%] rounded-2xl px-4 py-2.5 text-sm leading-relaxed",
|
||||
msg.role === 'user'
|
||||
? "bg-purple-600/80 text-white rounded-br-sm"
|
||||
: "bg-white/10 text-white/90 rounded-bl-sm"
|
||||
)}>
|
||||
{msg.content}
|
||||
</div>
|
||||
</div>
|
||||
))}
|
||||
{isLoading && (
|
||||
<div className="flex justify-start">
|
||||
<div className="bg-white/5 rounded-2xl px-4 py-2 flex items-center gap-2">
|
||||
<Loader2 className="h-4 w-4 animate-spin text-purple-400" />
|
||||
<span className="text-xs text-white/50">Computing...</span>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
<div ref={messagesEndRef} />
|
||||
</div>
|
||||
|
||||
{/* Input Area */}
|
||||
<div className="p-4 border-t border-white/10 bg-white/5">
|
||||
<div className="flex gap-2">
|
||||
<input
|
||||
ref={inputRef}
|
||||
type="text"
|
||||
value={input}
|
||||
onChange={(e) => setInput(e.target.value)}
|
||||
onKeyDown={handleKeyDown}
|
||||
placeholder="Type a message..."
|
||||
className="flex-1 bg-black/50 border border-white/10 rounded-xl px-4 py-2.5 text-sm text-white focus:outline-none focus:border-purple-500/50 focus:ring-1 focus:ring-purple-500/20 transition-all placeholder:text-white/20"
|
||||
disabled={isLoading}
|
||||
/>
|
||||
<button
|
||||
onClick={handleSend}
|
||||
disabled={!input.trim() || isLoading}
|
||||
className="p-2.5 bg-purple-600 hover:bg-purple-500 disabled:opacity-50 disabled:cursor-not-allowed rounded-xl text-white transition-colors shadow-lg shadow-purple-900/20"
|
||||
>
|
||||
<Send className="h-4 w-4" />
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
</>
|
||||
)}
|
||||
</motion.div>
|
||||
)}
|
||||
</AnimatePresence>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
|
@ -12,10 +12,11 @@ export function Navbar() {
|
|||
const navItems = [
|
||||
{ id: 'gallery', label: 'Create', icon: Sparkles },
|
||||
{ id: 'library', label: 'Prompt Library', icon: LayoutGrid },
|
||||
{ id: 'history', label: 'Uploads', icon: Clock }, // CORRECTED: id should match store ViewType 'history' not 'uploads'
|
||||
{ id: 'history', label: 'Uploads', icon: Clock },
|
||||
];
|
||||
|
||||
return (
|
||||
<>
|
||||
<div className="fixed top-0 left-0 right-0 z-50 bg-background/80 backdrop-blur-xl border-b border-border">
|
||||
{/* Yellow Accent Line */}
|
||||
<div className="h-1 w-full bg-primary" />
|
||||
|
|
@ -29,7 +30,7 @@ export function Navbar() {
|
|||
<span className="text-xl font-bold text-foreground tracking-tight">kv-pix</span>
|
||||
</div>
|
||||
|
||||
{/* Center Navigation */}
|
||||
{/* Center Navigation (Desktop) */}
|
||||
<div className="hidden md:flex items-center gap-1 bg-secondary/50 p-1 rounded-full border border-border/50">
|
||||
{navItems.map((item) => (
|
||||
<button
|
||||
|
|
@ -74,5 +75,53 @@ export function Navbar() {
|
|||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Mobile Bottom Navigation */}
|
||||
<div className="md:hidden fixed bottom-0 left-0 right-0 z-50 bg-[#18181B]/90 backdrop-blur-xl border-t border-white/10 safe-area-bottom">
|
||||
<div className="flex items-center justify-around h-16 px-2">
|
||||
{navItems.map((item) => (
|
||||
<button
|
||||
key={item.id}
|
||||
onClick={() => {
|
||||
setCurrentView(item.id as any);
|
||||
if (item.id === 'history') setSelectionMode(null);
|
||||
}}
|
||||
className={cn(
|
||||
"flex flex-col items-center justify-center gap-1 p-2 rounded-xl transition-all w-16",
|
||||
currentView === item.id
|
||||
? "text-primary"
|
||||
: "text-white/40 hover:text-white/80"
|
||||
)}
|
||||
>
|
||||
<div className={cn(
|
||||
"p-1.5 rounded-full transition-all",
|
||||
currentView === item.id ? "bg-primary/10" : "bg-transparent"
|
||||
)}>
|
||||
<item.icon className="h-5 w-5" />
|
||||
</div>
|
||||
<span className="text-[10px] font-medium">{item.label}</span>
|
||||
</button>
|
||||
))}
|
||||
{/* Settings Item for Mobile */}
|
||||
<button
|
||||
onClick={() => setCurrentView('settings')}
|
||||
className={cn(
|
||||
"flex flex-col items-center justify-center gap-1 p-2 rounded-xl transition-all w-16",
|
||||
currentView === 'settings'
|
||||
? "text-primary"
|
||||
: "text-white/40 hover:text-white/80"
|
||||
)}
|
||||
>
|
||||
<div className={cn(
|
||||
"p-1.5 rounded-full transition-all",
|
||||
currentView === 'settings' ? "bg-primary/10" : "bg-transparent"
|
||||
)}>
|
||||
<Settings className="h-5 w-5" />
|
||||
</div>
|
||||
<span className="text-[10px] font-medium">Settings</span>
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
</>
|
||||
);
|
||||
}
|
||||
|
|
|
|||
|
|
@ -3,7 +3,7 @@
|
|||
import React, { useRef, useState, useEffect } from "react";
|
||||
import { useStore, ReferenceCategory } from "@/lib/store";
|
||||
import { cn } from "@/lib/utils";
|
||||
import { Sparkles, Image as ImageIcon, X, Hash, AlertTriangle, Upload, Zap, Brain } from "lucide-react";
|
||||
import { Sparkles, Maximize2, X, Hash, AlertTriangle, Upload, Zap, Brain, Settings, Settings2 } from "lucide-react";
|
||||
|
||||
const IMAGE_COUNTS = [1, 2, 4];
|
||||
|
||||
|
|
@ -13,10 +13,12 @@ export function PromptHero() {
|
|||
settings, setSettings,
|
||||
references, setReference, addReference, removeReference, clearReferences,
|
||||
setSelectionMode, setCurrentView,
|
||||
history, setHistory
|
||||
history, setHistory,
|
||||
setIsGenerating, // Get global setter
|
||||
setShowCookieExpired
|
||||
} = useStore();
|
||||
|
||||
const [isGenerating, setIsGenerating] = useState(false);
|
||||
const [isGenerating, setLocalIsGenerating] = useState(false);
|
||||
const [uploadingRefs, setUploadingRefs] = useState<Record<string, boolean>>({});
|
||||
const [errorNotification, setErrorNotification] = useState<{ message: string; type: 'error' | 'warning' } | null>(null);
|
||||
const textareaRef = useRef<HTMLTextAreaElement>(null);
|
||||
|
|
@ -55,6 +57,7 @@ export function PromptHero() {
|
|||
}
|
||||
|
||||
setIsGenerating(true);
|
||||
setLocalIsGenerating(true); // Keep local state for button UI
|
||||
|
||||
try {
|
||||
// Route to the selected provider
|
||||
|
|
@ -74,14 +77,15 @@ export function PromptHero() {
|
|||
})
|
||||
});
|
||||
} else if (provider === 'meta') {
|
||||
// Meta AI
|
||||
res = await fetch('/api/meta/generate', {
|
||||
// Meta AI via Python service (metaai-api)
|
||||
// Meta AI always generates 4 images, hardcode this
|
||||
res = await fetch('/api/meta-crawl', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({
|
||||
prompt: finalPrompt,
|
||||
cookies: settings.metaCookies,
|
||||
imageCount: settings.imageCount
|
||||
num_images: 4 // Meta AI always returns 4 images
|
||||
})
|
||||
});
|
||||
} else {
|
||||
|
|
@ -121,10 +125,11 @@ export function PromptHero() {
|
|||
// Add images one by one with createdAt
|
||||
for (const img of data.images) {
|
||||
await addToGallery({
|
||||
data: img.data,
|
||||
data: img.data || img.url, // Use URL as fallback (Meta AI returns URLs)
|
||||
prompt: img.prompt,
|
||||
aspectRatio: img.aspectRatio || settings.aspectRatio,
|
||||
createdAt: Date.now()
|
||||
createdAt: Date.now(),
|
||||
provider: provider as 'whisk' | 'grok' | 'meta'
|
||||
});
|
||||
}
|
||||
}
|
||||
|
|
@ -140,6 +145,12 @@ export function PromptHero() {
|
|||
message: '🚫 Content Policy: The reference image contains a recognizable person. Google blocks generating images of real/famous people. Try using a different reference image without identifiable faces.',
|
||||
type: 'warning'
|
||||
});
|
||||
} else if (errorMessage.includes("Oops! I can't generate that image") ||
|
||||
errorMessage.includes("Can I help you imagine something else")) {
|
||||
setErrorNotification({
|
||||
message: '🛡️ Meta AI Safety: The prompt was rejected by Meta AI safety filters. Please try a different prompt.',
|
||||
type: 'warning'
|
||||
});
|
||||
} else if (errorMessage.includes('Safety Filter') ||
|
||||
errorMessage.includes('SAFETY_FILTER') ||
|
||||
errorMessage.includes('content_policy')) {
|
||||
|
|
@ -168,9 +179,15 @@ export function PromptHero() {
|
|||
});
|
||||
} else if (errorMessage.includes('401') ||
|
||||
errorMessage.includes('Unauthorized') ||
|
||||
errorMessage.includes('cookies not found')) {
|
||||
errorMessage.includes('cookies not found') ||
|
||||
errorMessage.includes('Auth failed')) {
|
||||
|
||||
// Trigger the new popup
|
||||
setShowCookieExpired(true);
|
||||
|
||||
// Also show a simplified toast as backup
|
||||
setErrorNotification({
|
||||
message: '🔐 Authentication Error: Your Whisk cookies may have expired. Please update them in Settings.',
|
||||
message: '🔐 Authentication Error: Cookies Refreshed Required',
|
||||
type: 'error'
|
||||
});
|
||||
} else {
|
||||
|
|
@ -183,6 +200,7 @@ export function PromptHero() {
|
|||
setTimeout(() => setErrorNotification(null), 8000);
|
||||
} finally {
|
||||
setIsGenerating(false);
|
||||
setLocalIsGenerating(false);
|
||||
}
|
||||
};
|
||||
|
||||
|
|
@ -344,11 +362,11 @@ export function PromptHero() {
|
|||
);
|
||||
|
||||
return (
|
||||
<div className="w-full max-w-4xl mx-auto my-8 md:my-12 px-4">
|
||||
<div className="w-full max-w-3xl mx-auto my-4 md:my-6 px-4">
|
||||
{/* Error/Warning Notification Toast */}
|
||||
{errorNotification && (
|
||||
<div className={cn(
|
||||
"mb-4 p-4 rounded-xl border flex items-start gap-3 animate-in slide-in-from-top-4 duration-300",
|
||||
"mb-4 p-3 rounded-lg border flex items-start gap-3 animate-in slide-in-from-top-4 duration-300",
|
||||
errorNotification.type === 'warning'
|
||||
? "bg-amber-500/10 border-amber-500/30 text-amber-200"
|
||||
: "bg-red-500/10 border-red-500/30 text-red-200"
|
||||
|
|
@ -373,79 +391,79 @@ export function PromptHero() {
|
|||
)}
|
||||
|
||||
<div className={cn(
|
||||
"relative flex flex-col gap-4 rounded-3xl bg-[#1A1A1E]/90 bg-gradient-to-b from-white/[0.02] to-transparent p-6 shadow-2xl border border-white/5 backdrop-blur-sm transition-all",
|
||||
"relative flex flex-col gap-3 rounded-2xl bg-[#1A1A1E]/95 bg-gradient-to-b from-white/[0.02] to-transparent p-4 shadow-xl border border-white/5 backdrop-blur-sm transition-all",
|
||||
isGenerating && "ring-1 ring-purple-500/30"
|
||||
)}>
|
||||
|
||||
|
||||
|
||||
{/* Header / Title + Provider Toggle */}
|
||||
<div className="flex items-center justify-between mb-4">
|
||||
<div className="flex items-center gap-4">
|
||||
<div className="h-12 w-12 rounded-2xl bg-gradient-to-br from-amber-500/20 to-purple-600/20 border border-white/5 flex items-center justify-center">
|
||||
<div className="flex items-center justify-between mb-1">
|
||||
<div className="flex items-center gap-3">
|
||||
<div className="h-8 w-8 rounded-lg bg-gradient-to-br from-amber-500/20 to-purple-600/20 border border-white/5 flex items-center justify-center">
|
||||
{settings.provider === 'grok' ? (
|
||||
<Zap className="h-6 w-6 text-yellow-400" />
|
||||
<Zap className="h-4 w-4 text-yellow-400" />
|
||||
) : settings.provider === 'meta' ? (
|
||||
<Brain className="h-6 w-6 text-blue-400" />
|
||||
<Brain className="h-4 w-4 text-blue-400" />
|
||||
) : (
|
||||
<Sparkles className="h-6 w-6 text-amber-300" />
|
||||
<Sparkles className="h-4 w-4 text-amber-300" />
|
||||
)}
|
||||
</div>
|
||||
<div>
|
||||
<h2 className="text-xl font-bold text-white tracking-tight">Create & Remix</h2>
|
||||
<p className="text-xs text-white/50 font-medium">
|
||||
Powered by <span className={cn(
|
||||
<h2 className="text-base font-bold text-white tracking-tight flex items-center gap-2">
|
||||
Create
|
||||
<span className="text-[10px] font-medium text-white/40 border-l border-white/10 pl-2">
|
||||
by <span className={cn(
|
||||
settings.provider === 'grok' ? "text-yellow-400" :
|
||||
settings.provider === 'meta' ? "text-blue-400" :
|
||||
"text-amber-300"
|
||||
)}>
|
||||
{settings.provider === 'grok' ? 'Grok (xAI)' :
|
||||
{settings.provider === 'grok' ? 'Grok' :
|
||||
settings.provider === 'meta' ? 'Meta AI' :
|
||||
'Google Whisk'}
|
||||
'Whisk'}
|
||||
</span>
|
||||
</p>
|
||||
</span>
|
||||
</h2>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Provider Toggle */}
|
||||
<div className="flex bg-black/40 p-1 rounded-xl border border-white/10 backdrop-blur-md">
|
||||
<div className="flex bg-black/40 p-0.5 rounded-lg border border-white/10 backdrop-blur-md scale-90 origin-right">
|
||||
<button
|
||||
onClick={() => setSettings({ provider: 'whisk' })}
|
||||
className={cn(
|
||||
"flex items-center gap-2 px-3 py-1.5 rounded-lg text-xs font-medium transition-all",
|
||||
"flex items-center gap-1.5 px-2.5 py-1 rounded-md text-[10px] font-medium transition-all",
|
||||
settings.provider === 'whisk' || !settings.provider
|
||||
? "bg-white/10 text-white shadow-sm"
|
||||
: "text-white/40 hover:text-white/70 hover:bg-white/5"
|
||||
)}
|
||||
title="Google Whisk"
|
||||
>
|
||||
<Sparkles className="h-3.5 w-3.5" />
|
||||
<Sparkles className="h-3 w-3" />
|
||||
<span className="hidden sm:inline">Whisk</span>
|
||||
</button>
|
||||
<button
|
||||
onClick={() => setSettings({ provider: 'grok' })}
|
||||
className={cn(
|
||||
"flex items-center gap-2 px-3 py-1.5 rounded-lg text-xs font-medium transition-all",
|
||||
"flex items-center gap-1.5 px-2.5 py-1 rounded-md text-[10px] font-medium transition-all",
|
||||
settings.provider === 'grok'
|
||||
? "bg-white/10 text-white shadow-sm"
|
||||
: "text-white/40 hover:text-white/70 hover:bg-white/5"
|
||||
)}
|
||||
title="Grok (xAI)"
|
||||
>
|
||||
<Zap className="h-3.5 w-3.5" />
|
||||
<Zap className="h-3 w-3" />
|
||||
<span className="hidden sm:inline">Grok</span>
|
||||
</button>
|
||||
<button
|
||||
onClick={() => setSettings({ provider: 'meta' })}
|
||||
className={cn(
|
||||
"flex items-center gap-2 px-3 py-1.5 rounded-lg text-xs font-medium transition-all",
|
||||
"flex items-center gap-1.5 px-2.5 py-1 rounded-md text-[10px] font-medium transition-all",
|
||||
settings.provider === 'meta'
|
||||
? "bg-white/10 text-white shadow-sm"
|
||||
: "text-white/40 hover:text-white/70 hover:bg-white/5"
|
||||
)}
|
||||
title="Meta AI"
|
||||
>
|
||||
<Brain className="h-3.5 w-3.5" />
|
||||
<Brain className="h-3 w-3" />
|
||||
<span className="hidden sm:inline">Meta</span>
|
||||
</button>
|
||||
</div>
|
||||
|
|
@ -453,23 +471,23 @@ export function PromptHero() {
|
|||
|
||||
{/* Input Area */}
|
||||
<div className="relative group">
|
||||
<div className="absolute -inset-0.5 bg-gradient-to-r from-amber-500/20 to-purple-600/20 rounded-2xl blur opacity-0 group-hover:opacity-100 transition duration-500" />
|
||||
<div className="absolute -inset-0.5 bg-gradient-to-r from-amber-500/20 to-purple-600/20 rounded-xl blur opacity-0 group-hover:opacity-100 transition duration-500" />
|
||||
<textarea
|
||||
ref={textareaRef}
|
||||
value={prompt}
|
||||
onChange={(e) => setPrompt(e.target.value)}
|
||||
onKeyDown={handleKeyDown}
|
||||
onPaste={handlePaste}
|
||||
placeholder="Describe your imagination... (e.g. 'A futuristic city with flying cars')"
|
||||
className="relative w-full resize-none bg-[#0E0E10] rounded-xl p-5 text-base md:text-lg text-white placeholder:text-white/20 outline-none min-h-[120px] border border-white/10 focus:border-purple-500/50 transition-all shadow-inner"
|
||||
placeholder="Describe your imagination..."
|
||||
className="relative w-full resize-none bg-[#0E0E10] rounded-lg p-3 text-sm md:text-base text-white placeholder:text-white/20 outline-none min-h-[60px] border border-white/10 focus:border-purple-500/50 transition-all shadow-inner"
|
||||
/>
|
||||
</div>
|
||||
|
||||
{/* Controls Area */}
|
||||
<div className="flex flex-col md:flex-row items-start md:items-center justify-between gap-6 pt-2">
|
||||
<div className="flex flex-col md:flex-row items-center justify-between gap-3 pt-1">
|
||||
|
||||
{/* Left Controls: References */}
|
||||
<div className="flex flex-wrap gap-2">
|
||||
{/* Left Controls: References (Hidden for Meta AI as it doesn't support them yet) */}
|
||||
<div className={cn("flex flex-wrap gap-2", settings.provider === 'meta' && "opacity-30 pointer-events-none grayscale")}>
|
||||
{(['subject', 'scene', 'style'] as ReferenceCategory[]).map((cat) => {
|
||||
const refs = references[cat] || [];
|
||||
const hasRefs = refs.length > 0;
|
||||
|
|
@ -481,7 +499,7 @@ export function PromptHero() {
|
|||
onDragOver={handleDragOver}
|
||||
onDrop={(e) => handleDrop(e, cat)}
|
||||
className={cn(
|
||||
"flex items-center gap-2 rounded-full px-4 py-2 text-xs font-medium transition-all border relative overflow-hidden",
|
||||
"flex items-center gap-1.5 rounded-md px-3 py-1.5 text-[10px] font-medium transition-all border relative overflow-hidden",
|
||||
hasRefs
|
||||
? "bg-purple-500/10 text-purple-200 border-purple-500/30 hover:bg-purple-500/20"
|
||||
: "bg-white/5 text-white/40 border-white/5 hover:bg-white/10 hover:text-white/70 hover:border-white/10",
|
||||
|
|
@ -489,40 +507,35 @@ export function PromptHero() {
|
|||
)}
|
||||
>
|
||||
{isUploading ? (
|
||||
<div className="h-4 w-4 animate-spin rounded-full border-2 border-current border-t-transparent" />
|
||||
<div className="h-3 w-3 animate-spin rounded-full border-2 border-current border-t-transparent" />
|
||||
) : hasRefs ? (
|
||||
<div className="flex -space-x-2">
|
||||
<div className="flex -space-x-1.5">
|
||||
{refs.slice(0, 4).map((ref, idx) => (
|
||||
<img
|
||||
key={ref.id}
|
||||
src={ref.thumbnail}
|
||||
alt=""
|
||||
className="h-5 w-5 rounded-sm object-cover ring-1 ring-white/20"
|
||||
className="h-4 w-4 rounded-sm object-cover ring-1 ring-white/20"
|
||||
style={{ zIndex: 10 - idx }}
|
||||
/>
|
||||
))}
|
||||
{refs.length > 4 && (
|
||||
<div className="h-5 w-5 rounded-sm bg-purple-500/50 flex items-center justify-center text-[9px] font-bold ring-1 ring-white/20">
|
||||
+{refs.length - 4}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
) : (
|
||||
<Upload className="h-4 w-4" />
|
||||
<Upload className="h-3 w-3" />
|
||||
)}
|
||||
<span className="capitalize tracking-wide">{cat}</span>
|
||||
{refs.length > 0 && (
|
||||
<span className="text-[10px] bg-purple-500/30 text-purple-100 rounded-full px-1.5 h-4 flex items-center">{refs.length}</span>
|
||||
<span className="text-[9px] bg-purple-500/30 text-purple-100 rounded-full px-1.5 h-3 flex items-center">{refs.length}</span>
|
||||
)}
|
||||
</button>
|
||||
{/* Clear all button */}
|
||||
{hasRefs && !isUploading && (
|
||||
<button
|
||||
className="absolute -top-1 -right-1 p-1 rounded-full bg-red-500/80 text-white opacity-0 group-hover:opacity-100 transition-opacity hover:bg-red-500"
|
||||
className="absolute -top-1 -right-1 p-0.5 rounded-full bg-red-500/80 text-white opacity-0 group-hover:opacity-100 transition-opacity hover:bg-red-500"
|
||||
onClick={(e) => { e.stopPropagation(); clearReferences(cat); }}
|
||||
title={`Clear all ${cat} references`}
|
||||
>
|
||||
<X className="h-2.5 w-2.5" />
|
||||
<X className="h-2 w-2" />
|
||||
</button>
|
||||
)}
|
||||
</div>
|
||||
|
|
@ -557,67 +570,79 @@ export function PromptHero() {
|
|||
/>
|
||||
|
||||
{/* Right Controls: Settings & Generate */}
|
||||
<div className="flex flex-wrap items-center gap-3 w-full md:w-auto justify-end">
|
||||
<div className="flex flex-wrap items-center gap-2 w-full md:w-auto justify-end">
|
||||
|
||||
{/* Settings Group */}
|
||||
<div className="flex items-center gap-1 bg-[#0E0E10] p-1.5 rounded-full border border-white/10">
|
||||
<div className="flex items-center gap-0.5 bg-[#0E0E10] p-1 rounded-lg border border-white/10">
|
||||
{/* Image Count */}
|
||||
<button
|
||||
onClick={cycleImageCount}
|
||||
className="flex items-center gap-1.5 px-3 py-1.5 rounded-full text-xs font-medium text-white/60 hover:text-white hover:bg-white/5 transition-colors"
|
||||
className="flex items-center gap-1 px-2 py-1 rounded-md text-[10px] font-medium text-white/60 hover:text-white hover:bg-white/5 transition-colors"
|
||||
title="Number of images"
|
||||
>
|
||||
<Hash className="h-3.5 w-3.5 opacity-70" />
|
||||
<Hash className="h-3 w-3 opacity-70" />
|
||||
<span>{settings.imageCount}</span>
|
||||
</button>
|
||||
|
||||
<div className="w-px h-3 bg-white/10" />
|
||||
<div className="w-px h-3 bg-white/10 mx-1" />
|
||||
|
||||
{/* Aspect Ratio */}
|
||||
<button
|
||||
onClick={nextAspectRatio}
|
||||
className="px-3 py-1.5 rounded-full text-xs font-medium text-white/60 hover:text-white hover:bg-white/5 transition-colors"
|
||||
className="px-2 py-1 rounded-md text-[10px] font-medium text-white/60 hover:text-white hover:bg-white/5 transition-colors"
|
||||
title="Aspect Ratio"
|
||||
>
|
||||
<span className="opacity-70">Ratio:</span>
|
||||
<span className="ml-1 text-white/80">{settings.aspectRatio}</span>
|
||||
</button>
|
||||
|
||||
<div className="w-px h-3 bg-white/10" />
|
||||
<div className="w-px h-3 bg-white/10 mx-1" />
|
||||
|
||||
{/* Precise Mode */}
|
||||
<button
|
||||
onClick={() => setSettings({ preciseMode: !settings.preciseMode })}
|
||||
className={cn(
|
||||
"px-3 py-1.5 rounded-full text-xs font-medium transition-all flex items-center gap-1.5",
|
||||
"px-2 py-1 rounded-md text-[10px] font-medium transition-all flex items-center gap-1",
|
||||
settings.preciseMode
|
||||
? "text-amber-300 bg-amber-500/10 ring-1 ring-amber-500/30"
|
||||
: "text-white/40 hover:text-white hover:bg-white/5"
|
||||
)}
|
||||
title="Precise Mode: Uses images directly as visual reference"
|
||||
title="Precise Mode"
|
||||
>
|
||||
<span>🍌</span>
|
||||
{/* <span>🍌</span> */}
|
||||
<span>Precise</span>
|
||||
</button>
|
||||
</div>
|
||||
|
||||
{/* Generate Button */}
|
||||
<GradientButton
|
||||
<button
|
||||
onClick={handleGenerate}
|
||||
disabled={isGenerating || !prompt.trim()}
|
||||
disabled={isGenerating || !prompt.trim() || settings.provider === 'grok'}
|
||||
className={cn(
|
||||
"relative overflow-hidden px-4 py-1.5 rounded-lg font-bold text-sm text-white shadow-lg transition-all active:scale-95 group border border-white/10",
|
||||
settings.provider === 'grok'
|
||||
? "bg-gray-700 cursor-not-allowed"
|
||||
: "bg-gradient-to-r from-purple-600 to-indigo-600 hover:from-purple-500 hover:to-indigo-500 hover:shadow-indigo-500/25"
|
||||
)}
|
||||
>
|
||||
<div className="relative z-10 flex items-center gap-1.5">
|
||||
{isGenerating ? (
|
||||
<>
|
||||
<div className="h-4 w-4 animate-spin rounded-full border-2 border-white border-t-transparent" />
|
||||
<span>Creating...</span>
|
||||
<div className="h-3 w-3 animate-spin rounded-full border-2 border-white border-t-transparent" />
|
||||
<span className="animate-pulse">Dreaming...</span>
|
||||
</>
|
||||
) : settings.provider === 'grok' ? (
|
||||
<>
|
||||
<span className="opacity-80">Soon</span>
|
||||
</>
|
||||
) : (
|
||||
<>
|
||||
<Sparkles className="h-4 w-4" />
|
||||
<span>Create</span>
|
||||
<Sparkles className="h-3 w-3 group-hover:rotate-12 transition-transform" />
|
||||
<span>Generate</span>
|
||||
</>
|
||||
)}
|
||||
</GradientButton>
|
||||
</div>
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
|
|
|
|||
|
|
@ -9,7 +9,7 @@ type Provider = 'whisk' | 'grok' | 'meta';
|
|||
|
||||
const providers: { id: Provider; name: string; icon: any; description: string }[] = [
|
||||
{ id: 'whisk', name: 'Google Whisk', icon: Sparkles, description: 'ImageFX / Imagen 3' },
|
||||
{ id: 'grok', name: 'Grok (xAI)', icon: Zap, description: 'FLUX.1 model' },
|
||||
// { id: 'grok', name: 'Grok (xAI)', icon: Zap, description: 'FLUX.1 model' },
|
||||
{ id: 'meta', name: 'Meta AI', icon: Brain, description: 'Imagine / Emu' },
|
||||
];
|
||||
|
||||
|
|
|
|||
2397
data/prompts.json
2397
data/prompts.json
File diff suppressed because one or more lines are too long
|
|
@ -7,3 +7,20 @@ services:
|
|||
- "8558:3000"
|
||||
environment:
|
||||
- NODE_ENV=production
|
||||
- CRAWL4AI_URL=http://crawl4ai:8000
|
||||
depends_on:
|
||||
- crawl4ai
|
||||
|
||||
crawl4ai:
|
||||
build:
|
||||
context: ./services/crawl4ai
|
||||
dockerfile: Dockerfile
|
||||
container_name: crawl4ai
|
||||
restart: unless-stopped
|
||||
ports:
|
||||
- "8559:8000"
|
||||
environment:
|
||||
- META_RATE_LIMIT_DELAY=60
|
||||
- META_MAX_REQUESTS_HOUR=20
|
||||
- BROWSER_HEADLESS=true
|
||||
shm_size: '1gb'
|
||||
|
|
|
|||
|
|
@ -2,10 +2,11 @@ import Dexie, { Table } from 'dexie';
|
|||
|
||||
export interface ImageItem {
|
||||
id?: number;
|
||||
data: string; // Base64
|
||||
data: string; // Base64 or URL
|
||||
prompt: string;
|
||||
aspectRatio: string;
|
||||
createdAt: number;
|
||||
provider?: 'whisk' | 'grok' | 'meta'; // Track which AI generated the image
|
||||
}
|
||||
|
||||
export class KeyValuePixDB extends Dexie {
|
||||
|
|
|
|||
174
lib/providers/meta-crawl-client.ts
Normal file
174
lib/providers/meta-crawl-client.ts
Normal file
|
|
@ -0,0 +1,174 @@
|
|||
/**
|
||||
* Meta AI Crawl4AI Client
|
||||
*
|
||||
* TypeScript client for the Python Crawl4AI microservice
|
||||
* that handles Meta AI image generation with bot detection bypass.
|
||||
*/
|
||||
|
||||
const CRAWL4AI_URL = process.env.CRAWL4AI_URL || 'http://localhost:8000';
|
||||
|
||||
export interface MetaCrawlImage {
|
||||
url: string;
|
||||
data?: string; // base64
|
||||
prompt: string;
|
||||
model: string;
|
||||
}
|
||||
|
||||
export interface MetaCrawlResponse {
|
||||
success: boolean;
|
||||
images: MetaCrawlImage[];
|
||||
error?: string;
|
||||
task_id?: string;
|
||||
}
|
||||
|
||||
export interface TaskStatusResponse {
|
||||
task_id: string;
|
||||
status: 'pending' | 'processing' | 'completed' | 'failed';
|
||||
images: MetaCrawlImage[];
|
||||
error?: string;
|
||||
progress?: number;
|
||||
}
|
||||
|
||||
export class MetaCrawlClient {
|
||||
private baseUrl: string;
|
||||
|
||||
constructor(baseUrl?: string) {
|
||||
this.baseUrl = baseUrl || CRAWL4AI_URL;
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate images synchronously (waits for completion)
|
||||
*/
|
||||
async generate(
|
||||
prompt: string,
|
||||
cookies: string,
|
||||
numImages: number = 4
|
||||
): Promise<MetaCrawlImage[]> {
|
||||
console.log(`[MetaCrawl] Sending request to ${this.baseUrl}/generate/sync`);
|
||||
|
||||
const response = await fetch(`${this.baseUrl}/generate/sync`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
body: JSON.stringify({
|
||||
prompt,
|
||||
cookies,
|
||||
num_images: numImages
|
||||
})
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
const errorText = await response.text();
|
||||
throw new Error(`MetaCrawl service error: ${response.status} - ${errorText}`);
|
||||
}
|
||||
|
||||
const data: MetaCrawlResponse = await response.json();
|
||||
|
||||
if (!data.success) {
|
||||
throw new Error(data.error || 'Image generation failed');
|
||||
}
|
||||
|
||||
return data.images;
|
||||
}
|
||||
|
||||
/**
|
||||
* Start async image generation (returns immediately with task_id)
|
||||
*/
|
||||
async generateAsync(
|
||||
prompt: string,
|
||||
cookies: string,
|
||||
numImages: number = 4
|
||||
): Promise<string> {
|
||||
const response = await fetch(`${this.baseUrl}/generate`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
body: JSON.stringify({
|
||||
prompt,
|
||||
cookies,
|
||||
num_images: numImages
|
||||
})
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`MetaCrawl service error: ${response.status}`);
|
||||
}
|
||||
|
||||
const data: MetaCrawlResponse = await response.json();
|
||||
|
||||
if (!data.task_id) {
|
||||
throw new Error('No task_id returned from async generation');
|
||||
}
|
||||
|
||||
return data.task_id;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get status of an async generation task
|
||||
*/
|
||||
async getTaskStatus(taskId: string): Promise<TaskStatusResponse> {
|
||||
const response = await fetch(`${this.baseUrl}/status/${taskId}`);
|
||||
|
||||
if (!response.ok) {
|
||||
if (response.status === 404) {
|
||||
throw new Error('Task not found');
|
||||
}
|
||||
throw new Error(`MetaCrawl service error: ${response.status}`);
|
||||
}
|
||||
|
||||
return response.json();
|
||||
}
|
||||
|
||||
/**
|
||||
* Poll for async task completion
|
||||
*/
|
||||
async waitForCompletion(
|
||||
taskId: string,
|
||||
pollIntervalMs: number = 2000,
|
||||
timeoutMs: number = 120000
|
||||
): Promise<MetaCrawlImage[]> {
|
||||
const startTime = Date.now();
|
||||
|
||||
while (Date.now() - startTime < timeoutMs) {
|
||||
const status = await this.getTaskStatus(taskId);
|
||||
|
||||
if (status.status === 'completed') {
|
||||
return status.images;
|
||||
}
|
||||
|
||||
if (status.status === 'failed') {
|
||||
throw new Error(status.error || 'Generation failed');
|
||||
}
|
||||
|
||||
await new Promise(resolve => setTimeout(resolve, pollIntervalMs));
|
||||
}
|
||||
|
||||
throw new Error('Task timed out');
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if the Crawl4AI service is healthy
|
||||
*/
|
||||
async healthCheck(): Promise<boolean> {
|
||||
try {
|
||||
const response = await fetch(`${this.baseUrl}/health`);
|
||||
return response.ok;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get current rate limit status
|
||||
*/
|
||||
async getRateLimitStatus(): Promise<{
|
||||
requests_this_hour: number;
|
||||
max_per_hour: number;
|
||||
delay_seconds: number;
|
||||
}> {
|
||||
const response = await fetch(`${this.baseUrl}/rate-limit`);
|
||||
return response.json();
|
||||
}
|
||||
}
|
||||
12
lib/store.ts
12
lib/store.ts
|
|
@ -60,6 +60,12 @@ interface AppState {
|
|||
removeFromGallery: (id: number) => Promise<void>;
|
||||
clearGallery: () => Promise<void>;
|
||||
|
||||
isGenerating: boolean;
|
||||
setIsGenerating: (isGenerating: boolean) => void;
|
||||
|
||||
showCookieExpired: boolean;
|
||||
setShowCookieExpired: (show: boolean) => void;
|
||||
|
||||
|
||||
// Videos
|
||||
videos: VideoItem[];
|
||||
|
|
@ -152,6 +158,12 @@ export const useStore = create<AppState>()(
|
|||
},
|
||||
|
||||
|
||||
isGenerating: false,
|
||||
setIsGenerating: (isGenerating) => set({ isGenerating }),
|
||||
|
||||
showCookieExpired: false,
|
||||
setShowCookieExpired: (show) => set({ showCookieExpired: show }),
|
||||
|
||||
// Videos
|
||||
videos: [],
|
||||
addVideo: (video) => set((state) => ({ videos: [video, ...state.videos] })),
|
||||
|
|
|
|||
38
services/crawl4ai/Dockerfile
Normal file
38
services/crawl4ai/Dockerfile
Normal file
|
|
@ -0,0 +1,38 @@
|
|||
# Meta AI Python Service
|
||||
# Uses metaai-api library from https://github.com/mir-ashiq/metaai-api
|
||||
|
||||
FROM python:3.11-slim
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
# Install system dependencies for requests-html (pyppeteer)
|
||||
RUN apt-get update && apt-get install -y \
|
||||
git \
|
||||
chromium \
|
||||
chromium-driver \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
# Set Chrome path for pyppeteer
|
||||
ENV CHROMIUM_EXECUTABLE=/usr/bin/chromium
|
||||
|
||||
# Copy requirements first for caching
|
||||
COPY requirements.txt .
|
||||
RUN pip install --no-cache-dir -r requirements.txt
|
||||
|
||||
# Install metaai-api from GitHub (for latest updates)
|
||||
RUN pip install --no-cache-dir git+https://github.com/mir-ashiq/metaai-api.git
|
||||
|
||||
# Copy application code
|
||||
COPY app/ ./app/
|
||||
|
||||
# Create non-root user
|
||||
RUN useradd -m -u 1000 appuser && chown -R appuser:appuser /app
|
||||
USER appuser
|
||||
|
||||
EXPOSE 8000
|
||||
|
||||
# Health check
|
||||
HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
|
||||
CMD curl -f http://localhost:8000/health || exit 1
|
||||
|
||||
CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"]
|
||||
91
services/crawl4ai/README.md
Normal file
91
services/crawl4ai/README.md
Normal file
|
|
@ -0,0 +1,91 @@
|
|||
# Meta AI Image Generation Service
|
||||
|
||||
FastAPI wrapper for Meta AI image generation using [metaai-api](https://github.com/mir-ashiq/metaai-api).
|
||||
|
||||
## Features
|
||||
|
||||
- 🎨 **Image Generation** - Generate AI images via Meta AI
|
||||
- 🔐 **Cookie Auth** - Uses Facebook/Meta cookies for authentication
|
||||
- ⚡ **Rate Limiting** - Built-in rate limiting to prevent shadowban
|
||||
- 🐳 **Docker Ready** - Easy deployment with Docker
|
||||
|
||||
## Quick Start
|
||||
|
||||
### Local Development
|
||||
|
||||
```bash
|
||||
# Install dependencies
|
||||
pip install -r requirements.txt
|
||||
pip install git+https://github.com/mir-ashiq/metaai-api.git
|
||||
|
||||
# Run server
|
||||
uvicorn app.main:app --reload --port 8000
|
||||
```
|
||||
|
||||
### Docker
|
||||
|
||||
```bash
|
||||
docker build -t meta-ai-service .
|
||||
docker run -p 8000:8000 meta-ai-service
|
||||
```
|
||||
|
||||
## API Endpoints
|
||||
|
||||
| Endpoint | Method | Description |
|
||||
|----------|--------|-------------|
|
||||
| `/health` | GET | Health check |
|
||||
| `/generate/sync` | POST | Sync generation (waits for result) |
|
||||
| `/generate` | POST | Async generation (returns task_id) |
|
||||
| `/status/{task_id}` | GET | Get async task status |
|
||||
| `/rate-limit` | GET | Get rate limit status |
|
||||
|
||||
## Usage Example
|
||||
|
||||
```python
|
||||
import requests
|
||||
|
||||
response = requests.post("http://localhost:8000/generate/sync", json={
|
||||
"prompt": "Imagine a beautiful sunset over mountains",
|
||||
"cookies": [
|
||||
{"name": "c_user", "value": "..."},
|
||||
{"name": "xs", "value": "..."},
|
||||
# ... other Facebook/Meta cookies
|
||||
]
|
||||
})
|
||||
|
||||
print(response.json())
|
||||
# {"success": true, "images": [{"url": "...", "prompt": "...", "model": "imagine"}]}
|
||||
```
|
||||
|
||||
## Required Cookies
|
||||
|
||||
You need Facebook/Meta cookies for authentication:
|
||||
|
||||
| Cookie | Description |
|
||||
|--------|-------------|
|
||||
| `c_user` | Facebook user ID |
|
||||
| `xs` | Facebook session token |
|
||||
| `sb` | Session browser identifier |
|
||||
| `datr` | Device tracking |
|
||||
| `abra_sess` | Meta AI session |
|
||||
|
||||
Export cookies from browser using Cookie-Editor extension.
|
||||
|
||||
## Environment Variables
|
||||
|
||||
| Variable | Default | Description |
|
||||
|----------|---------|-------------|
|
||||
| `META_RATE_LIMIT_DELAY` | 30 | Seconds between requests |
|
||||
| `META_MAX_REQUESTS_HOUR` | 30 | Max requests per hour |
|
||||
|
||||
## Updating metaai-api
|
||||
|
||||
To get the latest version of the underlying library:
|
||||
|
||||
```bash
|
||||
pip install -U git+https://github.com/mir-ashiq/metaai-api.git
|
||||
```
|
||||
|
||||
## License
|
||||
|
||||
MIT
|
||||
1
services/crawl4ai/app/__init__.py
Normal file
1
services/crawl4ai/app/__init__.py
Normal file
|
|
@ -0,0 +1 @@
|
|||
# Crawl4AI Meta AI Service
|
||||
21
services/crawl4ai/app/config.py
Normal file
21
services/crawl4ai/app/config.py
Normal file
|
|
@ -0,0 +1,21 @@
|
|||
"""
|
||||
Configuration settings for Meta AI service.
|
||||
|
||||
Uses environment variables with sensible defaults.
|
||||
"""
|
||||
import os
|
||||
|
||||
|
||||
class Settings:
|
||||
"""Configuration settings loaded from environment variables"""
|
||||
|
||||
# Rate limiting
|
||||
rate_limit_delay: float = float(os.getenv("META_RATE_LIMIT_DELAY", "30"))
|
||||
max_requests_per_hour: int = int(os.getenv("META_MAX_REQUESTS_HOUR", "30"))
|
||||
|
||||
# Meta AI URLs (used by our wrapper)
|
||||
meta_ai_base: str = "https://www.meta.ai"
|
||||
graphql_endpoint: str = "https://www.meta.ai/api/graphql/"
|
||||
|
||||
|
||||
settings = Settings()
|
||||
7
services/crawl4ai/app/grok/__init__.py
Normal file
7
services/crawl4ai/app/grok/__init__.py
Normal file
|
|
@ -0,0 +1,7 @@
|
|||
from .logger import Log
|
||||
from .runtime import Run, Utils
|
||||
from .headers import Headers
|
||||
from .reverse.parser import Parser
|
||||
from .reverse.xctid import Signature
|
||||
from .reverse.anon import Anon
|
||||
from .grok import Grok
|
||||
328
services/crawl4ai/app/grok/grok.py
Normal file
328
services/crawl4ai/app/grok/grok.py
Normal file
|
|
@ -0,0 +1,328 @@
|
|||
from .logger import Log
|
||||
from .runtime import Run, Utils
|
||||
from .reverse.parser import Parser
|
||||
from .reverse.xctid import Signature
|
||||
from .reverse.anon import Anon
|
||||
from .headers import Headers
|
||||
from curl_cffi import requests, CurlMime
|
||||
from dataclasses import dataclass, field
|
||||
from bs4 import BeautifulSoup
|
||||
from json import dumps, loads
|
||||
from secrets import token_hex
|
||||
from uuid import uuid4
|
||||
|
||||
@dataclass
|
||||
class Models:
|
||||
models: dict[str, list[str]] = field(default_factory=lambda: {
|
||||
"grok-3-auto": ["MODEL_MODE_AUTO", "auto"],
|
||||
"grok-3-fast": ["MODEL_MODE_FAST", "fast"],
|
||||
"grok-4": ["MODEL_MODE_EXPERT", "expert"],
|
||||
"grok-4-mini-thinking-tahoe": ["MODEL_MODE_GROK_4_MINI_THINKING", "grok-4-mini-thinking"]
|
||||
})
|
||||
|
||||
def get_model_mode(self, model: str, index: int) -> str:
|
||||
return self.models.get(model, ["MODEL_MODE_AUTO", "auto"])[index]
|
||||
|
||||
_Models = Models()
|
||||
|
||||
class Grok:
    """Reverse-engineered client for grok.com's anonymous web chat.

    Replays the browser's bootstrap sequence (page load, three Next.js
    server-action POSTs to /c, secp256k1 anti-bot challenge signing) and
    then talks to the chat REST API directly.

    NOTE(review): behaviour depends on grok.com internals (chunk scripts,
    server-action ids, x-statsig signing) and may break whenever the site
    ships a new build.
    """

    def __init__(self, model: str = "grok-3-auto", proxy: str = None) -> None:
        """Create a fresh impersonated session.

        :param model: Logical model name; resolved to mode strings via Models.
        :param proxy: Optional proxy URL applied to all requests.
        """
        self.session: requests.session.Session = requests.Session(impersonate="chrome136", default_headers=False)
        self.headers: Headers = Headers()

        self.model_mode: str = _Models.get_model_mode(model, 0)
        self.model: str = model
        self.mode: str = _Models.get_model_mode(model, 1)
        self.c_run: int = 0  # how many /c bootstrap POSTs have been issued
        self.keys: dict = Anon.generate_keys()  # secp256k1 keypair for the anon challenge
        if proxy:
            self.session.proxies = {
                "all": proxy
            }

    def _load(self, extra_data: dict = None) -> None:
        """Initialise page state: scrape grok.com/c, or restore a snapshot.

        :param extra_data: A previous call's `extra_data` dict (cookies,
            actions, tokens) used to resume an existing conversation.
        """
        if not extra_data:
            self.session.headers = self.headers.LOAD
            load_site: requests.models.Response = self.session.get('https://grok.com/c')
            self.session.cookies.update(load_site.cookies)

            # Next.js chunk scripts; parsed for server-action ids + xsid script.
            scripts: list = [s['src'] for s in BeautifulSoup(load_site.text, 'html.parser').find_all('script', src=True) if s['src'].startswith('/_next/static/chunks/')]

            self.actions, self.xsid_script = Parser.parse_grok(scripts)

            # Sentry tracing metadata echoed back on subsequent requests.
            self.baggage: str = Utils.between(load_site.text, '<meta name="baggage" content="', '"')
            self.sentry_trace: str = Utils.between(load_site.text, '<meta name="sentry-trace" content="', '-')
        else:
            self.session.cookies.update(extra_data["cookies"])

            self.actions: list = extra_data["actions"]
            self.xsid_script: list = extra_data["xsid_script"]
            self.baggage: str = extra_data["baggage"]
            self.sentry_trace: str = extra_data["sentry_trace"]

        if not self.baggage:
            Log.Error("Failed to extract baggage token")
            # Preserve the page for offline debugging when we fetched it here.
            if 'load_site' in locals():
                with open("debug_grok_response.html", "w", encoding="utf-8") as f:
                    f.write(load_site.text)
                with open("error.log", "a") as f:
                    f.write(f"FAILED TO EXTRACT BAGGAGE. HTML saved to debug_grok_response.html\n")
            else:
                with open("error.log", "a") as f:
                    f.write(f"FAILED TO EXTRACT BAGGAGE (No load_site object).\n")
            # Don't crash here, subsequent requests will fail but log will be preserved

    def c_request(self, next_action: str) -> None:
        """Issue one Next.js server-action POST to /c.

        The three-step bootstrap: run 0 uploads our public key, run 1 sends
        the anonUserId (response carries the challenge bytes), run 2 sends
        the signed challenge (response carries verification token + anim).

        :param next_action: Server-action id from Parser.parse_grok.
        """
        # Safety check for missing tokens
        if not self.baggage:
            return

        self.session.headers = self.headers.C_REQUEST
        self.session.headers.update({
            'baggage': self.baggage,
            'next-action': next_action,
            'sentry-trace': f'{self.sentry_trace}-{str(uuid4()).replace("-", "")[:16]}-0',
        })
        self.session.headers = Headers.fix_order(self.session.headers, self.headers.C_REQUEST)

        if self.c_run == 0:
            # First run is multipart, so the plain content-type must go.
            self.session.headers.pop("content-type")

            mime = CurlMime()
            mime.addpart(name="1", data=bytes(self.keys["userPublicKey"]), filename="blob", content_type="application/octet-stream")
            mime.addpart(name="0", filename=None, data='[{"userPublicKey":"$o1"}]')

            c_request: requests.models.Response = self.session.post("https://grok.com/c", multipart=mime)
            self.session.cookies.update(c_request.cookies)

            self.anon_user: str = Utils.between(c_request.text, '{"anonUserId":"', '"')
            self.c_run += 1

        else:
            # NOTE(review): runs > 2 would leave `data` undefined; callers
            # only ever drive runs 0-2.
            if self.c_run == 1:
                data: str = dumps([{"anonUserId": self.anon_user}])
            elif self.c_run == 2:
                data: str = dumps([{"anonUserId": self.anon_user, **self.challenge_dict}])

            c_request: requests.models.Response = self.session.post('https://grok.com/c', data=data)
            self.session.cookies.update(c_request.cookies)

            if self.c_run == 1:
                # Challenge bytes are framed between these hex markers.
                start_idx = c_request.content.hex().find("3a6f38362c")
                if start_idx != -1:
                    start_idx += len("3a6f38362c")
                    end_idx = c_request.content.hex().find("313a", start_idx)
                    if end_idx != -1:
                        challenge_hex = c_request.content.hex()[start_idx:end_idx]
                        challenge_bytes = bytes.fromhex(challenge_hex)

                        self.challenge_dict: dict = Anon.sign_challenge(challenge_bytes, self.keys["privateKey"])
                        Log.Success(f"Solved Challenge: {self.challenge_dict}")
            elif self.c_run == 2:
                # Inputs for the x-statsig-id signature.
                self.verification_token, self.anim = Parser.get_anim(c_request.text, "grok-site-verification")
                self.svg_data, self.numbers = Parser.parse_values(c_request.text, self.anim, self.xsid_script)

            self.c_run += 1

    def start_convo(self, message: str, extra_data: dict = None) -> dict:
        """Send `message`, starting a new conversation or resuming one.

        :param extra_data: Snapshot returned by a previous call; when given,
            the message is posted to the existing conversation.
        :returns: Dict with `response`, `stream_response`, `images` and an
            `extra_data` snapshot for the follow-up call, or `{"error": ...}`.
        """
        if not extra_data:
            self._load()
            if not self.actions or len(self.actions) < 3:
                Log.Error(f"Failed to load actions: {self.actions}")
                return {"error": "Failed to initialize Grok connection (missing actions)."}

            self.c_request(self.actions[0])
            self.c_request(self.actions[1])
            self.c_request(self.actions[2])
            xsid: str = Signature.generate_sign('/rest/app-chat/conversations/new', 'POST', self.verification_token, self.svg_data, self.numbers)
        else:
            self._load(extra_data)
            # Skip run 0: the keypair / anon user come from the snapshot.
            self.c_run: int = 1
            self.anon_user: str = extra_data["anon_user"]
            self.keys["privateKey"] = extra_data["privateKey"]
            self.c_request(self.actions[1])
            self.c_request(self.actions[2])
            xsid: str = Signature.generate_sign(f'/rest/app-chat/conversations/{extra_data["conversationId"]}/responses', 'POST', self.verification_token, self.svg_data, self.numbers)

        self.session.headers = self.headers.CONVERSATION
        self.session.headers.update({
            'baggage': self.baggage,
            'sentry-trace': f'{self.sentry_trace}-{str(uuid4()).replace("-", "")[:16]}-0',
            'x-statsig-id': xsid,
            'x-xai-request-id': str(uuid4()),
            'traceparent': f"00-{token_hex(16)}-{token_hex(8)}-00"
        })
        self.session.headers = Headers.fix_order(self.session.headers, self.headers.CONVERSATION)

        if not extra_data:
            conversation_data: dict = {
                'temporary': False,
                'modelName': self.model,
                'message': message,
                'fileAttachments': [],
                'imageAttachments': [],
                'disableSearch': False,
                'enableImageGeneration': True,
                'returnImageBytes': False,
                'returnRawGrokInXaiRequest': False,
                'enableImageStreaming': True,
                'imageGenerationCount': 2,
                'forceConcise': False,
                'toolOverrides': {},
                'enableSideBySide': True,
                'sendFinalMetadata': True,
                'isReasoning': False,
                'webpageUrls': [],
                'disableTextFollowUps': False,
                'responseMetadata': {
                    'requestModelDetails': {
                        'modelId': self.model,
                    },
                },
                'disableMemory': False,
                'forceSideBySide': False,
                'modelMode': self.model_mode,
                'isAsyncChat': False,
            }

            convo_request: requests.models.Response = self.session.post('https://grok.com/rest/app-chat/conversations/new', json=conversation_data, timeout=9999)

            if "modelResponse" in convo_request.text:
                # Response is a newline-delimited JSON event stream.
                response = conversation_id = parent_response = image_urls = None
                stream_response: list = []

                for response_dict in convo_request.text.strip().split('\n'):
                    data: dict = loads(response_dict)

                    token: str = data.get('result', {}).get('response', {}).get('token')
                    if token:
                        stream_response.append(token)

                    if not response and data.get('result', {}).get('response', {}).get('modelResponse', {}).get('message'):
                        response: str = data['result']['response']['modelResponse']['message']

                    if not conversation_id and data.get('result', {}).get('conversation', {}).get('conversationId'):
                        conversation_id: str = data['result']['conversation']['conversationId']

                    if not parent_response and data.get('result', {}).get('response', {}).get('modelResponse', {}).get('responseId'):
                        parent_response: str = data['result']['response']['modelResponse']['responseId']

                    if not image_urls and data.get('result', {}).get('response', {}).get('modelResponse', {}).get('generatedImageUrls', {}):
                        image_urls: str = data['result']['response']['modelResponse']['generatedImageUrls']

                return {
                    "response": response,
                    "stream_response": stream_response,
                    "images": image_urls,
                    "extra_data": {
                        "anon_user": self.anon_user,
                        "cookies": self.session.cookies.get_dict(),
                        "actions": self.actions,
                        "xsid_script": self.xsid_script,
                        "baggage": self.baggage,
                        "sentry_trace": self.sentry_trace,
                        "conversationId": conversation_id,
                        "parentResponseId": parent_response,
                        "privateKey": self.keys["privateKey"]
                    }
                }
            else:
                if 'rejected by anti-bot rules' in convo_request.text:
                    # BUG FIX: the retry previously passed the proxy URL as the
                    # `model` positional argument (Grok(proxy_url)), silently
                    # downgrading the model and dropping the proxy.
                    return Grok(model=self.model, proxy=self.session.proxies.get("all")).start_convo(message=message, extra_data=extra_data)
                Log.Error("Something went wrong")
                Log.Error(convo_request.text)
                return {"error": convo_request.text}
        else:
            conversation_data: dict = {
                'message': message,
                'modelName': self.model,
                'parentResponseId': extra_data["parentResponseId"],
                'disableSearch': False,
                'enableImageGeneration': True,
                'imageAttachments': [],
                'returnImageBytes': False,
                'returnRawGrokInXaiRequest': False,
                'fileAttachments': [],
                'enableImageStreaming': True,
                'imageGenerationCount': 2,
                'forceConcise': False,
                'toolOverrides': {},
                'enableSideBySide': True,
                'sendFinalMetadata': True,
                'customPersonality': '',
                'isReasoning': False,
                'webpageUrls': [],
                'metadata': {
                    'requestModelDetails': {
                        'modelId': self.model,
                    },
                    'request_metadata': {
                        'model': self.model,
                        'mode': self.mode,
                    },
                },
                'disableTextFollowUps': False,
                'disableArtifact': False,
                'isFromGrokFiles': False,
                'disableMemory': False,
                'forceSideBySide': False,
                'modelMode': self.model_mode,
                'isAsyncChat': False,
                'skipCancelCurrentInflightRequests': False,
                'isRegenRequest': False,
            }

            convo_request: requests.models.Response = self.session.post(f'https://grok.com/rest/app-chat/conversations/{extra_data["conversationId"]}/responses', json=conversation_data, timeout=9999)

            if "modelResponse" in convo_request.text:
                # Follow-up stream nests one level shallower than /new.
                response = conversation_id = parent_response = image_urls = None
                stream_response: list = []

                for response_dict in convo_request.text.strip().split('\n'):
                    data: dict = loads(response_dict)

                    token: str = data.get('result', {}).get('token')
                    if token:
                        stream_response.append(token)

                    if not response and data.get('result', {}).get('modelResponse', {}).get('message'):
                        response: str = data['result']['modelResponse']['message']

                    if not parent_response and data.get('result', {}).get('modelResponse', {}).get('responseId'):
                        parent_response: str = data['result']['modelResponse']['responseId']

                    if not image_urls and data.get('result', {}).get('modelResponse', {}).get('generatedImageUrls', {}):
                        image_urls: str = data['result']['modelResponse']['generatedImageUrls']

                return {
                    "response": response,
                    "stream_response": stream_response,
                    "images": image_urls,
                    "extra_data": {
                        "anon_user": self.anon_user,
                        "cookies": self.session.cookies.get_dict(),
                        "actions": self.actions,
                        "xsid_script": self.xsid_script,
                        "baggage": self.baggage,
                        "sentry_trace": self.sentry_trace,
                        "conversationId": extra_data["conversationId"],
                        "parentResponseId": parent_response,
                        "privateKey": self.keys["privateKey"]
                    }
                }
            else:
                if 'rejected by anti-bot rules' in convo_request.text:
                    # BUG FIX: same constructor-argument mix-up as above.
                    return Grok(model=self.model, proxy=self.session.proxies.get("all")).start_convo(message=message, extra_data=extra_data)
                Log.Error("Something went wrong")
                Log.Error(convo_request.text)
                return {"error": convo_request.text}
|
||||
|
||||
|
||||
78
services/crawl4ai/app/grok/headers.py
Normal file
78
services/crawl4ai/app/grok/headers.py
Normal file
|
|
@ -0,0 +1,78 @@
|
|||
|
||||
|
||||
class Headers:
    """Browser-accurate header templates for the grok.com request sequence.

    Header *order* matters to the site's fingerprinting, so templates are
    plain dicts (insertion-ordered) and `fix_order` re-sorts merged headers
    back into template order.
    """

    @staticmethod
    def fix_order(headers, base) -> dict:
        """Reorder `headers` so keys found in `base` come first, in base's
        order; any remaining keys keep their original relative order."""
        reordered: dict = {key: headers[key] for key in base if key in headers}
        for key, value in headers.items():
            reordered.setdefault(key, value)
        return reordered

    def __init__(self) -> None:
        # Headers for the initial GET of https://grok.com/c (navigation).
        self.LOAD: dict = {
            "upgrade-insecure-requests": "1",
            "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/143.0.0.0 Safari/537.36",
            "accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7",
            "sec-ch-ua": "\"Google Chrome\";v=\"143\", \"Chromium\";v=\"143\", \"Not A(Brand\";v=\"24\"",
            "sec-ch-ua-mobile": "?0",
            "sec-ch-ua-platform": "\"Windows\"",
            "sec-fetch-site": "none",
            "sec-fetch-mode": "navigate",
            "sec-fetch-user": "?1",
            "sec-fetch-dest": "document",
            "accept-encoding": "gzip, deflate, br, zstd",
            "accept-language": "de-DE,de;q=0.9,en-US;q=0.8,en;q=0.7",
            "priority": "u=0, i",
        }

        # Headers for the Next.js server-action POSTs to /c.
        self.C_REQUEST: dict = {
            "sec-ch-ua-platform": "\"Windows\"",
            "next-action": "",
            "sec-ch-ua": "\"Google Chrome\";v=\"143\", \"Chromium\";v=\"143\", \"Not A(Brand\";v=\"24\"",
            "sec-ch-ua-mobile": "?0",
            "next-router-state-tree": "%5B%22%22%2C%7B%22children%22%3A%5B%22c%22%2C%7B%22children%22%3A%5B%5B%22slug%22%2C%22%22%2C%22oc%22%5D%2C%7B%22children%22%3A%5B%22__PAGE__%22%2C%7B%7D%2Cnull%2Cnull%5D%7D%2Cnull%2Cnull%5D%7D%2Cnull%2Cnull%5D%7D%2Cnull%2Cnull%2Ctrue%5D",
            "baggage": '',
            "sentry-trace": "",
            "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/143.0.0.0 Safari/537.36",
            "accept": "text/x-component",
            "content-type": "text/plain;charset=UTF-8",
            "origin": "https://grok.com",
            "sec-fetch-site": "same-origin",
            "sec-fetch-mode": "cors",
            "sec-fetch-dest": "empty",
            "referer": "https://grok.com/c",
            "accept-encoding": "gzip, deflate, br, zstd",
            "accept-language": "de-DE,de;q=0.9,en-US;q=0.8,en;q=0.7",
            "priority": "u=1, i",
        }

        # Headers for the chat REST API (conversation create / respond).
        self.CONVERSATION: dict = {
            "x-xai-request-id": "",
            "sec-ch-ua-platform": "\"Windows\"",
            "sec-ch-ua": "\"Google Chrome\";v=\"143\", \"Chromium\";v=\"143\", \"Not A(Brand\";v=\"24\"",
            "sec-ch-ua-mobile": "?0",
            "baggage": "",
            "sentry-trace": "",
            "traceparent": "",
            "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/143.0.0.0 Safari/537.36",
            "content-type": "application/json",
            "x-statsig-id": "",
            "accept": "*/*",
            "origin": "https://grok.com",
            "sec-fetch-site": "same-origin",
            "sec-fetch-mode": "cors",
            "sec-fetch-dest": "empty",
            "referer": "https://grok.com/",
            "accept-encoding": "gzip, deflate, br, zstd",
            "accept-language": "de-DE,de;q=0.9,en-US;q=0.8,en;q=0.7",
            "priority": "u=1, i",
        }
|
||||
60
services/crawl4ai/app/grok/logger.py
Normal file
60
services/crawl4ai/app/grok/logger.py
Normal file
|
|
@ -0,0 +1,60 @@
|
|||
from typing import Optional
|
||||
from datetime import datetime
|
||||
from colorama import Fore
|
||||
from threading import Lock
|
||||
from time import time
|
||||
|
||||
|
||||
class Log:
    """
    Thread-safe console logger with colour-coded level prefixes.
    """

    # Prefix colour per level name.
    colours: dict = {
        'SUCCESS': Fore.LIGHTGREEN_EX,
        'ERROR': Fore.LIGHTRED_EX,
        'INFO': Fore.LIGHTWHITE_EX
    }

    # Serialises print() so concurrent threads don't interleave lines.
    lock = Lock()

    @staticmethod
    def _log(level, prefix, message) -> None:
        """
        Build and print one timestamped log line.

        :param level: Just not used, only a filler
        :param prefix: Prefix to indicate if its Success, Error or Info
        :param message: Message to Log
        """
        # Was datetime.fromtimestamp(time()) — datetime.now() is equivalent
        # (local time) and simpler. Annotation fixed: strftime returns str.
        timestamp: str = datetime.now().strftime("%H:%M:%S")

        log_message = (
            f"{Fore.LIGHTBLACK_EX}[{Fore.MAGENTA}{timestamp}{Fore.RESET}{Fore.LIGHTBLACK_EX}]{Fore.RESET} "
            f"{prefix} {message}"
        )

        with Log.lock:
            print(log_message)

    @staticmethod
    def Success(message, prefix="[+]", color=colours['SUCCESS']) -> None:
        """
        Logging a Success message.
        """
        Log._log("SUCCESS", f"{color}{prefix}{Fore.RESET}", message)

    @staticmethod
    def Error(message, prefix="[!]", color=colours['ERROR']) -> None:
        """
        Logging an Error Message.
        """
        Log._log("ERROR", f"{color}{prefix}{Fore.RESET}", message)

    @staticmethod
    def Info(message, prefix="[!]", color=colours['INFO']) -> None:
        """
        Logging an Info Message.
        """
        Log._log("INFO", f"{color}{prefix}{Fore.RESET}", message)
|
||||
65
services/crawl4ai/app/grok/mappings/grok.json
Normal file
65
services/crawl4ai/app/grok/mappings/grok.json
Normal file
|
|
@ -0,0 +1,65 @@
|
|||
[
|
||||
{
|
||||
"xsid_script": "static/chunks/444a4d2e0656ce52.js",
|
||||
"action_script": "/_next/static/chunks/07efa55314110fbd.js",
|
||||
"actions": [
|
||||
"7f7a9e476198643fb30f17ab0e0c41f8f2edc18ae7",
|
||||
"7f0a06a29ceb599ed2d3901e16b2a1e088d2372deb",
|
||||
"7f38fb97af610ff9d28ae27294dc41bd9eca880852"
|
||||
]
|
||||
},
|
||||
{
|
||||
"xsid_script": "static/chunks/9e496d2be7115b4d.js",
|
||||
"action_script": "/_next/static/chunks/fcbe5d6b4ae286fe.js",
|
||||
"actions": [
|
||||
"7fd00a18c007ec926f1136cb558f9ef9f903dcc1f4",
|
||||
"7f795a3c3829bb45c6e2d2ad0587c7e039f513a509",
|
||||
"7fa94a2c9b7ebcf8874e824d3365d9b9735a7afe34"
|
||||
]
|
||||
},
|
||||
{
|
||||
"xsid_script": "static/chunks/069cbd766e2e100e.js",
|
||||
"action_script": "/_next/static/chunks/cb52eeab0fd0e58c.js",
|
||||
"actions": [
|
||||
"7fffbbcd70e50341926589c4f0ed7ab475afad3321",
|
||||
"7fdf5ae16dee580d89683963be28bc62f1603ffea1",
|
||||
"7f37fea17b375870e80133012d199e6cdee6201091"
|
||||
]
|
||||
},
|
||||
{
|
||||
"xsid_script": "static/chunks/c1c11f0dd2cadabf.js",
|
||||
"action_script": "/_next/static/chunks/bdf3abb63890a18e.js",
|
||||
"actions": [
|
||||
"7f71f42b11fe0a773c18539575170eb3cda2720fff",
|
||||
"7f8159187cdb2e21e48a06256220a8bbf7b1088b34",
|
||||
"7fb14bed5522696e9d5cbec5fd92ea7cebee752db0"
|
||||
]
|
||||
},
|
||||
{
|
||||
"xsid_script": "static/chunks/720ab0732a942089.js",
|
||||
"action_script": "/_next/static/chunks/dcf3a6315f86c917.js",
|
||||
"actions": [
|
||||
"7f8b78848a6f7726b96bec61b199a7bdc02e392621",
|
||||
"7f1e31eb362d2be64d0ab258d72fc770ecbb261237",
|
||||
"7f0c6140a77d46f5696f9b5d4fec00e3165e9bf678"
|
||||
]
|
||||
},
|
||||
{
|
||||
"xsid_script": "static/chunks/68f6ef173efbeb67.js",
|
||||
"action_script": "/_next/static/chunks/4114b4b6e0483e8c.js",
|
||||
"actions": [
|
||||
"7f3749b0c81bd826ca8cc02ccf8009a911410e49f7",
|
||||
"7f5e48bfe2a1588dc86c1fe1bf3eac0e2676f55532",
|
||||
"7f5341512f3793d10791b2ca628b300aac6ba34b98"
|
||||
]
|
||||
},
|
||||
{
|
||||
"xsid_script": "static/chunks/87d576c60e76a1e9.js",
|
||||
"action_script": "/_next/static/chunks/843010bb02f13cde.js",
|
||||
"actions": [
|
||||
"7fb4349e44719d28ba8da9344e11ab7e5e3b1c474f",
|
||||
"7f9a9b0c62c7c8775525be38003aa09725ea709115",
|
||||
"7f82eca570c9532c4193e3784a3a017ef7229a3edf"
|
||||
]
|
||||
}
|
||||
]
|
||||
1
services/crawl4ai/app/grok/mappings/txid.json
Normal file
1
services/crawl4ai/app/grok/mappings/txid.json
Normal file
|
|
@ -0,0 +1 @@
|
|||
{"https://grok.com/_next/static/chunks/29589.8ec1f2947a0e205d.js": [6, 14, 12, 16], "https://grok.com/_next/static/chunks/e628011fd4d67558.js": [0, 2, 8, 9], "https://grok.com/_next/static/chunks/77ffaef786c38d59.js": [13, 33, 11, 36], "https://grok.com/_next/static/chunks/444a4d2e0656ce52.js": [14, 10, 25, 24], "https://grok.com/_next/static/chunks/9e496d2be7115b4d.js": [11, 24, 38, 38], "https://grok.com/_next/static/chunks/069cbd766e2e100e.js": [0, 37, 0, 45], "https://grok.com/_next/static/chunks/c1c11f0dd2cadabf.js": [25, 10, 30, 26], "https://grok.com/_next/static/chunks/720ab0732a942089.js": [41, 6, 33, 12], "https://grok.com/_next/static/chunks/68f6ef173efbeb67.js": [31, 26, 18, 35], "https://grok.com/_next/static/chunks/87d576c60e76a1e9.js": [18, 23, 44, 33]}
|
||||
43
services/crawl4ai/app/grok/reverse/anon.py
Normal file
43
services/crawl4ai/app/grok/reverse/anon.py
Normal file
|
|
@ -0,0 +1,43 @@
|
|||
from base64 import b64encode, b64decode
|
||||
from secrets import token_bytes
|
||||
from coincurve import PrivateKey
|
||||
from hashlib import sha256
|
||||
|
||||
class Anon:
    """Keypair generation and challenge signing for the anonymous-user
    anti-bot handshake (secp256k1 via coincurve)."""

    @staticmethod
    def publicKeyCreate(e) -> list:
        """Derive the compressed public key for private-key bytes `e`,
        returned as a list of ints."""
        compressed: bytes = PrivateKey(bytes(e)).public_key.format(compressed=True)
        return list(compressed)

    @staticmethod
    def xor(e) -> str:
        """Base64-encode a byte sequence given as ints 0-255.

        (Name is historical: equivalent to b64encode of the raw bytes —
        chr()+latin-1 round-trips each value unchanged.)
        """
        raw: bytes = "".join(chr(value) for value in e).encode('latin-1')
        return b64encode(raw).decode()

    @staticmethod
    def generate_keys() -> dict:
        """Create a fresh keypair: base64 private key + public key int list."""
        secret: bytes = token_bytes(32)
        return {
            "privateKey": Anon.xor(secret),
            "userPublicKey": Anon.publicKeyCreate(secret)
        }

    @staticmethod
    def sign_challenge(challenge_data: bytes, key: str) -> dict:
        """Sign sha256(challenge) with the base64-encoded private key `key`;
        returns base64 challenge + 64-byte recoverable signature (no recid)."""
        privkey: PrivateKey = PrivateKey(b64decode(key))
        signature: bytes = privkey.sign_recoverable(sha256(challenge_data).digest(), hasher=None)[:64]

        return {
            "challenge": b64encode(challenge_data).decode(),
            "signature": b64encode(signature).decode()
        }
|
||||
139
services/crawl4ai/app/grok/reverse/parser.py
Normal file
139
services/crawl4ai/app/grok/reverse/parser.py
Normal file
|
|
@ -0,0 +1,139 @@
|
|||
from re import findall, search
|
||||
from json import load, dump
|
||||
from base64 import b64decode
|
||||
from typing import Optional
|
||||
from curl_cffi import requests
|
||||
from ..runtime import Utils
|
||||
from os import path
|
||||
|
||||
class Parser:
    """Extracts anti-bot parameters (server actions, SVG paths, statsig
    index values) from grok.com HTML and its Next.js chunk scripts."""

    # chunk-script URL -> four index values used by the statsig signature.
    mapping: dict = {}
    _mapping_loaded: bool = False

    # Cached {xsid_script, action_script, actions} entries per site build.
    grok_mapping: list = []
    _grok_mapping_loaded: bool = False

    @classmethod
    def _load__xsid_mapping(cls):
        """Lazily load the on-disk txid cache, if present.

        NOTE(review): path is relative to the process CWD ('core/mappings/…')
        while this package ships its mappings under app/grok/mappings/ —
        confirm the service's working directory.
        """
        if not cls._mapping_loaded and path.exists('core/mappings/txid.json'):
            with open('core/mappings/txid.json', 'r') as f:
                cls.mapping = load(f)
            cls._mapping_loaded = True

    @classmethod
    def _load_grok_mapping(cls):
        """Lazily load the on-disk grok build cache, if present."""
        if not cls._grok_mapping_loaded and path.exists('core/mappings/grok.json'):
            with open('core/mappings/grok.json', 'r') as f:
                cls.grok_mapping = load(f)
            cls._grok_mapping_loaded = True

    @staticmethod
    def parse_values(html: str, loading: str = "loading-x-anim-0", scriptId: str = "") -> tuple[str, Optional[str]]:
        """Pick the SVG animation path selected by `loading` and, when a
        `scriptId` is given, resolve its statsig index values.

        :returns: (svg_data, numbers) when `scriptId` is set, otherwise just
            svg_data.
        """
        Parser._load__xsid_mapping()

        all_d_values = findall(r'"d":"(M[^"]{200,})"', html)
        if not all_d_values:
            # Fallback: page carried no SVG animation paths.
            print("Warning: No SVG paths found")
            # BUG FIX: was `return "", None if scriptId else ""`, which parses
            # as ("", None-or-"") and always returned a 2-tuple; the bare-svg
            # path (no scriptId) must return a single string like the success
            # branch below.
            return ("", None) if scriptId else ""

        try:
            anim_index = int(loading.split("loading-x-anim-")[1])
            if anim_index >= len(all_d_values):
                anim_index = 0
            svg_data = all_d_values[anim_index]
        except (IndexError, ValueError):
            svg_data = all_d_values[0]

        if scriptId:

            if scriptId == "ondemand.s":
                script_link: str = 'https://abs.twimg.com/responsive-web/client-web/ondemand.s.' + Utils.between(html, f'"{scriptId}":"', '"') + 'a.js'
            else:
                script_link: str = f'https://grok.com/_next/{scriptId}'

            if script_link in Parser.mapping:
                numbers: list = Parser.mapping[script_link]

            else:
                # Mine the four `x[i], 16` indices from the live script.
                script_content: str = requests.get(script_link, impersonate="chrome136").text
                matches = findall(r'x\[(\d+)\]\s*,\s*16', script_content)
                if matches:
                    numbers: list = [int(x) for x in matches]
                else:
                    numbers = []

                Parser.mapping[script_link] = numbers
                if path.exists('core/mappings'):
                    try:
                        with open('core/mappings/txid.json', 'w') as f:
                            dump(Parser.mapping, f)
                    except Exception as e:
                        print(f"Failed to save mapping: {e}")

            return svg_data, numbers

        else:
            return svg_data

    @staticmethod
    def get_anim(html: str, verification: str = "grok-site-verification") -> tuple[str, str]:
        """Return (verification_token, anim-id); byte 5 of the decoded token
        mod 4 selects which loading animation carries the signature path."""
        verification_token: str = Utils.between(html, f'"name":"{verification}","content":"', '"')
        try:
            array: list = list(b64decode(verification_token))
            if len(array) > 5:
                anim: str = "loading-x-anim-" + str(array[5] % 4)
            else:
                anim = "loading-x-anim-0"
        except Exception:
            anim = "loading-x-anim-0"

        return verification_token, anim

    @staticmethod
    def parse_grok(scripts: list) -> tuple[list, str]:
        """Resolve (server-action ids, xsid chunk script) for the current
        site build, from cache or by fetching the chunk scripts.

        :returns: (actions, xsid_script); ([], "") on failure so callers can
            always unpack the result.
        """
        Parser._load_grok_mapping()

        for index in Parser.grok_mapping:
            if index.get("action_script") in scripts:
                return index["actions"], index["xsid_script"]

        script_content1: Optional[str] = None
        script_content2: Optional[str] = None
        action_script: Optional[str] = None

        for script in scripts:
            content: str = requests.get(f'https://grok.com{script}', impersonate="chrome136").text
            if "anonPrivateKey" in content:
                script_content1 = content
                action_script = script
            elif "880932)" in content:
                script_content2 = content

        if not script_content1 or not script_content2:
            print("Failed to find required scripts")
            return [], ""

        actions: list = findall(r'createServerReference\)\("([a-f0-9]+)"', script_content1)
        # BUG FIX: search() can return None (new build layout), which crashed
        # with AttributeError on .group(1); fall through to the error path.
        xsid_match = search(r'"(static/chunks/[^"]+\.js)"[^}]*?\(880932\)', script_content2)
        xsid_script: str = xsid_match.group(1) if xsid_match else ""

        if actions and xsid_script:
            Parser.grok_mapping.append({
                "xsid_script": xsid_script,
                "action_script": action_script,
                "actions": actions
            })

            # BUG FIX: previously unguarded; a missing core/mappings dir
            # crashed the whole parse after the work was already done.
            try:
                with open('core/mappings/grok.json', 'w') as f:
                    dump(Parser.grok_mapping, f, indent=2)
            except Exception as e:
                print(f"Failed to save grok mapping: {e}")

            return actions, xsid_script
        else:
            # BUG FIX: previously fell off the end returning None, making
            # callers' tuple-unpack raise TypeError.
            print("Something went wrong while parsing script and actions")
            return [], ""
||||
|
||||
|
||||
180
services/crawl4ai/app/grok/reverse/xctid.py
Normal file
180
services/crawl4ai/app/grok/reverse/xctid.py
Normal file
|
|
@ -0,0 +1,180 @@
|
|||
from math import floor, copysign, pi, cos, sin
|
||||
from base64 import b64decode, b64encode
|
||||
from re import findall, sub
|
||||
from typing import List, Dict
|
||||
from random import random
|
||||
from hashlib import sha256
|
||||
from struct import pack
|
||||
from time import time
|
||||
|
||||
|
||||
class Signature:
|
||||
|
||||
|
||||
@staticmethod
|
||||
def _h(x: float, _param: float, c: float, e: bool):
|
||||
f = ((x * (c - _param)) / 255.0) + _param
|
||||
if e:
|
||||
return floor(f)
|
||||
rounded = round(float(f), 2)
|
||||
if rounded == 0.0:
|
||||
return 0.0
|
||||
return rounded
|
||||
|
||||
@staticmethod
|
||||
def cubicBezierEased(t: float, x1: float, y1: float, x2: float, y2: float) -> float:
|
||||
def bezier(u: float):
|
||||
omu = 1.0 - u
|
||||
b1 = 3.0 * omu * omu * u
|
||||
b2 = 3.0 * omu * u * u
|
||||
b3 = u * u * u
|
||||
x = b1 * x1 + b2 * x2 + b3
|
||||
y = b1 * y1 + b2 * y2 + b3
|
||||
return x, y
|
||||
|
||||
lo, hi = 0.0, 1.0
|
||||
for _ in range(80):
|
||||
mid = 0.5 * (lo + hi)
|
||||
if bezier(mid)[0] < t:
|
||||
lo = mid
|
||||
else:
|
||||
hi = mid
|
||||
u = 0.5 * (lo + hi)
|
||||
return bezier(u)[1]
|
||||
|
||||
@staticmethod
|
||||
def xa(svg: str) -> List[List[int]]:
|
||||
s = (svg)
|
||||
substr = s[9:]
|
||||
parts = substr.split("C")
|
||||
out = []
|
||||
for part in parts:
|
||||
cleaned = sub(r"[^\d]+", " ", part).strip()
|
||||
if cleaned == "":
|
||||
nums = [0]
|
||||
else:
|
||||
nums = [int(tok) for tok in cleaned.split() if tok != ""]
|
||||
out.append(nums)
|
||||
return out
|
||||
|
||||
@staticmethod
|
||||
def tohex(num: float) -> str:
|
||||
rounded = round(float(num), 2)
|
||||
if rounded == 0.0:
|
||||
return "0"
|
||||
sign = "-" if copysign(1.0, rounded) < 0 else ""
|
||||
absval = abs(rounded)
|
||||
intpart = int(floor(absval))
|
||||
frac = absval - intpart
|
||||
if frac == 0.0:
|
||||
return sign + format(intpart, "x")
|
||||
frac_digits = []
|
||||
f = frac
|
||||
for _ in range(20):
|
||||
f *= 16
|
||||
digit = int(floor(f + 1e-12))
|
||||
frac_digits.append(format(digit, "x"))
|
||||
f -= digit
|
||||
if abs(f) < 1e-12:
|
||||
break
|
||||
frac_str = "".join(frac_digits).rstrip("0")
|
||||
if frac_str == "":
|
||||
return sign + format(intpart, "x")
|
||||
return sign + format(intpart, "x") + "." + frac_str
|
||||
|
||||
@staticmethod
|
||||
def simulateStyle(values: List[int], c: int) -> Dict[str,str]:
|
||||
duration = 4096
|
||||
currentTime = round(c / 10.0) * 10
|
||||
t = currentTime / duration
|
||||
|
||||
cp = [Signature._h(v, -1 if (i % 2) else 0, 1, False) for i, v in enumerate(values[7:])]
|
||||
|
||||
easedY = Signature.cubicBezierEased(t, cp[0], cp[1], cp[2], cp[3])
|
||||
|
||||
start = [float(x) for x in values[0:3]]
|
||||
end = [float(x) for x in values[3:6]]
|
||||
r = round(start[0] + (end[0] - start[0]) * easedY)
|
||||
g = round(start[1] + (end[1] - start[1]) * easedY)
|
||||
b = round(start[2] + (end[2] - start[2]) * easedY)
|
||||
color = f"rgb({r}, {g}, {b})"
|
||||
|
||||
endAngle = Signature._h(values[6], 60, 360, True)
|
||||
angle = endAngle * easedY
|
||||
rad = angle * pi / 180.0
|
||||
|
||||
def is_effectively_zero(val: float) -> bool:
|
||||
return abs(val) < 1e-7
|
||||
|
||||
def is_effectively_integer(val: float) -> bool:
|
||||
return abs(val - round(val)) < 1e-7
|
||||
|
||||
cosv = cos(rad)
|
||||
sinv = sin(rad)
|
||||
|
||||
if is_effectively_zero(cosv):
|
||||
a = 0
|
||||
d = 0
|
||||
else:
|
||||
if is_effectively_integer(cosv):
|
||||
a = int(round(cosv))
|
||||
d = int(round(cosv))
|
||||
else:
|
||||
a = f"{cosv:.6f}"
|
||||
d = f"{cosv:.6f}"
|
||||
|
||||
if is_effectively_zero(sinv):
|
||||
bval = 0
|
||||
cval = 0
|
||||
else:
|
||||
if is_effectively_integer(sinv):
|
||||
bval = int(round(sinv))
|
||||
cval = int(round(-sinv))
|
||||
else:
|
||||
bval = f"{sinv:.7f}"
|
||||
cval = f"{(-sinv):.7f}"
|
||||
|
||||
transform = f"matrix({a}, {bval}, {cval}, {d}, 0, 0)"
|
||||
return {"color": color, "transform": transform}
|
||||
|
||||
@staticmethod
def xs(x_bytes: bytes, svg: str, x_values: list) -> str:
    """
    Derive the challenge fragment: pick a row of the SVG table, simulate its
    animated style, then hex-encode every number in the serialized style.

    @param x_bytes: decoded verification bytes used to select indices.
    @param svg: raw SVG challenge payload (parsed by `Signature.xa`).
    @param x_values: four byte offsets into x_bytes.
    @return: concatenated hex digits with '.' and '-' stripped.
    """
    data = list(x_bytes)

    # Row selection and virtual clock are both driven by challenge bytes.
    row_index = data[x_values[0]] % 16
    clock = ((data[x_values[1]] % 16) * (data[x_values[2]] % 16)) * (data[x_values[3]] % 16)

    table = Signature.xa(svg)
    style = Signature.simulateStyle(table[row_index], clock)

    # Serialize the style, then hex-encode each numeric token it contains.
    blob = str(style["color"]) + str(style["transform"])
    hex_parts = [Signature.tohex(float(token)) for token in findall(r"[\d\.\-]+", blob)]

    return "".join(hex_parts).replace(".", "").replace("-", "")
|
||||
|
||||
@staticmethod
def generate_sign(path: str, method: str, verification: str, svg: str, x_values: list, time_n: int = None, random_float: float = None) -> str:
    """
    Build the request signature: sha256 over method/path/timestamp plus the
    SVG-derived fragment, prefixed by a random byte that XOR-masks the rest.

    @param path: request path included in the signed message.
    @param method: HTTP method included in the signed message.
    @param verification: base64 verification blob from the server.
    @param svg: SVG challenge payload.
    @param x_values: byte offsets consumed by `Signature.xs`.
    @param time_n: optional fixed timestamp (seconds since site epoch) for
                   deterministic output; generated from wall clock if None.
    @param random_float: optional fixed value in [0, 1) for the mask byte.
    @return: base64 signature with '=' padding stripped.
    """
    # Seconds since the site epoch (2023-05-01). `is None` (not truthiness)
    # so an explicit time_n of 0 is honoured.
    n = int(time() - 1682924400) if time_n is None else time_n
    t = pack('<I', n)
    r = b64decode(verification)
    o = Signature.xs(r, svg, x_values)

    msg = "!".join([method, path, str(n)]) + "obfiowerehiring" + o
    digest = sha256(msg.encode('utf-8')).digest()[:16]

    # BUG FIX: previously `int(floor(random() if not random_float else random_float * 256))`
    # — the conditional expression binds looser than '*', so the default path
    # was floor(random()) == 0 every time. The intent is floor(value * 256).
    rnd = random() if random_float is None else random_float
    prefix_byte = int(floor(rnd * 256)) & 0xFF  # mask guards rnd == 1.0

    assembled = bytes([prefix_byte]) + r + t + digest + bytes([3])

    # XOR-mask every byte after the first with the prefix byte; the prefix is
    # kept in the output so a verifier can undo the mask.
    arr = bytearray(assembled)
    if len(arr) > 0:
        first = arr[0]
        for i in range(1, len(arr)):
            arr[i] = arr[i] ^ first

    return b64encode(bytes(arr)).decode('ascii').replace('=', '')
|
||||
49
services/crawl4ai/app/grok/runtime.py
Normal file
49
services/crawl4ai/app/grok/runtime.py
Normal file
|
|
@ -0,0 +1,49 @@
|
|||
from typing import Callable, Any, Optional, Type
|
||||
from functools import wraps
|
||||
from .logger import Log
|
||||
|
||||
|
||||
class Run:
    """
    Class to handle runtime
    """

    @staticmethod
    def Error(func: Callable[..., Any]) -> Callable[..., Any]:
        """
        Decorator that shields `func`: any exception is routed to
        `Run.handle_error` (which logs and terminates the process).

        @param func: The function to wrap.
        @return: Wrapped callable; returns None if an exception was handled.
        """
        @wraps(func)
        def guarded(*args: Any, **kwargs: Any) -> Any:
            try:
                return func(*args, **kwargs)
            except Exception as caught:
                Run.handle_error(caught)
                return None

        return guarded

    @staticmethod
    def handle_error(exception: Exception) -> Optional[None]:
        """
        Log the exception and terminate the process.

        @param exception: Exception that occurred.
        """
        Log.Error(f"Error occurred: {exception}")
        exit()
|
||||
|
||||
class Utils:

    @staticmethod
    def between(
        main_text: Optional[str],
        value_1: Optional[str],
        value_2: Optional[str],
    ) -> str:
        """
        Return the text between the first `value_1` and the `value_2` that
        follows it; "" when either delimiter is absent or main_text is None.
        """
        try:
            after_open = main_text.split(value_1)[1]
            return after_open.split(value_2)[0]
        except (IndexError, AttributeError):
            return ""
|
||||
111
services/crawl4ai/app/grok_auth.py
Normal file
111
services/crawl4ai/app/grok_auth.py
Normal file
|
|
@ -0,0 +1,111 @@
|
|||
import asyncio
|
||||
import logging
|
||||
from typing import Dict, Optional
|
||||
from playwright.async_api import async_playwright, Browser, Page
|
||||
from playwright_stealth import Stealth
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
async def get_grok_cookies() -> Dict[str, str]:
    """
    Launches a HEADFUL browser with Stealth settings to bypass Cloudflare.
    Waits generously for manual user interaction if needed.

    Returns a {cookie_name: cookie_value} dict (possibly empty on failure).
    NOTE(review): progress is appended to a local "error.log" file in addition
    to the logger — confirm this side channel is intended in deployment.
    """
    browser: Optional[Browser] = None
    try:
        logger.info("Launching Playwright (Stealth Mode) for authentication...")
        with open("error.log", "a") as f:
            f.write("Browser: Launching Playwright (Visible, Stealth v2)...\n")

        async with async_playwright() as p:
            # Launch chromium in HEADFUL mode
            browser = await p.chromium.launch(
                headless=False,
                args=[
                    '--no-sandbox',
                    '--disable-setuid-sandbox',
                    '--disable-blink-features=AutomationControlled',
                    '--start-maximized'
                ]
            )

            # Use a slightly more random user agent
            context = await browser.new_context(
                viewport=None,  # Allow window to determine size
                locale='en-US',
                timezone_id='America/New_York'
            )

            page: Page = await context.new_page()

            # Apply stealth using new Class-based API
            # Try to initialize Stealth and apply async
            stealth = Stealth()
            await stealth.apply_stealth_async(page)

            logger.info("Navigating to https://grok.com...")
            with open("error.log", "a") as f:
                f.write("Browser: Navigating to grok.com...\n")

            # Go to page; navigation timeouts are tolerated because the page
            # may still be loading behind a Cloudflare interstitial.
            try:
                await page.goto('https://grok.com', timeout=60000, wait_until='domcontentloaded')
            except Exception as e:
                with open("error.log", "a") as f:
                    f.write(f"Browser: Navigation warning (might be loading): {e}\n")

            with open("error.log", "a") as f:
                f.write("Browser: Waiting 120s for challenge (Please solve manually if visible)...\n")

            # Polling wait for 120s (24 iterations x 5s).
            # Success criterion: the Grok prompt textarea is present.
            authenticated = False
            for i in range(24):  # 24 * 5s = 120s
                try:
                    # Check for Success
                    if await page.query_selector('textarea[placeholder*="Grok"]'):
                        with open("error.log", "a") as f:
                            f.write("Browser: Success! Grok UI detected.\n")
                        authenticated = True
                        break

                    # Check for Failure/Challenge (Cloudflare interstitial title)
                    content = await page.content()
                    if "Just a moment" in content:
                        # Log only every other iteration to limit noise.
                        if i % 2 == 0:
                            with open("error.log", "a") as f:
                                f.write(f"Browser: Still on Cloudflare challenge... ({i*5}s)\n")
                    else:
                        # Maybe it is loaded but selector didn't match yet?
                        pass

                    await asyncio.sleep(5)
                except Exception as e:
                    # Best-effort polling: page.content() can throw mid-navigation.
                    pass

            if not authenticated:
                with open("error.log", "a") as f:
                    f.write("Browser: Timeout. Challenge NOT solved after 120s.\n")
                # Take a screenshot to debug what was on screen
                await page.screenshot(path="cloudflare_fail.png")

            # Extract cookies regardless, maybe we got lucky
            cookies = await context.cookies()
            cookie_dict = {c['name']: c['value'] for c in cookies}

            with open("error.log", "a") as f:
                f.write(f"Browser: Extracted {len(cookie_dict)} cookies.\n")
                # cf_clearance is the Cloudflare pass cookie downstream callers need.
                if 'cf_clearance' in cookie_dict:
                    f.write("Browser: cf_clearance found.\n")
                else:
                    f.write("Browser: WARNING: cf_clearance NOT found.\n")

            return cookie_dict

    except Exception as e:
        logger.error(f"Browser authentication failed: {e}")
        with open("error.log", "a") as f:
            f.write(f"Browser auth failed exception: {e}\n")
        return {}
    finally:
        # Browser cleanup is handled by the async_playwright context manager.
        pass
|
||||
98
services/crawl4ai/app/grok_client.py
Normal file
98
services/crawl4ai/app/grok_client.py
Normal file
|
|
@ -0,0 +1,98 @@
|
|||
import logging
|
||||
import asyncio
|
||||
from typing import Optional, List, Dict
|
||||
try:
|
||||
# Try local import first (when running as app.main)
|
||||
from .grok.grok import Grok
|
||||
except ImportError:
|
||||
try:
|
||||
# Try absolute import (if running differently)
|
||||
from app.grok.grok import Grok
|
||||
except ImportError:
|
||||
Grok = None
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
class GrokChatClient:
    """
    Thin async wrapper around the third-party `Grok` client.

    Degrades gracefully: if the library is missing or initialization fails,
    `self.client` stays None and `chat` returns an error string instead of
    raising.
    """

    def __init__(self):
        # None signals "library unavailable"; chat() checks this first.
        self.client = None
        if Grok:
            try:
                self.client = Grok()
                logger.info("Grok API client initialized safely")
            except Exception as e:
                logger.error(f"Failed to initialize Grok client: {e}")
        else:
            logger.warning("grok-api library not found. Please install git+https://github.com/realasfngl/Grok-Api.git")

    async def chat(self, message: str, history: List[Dict[str, str]] = None, cookies: Dict[str, str] = None, user_agent: str = None) -> str:
        """
        Send a chat message to Grok.

        @param message: user message to send.
        @param history: prior turns — accepted but not forwarded to start_convo
                        (TODO confirm whether the library supports history).
        @param cookies: session cookies to inject before the request.
        @param user_agent: User-Agent override applied to the session headers.
        @return: Grok's reply text, or an "Error ..." string on failure
                 (this method never raises to the caller).
        """
        if not self.client:
            return "Error: Grok API not installed or initialized."

        try:
            # Apply user-provided cookies and UA
            if cookies:
                try:
                    self.client.session.cookies.update(cookies)
                except Exception as e:
                    logger.warning(f"Failed to update cookies: {e}")

            if user_agent:
                # Override the User-Agent header in the session
                self.client.session.headers["User-Agent"] = user_agent
                # Also set in the client's internal headers object if present
                if hasattr(self.client, 'headers'):
                    if hasattr(self.client.headers, 'LOAD'):
                        self.client.headers.LOAD["User-Agent"] = user_agent
                    if hasattr(self.client.headers, 'C_REQUEST'):
                        self.client.headers.C_REQUEST["User-Agent"] = user_agent

            # start_convo is synchronous, so run it off the event loop.
            # (A no-op `try: ... except Exception as e: raise e` wrapper was
            # removed here — it added nothing over letting the exception flow
            # to the outer handler.)
            result = await asyncio.to_thread(self.client.start_convo, message)

            if isinstance(result, dict):
                if "error" in result:
                    error_msg = result["error"]
                    # If the caller supplied cookies, trust the failure; only
                    # attempt the browser-auth fallback when none were given.
                    if not cookies:
                        logger.warning(f"Grok request failed: {error_msg}. Attempting browser auth bypass...")

                        # Try to get cookies via browser
                        from .grok_auth import get_grok_cookies
                        browser_cookies = await get_grok_cookies()

                        if browser_cookies:
                            logger.info("Got cookies from browser, applying to Grok client...")
                            self.client.session.cookies.update(browser_cookies)

                            # Retry the request once with the fresh cookies.
                            result = await asyncio.to_thread(self.client.start_convo, message)
                            if isinstance(result, dict) and "error" in result:
                                raise Exception(f"Retry failed: {result['error']}")
                        else:
                            raise Exception(f"Browser auth failed, original error: {error_msg}")
                    else:
                        # If cookies were provided but failed, trust the error
                        logger.warning(f"User-provided cookies failed: {error_msg}")
                        raise Exception(f"Grok Error: {error_msg} (Check your cookies)")

                return result.get("response", "No response from Grok.")
            return str(result)

        except Exception as e:
            import traceback
            error_trace = traceback.format_exc()
            logger.error(f"Grok Chat Error: {e}\n{error_trace}")
            with open("error.log", "a") as f:
                f.write(f"\nFAILED GROK REQUEST: {e}\n{error_trace}")
            return f"Error communicating with Grok: {str(e)}"
|
||||
184
services/crawl4ai/app/main.py
Normal file
184
services/crawl4ai/app/main.py
Normal file
|
|
@ -0,0 +1,184 @@
|
|||
"""
|
||||
Meta AI FastAPI Service (v2.0)
|
||||
|
||||
Uses metaai-api library for Meta AI image generation.
|
||||
See: https://github.com/mir-ashiq/metaai-api
|
||||
"""
|
||||
from contextlib import asynccontextmanager
|
||||
from fastapi import FastAPI, BackgroundTasks, HTTPException
|
||||
from fastapi.middleware.cors import CORSMiddleware
|
||||
import asyncio
|
||||
import uuid
|
||||
|
||||
|
||||
|
||||
from .models import (
|
||||
GenerateRequest,
|
||||
GenerateResponse,
|
||||
ImageResult,
|
||||
TaskStatusResponse,
|
||||
HealthResponse,
|
||||
GrokChatRequest,
|
||||
GrokChatResponse
|
||||
)
|
||||
from .grok_client import GrokChatClient
|
||||
from .meta_crawler import meta_crawler
|
||||
|
||||
# Initialize Grok client (module-level singleton; safe even if the Grok
# library is missing — the client degrades to error strings).
grok_client = GrokChatClient()


# Task storage (in-memory for simplicity)
# NOTE(review): lost on restart and not shared across workers — fine for a
# single-process deployment only.
tasks: dict = {}
|
||||
|
||||
|
||||
@asynccontextmanager
async def lifespan(app: FastAPI):
    """Startup and shutdown events"""
    # Code before `yield` runs at startup, after it at shutdown.
    print("[MetaAI] Starting Meta AI service...")
    yield
    print("[MetaAI] Shutting down...")
|
||||
|
||||
|
||||
app = FastAPI(
    title="Meta AI Image Generation Service",
    description="FastAPI wrapper for Meta AI image generation using metaai-api",
    version="2.0.0",
    lifespan=lifespan
)

# CORS middleware
# NOTE(review): allow_origins=["*"] together with allow_credentials=True is
# rejected by browsers (the CORS spec forbids wildcard origins when
# credentials are allowed) — list explicit origins before deployment.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
|
||||
|
||||
|
||||
@app.get("/health", response_model=HealthResponse)
async def health_check():
    """Health check endpoint"""
    # metaai-api manages its own browser state, so readiness is static here.
    payload = HealthResponse(
        status="healthy",
        version="2.0.0",
        browser_ready=True,
    )
    return payload
|
||||
|
||||
|
||||
@app.get("/rate-limit")
async def get_rate_limit():
    """Get current rate limiting status"""
    # Delegates to the crawler's RateLimiter (hourly count, reset time, etc.).
    return meta_crawler.get_rate_limit_status()
|
||||
|
||||
|
||||
@app.post("/generate/sync", response_model=GenerateResponse)
async def generate_sync(request: GenerateRequest):
    """
    Synchronous image generation - returns when complete.

    Requires:
    - prompt: The image generation prompt
    - cookies: Facebook/Meta cookies (JSON array or string format)
    """
    # Failures are reported in-band via the response envelope, never as HTTP
    # errors, so the client always receives a GenerateResponse.
    try:
        generated = await meta_crawler.generate_images(
            prompt=request.prompt,
            cookies=request.cookies,
            num_images=request.num_images,
        )
    except Exception as exc:
        return GenerateResponse(success=False, images=[], error=str(exc))

    return GenerateResponse(success=True, images=generated, error=None)
|
||||
|
||||
|
||||
@app.post("/generate", response_model=GenerateResponse)
async def generate_async(request: GenerateRequest, background_tasks: BackgroundTasks):
    """
    Async image generation - returns immediately with task_id.
    Poll /status/{task_id} for results.
    """
    task_id = str(uuid.uuid4())

    tasks[task_id] = {
        "status": "pending",
        "images": [],
        "error": None
    }

    async def run_generation():
        # Mark in-flight so pollers can distinguish queued vs running.
        tasks[task_id]["status"] = "processing"
        try:
            images = await meta_crawler.generate_images(
                prompt=request.prompt,
                cookies=request.cookies,
                num_images=request.num_images
            )
            tasks[task_id] = {
                "status": "completed",
                "images": images,
                "error": None
            }
        except Exception as e:
            tasks[task_id] = {
                "status": "failed",
                "images": [],
                "error": str(e)
            }

    # BUG FIX: previously `background_tasks.add_task(asyncio.create_task, run_generation())`
    # handed the *sync* callable `asyncio.create_task` to Starlette, which runs
    # sync background tasks in a worker thread where no event loop is running —
    # create_task raises RuntimeError there and the generation never executes.
    # Passing the async function itself lets Starlette await it after the
    # response has been sent.
    background_tasks.add_task(run_generation)

    return GenerateResponse(
        success=True,
        images=[],
        error=None,
        task_id=task_id
    )
|
||||
|
||||
|
||||
@app.get("/status/{task_id}", response_model=TaskStatusResponse)
async def get_task_status(task_id: str):
    """Get status of async generation task"""
    record = tasks.get(task_id)
    if record is None:
        raise HTTPException(status_code=404, detail="Task not found")

    return TaskStatusResponse(
        task_id=task_id,
        status=record["status"],
        images=record["images"],
        error=record["error"],
    )
|
||||
|
||||
|
||||
@app.delete("/status/{task_id}")
async def delete_task(task_id: str):
    """Clean up completed task"""
    if task_id not in tasks:
        raise HTTPException(status_code=404, detail="Task not found")
    tasks.pop(task_id)
    return {"deleted": True}
|
||||
|
||||
|
||||
|
||||
@app.post("/grok/chat", response_model=GrokChatResponse)
async def grok_chat(request: GrokChatRequest):
    """
    Chat with Grok AI
    """
    try:
        reply = await grok_client.chat(
            request.message, request.history, request.cookies, request.user_agent
        )
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
    return GrokChatResponse(response=reply)
|
||||
189
services/crawl4ai/app/meta_crawler.py
Normal file
189
services/crawl4ai/app/meta_crawler.py
Normal file
|
|
@ -0,0 +1,189 @@
|
|||
"""
|
||||
Meta AI Wrapper - Lightweight wrapper around metaai-api library
|
||||
|
||||
Uses the mir-ashiq/metaai-api library for actual Meta AI interaction.
|
||||
This wrapper adds rate limiting and adapts the response format for our API.
|
||||
|
||||
To update the library:
|
||||
pip install -U git+https://github.com/mir-ashiq/metaai-api.git
|
||||
"""
|
||||
import asyncio
|
||||
import json
|
||||
import time
|
||||
import random
|
||||
from concurrent.futures import ThreadPoolExecutor
|
||||
from typing import Optional
|
||||
|
||||
from metaai_api import MetaAI
|
||||
|
||||
from .config import settings
|
||||
from .models import ImageResult
|
||||
|
||||
|
||||
class RateLimiter:
    """Simple rate limiter to prevent shadowban"""

    def __init__(self):
        # Monotonic-ish bookkeeping via wall clock: last request timestamp,
        # per-hour request count, and the start of the current hour window.
        self.last_request_time: float = 0
        self.request_count_hour: int = 0
        self.hour_start: float = time.time()

    async def wait_if_needed(self):
        """Wait if rate limit would be exceeded"""
        now = time.time()

        # Roll the hourly window once it is more than an hour old.
        if now - self.hour_start > 3600:
            self.request_count_hour = 0
            self.hour_start = now

        # Hard hourly cap: refuse (raise) rather than block for a long time.
        if self.request_count_hour >= settings.max_requests_per_hour:
            wait_time = 3600 - (now - self.hour_start)
            if wait_time > 0:
                raise Exception(f"Hourly rate limit reached. Try again in {int(wait_time)} seconds.")

        # Pace requests: minimum gap plus 0-5s of jitter.
        since_last = now - self.last_request_time
        required_gap = settings.rate_limit_delay + random.uniform(0, 5)
        if since_last < required_gap:
            await asyncio.sleep(required_gap - since_last)

        self.last_request_time = time.time()
        self.request_count_hour += 1

    def get_status(self) -> dict:
        """Get current rate limit status"""
        now = time.time()
        since_last = now - self.last_request_time if self.last_request_time else 0
        until_reset = max(0, 3600 - (now - self.hour_start))

        return {
            "requests_this_hour": self.request_count_hour,
            "max_requests_per_hour": settings.max_requests_per_hour,
            "seconds_since_last_request": int(since_last),
            "seconds_until_hour_reset": int(until_reset),
            "can_request_now": self.request_count_hour < settings.max_requests_per_hour
        }
|
||||
|
||||
|
||||
class MetaAICrawler:
|
||||
"""
|
||||
Thin wrapper around metaai-api library.
|
||||
|
||||
Handles:
|
||||
- Cookie format conversion (JSON array to dict)
|
||||
- Rate limiting
|
||||
- Response format adaptation
|
||||
|
||||
The actual Meta AI interaction is delegated to metaai-api.
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
self.rate_limiter = RateLimiter()
|
||||
self._executor = ThreadPoolExecutor(max_workers=2)
|
||||
|
||||
def _parse_cookies(self, cookies: str) -> dict:
|
||||
"""Convert cookies from various formats to dict"""
|
||||
if not cookies:
|
||||
return {}
|
||||
|
||||
# Try JSON array format first
|
||||
try:
|
||||
cookies_str = cookies.strip()
|
||||
if cookies_str.startswith('['):
|
||||
parsed = json.loads(cookies_str)
|
||||
if isinstance(parsed, list):
|
||||
return {c['name']: c['value'] for c in parsed if 'name' in c and 'value' in c}
|
||||
except json.JSONDecodeError:
|
||||
pass
|
||||
|
||||
# Try cookie string format: "name1=value1; name2=value2"
|
||||
result = {}
|
||||
for pair in cookies.split(';'):
|
||||
pair = pair.strip()
|
||||
if '=' in pair:
|
||||
name, value = pair.split('=', 1)
|
||||
result[name.strip()] = value.strip()
|
||||
|
||||
return result
|
||||
|
||||
def _generate_sync(self, prompt: str, cookies_dict: dict) -> dict:
|
||||
"""Synchronous generation using metaai-api"""
|
||||
ai = MetaAI(cookies=cookies_dict)
|
||||
return ai.prompt(prompt)
|
||||
|
||||
async def generate_images(self, prompt: str, cookies: str, num_images: int = 4) -> list[ImageResult]:
|
||||
"""
|
||||
Generate images using Meta AI's Imagine model.
|
||||
|
||||
Args:
|
||||
prompt: The image generation prompt
|
||||
cookies: Meta AI/Facebook cookies (JSON array or string format)
|
||||
num_images: Number of images (metaai-api returns 4 by default)
|
||||
|
||||
Returns:
|
||||
List of ImageResult objects with generated image URLs
|
||||
"""
|
||||
# Rate limiting
|
||||
await self.rate_limiter.wait_if_needed()
|
||||
|
||||
print(f"[MetaCrawler] Generating images for: '{prompt[:50]}...'")
|
||||
|
||||
# Parse cookies
|
||||
cookies_dict = self._parse_cookies(cookies)
|
||||
|
||||
if not cookies_dict:
|
||||
raise Exception("No valid cookies provided")
|
||||
|
||||
# Check for essential cookies
|
||||
if 'c_user' not in cookies_dict and 'xs' not in cookies_dict:
|
||||
print("[MetaCrawler] Warning: Missing Facebook auth cookies (c_user, xs)")
|
||||
|
||||
# Prepare prompt (add "Imagine" prefix if not present)
|
||||
image_prompt = prompt if prompt.lower().startswith('imagine') else f"Imagine {prompt}"
|
||||
|
||||
# Run in thread pool since metaai_api is synchronous
|
||||
loop = asyncio.get_event_loop()
|
||||
try:
|
||||
result = await loop.run_in_executor(
|
||||
self._executor,
|
||||
self._generate_sync,
|
||||
image_prompt,
|
||||
cookies_dict
|
||||
)
|
||||
except Exception as e:
|
||||
print(f"[MetaCrawler] Error: {str(e)}")
|
||||
raise
|
||||
|
||||
# Extract media from response
|
||||
media = result.get('media', [])
|
||||
|
||||
if not media:
|
||||
message = result.get('message', '')
|
||||
if message:
|
||||
raise Exception(f"Meta AI response: {message[:200]}")
|
||||
raise Exception("No images generated")
|
||||
|
||||
print(f"[MetaCrawler] Got {len(media)} images!")
|
||||
|
||||
# Convert to ImageResult format
|
||||
images = []
|
||||
for item in media:
|
||||
if item.get('type') == 'IMAGE' and item.get('url'):
|
||||
images.append(ImageResult(
|
||||
url=item['url'],
|
||||
prompt=item.get('prompt', prompt),
|
||||
model="imagine"
|
||||
))
|
||||
|
||||
return images[:num_images] # Limit to requested count
|
||||
|
||||
def get_rate_limit_status(self) -> dict:
|
||||
"""Get current rate limiting status"""
|
||||
return self.rate_limiter.get_status()
|
||||
|
||||
|
||||
# Singleton instance shared by the FastAPI routes (module-level state;
# holds the rate limiter and thread pool for the process lifetime).
meta_crawler = MetaAICrawler()
|
||||
66
services/crawl4ai/app/models.py
Normal file
66
services/crawl4ai/app/models.py
Normal file
|
|
@ -0,0 +1,66 @@
|
|||
"""
|
||||
Pydantic models for request/response schemas
|
||||
"""
|
||||
from pydantic import BaseModel, Field
|
||||
from typing import Optional
|
||||
from enum import Enum
|
||||
|
||||
|
||||
class TaskStatus(str, Enum):
    """Lifecycle states for an async generation task (serialized as strings)."""
    PENDING = "pending"
    PROCESSING = "processing"
    COMPLETED = "completed"
    FAILED = "failed"
|
||||
|
||||
|
||||
class GenerateRequest(BaseModel):
    """Request model for image generation"""
    prompt: str = Field(..., description="Image generation prompt", min_length=1)
    # Accepts either a JSON cookie-export array or a "k=v; k2=v2" string.
    cookies: str = Field(..., description="Meta AI session cookies")
    num_images: int = Field(default=4, ge=1, le=8, description="Number of images to generate")
|
||||
|
||||
|
||||
class GrokChatRequest(BaseModel):
    """Request model for Grok chat"""
    message: str = Field(..., description="Message content")
    # Prior conversation turns; shape is caller-defined (untyped list).
    history: Optional[list] = Field(default=None, description="Chat history")
    cookies: Optional[dict] = Field(default=None, description="Grok session cookies")
    user_agent: Optional[str] = Field(default=None, description="Browser User-Agent")
|
||||
|
||||
|
||||
class ImageResult(BaseModel):
    """Single generated image result"""
    url: str
    data: Optional[str] = None  # base64 encoded image data (optional; may be unset)
    prompt: str
    model: str = "imagine"
|
||||
|
||||
|
||||
class GenerateResponse(BaseModel):
    """Response model for image generation"""
    success: bool
    images: list[ImageResult] = []
    error: Optional[str] = None
    # Set only by the async endpoint; poll /status/{task_id} with it.
    task_id: Optional[str] = None
|
||||
|
||||
|
||||
class GrokChatResponse(BaseModel):
    """Response model for Grok chat"""
    # Reply text; failures may also arrive here as "Error ..." strings.
    response: str
    error: Optional[str] = None
|
||||
|
||||
|
||||
class TaskStatusResponse(BaseModel):
    """Response model for async task status"""
    task_id: str
    status: TaskStatus
    images: list[ImageResult] = []
    error: Optional[str] = None
    progress: Optional[int] = None  # 0-100 (not currently populated by the service)
|
||||
|
||||
|
||||
class HealthResponse(BaseModel):
    """Health check response"""
    status: str = "healthy"
    version: str = "1.0.0"
    # Static in practice: metaai-api manages browser state internally.
    browser_ready: bool = True
|
||||
24
services/crawl4ai/requirements.txt
Normal file
24
services/crawl4ai/requirements.txt
Normal file
|
|
@ -0,0 +1,24 @@
|
|||
# Crawl4AI Meta AI Service Dependencies
|
||||
# Using metaai-api library for Meta AI integration
|
||||
# Update: pip install -U git+https://github.com/mir-ashiq/metaai-api.git
|
||||
|
||||
# Core web framework
|
||||
fastapi>=0.109.0
|
||||
uvicorn[standard]>=0.27.0
|
||||
pydantic>=2.0.0
|
||||
|
||||
# Meta AI API library (from GitHub for latest updates)
|
||||
# Install separately: pip install git+https://github.com/mir-ashiq/metaai-api.git
|
||||
# Or add to Docker: RUN pip install git+https://github.com/mir-ashiq/metaai-api.git
|
||||
|
||||
# Dependencies for metaai-api
|
||||
requests-html>=0.10.0
|
||||
lxml_html_clean>=0.4.0
|
||||
|
||||
# Grok Chat API
|
||||
# Install: pip install git+https://github.com/realasfngl/Grok-Api.git
|
||||
# Or add to Docker: RUN pip install git+https://github.com/realasfngl/Grok-Api.git
|
||||
|
||||
# Browser Automation (for Cloudflare bypass)
|
||||
playwright>=1.41.0
|
||||
playwright-stealth>=1.0.6
|
||||
Loading…
Reference in a new issue