This commit is contained in:
zramsay 2024-12-20 16:18:49 -05:00
parent aeb240cfc5
commit 2fafd17cdb
3 changed files with 164 additions and 155 deletions

55
src/app/api/flux/route.ts Normal file
View File

@ -0,0 +1,55 @@
import { NextRequest, NextResponse } from 'next/server'
import { fal } from "@fal-ai/client"
// Fail fast at module load: the route is useless without fal.ai credentials.
const falKey = process.env.FAL_AI_KEY
if (!falKey) {
  throw new Error('FAL_AI_KEY is not configured in environment variables')
}

// Configure the shared fal client with the validated key.
fal.config({
  credentials: falKey
})
/**
 * POST /api/flux — generate an image from a text prompt via a fal.ai model.
 *
 * Request body: `{ prompt: string, modelId: string }`.
 * Responses:
 *   200 `{ imageUrl: string }` — URL of the first generated image
 *   400 `{ error: string }`    — prompt or modelId missing
 *   500 `{ error: string }`    — generation failed or response had no image
 */
export async function POST(req: NextRequest): Promise<NextResponse> {
  try {
    const { prompt, modelId } = await req.json()

    // Validate required fields before calling the (paid) generation API.
    if (!prompt || !modelId) {
      return NextResponse.json(
        { error: 'Prompt and modelId are required' },
        { status: 400 }
      )
    }

    console.log('Generating with Flux model:', modelId)
    console.log('Prompt:', prompt)

    // subscribe() enqueues the request and resolves once generation completes.
    const result = await fal.subscribe(modelId, {
      input: {
        prompt: prompt,
      },
      logs: true,
      onQueueUpdate: (update) => {
        if (update.status === "IN_PROGRESS") {
          // Guard with ?. — queue updates may arrive without a logs array,
          // and an unguarded .map would throw inside the callback.
          console.log('Generation progress:', update.logs?.map((log) => log.message))
        }
      },
    })

    console.log('Flux generation result:', result)

    // Surface a malformed upstream response as a 500 via the catch below.
    if (!result.data?.images?.[0]?.url) {
      throw new Error('No image URL in response')
    }

    return NextResponse.json({ imageUrl: result.data.images[0].url })
  } catch (error) {
    console.error('Flux generation error:', error)
    return NextResponse.json(
      { error: error instanceof Error ? error.message : 'Failed to generate image' },
      { status: 500 }
    )
  }
}
// Opt out of static rendering/response caching: every request must reach
// the fal.ai API with its own prompt, so this route runs per-request.
export const dynamic = 'force-dynamic'

View File

@ -3,12 +3,7 @@
import React, { useState } from 'react' import React, { useState } from 'react'
import WalletHeader from '../components/WalletHeader' import WalletHeader from '../components/WalletHeader'
import AIServiceCard from '../components/AIServiceCard' import AIServiceCard from '../components/AIServiceCard'
import TextGenerationCard from '../components/TextGenerationCard' import { generateWithFlux, FluxGenerationResult, FLUX_MODELS } from '../services/fluxService'
import { generateWithFlux, FluxGenerationResult } from '../services/fluxService'
import { generateWithAdobe, AdobeGenerationResult } from '../services/adobeService'
import { generateWithGrok, GrokGenerationResult } from '../services/grokService'
import { generateWithOllama, OllamaGenerationResult } from '../services/ollamaService'
import { generateWithGemini, GeminiGenerationResult } from '../services/geminiService'
import { processMTMPayment } from '../services/paymentService' import { processMTMPayment } from '../services/paymentService'
interface WalletState { interface WalletState {
@ -48,100 +43,26 @@ const Page: React.FC = (): React.ReactElement => {
} }
} }
const handleFluxGeneration = async (prompt: string): Promise<FluxGenerationResult> => { const handleFluxGeneration = (modelId: string, cost: number) => {
if (!walletState.connected || !walletState.publicKey || !window.solflare) { return async (prompt: string): Promise<FluxGenerationResult> => {
return { error: 'Wallet not connected' } if (!walletState.connected || !walletState.publicKey || !window.solflare) {
return { error: 'Wallet not connected' }
}
// Process payment first
const paymentResult = await processMTMPayment(
walletState.publicKey,
cost,
window.solflare
)
if (!paymentResult.success) {
return { error: paymentResult.error }
}
// Then generate image with specified model
return generateWithFlux(prompt, modelId)
} }
// First process payment
const paymentResult = await processMTMPayment(
walletState.publicKey,
1, // 1 MTM token
window.solflare
)
if (!paymentResult.success) {
return { error: paymentResult.error }
}
// Then generate image
return generateWithFlux(prompt)
}
const handleGrokGeneration = async (prompt: string): Promise<GrokGenerationResult> => {
if (!walletState.connected || !walletState.publicKey || !window.solflare) {
return { error: 'Wallet not connected' }
}
const paymentResult = await processMTMPayment(
walletState.publicKey,
5, // 5 MTM tokens for Grok premium service
window.solflare
)
if (!paymentResult.success) {
return { error: paymentResult.error }
}
return generateWithGrok(prompt)
}
const handleAdobeGeneration = async (prompt: string): Promise<AdobeGenerationResult> => {
if (!walletState.connected || !walletState.publicKey || !window.solflare) {
return { error: 'Wallet not connected' }
}
const paymentResult = await processMTMPayment(
walletState.publicKey,
4, // 4 MTM tokens for Adobe premium service
window.solflare
)
if (!paymentResult.success) {
return { error: paymentResult.error }
}
return generateWithAdobe(prompt)
}
const handleOllamaGeneration = async (prompt: string): Promise<OllamaGenerationResult> => {
if (!walletState.connected || !walletState.publicKey || !window.solflare) {
return { error: 'Wallet not connected' }
}
// First process payment
const paymentResult = await processMTMPayment(
walletState.publicKey,
2, // 2 MTM tokens for text generation
window.solflare
)
if (!paymentResult.success) {
return { error: paymentResult.error }
}
// Then generate text
return generateWithOllama(prompt)
}
const handleGeminiGeneration = async (prompt: string): Promise<GeminiGenerationResult> => {
if (!walletState.connected || !walletState.publicKey || !window.solflare) {
return { error: 'Wallet not connected' }
}
// First process payment
const paymentResult = await processMTMPayment(
walletState.publicKey,
3, // 3 MTM tokens for Gemini
window.solflare
)
if (!paymentResult.success) {
return { error: paymentResult.error }
}
// Then generate text
return generateWithGemini(prompt)
} }
return ( return (
@ -150,10 +71,10 @@ const handleGeminiGeneration = async (prompt: string): Promise<GeminiGenerationR
{/* Header */} {/* Header */}
<div className="text-center mb-8"> <div className="text-center mb-8">
<h1 className="text-4xl sm:text-5xl font-bold mb-4 text-transparent bg-clip-text bg-gradient-to-r from-green-400 to-emerald-600"> <h1 className="text-4xl sm:text-5xl font-bold mb-4 text-transparent bg-clip-text bg-gradient-to-r from-green-400 to-emerald-600">
Mark's Meme Market AI Image Generator
</h1> </h1>
<p className="text-gray-400 text-lg mb-8"> <p className="text-gray-400 text-lg mb-8">
Generate memes using various AI models Generate amazing images using different Flux AI models
</p> </p>
<WalletHeader <WalletHeader
@ -163,56 +84,56 @@ const handleGeminiGeneration = async (prompt: string): Promise<GeminiGenerationR
/> />
</div> </div>
{/* AI Services Grid */} {/* Flux Models Grid */}
<div className="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-3 gap-6"> <div className="grid grid-cols-1 md:grid-cols-2 xl:grid-cols-3 gap-6">
<AIServiceCard {FLUX_MODELS.map((model) => (
title="Flux Meme Generator" <AIServiceCard
description="Generate images using Flux AI" key={model.modelId}
tokenCost={1} title={model.name}
isWalletConnected={walletState.connected} description={model.description}
onGenerate={handleFluxGeneration} tokenCost={model.cost}
/> isWalletConnected={walletState.connected}
onGenerate={handleFluxGeneration(model.modelId, model.cost)}
/>
))}
<AIServiceCard {/* Coming Soon Card */}
title="X Grok Vision" <div className="relative bg-gray-800/50 backdrop-blur-lg rounded-2xl shadow-xl border border-gray-700/50 overflow-hidden group">
description="Advanced image generation by X's Grok AI" <div className="absolute inset-0 bg-gradient-to-br from-yellow-500/10 to-orange-500/10 opacity-50"></div>
tokenCost={5} <div className="relative p-6 flex flex-col h-full">
isWalletConnected={walletState.connected} <div className="flex-1">
onGenerate={handleGrokGeneration} <h3 className="text-2xl font-bold text-transparent bg-clip-text bg-gradient-to-r from-yellow-400 to-orange-500">
/> Coming Soon
</h3>
<p className="mt-2 text-gray-400">
New AI model integration in development. Stay tuned for more amazing features!
</p>
<div className="mt-2 inline-block px-3 py-1 bg-orange-500/20 rounded-full">
<span className="text-orange-300 text-sm">Future Release</span>
</div>
</div>
{/* <div className="mt-6">
<AIServiceCard <button
title="Adobe Meme Generator" disabled
description="Generate images images using Adobe" className="w-full bg-gradient-to-r from-yellow-500/50 to-orange-500/50
tokenCost={4} text-white/50 font-semibold py-4 px-6 rounded-xl
isWalletConnected={walletState.connected} cursor-not-allowed opacity-50"
onGenerate={handleAdobeGeneration} >
/>*/} Coming Soon
</button>
</div>
{/* for another app </div>
<TextGenerationCard </div>
title="Ollama AI Chat"
description="Get intelligent responses using local LLaMA2 model"
tokenCost={2}
isWalletConnected={walletState.connected}
onGenerate={handleOllamaGeneration}
/>
<TextGenerationCard
title="Gemini Pro"
description="Advanced AI responses powered by Google's Gemini 1.5"
tokenCost={3}
isWalletConnected={walletState.connected}
onGenerate={handleGeminiGeneration}
/> */}
</div> </div>
{/* Info Section */} {/* Info Section */}
<div className="mt-12 text-center text-gray-400"> <div className="mt-12 text-center text-gray-400">
<p className="text-sm"> <p className="text-sm">
Powered by Mark Requires MTM tokens Powered by Flux AI Different models have different costs and capabilities
</p>
<p className="text-sm mt-2">
Each generation requires MTM tokens Higher quality models cost more tokens
</p> </p>
</div> </div>
</div> </div>

View File

@ -1,18 +1,51 @@
import { fal } from "@fal-ai/client"
export interface FluxGenerationResult { export interface FluxGenerationResult {
imageUrl?: string imageUrl?: string
error?: string error?: string
} }
export async function generateWithFlux(prompt: string): Promise<FluxGenerationResult> { export interface FluxModelConfig {
modelId: string
name: string
description: string
cost: number
}
// Available Flux/fal-ai models
export const FLUX_MODELS: FluxModelConfig[] = [
{
modelId: "fal-ai/flux/schnell",
name: "Schnell",
description: "Fast meme generator",
cost: 5 // update to 500 MTM
},
{
modelId: "fal-ai/recraft-v3",
name: "Recraft",
description: "Advanced meme generator",
cost: 6 // update to ?
},
{
modelId: "fal-ai/stable-diffusion-v35-large",
name: "Marquee",
description: "Best meme generator",
cost: 10 //update to more
}
]
export async function generateWithFlux(
prompt: string,
modelId: string
): Promise<FluxGenerationResult> {
try { try {
const response = await fetch('/api/generate', { const response = await fetch('/api/flux', {
method: 'POST', method: 'POST',
headers: { headers: {
'Content-Type': 'application/json', 'Content-Type': 'application/json',
}, },
body: JSON.stringify({ prompt }), body: JSON.stringify({
prompt,
modelId
}),
}) })
if (!response.ok) { if (!response.ok) {
@ -22,14 +55,14 @@ export async function generateWithFlux(prompt: string): Promise<FluxGenerationRe
const data = await response.json() const data = await response.json()
console.log('Raw Flux response:', data) console.log('Raw Flux response:', data)
if (data.data?.images?.[0]?.url) { if (data.imageUrl) {
return { imageUrl: data.data.images[0].url } return { imageUrl: data.imageUrl }
} else { } else {
console.error('Unexpected response structure:', data) console.error('Unexpected response structure:', data)
throw new Error('Invalid response format from Flux API') throw new Error('Invalid response format from Flux API')
} }
} catch (error) { } catch (error) {
console.error('Generation error:', error) console.error('Flux generation error:', error)
return { return {
error: error instanceof Error ? error.message : 'Generation failed' error: error instanceof Error ? error.message : 'Generation failed'
} }