Initialize project

commit 2878783349 (2025-11-21 00:24:10 +08:00)
34 changed files with 6774 additions and 0 deletions


@@ -0,0 +1,235 @@
import React, { useState, useRef, useEffect } from 'react';
import { Mic, Square, Loader2 } from 'lucide-react';
interface AudioRecorderProps {
onAudioCaptured: (base64Audio: string) => void;
disabled?: boolean;
titleStart?: string;
titleStop?: string;
}
const AudioRecorder: React.FC<AudioRecorderProps> = ({
onAudioCaptured,
disabled,
titleStart = "Start Voice Input",
titleStop = "Stop Recording"
}) => {
const [isRecording, setIsRecording] = useState(false);
const [isProcessing, setIsProcessing] = useState(false);
const audioContextRef = useRef<AudioContext | null>(null);
const streamRef = useRef<MediaStream | null>(null);
const processorRef = useRef<ScriptProcessorNode | null>(null);
const inputRef = useRef<MediaStreamAudioSourceNode | null>(null);
const audioDataRef = useRef<Float32Array[]>([]);
useEffect(() => {
return () => {
cleanup();
};
}, []);
const cleanup = () => {
if (streamRef.current) {
streamRef.current.getTracks().forEach(track => track.stop());
streamRef.current = null;
}
if (processorRef.current) {
processorRef.current.disconnect();
processorRef.current = null;
}
if (inputRef.current) {
inputRef.current.disconnect();
inputRef.current = null;
}
if (audioContextRef.current) {
if (audioContextRef.current.state !== 'closed') {
audioContextRef.current.close();
}
audioContextRef.current = null;
}
};
const startRecording = async () => {
try {
audioDataRef.current = [];
const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
streamRef.current = stream;
const audioContext = new (window.AudioContext || (window as any).webkitAudioContext)();
if (audioContext.state === 'suspended') {
await audioContext.resume();
}
audioContextRef.current = audioContext;
const input = audioContext.createMediaStreamSource(stream);
inputRef.current = input;
// Buffer size 4096, 1 input channel, 1 output channel
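// Note: ScriptProcessorNode is deprecated in the Web Audio spec; AudioWorkletNode
// is the modern replacement, but ScriptProcessorNode still works in current browsers
// and keeps this component free of extra worklet files.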
const processor = audioContext.createScriptProcessor(4096, 1, 1);
processorRef.current = processor;
processor.onaudioprocess = (e) => {
const channelData = e.inputBuffer.getChannelData(0);
// Clone the data
audioDataRef.current.push(new Float32Array(channelData));
};
input.connect(processor);
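// Some browsers (notably Chromium) only fire onaudioprocess while the processor
// is connected to a destination; the node writes nothing to its output buffer,
// so this connection produces silence rather than feedback.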
processor.connect(audioContext.destination);
setIsRecording(true);
} catch (err) {
console.error("Error accessing microphone:", err);
alert("Could not access microphone. Please check permissions.");
}
};
const stopRecording = async () => {
if (!isRecording) return;
setIsRecording(false);
setIsProcessing(true);
// Stop capturing
if (streamRef.current) {
streamRef.current.getTracks().forEach(track => track.stop());
}
if (processorRef.current) {
processorRef.current.disconnect();
}
if (inputRef.current) {
inputRef.current.disconnect();
}
// Small delay to allow last process tick
setTimeout(() => {
try {
if (audioDataRef.current.length === 0) {
setIsProcessing(false);
cleanup();
return;
}
const sampleRate = audioContextRef.current?.sampleRate || 44100;
const blob = exportWAV(audioDataRef.current, sampleRate);
cleanup();
const reader = new FileReader();
reader.readAsDataURL(blob);
reader.onloadend = () => {
const result = reader.result as string;
// result is "data:audio/wav;base64,..."
const base64String = result.split(',')[1];
onAudioCaptured(base64String);
setIsProcessing(false);
};
} catch (e) {
console.error("WAV Encoding Error", e);
setIsProcessing(false);
cleanup();
}
}, 100);
};
return (
<button
onClick={isRecording ? stopRecording : startRecording}
disabled={disabled || isProcessing}
className={`p-3 rounded-full transition-all duration-300 ${
isRecording
? 'bg-red-500 hover:bg-red-600 text-white animate-pulse shadow-lg shadow-red-200 ring-4 ring-red-100'
: 'bg-slate-200 hover:bg-slate-300 text-slate-700 hover:shadow-md'
} disabled:opacity-50 disabled:cursor-not-allowed flex items-center justify-center`}
title={isRecording ? titleStop : titleStart}
>
{isProcessing ? <Loader2 size={20} className="animate-spin" /> : (isRecording ? <Square size={20} fill="currentColor" /> : <Mic size={20} />)}
</button>
);
};
// --- WAV ENCODER HELPERS ---
const exportWAV = (audioData: Float32Array[], sampleRate: number) => {
const mergedBuffers = mergeBuffers(audioData);
const downsampledBuffer = downsampleBuffer(mergedBuffers, sampleRate);
const buffer = encodeWAV(downsampledBuffer);
return new Blob([buffer], { type: 'audio/wav' });
};
const mergeBuffers = (audioData: Float32Array[]) => {
const totalLength = audioData.reduce((acc, val) => acc + val.length, 0);
const result = new Float32Array(totalLength);
let offset = 0;
for (const arr of audioData) {
result.set(arr, offset);
offset += arr.length;
}
return result;
};
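// downsampleBuffer reduces the captured audio to 16 kHz by averaging all source
// samples that fall into each target sample (a simple box filter, adequate for speech).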
const downsampleBuffer = (buffer: Float32Array, sampleRate: number) => {
if (sampleRate === 16000) return buffer;
const targetRate = 16000;
const sampleRateRatio = sampleRate / targetRate;
const newLength = Math.ceil(buffer.length / sampleRateRatio);
const result = new Float32Array(newLength);
let offsetResult = 0;
let offsetBuffer = 0;
while (offsetResult < result.length) {
const nextOffsetBuffer = Math.round((offsetResult + 1) * sampleRateRatio);
let accum = 0, count = 0;
for (let i = offsetBuffer; i < nextOffsetBuffer && i < buffer.length; i++) {
accum += buffer[i];
count++;
}
// Guard against division by zero (which would yield NaN) when no source samples fall in this window
if (count > 0) {
result[offsetResult] = accum / count;
} else {
result[offsetResult] = 0;
}
offsetResult++;
offsetBuffer = nextOffsetBuffer;
}
return result;
};
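// encodeWAV writes a standard 44-byte RIFF/WAVE header followed by the PCM samples:
//   bytes  0-11: "RIFF", chunk size (36 + data bytes), "WAVE"
//   bytes 12-35: "fmt " chunk: PCM (format 1), mono, 16000 Hz,
//                byte rate 32000 (16000 * 2 bytes), block align 2, 16 bits per sample
//   bytes 36-43: "data", data byte length
// The 16000 Hz rate is hard-coded to match downsampleBuffer above.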
const encodeWAV = (samples: Float32Array) => {
const buffer = new ArrayBuffer(44 + samples.length * 2);
const view = new DataView(buffer);
const writeString = (view: DataView, offset: number, string: string) => {
for (let i = 0; i < string.length; i++) {
view.setUint8(offset + i, string.charCodeAt(i));
}
};
writeString(view, 0, 'RIFF');
view.setUint32(4, 36 + samples.length * 2, true);
writeString(view, 8, 'WAVE');
writeString(view, 12, 'fmt ');
view.setUint32(16, 16, true);
view.setUint16(20, 1, true);
view.setUint16(22, 1, true);
view.setUint32(24, 16000, true);
view.setUint32(28, 16000 * 2, true);
view.setUint16(32, 2, true);
view.setUint16(34, 16, true);
writeString(view, 36, 'data');
view.setUint32(40, samples.length * 2, true);
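// Clamp each float sample to [-1, 1] and scale to the signed 16-bit range;
// negatives scale by 0x8000 (32768) and positives by 0x7FFF (32767) so both
// extremes map exactly onto the int16 limits.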
const floatTo16BitPCM = (output: DataView, offset: number, input: Float32Array) => {
for (let i = 0; i < input.length; i++, offset += 2) {
const s = Math.max(-1, Math.min(1, input[i]));
output.setInt16(offset, s < 0 ? s * 0x8000 : s * 0x7FFF, true);
}
};
floatTo16BitPCM(view, 44, samples);
return view;
};
export default AudioRecorder;
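A minimal usage sketch, assuming the file lives at components/AudioRecorder.tsx; the demo component and its handler are illustrative placeholders, not part of this commit:
import React from 'react';
import AudioRecorder from './components/AudioRecorder';
const VoiceInputDemo: React.FC = () => {
// onAudioCaptured receives base64-encoded 16 kHz mono 16-bit WAV data.
const handleAudio = (base64Wav: string) => {
// Hypothetical: forward the WAV to whatever speech-to-text backend the app uses.
console.log('Captured WAV, base64 length:', base64Wav.length);
};
return <AudioRecorder onAudioCaptured={handleAudio} titleStart="Speak" titleStop="Stop" />;
};
export default VoiceInputDemo;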

components/ChatBubble.tsx

@@ -0,0 +1,267 @@
import React, { useState, useRef } from 'react';
import { Role, MessageType, ChatMessage, Language } from '../types';
import { User, Bot, BrainCircuit, Volume2, Pause, Sparkles, Download, Copy, Check, Loader2 } from 'lucide-react';
import { geminiService, decodeAudioData } from '../services/geminiService';
import { processAndDownloadAudio } from '../utils/audioUtils';
import { translations } from '../utils/localization';
import ReactMarkdown from 'react-markdown';
import remarkGfm from 'remark-gfm';
interface ChatBubbleProps {
message: ChatMessage;
language: Language;
onUpdateMessage?: (updatedMessage: ChatMessage) => void;
onError?: (msg: string) => void;
}
const ChatBubble: React.FC<ChatBubbleProps> = ({ message, language, onUpdateMessage, onError }) => {
const isUser = message.role === Role.USER;
const [isPlaying, setIsPlaying] = useState(false);
const [isGeneratingAudio, setIsGeneratingAudio] = useState(false);
const [isCopied, setIsCopied] = useState(false);
const audioContextRef = useRef<AudioContext | null>(null);
const audioSourceRef = useRef<AudioBufferSourceNode | null>(null);
const t = translations[language].chat;
const tCommon = translations[language].common;
const stopAudio = () => {
if (audioSourceRef.current) {
audioSourceRef.current.stop();
audioSourceRef.current = null;
}
setIsPlaying(false);
};
const handlePlayAudio = async () => {
if (isPlaying) {
stopAudio();
return;
}
let base64Data = message.metadata?.audioUrl;
// If no audio cached, generate it on demand
if (!base64Data && message.content && message.type === MessageType.TEXT) {
try {
setIsGeneratingAudio(true);
base64Data = await geminiService.generateSpeech(message.content);
if (!base64Data) throw new Error("Audio generation returned empty");
// Cache it if parent provided update handler
if (onUpdateMessage) {
onUpdateMessage({
...message,
metadata: {
...message.metadata,
audioUrl: base64Data
}
});
}
} catch (e) {
console.error("Audio gen failed", e);
setIsGeneratingAudio(false);
if (onError) onError(translations[language].common.error);
return;
} finally {
setIsGeneratingAudio(false);
}
}
if (!base64Data) return;
try {
if (!audioContextRef.current) {
audioContextRef.current = new (window.AudioContext || (window as any).webkitAudioContext)();
}
const ctx = audioContextRef.current;
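// Browsers keep AudioContexts suspended until a user gesture; resuming here is
// safe because playback is triggered from a click handler.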
if (ctx.state === 'suspended') await ctx.resume();
const buffer = await decodeAudioData(base64Data, ctx);
const source = ctx.createBufferSource();
source.buffer = buffer;
source.connect(ctx.destination);
source.onended = () => setIsPlaying(false);
source.start();
audioSourceRef.current = source;
setIsPlaying(true);
} catch (e) {
console.error("Audio playback error", e);
setIsPlaying(false);
if (onError) onError(translations[language].common.error);
}
};
const handleDownloadAudio = async () => {
let base64Data = message.metadata?.audioUrl;
if (!base64Data && message.content && message.type === MessageType.TEXT) {
try {
setIsGeneratingAudio(true);
base64Data = await geminiService.generateSpeech(message.content);
if (!base64Data) throw new Error("Audio generation returned empty");
if (onUpdateMessage) {
onUpdateMessage({ ...message, metadata: { ...message.metadata, audioUrl: base64Data } });
}
} catch (e) {
console.error(e);
if (onError) onError(translations[language].common.error);
} finally {
setIsGeneratingAudio(false);
}
}
if (base64Data) {
const filename = `sakura_audio_${Date.now()}.wav`;
processAndDownloadAudio(base64Data, filename);
}
};
const handleCopy = () => {
if (message.content) {
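// navigator.clipboard requires a secure context (HTTPS or localhost).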
navigator.clipboard.writeText(message.content);
setIsCopied(true);
setTimeout(() => setIsCopied(false), 2000);
}
};
const formatTime = (timestamp: number) => {
const date = new Date(timestamp);
const now = new Date();
const isToday = date.getDate() === now.getDate() && date.getMonth() === now.getMonth() && date.getFullYear() === now.getFullYear();
const timeStr = date.toLocaleTimeString([], {hour: '2-digit', minute:'2-digit'});
if (isToday) return timeStr;
return `${date.toLocaleDateString()} ${timeStr}`;
};
return (
<div className={`flex w-full mb-6 animate-fade-in-up ${isUser ? 'justify-end' : 'justify-start'}`}>
<div className={`flex max-w-[95%] md:max-w-[75%] ${isUser ? 'flex-row-reverse' : 'flex-row'} gap-3`}>
{/* Avatar */}
<div className={`w-8 h-8 rounded-full flex items-center justify-center flex-shrink-0 shadow-md transform transition-transform hover:scale-110 ${isUser ? 'bg-indigo-600' : 'bg-pink-500'}`}>
{isUser ? <User size={16} className="text-white" /> : <Bot size={16} className="text-white" />}
</div>
{/* Content Bubble */}
<div className={`flex flex-col ${isUser ? 'items-end' : 'items-start'} min-w-0 w-full group`}>
{/* Metadata Badges */}
{message.metadata?.isThinking && (
<span className="text-[10px] flex items-center gap-1 text-amber-700 bg-amber-50 px-2 py-0.5 rounded-full mb-1 border border-amber-200 animate-pulse font-bold">
<BrainCircuit size={10} /> {t.deepThinking}
</span>
)}
<div className={`rounded-2xl p-4 shadow-sm border transition-shadow hover:shadow-md overflow-hidden w-full relative ${
isUser
? 'bg-indigo-600 text-white rounded-tr-sm border-transparent'
: 'bg-white border-pink-100 text-slate-800 rounded-tl-sm'
}`}>
{/* TEXT CONTENT - MARKDOWN RENDERED */}
{message.content && (
<div className={`
text-sm md:text-base leading-relaxed
${isUser ? 'prose-invert text-white' : 'prose-slate text-slate-800'}
prose prose-p:my-1 prose-headings:my-2 prose-strong:font-bold prose-code:bg-black/10 prose-code:rounded prose-code:px-1 prose-code:py-0.5 prose-pre:bg-slate-900 prose-pre:text-slate-100 prose-pre:rounded-lg max-w-none
`}>
{message.type === MessageType.TEXT ? (
<ReactMarkdown remarkPlugins={[remarkGfm]}>
{message.content}
</ReactMarkdown>
) : (
message.content
)}
</div>
)}
{/* IMAGE CONTENT */}
{message.type === MessageType.IMAGE && message.metadata?.imageUrl && (
<div className="mt-2 overflow-hidden rounded-lg border border-white/20 animate-scale-in">
<img src={message.metadata.imageUrl} alt="Uploaded or Generated" className="max-w-full h-auto object-cover" />
</div>
)}
{message.type === MessageType.TEXT && message.metadata?.imageUrl && (
<div className="mt-2 overflow-hidden rounded-lg border border-slate-200 animate-scale-in">
<img src={message.metadata.imageUrl} alt="Context" className="max-w-[150px] h-auto object-cover opacity-90 hover:opacity-100 transition-opacity rounded-md" />
<div className="text-[10px] opacity-70 p-1">{t.imageAnalyzed}</div>
</div>
)}
{/* Action Bar (Copy, TTS, Download) - Always visible for Text messages or if audioUrl exists */}
{(message.type === MessageType.TEXT || message.metadata?.audioUrl) && (
<div className={`flex items-center gap-2 mt-3 pt-2 border-t ${isUser ? 'border-white/20' : 'border-slate-100'}`}>
{/* Play TTS */}
<button
onClick={handlePlayAudio}
disabled={isGeneratingAudio}
className={`flex items-center gap-1.5 px-2 py-1 rounded-lg text-xs font-bold transition-colors active:scale-95 ${
isUser
? 'hover:bg-white/10 text-indigo-100'
: 'hover:bg-pink-50 text-pink-500'
}`}
title={isUser ? t.playUserAudio : t.listenPronunciation}
>
{isGeneratingAudio ? <Loader2 size={14} className="animate-spin" /> : isPlaying ? <Pause size={14} className="animate-pulse" /> : <Volume2 size={14} />}
<span className="opacity-80">{isUser ? t.playUserAudio : t.listenPronunciation}</span>
</button>
{/* Download Audio */}
<button
onClick={handleDownloadAudio}
disabled={isGeneratingAudio}
className={`p-1.5 rounded-lg transition-colors active:scale-95 ${
isUser
? 'hover:bg-white/10 text-indigo-100'
: 'hover:bg-slate-100 text-slate-400 hover:text-slate-600'
}`}
title={tCommon.download}
>
{isGeneratingAudio ? <Loader2 size={14} className="animate-spin" /> : <Download size={14} />}
</button>
{/* Copy Text */}
<button
onClick={handleCopy}
className={`p-1.5 rounded-lg transition-colors active:scale-95 ml-auto flex items-center gap-1 ${
isUser
? 'hover:bg-white/10 text-indigo-100'
: 'hover:bg-slate-100 text-slate-400 hover:text-slate-600'
}`}
title={tCommon.copy}
>
{isCopied ? <Check size={14} /> : <Copy size={14} />}
{isCopied && <span className="text-[10px]">{tCommon.copied}</span>}
</button>
</div>
)}
</div>
{/* Footer (Timestamp + Model) */}
<div className="mt-1 flex items-center justify-between w-full px-1">
<span className="text-[10px] text-slate-400 font-medium opacity-70">
{formatTime(message.timestamp)}
</span>
{!isUser && message.model && (
<div className="flex items-center gap-1 text-[9px] text-slate-400 font-medium uppercase tracking-wide opacity-70">
<Sparkles size={8} /> {tCommon.generatedBy} {message.model.replace('gemini-', '')}
</div>
)}
</div>
</div>
</div>
</div>
);
};
export default ChatBubble;


@@ -0,0 +1,52 @@
import React from 'react';
import { AlertTriangle, X } from 'lucide-react';
import { Language } from '../types';
import { translations } from '../utils/localization';
interface ConfirmModalProps {
isOpen: boolean;
title: string;
message: string;
language: Language;
onConfirm: () => void;
onCancel: () => void;
}
const ConfirmModal: React.FC<ConfirmModalProps> = ({ isOpen, title, message, language, onConfirm, onCancel }) => {
if (!isOpen) return null;
const t = translations[language].common;
return (
<div className="fixed inset-0 z-[200] flex items-center justify-center bg-slate-900/40 backdrop-blur-sm animate-fade-in p-4">
<div className="bg-white w-full max-w-sm rounded-2xl shadow-2xl p-6 animate-scale-in relative">
<button onClick={onCancel} className="absolute top-4 right-4 text-slate-400 hover:text-slate-600">
<X size={20} />
</button>
<div className="flex flex-col items-center text-center">
<div className="w-12 h-12 bg-red-50 text-red-500 rounded-full flex items-center justify-center mb-4">
<AlertTriangle size={24} />
</div>
<h3 className="text-lg font-bold text-slate-800 mb-2">{title}</h3>
<p className="text-sm text-slate-500 mb-6">{message}</p>
<div className="flex gap-3 w-full">
<button
onClick={onCancel}
className="flex-1 py-2.5 px-4 bg-slate-100 hover:bg-slate-200 text-slate-700 rounded-xl font-bold transition-colors"
>
{t.cancel}
</button>
<button
onClick={onConfirm}
className="flex-1 py-2.5 px-4 bg-red-500 hover:bg-red-600 text-white rounded-xl font-bold transition-colors shadow-lg shadow-red-200"
>
{t.confirm}
</button>
</div>
</div>
</div>
</div>
);
};
export default ConfirmModal;

components/Onboarding.tsx

@@ -0,0 +1,127 @@
import React, { useState } from 'react';
import { MessageCircle, Sparkles, Mic2, X, ArrowRight, Check } from 'lucide-react';
import { Language } from '../types';
import { translations } from '../utils/localization';
interface OnboardingProps {
language: Language;
setLanguage: (lang: Language) => void;
onComplete: () => void;
}
const Onboarding: React.FC<OnboardingProps> = ({ language, setLanguage, onComplete }) => {
const t = translations[language].onboarding;
const [step, setStep] = useState(0);
const steps = [
{
title: t.step1Title,
desc: t.step1Desc,
icon: <MessageCircle size={48} className="text-indigo-500" />,
color: 'bg-indigo-50',
},
{
title: t.step2Title,
desc: t.step2Desc,
icon: <Mic2 size={48} className="text-orange-500" />,
color: 'bg-orange-50',
},
{
title: t.step3Title,
desc: t.step3Desc,
icon: <Sparkles size={48} className="text-blue-500" />,
color: 'bg-blue-50',
}
];
const handleNext = () => {
if (step < steps.length - 1) {
setStep(step + 1);
} else {
onComplete();
}
};
return (
<div className="fixed inset-0 z-[100] bg-slate-900/60 backdrop-blur-sm flex items-center justify-center p-4 animate-fade-in">
<div className="bg-white rounded-3xl shadow-2xl max-w-md w-full overflow-hidden relative animate-scale-in">
{/* Skip/Close */}
<button onClick={onComplete} className="absolute top-4 right-4 text-white/80 hover:text-white z-10">
<X size={24} />
</button>
{/* Header Image/Graphic */}
<div className="h-48 bg-gradient-to-br from-pink-400 to-rose-500 flex flex-col items-center justify-center relative overflow-hidden">
<div className="absolute -bottom-10 -left-10 w-32 h-32 bg-white/20 rounded-full blur-xl"></div>
<div className="absolute top-10 right-10 w-20 h-20 bg-white/20 rounded-full blur-lg"></div>
<Sparkles className="text-white w-16 h-16 mb-2 animate-pulse" />
<h2 className="text-2xl font-extrabold text-white tracking-tight">Sakura Sensei</h2>
{/* Language Switcher in Header (Step 0) */}
{step === 0 && (
<div className="mt-4 flex gap-2 bg-white/20 p-1 rounded-full backdrop-blur-sm">
{(['en', 'ja', 'zh'] as Language[]).map(lang => (
<button
key={lang}
onClick={() => setLanguage(lang)}
className={`px-3 py-1 rounded-full text-xs font-bold transition-all ${
language === lang ? 'bg-white text-rose-500 shadow-md' : 'text-white hover:bg-white/20'
}`}
>
{lang === 'en' ? 'English' : lang === 'ja' ? '日本語' : '中文'}
</button>
))}
</div>
)}
</div>
<div className="p-8">
<div className="mb-6">
<h3 className="text-2xl font-bold text-slate-800 mb-2">{t.welcome}</h3>
<p className="text-slate-500">{t.desc1}</p>
</div>
{/* Step Card */}
<div className="relative h-48">
{steps.map((s, idx) => (
<div
key={idx}
className={`absolute inset-0 flex flex-col items-center text-center transition-all duration-500 transform ${
idx === step ? 'opacity-100 translate-x-0' : idx < step ? 'opacity-0 -translate-x-full' : 'opacity-0 translate-x-full'
}`}
>
<div className={`w-20 h-20 rounded-2xl ${s.color} flex items-center justify-center mb-4 shadow-inner`}>
{s.icon}
</div>
<h4 className="text-lg font-bold text-slate-800 mb-2">{s.title}</h4>
<p className="text-sm text-slate-500 leading-relaxed">{s.desc}</p>
</div>
))}
</div>
{/* Controls */}
<div className="flex items-center justify-between mt-8">
{/* Indicators */}
<div className="flex gap-2">
{steps.map((_, idx) => (
<div key={idx} className={`h-2 rounded-full transition-all duration-300 ${idx === step ? 'w-6 bg-indigo-500' : 'w-2 bg-slate-200'}`} />
))}
</div>
<button
onClick={handleNext}
className="flex items-center gap-2 px-6 py-3 bg-slate-900 text-white rounded-xl font-bold hover:bg-slate-800 transition-all active:scale-95 shadow-lg"
>
{step === steps.length - 1 ? t.startBtn : translations[language].common.next}
{step === steps.length - 1 ? <Check size={18} /> : <ArrowRight size={18} />}
</button>
</div>
</div>
</div>
</div>
);
};
export default Onboarding;

components/Toast.tsx

@@ -0,0 +1,64 @@
import React, { useEffect } from 'react';
import { CheckCircle, AlertCircle, X } from 'lucide-react';
export interface ToastMessage {
id: string;
type: 'success' | 'error' | 'info';
message: string;
}
interface ToastProps {
toasts: ToastMessage[];
onRemove: (id: string) => void;
}
const ToastContainer: React.FC<ToastProps> = ({ toasts, onRemove }) => {
return (
<div className="fixed top-4 left-1/2 transform -translate-x-1/2 z-[100] flex flex-col gap-2 w-full max-w-md px-4">
{toasts.map((toast) => (
<ToastItem key={toast.id} toast={toast} onRemove={onRemove} />
))}
</div>
);
};
const ToastItem: React.FC<{ toast: ToastMessage; onRemove: (id: string) => void }> = ({ toast, onRemove }) => {
useEffect(() => {
const timer = setTimeout(() => {
onRemove(toast.id);
}, 3000);
return () => clearTimeout(timer);
}, [toast.id, onRemove]);
const getStyles = () => {
switch (toast.type) {
case 'success':
return 'bg-emerald-50 border-emerald-100 text-emerald-700';
case 'error':
return 'bg-red-50 border-red-100 text-red-700';
default:
return 'bg-indigo-50 border-indigo-100 text-indigo-700';
}
};
const getIcon = () => {
switch (toast.type) {
case 'success': return <CheckCircle size={20} className="text-emerald-500" />;
case 'error': return <AlertCircle size={20} className="text-red-500" />;
default: return <AlertCircle size={20} className="text-indigo-500" />;
}
};
return (
<div className={`flex items-center gap-3 p-4 rounded-2xl border shadow-lg animate-fade-in-up ${getStyles()}`}>
{getIcon()}
<p className="text-sm font-bold flex-1">{toast.message}</p>
<button onClick={() => onRemove(toast.id)} className="opacity-50 hover:opacity-100">
<X size={16} />
</button>
</div>
);
};
export default ToastContainer;
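A minimal sketch of how the container might be wired into a parent component, assuming the file lives at components/Toast.tsx; the state hook and id scheme are illustrative, not part of this commit:
import React, { useState, useCallback } from 'react';
import ToastContainer, { ToastMessage } from './components/Toast';
const App: React.FC = () => {
const [toasts, setToasts] = useState<ToastMessage[]>([]);
// Hypothetical helper: push a toast with a unique id.
const addToast = useCallback((type: ToastMessage['type'], message: string) => {
setToasts(prev => [...prev, { id: crypto.randomUUID(), type, message }]);
}, []);
const removeToast = useCallback((id: string) => {
setToasts(prev => prev.filter(t => t.id !== id));
}, []);
return (
<>
<button onClick={() => addToast('success', 'Saved!')}>Save</button>
<ToastContainer toasts={toasts} onRemove={removeToast} />
</>
);
};
export default App;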