import React, { useState, useRef } from 'react';
import { Role, MessageType, ChatMessage, Language } from '../types';
import {
  User, Bot, BrainCircuit, Volume2, Pause, Sparkles, Download, Copy, Check, Loader2,
} from 'lucide-react';
import { geminiService, decodeAudioData } from '../services/geminiService';
import { processAndDownloadAudio } from '../utils/audioUtils';
import { translations } from '../utils/localization';
import ReactMarkdown from 'react-markdown';
import remarkGfm from 'remark-gfm';

interface ChatBubbleProps {
  message: ChatMessage;
  language: Language;
  /** Lets the parent persist changes, e.g. caching generated audio in metadata.audioUrl. */
  onUpdateMessage?: (updatedMessage: ChatMessage) => void;
  onError?: (msg: string) => void;
}

const ChatBubble: React.FC<ChatBubbleProps> = ({ message, language, onUpdateMessage, onError }) => {
  const isUser = message.role === Role.USER;
  const [isPlaying, setIsPlaying] = useState(false);
  const [isGeneratingAudio, setIsGeneratingAudio] = useState(false);
  const [isCopied, setIsCopied] = useState(false);
  const audioContextRef = useRef<AudioContext | null>(null);
  const audioSourceRef = useRef<AudioBufferSourceNode | null>(null);

  const t = translations[language].chat;
  const tCommon = translations[language].common;

  // Stop any in-flight playback and release the source node.
  const stopAudio = () => {
    if (audioSourceRef.current) {
      audioSourceRef.current.stop();
      audioSourceRef.current = null;
    }
    setIsPlaying(false);
  };

  const handlePlayAudio = async () => {
    if (isPlaying) {
      stopAudio();
      return;
    }

    let base64Data = message.metadata?.audioUrl;

    // If no audio is cached, generate it on demand for text messages.
    if (!base64Data && message.content && message.type === MessageType.TEXT) {
      try {
        setIsGeneratingAudio(true);
        base64Data = await geminiService.generateSpeech(message.content);
        if (!base64Data) throw new Error("Audio generation returned empty");
        // Cache it if the parent provided an update handler.
        if (onUpdateMessage) {
          onUpdateMessage({ ...message, metadata: { ...message.metadata, audioUrl: base64Data } });
        }
      } catch (e) {
        console.error("Audio gen failed", e);
        if (onError) onError(translations[language].common.error);
        return;
      } finally {
        setIsGeneratingAudio(false);
      }
    }

    if (!base64Data) return;

    try {
      if (!audioContextRef.current) {
        audioContextRef.current = new (window.AudioContext || (window as any).webkitAudioContext)();
      }
      const ctx = audioContextRef.current;
      if (ctx.state === 'suspended') await ctx.resume();

      const buffer = await decodeAudioData(base64Data, ctx);
      const source = ctx.createBufferSource();
      source.buffer = buffer;
      source.connect(ctx.destination);
      source.onended = () => setIsPlaying(false);
      source.start();

      audioSourceRef.current = source;
      setIsPlaying(true);
    } catch (e) {
      console.error("Audio playback error", e);
      setIsPlaying(false);
      if (onError) onError(translations[language].common.error);
    }
  };

  const handleDownloadAudio = async () => {
    let base64Data = message.metadata?.audioUrl;

    if (!base64Data && message.content && message.type === MessageType.TEXT) {
      try {
        setIsGeneratingAudio(true);
        base64Data = await geminiService.generateSpeech(message.content);
        if (!base64Data) throw new Error("Audio generation returned empty");
        if (onUpdateMessage) {
          onUpdateMessage({ ...message, metadata: { ...message.metadata, audioUrl: base64Data } });
        }
      } catch (e) {
        console.error(e);
        if (onError) onError(translations[language].common.error);
      } finally {
        setIsGeneratingAudio(false);
      }
    }

    if (base64Data) {
      const filename = `sakura_audio_${Date.now()}.wav`;
      processAndDownloadAudio(base64Data, filename);
    }
  };

  const handleCopy = () => {
    if (message.content) {
      navigator.clipboard.writeText(message.content);
      setIsCopied(true);
      setTimeout(() => setIsCopied(false), 2000);
    }
  };
  const formatTime = (timestamp: number) => {
    const date = new Date(timestamp);
    const now = new Date();
    const isToday =
      date.getDate() === now.getDate() &&
      date.getMonth() === now.getMonth() &&
      date.getFullYear() === now.getFullYear();
    const timeStr = date.toLocaleTimeString([], { hour: '2-digit', minute: '2-digit' });
    if (isToday) return timeStr;
    return `${date.toLocaleDateString()} ${timeStr}`;
  };

  return (
    <div>
      {/* Avatar */}
      <div>{isUser ? <User /> : <Bot />}</div>

      {/* Content Bubble */}
      <div>
        {/* Metadata Badges */}
        {message.metadata?.isThinking && (
          <span>
            <BrainCircuit /> {t.deepThinking}
          </span>
        )}

        {/* TEXT CONTENT - MARKDOWN RENDERED */}
        {message.content && (
          <div>
            {message.type === MessageType.TEXT ? (
              <ReactMarkdown remarkPlugins={[remarkGfm]}>{message.content}</ReactMarkdown>
            ) : (
              message.content
            )}
          </div>
        )}

        {/* IMAGE CONTENT */}
        {message.type === MessageType.IMAGE && message.metadata?.imageUrl && (
          <img src={message.metadata.imageUrl} alt="Uploaded or Generated" />
        )}
        {message.type === MessageType.TEXT && message.metadata?.imageUrl && (
          <div>
            <img src={message.metadata.imageUrl} alt="Context" />
            <span>{t.imageAnalyzed}</span>
          </div>
        )}

        {/* Action Bar (Copy, TTS, Download) - always visible for text messages or if audioUrl exists */}
        {(message.type === MessageType.TEXT || message.metadata?.audioUrl) && (
          <div>
            {/* Play TTS */}
            <button onClick={handlePlayAudio} disabled={isGeneratingAudio}>
              {isGeneratingAudio ? <Loader2 /> : isPlaying ? <Pause /> : <Volume2 />}
            </button>
            {/* Download Audio */}
            <button onClick={handleDownloadAudio} disabled={isGeneratingAudio}>
              <Download />
            </button>
            {/* Copy Text */}
            <button onClick={handleCopy}>{isCopied ? <Check /> : <Copy />}</button>
          </div>
        )}

        {/* Footer (Timestamp + Model) */}
        <div>
          <span>{formatTime(message.timestamp)}</span>
          {!isUser && message.model && (
            <span>
              <Sparkles /> {tCommon.generatedBy} {message.model.replace('gemini-', '')}
            </span>
          )}
        </div>
      </div>
    </div>
  );
};

export default ChatBubble;
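
/*
 * Minimal usage sketch (illustrative only). It assumes the parent component
 * owns the message list in state and persists the audio that ChatBubble caches
 * through onUpdateMessage; `messages`, `setMessages`, `language`, and
 * `showToast` are hypothetical names, not exports of this module.
 *
 *   {messages.map((m) => (
 *     <ChatBubble
 *       key={m.timestamp}
 *       message={m}
 *       language={language}
 *       onUpdateMessage={(updated) =>
 *         setMessages((prev) => prev.map((p) => (p.timestamp === updated.timestamp ? updated : p)))
 *       }
 *       onError={(msg) => showToast(msg)}
 *     />
 *   ))}
 */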