267 lines
11 KiB
TypeScript
267 lines
11 KiB
TypeScript
|
|
|
|
import React, { useEffect, useRef, useState } from 'react';
import ReactMarkdown from 'react-markdown';
import remarkGfm from 'remark-gfm';
import { BrainCircuit, Bot, Check, Copy, Download, Loader2, Pause, Sparkles, User, Volume2 } from 'lucide-react';
import { ChatMessage, Language, MessageType, Role } from '../types';
import { decodeAudioData, geminiService } from '../services/geminiService';
import { processAndDownloadAudio } from '../utils/audioUtils';
import { translations } from '../utils/localization';
|
|
|
|
interface ChatBubbleProps {
|
|
message: ChatMessage;
|
|
language: Language;
|
|
onUpdateMessage?: (updatedMessage: ChatMessage) => void;
|
|
onError?: (msg: string) => void;
|
|
}
|
|
|
|
const ChatBubble: React.FC<ChatBubbleProps> = ({ message, language, onUpdateMessage, onError }) => {
|
|
const isUser = message.role === Role.USER;
|
|
const [isPlaying, setIsPlaying] = useState(false);
|
|
const [isGeneratingAudio, setIsGeneratingAudio] = useState(false);
|
|
const [isCopied, setIsCopied] = useState(false);
|
|
|
|
const audioContextRef = useRef<AudioContext | null>(null);
|
|
const audioSourceRef = useRef<AudioBufferSourceNode | null>(null);
|
|
|
|
const t = translations[language].chat;
|
|
const tCommon = translations[language].common;
|
|
|
|
const stopAudio = () => {
|
|
if (audioSourceRef.current) {
|
|
audioSourceRef.current.stop();
|
|
audioSourceRef.current = null;
|
|
}
|
|
setIsPlaying(false);
|
|
};
|
|
|
|
const handlePlayAudio = async () => {
|
|
if (isPlaying) {
|
|
stopAudio();
|
|
return;
|
|
}
|
|
|
|
let base64Data = message.metadata?.audioUrl;
|
|
|
|
// If no audio cached, generate it on demand
|
|
if (!base64Data && message.content && message.type === MessageType.TEXT) {
|
|
try {
|
|
setIsGeneratingAudio(true);
|
|
base64Data = await geminiService.generateSpeech(message.content);
|
|
|
|
if (!base64Data) throw new Error("Audio generation returned empty");
|
|
|
|
// Cache it if parent provided update handler
|
|
if (onUpdateMessage) {
|
|
onUpdateMessage({
|
|
...message,
|
|
metadata: {
|
|
...message.metadata,
|
|
audioUrl: base64Data
|
|
}
|
|
});
|
|
}
|
|
} catch (e) {
|
|
console.error("Audio gen failed", e);
|
|
setIsGeneratingAudio(false);
|
|
if (onError) onError(translations[language].common.error);
|
|
return;
|
|
} finally {
|
|
setIsGeneratingAudio(false);
|
|
}
|
|
}
|
|
|
|
if (!base64Data) return;
|
|
|
|
try {
|
|
if (!audioContextRef.current) {
|
|
audioContextRef.current = new (window.AudioContext || (window as any).webkitAudioContext)();
|
|
}
|
|
const ctx = audioContextRef.current;
|
|
if (ctx.state === 'suspended') await ctx.resume();
|
|
|
|
const buffer = await decodeAudioData(base64Data, ctx);
|
|
|
|
const source = ctx.createBufferSource();
|
|
source.buffer = buffer;
|
|
source.connect(ctx.destination);
|
|
source.onended = () => setIsPlaying(false);
|
|
source.start();
|
|
audioSourceRef.current = source;
|
|
setIsPlaying(true);
|
|
} catch (e) {
|
|
console.error("Audio playback error", e);
|
|
setIsPlaying(false);
|
|
if (onError) onError(translations[language].common.error);
|
|
}
|
|
};
|
|
|
|
const handleDownloadAudio = async () => {
|
|
let base64Data = message.metadata?.audioUrl;
|
|
if (!base64Data && message.content && message.type === MessageType.TEXT) {
|
|
try {
|
|
setIsGeneratingAudio(true);
|
|
base64Data = await geminiService.generateSpeech(message.content);
|
|
if (!base64Data) throw new Error("Audio generation returned empty");
|
|
|
|
if (onUpdateMessage) {
|
|
onUpdateMessage({ ...message, metadata: { ...message.metadata, audioUrl: base64Data } });
|
|
}
|
|
} catch (e) {
|
|
console.error(e);
|
|
if (onError) onError(translations[language].common.error);
|
|
} finally {
|
|
setIsGeneratingAudio(false);
|
|
}
|
|
}
|
|
|
|
if (base64Data) {
|
|
const filename = `sakura_audio_${Date.now()}.wav`;
|
|
processAndDownloadAudio(base64Data, filename);
|
|
}
|
|
};
|
|
|
|
const handleCopy = () => {
|
|
if (message.content) {
|
|
navigator.clipboard.writeText(message.content);
|
|
setIsCopied(true);
|
|
setTimeout(() => setIsCopied(false), 2000);
|
|
}
|
|
};
|
|
|
|
const formatTime = (timestamp: number) => {
|
|
const date = new Date(timestamp);
|
|
const now = new Date();
|
|
const isToday = date.getDate() === now.getDate() && date.getMonth() === now.getMonth() && date.getFullYear() === now.getFullYear();
|
|
|
|
const timeStr = date.toLocaleTimeString([], {hour: '2-digit', minute:'2-digit'});
|
|
if (isToday) return timeStr;
|
|
|
|
return `${date.toLocaleDateString()} ${timeStr}`;
|
|
};
|
|
|
|
return (
|
|
<div className={`flex w-full mb-6 animate-fade-in-up ${isUser ? 'justify-end' : 'justify-start'}`}>
|
|
<div className={`flex max-w-[95%] md:max-w-[75%] ${isUser ? 'flex-row-reverse' : 'flex-row'} gap-3`}>
|
|
|
|
{/* Avatar */}
|
|
<div className={`w-8 h-8 rounded-full flex items-center justify-center flex-shrink-0 shadow-md transform transition-transform hover:scale-110 ${isUser ? 'bg-indigo-600' : 'bg-pink-500'}`}>
|
|
{isUser ? <User size={16} className="text-white" /> : <Bot size={16} className="text-white" />}
|
|
</div>
|
|
|
|
{/* Content Bubble */}
|
|
<div className={`flex flex-col ${isUser ? 'items-end' : 'items-start'} min-w-0 w-full group`}>
|
|
|
|
{/* Metadata Badges */}
|
|
{message.metadata?.isThinking && (
|
|
<span className="text-[10px] flex items-center gap-1 text-amber-700 bg-amber-50 px-2 py-0.5 rounded-full mb-1 border border-amber-200 animate-pulse font-bold">
|
|
<BrainCircuit size={10} /> {t.deepThinking}
|
|
</span>
|
|
)}
|
|
|
|
<div className={`rounded-2xl p-4 shadow-sm border transition-shadow hover:shadow-md overflow-hidden w-full relative ${
|
|
isUser
|
|
? 'bg-indigo-600 text-white rounded-tr-sm border-transparent'
|
|
: 'bg-white border-pink-100 text-slate-800 rounded-tl-sm'
|
|
}`}>
|
|
|
|
{/* TEXT CONTENT - MARKDOWN RENDERED */}
|
|
{message.content && (
|
|
<div className={`
|
|
text-sm md:text-base leading-relaxed
|
|
${isUser ? 'prose-invert text-white' : 'prose-slate text-slate-800'}
|
|
prose prose-p:my-1 prose-headings:my-2 prose-strong:font-bold prose-code:bg-black/10 prose-code:rounded prose-code:px-1 prose-code:py-0.5 prose-pre:bg-slate-900 prose-pre:text-slate-100 prose-pre:rounded-lg max-w-none
|
|
`}>
|
|
{message.type === MessageType.TEXT ? (
|
|
<ReactMarkdown remarkPlugins={[remarkGfm]}>
|
|
{message.content}
|
|
</ReactMarkdown>
|
|
) : (
|
|
message.content
|
|
)}
|
|
</div>
|
|
)}
|
|
|
|
{/* IMAGE CONTENT */}
|
|
{message.type === MessageType.IMAGE && message.metadata?.imageUrl && (
|
|
<div className="mt-2 overflow-hidden rounded-lg border border-white/20 animate-scale-in">
|
|
<img src={message.metadata.imageUrl} alt="Uploaded or Generated" className="max-w-full h-auto object-cover" />
|
|
</div>
|
|
)}
|
|
{message.type === MessageType.TEXT && message.metadata?.imageUrl && (
|
|
<div className="mt-2 overflow-hidden rounded-lg border border-slate-200 animate-scale-in">
|
|
<img src={message.metadata.imageUrl} alt="Context" className="max-w-[150px] h-auto object-cover opacity-90 hover:opacity-100 transition-opacity rounded-md" />
|
|
<div className="text-[10px] opacity-70 p-1">{t.imageAnalyzed}</div>
|
|
</div>
|
|
)}
|
|
|
|
{/* Action Bar (Copy, TTS, Download) - Always visible for Text messages or if audioUrl exists */}
|
|
{(message.type === MessageType.TEXT || message.metadata?.audioUrl) && (
|
|
<div className={`flex items-center gap-2 mt-3 pt-2 border-t ${isUser ? 'border-white/20' : 'border-slate-100'}`}>
|
|
|
|
{/* Play TTS */}
|
|
<button
|
|
onClick={handlePlayAudio}
|
|
disabled={isGeneratingAudio}
|
|
className={`flex items-center gap-1.5 px-2 py-1 rounded-lg text-xs font-bold transition-colors active:scale-95 ${
|
|
isUser
|
|
? 'hover:bg-white/10 text-indigo-100'
|
|
: 'hover:bg-pink-50 text-pink-500'
|
|
}`}
|
|
title={isUser ? t.playUserAudio : t.listenPronunciation}
|
|
>
|
|
{isGeneratingAudio ? <Loader2 size={14} className="animate-spin" /> : isPlaying ? <Pause size={14} className="animate-pulse" /> : <Volume2 size={14} />}
|
|
<span className="opacity-80">{isUser ? t.playUserAudio : t.listenPronunciation}</span>
|
|
</button>
|
|
|
|
{/* Download Audio */}
|
|
<button
|
|
onClick={handleDownloadAudio}
|
|
disabled={isGeneratingAudio}
|
|
className={`p-1.5 rounded-lg transition-colors active:scale-95 ${
|
|
isUser
|
|
? 'hover:bg-white/10 text-indigo-100'
|
|
: 'hover:bg-slate-100 text-slate-400 hover:text-slate-600'
|
|
}`}
|
|
title={tCommon.download}
|
|
>
|
|
{isGeneratingAudio ? <Loader2 size={14} className="animate-spin" /> : <Download size={14} />}
|
|
</button>
|
|
|
|
{/* Copy Text */}
|
|
<button
|
|
onClick={handleCopy}
|
|
className={`p-1.5 rounded-lg transition-colors active:scale-95 ml-auto flex items-center gap-1 ${
|
|
isUser
|
|
? 'hover:bg-white/10 text-indigo-100'
|
|
: 'hover:bg-slate-100 text-slate-400 hover:text-slate-600'
|
|
}`}
|
|
title={tCommon.copy}
|
|
>
|
|
{isCopied ? <Check size={14} /> : <Copy size={14} />}
|
|
{isCopied && <span className="text-[10px]">{tCommon.copied}</span>}
|
|
</button>
|
|
</div>
|
|
)}
|
|
</div>
|
|
|
|
{/* Footer (Timestamp + Model) */}
|
|
<div className="mt-1 flex items-center justify-between w-full px-1">
|
|
<span className="text-[10px] text-slate-400 font-medium opacity-70">
|
|
{formatTime(message.timestamp)}
|
|
</span>
|
|
{!isUser && message.model && (
|
|
<div className="flex items-center gap-1 text-[9px] text-slate-400 font-medium uppercase tracking-wide opacity-70">
|
|
<Sparkles size={8} /> {tCommon.generatedBy} {message.model.replace('gemini-', '')}
|
|
</div>
|
|
)}
|
|
</div>
|
|
|
|
</div>
|
|
</div>
|
|
</div>
|
|
);
|
|
};
|
|
|
|
export default ChatBubble; |