import React, { useState, useRef, useEffect } from 'react';
import { Language, TranslationRecord } from '../types';
import { geminiService, decodeAudioData } from '../services/geminiService';
import { processAndDownloadAudio } from '../utils/audioUtils';
import { translations } from '../utils/localization';
import {
  ArrowRightLeft, Copy, Languages, Sparkles, Loader2, Trash2, Camera,
  Image as ImageIcon, History, X, PanelRightClose, PanelRightOpen,
  Volume2, Square, Download
} from 'lucide-react';

interface TranslationViewProps {
  language: Language;
  history: TranslationRecord[];
  addToHistory: (record: TranslationRecord) => void;
  clearHistory: () => void;
  onDeleteHistoryItem: (id: string) => void;
}

const TranslationView: React.FC<TranslationViewProps> = ({ language, history, addToHistory, clearHistory, onDeleteHistoryItem }) => {
  const t = translations[language].translation;

  const [inputText, setInputText] = useState('');
  const [outputText, setOutputText] = useState('');
  const [isLoading, setIsLoading] = useState(false);
  const [loadingStatus, setLoadingStatus] = useState('');
  const [sourceLang, setSourceLang] = useState('Auto');
  const [targetLang, setTargetLang] = useState('Japanese');

  // Audio State
  const [playingId, setPlayingId] = useState<'input' | 'output' | null>(null);
  const [downloadingId, setDownloadingId] = useState<'input' | 'output' | null>(null);
  const audioContextRef = useRef<AudioContext | null>(null);
  const audioSourceRef = useRef<AudioBufferSourceNode | null>(null);

  // Sidebar State - Default Closed
  const [isHistoryOpen, setIsHistoryOpen] = useState(false);

  const fileInputRef = useRef<HTMLInputElement>(null);
  const cameraInputRef = useRef<HTMLInputElement>(null);

  const LANG_OPTIONS = [
    { value: 'Auto', label: t.langs.auto },
    { value: 'English', label: t.langs.en },
    { value: 'Japanese', label: t.langs.ja },
    { value: 'Chinese', label: t.langs.zh },
    { value: 'Korean', label: t.langs.ko },
    { value: 'French', label: t.langs.fr },
    { value: 'Spanish', label: t.langs.es },
  ];
  const TARGET_OPTIONS = LANG_OPTIONS.filter(o => o.value !== 'Auto');

  // Cleanup audio
  useEffect(() => {
    return () => stopAudio();
  }, []);

  const stopAudio = () => {
    if (audioSourceRef.current) {
      audioSourceRef.current.stop();
      audioSourceRef.current = null;
    }
    setPlayingId(null);
  };

  // Generate speech for the given panel and play it via the Web Audio API.
  // Clicking the same panel again stops playback; starting another panel stops the old one.
  const playAudio = async (text: string, type: 'input' | 'output') => {
    if (!text.trim()) return;
    if (playingId === type) {
      stopAudio();
      return;
    }
    if (playingId) stopAudio();

    setPlayingId(type);
    try {
      const audioBase64 = await geminiService.generateSpeech(text);
      if (audioBase64) {
        if (!audioContextRef.current) {
          audioContextRef.current = new (window.AudioContext || (window as any).webkitAudioContext)();
        }
        const ctx = audioContextRef.current;
        if (ctx.state === 'suspended') await ctx.resume();

        const buffer = await decodeAudioData(audioBase64, ctx);
        const source = ctx.createBufferSource();
        source.buffer = buffer;
        source.connect(ctx.destination);
        source.onended = () => setPlayingId(null);
        source.start();
        audioSourceRef.current = source;
      } else {
        setPlayingId(null);
      }
    } catch (e) {
      console.error(e);
      setPlayingId(null);
    }
  };

  // Generate speech and save it to disk as a WAV file.
  const handleDownload = async (text: string, type: 'input' | 'output') => {
    if (!text.trim()) return;
    setDownloadingId(type);
    try {
      const audioBase64 = await geminiService.generateSpeech(text);
      if (audioBase64) {
        processAndDownloadAudio(audioBase64, `translation_${type}_${Date.now()}.wav`);
      }
    } catch (e) {
      console.error(e);
    } finally {
      setDownloadingId(null);
    }
  };

  // Translate the typed text and record the result in history.
  const handleTranslate = async () => {
    if (!inputText.trim()) return;
    setIsLoading(true);
    setLoadingStatus(t.translating);
    try {
      const result = await geminiService.translateText(inputText, targetLang, sourceLang);
      setOutputText(result);
      addToHistory({
        id: Date.now().toString(),
        sourceText: inputText,
        targetText: result,
        sourceLang: sourceLang === 'Auto' ? 'Detected' : sourceLang,
        targetLang: targetLang,
        timestamp: Date.now()
      });
    } catch (e) {
      console.error(e);
      setOutputText(t.errorTranslating);
    } finally {
      setIsLoading(false);
    }
  };

  // Extract and translate text from an image picked from the gallery or captured with the camera.
  const handleImageSelect = async (e: React.ChangeEvent<HTMLInputElement>) => {
    const file = e.target.files?.[0];
    if (!file) return;

    setIsLoading(true);
    setLoadingStatus(t.extracting);

    const reader = new FileReader();
    reader.onloadend = async () => {
      const base64 = reader.result as string;
      try {
        const result = await geminiService.translateImage(base64, targetLang, sourceLang);
        if (result) {
          setInputText(result.original);
          setOutputText(result.translated);
          addToHistory({
            id: Date.now().toString(),
            sourceText: result.original,
            targetText: result.translated,
            sourceLang: sourceLang === 'Auto' ? 'Detected (Image)' : sourceLang,
            targetLang: targetLang,
            timestamp: Date.now()
          });
        } else {
          alert(t.imageReadError);
        }
      } catch (err) {
        console.error(err);
        alert(t.imageTransError);
      } finally {
        setIsLoading(false);
      }
    };
    reader.readAsDataURL(file);
  };

  const handleCopy = (text: string) => {
    navigator.clipboard.writeText(text);
  };

  // Swap source/target languages and the two text panes (unavailable in Auto-detect mode).
  const handleSwap = () => {
    if (sourceLang === 'Auto') return;
    setSourceLang(targetLang);
    setTargetLang(sourceLang);
    setInputText(outputText);
    setOutputText(inputText);
  };
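  // --- History panel ---
  // Header with a clear-all action, an empty state, and the saved translations
  // (newest first); selecting an entry restores it into the editor and closes
  // the panel on small screens.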

  const HistoryContent = () => (
    <div>
      {/* Panel header */}
      <div>
        <History />
        <h3>{t.history}</h3>
        {history.length > 0 && (
          <button onClick={clearHistory}>
            <Trash2 />
          </button>
        )}
      </div>

      {/* Empty state */}
      {history.length === 0 && (
        <p>{t.history}</p>
      )}

      {/* Saved entries, newest first */}
      {history.slice().reverse().map((rec) => (
        <div
          key={rec.id}
          onClick={() => {
            setInputText(rec.sourceText);
            setOutputText(rec.targetText);
            if (window.innerWidth < 768) setIsHistoryOpen(false);
          }}
        >
          {/* Icon */}
          <Languages />

          {/* Content */}
          <div>
            <div>
              <span>{rec.sourceLang} → {rec.targetLang}</span>
              <span>
                {new Date(rec.timestamp).toLocaleDateString()}{' '}
                {new Date(rec.timestamp).toLocaleTimeString([], { hour: '2-digit', minute: '2-digit' })}
              </span>
            </div>
            <p>{rec.sourceText}</p>
          </div>

          {/* Delete Button */}
          <button
            onClick={(e) => {
              e.stopPropagation();
              onDeleteHistoryItem(rec.id);
            }}
          >
            <X />
          </button>
        </div>
      ))}
    </div>
  );
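  // Main column: sticky toolbar (title + language controls) above the
  // source/target grid; the history panel is toggled via isHistoryOpen.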
  return (
    <div>
      {/* Main Translation Area */}
      <div>
        {/* Sticky Header / Toolbar */}
        <div>
          {/* Title Header */}
          <div>
            <h2>{t.title}</h2>
            <button onClick={() => setIsHistoryOpen(!isHistoryOpen)}>
              {isHistoryOpen ? <PanelRightClose /> : <PanelRightOpen />}
            </button>
          </div>

          {/* Controls */}
          <div>
            <select value={sourceLang} onChange={(e) => setSourceLang(e.target.value)}>
              {LANG_OPTIONS.map(o => (
                <option key={o.value} value={o.value}>{o.label}</option>
              ))}
            </select>
            <button onClick={handleSwap} disabled={sourceLang === 'Auto'}>
              <ArrowRightLeft />
            </button>
            <select value={targetLang} onChange={(e) => setTargetLang(e.target.value)}>
              {TARGET_OPTIONS.map(o => (
                <option key={o.value} value={o.value}>{o.label}</option>
              ))}
            </select>

            {/* Hidden file inputs, triggered from the gallery / camera buttons */}
            <input
              ref={fileInputRef}
              type="file"
              accept="image/*"
              style={{ display: 'none' }}
              onChange={handleImageSelect}
            />
            <input
              ref={cameraInputRef}
              type="file"
              accept="image/*"
              capture="environment"
              style={{ display: 'none' }}
              onChange={handleImageSelect}
            />
            <button onClick={() => fileInputRef.current?.click()}>
              <ImageIcon />
            </button>
            <button onClick={() => cameraInputRef.current?.click()}>
              <Camera />
            </button>

            <button onClick={handleTranslate} disabled={isLoading || !inputText.trim()}>
              {isLoading ? <Loader2 /> : <Sparkles />}
              {isLoading && <span>{loadingStatus}</span>}
            </button>
          </div>
        </div>

        {/* Input/Output Grid */}
        <div>
          {/* Source */}