import React, { useState, useRef, useEffect } from 'react';
import { Language, OCRAnalysis, ChatMessage, Role, MessageType, OCRRecord } from '../types';
import { geminiService, decodeAudioData } from '../services/geminiService';
import { translations } from '../utils/localization';
import { processAndDownloadAudio } from '../utils/audioUtils';
import { ScanText, Upload, Camera, Loader2, Send, Book, PenTool, RotateCcw, History, Trash2, X, PanelRightClose, PanelRightOpen, Volume2, Square, MessageCircle, HelpCircle, ChevronLeft, FileText, Download } from 'lucide-react';
import ChatBubble from '../components/ChatBubble';

interface OCRViewProps {
  language: Language;
  history: OCRRecord[];
  onSaveToHistory: (record: OCRRecord) => void;
  onClearHistory: () => void;
  onDeleteHistoryItem: (id: string) => void;
  addToast: (type: 'success' | 'error' | 'info', msg: string) => void;
}

const OCRView: React.FC<OCRViewProps> = ({ language, history, onSaveToHistory, onClearHistory, onDeleteHistoryItem, addToast }) => {
  const t = translations[language].ocr;
  const tCommon = translations[language].common;

  const [isProcessing, setIsProcessing] = useState(false);
  const [analysis, setAnalysis] = useState<OCRAnalysis | null>(null);
  const [imagePreview, setImagePreview] = useState<string | null>(null);
  const [chatMessages, setChatMessages] = useState<ChatMessage[]>([]);
  const [chatInput, setChatInput] = useState('');
  const [isChatLoading, setIsChatLoading] = useState(false);
  const [isHistoryOpen, setIsHistoryOpen] = useState(false);

  // Mobile Tab State: 'content' (Text/Vocab/Notes) vs 'tutor' (Chat)
  const [mobileTab, setMobileTab] = useState<'content' | 'tutor'>('content');

  // Audio State
  const [playingAudioId, setPlayingAudioId] = useState<string | null>(null); // 'main' or 'vocab-word'
  const [isDownloading, setIsDownloading] = useState(false);

  const audioContextRef = useRef<AudioContext | null>(null);
  const audioSourceRef = useRef<AudioBufferSourceNode | null>(null);
  const chatEndRef = useRef<HTMLDivElement | null>(null);
  const fileInputRef = useRef<HTMLInputElement | null>(null);
  const cameraInputRef = useRef<HTMLInputElement | null>(null);

  // Scroll to bottom of chat
  useEffect(() => {
    chatEndRef.current?.scrollIntoView({ behavior: 'smooth' });
  }, [chatMessages, mobileTab]);

  // Cleanup audio
  useEffect(() => {
    return () => {
      stopAudio();
    };
  }, [analysis]);

  const handleImageInput = (e: React.ChangeEvent<HTMLInputElement>) => {
    const file = e.target.files?.[0];
    if (!file) return;
    const reader = new FileReader();
    reader.onloadend = async () => {
      const base64 = reader.result as string;
      setImagePreview(base64);
      processImage(base64);
    };
    reader.readAsDataURL(file);
  };

  const processImage = async (base64: string) => {
    setIsProcessing(true);
    try {
      const result = await geminiService.extractAndAnalyzeText(base64, language);
      if (result) {
        setAnalysis(result);
        setChatMessages([{
          id: 'init',
          role: Role.MODEL,
          type: MessageType.TEXT,
          content: t.analyzedIntro.replace('$lang', result.detectedLanguage),
          timestamp: Date.now()
        }]);
        // Save to History
        const record: OCRRecord = {
          id: Date.now().toString(),
          timestamp: Date.now(),
          imagePreview: base64,
          analysis: result
        };
        onSaveToHistory(record);
        setIsHistoryOpen(false); // Collapse sidebar on new scan
        setMobileTab('content'); // Reset to source view
      } else {
        addToast('error', t.error);
      }
    } catch (e) {
      console.error(e);
      addToast('error', t.analysisFailed);
    } finally {
      setIsProcessing(false);
    }
  };

  const loadFromHistory = (record: OCRRecord) => {
    setAnalysis(record.analysis);
    setImagePreview(record.imagePreview);
    setIsHistoryOpen(false); // Collapse sidebar on load
    setMobileTab('content');
    setChatMessages([{
      id: 'init',
      role: Role.MODEL,
      type: MessageType.TEXT,
      content: t.historyIntro.replace('$lang', record.analysis.detectedLanguage),
      timestamp: Date.now()
    }]);
  };

  const stopAudio = () => {
    if (audioSourceRef.current) {
      audioSourceRef.current.stop();
      audioSourceRef.current = null;
    }
    setPlayingAudioId(null);
  };

  const playAudio = async (text: string, id: string) => {
    if (playingAudioId === id) {
      stopAudio();
      return;
    }
    if (playingAudioId) stopAudio();
    setPlayingAudioId(id);
    try {
      const audioBase64 = await geminiService.generateSpeech(text);
      if (audioBase64) {
        if (!audioContextRef.current) {
          audioContextRef.current = new (window.AudioContext || (window as any).webkitAudioContext)();
        }
        const ctx = audioContextRef.current;
        if (ctx.state === 'suspended') await ctx.resume();
        const buffer = await decodeAudioData(audioBase64, ctx);
        const source = ctx.createBufferSource();
        source.buffer = buffer;
        source.connect(ctx.destination);
        source.onended = () => setPlayingAudioId(null);
        source.start();
        audioSourceRef.current = source;
      } else {
        setPlayingAudioId(null);
      }
    } catch (e) {
      console.error(e);
      setPlayingAudioId(null);
    }
  };

  const handleDownload = async (text: string) => {
    if (!text.trim()) return;
    setIsDownloading(true);
    try {
      const audioBase64 = await geminiService.generateSpeech(text);
      if (audioBase64) {
        processAndDownloadAudio(audioBase64, `ocr_extract_${Date.now()}.wav`);
      }
    } catch (e) {
      console.error(e);
    } finally {
      setIsDownloading(false);
    }
  };

  const handleAskTutor = async () => {
    if (!chatInput.trim() || !analysis) return;
    const question = chatInput;
    setChatInput('');
    setIsChatLoading(true);
    const newHistory = [...chatMessages, {
      id: Date.now().toString(),
      role: Role.USER,
      type: MessageType.TEXT,
      content: question,
      timestamp: Date.now()
    }];
    setChatMessages(newHistory);
    const historyText = newHistory.slice(-4).map(m => `${m.role}: ${m.content}`).join('\n');
    try {
      const dummyLesson = {
        title: "OCR Scan",
        japaneseContent: analysis.extractedText,
        translation: analysis.summary,
        vocabulary: []
      };
      const answer = await geminiService.generateReadingTutorResponse(question, dummyLesson, historyText, language);
      setChatMessages(prev => [...prev, {
        id: (Date.now() + 1).toString(),
        role: Role.MODEL,
        type: MessageType.TEXT,
        content: answer,
        timestamp: Date.now()
      }]);
    } catch (e) {
      console.error(e);
    } finally {
      setIsChatLoading(false);
    }
  };

  const reset = () => {
    setAnalysis(null);
    setImagePreview(null);
    setChatMessages([]);
  };

  // History Sidebar Component
  const HistoryContent = () => (

    <div>
      {/* Header: title + clear-all (clear button wiring assumed) */}
      <div>
        <h3><History /> {t.history}</h3>
        {history.length > 0 && (
          <button onClick={onClearHistory}><Trash2 /></button>
        )}
      </div>

      {history.length === 0 && (
        <p>{t.emptyHistory}</p>
      )}

      {history.slice().reverse().map(rec => (
        <div
          key={rec.id}
          onClick={() => loadFromHistory(rec)}
          className="group flex items-start gap-3 p-3 rounded-xl bg-slate-50 border border-slate-100 hover:bg-white hover:shadow-md cursor-pointer transition-all relative"
        >
          {/* Image Thumbnail */}
          <img src={rec.imagePreview} alt="scan thumbnail" />

          {/* Content */}
          <div>
            <p>{rec.analysis.extractedText.substring(0, 30) || 'Text'}...</p>
            <p>{new Date(rec.timestamp).toLocaleDateString()} {new Date(rec.timestamp).toLocaleTimeString([], { hour: '2-digit', minute: '2-digit' })}</p>
            <p>{t.analyzedIntro.replace('$lang', rec.analysis.detectedLanguage)}</p>
          </div>

          {/* Delete Button */}
          <button onClick={(e) => { e.stopPropagation(); onDeleteHistoryItem(rec.id); }}>
            <X />
          </button>
        </div>
      ))}
    </div>
  );

  // LOADING
  if (isProcessing) {
    return (
      <div>
        {imagePreview && <img src={imagePreview} alt="processing" />}
        <Loader2 />
        <p>{t.processing}</p>
      </div>
    );
  }

  return (
    <div>
      {/* Main Content Area */}
      <div>
        {/* SETUP SCREEN */}
        {!analysis ? (
          <div>
            {/* Sticky Header */}
            <div>
              <h1>{t.title}</h1>
              <p>{t.subtitle}</p>
            </div>

            {/* Image pickers: hidden inputs wired to handleImageInput (trigger markup assumed) */}
            <input ref={fileInputRef} type="file" accept="image/*" hidden onChange={handleImageInput} />
            <input ref={cameraInputRef} type="file" accept="image/*" capture="environment" hidden onChange={handleImageInput} />
            <button onClick={() => fileInputRef.current?.click()}><Upload /></button>
            <button onClick={() => cameraInputRef.current?.click()}><Camera /></button>
          </div>
        ) : (
          // ANALYSIS SCREEN
          <div>
            {/* LEFT: Main Content (Image, Text, Notes, Vocab) */}
            <div>
              {/* Header (rescan + history toggle wiring assumed) */}
              <div>
                <h1>{t.title}</h1>
                <button onClick={reset}><RotateCcw /></button>
                <button onClick={() => setIsHistoryOpen(true)}><History /></button>
              </div>

              {/* Mobile Tab Switcher: 'content' shows the scan, 'tutor' shows the chat */}
              <div>
                <button onClick={() => setMobileTab('content')}>{t.extractedTitle}</button>
                <button onClick={() => setMobileTab('tutor')}>{t.tutorChat}</button>
              </div>

              {/* Content Scroll Area */}
              <div>
                {/* 1. Image & Extracted Text */}
                {imagePreview && <img src={imagePreview} alt="scan result" />}
                <div>
                  <h2>{t.extractedTitle}</h2>
                  {/* Play / stop and download audio for the extracted text */}
                  <button onClick={() => playAudio(analysis.extractedText, 'main')}>
                    {playingAudioId === 'main' ? <Square /> : <Volume2 />}
                  </button>
                  <button onClick={() => handleDownload(analysis.extractedText)} disabled={isDownloading}>
                    <Download />
                  </button>
                </div>
                <p>{analysis?.extractedText}</p>
                {/* 2. Summary */}
                <div>
                  <h2>{t.summaryHeader}</h2>
                  <p>{analysis?.summary}</p>
                </div>

                {/* 3. Vocabulary */}
                <div>
                  <h2>{t.vocabHeader}</h2>
                  {analysis?.vocabulary.map((v, i) => (
                    <div key={i}>
                      <span>{v.word} ({v.reading})</span>
                      {/* Per-word audio, keyed by the 'vocab-word' id convention */}
                      <button onClick={() => playAudio(v.word, `vocab-${v.word}`)}>
                        {playingAudioId === `vocab-${v.word}` ? <Square /> : <Volume2 />}
                      </button>
                      <p>{v.meaning}</p>
                    </div>
                  ))}
                </div>
                {/* 4. Grammar */}
                {analysis?.grammarPoints && analysis.grammarPoints.length > 0 && (
                  <div>
                    <h2>{t.grammarHeader}</h2>
                    {analysis.grammarPoints.map((g, i) => (
                      <div key={i}>
                        <p>{g.point}</p>
                        <p>{g.explanation}</p>
                      </div>
                    ))}
                  </div>
                )}
              </div>
            </div>
            {/* RIGHT: Tutor Chat (Tab: tutor) */}
            <div>
              {/* Header */}
              <h2>{t.tutorChat}</h2>

              {/* Chat Area */}
              <div>
                {/* ChatBubble props assumed: message + language */}
                {chatMessages.map(msg => (
                  <ChatBubble key={msg.id} message={msg} language={language} />
                ))}
                {isChatLoading && (
                  <p>{t.thinking}</p>
                )}
                {/* Sentinel for the auto-scroll effect */}
                <div ref={chatEndRef} />
              </div>

              {/* Input Area */}
              <div>
                <input
                  value={chatInput}
                  onChange={(e) => setChatInput(e.target.value)}
                  onKeyDown={(e) => e.key === 'Enter' && handleAskTutor()}
                />
                <button onClick={handleAskTutor} disabled={isChatLoading}>
                  <Send />
                </button>
              </div>
            </div>
          </div>
        )}
      </div>

      {/* Sidebar History (Desktop) */}
      <aside>
        <HistoryContent />
      </aside>

      {/* Mobile Drawer (Slide Over) */}
      {isHistoryOpen && (
        <>
          {/* Backdrop: click to close */}
          <div onClick={() => setIsHistoryOpen(false)} />
          <aside>
            <HistoryContent />
          </aside>
        </>
      )}
    </div>
  );
};

export default OCRView;