/**
 * inou Health Input Screen
 * Voice, Camera/OCR, and Text input for health data
 */
(function() {
|
|
'use strict';
|
|
|
|
// ========================================
// STATE
// ========================================
// Mutable UI state shared by every input mode.
const state = {
  mode: 'type',              // active input mode: 'type' | 'speak' | 'scan'
  inputValue: '',            // current contents of the free-text textarea
  isListening: false,        // true while speech recognition is running
  recognition: null,         // SpeechRecognition instance (null if unsupported)
  transcript: '',            // finalized speech-to-text output
  interimTranscript: '',     // in-progress (not yet final) speech results
  stream: null,              // active camera MediaStream (null when closed)
  facingMode: 'environment', // camera choice: 'environment' (rear) or 'user' (front)
  capturedImage: null,       // captured photo as a JPEG data URL
  ocrText: '',               // scan text accepted via useOCRText()
  scanResult: null // { type: 'barcode'|'text'|'empty'|'error', format?, value }
};
// ========================================
// DOM ELEMENTS
// ========================================
// Cache of DOM element references, populated once by initElements().
const elements = {};
// Look up and cache every DOM node the screen uses.
function initElements() {
  // Elements fetched by id, keyed by the property name used throughout this file.
  const byId = {
    // Text input
    textInput: 'text-input',
    charCount: 'char-count',
    // Voice input
    micBtn: 'mic-btn',
    micStatus: 'mic-status',
    transcriptArea: 'transcript-area',
    transcriptText: 'transcript-text',
    transcriptInterim: 'transcript-interim',
    voiceUnsupported: 'voice-unsupported',
    // Camera / scan
    cameraContainer: 'camera-container',
    cameraStart: 'camera-start',
    startCameraBtn: 'start-camera-btn',
    cameraViewfinder: 'camera-viewfinder',
    cameraVideo: 'camera-video',
    switchCameraBtn: 'switch-camera-btn',
    captureBtn: 'capture-btn',
    closeCameraBtn: 'close-camera-btn',
    photoPreview: 'photo-preview',
    previewImg: 'preview-img',
    retakeBtn: 'retake-btn',
    processBtn: 'process-btn',
    ocrResult: 'ocr-result',
    ocrText: 'ocr-text',
    ocrEditBtn: 'ocr-edit-btn',
    scanAnotherBtn: 'scan-another-btn',
    useOcrBtn: 'use-ocr-btn',
    ocrProcessing: 'ocr-processing',
    cameraError: 'camera-error',
    cameraErrorMsg: 'camera-error-msg',
    retryCameraBtn: 'retry-camera-btn',
    captureCanvas: 'capture-canvas',
    // Submit
    submitBtn: 'submit-btn'
  };
  for (const [key, id] of Object.entries(byId)) {
    elements[key] = document.getElementById(id);
  }

  // Collections and class-based lookups.
  elements.segmentBtns = document.querySelectorAll('.segment-btn');
  elements.modePanels = document.querySelectorAll('.mode-panel');
  elements.ocrHeader = document.querySelector('.ocr-header');
}
// ========================================
// MODE SWITCHING
// ========================================

/**
 * Activate an input mode ('type' | 'speak' | 'scan'): highlight the matching
 * segment button, show the matching panel, and release any microphone/camera
 * resources held by the mode being left. Ends by refreshing the submit button.
 */
function switchMode(mode) {
  state.mode = mode;

  // Highlight the segment button and panel whose data attribute matches.
  for (const btn of elements.segmentBtns) {
    btn.classList.toggle('active', btn.dataset.mode === mode);
  }
  for (const panel of elements.modePanels) {
    panel.classList.toggle('active', panel.dataset.panel === mode);
  }

  // Release hardware belonging to the mode we are leaving.
  if (state.isListening && mode !== 'speak') {
    stopListening();
  }
  if (state.stream && mode !== 'scan') {
    stopCamera();
  }

  updateSubmitButton();
}
// ========================================
// TEXT INPUT
// ========================================

// Wire up the free-text textarea.
function initTextInput() {
  const input = elements.textInput;
  input.addEventListener('input', handleTextInput);
  input.addEventListener('focus', handleTextFocus);
}
/**
 * Keep state, the character counter, the textarea height and the submit
 * button in sync with what the user has typed.
 */
function handleTextInput(e) {
  const textarea = e.target;
  state.inputValue = textarea.value;
  elements.charCount.textContent = textarea.value.length;
  autoResize(textarea);
  updateSubmitButton();
}
// Focusing the textarea implies the user wants 'type' mode.
function handleTextFocus() {
  if (state.mode === 'type') return;
  switchMode('type');
}
/**
 * Grow a textarea to fit its content.
 *
 * @param {HTMLTextAreaElement} textarea - element to resize
 * @param {number} [minHeight=140] - minimum height in px; the default matches
 *   the original hard-coded floor used by the type-mode panel
 */
function autoResize(textarea, minHeight = 140) {
  // Collapse first so scrollHeight reflects the content, not the old height.
  textarea.style.height = 'auto';
  textarea.style.height = Math.max(minHeight, textarea.scrollHeight) + 'px';
}
// ========================================
// VOICE INPUT (Web Speech API)
// ========================================

/**
 * Set up speech recognition. When the Web Speech API is unavailable, hides
 * the mic UI and shows the "unsupported" notice instead.
 */
function initVoiceInput() {
  // Check for support (Chrome/Safari expose the webkit-prefixed constructor).
  const SpeechRecognition = window.SpeechRecognition || window.webkitSpeechRecognition;

  if (!SpeechRecognition) {
    elements.micBtn.style.display = 'none';
    elements.micStatus.style.display = 'none';
    elements.voiceUnsupported.style.display = 'block';
    return;
  }

  // Initialize recognition: keep listening across pauses and surface interim
  // (not yet final) results so the UI can show live feedback while speaking.
  state.recognition = new SpeechRecognition();
  state.recognition.continuous = true;
  state.recognition.interimResults = true;
  state.recognition.lang = navigator.language || 'en-US';

  // Event handlers
  state.recognition.onstart = () => {
    state.isListening = true;
    elements.micBtn.classList.add('listening');
    elements.micStatus.textContent = 'Listening...';
  };

  state.recognition.onend = () => {
    state.isListening = false;
    elements.micBtn.classList.remove('listening');
    elements.micStatus.textContent = 'Tap to start speaking';

    // Finalize any interim transcript so nothing the user said is lost when
    // recognition stops mid-phrase.
    if (state.interimTranscript) {
      state.transcript += state.interimTranscript;
      state.interimTranscript = '';
      updateTranscriptDisplay();
    }
    updateSubmitButton();
  };

  state.recognition.onerror = (event) => {
    console.error('Speech recognition error:', event.error);
    state.isListening = false;
    elements.micBtn.classList.remove('listening');

    // Translate the two common error codes into actionable guidance.
    let errorMsg = 'Tap to try again';
    if (event.error === 'not-allowed') {
      errorMsg = 'Microphone access denied. Check permissions.';
    } else if (event.error === 'no-speech') {
      errorMsg = 'No speech detected. Tap to try again.';
    }
    elements.micStatus.textContent = errorMsg;
  };

  state.recognition.onresult = (event) => {
    let interim = '';
    let final = '';

    // resultIndex marks the first result changed by this event; earlier
    // results were already folded into state.transcript.
    for (let i = event.resultIndex; i < event.results.length; i++) {
      const transcript = event.results[i][0].transcript;
      if (event.results[i].isFinal) {
        final += transcript + ' ';
      } else {
        interim += transcript;
      }
    }

    // Final text accumulates; interim text is replaced wholesale each event.
    if (final) {
      state.transcript += final;
    }
    state.interimTranscript = interim;
    updateTranscriptDisplay();
    updateSubmitButton();
  };

  // Click handler
  elements.micBtn.addEventListener('click', toggleListening);
}
// Mic button handler: toggle speech recognition on/off.
function toggleListening() {
  const action = state.isListening ? stopListening : startListening;
  action();
}
// Begin speech recognition; a no-op when the API is unsupported.
function startListening() {
  const rec = state.recognition;
  if (rec === null) {
    return;
  }
  try {
    rec.start();
  } catch (e) {
    // start() throws an InvalidStateError if recognition is already
    // running — deliberately ignored.
    console.log('Recognition already started');
  }
}
// Stop speech recognition; a no-op when the API is unsupported.
function stopListening() {
  const rec = state.recognition;
  if (rec === null) {
    return;
  }
  try {
    rec.stop();
  } catch (e) {
    // stop() on an inactive recognizer throws — deliberately ignored.
    console.log('Recognition already stopped');
  }
}
// Mirror transcript state into the transcript panel; finalized text and the
// interim (still-changing) text live in separate elements.
function updateTranscriptDisplay() {
  const { transcript, interimTranscript } = state;
  const hasContent = Boolean(transcript || interimTranscript);
  elements.transcriptArea.classList.toggle('has-content', hasContent);
  elements.transcriptText.textContent = transcript;
  elements.transcriptInterim.textContent = interimTranscript;
}
// ========================================
// CAMERA / OCR
// ========================================

// Wire every camera/scan control to its click handler.
function initCamera() {
  const bindings = [
    [elements.startCameraBtn, startCamera],
    [elements.switchCameraBtn, switchCamera],
    [elements.captureBtn, capturePhoto],
    [elements.closeCameraBtn, closeCamera],
    [elements.retakeBtn, retakePhoto],
    [elements.processBtn, processOCR],
    [elements.ocrEditBtn, toggleOCREdit],
    [elements.scanAnotherBtn, scanAnother],
    [elements.useOcrBtn, useOCRText],
    [elements.retryCameraBtn, startCamera]
  ];
  for (const [el, handler] of bindings) {
    el.addEventListener('click', handler);
  }
}
/**
 * Open the camera (per state.facingMode) and show the live viewfinder.
 * On failure, swaps in a human-readable error panel instead.
 */
async function startCamera() {
  // Show only the viewfinder while we ask for the stream.
  hideAllCameraStates();
  elements.cameraViewfinder.style.display = 'block';

  try {
    const video = {
      facingMode: state.facingMode,
      width: { ideal: 1920 },
      height: { ideal: 1080 }
    };
    state.stream = await navigator.mediaDevices.getUserMedia({ video });
    elements.cameraVideo.srcObject = state.stream;
    await elements.cameraVideo.play();
  } catch (err) {
    console.error('Camera error:', err);
    showCameraError(getCameraErrorMessage(err));
  }
}
/**
 * Map a getUserMedia rejection to a user-facing message.
 *
 * @param {Error} err - rejection from navigator.mediaDevices.getUserMedia
 * @returns {string} message suitable for display
 */
function getCameraErrorMessage(err) {
  switch (err.name) {
    case 'NotAllowedError':
    case 'PermissionDeniedError':
      return 'Camera access denied. Please allow camera permissions.';
    case 'NotFoundError':
    case 'DevicesNotFoundError':
      return 'No camera found on this device.';
    case 'NotReadableError':
    case 'TrackStartError':
      return 'Camera is in use by another application.';
    default:
      return 'Could not access camera. Please try again.';
  }
}
// Display the camera error panel with the given message.
function showCameraError(message) {
  hideAllCameraStates();
  const { cameraError, cameraErrorMsg } = elements;
  cameraErrorMsg.textContent = message;
  cameraError.style.display = 'flex';
}
// Stop all camera tracks and detach the stream from the video element.
function stopCamera() {
  const stream = state.stream;
  if (stream) {
    for (const track of stream.getTracks()) {
      track.stop();
    }
    state.stream = null;
  }
  elements.cameraVideo.srcObject = null;
}
// Tear the camera down and return to the initial "start camera" screen.
function closeCamera() {
  stopCamera();
  hideAllCameraStates();
  const startPanel = elements.cameraStart;
  startPanel.style.display = 'flex';
}
// Flip between rear ('environment') and front ('user') cameras and restart.
async function switchCamera() {
  const next = state.facingMode === 'environment' ? 'user' : 'environment';
  state.facingMode = next;
  stopCamera();
  await startCamera();
}
/**
 * Capture the current video frame onto the hidden canvas, store it as a JPEG
 * data URL in state.capturedImage, then stop the camera and show the preview.
 */
function capturePhoto() {
  const video = elements.cameraVideo;
  const canvas = elements.captureCanvas;

  // Guard: before the first frame arrives, videoWidth/videoHeight are 0 and
  // drawing would yield an empty image ("data:,"). Keep the viewfinder up
  // so the user can simply tap capture again.
  if (!video.videoWidth || !video.videoHeight) {
    return;
  }

  // Match the canvas to the native frame size so nothing is scaled/cropped.
  canvas.width = video.videoWidth;
  canvas.height = video.videoHeight;

  // Draw current frame
  const ctx = canvas.getContext('2d');
  ctx.drawImage(video, 0, 0);

  // JPEG at 0.9 quality keeps uploads small without hurting OCR accuracy.
  state.capturedImage = canvas.toDataURL('image/jpeg', 0.9);

  // Stop camera and show preview
  stopCamera();
  hideAllCameraStates();
  elements.previewImg.src = state.capturedImage;
  elements.photoPreview.style.display = 'block';
}
// Discard the captured photo and reopen the camera viewfinder.
function retakePhoto() {
  state.capturedImage = null;
  startCamera();
}
// ========================================
// BARCODE DETECTION
// ========================================

/**
 * Try to decode a barcode with the browser-native BarcodeDetector API
 * (Chrome/Edge). Returns null when the API is missing entirely, otherwise
 * { found: true, format, value } or { found: false }.
 */
async function detectBarcodeNative(imageSource) {
  if (!('BarcodeDetector' in window)) {
    return null;
  }

  try {
    // Ask the engine which formats it can decode, then scan for all of them.
    const formats = await BarcodeDetector.getSupportedFormats();
    const detector = new BarcodeDetector({ formats });
    const [first] = await detector.detect(imageSource);
    if (first) {
      return {
        found: true,
        format: first.format,
        value: first.rawValue
      };
    }
  } catch (err) {
    console.log('Native barcode detection failed:', err);
  }

  return { found: false };
}
/**
 * Fall back to server-side barcode detection: POST the captured image to
 * /api/barcode. Returns { found: true, format, value } on a hit, otherwise
 * { found: false } — including on any network or HTTP failure.
 */
async function detectBarcodeBackend(blob) {
  try {
    const body = new FormData();
    body.append('image', blob, 'capture.jpg');

    const response = await fetch('/api/barcode', { method: 'POST', body });

    if (response.ok) {
      const result = await response.json();
      if (result.found && result.value) {
        return {
          found: true,
          format: result.format || 'unknown',
          value: result.value
        };
      }
    }
  } catch (err) {
    console.log('Backend barcode detection failed:', err);
  }

  return { found: false };
}
/**
 * Process the captured photo through the pipeline: barcode (native, then
 * backend) → OCR via /api/ocr → nothing. Stores the outcome in
 * state.scanResult as { type: 'barcode'|'text'|'empty'|'error', format?, value }
 * and renders it with showScanResult().
 */
async function processImage() {
  if (!state.capturedImage) return;

  hideAllCameraStates();
  elements.ocrProcessing.style.display = 'flex';
  updateProcessingStatus('Scanning for barcode...');

  try {
    const blob = dataURLtoBlob(state.capturedImage);

    // Native BarcodeDetector needs an <img> source. Reject on decode failure
    // so a corrupt capture cannot leave this promise pending forever (the
    // original awaited only onload and would hang).
    const img = new Image();
    await new Promise((resolve, reject) => {
      img.onload = resolve;
      img.onerror = () => reject(new Error('Captured image failed to decode'));
      img.src = state.capturedImage;
    });

    // Step 1: Try native BarcodeDetector API (Chrome/Edge)
    let barcodeResult = await detectBarcodeNative(img);

    // Step 2: If native fails, try backend barcode detection
    if (!barcodeResult || !barcodeResult.found) {
      barcodeResult = await detectBarcodeBackend(blob);
    }

    // Step 3: If barcode found, show it
    if (barcodeResult && barcodeResult.found) {
      state.scanResult = {
        type: 'barcode',
        format: barcodeResult.format,
        value: barcodeResult.value
      };
      showScanResult();
      return;
    }

    // Step 4: No barcode, try OCR
    updateProcessingStatus('Extracting text...');

    const formData = new FormData();
    formData.append('image', blob, 'capture.jpg');

    const response = await fetch('/api/ocr', {
      method: 'POST',
      body: formData
    });

    if (response.ok) {
      const result = await response.json();
      if (result.text && result.text.trim()) {
        state.scanResult = {
          type: 'text',
          value: result.text
        };
        showScanResult();
        return;
      }
    }

    // Step 5: Nothing found
    state.scanResult = {
      type: 'empty',
      value: ''
    };
    showScanResult();

  } catch (err) {
    console.error('Image processing error:', err);
    state.scanResult = {
      type: 'error',
      value: 'Could not process image. Please try again.'
    };
    showScanResult();
  }
}
// Update the message shown inside the processing-spinner panel.
function updateProcessingStatus(message) {
  const statusEl = elements.ocrProcessing.querySelector('p');
  if (statusEl !== null) {
    statusEl.textContent = message;
  }
}
/**
 * Render state.scanResult into the result panel. Each result type gets its
 * own header badge; barcode/text results also get an inline edit button.
 * Note: assigning ocrHeader.innerHTML destroys the edit button that
 * initCamera() originally bound, so a fresh listener is attached at the end.
 */
function showScanResult() {
  hideAllCameraStates();

  const result = state.scanResult;

  if (result.type === 'barcode') {
    // Show barcode result
    elements.ocrHeader.innerHTML = `
      <div class="result-type-badge barcode">
        <svg viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" width="16" height="16">
          <path d="M3 5v14"/>
          <path d="M6 5v14"/>
          <path d="M10 5v14"/>
          <path d="M13 5v14"/>
          <path d="M17 5v14"/>
          <path d="M21 5v14"/>
        </svg>
        Barcode (${formatBarcodeType(result.format)})
      </div>
      <button class="btn-icon" id="ocr-edit-btn" aria-label="Edit">
        <svg viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" width="18" height="18">
          <path d="M17 3a2.85 2.85 0 1 1 4 4L7.5 20.5 2 22l1.5-5.5L17 3z"/>
        </svg>
      </button>
    `;
    elements.ocrText.textContent = result.value;
    // barcode-value styles the value as a code-like string.
    elements.ocrText.classList.add('barcode-value');

  } else if (result.type === 'text') {
    // Show OCR result
    elements.ocrHeader.innerHTML = `
      <div class="result-type-badge text">
        <svg viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" width="16" height="16">
          <path d="M14 2H6a2 2 0 0 0-2 2v16a2 2 0 0 0 2 2h12a2 2 0 0 0 2-2V8z"/>
          <polyline points="14 2 14 8 20 8"/>
          <line x1="16" y1="13" x2="8" y2="13"/>
          <line x1="16" y1="17" x2="8" y2="17"/>
        </svg>
        Extracted Text
      </div>
      <button class="btn-icon" id="ocr-edit-btn" aria-label="Edit">
        <svg viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" width="18" height="18">
          <path d="M17 3a2.85 2.85 0 1 1 4 4L7.5 20.5 2 22l1.5-5.5L17 3z"/>
        </svg>
      </button>
    `;
    elements.ocrText.textContent = result.value;
    elements.ocrText.classList.remove('barcode-value');

  } else if (result.type === 'empty') {
    // Nothing detected — offer retry guidance instead of a value.
    elements.ocrHeader.innerHTML = `
      <div class="result-type-badge empty">No content detected</div>
    `;
    elements.ocrText.textContent = 'Try adjusting lighting or holding the camera closer to the document.';
    elements.ocrText.classList.remove('barcode-value');

  } else {
    // Error
    elements.ocrHeader.innerHTML = `
      <div class="result-type-badge error">Error</div>
    `;
    elements.ocrText.textContent = result.value;
    elements.ocrText.classList.remove('barcode-value');
  }

  // Results start read-only; the edit button toggles contentEditable.
  elements.ocrText.contentEditable = 'false';
  elements.ocrResult.style.display = 'block';

  // Re-bind edit button
  const editBtn = document.getElementById('ocr-edit-btn');
  if (editBtn) {
    editBtn.addEventListener('click', toggleOCREdit);
  }

  updateSubmitButton();
}
/**
 * Human-readable label for a barcode format identifier.
 *
 * @param {string} [format] - raw format id from BarcodeDetector or the
 *   backend, e.g. 'ean_13', 'qr_code'
 * @returns {string} display name; unknown ids pass through unchanged, a
 *   missing id yields 'Unknown'
 */
function formatBarcodeType(format) {
  // Map (not a plain object) so inherited Object.prototype keys such as
  // 'constructor' can never leak out as a "label".
  const types = new Map([
    ['upc_a', 'UPC-A'],
    ['upc_e', 'UPC-E'],
    ['ean_13', 'EAN-13'],
    ['ean_8', 'EAN-8'],
    ['code_128', 'Code 128'],
    ['code_39', 'Code 39'],
    ['code_93', 'Code 93'],
    ['codabar', 'Codabar'],
    ['itf', 'ITF'],
    ['qr_code', 'QR Code'],
    ['data_matrix', 'Data Matrix'],
    ['aztec', 'Aztec'],
    ['pdf417', 'PDF417']
  ]);
  return types.get(format) || format || 'Unknown';
}
// Legacy function name for backwards compatibility — older markup/handlers
// call processOCR; the real pipeline lives in processImage().
async function processOCR() {
  return processImage();
}
// Toggle in-place editing of the scan-result text; focus it when enabling.
function toggleOCREdit() {
  const el = elements.ocrText;
  const willEdit = el.contentEditable !== 'true';
  el.contentEditable = willEdit ? 'true' : 'false';
  if (willEdit) {
    el.focus();
  }
}
// Throw away the current scan and restart the camera for a new capture.
function scanAnother() {
  state.capturedImage = null;
  state.ocrText = '';
  startCamera();
}
/**
 * Accept the scan result (including any in-place edits): copy the text into
 * the 'type' panel's textarea and switch to that mode so the user can refine
 * or submit it.
 */
function useOCRText() {
  // The contenteditable div holds the possibly user-edited text.
  const text = elements.ocrText.textContent || '';

  // Keep the stored scan result in sync with the edits.
  if (state.scanResult) {
    state.scanResult.value = text;
  }
  state.ocrText = text;

  // Hand the text over to type mode.
  switchMode('type');
  const input = elements.textInput;
  input.value = text;
  state.inputValue = text;
  elements.charCount.textContent = text.length;
  autoResize(input);
  updateSubmitButton();
}
// Hide every camera sub-panel; callers then show exactly the one they need.
function hideAllCameraStates() {
  const panels = [
    elements.cameraStart,
    elements.cameraViewfinder,
    elements.photoPreview,
    elements.ocrResult,
    elements.ocrProcessing,
    elements.cameraError
  ];
  for (const panel of panels) {
    panel.style.display = 'none';
  }
}
/**
 * Convert a base64 data URL (e.g. from canvas.toDataURL) into a Blob.
 *
 * @param {string} dataURL - 'data:<mime>;base64,<payload>' string
 * @returns {Blob} binary blob tagged with the URL's MIME type; falls back to
 *   'application/octet-stream' when the header carries no '<mime>;' part
 *   (the original code threw a TypeError on such URLs)
 */
function dataURLtoBlob(dataURL) {
  const [header, payload] = dataURL.split(',');
  const mimeMatch = header.match(/:(.*?);/);
  const mime = mimeMatch ? mimeMatch[1] : 'application/octet-stream';
  const bstr = atob(payload);
  let n = bstr.length;
  const u8arr = new Uint8Array(n);
  while (n--) {
    u8arr[n] = bstr.charCodeAt(n);
  }
  return new Blob([u8arr], { type: mime });
}
// ========================================
// SUBMIT
// ========================================

// Wire the submit button to the submission handler.
function initSubmit() {
  elements.submitBtn.addEventListener('click', handleSubmit);
}
/**
 * Enable the submit button only when the active mode has usable content.
 * Scan results count only when they carry non-blank text and are not the
 * 'empty'/'error' placeholder results.
 */
function updateSubmitButton() {
  const hasContent = (() => {
    if (state.mode === 'type') {
      return state.inputValue.trim().length > 0;
    }
    if (state.mode === 'speak') {
      return state.transcript.trim().length > 0;
    }
    if (state.mode === 'scan') {
      const r = state.scanResult;
      return Boolean(
        r &&
        r.value &&
        r.value.trim().length > 0 &&
        r.type !== 'empty' &&
        r.type !== 'error'
      );
    }
    return false;
  })();

  elements.submitBtn.disabled = !hasContent;
}
/**
 * Collect the active mode's content, build the submission payload, hand it
 * to the host app (custom DOM event plus WebView bridges), then reset the UI.
 */
function handleSubmit() {
  let content = '';
  let scanType = null;
  let scanFormat = null;

  switch (state.mode) {
    case 'type':
      content = state.inputValue.trim();
      break;
    case 'speak':
      content = state.transcript.trim();
      break;
    case 'scan':
      if (state.scanResult) {
        // Prefer the on-screen (possibly user-edited) text over the raw result.
        content = (elements.ocrText.textContent || state.scanResult.value || '').trim();
        scanType = state.scanResult.type;
        scanFormat = state.scanResult.format || null;
      }
      break;
  }

  if (!content) return;

  // Build payload
  const payload = {
    mode: state.mode,
    content: content,
    timestamp: new Date().toISOString()
  };

  // Add barcode-specific fields
  if (state.mode === 'scan' && scanType === 'barcode') {
    payload.scanType = 'barcode';
    payload.barcodeFormat = scanFormat;
    payload.barcodeValue = content;
  } else if (state.mode === 'scan') {
    payload.scanType = 'text';
  }

  // Dispatch custom event for Flutter WebView or parent frame
  const event = new CustomEvent('inou-input-submit', { detail: payload });
  window.dispatchEvent(event);

  // Also try postMessage for WebView communication
  // (flutter_inappwebview bridge on Flutter, messageHandlers on iOS WKWebView).
  if (window.flutter_inappwebview) {
    window.flutter_inappwebview.callHandler('onInputSubmit', payload);
  } else if (window.webkit && window.webkit.messageHandlers && window.webkit.messageHandlers.inputHandler) {
    window.webkit.messageHandlers.inputHandler.postMessage(payload);
  }

  // Log for development
  console.log('Input submitted:', payload);

  // Reset state
  resetInput();
}
// Clear all captured input (text, transcript, scan) and the related UI.
function resetInput() {
  Object.assign(state, {
    inputValue: '',
    transcript: '',
    interimTranscript: '',
    ocrText: '',
    capturedImage: null,
    scanResult: null
  });

  elements.textInput.value = '';
  elements.charCount.textContent = '0';
  elements.transcriptText.textContent = '';
  elements.transcriptInterim.textContent = '';
  elements.transcriptArea.classList.remove('has-content');
  elements.ocrText.classList.remove('barcode-value');

  // In scan mode, return to the initial "start camera" screen.
  if (state.mode === 'scan') {
    hideAllCameraStates();
    elements.cameraStart.style.display = 'flex';
  }

  updateSubmitButton();
}
// ========================================
// SEGMENT CONTROL HANDLERS
// ========================================

// Each segment button switches to the mode named by its data-mode attribute.
function initSegmentControl() {
  for (const btn of elements.segmentBtns) {
    btn.addEventListener('click', () => switchMode(btn.dataset.mode));
  }
}
// ========================================
// INIT
// ========================================

// Entry point: cache DOM nodes first, then wire every input mode and the
// submit button, and set the submit button's initial (disabled) state.
function init() {
  initElements();
  initSegmentControl();
  initTextInput();
  initVoiceInput();
  initCamera();
  initSubmit();
  updateSubmitButton();
}
// Run init() once the DOM is ready — immediately if it already is.
const domReady = document.readyState !== 'loading';
if (domReady) {
  init();
} else {
  document.addEventListener('DOMContentLoaded', init);
}
})();
|