// Source note (scraped page chrome, preserved as comments so the file parses):
// Author: Tigersman
// Commit 93135d9 (verified): "Access camera is not fully working"
document.addEventListener('DOMContentLoaded', function() {
// Initialize empty results array
const scanResults = [];
// Camera and storage access variables
let cameraStream = null;
let isCameraActive = false;
const cameraVideo = document.getElementById('cameraStream');
const captureCanvas = document.getElementById('captureCanvas');
const placeholderImage = document.getElementById('placeholderImage');
const uploadButton = document.getElementById('uploadButton');
const captureButton = document.getElementById('captureButton');
const textPreview = document.querySelector('.text-preview');
// Tab switching functionality
const tabButtons = document.querySelectorAll('.tab-button');
const tabContents = document.querySelectorAll('.tab-content');
// Initialize tabs - show home tab by default
document.getElementById('home').classList.add('active');
tabButtons.forEach(button => {
button.addEventListener('click', function(e) {
e.preventDefault();
const tabId = this.getAttribute('data-tab');
// Remove active class from all tabs and buttons
tabButtons.forEach(btn => btn.classList.remove('active'));
tabContents.forEach(content => content.classList.remove('active'));
// Add active class to clicked tab and button
this.classList.add('active');
document.getElementById(tabId).classList.add('active');
// Stop camera when leaving home tab
if (tabId !== 'home') {
stopCamera();
}
});
});
// Initialize the first tab as active
const initialTab = document.querySelector('.tab-button[data-tab="home"]');
if (initialTab) {
initialTab.classList.add('active');
}
// Request camera permission and start a live stream on the preview <video>.
// Tries the rear (environment) camera first, then the front (user) camera,
// then any available camera. Returns true on success; on any failure it
// alerts the user, clears the active flag, and returns false.
async function initializeCamera() {
  try {
    // Release any stream that is already running.
    stopCamera();

    // getUserMedia is only exposed in secure contexts (HTTPS/localhost).
    // Without this guard the call below throws a TypeError instead of
    // reaching the friendly error message — a common cause of "camera
    // not working" reports when the page is served over plain HTTP.
    if (!navigator.mediaDevices || !navigator.mediaDevices.getUserMedia) {
      throw new Error('Camera API unavailable (requires a secure context)');
    }

    // Preference order: rear camera, front camera, any camera.
    const constraints = [
      { video: { facingMode: 'environment' } },
      { video: { facingMode: 'user' } },
      { video: true },
    ];

    let stream = null;
    for (const constraint of constraints) {
      try {
        stream = await navigator.mediaDevices.getUserMedia(constraint);
        break;
      } catch (err) {
        console.warn(`Camera constraint failed:`, constraint, err);
      }
    }
    if (!stream) {
      throw new Error('No camera available');
    }

    cameraStream = stream;
    cameraVideo.srcObject = stream;
    isCameraActive = true;

    // Wait until the video's dimensions are known. If metadata is already
    // loaded (readyState >= HAVE_METADATA) resolve immediately — otherwise
    // the onloadedmetadata handler would never fire and this promise would
    // hang forever.
    if (cameraVideo.readyState < 1) {
      await new Promise((resolve) => {
        cameraVideo.onloadedmetadata = resolve;
      });
    }

    // Ensure frames are actually flowing: a <video> element without an
    // autoplay attribute stays paused, and drawing a paused stream to the
    // canvas produces black captures. play() failures (autoplay policy)
    // are logged but not fatal.
    await cameraVideo.play().catch((err) => {
      console.warn('Video playback could not start automatically:', err);
    });

    return true;
  } catch (error) {
    console.error('Error accessing camera:', error);
    alert('Camera access is required for capturing images. Please allow camera permissions and ensure a camera is connected.');
    isCameraActive = false;
    return false;
  }
}
// Tear down the active camera stream (if any) and restore the placeholder
// image in place of the live preview.
function stopCamera() {
  if (cameraStream !== null) {
    // Stop every track so the browser releases the hardware (and the
    // recording indicator turns off).
    for (const track of cameraStream.getTracks()) {
      track.stop();
    }
    cameraStream = null;
  }
  isCameraActive = false;
  cameraVideo.style.display = 'none';
  placeholderImage.style.display = 'block';
}
// Capture a single frame from the camera into the canvas, encode it as a
// JPEG data URL, and hand it to processCapturedImage(). Starts the camera
// on demand and always releases it afterwards.
async function captureImage() {
  // Lazily start the camera the first time capture is requested.
  if (!isCameraActive) {
    const initialized = await initializeCamera();
    if (!initialized) {
      alert('Failed to access camera. Please check permissions.');
      return;
    }
  }
  try {
    // Swap the placeholder for the live preview.
    placeholderImage.style.display = 'none';
    cameraVideo.style.display = 'block';

    // Give the sensor a moment to adjust exposure/focus.
    await new Promise((resolve) => setTimeout(resolve, 200));

    // Wait until at least one frame has been decoded
    // (readyState >= HAVE_CURRENT_DATA); drawing earlier produces a
    // black image. Bounded (~2.5 s) so a stalled stream cannot hang.
    for (let tries = 0; cameraVideo.readyState < 2 && tries < 25; tries++) {
      await new Promise((resolve) => setTimeout(resolve, 100));
    }

    // Match the canvas to the actual frame size; the fallbacks guard
    // against a stream that never reported its dimensions.
    captureCanvas.width = cameraVideo.videoWidth || 640;
    captureCanvas.height = cameraVideo.videoHeight || 480;
    const context = captureCanvas.getContext('2d');
    context.drawImage(cameraVideo, 0, 0, captureCanvas.width, captureCanvas.height);

    // Encode as JPEG at 80% quality and process the result.
    const imageData = captureCanvas.toDataURL('image/jpeg', 0.8);
    processCapturedImage(imageData);
  } catch (error) {
    console.error('Error capturing image:', error);
    alert('Failed to capture image. Please try again.');
  } finally {
    // Release the camera whether or not the capture succeeded.
    stopCamera();
  }
}
// Render a captured image plus the (simulated) OCR output into the preview
// pane. The displayed text is derived from the stored result object, so the
// record pushed to scanResults and the UI can never drift apart (previously
// the same sample text was hard-coded in three separate places).
function processCapturedImage(imageData) {
  // Show the captured frame in the preview container.
  const imagePreviewContainer = document.querySelector('.image-preview-container');
  const previewImage = document.getElementById('preview-image');
  previewImage.src = imageData;
  imagePreviewContainer.style.display = 'block';

  // Record the simulated OCR result.
  const result = {
    type: 'scan',
    content: 'The quick brown fox jumps over the lazy dog\n1234567890',
    confidence: '98.7%',
    timestamp: new Date().toISOString(),
    imageData: imageData
  };
  scanResults.push(result);

  // Escape text before inserting via innerHTML — harmless for the canned
  // sample, but required once real OCR output flows through here.
  const escapeHtml = (s) => s
    .replace(/&/g, '&amp;')
    .replace(/</g, '&lt;')
    .replace(/>/g, '&gt;');
  const ocrHtml = result.content
    .split('\n')
    .map((line) => `<p class="ocr-text">${escapeHtml(line)}</p>`)
    .join('\n');

  // Results pane: header, recognized lines, confidence, timestamp.
  textPreview.innerHTML = `
    <div class="result-header">
      <div class="result-icon">
        <i class="fas fa-camera"></i>
      </div>
      <h4>Scan Results</h4>
    </div>
    <div class="result-content">
      ${ocrHtml}
    </div>
    <div class="result-meta">
      <span class="confidence">Confidence: ${result.confidence}</span>
    </div>
    <p class="timestamp">Processed at ${new Date().toLocaleTimeString()}</p>
  `;

  // Append a plain-text copy of the recognized text below the results.
  // <br> keeps the line breaks visible (raw newlines collapse in HTML).
  const processedTextSection = document.createElement('div');
  processedTextSection.className = 'processed-text-section';
  processedTextSection.innerHTML = `
    <h4>Processed Text</h4>
    <div class="processed-text-content">
      ${escapeHtml(result.content).replace(/\n/g, '<br>')}
    </div>
  `;
  textPreview.appendChild(processedTextSection);
}
// Capture a frame whenever the capture button is pressed. captureImage()
// handles its own errors, so the returned promise needs no extra handler.
captureButton.addEventListener('click', () => captureImage());
// Upload button: prompt for an image file, then show simulated OCR output.
uploadButton.addEventListener('click', function () {
  // Transient, hidden file picker. It is removed again after use so that
  // repeated clicks do not leak one <input> element per click into the DOM
  // (the original never removed it).
  const fileInput = document.createElement('input');
  fileInput.type = 'file';
  fileInput.accept = 'image/*';
  fileInput.style.display = 'none';
  document.body.appendChild(fileInput);

  fileInput.addEventListener('change', function (e) {
    const file = e.target.files[0];
    // Clean up the temporary input regardless of whether a file was chosen.
    fileInput.remove();
    if (!file) {
      return;
    }

    // Release the camera before showing upload progress.
    stopCamera();
    textPreview.innerHTML = `
      <div class="upload-animation">
        <div class="progress-bar">
          <div class="progress"></div>
        </div>
        <p><i class="fas fa-cloud-upload-alt fa-pulse"></i> Processing image...</p>
      </div>
    `;

    const reader = new FileReader();
    // Surface read failures instead of silently leaving the progress bar up.
    reader.onerror = function () {
      console.error('Failed to read uploaded file:', reader.error);
      alert('Failed to read the selected file. Please try again.');
    };
    reader.onload = function (event) {
      // Simulated processing delay before the results appear.
      setTimeout(() => {
        // Show the uploaded image in the preview container.
        const imagePreviewContainer = document.querySelector('.image-preview-container');
        const previewImage = document.getElementById('preview-image');
        previewImage.src = event.target.result;
        imagePreviewContainer.style.display = 'block';

        // Record the simulated document-analysis result.
        const result = {
          type: 'document',
          content: 'Invoice #INV-2023-0456\nClient: Acme Corporation\nTotal: $1,245.00\nDue Date: 12/15/2023',
          timestamp: new Date().toISOString(),
          imageData: event.target.result
        };
        scanResults.push(result);

        // Results pane with the recognized document fields.
        textPreview.innerHTML = `
          <div class="result-header">
            <div class="result-icon">
              <i class="fas fa-file-alt"></i>
            </div>
            <h4>Document Analysis</h4>
          </div>
          <div class="result-content">
            <p class="ocr-text">Invoice #INV-2023-0456</p>
            <p class="ocr-text">Client: Acme Corporation</p>
            <p class="ocr-text">Total: $1,245.00</p>
            <p class="ocr-text">Due Date: 12/15/2023</p>
          </div>
          <p class="timestamp">Processed at ${new Date().toLocaleTimeString()}</p>
        `;

        // Append a plain-text copy of the recognized text.
        const processedTextSection = document.createElement('div');
        processedTextSection.className = 'processed-text-section';
        processedTextSection.innerHTML = `
          <h4>Processed Text</h4>
          <div class="processed-text-content">
            Invoice #INV-2023-0456
            Client: Acme Corporation
            Total: $1,245.00
            Due Date: 12/15/2023
          </div>
        `;
        textPreview.appendChild(processedTextSection);
      }, 1500);
    };
    reader.readAsDataURL(file);
  });

  fileInput.click();
});
// Function to close image preview
window.closeImagePreview = function() {
const imagePreviewContainer = document.querySelector('.image-preview-container');
imagePreviewContainer.style.display = 'none';
};
});
// Inject the CSS used by the scan/upload result panes and animations.
// Runs at script load time (outside the DOMContentLoaded handler above).
// NOTE(review): relies on CSS custom properties (--primary-color,
// --secondary-color, --border-color, --light-text, --primary-dark)
// presumably defined in the page's main stylesheet — confirm they exist.
// The .scanning-animation / .scan-line rules are not referenced by the
// markup built in this file; they may be used by other page scripts.
const style = document.createElement('style');
style.textContent = `
.scanning-animation {
position: relative;
height: 100%;
overflow: hidden;
display: flex;
flex-direction: column;
justify-content: center;
align-items: center;
gap: 15px;
}
.scan-line {
position: absolute;
top: 0;
left: 0;
width: 100%;
height: 2px;
background: var(--primary-color);
animation: scan 1.5s linear infinite;
}
@keyframes scan {
0% { top: 0; opacity: 0; }
10% { opacity: 1; }
90% { opacity: 1; }
100% { top: 100%; opacity: 0; }
}
.result-header {
display: flex;
align-items: center;
gap: 10px;
margin-bottom: 12px;
}
.result-icon {
width: 30px;
height: 30px;
background: var(--secondary-color);
border-radius: 50%;
display: flex;
align-items: center;
justify-content: center;
color: var(--primary-color);
}
.result-content {
margin: 10px 0;
}
.result-meta {
margin-top: 8px;
font-size: 0.8rem;
color: var(--light-text);
}
.confidence {
background: rgba(37, 99, 235, 0.1);
padding: 4px 8px;
border-radius: 4px;
}
.upload-animation {
position: relative;
height: 100%;
display: flex;
flex-direction: column;
justify-content: center;
align-items: center;
gap: 15px;
}
.progress-bar {
width: 80%;
height: 8px;
background: var(--border-color);
border-radius: 4px;
overflow: hidden;
}
.progress {
height: 100%;
width: 0;
background: linear-gradient(90deg, var(--primary-color), var(--primary-dark));
animation: progress 2.5s ease-out forwards;
}
@keyframes progress {
0% { width: 0; }
100% { width: 100%; }
}
`;
document.head.appendChild(style);