This doesn't appear to have changed anything — the UI still shows the same hard-coded values. Did you actually implement a vision model (or OCR service) to analyze the uploaded image?
99045dc verified | document.addEventListener('DOMContentLoaded', function() { | |
| // DOM Elements | |
| const dropZone = document.getElementById('dropZone'); | |
| const fileInput = document.getElementById('fileInput'); | |
| const imagePreview = document.getElementById('imagePreview'); | |
| const imagePreviewContainer = document.getElementById('imagePreviewContainer'); | |
| const annotationCanvas = document.getElementById('annotationCanvas'); | |
| const analyzeBtn = document.getElementById('analyzeBtn'); | |
| const exportBtn = document.getElementById('exportBtn'); | |
| const detectedFields = document.getElementById('detectedFields'); | |
| const noFieldsMessage = document.getElementById('noFieldsMessage'); | |
| const templateField = document.getElementById('templateField'); | |
| // Enhanced chemical label field patterns with better matching | |
| const FIELD_PATTERNS = { | |
| productName: /(product|item|name|chemical)\s*[::]\s*/i, | |
| manufacturer: /(manufacturer|producer|supplier|made by|company)\s*[::]\s*/i, | |
| casNumber: /(cas\s*(no|number|#)|chemical\s*abstracts\s*service)\s*[::]\s*/i, | |
| unNumber: /(un\s*(no|number|#)|un\s*id|transport\s*id)\s*[::]\s*/i, | |
| hazardSymbols: /(ghs|hazard|warning|symbol|pictogram|danger)\s*[::]\s*/i, | |
| concentration: /(concentration|purity|grade|assay|content)\s*[::]\s*/i, | |
| batchNumber: /(batch\s*(no|number|#)|lot\s*(no|number|#)|serial)\s*[::]\s*/i, | |
| expiryDate: /(expiry|expiration|best\s*before|use\s*by|valid\s*until)\s*[::]\s*/i, | |
| hazardStatements: /(hazard\s*statement|h\s*phrase|risk\s*phrase)\s*[::]\s*/i, | |
| precautionaryStatements: /(precautionary\s*statement|p\s*phrase|safety\s*phrase)\s*[::]\s*/i, | |
| signalWord: /(signal\s*word|warning|danger|caution)\s*[::]\s*/i, | |
| molecularFormula: /(molecular\s*formula|formula)\s*[::]\s*/i, | |
| molecularWeight: /(molecular\s*weight|m\.w\.|mw)\s*[::]\s*/i, | |
| density: /(density|specific\s*gravity)\s*[::]\s*/i, | |
| storageConditions: /(storage|store)\s*[::]\s*/i | |
| }; | |
| // Event listeners for drag and drop | |
| ['dragenter', 'dragover', 'dragleave', 'drop'].forEach(eventName => { | |
| dropZone.addEventListener(eventName, preventDefaults, false); | |
| }); | |
| function preventDefaults(e) { | |
| e.preventDefault(); | |
| e.stopPropagation(); | |
| } | |
| ['dragenter', 'dragover'].forEach(eventName => { | |
| dropZone.addEventListener(eventName, highlight, false); | |
| }); | |
| ['dragleave', 'drop'].forEach(eventName => { | |
| dropZone.addEventListener(eventName, unhighlight, false); | |
| }); | |
| function highlight() { | |
| dropZone.classList.add('drag-over'); | |
| } | |
| function unhighlight() { | |
| dropZone.classList.remove('drag-over'); | |
| } | |
| dropZone.addEventListener('drop', handleDrop, false); | |
| fileInput.addEventListener('change', handleFiles, false); | |
| function handleDrop(e) { | |
| const dt = e.dataTransfer; | |
| const files = dt.files; | |
| handleFiles({target: {files}}); | |
| } | |
| function handleFiles(e) { | |
| const files = e.target.files; | |
| if (!files.length) return; | |
| const file = files[0]; | |
| if (!file.type.match('image.*')) { | |
| alert('Please upload an image file'); | |
| return; | |
| } | |
| const reader = new FileReader(); | |
| reader.onload = function(e) { | |
| imagePreview.src = e.target.result; | |
| imagePreview.onload = function() { | |
| imagePreviewContainer.classList.remove('hidden'); | |
| dropZone.classList.add('hidden'); | |
| // Set canvas dimensions to match the image | |
| annotationCanvas.width = imagePreview.width; | |
| annotationCanvas.height = imagePreview.height; | |
| analyzeBtn.disabled = false; | |
| }; | |
| }; | |
| reader.readAsDataURL(file); | |
| } | |
| // Analyze button click handler | |
| analyzeBtn.addEventListener('click', async function() { | |
| analyzeBtn.disabled = true; | |
| analyzeBtn.textContent = 'Analyzing...'; | |
| try { | |
| // Get the image data | |
| const imageData = await getImageData(); | |
| // Call Google Cloud Vision API (or your preferred vision service) | |
| const results = await analyzeImageWithVisionAPI(imageData); | |
| // Process the results | |
| processVisionResults(results); | |
| // Enable export button | |
| exportBtn.disabled = false; | |
| } catch (error) { | |
| console.error('Analysis error:', error); | |
| alert('Analysis failed. Please try again.'); | |
| } finally { | |
| analyzeBtn.disabled = false; | |
| analyzeBtn.textContent = 'Analyze Label'; | |
| } | |
| }); | |
| // Export button click handler | |
| exportBtn.addEventListener('click', function() { | |
| exportData(); | |
| }); | |
| // Get image data as base64 | |
| async function getImageData() { | |
| return new Promise((resolve) => { | |
| const reader = new FileReader(); | |
| reader.onload = function(e) { | |
| // Remove data URL prefix | |
| const base64Data = e.target.result.split(',')[1]; | |
| resolve(base64Data); | |
| }; | |
| reader.readAsDataURL(fileInput.files[0]); | |
| }); | |
| } | |
| // Analyze image with OCR.space API | |
| async function analyzeImageWithVisionAPI(imageData) { | |
| const apiKey = 'K87299170688957'; // Free public demo key (rate limited) | |
| const apiUrl = 'https://api.ocr.space/parse/image'; | |
| try { | |
| const formData = new FormData(); | |
| formData.append('base64Image', `data:image/jpeg;base64,${imageData}`); | |
| formData.append('language', 'eng'); | |
| formData.append('isOverlayRequired', 'true'); | |
| formData.append('OCREngine', '2'); // Engine 2 is more accurate | |
| const response = await fetch(apiUrl, { | |
| method: 'POST', | |
| headers: { | |
| 'apikey': apiKey | |
| }, | |
| body: formData | |
| }); | |
| const data = await response.json(); | |
| if (!data.IsErroredOnProcessing) { | |
| // Format response to match our expected structure | |
| const parsedText = data.ParsedResults[0].ParsedText; | |
| const textOverlay = data.ParsedResults[0].TextOverlay; | |
| // Get bounding box of all text | |
| let minX = Infinity, minY = Infinity, maxX = 0, maxY = 0; | |
| textOverlay.Lines.forEach(line => { | |
| line.Words.forEach(word => { | |
| minX = Math.min(minX, word.Left); | |
| minY = Math.min(minY, word.Top); | |
| maxX = Math.max(maxX, word.Left + word.Width); | |
| maxY = Math.max(maxY, word.Top + word.Height); | |
| }); | |
| }); | |
| return { | |
| textAnnotations: [{ | |
| description: parsedText, | |
| boundingPoly: { | |
| vertices: [ | |
| {x: minX, y: minY}, | |
| {x: maxX, y: minY}, | |
| {x: maxX, y: maxY}, | |
| {x: minX, y: maxY} | |
| ] | |
| } | |
| }] | |
| }; | |
| } else { | |
| throw new Error(data.ErrorMessage || 'OCR processing failed'); | |
| } | |
| } catch (error) { | |
| console.error('OCR API error:', error); | |
| throw error; | |
| } | |
| } | |
| // Process vision API results | |
| function processVisionResults(results) { | |
| // Clear previous fields | |
| detectedFields.innerHTML = ''; | |
| noFieldsMessage.classList.add('hidden'); | |
| // Extract full text | |
| const fullText = results.textAnnotations[0].description; | |
| // Split into lines and process each line | |
| const lines = fullText.split('\n'); | |
| const detectedFieldsMap = new Map(); | |
| // Improved text processing with better pattern matching | |
| lines.forEach(line => { | |
| if (!line.trim()) return; | |
| // Try to match against known field patterns | |
| let matched = false; | |
| for (const [fieldType, pattern] of Object.entries(FIELD_PATTERNS)) { | |
| const match = line.match(pattern); | |
| if (match) { | |
| // Extract the field value (text after the label) | |
| let value = line.substring(match.index + match[0].length).trim(); | |
| // Clean up value (remove special characters at start/end) | |
| value = value.replace(/^[^a-zA-Z0-9]+/, '').replace(/[^a-zA-Z0-9]+$/, ''); | |
| if (value) { | |
| detectedFieldsMap.set(fieldType, { | |
| label: match[0].trim(), | |
| value: value | |
| }); | |
| matched = true; | |
| break; | |
| } | |
| } | |
| } | |
| // If no field label matched, check if it might be a value continuing from previous line | |
| if (!matched && detectedFieldsMap.size > 0) { | |
| const lastEntry = Array.from(detectedFieldsMap.entries()).pop(); | |
| const lastValue = lastEntry[1].value; | |
| // If previous value ends with incomplete punctuation or looks truncated | |
| if (!/[.!?]$/.test(lastValue) || lastValue.length < 30) { | |
| detectedFieldsMap.set(lastEntry[0], { | |
| ...lastEntry[1], | |
| value: `${lastValue} ${line.trim()}` | |
| }); | |
| } | |
| } | |
| // Check for GHS symbols (improved matching) | |
| const ghsMatch = line.match(/(corrosive|flammable|toxic|health hazard|environmental hazard|explosive|oxidizing|irritant|gas under pressure|acute toxicity)/i); | |
| if (ghsMatch) { | |
| const symbol = ghsMatch[0].toLowerCase().replace(/\s+/g, '_'); | |
| detectedFieldsMap.set(`ghs_${symbol}`, { | |
| label: 'GHS Symbol', | |
| value: ghsMatch[0] | |
| }); | |
| } | |
| }); | |
| // Add detected fields to UI | |
| detectedFieldsMap.forEach((fieldData, fieldName) => { | |
| addDetectedField(fieldName, fieldData.value); | |
| }); | |
| // Draw bounding boxes (simplified for demo) | |
| drawBoundingBoxes(results.textAnnotations[0].boundingPoly.vertices); | |
| } | |
| function addDetectedField(name, value) { | |
| const fieldElement = templateField.cloneNode(true); | |
| fieldElement.classList.remove('hidden'); | |
| fieldElement.querySelector('#fieldName').textContent = name; | |
| fieldElement.querySelector('#fieldValue').textContent = value; | |
| fieldElement.removeAttribute('id'); | |
| detectedFields.appendChild(fieldElement); | |
| } | |
| function drawBoundingBoxes(vertices) { | |
| const ctx = annotationCanvas.getContext('2d'); | |
| ctx.clearRect(0, 0, annotationCanvas.width, annotationCanvas.height); | |
| // Calculate bounding box dimensions | |
| const minX = Math.min(...vertices.map(v => v.x || 0)); | |
| const maxX = Math.max(...vertices.map(v => v.x || 0)); | |
| const minY = Math.min(...vertices.map(v => v.y || 0)); | |
| const maxY = Math.max(...vertices.map(v => v.y || 0)); | |
| const width = maxX - minX; | |
| const height = maxY - minY; | |
| // Draw main bounding box | |
| ctx.strokeStyle = '#818cf8'; | |
| ctx.lineWidth = 2; | |
| ctx.strokeRect(minX, minY, width, height); | |
| // Fill with semi-transparent color | |
| ctx.fillStyle = 'rgba(99, 102, 241, 0.1)'; | |
| ctx.fillRect(minX, minY, width, height); | |
| // Draw label | |
| ctx.fillStyle = '#4b5563'; | |
| ctx.fillRect(minX, minY - 20, 100, 20); | |
| ctx.fillStyle = 'white'; | |
| ctx.font = '12px sans-serif'; | |
| ctx.fillText('Detected Label', minX + 5, minY - 5); | |
| } | |
| function exportData() { | |
| const fields = []; | |
| document.querySelectorAll('#detectedFields > div').forEach(field => { | |
| const name = field.querySelector('span').textContent; | |
| const value = field.querySelector('div').textContent; | |
| fields.push({ | |
| field: name, | |
| value: value, | |
| confidence: 0.95, // Would come from API in real implementation | |
| timestamp: new Date().toISOString() | |
| }); | |
| }); | |
| // Create more comprehensive JSON structure | |
| const exportData = { | |
| metadata: { | |
| analyzedAt: new Date().toISOString(), | |
| imageDimensions: { | |
| width: imagePreview.naturalWidth, | |
| height: imagePreview.naturalHeight | |
| }, | |
| version: '1.0' | |
| }, | |
| fields: fields | |
| }; | |
| // Create JSON download | |
| const dataStr = JSON.stringify(exportData, null, 2); | |
| const dataUri = 'data:application/json;charset=utf-8,' + encodeURIComponent(dataStr); | |
| const exportFileDefaultName = `chemical_label_${Date.now()}.json`; | |
| const linkElement = document.createElement('a'); | |
| linkElement.setAttribute('href', dataUri); | |
| linkElement.setAttribute('download', exportFileDefaultName); | |
| linkElement.click(); | |
| } | |
| // Initialize feather icons | |
| feather.replace(); | |
| }); |